From 2c8a6f63d4173657bf3327ca6c8ec8128f8fe0a8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 16 Jul 2019 10:15:46 -0700 Subject: [PATCH 001/979] JAVA-2327: Include mapper modules in distribution --- distribution/pom.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/distribution/pom.xml b/distribution/pom.xml index 65d106bd9b1..3c5df3699d0 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -45,6 +45,16 @@ java-driver-query-builder ${project.version} + + ${project.groupId} + java-driver-mapper-runtime + ${project.version} + + + ${project.groupId} + java-driver-mapper-processor + ${project.version} + From 7d6de1769c621d394569ce9fa367a4cbb4950cb0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 16 Jul 2019 10:19:58 -0700 Subject: [PATCH 002/979] JAVA-2327: Fix javadoc links in mapper manual --- manual/mapper/daos/README.md | 6 ++-- manual/mapper/daos/delete/README.md | 14 ++++---- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 10 +++--- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 16 ++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 12 +++---- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 8 ++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- 13 files changed, 82 insertions(+), 82 deletions(-) diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index c502d5ebb86..e5183d1178e 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -135,7 +135,7 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DefaultNullSavingStrategy]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 9ff526e8a36..25ef69df1b1 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -100,13 +100,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html 
[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 90aef52a455..73b9701e4e4 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html 
+[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 7c5b6ae015c..cdb929d6e89 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -92,11 +92,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index e54b90e6e9f..cfb60c4f445 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 4261516c5ee..63720573b1f 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -102,14 +102,14 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/Row.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 5a7b3443264..f4daea12bd2 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. 
-[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 696741d78d9..d1b347614b1 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -85,12 +85,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Select.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html 
+[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 79a1fc0715f..8817f9c4ef2 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/SettableByName.html 
+[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 27ca395145a..e41ab7f1d63 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 119d0f26569..4fb1162d0ef 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -126,11 +126,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 135292ea136..1a4ff2a6e8e 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -445,21 +445,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. 
-[@ClusteringColumn]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 99b1d4fa762..43bfbc94a70 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -140,8 +140,8 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). -[CqlIdentifier]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: http://docs.datastax.com/en/drivers/java/4.0/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Mapper.html 
From cb19d372a8547db0dc86f4305d06fe609858d0e4 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 16 Jul 2019 10:34:24 -0700 Subject: [PATCH 003/979] JAVA-2328: Fix statement attributes link in mapper manual --- manual/mapper/daos/delete/README.md | 7 ++++--- manual/mapper/daos/insert/README.md | 7 ++++--- manual/mapper/daos/query/README.md | 7 ++++--- manual/mapper/daos/select/README.md | 7 ++++--- manual/mapper/daos/update/README.md | 7 ++++--- 5 files changed, 20 insertions(+), 15 deletions(-) diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 25ef69df1b1..d84a549da1b 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -44,9 +44,10 @@ the method must have corresponding parameters (same name, and a compatible Java void deleteIfDescriptionMatches(UUID productId, String expectedDescription); ``` -A [StatementAttributes](../statement_attributes/) can be added as the **last** parameter. This -allows you to customize customize certain aspects of the request (page size, timeout, etc.) at -runtime. +A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). ### Return type diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index cdb929d6e89..35fa18bcf22 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -25,9 +25,10 @@ void insertWithTtl(Product product, int ttl); The annotation can define a [null saving strategy](../null_saving/) that applies to the properties of the entity to insert. -A [StatementAttributes](../statement_attributes/) can be added as the **last** parameter. This -allows you to customize customize certain aspects of the request (page size, timeout, etc.) at -runtime. 
+A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). ### Return type diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 63720573b1f..9660add5b22 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -25,9 +25,10 @@ long countByIdAndYear(int id, int year); The annotation can define a [null saving strategy](../null_saving/) that applies to the method parameters. -A [StatementAttributes](../statement_attributes/) can be added as the **last** parameter. This -allows you to customize customize certain aspects of the request (page size, timeout, etc.) at -runtime. +A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). ### Return type diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index d1b347614b1..963efc35369 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -28,9 +28,10 @@ for each, with the same name and a compatible Java type. PagingIterable findByDescription(String searchString); ``` -A [StatementAttributes](../statement_attributes/) can be added as the **last** parameter. This -allows you to customize customize certain aspects of the request (page size, timeout, etc.) at -runtime. +A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). 
### Return type diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 4fb1162d0ef..f1b06b42249 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -73,9 +73,10 @@ template.setDescription("Coming soon"); // all other properties remain null dao.updateWhereIdIn(template, 42, 43); // Will only update 'description' on the selected rows ``` -A [StatementAttributes](../statement_attributes/) can be added as the **last** parameter. This -allows you to customize customize certain aspects of the request (page size, timeout, etc.) at -runtime. +A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). ### Return type From 5c7e39b7da4e91601d2c17e8ee73680ca70b6950 Mon Sep 17 00:00:00 2001 From: Adam Holmberg Date: Tue, 16 Jul 2019 13:46:50 -0500 Subject: [PATCH 004/979] copyright symbol --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8b85257af38..b6755a722e1 100644 --- a/README.md +++ b/README.md @@ -70,7 +70,7 @@ See the [upgrade guide](upgrade_guide/) for details. ## License -Copyright DataStax, Inc. +© DataStax, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
From 995cbeac57e605532beb44dbf4fd62f0ca85d760 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Wed, 17 Jul 2019 15:59:10 +0200 Subject: [PATCH 005/979] Developer Guide Async Examples (#1256) --- .../concurrent/LimitConcurrencyCustom.java | 139 +++++++++++++ .../LimitConcurrencyCustomAsync.java | 191 ++++++++++++++++++ 2 files changed, 330 insertions(+) create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java new file mode 100644 index 00000000000..8569b1c0340 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java @@ -0,0 +1,139 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.concurrent; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Creates a keyspace and table, and loads data using a multi-threaded approach. + * + *

This example makes usage of a {@link CqlSession#execute(String)} method, which is responsible + * for executing requests in a blocking way. It uses {@link ExecutorService} to limit number of + * concurrent request to {@code CONCURRENCY_LEVEL}. It leverages {@link CompletableFuture} to + * achieve concurrency. It maintains at most {@code IN_FLIGHT_REQUESTS} using {@link Semaphore}. + * + *

Preconditions: + * + *

    + *
  • An Apache Cassandra(R) cluster is running and accessible through the contact points + * identified by basic.contact-points (see application.conf). + *
+ * + *

Side effects: + * + *

    + *
  • creates a new keyspace "examples" in the session. If a keyspace with this name already + * exists, it will be reused; + *
  • creates a table "examples.tbl_sample_kv". If it exists already, it will be reused; + *
  • inserts a TOTAL_NUMBER_OF_INSERTS of rows into the table. + *
+ * + * @see Java driver online + * manual + */ +public class LimitConcurrencyCustom { + private static final int CONCURRENCY_LEVEL = 32; + private static final int TOTAL_NUMBER_OF_INSERTS = 10_000; + private static final int IN_FLIGHT_REQUESTS = 500; + // Semaphore for limiting number of in-flight requests. + private static final Semaphore SEMAPHORE = new Semaphore(IN_FLIGHT_REQUESTS); + + // Create CountDownLatch that wait for completion of all pending requests + private static final CountDownLatch REQUEST_LATCH = new CountDownLatch(TOTAL_NUMBER_OF_INSERTS); + + public static void main(String[] args) throws InterruptedException { + + try (CqlSession session = new CqlSessionBuilder().build()) { + createSchema(session); + insertConcurrent(session); + } + } + + private static void insertConcurrent(CqlSession session) throws InterruptedException { + PreparedStatement pst = + session.prepare( + insertInto("examples", "tbl_sample_kv") + .value("id", bindMarker("id")) + .value("value", bindMarker("value")) + .build()); + + // Used to track number of total inserts + AtomicInteger insertsCounter = new AtomicInteger(); + + // Executor service with CONCURRENCY_LEVEL number of threads that states an upper limit + // on number of request in progress. + ExecutorService executor = Executors.newFixedThreadPool(CONCURRENCY_LEVEL); + + // For every i we will insert a record to db + for (int i = 0; i < TOTAL_NUMBER_OF_INSERTS; i++) { + // Before submitting a request, we need to acquire 1 permit. + // If there is no permits available it blocks caller thread. 
+ SEMAPHORE.acquire(); + // Copy to final variable for usage in a separate thread + final int counter = i; + + // We are running CqlSession.execute in a separate thread pool (executor) + executor.submit( + () -> { + try { + session.execute(pst.bind().setUuid("id", UUID.randomUUID()).setInt("value", counter)); + insertsCounter.incrementAndGet(); + } catch (Throwable t) { + // On production you should leverage logger and use logger.error() method. + t.printStackTrace(); + } finally { + // Signal that processing of this request finishes + REQUEST_LATCH.countDown(); + // Once the request is executed, we release 1 permit. + // By doing so we allow caller thread to submit another async request. + SEMAPHORE.release(); + } + }); + } + // Await for execution of TOTAL_NUMBER_OF_INSERTS + REQUEST_LATCH.await(); + + System.out.println( + String.format( + "Finished executing %s queries with a concurrency level of %s.", + insertsCounter.get(), CONCURRENCY_LEVEL)); + // Shutdown executor to free resources + executor.shutdown(); + executor.awaitTermination(10, TimeUnit.SECONDS); + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + + session.execute( + "CREATE TABLE IF NOT EXISTS examples.tbl_sample_kv (id uuid, value int, PRIMARY KEY (id))"); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java new file mode 100644 index 00000000000..45287098e5d --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java @@ -0,0 +1,191 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.concurrent; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiConsumer; + +/** + * Creates a keyspace and table, and loads data using an async API. + * + *

This example makes usage of a {@link CqlSession#executeAsync(String)} method, which is + * responsible for executing requests in a non-blocking way. It uses {@link CompletableFuture} to + * limit number of concurrent request to {@code CONCURRENCY_LEVEL}. + * + *

Preconditions: + * + *

    + *
  • An Apache Cassandra(R) cluster is running and accessible through the contact points + * identified by basic.contact-points (see application.conf). + *
+ * + *

Side effects: + * + *

    + *
  • creates a new keyspace "examples" in the session. If a keyspace with this name already + * exists, it will be reused; + *
  • creates a table "examples.tbl_sample_kv". If it exist already, it will be reused; + *
  • inserts a TOTAL_NUMBER_OF_INSERTS of rows into the table. + *
+ * + * @see Java driver online manual + */ +public class LimitConcurrencyCustomAsync { + private static final int CONCURRENCY_LEVEL = 32; + private static final int TOTAL_NUMBER_OF_INSERTS = 10_000; + // Used to track number of total inserts + private static final AtomicInteger INSERTS_COUNTER = new AtomicInteger(); + + public static void main(String[] args) throws InterruptedException, ExecutionException { + + try (CqlSession session = new CqlSessionBuilder().build()) { + createSchema(session); + insertConcurrent(session); + } + } + + private static void insertConcurrent(CqlSession session) + throws InterruptedException, ExecutionException { + PreparedStatement pst = + session.prepare( + insertInto("examples", "tbl_sample_kv") + .value("id", bindMarker("id")) + .value("value", bindMarker("value")) + .build()); + + // Construct CONCURRENCY_LEVEL number of ranges. + // Each range will be executed independently. + List ranges = createRanges(CONCURRENCY_LEVEL, TOTAL_NUMBER_OF_INSERTS); + + // List of pending CONCURRENCY_LEVEL features that we will wait for at the end of the program. + List> pending = new ArrayList<>(); + + // Every range will have dedicated CompletableFuture handling the execution. + for (Range range : ranges) { + pending.add(executeOneAtATime(session, pst, range)); + } + + // Wait for completion of all CONCURRENCY_LEVEL pending CompletableFeatures + CompletableFuture.allOf(pending.toArray(new CompletableFuture[0])).get(); + + System.out.println( + String.format( + "Finished executing %s queries with a concurrency level of %s.", + INSERTS_COUNTER.get(), CONCURRENCY_LEVEL)); + } + + private static CompletableFuture executeOneAtATime( + CqlSession session, PreparedStatement pst, Range range) { + + CompletableFuture lastFeature = null; + for (int i = range.getFrom(); i < range.getTo(); i++) { + int counter = i; + // If this is a first request init the lastFeature. 
+ if (lastFeature == null) { + lastFeature = executeInsert(session, pst, counter); + } else { + // If lastFeature is already created, chain next async action. + // The next action will execute only after the lastFeature will finish. + // If the lastFeature finishes with failure, the subsequent chained executions + // will not be invoked. If you wish to alter that behaviour and recover from failure + // add the exceptionally() call after whenComplete() of lastFeature. + lastFeature = lastFeature.thenCompose((ignored) -> executeInsert(session, pst, counter)); + } + } + return lastFeature; + } + + private static CompletableFuture executeInsert( + CqlSession session, PreparedStatement pst, int counter) { + + return session + .executeAsync(pst.bind().setUuid("id", UUID.randomUUID()).setInt("value", counter)) + .toCompletableFuture() + .whenComplete( + (BiConsumer) + (asyncResultSet, throwable) -> { + if (throwable == null) { + // When the Feature completes and there is no exception - increment counter. + INSERTS_COUNTER.incrementAndGet(); + } else { + // On production you should leverage logger and use logger.error() method. + throwable.printStackTrace(); + } + }); + } + + private static List createRanges(int concurrencyLevel, int totalNumberOfInserts) { + ArrayList ranges = new ArrayList<>(); + int numberOfElementsInRange = totalNumberOfInserts / concurrencyLevel; + // Create concurrencyLevel number of Ranges. + for (int i = 0; i < concurrencyLevel; i++) { + // If this is a last range give it all remaining elements. + // It may be longer than numberOfElementsInRange in case of + // totalNumberOfInserts / concurrencyLevel will return floating point number. + if (i == concurrencyLevel - 1) { + ranges.add(new Range(i * numberOfElementsInRange, totalNumberOfInserts)); + } else { + // Construct Ranges with numberOfElementsInRange elements. 
+ ranges.add(new Range(i * numberOfElementsInRange, (i + 1) * numberOfElementsInRange)); + } + } + return ranges; + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + + session.execute( + "CREATE TABLE IF NOT EXISTS examples.tbl_sample_kv (id uuid, value int, PRIMARY KEY (id))"); + } + + private static class Range { + private final int from; + private final int to; + + private Range(int from, int to) { + this.from = from; + this.to = to; + } + + public int getFrom() { + return from; + } + + public int getTo() { + return to; + } + + @Override + public String toString() { + return "Range{" + "from=" + from + ", to=" + to + '}'; + } + } +} From fff24ce1b23818cf31d70e4451b254e3f8055935 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 11 Jul 2019 09:55:34 -0700 Subject: [PATCH 006/979] JAVA-2332: Destroy connection pool when a node gets removed --- changelog/README.md | 1 + .../core/metadata/NodeStateEvent.java | 13 ++++ .../internal/core/session/PoolManager.java | 8 ++- .../api/core/session/RemovedNodeIT.java | 68 +++++++++++++++++++ .../driver/api/testinfra/ccm/CcmBridge.java | 4 ++ 5 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java diff --git a/changelog/README.md b/changelog/README.md index 3755de324c2..cb8bb96c9db 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [bug] JAVA-2332: Destroy connection pool when a node gets removed - [bug] JAVA-2324: Add support for primitive shorts in mapper - [bug] JAVA-2325: Allow "is" prefix for boolean getters in mapped entities - [improvement] JAVA-2308: Add customWhereClause to `@Delete` diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateEvent.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateEvent.java index 8a5d9e54f48..6365d65626f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateEvent.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateEvent.java @@ -20,6 +20,12 @@ import java.util.Objects; import net.jcip.annotations.Immutable; +/** + * The transition of a node from one {@linkplain NodeState state} to another. + * + *

For simplicity, this is also used to represent a node addition ({@code oldState=null, + * newState=UNKNOWN}) or removal ({@code oldState=newState=null}). + */ @Immutable public class NodeStateEvent { public static NodeStateEvent changed(NodeState oldState, NodeState newState, DefaultNode node) { @@ -36,8 +42,15 @@ public static NodeStateEvent removed(DefaultNode node) { return new NodeStateEvent(null, null, node); } + /** The state before the change, or {@code null} if this is an addition or a removal. */ public final NodeState oldState; + + /** + * The state after the change ({@link NodeState#UNKNOWN} if the node was just added), or {@code + * null} if this is a removal. + */ public final NodeState newState; + public final DefaultNode node; private NodeStateEvent(NodeState oldState, NodeState newState, DefaultNode node) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java index 610669e965b..b43c0c8c448 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java @@ -314,10 +314,14 @@ private void processStateEvent(NodeStateEvent event) { NodeState newState = event.newState; if (pending.containsKey(node)) { pendingStateEvents.put(node, event); - } else if (newState == NodeState.FORCED_DOWN) { + } else if (newState == null || newState == NodeState.FORCED_DOWN) { ChannelPool pool = pools.remove(node); if (pool != null) { - LOG.debug("[{}] {} was FORCED_DOWN, destroying pool", logPrefix, node); + LOG.debug( + "[{}] {} was {}, destroying pool", + logPrefix, + node, + newState == null ? 
"removed" : newState.name()); pool.closeAsync() .exceptionally( error -> { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java new file mode 100644 index 00000000000..4c3bb0b7fef --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.session; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.utils.ConditionChecker; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import org.junit.ClassRule; +import org.junit.Test; + +public class RemovedNodeIT { + + @ClassRule public static CustomCcmRule ccmRule = CustomCcmRule.builder().withNodes(2).build(); + + @Test + public void should_signal_and_destroy_pool_when_node_gets_removed() { + RemovalListener removalListener = new RemovalListener(); + try (CqlSession session = CqlSession.builder().withNodeStateListener(removalListener).build()) { + ccmRule.getCcmBridge().nodetool(2, "decommission"); + ConditionChecker.checkThat(() -> removalListener.removedNode != null).becomesTrue(); + + Map pools = ((DefaultSession) session).getPools(); + ConditionChecker.checkThat(() -> pools.containsKey(removalListener.removedNode)) + .becomesFalse(); + } + } + + static class RemovalListener implements NodeStateListener { + + volatile Node removedNode; + + @Override + public void onRemove(@NonNull Node node) { + removedNode = node; + } + + @Override + public void onAdd(@NonNull Node node) {} + + @Override + public void onUp(@NonNull Node node) {} + + @Override + public void onDown(@NonNull Node node) {} + + @Override + public void close() throws Exception {} + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 54783a3d664..f10a2cdeba3 100644 --- 
a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -228,6 +228,10 @@ public void create() { } } + public void nodetool(int node, String... args) { + execute(String.format("node%d nodetool %s", node, Joiner.on(" ").join(args))); + } + public void dsetool(int node, String... args) { execute(String.format("node%d dsetool %s", node, Joiner.on(" ").join(args))); } From 0927315e7abdc7c8c0c7da5b3a1761cda5707192 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 18 Jul 2019 13:40:32 -0700 Subject: [PATCH 007/979] Improve javadocs of Session.getName() --- .../datastax/oss/driver/api/core/session/Session.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java index 65c49988f0c..498cea42935 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java @@ -64,9 +64,15 @@ public interface Session extends AsyncAutoCloseable { Session.class.getResource("/com/datastax/oss/driver/Driver.properties")); /** - * The unique name identifying this client. + * The unique name identifying this session instance. This is used as a prefix for log messages + * and metrics. * - * @see DefaultDriverOption#SESSION_NAME + *

This gets populated from the option {@code basic.session-name} in the configuration. If that + * option is absent, the driver will generate an identifier composed of the letter 's' followed by + * an incrementing counter. + * + *

Note that this is purely a client-side identifier; in particular, it has no relation with + * {@code system.local.cluster_name} on the server. */ @NonNull String getName(); From 434c93fb6c6edd4dacdd37be9d02fef05be3e4be Mon Sep 17 00:00:00 2001 From: olim7t Date: Sun, 7 Jul 2019 16:06:06 -0700 Subject: [PATCH 008/979] JAVA-2320: Expose more attributes on mapper Select for individual query clauses --- changelog/README.md | 1 + .../driver/mapper/SelectOtherClausesIT.java | 289 ++++++++++++++++++ manual/mapper/daos/select/README.md | 33 +- .../processor/dao/DaoMethodGenerator.java | 67 ++-- .../dao/DaoSelectMethodGenerator.java | 142 +++++++-- .../dao/DaoInsertMethodGeneratorTest.java | 4 +- .../dao/DaoSelectMethodGeneratorTest.java | 35 --- .../driver/api/mapper/annotations/Select.java | 65 +++- 8 files changed, 519 insertions(+), 117 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java diff --git a/changelog/README.md b/changelog/README.md index cb8bb96c9db..e52857b5d8b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2320: Expose more attributes on mapper Select for individual query clauses - [bug] JAVA-2332: Destroy connection pool when a node gets removed - [bug] JAVA-2324: Add support for primitive shorts in mapper - [bug] JAVA-2325: Allow "is" prefix for boolean getters in mapped entities diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java new file mode 100644 index 00000000000..2bbcf3bef98 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java @@ -0,0 +1,289 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.PagingIterable; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; +import com.datastax.oss.driver.api.mapper.annotations.Computed; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import org.junit.BeforeClass; +import 
org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@CassandraRequirement(min = "3.6", description = "Uses PER PARTITION LIMIT") +public class SelectOtherClausesIT { + + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + private static SimpleDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + for (String query : + ImmutableList.of("CREATE TABLE simple (k int, cc int, v int, PRIMARY KEY (k, cc))")) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + + TestMapper mapper = TestMapper.builder(session).build(); + dao = mapper.simpleDao(sessionRule.keyspace()); + + for (int k = 0; k < 2; k++) { + for (int cc = 0; cc < 10; cc++) { + dao.insert(new Simple(k, cc, 1)); + } + } + } + + @Test + public void should_select_with_limit() { + PagingIterable elements = dao.selectWithLimit(10); + assertThat(elements.isFullyFetched()).isTrue(); + assertThat(elements.getAvailableWithoutFetching()).isEqualTo(10); + + elements = dao.selectWithLimit(0, 5); + assertThat(elements.isFullyFetched()).isTrue(); + assertThat(elements.getAvailableWithoutFetching()).isEqualTo(5); + + elements = dao.selectWithLimit(0, 0, 1); + assertThat(elements.isFullyFetched()).isTrue(); + assertThat(elements.getAvailableWithoutFetching()).isEqualTo(1); + } + + @Test + public void should_select_with_per_partition_limit() { + PagingIterable elements = dao.selectWithPerPartitionLimit(5); + assertThat(elements.isFullyFetched()).isTrue(); + assertThat(elements.getAvailableWithoutFetching()).isEqualTo(10); + + Map elementCountPerPartition = new HashMap<>(); + for (Simple 
element : elements) { + elementCountPerPartition.compute(element.getK(), (k, v) -> (v == null) ? 1 : v + 1); + } + assertThat(elementCountPerPartition).hasSize(2).containsEntry(0, 5).containsEntry(1, 5); + } + + @Test + public void should_select_with_order_by() { + PagingIterable elements = dao.selectByCcDesc(0); + int previousCc = Integer.MAX_VALUE; + for (Simple element : elements) { + assertThat(element.getCc()).isLessThan(previousCc); + previousCc = element.getCc(); + } + } + + @Test + public void should_select_with_group_by() { + PagingIterable sums = dao.selectSumByK(); + assertThat(sums.all()).hasSize(2).containsOnly(new Sum(0, 10), new Sum(1, 10)); + } + + @Test + public void should_select_with_allow_filtering() { + PagingIterable elements = dao.selectByCc(1); + assertThat(elements.all()).hasSize(2).containsOnly(new Simple(0, 1, 1), new Simple(1, 1, 1)); + } + + @Mapper + public interface TestMapper { + @DaoFactory + SimpleDao simpleDao(@DaoKeyspace CqlIdentifier keyspace); + + static MapperBuilder builder(CqlSession session) { + return new SelectOtherClausesIT_TestMapperBuilder(session); + } + } + + @Dao + public interface SimpleDao { + @Insert + void insert(Simple simple); + + @Select(limit = ":l") + PagingIterable selectWithLimit(@CqlName("l") int l); + + @Select(limit = ":l") + PagingIterable selectWithLimit(int k, @CqlName("l") int l); + + /** + * Contrived since the query will return at most a single row, but this is just to check that + * {@code l} doesn't need an explicit name when the full primary key is provided. 
+ */ + @Select(limit = ":l") + PagingIterable selectWithLimit(int k, int cc, int l); + + @Select(perPartitionLimit = ":perPartitionLimit") + PagingIterable selectWithPerPartitionLimit( + @CqlName("perPartitionLimit") int perPartitionLimit); + + @Select(orderBy = "cc DESC") + PagingIterable selectByCcDesc(int k); + + @Select(groupBy = "k") + PagingIterable selectSumByK(); + + @Select(customWhereClause = "cc = :cc", allowFiltering = true) + PagingIterable selectByCc(int cc); + } + + @Entity + public static class Simple { + @PartitionKey private int k; + @ClusteringColumn private int cc; + private int v; + + public Simple() {} + + public Simple(int k, int cc, int v) { + this.k = k; + this.cc = cc; + this.v = v; + } + + public int getK() { + return k; + } + + public void setK(int k) { + this.k = k; + } + + public int getCc() { + return cc; + } + + public void setCc(int cc) { + this.cc = cc; + } + + public int getV() { + return v; + } + + public void setV(int v) { + this.v = v; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof Simple) { + Simple that = (Simple) other; + return this.k == that.k && this.cc == that.cc && this.v == that.v; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(k, cc, v); + } + + @Override + public String toString() { + return String.format("Simple(%d, %d, %d)", k, cc, v); + } + } + + @Entity + @CqlName("simple") + public static class Sum { + private int k; + + @Computed("sum(v)") + private int value; + + public Sum() {} + + public Sum(int k, int value) { + this.k = k; + this.value = value; + } + + public int getK() { + return k; + } + + public void setK(int k) { + this.k = k; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof Sum) { + Sum 
that = (Sum) other; + return this.k == that.k && this.value == that.value; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(k, value); + } + + @Override + public String toString() { + return String.format("Sum(%d, %d)", k, value); + } + } +} diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index a5024856838..aea98135b6a 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -13,11 +13,11 @@ public interface ProductDao { ### Parameters -If the annotation doesn't have a `customWhereClause`, the mapper defaults to a selection by primary -key (partition key + clustering columns). The method's parameters must match the types of the -[primary key columns](../../entities/#primary-key-columns), in the exact order (as defined by the -[@PartitionKey] and [@ClusteringColumn] annotations). The parameter names don't necessarily need to -match the names of the columns. +If the annotation doesn't have a [customWhereClause()], the mapper defaults to a selection by +primary key (partition key + clustering columns). The method's parameters must match the types of +the [primary key columns](../../entities/#primary-key-columns), in the exact order (as defined by +the [@PartitionKey] and [@ClusteringColumn] annotations). The parameter names don't necessarily need +to match the names of the columns. To select more than one entity within a partition, a subset of primary key components may be specified as long as enough parameters are provided to account for the partition key. @@ -51,7 +51,7 @@ public interface ProductDao { } ``` -If the annotation has a `customWhereClause`, it completely replaces the WHERE clause. The provided +If the annotation has a [customWhereClause()], it completely replaces the WHERE clause. The provided string can contain named placeholders. 
In that case, the method must have a corresponding parameter for each, with the same name and a compatible Java type. @@ -60,6 +60,21 @@ for each, with the same name and a compatible Java type. PagingIterable findByDescription(String searchString); ``` +The generated SELECT query can be further customized with [limit()], [perPartitionLimit()], +[orderBy()], [groupBy()] and [allowFiltering()]. Some of these clauses can also contain placeholders +whose values will be provided through additional method parameters. Note that it is sometimes not +possible to determine if a parameter is a primary key component or a placeholder value; therefore +the rule is that **if your method takes a partial primary key, the first parameter that is not a +primary key component must be explicitly annotated with +[@CqlName](../../entities/#user-provided-names)**. For example if the primary key is `((day int, +hour int, minute int), ts timestamp)`: + +```java +// Annotate 'l' so that it's not mistaken for the second PK component +@Select(limit = ":l") +PagingIterable findDailySales(int day, @CqlName("l") int l); +``` + A `Function` or `UnaryOperator` can be added as the **last** parameter. It will be applied to the statement before execution. This allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See @@ -122,6 +137,12 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). 
[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html [@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html [@Select]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 1c1ed756366..3ef07d67d0c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -79,43 +79,22 @@ protected 
DaoReturnType parseAndValidateReturnType( } protected void maybeAddTtl(String ttl, MethodSpec.Builder methodBuilder) { - if (!ttl.isEmpty()) { - if (ttl.startsWith(":")) { - String bindMarkerName = ttl.substring(1); - try { - CqlIdentifier.fromCql(bindMarkerName); - } catch (IllegalArgumentException ignored) { - context - .getMessager() - .warn( - methodElement, - "Invalid ttl value: " - + "'%s' is not a valid placeholder, the generated query will probably fail", - ttl); - } - methodBuilder.addCode(".usingTtl($T.bindMarker($S))", QueryBuilder.class, bindMarkerName); - } else { - try { - Integer.parseInt(ttl); - } catch (NumberFormatException ignored) { - context - .getMessager() - .warn( - methodElement, - "Invalid ttl value: " - + "'%s' is not a bind marker name and can't be parsed as a literal integer " - + "either, the generated query will probably fail", - ttl); - } - methodBuilder.addCode(".usingTtl($L)", ttl); - } - } + maybeAddSimpleClause(ttl, Integer::parseInt, "usingTtl", "ttl", methodBuilder); } protected void maybeAddTimestamp(String timestamp, MethodSpec.Builder methodBuilder) { - if (!timestamp.isEmpty()) { - if (timestamp.startsWith(":")) { - String bindMarkerName = timestamp.substring(1); + maybeAddSimpleClause(timestamp, Long::parseLong, "usingTimestamp", "timestamp", methodBuilder); + } + + protected void maybeAddSimpleClause( + String annotationValue, + Function numberParser, + String dslMethodName, + String valueDescription, + MethodSpec.Builder methodBuilder) { + if (!annotationValue.isEmpty()) { + if (annotationValue.startsWith(":")) { + String bindMarkerName = annotationValue.substring(1); try { CqlIdentifier.fromCql(bindMarkerName); } catch (IllegalArgumentException ignored) { @@ -123,26 +102,30 @@ protected void maybeAddTimestamp(String timestamp, MethodSpec.Builder methodBuil .getMessager() .warn( methodElement, - "Invalid timestamp value: " + "Invalid " + + valueDescription + + " value: " + "'%s' is not a valid placeholder, the generated 
query will probably fail", - timestamp); + annotationValue); } methodBuilder.addCode( - ".usingTimestamp($T.bindMarker($S))", QueryBuilder.class, bindMarkerName); + ".$L($T.bindMarker($S))", dslMethodName, QueryBuilder.class, bindMarkerName); } else { try { - Long.parseLong(timestamp); + Number ignored = numberParser.apply(annotationValue); } catch (NumberFormatException ignored) { context .getMessager() .warn( methodElement, - "Invalid timestamp value: " - + "'%s' is not a bind marker name and can't be parsed as a literal long " + "Invalid " + + valueDescription + + " value: " + + "'%s' is not a bind marker name and can't be parsed as a number literal " + "either, the generated query will probably fail", - timestamp); + annotationValue); } - methodBuilder.addCode(".usingTimestamp($L)", timestamp); + methodBuilder.addCode(".$L($L)", dslMethodName, annotationValue); } } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 6eac11af07e..b721c5b4936 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -25,15 +25,19 @@ import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.entity.EntityDefinition; import 
com.datastax.oss.driver.internal.mapper.processor.entity.PropertyDefinition; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.google.common.base.Splitter; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.MethodSpec; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; @@ -77,8 +81,10 @@ public Optional generate() { EntityDefinition entityDefinition = context.getEntityFactory().getDefinition(entityElement); // Validate the parameters: - // - if there is a custom clause, they are free-form (they'll be used as bind variables) - // - otherwise, they must be an exact match for the entity's primary key + // - if there is a custom clause, they are free-form (they'll be used as bind variables) + // - otherwise, we accept the primary key components or a subset thereof (possibly empty to + // select all rows), followed by free-form parameters bound to the secondary clauses (such as + // LIMIT). // In either case, a Function can be added in last // position. List parameters = methodElement.getParameters(); @@ -87,32 +93,61 @@ public Optional generate() { if (boundStatementFunction != null) { parameters = parameters.subList(0, parameters.size() - 1); } + + final List primaryKeyParameters; + final List freeFormParameters; Select selectAnnotation = methodElement.getAnnotation(Select.class); assert selectAnnotation != null; // otherwise we wouldn't have gotten into this class String customClause = selectAnnotation.customWhereClause(); - // select without where criteria is ok. - // if parameters are provided, we must have at least enough to match partition key. 
- if (customClause.isEmpty() - && !parameters.isEmpty() + if (parameters.isEmpty()) { + primaryKeyParameters = freeFormParameters = Collections.emptyList(); + } else if (customClause.isEmpty()) { + // If we have a partial primary key *and* free-form parameters, things get ambiguous because + // we don't know where the primary key ends. By convention, we require the first free-form + // parameter to be annotated with @CqlName in those cases. + // So the boundary is either when we have enough parameters for a full primary key, or when we + // encounter the first annotated parameter. + int firstNamedParameter = parameters.size(); + for (int i = 0; i < parameters.size(); i++) { + if (parameters.get(i).getAnnotation(CqlName.class) != null) { + firstNamedParameter = i; + break; + } + } + int primaryKeyEnd = Math.min(firstNamedParameter, entityDefinition.getPrimaryKey().size()); + if (primaryKeyEnd >= parameters.size()) { + primaryKeyParameters = parameters; + freeFormParameters = Collections.emptyList(); + } else { + primaryKeyParameters = parameters.subList(0, primaryKeyEnd); + freeFormParameters = parameters.subList(primaryKeyEnd, parameters.size()); + } + } else { + primaryKeyParameters = Collections.emptyList(); + freeFormParameters = parameters; + } + + // If we have parameters for some primary key components, validate that the types match: + if (!primaryKeyParameters.isEmpty() && !EntityUtils.areParametersValid( context, methodElement, entityElement, entityDefinition, - parameters, + primaryKeyParameters, Select.class, "don't use a custom clause")) { return Optional.empty(); } // Generate the method: - final int parameterSize = parameters.size(); String helperFieldName = enclosingClass.addEntityHelperField(ClassName.get(entityElement)); String statementName = enclosingClass.addPreparedStatement( methodElement, (methodBuilder, requestName) -> - generateSelectRequest(methodBuilder, requestName, helperFieldName, parameterSize)); + generateSelectRequest( + 
methodBuilder, requestName, helperFieldName, primaryKeyParameters.size())); CodeBlock.Builder methodBodyBuilder = CodeBlock.builder(); @@ -122,25 +157,26 @@ public Optional generate() { statementName); populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - if (parameters.size() > 0) { - if (customClause.isEmpty()) { - // Parameters are the PK components, we allow them to be named differently - List primaryKeyNames = - entityDefinition.getPrimaryKey().stream() - .map(PropertyDefinition::getCqlName) - .collect(Collectors.toList()); - warnIfCqlNamePresent(parameters); + + if (!primaryKeyParameters.isEmpty()) { + List primaryKeyNames = + entityDefinition.getPrimaryKey().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()) + .subList(0, primaryKeyParameters.size()); + GeneratedCodePatterns.bindParameters( + primaryKeyParameters, primaryKeyNames, methodBodyBuilder, enclosingClass, context, false); + } + + if (!freeFormParameters.isEmpty()) { + if (validateCqlNamesPresent(freeFormParameters)) { GeneratedCodePatterns.bindParameters( - parameters, primaryKeyNames, methodBodyBuilder, enclosingClass, context, false); + freeFormParameters, methodBodyBuilder, enclosingClass, context, false); } else { - if (validateCqlNamesPresent(parameters)) { - GeneratedCodePatterns.bindParameters( - parameters, methodBodyBuilder, enclosingClass, context, false); - } else { - return Optional.empty(); - } + return Optional.empty(); } } + methodBodyBuilder .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); @@ -157,22 +193,66 @@ private void generateSelectRequest( MethodSpec.Builder methodBuilder, String requestName, String helperFieldName, - int parameterSize) { - String customWhereClause = methodElement.getAnnotation(Select.class).customWhereClause(); + int numberOfPrimaryKeyPartsInWhereClause) { + Select annotation = 
methodElement.getAnnotation(Select.class); + String customWhereClause = annotation.customWhereClause(); if (customWhereClause.isEmpty()) { - methodBuilder.addStatement( - "$T $L = $L.selectByPrimaryKeyParts($L).build()", + methodBuilder.addCode( + "$[$T $L = $L.selectByPrimaryKeyParts($L)", SimpleStatement.class, requestName, helperFieldName, - parameterSize); + numberOfPrimaryKeyPartsInWhereClause); } else { - methodBuilder.addStatement( - "$T $L = $L.selectStart().whereRaw($S).build()", + methodBuilder.addCode( + "$[$T $L = $L.selectStart().whereRaw($S)", SimpleStatement.class, requestName, helperFieldName, customWhereClause); } + maybeAddSimpleClause(annotation.limit(), Integer::parseInt, "limit", "limit", methodBuilder); + maybeAddSimpleClause( + annotation.perPartitionLimit(), + Integer::parseInt, + "perPartitionLimit", + "perPartitionLimit", + methodBuilder); + for (String orderingSpec : annotation.orderBy()) { + addOrdering(orderingSpec, methodBuilder); + } + for (String groupByColumn : annotation.groupBy()) { + methodBuilder.addCode(".groupBy($S)", groupByColumn); + } + if (annotation.allowFiltering()) { + methodBuilder.addCode(".allowFiltering()"); + } + methodBuilder.addCode(".build();$]\n"); } + + private void addOrdering(String orderingSpec, MethodSpec.Builder methodBuilder) { + List tokens = ON_SPACES.splitToList(orderingSpec); + ClusteringOrder clusteringOrder; + if (tokens.size() != 2 || (clusteringOrder = parseClusteringOrder(tokens.get(1))) == null) { + context + .getMessager() + .error( + methodElement, + "Can't parse ordering '%s', expected a column name followed by ASC or DESC", + orderingSpec); + return; + } + methodBuilder.addCode( + ".orderBy($S, $T.$L)", tokens.get(0), ClusteringOrder.class, clusteringOrder); + } + + private ClusteringOrder parseClusteringOrder(String spec) { + try { + return ClusteringOrder.valueOf(spec.toUpperCase()); + } catch (IllegalArgumentException e) { + return null; + } + } + + private static final Splitter 
ON_SPACES = Splitter.on(' ').omitEmptyStrings(); } diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java index b118dd626ee..5f986c335fa 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java @@ -90,7 +90,7 @@ public static Object[][] warningSignatures() { }, { "Invalid ttl value: " - + "'foo' is not a bind marker name and can't be parsed as a literal integer either, " + + "'foo' is not a bind marker name and can't be parsed as a number literal either, " + "the generated query will probably fail", MethodSpec.methodBuilder("insert") .addAnnotation( @@ -113,7 +113,7 @@ public static Object[][] warningSignatures() { }, { "Invalid timestamp value: " - + "'foo' is not a bind marker name and can't be parsed as a literal long either, " + + "'foo' is not a bind marker name and can't be parsed as a number literal either, " + "the generated query will probably fail", MethodSpec.methodBuilder("insert") .addAnnotation( diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java index 87b829cac50..0cabc78d435 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java @@ -15,16 +15,12 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; -import 
com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.annotations.Select; -import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import java.util.UUID; import java.util.concurrent.CompletionStage; import javax.lang.model.element.Modifier; import org.junit.Test; @@ -74,37 +70,6 @@ public static Object[][] invalidSignatures() { .returns(ENTITY_CLASS_NAME) .build(), }, - { - "Select methods that don't use a custom clause must match the primary key components " - + "in the exact order (expected primary key of Product: [java.util.UUID]). Too many " - + "parameters provided", - MethodSpec.methodBuilder("select") - .addAnnotation(Select.class) - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .addParameter(UUID.class, "id") - .addParameter(String.class, "extra") - .returns(ENTITY_CLASS_NAME) - .build(), - }, }; } - - @Test - public void should_warn_when_non_bind_marker_has_cql_name() { - should_succeed_with_expected_warning( - "Method select(java.util.UUID): parameter id does not refer " - + "to a bind marker, @CqlName annotation will be ignored", - MethodSpec.methodBuilder("select") - .addAnnotation(Select.class) - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .addParameter( - ParameterSpec.builder(UUID.class, "id") - .addAnnotation( - AnnotationSpec.builder(CqlName.class) - .addMember("value", "$S", "irrelevant") - .build()) - .build()) - .returns(ENTITY_CLASS_NAME) - .build()); - } } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java index 6238e1e3d5f..ef62c55e48d 100644 --- 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java @@ -50,7 +50,9 @@ * (partition key + clustering columns). The method's parameters must match the types of the primary * key columns, in the exact order (which is defined by the integer values of the {@link * PartitionKey} and {@link ClusteringColumn} annotations in the entity class). The parameter names - * don't necessarily need to match the names of the columns. + * don't necessarily need to match the names of the columns. It is also possible for the method to + * only take a partial primary key (the first n columns), in which case it will return + * multiple entities. * *

If {@link #customWhereClause()} is not empty, it completely replaces the WHERE clause. The * provided string can contain named placeholders. In that case, the method must have a @@ -61,6 +63,21 @@ * PagingIterable<Product> findByDescription(String searchString); * * + * The generated SELECT query can be further customized with {@link #limit()}, {@link + * #perPartitionLimit()}, {@link #orderBy()}, {@link #groupBy()} and {@link #allowFiltering()}. Some + * of these clauses can also contain placeholders whose values will be provided through additional + * method parameters. Note that it is sometimes not possible to determine if a parameter is a + * primary key component or a placeholder value; therefore the rule is that if your method takes + * a partial primary key, the first parameter that is not a primary key component must be explicitly + * annotated with {@link CqlName}. For example if the primary key is {@code ((day int, hour int, + * minute int), ts timestamp)}: + * + *

+ * // Annotate 'l' so that it's not mistaken for the second PK component
+ * @Select(limit = ":l")
+ * PagingIterable<Sale> findDailySales(int day, @CqlName("l") int l);
+ * 
+ * *

A {@link Function Function<BoundStatementBuilder, BoundStatementBuilder>} or {@link * UnaryOperator UnaryOperator<BoundStatementBuilder>} can be added as the last * parameter. It will be applied to the statement before execution. This allows you to customize @@ -130,4 +147,50 @@ * the top-level javadocs of this class for more explanations. */ String customWhereClause() default ""; + + /** + * The LIMIT to use in the SELECT query. + * + *

If this starts with ":", it is interpreted as a named placeholder (that must have a + * corresponding parameter in the method signature). Otherwise, it must be a literal integer + * value. + * + *

If the placeholder name is invalid or the literal can't be parsed as an integer (according + * to the rules of {@link Integer#parseInt(String)}), the mapper will issue a compile-time + * warning. + */ + String limit() default ""; + + /** + * The PER PARTITION LIMIT to use in the SELECT query. + * + *

If this starts with ":", it is interpreted as a named placeholder (that must have a + * corresponding parameter in the method signature). Otherwise, it must be a literal integer + * value. + * + *

If the placeholder name is invalid or the literal can't be parsed as an integer (according + * to the rules of {@link Integer#parseInt(String)}), the mapper will issue a compile-time + * warning. + */ + String perPartitionLimit() default ""; + + /** + * A list of orderings to add to an ORDER BY clause in the SELECT query. + * + *

Each element must be a column name followed by a space and the word "ASC" or "DESC". If + * there are multiple columns, pass an array: + * + *

+   * @Select(orderBy = {"hour DESC", "minute DESC"})
+   * 
+ * + *

If an element can't be parsed, the mapper will issue a compile-time error. + */ + String[] orderBy() default {}; + + /** A list of column names to be added to a GROUP BY clause in the SELECT query. */ + String[] groupBy() default {}; + + /** Whether to add an ALLOW FILTERING clause to the SELECT query. */ + boolean allowFiltering() default false; } From b4aa02da3282198f21e9b32f82be2ff0d35e69fd Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 3 Jul 2019 10:19:31 -0700 Subject: [PATCH 009/979] JAVA-2306: Clear security tokens from memory immediately after use --- changelog/README.md | 1 + .../oss/driver/api/core/auth/Authenticator.java | 12 ++++++++++-- .../oss/driver/api/core/auth/SyncAuthenticator.java | 12 ++++++++++-- .../core/auth/PlainTextAuthProviderBase.java | 2 +- pom.xml | 2 +- 5 files changed, 23 insertions(+), 6 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index e52857b5d8b..83fb5628215 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2306: Clear security tokens from memory immediately after use - [improvement] JAVA-2320: Expose more attributes on mapper Select for individual query clauses - [bug] JAVA-2332: Destroy connection pool when a node gets removed - [bug] JAVA-2324: Add support for primitive shorts in mapper diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/Authenticator.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/Authenticator.java index dd92762577e..4f55545731a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/Authenticator.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/Authenticator.java @@ -57,7 +57,11 @@ public interface Authenticator { * Obtain an initial response token for initializing the SASL handshake. * * @return a completion stage that will complete with the initial response to send to the server - * (which may be {@code null}). + * (which may be {@code null}). 
Note that, if the returned byte buffer is writable, the driver + * will clear its contents immediately after use (to avoid keeping sensitive + * information in memory); do not reuse the same buffer across multiple invocations. + * Alternatively, if the contents are not sensitive, you can make the buffer {@linkplain + * ByteBuffer#asReadOnlyBuffer() read-only} and safely reuse it. */ @NonNull CompletionStage initialResponse(); @@ -68,7 +72,11 @@ public interface Authenticator { * * @param challenge the server's SASL challenge. * @return a completion stage that will complete with the updated SASL token (which may be null to - * indicate the client requires no further action). + * indicate the client requires no further action). Note that, if the returned byte buffer is + * writable, the driver will clear its contents immediately after use (to avoid keeping + * sensitive information in memory); do not reuse the same buffer across multiple invocations. + * Alternatively, if the contents are not sensitive, you can make the buffer {@linkplain + * ByteBuffer#asReadOnlyBuffer() read-only} and safely reuse it. */ @NonNull CompletionStage evaluateChallenge(@Nullable ByteBuffer challenge); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/SyncAuthenticator.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/SyncAuthenticator.java index d2d1d5d5f3b..f1a8acfb11a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/SyncAuthenticator.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/SyncAuthenticator.java @@ -35,7 +35,11 @@ public interface SyncAuthenticator extends Authenticator { *

{@link #initialResponse()} calls this and wraps the result in an immediately completed * future. * - * @return The initial response to send to the server (which may be {@code null}). + * @return The initial response to send to the server (which may be {@code null}). Note that, if + * the returned byte buffer is writable, the driver will clear its contents immediately + * after use (to avoid keeping sensitive information in memory); do not reuse the same buffer + * across multiple invocations. Alternatively, if the contents are not sensitive, you can make + * the buffer {@linkplain ByteBuffer#asReadOnlyBuffer() read-only} and safely reuse it. */ @Nullable ByteBuffer initialResponseSync(); @@ -48,7 +52,11 @@ public interface SyncAuthenticator extends Authenticator { * * @param challenge the server's SASL challenge; may be {@code null}. * @return The updated SASL token (which may be {@code null} to indicate the client requires no - * further action). + * further action). Note that, if the returned byte buffer is writable, the driver will + * clear its contents immediately after use (to avoid keeping sensitive information in + * memory); do not reuse the same buffer across multiple invocations. Alternatively, if the + * contents are not sensitive, you can make the buffer {@linkplain + * ByteBuffer#asReadOnlyBuffer() read-only} and safely reuse it. 
*/ @Nullable ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer challenge); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java index 723c1dbf584..55a5999d05e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java @@ -149,7 +149,7 @@ private ByteBuffer toUtf8Bytes(char[] charArray) { @Override @Nullable public ByteBuffer initialResponseSync() { - return initialToken.duplicate(); + return initialToken; } @Override diff --git a/pom.xml b/pom.xml index d191a3d7664..b3971df48af 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,7 @@ 25.1-jre 2.1.11 4.0.5 - 1.4.5 + 1.4.6-SNAPSHOT 4.1.34.Final 1.7.26 From 8e1d9955ff28c482b448742bdc90ed1ceeb278b9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 22 Jul 2019 09:46:53 -0700 Subject: [PATCH 010/979] Install dependencies in the Travis build environment if they are snapshots --- .travis.yml | 1 + install-snapshots.sh | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100755 install-snapshots.sh diff --git a/.travis.yml b/.travis.yml index a7f970a8c20..6ad52b119ae 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,6 +13,7 @@ matrix: before_install: # Require JDK8 for compiling - jdk_switcher use openjdk8 + - ./install-snapshots.sh install: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V script: mvn test -Djacoco.skip=true -B -V cache: diff --git a/install-snapshots.sh b/install-snapshots.sh new file mode 100755 index 00000000000..893e92af9a6 --- /dev/null +++ b/install-snapshots.sh @@ -0,0 +1,21 @@ +#!/bin/sh + +# Install dependencies in the Travis build environment if they are snapshots. 
+# See .travis.yml + +set -u + +install_snapshot() +{ + URL=$1 + DIRECTORY_NAME=$2 + # Assume the snapshot we want is on the head of the default branch + git clone --depth 1 ${URL} /tmp/${DIRECTORY_NAME} + { + cd /tmp/${DIRECTORY_NAME} + mvn install -DskipTests + } +} + +grep -q '.*-SNAPSHOT' pom.xml && \ + install_snapshot https://github.com/datastax/native-protocol.git native-protocol From e2131575921577743df8d1181b9411ddd90649b8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 24 Jul 2019 08:31:40 -0700 Subject: [PATCH 011/979] Fix unshaded Guava imports in mapper-processor --- .../oss/driver/internal/mapper/processor/MapperProcessor.java | 2 +- .../internal/mapper/processor/dao/DaoSelectMethodGenerator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index 218e55a2c2a..de00c00058e 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -19,9 +19,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.shaded.guava.common.base.Strings; +import com.datastax.oss.driver.shaded.guava.common.base.Throwables; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.google.auto.service.AutoService; -import com.google.common.base.Throwables; import java.lang.annotation.Annotation; import java.util.Map; import java.util.Set; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index b721c5b4936..2ea9cf788b6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -32,8 +32,8 @@ import com.datastax.oss.driver.internal.mapper.processor.entity.EntityDefinition; import com.datastax.oss.driver.internal.mapper.processor.entity.PropertyDefinition; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import com.google.common.base.Splitter; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.MethodSpec; From 6c32beece5bc595c98c4be3490f0d43226e39a72 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 23 Jul 2019 08:56:38 -0700 Subject: [PATCH 012/979] JAVA-2359: Allow default keyspace at the mapper level --- changelog/README.md | 1 + .../datastax/oss/driver/mapper/SelectIT.java | 15 +++--- manual/mapper/mapper/README.md | 9 ++++ .../mapper/MapperBuilderGenerator.java | 3 +- .../oss/driver/api/mapper/MapperBuilder.java | 52 +++++++++++++++++++ .../internal/mapper/DefaultMapperContext.java | 11 +++- 6 files changed, 81 insertions(+), 10 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 83fb5628215..8860fd6214d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2359: Allow default keyspace at the mapper level - [improvement] JAVA-2306: Clear security tokens from memory immediately after use - [improvement] JAVA-2320: Expose more attributes on mapper Select for individual query clauses - [bug] JAVA-2332: Destroy 
connection pool when a node gets removed diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java index 3fd946d0a36..cc1543cd56c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java @@ -17,13 +17,11 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; -import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Delete; import com.datastax.oss.driver.api.mapper.annotations.Insert; @@ -67,9 +65,12 @@ public static void setup() { SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); } - InventoryMapper inventoryMapper = new SelectIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); - saleDao = inventoryMapper.productSaleDao(sessionRule.keyspace()); + InventoryMapper inventoryMapper = + new SelectIT_InventoryMapperBuilder(session) + .withDefaultKeyspace(sessionRule.keyspace()) + .build(); + dao = inventoryMapper.productDao(); + saleDao = inventoryMapper.productSaleDao(); } @Before @@ -164,10 +165,10 @@ public void should_select_by_primary_key_sales() { @Mapper public interface InventoryMapper { @DaoFactory - ProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + ProductDao productDao(); @DaoFactory - ProductSaleDao productSaleDao(@DaoKeyspace CqlIdentifier 
keyspace); + ProductSaleDao productSaleDao(); } @Dao diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 43bfbc94a70..1e3ae140fea 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -81,6 +81,15 @@ ProductDao productDao(@DaoKeyspace String keyspace); ProductDao productDao(@DaoTable CqlIdentifier table); ``` +You can also specify a default keyspace when building the mapper, it will be used for all methods +that don't have a `@DaoKeyspace` parameter: + +```java +InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) + .withDefaultKeyspace("keyspace1") + .build(); +``` + The mapper maintains an interface cache. Calling a factory method with the same arguments will yield the same DAO instance: diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java index 2da368dced7..1900547e9c6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java @@ -75,7 +75,8 @@ protected JavaFile.Builder getContents() { .addAnnotation(Override.class) .returns(ClassName.get(interfaceElement)) .addStatement( - "$1T context = new $1T(session, customState)", DefaultMapperContext.class) + "$1T context = new $1T(session, defaultKeyspaceId, customState)", + DefaultMapperContext.class) .addStatement( "return new $T(context)", GeneratedNames.mapperImplementation(interfaceElement)) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java index d45d772ee5e..69becbe18f5 100644 --- 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.api.mapper; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; import edu.umd.cs.findbugs.annotations.NonNull; @@ -32,6 +34,7 @@ public abstract class MapperBuilder { protected final CqlSession session; + protected CqlIdentifier defaultKeyspaceId; protected Map customState; protected MapperBuilder(CqlSession session) { @@ -39,6 +42,55 @@ protected MapperBuilder(CqlSession session) { this.customState = new HashMap<>(); } + /** + * Specifies a default keyspace that will be used for all DAOs built with this mapper (unless they + * specify their own keyspace). + * + *

In other words, given the following definitions: + * + *

+   * @Mapper
+   * public interface InventoryMapper {
+   *   @DaoFactory
+   *   ProductDao productDao();
+   *
+   *   @DaoFactory
+   *   ProductDao productDao(@DaoKeyspace CqlIdentifier keyspace);
+   * }
+   *
+   * InventoryMapper mapper1 = new InventoryMapperBuilder(session)
+   *     .withDefaultKeyspace(CqlIdentifier.fromCql("ks1"))
+   *     .build();
+   * InventoryMapper mapper2 = new InventoryMapperBuilder(session)
+   *     .withDefaultKeyspace(CqlIdentifier.fromCql("ks2"))
+   *     .build();
+   * 
+ * + * Then: + * + *
    + *
  • {@code mapper1.productDao()} will use keyspace {@code ks1}; + *
  • {@code mapper2.productDao()} will use keyspace {@code ks2}; + *
  • {@code mapper1.productDao(CqlIdentifier.fromCql("ks3"))} will use keyspace {@code ks3}. + *
+ * + * @see DaoFactory + */ + @NonNull + public MapperBuilder withDefaultKeyspace(@Nullable CqlIdentifier keyspaceId) { + this.defaultKeyspaceId = keyspaceId; + return this; + } + + /** + * Shortcut for {@link #withDefaultKeyspace(CqlIdentifier) + * withDefaultKeyspace(CqlIdentifier.fromCql(keyspaceName))}. + */ + @NonNull + public MapperBuilder withDefaultKeyspace(@Nullable String keyspaceName) { + return withDefaultKeyspace(keyspaceName == null ? null : CqlIdentifier.fromCql(keyspaceName)); + } + /** * Stores custom state that will be propagated to {@link MapperContext#getCustomState()}. * diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index 1169c833cfc..c6b7961daf0 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -38,15 +38,22 @@ public class DefaultMapperContext implements MapperContext { private final Map customState; public DefaultMapperContext( - @NonNull CqlSession session, @NonNull Map customState) { + @NonNull CqlSession session, + @Nullable CqlIdentifier keyspaceId, + @NonNull Map customState) { this( session, - null, + keyspaceId, null, new ConcurrentHashMap<>(), NullAllowingImmutableMap.copyOf(customState)); } + public DefaultMapperContext( + @NonNull CqlSession session, @NonNull Map customState) { + this(session, null, customState); + } + private DefaultMapperContext( CqlSession session, CqlIdentifier keyspaceId, From c1e3b0894563538ba5976406957096310ac13dca Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 24 Jul 2019 16:53:07 -0700 Subject: [PATCH 013/979] Don't reference current version in POM snippets The latest version is now displayed in the root README thanks to the maven badge. We don't need to repeat it in every POM snippet. 
Show a Maven variable instead: it should be pretty clear that it's a placeholder. That way we don't need to search and replace through the whole manual before each release. --- README.md | 4 ++-- manual/core/README.md | 2 +- manual/core/compression/README.md | 2 +- manual/core/integration/README.md | 14 +++++++------- manual/core/shaded_jar/README.md | 6 +++--- manual/mapper/README.md | 4 ++-- manual/query_builder/README.md | 2 +- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index b6755a722e1..9ad4ff3ed55 100644 --- a/README.md +++ b/README.md @@ -24,13 +24,13 @@ are multiple modules, all prefixed with `java-driver-`. com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} com.datastax.oss java-driver-query-builder - 4.1.0 + ${driver.version} ``` diff --git a/manual/core/README.md b/manual/core/README.md index 9f0bedaee08..12d9043d7d9 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -7,7 +7,7 @@ following coordinates: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} ``` diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index 59c065ab7fb..25a5c1d4c91 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -70,4 +70,4 @@ Dependency: Always double-check the exact Snappy version needed; you can find it in the driver's [parent POM]. 
-[parent POM]: https://search.maven.org/#artifactdetails%7Ccom.datastax.oss%7Cjava-driver-parent%7C4.1.0%7Cpom \ No newline at end of file +[parent POM]: https://search.maven.org/search?q=g:com.datastax.oss%20AND%20a:java-driver-parent&core=gav \ No newline at end of file diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index e7cbcea9095..7ed04f40527 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -39,7 +39,7 @@ dependencies, and tell Maven that we're going to use Java 8: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} ch.qos.logback @@ -176,7 +176,7 @@ repositories { } dependencies { - compile group: 'com.datastax.oss', name: 'java-driver-core', version: '4.1.0' + compile group: 'com.datastax.oss', name: 'java-driver-core', version: '${driver.version}' compile group: 'ch.qos.logback', name: 'logback-classic', version: '1.2.3' } ``` @@ -260,7 +260,7 @@ In that case, you can exclude the dependency: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} com.typesafe @@ -288,7 +288,7 @@ are not available on your platform, you can exclude the following dependencies: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} com.github.jnr @@ -322,7 +322,7 @@ and never call [Session.getMetrics] anywhere in your application, you can remove com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} io.dropwizard.metrics @@ -343,7 +343,7 @@ If all of these metrics are disabled, you can remove the dependency: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} org.hdrhistogram @@ -369,7 +369,7 @@ exclude them: com.datastax.oss java-driver-core - 4.1.0 + ${driver.version} com.github.stephenc.jcip diff --git a/manual/core/shaded_jar/README.md b/manual/core/shaded_jar/README.md index 377b2e0c7cf..8e988418d34 100644 --- a/manual/core/shaded_jar/README.md +++ b/manual/core/shaded_jar/README.md @@ -12,7 +12,7 @@ package name: com.datastax.oss java-driver-core-shaded - 
4.1.0 + ${driver.version} ``` @@ -23,12 +23,12 @@ dependency to the non-shaded JAR: com.datastax.oss java-driver-core-shaded - 4.1.0 + ${driver.version} com.datastax.oss java-driver-query-builder - 4.1.0 + ${driver.version} com.datastax.oss diff --git a/manual/mapper/README.md b/manual/mapper/README.md index f83f08d475c..1de64c90df2 100644 --- a/manual/mapper/README.md +++ b/manual/mapper/README.md @@ -12,7 +12,7 @@ It is published as two artifacts: com.datastax.oss java-driver-mapper-processor - 4.1.0 + ${driver.version} ``` @@ -25,7 +25,7 @@ It is published as two artifacts: com.datastax.oss java-driver-mapper-runtime - 4.1.0 + ${driver.version} ``` diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 2ee2e7fdd3f..a2c1618fce8 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -14,7 +14,7 @@ To use it in your application, add the following dependency: com.datastax.oss java-driver-query-builder - 4.1.0 + ${driver.version} ``` From fb664b3667a1a0011465b05e0a0cba59f808f7a5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 24 Jul 2019 17:00:42 -0700 Subject: [PATCH 014/979] Reference mapper in root README --- README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 9ad4ff3ed55..10ec520887e 100644 --- a/README.md +++ b/README.md @@ -32,13 +32,19 @@ are multiple modules, all prefixed with `java-driver-`. java-driver-query-builder ${driver.version} + + + com.datastax.oss + java-driver-mapper-runtime + ${driver.version} + ``` Note that the query builder is now published as a separate artifact, you'll need to add the dependency if you plan to use it. Refer to each module's manual for more details ([core](manual/core/), [query -builder](manual/query_builder/)). +builder](manual/query_builder/), [mapper](manual/mapper)). 
[com.datastax.oss]: http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.datastax.oss%22 From 4f295098c0757f4580264c852ae0798a66d6a379 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 24 Jul 2019 15:46:18 -0700 Subject: [PATCH 015/979] JAVA-2358: Fix list of reserved CQL keywords These are the words that cannot be passed to CqlIdentifier.fromCql(), unless they are quoted. --- changelog/README.md | 1 + .../driver/internal/core/util/Strings.java | 27 +++++++++++-------- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 8860fd6214d..10a33d7fb46 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [bug] JAVA-2358: Fix list of reserved CQL keywords - [improvement] JAVA-2359: Allow default keyspace at the mapper level - [improvement] JAVA-2306: Clear security tokens from memory immediately after use - [improvement] JAVA-2320: Expose more attributes on mapper Select for individual query clauses diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java index 50063799a8e..eb95d2cbc18 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java @@ -261,7 +261,6 @@ private Strings() {} "allow", "alter", "and", - "any", "apply", "asc", "authorize", @@ -270,34 +269,40 @@ private Strings() {} "by", "columnfamily", "create", + "default", "delete", "desc", + "describe", "drop", - "each_quorum", + "entries", + "execute", "from", + "full", "grant", + "if", "in", "index", - "inet", "infinity", "insert", "into", + "is", "keyspace", - "keyspaces", "limit", - "local_one", - "local_quorum", + "materialized", + "mbean", + "mbeans", "modify", "nan", "norecursive", + "not", + "null", "of", "on", - "one", + "or", "order", - "password", "primary", - "quorum", "rename", + 
"replace", "revoke", "schema", "select", @@ -305,13 +310,13 @@ private Strings() {} "table", "to", "token", - "three", "truncate", - "two", "unlogged", + "unset", "update", "use", "using", + "view", "where", "with"); } From ca4a7e3bef27c7fb56cf8f834478ca01cb47207d Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 25 Jul 2019 10:24:21 -0700 Subject: [PATCH 016/979] JAVA-2367: Fix column names in EntityHelper.updateByPrimaryKey --- changelog/README.md | 1 + .../oss/driver/mapper/UpdateNamingIT.java | 125 ++++++++++++++++++ ...lperUpdateByPrimaryKeyMethodGenerator.java | 2 +- 3 files changed, 127 insertions(+), 1 deletion(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java diff --git a/changelog/README.md b/changelog/README.md index 10a33d7fb46..6fba37f8d1d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [bug] JAVA-2367: Fix column names in EntityHelper.updateByPrimaryKey - [bug] JAVA-2358: Fix list of reserved CQL keywords - [improvement] JAVA-2359: Allow default keyspace at the mapper level - [improvement] JAVA-2306: Clear security tokens from memory immediately after use diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java new file mode 100644 index 00000000000..c3676206bfb --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.NamingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.mapper.entity.naming.NamingConvention; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +/** + * For JAVA-2367: ensure that PK column names are properly handled in the WHERE clause of a + * generated UPDATE query. 
+ */ +@Category(ParallelizableTests.class) +public class UpdateNamingIT { + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + private static TestDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + session.execute( + SimpleStatement.builder("CREATE TABLE foo(mykey int PRIMARY KEY, value int)") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + + TestMapper mapper = + TestMapper.builder(session).withDefaultKeyspace(sessionRule.keyspace()).build(); + dao = mapper.dao(); + } + + @Test + public void should_update_with_case_insensitive_pk_name() { + dao.update(new Foo(1, 1)); + Foo foo = dao.get(1); + assertThat(foo.getValue()).isEqualTo(1); + } + + @Mapper + public interface TestMapper { + + @DaoFactory + TestDao dao(); + + static MapperBuilder builder(CqlSession session) { + return new UpdateNamingIT_TestMapperBuilder(session); + } + } + + @Dao + public interface TestDao { + @Select + Foo get(int key); + + @Update + void update(Foo template); + } + + @Entity + @NamingStrategy(convention = NamingConvention.CASE_INSENSITIVE) + public static class Foo { + @PartitionKey private int myKey; + private int value; + + public Foo() {} + + public Foo(int myKey, int value) { + this.myKey = myKey; + this.value = value; + } + + public int getMyKey() { + return myKey; + } + + public void setMyKey(int myKey) { + this.myKey = myKey; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java index 
6bb0d72a365..400ea2313f9 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java @@ -46,7 +46,7 @@ public Optional generate() { for (PropertyDefinition property : entityDefinition.getPrimaryKey()) { methodBuilder.addCode( - "\n.where($1T.column($2S).isEqualTo($3T.bindMarker($2S)))", + "\n.where($1T.column($2L).isEqualTo($3T.bindMarker($2L)))", Relation.class, property.getCqlName(), QueryBuilder.class); From 321696b492add84aa7126acf685dded040ebc647 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 17 Jul 2019 17:34:19 -0700 Subject: [PATCH 017/979] JAVA-2338: Revisit toString() for data container types --- changelog/README.md | 1 + core/revapi.json | 15 ++++++ .../datastax/oss/driver/api/core/cql/Row.java | 52 +++++++++++++++++++ .../oss/driver/api/core/data/TupleValue.java | 38 ++++++++++++++ .../oss/driver/api/core/data/UdtValue.java | 38 ++++++++++++++ .../internal/core/data/DefaultTupleValue.java | 5 -- .../internal/core/data/DefaultUdtValue.java | 5 -- .../core/data/DefaultUdtValueTest.java | 2 +- 8 files changed, 145 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 6fba37f8d1d..da0623e07d5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2338: Revisit toString() for data container types - [bug] JAVA-2367: Fix column names in EntityHelper.updateByPrimaryKey - [bug] JAVA-2358: Fix list of reserved CQL keywords - [improvement] JAVA-2359: Allow default keyspace at the mapper level diff --git a/core/revapi.json b/core/revapi.json index c5e5069d7fa..295110bafd3 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4794,6 +4794,21 @@ "new": "method java.util.Spliterator 
com.datastax.oss.driver.api.core.PagingIterable::spliterator() @ com.datastax.oss.driver.api.core.cql.ResultSet", "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", "justification": "JAVA-2247: PagingIterable implementations should implement spliterator()" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.lang.String com.datastax.oss.driver.api.core.cql.Row::toString()", + "justification": "False positive -- all objects implicitly have toString()" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.lang.String com.datastax.oss.driver.api.core.data.TupleValue::toString()", + "justification": "False positive -- all objects implicitly have toString()" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.lang.String com.datastax.oss.driver.api.core.data.UdtValue::toString()", + "justification": "False positive -- all objects implicitly have toString()" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Row.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Row.java index 9a2e88e27e8..49b85c54719 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Row.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Row.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.data.GettableByIndex; import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.api.core.detach.Detachable; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import edu.umd.cs.findbugs.annotations.NonNull; /** @@ -33,4 +34,55 @@ public interface Row extends GettableByIndex, GettableByName, GettableById, Deta /** @return the column definitions contained in this result set. */ @NonNull ColumnDefinitions getColumnDefinitions(); + + /** + * Returns a string representation of the contents of this row. + * + *

This produces a comma-separated list enclosed in square brackets. Each column is represented + * by its name, followed by a column and the value as a CQL literal. For example: + * + *

+   * [id:1, name:'test']
+   * 
+ * + * Notes: + * + *
    + *
  • This method does not sanitize its output in any way. In particular, no effort is made to + * limit output size: all columns are included, and large strings or blobs will be appended + * as-is. + *
  • Be mindful of how you expose the result. For example, in high-security environments, it + * might be undesirable to leak data in application logs. + *
+ */ + @NonNull + default String getFormattedContents() { + StringBuilder result = new StringBuilder("["); + ColumnDefinitions definitions = getColumnDefinitions(); + for (int i = 0; i < definitions.size(); i++) { + if (i > 0) { + result.append(", "); + } + ColumnDefinition definition = definitions.get(i); + String name = definition.getName().asCql(true); + TypeCodec codec = codecRegistry().codecFor(definition.getType()); + Object value = codec.decode(getBytesUnsafe(i), protocolVersion()); + result.append(name).append(':').append(codec.format(value)); + } + return result.append("]").toString(); + } + + /** + * Returns an abstract representation of this object, that may not include the row's + * contents. + * + *

The driver's built-in {@link Row} implementation returns the default format of {@link + * Object#toString()}: the class name, followed by the at-sign and the hash code of the object. + * + *

Omitting the contents was a deliberate choice, because we feel it would make it too easy to + * accidentally leak data (e.g. in application logs). If you want the contents, use {@link + * #getFormattedContents()}. + */ + @Override + String toString(); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java index 5937bc0517c..e9e9c91be00 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java @@ -34,4 +34,42 @@ public interface TupleValue extends GettableByIndex, SettableByIndex @NonNull TupleType getType(); + + /** + * Returns a string representation of the contents of this tuple. + * + *

This produces a CQL literal, for example: + * + *

+   * (1,'test')
+   * 
+ * + * Notes: + * + *
    + *
  • This method does not sanitize its output in any way. In particular, no effort is made to + * limit output size: all fields are included, and large strings or blobs will be appended + * as-is. + *
  • Be mindful of how you expose the result. For example, in high-security environments, it + * might be undesirable to leak data in application logs. + *
+ */ + @NonNull + default String getFormattedContents() { + return codecRegistry().codecFor(getType()).format(this); + } + + /** + * Returns an abstract representation of this object, that may not include the tuple's + * contents. + * + *

The driver's built-in {@link TupleValue} implementation returns the default format of {@link + * Object#toString()}: the class name, followed by the at-sign and the hash code of the object. + * + *

Omitting the contents was a deliberate choice, because we feel it would make it too easy to + * accidentally leak data (e.g. in application logs). If you want the contents, use {@link + * #getFormattedContents()}. + */ + @Override + String toString(); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java index bfdebdfd7fa..41f5f0361de 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java @@ -35,4 +35,42 @@ public interface UdtValue @NonNull UserDefinedType getType(); + + /** + * Returns a string representation of the contents of this UDT. + * + *

This produces a CQL literal, for example: + * + *

+   * {street:'42 Main Street',zip:12345}
+   * 
+ * + * Notes: + * + *
    + *
  • This method does not sanitize its output in any way. In particular, no effort is made to + * limit output size: all fields are included, and large strings or blobs will be appended + * as-is. + *
  • Be mindful of how you expose the result. For example, in high-security environments, it + * might be undesirable to leak data in application logs. + *
+ */ + @NonNull + default String getFormattedContents() { + return codecRegistry().codecFor(getType()).format(this); + } + + /** + * Returns an abstract representation of this object, that may not include the UDT's + * contents. + * + *

The driver's built-in {@link UdtValue} implementation returns the default format of {@link + * Object#toString()}: the class name, followed by the at-sign and the hash code of the object. + * + *

Omitting the contents was a deliberate choice, because we feel it would make it too easy to + * accidentally leak data (e.g. in application logs). If you want the contents, use {@link + * #getFormattedContents()}. + */ + @Override + String toString(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java index 9317a3f5a36..15a36bbd9b7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java @@ -167,11 +167,6 @@ public int hashCode() { return result; } - @Override - public String toString() { - return codecRegistry().codecFor(type).format(this); - } - private static class SerializationProxy implements Serializable { private static final long serialVersionUID = 1; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java index 5bed077a76d..b0528a4a195 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java @@ -171,11 +171,6 @@ public int hashCode() { return result; } - @Override - public String toString() { - return codecRegistry().codecFor(type).format(this); - } - /** * @serialData The type of the tuple, followed by an array of byte arrays representing the values * (null values are represented by {@code null}). 
diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValueTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValueTest.java index c097528e46d..0bec3c152b3 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValueTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValueTest.java @@ -123,7 +123,7 @@ public void should_format_to_string() { UdtValue udt = type.newValue().setString("t", "foobar").setDouble("d", 3.14); - assertThat(udt.toString()).isEqualTo("{t:'foobar',i:NULL,d:3.14}"); + assertThat(udt.getFormattedContents()).isEqualTo("{t:'foobar',i:NULL,d:3.14}"); } @Test From b8474df3699df2f7e695fd9a43c108782d5fb61b Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 19 Jul 2019 09:52:42 -0700 Subject: [PATCH 018/979] JAVA-2336: Expose byte utility methods in the public API --- changelog/README.md | 1 + .../oss/driver/api/core/data/ByteUtils.java | 89 +++++++++++++++++++ .../oss/driver/examples/datatypes/Blobs.java | 10 +-- 3 files changed, 95 insertions(+), 5 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/data/ByteUtils.java diff --git a/changelog/README.md b/changelog/README.md index da0623e07d5..a6cbfdc90c7 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2336: Expose byte utility methods in the public API - [improvement] JAVA-2338: Revisit toString() for data container types - [bug] JAVA-2367: Fix column names in EntityHelper.updateByPrimaryKey - [bug] JAVA-2358: Fix list of reserved CQL keywords diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/ByteUtils.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/ByteUtils.java new file mode 100644 index 00000000000..232a75543cb --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/ByteUtils.java @@ -0,0 +1,89 @@ +/* + * 
Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.data; + +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; + +/** + * A set of static utility methods to work with byte buffers (associated with CQL type {@code + * blob}). + */ +public class ByteUtils { + + // Implementation note: this is just a gateway to the internal `Bytes` class in native-protocol. + // The difference is that this one is part of the public API. + + /** + * Converts a blob to its CQL hex string representation. + * + *

A CQL blob string representation consists of the hexadecimal representation of the blob + * bytes prefixed by "0x". + * + * @param bytes the blob/bytes to convert to a string. + * @return the CQL string representation of {@code bytes}. If {@code bytes} is {@code null}, this + * method returns {@code null}. + */ + public static String toHexString(ByteBuffer bytes) { + return Bytes.toHexString(bytes); + } + + /** + * Converts a blob to its CQL hex string representation. + * + *

A CQL blob string representation consists of the hexadecimal representation of the blob + * bytes prefixed by "0x". + * + * @param byteArray the blob/bytes array to convert to a string. + * @return the CQL string representation of {@code bytes}. If {@code bytes} is {@code null}, this + * method returns {@code null}. + */ + public static String toHexString(byte[] byteArray) { + return Bytes.toHexString(byteArray); + } + + /** + * Parses a hex string representing a CQL blob. + * + *

The input should be a valid representation of a CQL blob, i.e. it must start by "0x" + * followed by the hexadecimal representation of the blob bytes. + * + * @param str the CQL blob string representation to parse. + * @return the bytes corresponding to {@code str}. If {@code str} is {@code null}, this method + * returns {@code null}. + * @throws IllegalArgumentException if {@code str} is not a valid CQL blob string. + */ + public static ByteBuffer fromHexString(String str) { + return Bytes.fromHexString(str); + } + + /** + * Extracts the content of the provided {@code ByteBuffer} as a byte array. + * + *

This method works with any type of {@code ByteBuffer} (direct and non-direct ones), but when + * the buffer is backed by an array, it will try to avoid copy when possible. As a consequence, + * changes to the returned byte array may or may not reflect into the initial buffer. + * + * @param bytes the buffer whose contents to extract. + * @return a byte array with the contents of {@code bytes}. That array may be the array backing + * {@code bytes} if this can avoid a copy. + */ + public static byte[] getArray(ByteBuffer bytes) { + return Bytes.getArray(bytes); + } + + private ByteUtils() {} +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java index 343a03306e9..84cf419f96b 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java @@ -19,7 +19,7 @@ import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.protocol.internal.util.Bytes; +import com.datastax.oss.driver.api.core.data.ByteUtils; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -143,7 +143,7 @@ private static void retrieveSimpleColumn(CqlSession session) { // - even then, the backing array might be larger than the buffer's contents // // The driver provides a utility method that handles those details for you: - byte[] array = Bytes.getArray(buffer); + byte[] array = ByteUtils.getArray(buffer); assert array.length == 16; for (byte b : array) { assert b == (byte) 0xFF; @@ -175,7 +175,7 @@ private static void insertConcurrent(CqlSession session) { session.prepare("INSERT INTO examples.blobs (k, b) VALUES (1, :b)"); // This is another convenient utility provided by the driver. It's useful for tests. 
- ByteBuffer buffer = Bytes.fromHexString("0xffffff"); + ByteBuffer buffer = ByteUtils.fromHexString("0xffffff"); // When you pass a byte buffer to a bound statement, it creates a shallow copy internally with // the buffer.duplicate() method. @@ -188,7 +188,7 @@ private static void insertConcurrent(CqlSession session) { session.execute(boundStatement); Row row = session.execute("SELECT b FROM examples.blobs WHERE k = 1").one(); assert row != null; - assert Objects.equals(Bytes.toHexString(row.getByteBuffer("b")), "0xffffff"); + assert Objects.equals(ByteUtils.toHexString(row.getByteBuffer("b")), "0xffffff"); buffer.flip(); @@ -199,7 +199,7 @@ private static void insertConcurrent(CqlSession session) { session.execute(boundStatement); row = session.execute("SELECT b FROM examples.blobs WHERE k = 1").one(); assert row != null; - assert Objects.equals(Bytes.toHexString(row.getByteBuffer("b")), "0xaaffff"); + assert Objects.equals(ByteUtils.toHexString(row.getByteBuffer("b")), "0xaaffff"); // This will also happen if you use the async API, e.g. create the bound statement, call // executeAsync() on it and reuse the buffer immediately. From 5a32b7843fb83b9ec6774f2b27623800acec9725 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 12 Jul 2019 15:59:34 -0500 Subject: [PATCH 019/979] JAVA-2337: Fix CQL to Java type mapping documentation --- manual/core/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manual/core/README.md b/manual/core/README.md index 12d9043d7d9..33b34b4ad7b 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -254,7 +254,7 @@ See [AccessibleByName] for an explanation of the conversion rules. 
| user-defined types | getUDTValue | [UDTValue] | [User-defined types](udts/) | | uuid | getUuid | java.util.UUID | | | varchar | getString | java.lang.String | | -| varint | getVarint | java.math.BigInteger | | +| varint | getBigInteger | java.math.BigInteger | | Sometimes the driver has to infer a CQL type from a Java type (for example when handling the values of [simple statements](statements/simple/)); for those that have multiple CQL equivalents, it makes From 8d7a6ac85505dcecdf0417e18caf845d6c8fb3c0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 19 Jul 2019 17:09:59 -0700 Subject: [PATCH 020/979] JAVA-2302: Better target mapper errors and warnings for inherited methods Motivation: If - the mapper processes a class or interface that inherits from another type - that parent type is not part of the current compilation round (e.g. it's in a JAR dependency of the current project) - that parent type is not annotated correctly Then the errors or warnings issued by the mapper processor do not provide enough context, because they can't be linked to a source file and line number. Modifications: Detect that situation in DecoratedMessager, and issue the messages on the child type instead. Result: The errors or warnings will appear on a source file, with a contextual prefix like "[getId() inherited from BaseEntity]". 
--- changelog/README.md | 1 + .../processor/CodeGeneratorFactory.java | 10 +- .../mapper/processor/DecoratedMessager.java | 145 +++++++++++++++++- .../DefaultCodeGeneratorFactory.java | 31 ++-- .../mapper/processor/MapperProcessor.java | 4 +- .../dao/DaoDeleteMethodGenerator.java | 15 +- .../dao/DaoGetEntityMethodGenerator.java | 10 +- .../dao/DaoImplementationGenerator.java | 9 +- .../dao/DaoInsertMethodGenerator.java | 5 +- .../processor/dao/DaoMethodGenerator.java | 17 +- .../dao/DaoQueryMethodGenerator.java | 3 +- .../dao/DaoQueryProviderMethodGenerator.java | 4 +- .../dao/DaoSelectMethodGenerator.java | 9 +- .../dao/DaoSetEntityMethodGenerator.java | 7 +- .../dao/DaoUpdateMethodGenerator.java | 5 +- .../mapper/processor/dao/EntityUtils.java | 8 +- .../entity/DefaultEntityFactory.java | 71 ++++++--- .../processor/entity/EntityFactory.java | 2 +- .../MapperDaoFactoryMethodGenerator.java | 7 + .../mapper/MapperImplementationGenerator.java | 5 +- .../dao/DaoDeleteMethodGeneratorTest.java | 3 +- .../dao/DaoUpdateMethodGeneratorTest.java | 6 +- .../DaoCompiledMethodGeneratorTest.java | 4 +- 23 files changed, 304 insertions(+), 77 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index a6cbfdc90c7..720b9364d48 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2302: Better target mapper errors and warnings for inherited methods - [improvement] JAVA-2336: Expose byte utility methods in the public API - [improvement] JAVA-2338: Revisit toString() for data container types - [bug] JAVA-2367: Fix column names in EntityHelper.updateByPrimaryKey diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/CodeGeneratorFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/CodeGeneratorFactory.java index adc3604b002..36f62cb1866 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/CodeGeneratorFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/CodeGeneratorFactory.java @@ -56,7 +56,9 @@ public interface CodeGeneratorFactory { * @see #newMapperImplementation(TypeElement) */ Optional newMapperImplementationMethod( - ExecutableElement methodElement, MapperImplementationSharedCode enclosingClass); + ExecutableElement methodElement, + TypeElement processedType, + MapperImplementationSharedCode enclosingClass); /** The builder associated to a {@link Mapper}-annotated interface. */ CodeGenerator newMapperBuilder(TypeElement interfaceElement); @@ -64,9 +66,8 @@ Optional newMapperImplementationMethod( /** * The implementation of a {@link Dao}-annotated interface. * - *

The default code factory calls {@link #newDaoImplementationMethod(ExecutableElement, Map, - * DaoImplementationSharedCode)} for each non-static, non-default method, but this is not a hard - * requirement. + *

The default code factory calls {@link #newDaoImplementationMethod} for each non-static, + * non-default method, but this is not a hard requirement. */ CodeGenerator newDaoImplementation(TypeElement interfaceElement); @@ -80,6 +81,7 @@ Optional newMapperImplementationMethod( Optional newDaoImplementationMethod( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass); DaoReturnTypeParser getDaoReturnTypeParser(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java index 45afe226fca..36c49e681c1 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java @@ -15,8 +15,11 @@ */ package com.datastax.oss.driver.internal.mapper.processor; +import edu.umd.cs.findbugs.annotations.NonNull; import javax.annotation.processing.Messager; import javax.lang.model.element.Element; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.TypeElement; import javax.tools.Diagnostic; /** Wraps {@link Messager} to provide convenience methods. */ @@ -28,15 +31,145 @@ public DecoratedMessager(Messager messager) { this.messager = messager; } - public void warn(Element element, String template, Object... arguments) { - messager.printMessage(Diagnostic.Kind.WARNING, String.format(template, arguments), element); - } - + /** Emits a global warning that doesn't target a particular element. */ public void warn(String template, Object... arguments) { messager.printMessage(Diagnostic.Kind.WARNING, String.format(template, arguments)); } - public void error(Element element, String template, Object... 
arguments) { - messager.printMessage(Diagnostic.Kind.ERROR, String.format(template, arguments), element); + /** Emits a warning for a type. */ + public void warn(TypeElement typeElement, String template, Object... arguments) { + messager.printMessage(Diagnostic.Kind.WARNING, String.format(template, arguments), typeElement); + } + + /** Emits an error for a type. */ + public void error(TypeElement typeElement, String template, Object... arguments) { + messager.printMessage(Diagnostic.Kind.ERROR, String.format(template, arguments), typeElement); + } + + /** + * Emits a warning for a program element that might be inherited from another type. + * + * @param targetElement the element to target. + * @param processedType the type that we were processing when we detected the issue. + */ + public void warn( + Element targetElement, TypeElement processedType, String template, Object... arguments) { + new ElementMessager(targetElement, processedType) + .print(Diagnostic.Kind.WARNING, template, arguments); + } + + /** + * Emits an error for a program element that might be inherited from another type. + * + * @param targetElement the element to target. + * @param processedType the type that we were processing when we detected the issue. + */ + public void error( + Element targetElement, TypeElement processedType, String template, Object... arguments) { + new ElementMessager(targetElement, processedType) + .print(Diagnostic.Kind.ERROR, template, arguments); + } + + /** + * Abstracts logic to produce better messages if the target element is inherited from a compiled + * type. + * + *

Consider the following situation: + * + *

+   *   interface BaseDao {
+   *     @Select
+   *     void select();
+   *   }
+   *   @Dao
+   *   interface ConcreteDao extends BaseDao {}
+   * 
+ * + * If {@code BaseDao} belongs to a JAR dependency, it is already compiled and the warning or error + * message can't reference a file or line number, it doesn't even mention {@code ConcreteDao}. + * + *

The goal of this class is to detect those cases, and issue the message on {@code + * ConcreteDao} instead. + */ + private class ElementMessager { + + private final Element actualTargetElement; + // Additional location information that will get prepended to the message + private final String locationInfo; + + /** + * @param processedType the type that we are currently processing ({@code ConcreteDao} in the + * example above). + */ + ElementMessager(@NonNull Element intendedTargetElement, @NonNull TypeElement processedType) { + + TypeElement declaringType; + switch (intendedTargetElement.getKind()) { + case CLASS: + case INTERFACE: + if (processedType.equals(intendedTargetElement) + || isSourceFile((TypeElement) intendedTargetElement)) { + this.actualTargetElement = intendedTargetElement; + this.locationInfo = ""; + } else { + this.actualTargetElement = processedType; + this.locationInfo = + String.format("[Ancestor %s]", intendedTargetElement.getSimpleName()); + } + break; + case FIELD: + case METHOD: + case CONSTRUCTOR: + declaringType = (TypeElement) intendedTargetElement.getEnclosingElement(); + if (processedType.equals(declaringType) || isSourceFile(declaringType)) { + this.actualTargetElement = intendedTargetElement; + this.locationInfo = ""; + } else { + this.actualTargetElement = processedType; + this.locationInfo = + String.format( + "[%s inherited from %s] ", + intendedTargetElement, declaringType.getSimpleName()); + } + break; + case PARAMETER: + ExecutableElement method = + (ExecutableElement) intendedTargetElement.getEnclosingElement(); + declaringType = (TypeElement) method.getEnclosingElement(); + if (processedType.equals(declaringType) || isSourceFile(declaringType)) { + this.actualTargetElement = intendedTargetElement; + this.locationInfo = ""; + } else { + this.actualTargetElement = processedType; + this.locationInfo = + String.format( + "[Parameter %s of %s inherited from %s] ", + intendedTargetElement.getSimpleName(), + method.getSimpleName(), + 
declaringType.getSimpleName()); + } + break; + default: + // We don't emit messages for other types of elements in the mapper processor. Handle + // gracefully nevertheless: + this.actualTargetElement = intendedTargetElement; + this.locationInfo = ""; + break; + } + } + + void print(Diagnostic.Kind level, String template, Object... arguments) { + messager.printMessage( + level, String.format(locationInfo + template, arguments), actualTargetElement); + } + + private boolean isSourceFile(TypeElement element) { + try { + Class.forName(element.getQualifiedName().toString()); + return false; + } catch (ClassNotFoundException e) { + return true; + } + } } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java index c9703829ed2..78624381458 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java @@ -75,10 +75,13 @@ public CodeGenerator newMapperImplementation(TypeElement interfaceElement) { @Override public Optional newMapperImplementationMethod( - ExecutableElement methodElement, MapperImplementationSharedCode enclosingClass) { + ExecutableElement methodElement, + TypeElement processedType, + MapperImplementationSharedCode enclosingClass) { if (methodElement.getAnnotation(DaoFactory.class) != null) { return Optional.of( - new MapperDaoFactoryMethodGenerator(methodElement, enclosingClass, context)); + new MapperDaoFactoryMethodGenerator( + methodElement, processedType, enclosingClass, context)); } else { return Optional.empty(); } @@ -98,32 +101,40 @@ public CodeGenerator newDaoImplementation(TypeElement interfaceElement) { public Optional newDaoImplementationMethod( ExecutableElement 
methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass) { if (methodElement.getAnnotation(SetEntity.class) != null) { return Optional.of( - new DaoSetEntityMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoSetEntityMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(Insert.class) != null) { return Optional.of( - new DaoInsertMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoInsertMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(GetEntity.class) != null) { return Optional.of( - new DaoGetEntityMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoGetEntityMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(Select.class) != null) { return Optional.of( - new DaoSelectMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoSelectMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(Delete.class) != null) { return Optional.of( - new DaoDeleteMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoDeleteMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(Query.class) != null) { return Optional.of( - new DaoQueryMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new DaoQueryMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(Update.class) != null) { return Optional.of( - new DaoUpdateMethodGenerator(methodElement, typeParameters, enclosingClass, context)); + new 
DaoUpdateMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else if (methodElement.getAnnotation(QueryProvider.class) != null) { return Optional.of( new DaoQueryProviderMethodGenerator( - methodElement, typeParameters, enclosingClass, context)); + methodElement, typeParameters, processedType, enclosingClass, context)); } else { return Optional.empty(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index de00c00058e..95a5eb5c61d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -109,7 +109,7 @@ protected void processAnnotatedTypes( for (Element element : roundEnvironment.getElementsAnnotatedWith(annotationClass)) { if (element.getKind() != expectedKind) { messager.error( - element, + (TypeElement) element, "Only %s elements can be annotated with %s", expectedKind, annotationClass.getSimpleName()); @@ -120,7 +120,7 @@ protected void processAnnotatedTypes( generatorFactory.apply(typeElement).generate(); } catch (Exception e) { messager.error( - element, + (TypeElement) element, "Unexpected error while writing generated code: %s", Throwables.getStackTraceAsString(e)); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index f58462db448..5cf2c7f26c5 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -54,9 +54,10 @@ public class DaoDeleteMethodGenerator extends DaoMethodGenerator { public DaoDeleteMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); } protected Set getSupportedReturnTypes() { @@ -74,6 +75,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid annotation parameters: %s cannot have both ifExists and customIfClause", Delete.class.getSimpleName()); return Optional.empty(); @@ -97,6 +99,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Wrong number of parameters: %s methods with no custom clause " + "must take either an entity instance, or the partition key components", Delete.class.getSimpleName()); @@ -117,6 +120,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid parameter list: %s methods that have a custom where clause " + "must not take an Entity (%s) as a parameter", Delete.class.getSimpleName(), @@ -131,6 +135,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Missing entity class: %s methods that do not operate on an entity " + "instance must have an 'entityClass' argument", Delete.class.getSimpleName()); @@ -154,6 +159,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid parameter list: %s methods that have a custom if clause" + "must specify the entire primary key (expected primary keys of %s: %s)", Delete.class.getSimpleName(), @@ -169,12 +175,13 @@ public Optional generate() { primaryKeyParameterCount = primaryKeyParameters.size(); if 
(!EntityUtils.areParametersValid( - context, - methodElement, entityElement, entityDefinition, primaryKeyParameters, Delete.class, + context, + methodElement, + processedType, "do not operate on an entity instance and lack a custom where clause")) { return Optional.empty(); } @@ -243,6 +250,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Wrong number of parameters: %s methods can only have additional " + "parameters if they specify a custom WHERE or IF clause", Delete.class.getSimpleName()); @@ -297,6 +305,7 @@ private TypeElement getEntityFromAnnotation() { .getMessager() .warn( methodElement, + processedType, "Too many entity classes: %s must have at most one 'entityClass' argument " + "(will use the first one: %s)", Delete.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java index c7961d87a00..116713f5779 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java @@ -53,9 +53,10 @@ private enum Transformation { public DaoGetEntityMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); } @Override @@ -67,6 +68,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Wrong number of parameters: %s methods must have exactly one", GetEntity.class.getSimpleName()); return Optional.empty(); @@ -82,7 +84,8 @@ public Optional 
generate() { context .getMessager() .error( - parameterElement, + methodElement, + processedType, "Invalid parameter type: %s methods must take a %s, %s or %s", GetEntity.class.getSimpleName(), GettableByName.class.getSimpleName(), @@ -105,6 +108,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must return %s if the argument is %s", GetEntity.class.getSimpleName(), PagingIterable.class.getSimpleName(), @@ -119,6 +123,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must return %s if the argument is %s", GetEntity.class.getSimpleName(), MappedAsyncPagingIterable.class.getSimpleName(), @@ -134,6 +139,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: " + "%s methods must return a %s-annotated class, or a %s or %s thereof", GetEntity.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java index d4824cae357..b14e6fb3e7b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java @@ -253,6 +253,7 @@ private Map parseTypeParameters(TypeMirror mirror) { .getMessager() .error( element, + interfaceElement, "Could not resolve type parameter %s " + "on %s from child interfaces. This error usually means an interface " + "was inappropriately annotated with @%s. 
Interfaces should only be annotated " @@ -303,10 +304,10 @@ protected JavaFile.Builder getContents() { .addSuperinterface(ClassName.get(interfaceElement)); for (TypeMirror mirror : interfaces) { - TypeElement interfaceElement = (TypeElement) context.getTypeUtils().asElement(mirror); + TypeElement parentInterfaceElement = (TypeElement) context.getTypeUtils().asElement(mirror); Map typeParameters = parseTypeParameters(mirror); - for (Element child : interfaceElement.getEnclosedElements()) { + for (Element child : parentInterfaceElement.getEnclosedElements()) { if (child.getKind() == ElementKind.METHOD) { ExecutableElement methodElement = (ExecutableElement) child; Set modifiers = methodElement.getModifiers(); @@ -314,12 +315,14 @@ protected JavaFile.Builder getContents() { Optional maybeGenerator = context .getCodeGeneratorFactory() - .newDaoImplementationMethod(methodElement, typeParameters, this); + .newDaoImplementationMethod( + methodElement, typeParameters, interfaceElement, this); if (!maybeGenerator.isPresent()) { context .getMessager() .error( methodElement, + interfaceElement, "Unrecognized method signature: no implementation will be generated"); } else { maybeGenerator.flatMap(MethodGenerator::generate).ifPresent(classBuilder::addMethod); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 528f2492e95..687753da23c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -53,9 +53,10 @@ public class DaoInsertMethodGenerator extends DaoMethodGenerator { public DaoInsertMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, 
DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); nullSavingStrategyValidation = new NullSavingStrategyValidation(context); } @@ -94,6 +95,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "%s methods must take the entity to insert as the first parameter", Insert.class.getSimpleName()); return Optional.empty(); @@ -111,6 +113,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must return the same entity as their argument ", Insert.class.getSimpleName()); return Optional.empty(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 3ef07d67d0c..f39a97179c3 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -42,6 +42,7 @@ public abstract class DaoMethodGenerator implements MethodGenerator { protected final ExecutableElement methodElement; + protected final TypeElement processedType; protected final DaoImplementationSharedCode enclosingClass; protected final ProcessorContext context; protected final Map typeParameters; @@ -49,10 +50,12 @@ public abstract class DaoMethodGenerator implements MethodGenerator { public DaoMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { this.methodElement = methodElement; this.typeParameters = typeParameters; + this.processedType = processedType; this.enclosingClass = 
enclosingClass; this.context = context; } @@ -70,6 +73,7 @@ protected DaoReturnType parseAndValidateReturnType( .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must return one of %s", annotationName, validKinds); @@ -102,6 +106,7 @@ protected void maybeAddSimpleClause( .getMessager() .warn( methodElement, + processedType, "Invalid " + valueDescription + " value: " @@ -118,6 +123,7 @@ protected void maybeAddSimpleClause( .getMessager() .warn( methodElement, + processedType, "Invalid " + valueDescription + " value: " @@ -193,11 +199,11 @@ protected boolean validateCqlNamesPresent(List parame context .getMessager() .error( - parameter, - "Method %s: parameter %s is declared in a compiled method " + methodElement, + processedType, + "Parameter %s is declared in a compiled method " + "and refers to a bind marker " + "and thus must be annotated with @%s", - methodElement, parameter.getSimpleName(), CqlName.class.getSimpleName()); valid = false; @@ -214,10 +220,9 @@ protected void warnIfCqlNamePresent(List parameters) context .getMessager() .warn( - parameter, - "Method %s: parameter %s does not refer to a bind marker, " - + "@%s annotation will be ignored", methodElement, + processedType, + "Parameter %s does not refer to a bind marker, " + "@%s annotation will be ignored", parameter.getSimpleName(), CqlName.class.getSimpleName()); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java index ded58c2d0ce..0d3250c7282 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java @@ -46,9 +46,10 @@ public class DaoQueryMethodGenerator extends DaoMethodGenerator { 
public DaoQueryMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); this.queryString = methodElement.getAnnotation(Query.class).value(); nullSavingStrategyValidation = new NullSavingStrategyValidation(context); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java index 49a06533b94..0278211a99d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java @@ -40,9 +40,10 @@ public class DaoQueryProviderMethodGenerator extends DaoMethodGenerator { public DaoQueryProviderMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); } @Override @@ -114,6 +115,7 @@ private List getEntityHelperTypes() { .getMessager() .error( methodElement, + processedType, "Invalid annotation configuration: the elements in %s.entityHelpers " + "must be %s-annotated classes (offending element: %s)", QueryProvider.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java 
index 2ea9cf788b6..d958c2460c2 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -53,9 +53,10 @@ public class DaoSelectMethodGenerator extends DaoMethodGenerator { public DaoSelectMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); } protected Set getSupportedReturnTypes() { @@ -130,12 +131,13 @@ public Optional generate() { // If we have parameters for some primary key components, validate that the types match: if (!primaryKeyParameters.isEmpty() && !EntityUtils.areParametersValid( - context, - methodElement, entityElement, entityDefinition, primaryKeyParameters, Select.class, + context, + methodElement, + processedType, "don't use a custom clause")) { return Optional.empty(); } @@ -238,6 +240,7 @@ private void addOrdering(String orderingSpec, MethodSpec.Builder methodBuilder) .getMessager() .error( methodElement, + processedType, "Can't parse ordering '%s', expected a column name followed by ASC or DESC", orderingSpec); return; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java index 05d679dab3c..0729612cef2 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java @@ -39,9 +39,10 @@ public class 
DaoSetEntityMethodGenerator extends DaoMethodGenerator { public DaoSetEntityMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); nullSavingStrategyValidation = new NullSavingStrategyValidation(context); } @@ -59,6 +60,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Wrong number of parameters: %s methods must have two", SetEntity.class.getSimpleName()); return Optional.empty(); @@ -84,6 +86,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Wrong parameter types: %s methods must take a %s " + "and an annotated entity (in any order)", SetEntity.class.getSimpleName(), @@ -100,6 +103,7 @@ public Optional generate() { .getMessager() .warn( methodElement, + processedType, "BoundStatement is immutable, " + "this method will not modify '%s' in place. 
" + "It should probably return BoundStatement rather than void", @@ -110,6 +114,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must either be void, or return the same " + "type as their settable parameter (in this case, %s to match '%s')", SetEntity.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 865f7b1ddc8..c5be215b9ee 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -52,9 +52,10 @@ public class DaoUpdateMethodGenerator extends DaoMethodGenerator { public DaoUpdateMethodGenerator( ExecutableElement methodElement, Map typeParameters, + TypeElement processedType, DaoImplementationSharedCode enclosingClass, ProcessorContext context) { - super(methodElement, typeParameters, enclosingClass, context); + super(methodElement, typeParameters, processedType, enclosingClass, context); nullSavingStrategyValidation = new NullSavingStrategyValidation(context); } @@ -84,6 +85,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "%s methods must take the entity to update as the first parameter", Update.class.getSimpleName()); return Optional.empty(); @@ -220,6 +222,7 @@ private void maybeAddIfClause(MethodSpec.Builder methodBuilder, Update annotatio .getMessager() .error( methodElement, + processedType, "Invalid annotation parameters: %s cannot have both ifExists and customIfClause", Update.class.getSimpleName()); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java index e0b4176f5cf..c0fddd3c098 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java @@ -98,12 +98,13 @@ public static TypeElement asEntityElement( * message is emitted on the given method element. */ public static boolean areParametersValid( - ProcessorContext context, - ExecutableElement methodElement, TypeElement entityElement, EntityDefinition entityDefinition, List parameters, Class annotationClass, + ProcessorContext context, + ExecutableElement methodElement, + TypeElement processedType, String exceptionCondition) { List primaryKeyTypes = entityDefinition.getPrimaryKey().stream() @@ -121,6 +122,7 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, + processedType, "Invalid parameter list: %s methods that %s " + "must at least specify partition key components " + "(expected partition key of %s: %s)", @@ -136,6 +138,7 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, + processedType, "Invalid parameter list: %s methods that %s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). Too many parameters provided", @@ -155,6 +158,7 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, + processedType, "Invalid parameter list: %s methods that %s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). 
Mismatch at index %d: %s should be %s", diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java index 8e0fda6a935..b9dd5deda3e 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java @@ -76,8 +76,8 @@ public DefaultEntityFactory(ProcessorContext context) { } @Override - public EntityDefinition getDefinition(TypeElement classElement) { - Set types = HierarchyScanner.resolveTypeHierarchy(classElement, context); + public EntityDefinition getDefinition(TypeElement processedClass) { + Set types = HierarchyScanner.resolveTypeHierarchy(processedClass, context); Set typeHierarchy = Sets.newLinkedHashSet(); for (TypeMirror type : types) { typeHierarchy.add((TypeElement) context.getTypeUtils().asElement(type)); @@ -141,8 +141,14 @@ public EntityDefinition getDefinition(TypeElement classElement) { VariableElement field = findField(typeHierarchy, propertyName, typeMirror); Map, Annotation> propertyAnnotations = - scanPropertyAnnotations(typeHierarchy, getMethod, field); - if (isTransient(propertyAnnotations, propertyName, transientProperties, getMethod, field)) { + scanPropertyAnnotations(typeHierarchy, getMethod, field, processedClass); + if (isTransient( + propertyAnnotations, + propertyName, + transientProperties, + getMethod, + field, + processedClass)) { continue; } @@ -150,7 +156,7 @@ public EntityDefinition getDefinition(TypeElement classElement) { int clusteringColumnIndex = getClusteringColumnIndex(propertyAnnotations); Optional customCqlName = getCustomCqlName(propertyAnnotations); Optional computedFormula = - getComputedFormula(propertyAnnotations, getMethod, field); + 
getComputedFormula(propertyAnnotations, getMethod, field, processedClass); PropertyType propertyType = PropertyType.parse(typeMirror, context); PropertyDefinition property = @@ -171,6 +177,7 @@ public EntityDefinition getDefinition(TypeElement classElement) { .getMessager() .error( getMethod, + processedClass, "Duplicate partition key index: if multiple properties are annotated " + "with @%s, the annotation must be parameterized with an integer " + "indicating the position. Found duplicate index %d for %s and %s.", @@ -187,6 +194,7 @@ public EntityDefinition getDefinition(TypeElement classElement) { .getMessager() .error( getMethod, + processedClass, "Duplicate clustering column index: if multiple properties are annotated " + "with @%s, the annotation must be parameterized with an integer " + "indicating the position. Found duplicate index %d for %s and %s.", @@ -207,19 +215,19 @@ public EntityDefinition getDefinition(TypeElement classElement) { context .getMessager() .error( - classElement, + processedClass, "@%s-annotated class must have at least one property defined.", Entity.class.getSimpleName()); } - String entityName = Introspector.decapitalize(classElement.getSimpleName().toString()); - String defaultKeyspace = classElement.getAnnotation(Entity.class).defaultKeyspace(); + String entityName = Introspector.decapitalize(processedClass.getSimpleName().toString()); + String defaultKeyspace = processedClass.getAnnotation(Entity.class).defaultKeyspace(); return new DefaultEntityDefinition( - ClassName.get(classElement), + ClassName.get(processedClass), entityName, defaultKeyspace.isEmpty() ? 
null : defaultKeyspace, - Optional.ofNullable(classElement.getAnnotation(CqlName.class)).map(CqlName::value), + Optional.ofNullable(processedClass.getAnnotation(CqlName.class)).map(CqlName::value), ImmutableList.copyOf(partitionKey.values()), ImmutableList.copyOf(clusteringColumns.values()), regularColumns.build(), @@ -292,7 +300,8 @@ private int getClusteringColumnIndex(Map, Annotation private Optional getComputedFormula( Map, Annotation> annotations, ExecutableElement getMethod, - @Nullable VariableElement field) { + @Nullable VariableElement field, + TypeElement processedClass) { Computed annotation = (Computed) annotations.get(Computed.class); if (annotation != null) { @@ -301,7 +310,9 @@ private Optional getComputedFormula( if (value.isEmpty()) { Element element = field != null && field.getAnnotation(Computed.class) != null ? field : getMethod; - context.getMessager().error(element, "@Computed value should be non-empty."); + context + .getMessager() + .error(element, processedClass, "@Computed value should be non-empty."); } return Optional.of(value); } @@ -316,7 +327,8 @@ private CqlNameGenerator buildCqlNameGenerator(Set typeHierarchy) { } NamingStrategy namingStrategy = annotation.get().getAnnotation(); - Element classElement = annotation.get().getElement(); + // Safe cast because the annotation can only be used on types: + TypeElement classElement = (TypeElement) annotation.get().getElement(); if (namingStrategy == null) { return CqlNameGenerator.DEFAULT; } @@ -402,7 +414,8 @@ private boolean isTransient( String propertyName, Set transientProperties, ExecutableElement getMethod, - @Nullable VariableElement field) { + @Nullable VariableElement field, + TypeElement processedClass) { Transient transientAnnotation = (Transient) annotations.get(Transient.class); // check if property name is included in @TransientProperties @@ -422,6 +435,7 @@ private boolean isTransient( .getMessager() .error( element, + processedClass, "Property that is considered transient 
cannot be annotated with @%s.", exclusiveAnnotation.getSimpleName()); } @@ -439,12 +453,16 @@ private Set getTransientPropertyNames(Set typeHierarchy) { } private void reportMultipleAnnotationError( - Element element, Class a0, Class a1) { + Element element, + Class a0, + Class a1, + TypeElement processedClass) { if (a0 == a1) { context .getMessager() .warn( element, + processedClass, "@%s should be used either on the field or the getter, but not both. " + "The annotation on this field will be ignored.", a0.getSimpleName()); @@ -453,6 +471,7 @@ private void reportMultipleAnnotationError( .getMessager() .error( element, + processedClass, "Properties can't be annotated with both @%s and @%s.", a0.getSimpleName(), a1.getSimpleName()); @@ -462,13 +481,14 @@ private void reportMultipleAnnotationError( private Map, Annotation> scanPropertyAnnotations( Set typeHierarchy, ExecutableElement getMethod, - @Nullable VariableElement field) { + @Nullable VariableElement field, + TypeElement processedClass) { Map, Annotation> annotations = Maps.newHashMap(); // scan methods first as they should take precedence. 
- scanMethodAnnotations(typeHierarchy, getMethod, annotations); + scanMethodAnnotations(typeHierarchy, getMethod, annotations, processedClass); if (field != null) { - scanFieldAnnotations(field, annotations); + scanFieldAnnotations(field, annotations, processedClass); } return ImmutableMap.copyOf(annotations); @@ -486,7 +506,9 @@ private Class getExclusiveAnnotation( } private void scanFieldAnnotations( - VariableElement field, Map, Annotation> annotations) { + VariableElement field, + Map, Annotation> annotations, + TypeElement processedClass) { Class exclusiveAnnotation = getExclusiveAnnotation(annotations); for (Class annotationClass : PROPERTY_ANNOTATIONS) { Annotation annotation = field.getAnnotation(annotationClass); @@ -495,7 +517,8 @@ private void scanFieldAnnotations( if (exclusiveAnnotation == null) { exclusiveAnnotation = annotationClass; } else { - reportMultipleAnnotationError(field, exclusiveAnnotation, annotationClass); + reportMultipleAnnotationError( + field, exclusiveAnnotation, annotationClass, processedClass); } } if (!annotations.containsKey(annotationClass)) { @@ -508,7 +531,8 @@ private void scanFieldAnnotations( private void scanMethodAnnotations( Set typeHierarchy, ExecutableElement getMethod, - Map, Annotation> annotations) { + Map, Annotation> annotations, + TypeElement processedClass) { Class exclusiveAnnotation = getExclusiveAnnotation(annotations); for (Class annotationClass : PROPERTY_ANNOTATIONS) { Optional> annotation = @@ -519,7 +543,10 @@ private void scanMethodAnnotations( exclusiveAnnotation = annotationClass; } else { reportMultipleAnnotationError( - annotation.get().getElement(), exclusiveAnnotation, annotationClass); + annotation.get().getElement(), + exclusiveAnnotation, + annotationClass, + processedClass); } } if (!annotations.containsKey(annotationClass)) { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityFactory.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityFactory.java index 737acc8823e..270b0a28323 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityFactory.java @@ -24,5 +24,5 @@ public interface EntityFactory { * Parses an {@link Entity}-annotated POJO and returns a descriptor of its properties and * annotations. */ - EntityDefinition getDefinition(TypeElement classElement); + EntityDefinition getDefinition(TypeElement processedClass); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java index 4d9ff985f3b..2637ca4b391 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java @@ -44,14 +44,17 @@ public class MapperDaoFactoryMethodGenerator implements MethodGenerator { private final ExecutableElement methodElement; + private final TypeElement processedType; private final MapperImplementationSharedCode enclosingClass; private final ProcessorContext context; public MapperDaoFactoryMethodGenerator( ExecutableElement methodElement, + TypeElement processedType, MapperImplementationSharedCode enclosingClass, ProcessorContext context) { this.methodElement = methodElement; + this.processedType = processedType; this.enclosingClass = enclosingClass; this.context = context; } @@ -89,6 +92,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid return type: %s methods must return a %s-annotated interface, " + 
"or future thereof", DaoFactory.class.getSimpleName(), @@ -119,6 +123,7 @@ public Optional generate() { .getMessager() .error( methodElement, + processedType, "Invalid parameter annotations: " + "%s method parameters must be annotated with @%s or @%s", DaoFactory.class.getSimpleName(), @@ -176,6 +181,7 @@ private String validateKeyspaceOrTableParameter( .getMessager() .error( candidate, + processedType, "Invalid parameter annotations: " + "only one %s method parameter can be annotated with @%s", DaoFactory.class.getSimpleName(), @@ -189,6 +195,7 @@ private String validateKeyspaceOrTableParameter( .getMessager() .error( candidate, + processedType, "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s", annotation.getSimpleName(), DaoFactory.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java index ea4d44103de..0dc40f4b11f 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java @@ -100,12 +100,15 @@ protected JavaFile.Builder getContents() { Set modifiers = methodElement.getModifiers(); if (!modifiers.contains(Modifier.STATIC) && !modifiers.contains(Modifier.DEFAULT)) { Optional maybeGenerator = - context.getCodeGeneratorFactory().newMapperImplementationMethod(methodElement, this); + context + .getCodeGeneratorFactory() + .newMapperImplementationMethod(methodElement, interfaceElement, this); if (!maybeGenerator.isPresent()) { context .getMessager() .error( methodElement, + interfaceElement, "Unrecognized method signature: no implementation will be generated"); } else { 
maybeGenerator.flatMap(MethodGenerator::generate).ifPresent(classContents::addMethod); diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java index 09bd83f30ce..637aad8be3d 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java @@ -145,8 +145,7 @@ public static Object[][] invalidSignatures() { @Test public void should_warn_when_non_bind_marker_has_cql_name() { should_succeed_with_expected_warning( - "delete(java.util.UUID,java.lang.String): parameter id does not refer " - + "to a bind marker, @CqlName annotation will be ignored", + "Parameter id does not refer to a bind marker, @CqlName annotation will be ignored", MethodSpec.methodBuilder("delete") .addAnnotation( AnnotationSpec.builder(Delete.class) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java index 3093b92aedf..69a03d1b418 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java @@ -94,7 +94,7 @@ public static Object[][] invalidSignatures() { @Test public void should_warn_when_non_bind_marker_has_cql_name() { should_succeed_with_expected_warning( - "Method update(test.Product,java.lang.String): parameter entity does not refer " + "Parameter entity does not refer " + "to a bind marker, @CqlName annotation will be 
ignored", MethodSpec.methodBuilder("update") .addAnnotation( @@ -125,7 +125,7 @@ public void should_process_timestamp(String timestamp, CodeBlock expected) { // given ProcessorContext processorContext = mock(ProcessorContext.class); DaoUpdateMethodGenerator daoUpdateMethodGenerator = - new DaoUpdateMethodGenerator(null, null, null, processorContext); + new DaoUpdateMethodGenerator(null, null, null, null, processorContext); MethodSpec.Builder builder = MethodSpec.constructorBuilder(); // when @@ -141,7 +141,7 @@ public void should_process_ttl(String ttl, CodeBlock expected) { // given ProcessorContext processorContext = mock(ProcessorContext.class); DaoUpdateMethodGenerator daoUpdateMethodGenerator = - new DaoUpdateMethodGenerator(null, null, null, processorContext); + new DaoUpdateMethodGenerator(null, null, null, null, processorContext); MethodSpec.Builder builder = MethodSpec.constructorBuilder(); // when diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java index b67a33289cb..96a5e88e449 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java @@ -30,8 +30,8 @@ public class DaoCompiledMethodGeneratorTest extends DaoMethodGeneratorTest { @Test public void should_fail_with_expected_error() { should_fail_with_expected_error( - "findByDescriptionCompiledWrong(java.lang.String): parameter arg0 " - + "is declared in a compiled method " + "[findByDescriptionCompiledWrong(java.lang.String) inherited from CompiledProductDao] " + + "Parameter arg0 is declared in a compiled method " + "and refers to a bind marker " + "and thus must be 
annotated with @CqlName", "test", From 1fc9869e95e1e9521a9e5a5cca95c38db1fd99b7 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 24 Jul 2019 17:44:37 -0700 Subject: [PATCH 021/979] JAVA-2365: Redeclare default constants when an enum is abstracted behind an interface --- changelog/README.md | 2 ++ .../oss/driver/api/core/ConsistencyLevel.java | 12 ++++++++++++ .../oss/driver/api/core/DefaultConsistencyLevel.java | 3 +++ .../oss/driver/api/core/DefaultProtocolVersion.java | 6 +++++- .../oss/driver/api/core/ProtocolVersion.java | 5 +++++ .../datastax/oss/driver/api/core/cql/BatchType.java | 4 ++++ .../oss/driver/api/core/cql/DefaultBatchType.java | 3 +++ .../api/core/servererrors/DefaultWriteType.java | 3 +++ .../oss/driver/api/core/servererrors/WriteType.java | 9 +++++++++ 9 files changed, 46 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 720b9364d48..d6d4e3e2588 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2365: Redeclare default constants when an enum is abstracted behind an + interface - [improvement] JAVA-2302: Better target mapper errors and warnings for inherited methods - [improvement] JAVA-2336: Expose byte utility methods in the public API - [improvement] JAVA-2338: Revisit toString() for data container types diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ConsistencyLevel.java b/core/src/main/java/com/datastax/oss/driver/api/core/ConsistencyLevel.java index 65e32308fca..87fdcb079d3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ConsistencyLevel.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ConsistencyLevel.java @@ -26,6 +26,18 @@ */ public interface ConsistencyLevel { + ConsistencyLevel ANY = DefaultConsistencyLevel.ANY; + ConsistencyLevel ONE = DefaultConsistencyLevel.ONE; + ConsistencyLevel TWO = DefaultConsistencyLevel.TWO; + ConsistencyLevel THREE = DefaultConsistencyLevel.THREE; + 
ConsistencyLevel QUORUM = DefaultConsistencyLevel.QUORUM; + ConsistencyLevel ALL = DefaultConsistencyLevel.ALL; + ConsistencyLevel LOCAL_ONE = DefaultConsistencyLevel.LOCAL_ONE; + ConsistencyLevel LOCAL_QUORUM = DefaultConsistencyLevel.LOCAL_QUORUM; + ConsistencyLevel EACH_QUORUM = DefaultConsistencyLevel.EACH_QUORUM; + ConsistencyLevel SERIAL = DefaultConsistencyLevel.SERIAL; + ConsistencyLevel LOCAL_SERIAL = DefaultConsistencyLevel.LOCAL_SERIAL; + /** The numerical value that the level is encoded to in protocol frames. */ int getProtocolCode(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java index 34d8875eb8e..acda65e6743 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java @@ -35,6 +35,9 @@ public enum DefaultConsistencyLevel implements ConsistencyLevel { SERIAL(ProtocolConstants.ConsistencyLevel.SERIAL), LOCAL_SERIAL(ProtocolConstants.ConsistencyLevel.LOCAL_SERIAL), ; + // Note that, for the sake of convenience, we also expose shortcuts to these constants on the + // ConsistencyLevel interface. If you add a new enum constant, remember to update the interface as + // well. 
private final int protocolCode; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java index feda0c2afc8..1a207a9ede9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java @@ -37,7 +37,11 @@ public enum DefaultProtocolVersion implements ProtocolVersion { * * @see ProtocolVersion#isBeta() */ - V5(ProtocolConstants.Version.V5, true); + V5(ProtocolConstants.Version.V5, true), + ; + // Note that, for the sake of convenience, we also expose shortcuts to these constants on the + // ProtocolVersion interface. If you add a new enum constant, remember to update the interface as + // well. private final int code; private final boolean beta; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java index e39837cc090..cbc061432d3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java @@ -26,6 +26,11 @@ * {@code ProtocolVersion}s are {@link DefaultProtocolVersion} instances. */ public interface ProtocolVersion { + + ProtocolVersion V3 = DefaultProtocolVersion.V3; + ProtocolVersion V4 = DefaultProtocolVersion.V4; + ProtocolVersion V5 = DefaultProtocolVersion.V5; + /** The default version used for {@link Detachable detached} objects. */ // Implementation note: we can't use the ProtocolVersionRegistry here, this has to be a // compile-time constant. 
diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchType.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchType.java index f81d6c326bf..f019e295370 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchType.java @@ -24,6 +24,10 @@ */ public interface BatchType { + BatchType LOGGED = DefaultBatchType.LOGGED; + BatchType UNLOGGED = DefaultBatchType.UNLOGGED; + BatchType COUNTER = DefaultBatchType.COUNTER; + /** The numerical value that the batch type is encoded to. */ byte getProtocolCode(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/DefaultBatchType.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/DefaultBatchType.java index f941d48906d..a8f978d807e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/DefaultBatchType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/DefaultBatchType.java @@ -38,6 +38,9 @@ public enum DefaultBatchType implements BatchType { */ COUNTER(ProtocolConstants.BatchType.COUNTER), ; + // Note that, for the sake of convenience, we also expose shortcuts to these constants on the + // BatchType interface. If you add a new enum constant, remember to update the interface as + // well. private final byte code; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/DefaultWriteType.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/DefaultWriteType.java index b7fe225ff61..dc12e937aa8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/DefaultWriteType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/DefaultWriteType.java @@ -57,4 +57,7 @@ public enum DefaultWriteType implements WriteType { */ CDC, ; + // Note that, for the sake of convenience, we also expose shortcuts to these constants on the + // WriteType interface. 
If you add a new enum constant, remember to update the interface as + // well. } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteType.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteType.java index e34d90ad78b..c64a301d5af 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteType.java @@ -29,6 +29,15 @@ */ public interface WriteType { + WriteType SIMPLE = DefaultWriteType.SIMPLE; + WriteType BATCH = DefaultWriteType.BATCH; + WriteType UNLOGGED_BATCH = DefaultWriteType.UNLOGGED_BATCH; + WriteType COUNTER = DefaultWriteType.COUNTER; + WriteType BATCH_LOG = DefaultWriteType.BATCH_LOG; + WriteType CAS = DefaultWriteType.CAS; + WriteType VIEW = DefaultWriteType.VIEW; + WriteType CDC = DefaultWriteType.CDC; + /** The textual representation that the write type is encoded to in protocol frames. */ @NonNull String name(); From b66f064ec8423e4da97c729a46c9e81a7166444f Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 29 Jul 2019 11:17:53 -0700 Subject: [PATCH 022/979] JAVA-2369: Change mapper annotations retention to runtime --- changelog/README.md | 1 + .../driver/api/mapper/annotations/ClusteringColumn.java | 2 +- .../oss/driver/api/mapper/annotations/Computed.java | 7 +++++++ .../oss/driver/api/mapper/annotations/CqlName.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Dao.java | 2 +- .../oss/driver/api/mapper/annotations/DaoFactory.java | 2 +- .../oss/driver/api/mapper/annotations/DaoKeyspace.java | 2 +- .../oss/driver/api/mapper/annotations/DaoTable.java | 2 +- .../api/mapper/annotations/DefaultNullSavingStrategy.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Delete.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Entity.java | 2 +- .../oss/driver/api/mapper/annotations/GetEntity.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Insert.java | 2 +- 
.../datastax/oss/driver/api/mapper/annotations/Mapper.java | 2 +- .../oss/driver/api/mapper/annotations/NamingStrategy.java | 2 +- .../oss/driver/api/mapper/annotations/PartitionKey.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Query.java | 2 +- .../oss/driver/api/mapper/annotations/QueryProvider.java | 2 +- .../datastax/oss/driver/api/mapper/annotations/Select.java | 2 +- .../oss/driver/api/mapper/annotations/SetEntity.java | 2 +- .../driver/api/mapper/annotations/StatementAttributes.java | 2 +- .../oss/driver/api/mapper/annotations/Transient.java | 2 +- .../driver/api/mapper/annotations/TransientProperties.java | 7 +++++++ .../datastax/oss/driver/api/mapper/annotations/Update.java | 2 +- 24 files changed, 36 insertions(+), 21 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index d6d4e3e2588..5a683c22fdf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2369: Change mapper annotations retention to runtime - [improvement] JAVA-2365: Redeclare default constants when an enum is abstracted behind an interface - [improvement] JAVA-2302: Better target mapper errors and warnings for inherited methods diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.java index 6a54cdb30e1..7fd80dfc8e7 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.java @@ -47,7 +47,7 @@ *

This annotation is mutually exclusive with {@link PartitionKey}. */ @Target({ElementType.FIELD, ElementType.METHOD}) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface ClusteringColumn { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Computed.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Computed.java index d867dd0bb09..ef9b79f1863 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Computed.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Computed.java @@ -15,6 +15,11 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + /** * Annotates the field or getter of an {@link Entity} property, to indicate that when retrieving * data that the property should be set to the result of computation on the Cassandra side, @@ -27,6 +32,8 @@ * private int writeTime; * */ +@Target({ElementType.FIELD, ElementType.METHOD}) +@Retention(RetentionPolicy.RUNTIME) public @interface Computed { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/CqlName.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/CqlName.java index e90703a6a17..374d0dfe5c1 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/CqlName.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/CqlName.java @@ -56,7 +56,7 @@ * entity. 
*/ @Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.PARAMETER}) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface CqlName { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java index 876f8a98a99..be23c4eb4bf 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java @@ -55,5 +55,5 @@ * */ @Target(ElementType.TYPE) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Dao {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.java index dddcdbcced7..8c886c8c53d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.java @@ -92,5 +92,5 @@ * Note that the cache is a simple map with no eviction mechanism. 
*/ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface DaoFactory {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.java index c6f6f462cce..d0935c45fb5 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.java @@ -41,5 +41,5 @@ * @see DaoFactory */ @Target(ElementType.PARAMETER) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface DaoKeyspace {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoTable.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoTable.java index 71efa4bbf7f..c0e1c882937 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoTable.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoTable.java @@ -41,5 +41,5 @@ * @see DaoFactory */ @Target(ElementType.PARAMETER) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface DaoTable {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.java index db5aa0b621d..e1f92a761c5 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.java @@ -53,7 +53,7 @@ * Query} and {@link SetEntity} methods. 
*/ @Target(ElementType.TYPE) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface DefaultNullSavingStrategy { NullSavingStrategy value(); } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java index 324aa24eb81..ecdd06a6fd8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java @@ -122,7 +122,7 @@ * entity class and the naming convention). */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Delete { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java index 3466cf0d610..a8046f33adc 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java @@ -74,7 +74,7 @@ * inside other entities (to map UDT columns). */ @Target(ElementType.TYPE) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Entity { /** * Specifies a default keyspace to use when doing operations on this entity. 
diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java index 85017ffe03b..ff4d5a8805c 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java @@ -101,5 +101,5 @@ * {@link AsyncResultSet}), the mapper processor will issue a compile-time error. */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface GetEntity {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java index a43f2b04e45..43caf972c8c 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java @@ -117,7 +117,7 @@ * entity class and the naming convention). */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Insert { /** Whether to append an IF NOT EXISTS clause at the end of the generated INSERT query. */ boolean ifNotExists() default false; diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Mapper.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Mapper.java index 163095c3758..5463639b8f8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Mapper.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Mapper.java @@ -47,7 +47,7 @@ *

The interface should define one or more {@link DaoFactory} methods. */ @Target(ElementType.TYPE) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Mapper { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.java index fc947f16239..2970e0e3207 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.java @@ -41,7 +41,7 @@ * NamingConvention#SNAKE_CASE_INSENSITIVE}. */ @Target(ElementType.TYPE) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface NamingStrategy { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.java index d062efad9cb..1cfb4c31812 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.java @@ -48,7 +48,7 @@ *

This annotation is mutually exclusive with {@link ClusteringColumn}. */ @Target({ElementType.FIELD, ElementType.METHOD}) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface PartitionKey { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java index 5292ba65875..7f4d246aa17 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java @@ -137,7 +137,7 @@ * */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Query { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.java index 4c6d3951c8e..64bbeef210a 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.java @@ -57,7 +57,7 @@ * @see MapperBuilder#withCustomState(Object, Object) */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface QueryProvider { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java index ef62c55e48d..ea870d407aa 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java @@ -134,7 +134,7 @@ * entity class and the naming convention). 
*/ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Select { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java index 9b4a9d74a08..834c549c3b7 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java @@ -88,7 +88,7 @@ * compile-time warning. */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface SetEntity { /** diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.java index d62e55fd007..8190ebfb18e 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.java @@ -33,7 +33,7 @@ * second. */ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface StatementAttributes { /** * The name of the execution profile to use. diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Transient.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Transient.java index 91836b8f6b1..e88a2f140ab 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Transient.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Transient.java @@ -36,5 +36,5 @@ * ClusteringColumn} annotations. 
*/ @Target({ElementType.FIELD, ElementType.METHOD}) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Transient {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.java index 1206f82e0f3..f1960613c75 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.java @@ -15,6 +15,11 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + /** * Annotates an {@link Entity} to indicate which properties should be considered 'transient', * meaning that they should not be mapped to any column (neither during reads nor writes). @@ -39,6 +44,8 @@ * implementing classes will share a common configuration without needing to explicitly annotate * each property with a {@link Transient} annotation. */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.RUNTIME) public @interface TransientProperties { /** Specifies a list of property names that should be considered transient. */ diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java index 8f816c60bee..f16b87a97a7 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java @@ -130,7 +130,7 @@ * entity class and the naming convention). 
*/ @Target(ElementType.METHOD) -@Retention(RetentionPolicy.CLASS) +@Retention(RetentionPolicy.RUNTIME) public @interface Update { /** From 37dee5af2dcafa81cded49425fcd8ef1c13ce311 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 30 Jul 2019 15:06:45 -0700 Subject: [PATCH 023/979] Update javadocs of Dao annotation --- .../com/datastax/oss/driver/api/mapper/annotations/Dao.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java index be23c4eb4bf..89a9edd2076 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Dao.java @@ -49,9 +49,10 @@ *

  • {@link GetEntity} *
  • {@link Insert} *
  • {@link Query} + *
  • {@link QueryProvider} *
  • {@link Select} *
  • {@link SetEntity} - * + *
  • {@link Update} * */ @Target(ElementType.TYPE) From fd7a7a61b33317e0fcd0865cb389e66eafa2698f Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 31 Jul 2019 08:42:46 -0700 Subject: [PATCH 024/979] Add revapi ignores for JAVA-2369 See b66f064ec --- mapper-runtime/revapi.json | 42 +++++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/mapper-runtime/revapi.json b/mapper-runtime/revapi.json index 0f06ffd3673..a75b7572354 100644 --- a/mapper-runtime/revapi.json +++ b/mapper-runtime/revapi.json @@ -17,6 +17,46 @@ ] } } - } + }, + "ignore": [ + { + "regex": true, + "code": "java.annotation.attributeValueChanged", + "old": "@interface com\.datastax\.oss\.driver\.api\.mapper\.annotations\..*", + "annotationType": "java.lang.annotation.Retention", + "attribute": "value", + "oldValue": "java.lang.annotation.RetentionPolicy.CLASS", + "newValue": "java.lang.annotation.RetentionPolicy.RUNTIME", + "justification": "JAVA-2369: Change mapper annotations retention to runtime" + }, + { + "code": "java.annotation.added", + "old": "@interface com.datastax.oss.driver.api.mapper.annotations.Computed", + "new": "@interface com.datastax.oss.driver.api.mapper.annotations.Computed", + "annotation": "@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)", + "justification": "Oversight, should have been annotated this way from the start" + }, + { + "code": "java.annotation.added", + "old": "@interface com.datastax.oss.driver.api.mapper.annotations.Computed", + "new": "@interface com.datastax.oss.driver.api.mapper.annotations.Computed", + "annotation": "@java.lang.annotation.Target({java.lang.annotation.ElementType.FIELD, java.lang.annotation.ElementType.METHOD})", + "justification": "Oversight, should have been annotated this way from the start" + }, + { + "code": "java.annotation.added", + "old": "@interface com.datastax.oss.driver.api.mapper.annotations.TransientProperties", + "new": "@interface 
com.datastax.oss.driver.api.mapper.annotations.TransientProperties", + "annotation": "@java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME)", + "justification": "Oversight, should have been annotated this way from the start" + }, + { + "code": "java.annotation.added", + "old": "@interface com.datastax.oss.driver.api.mapper.annotations.TransientProperties", + "new": "@interface com.datastax.oss.driver.api.mapper.annotations.TransientProperties", + "annotation": "@java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE})", + "justification": "Oversight, should have been annotated this way from the start" + } + ] } } From 0dbf86570ff29ad66e4ea280ba7f0808bb917254 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 31 Jul 2019 08:09:52 -0700 Subject: [PATCH 025/979] Force Trusty distribution in Travis build The current default distribution doesn't have jdk_switcher anymore. --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 6ad52b119ae..50c05e76856 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,5 @@ language: java +dist: trusty sudo: false # see https://sormuras.github.io/blog/2018-03-20-jdk-matrix.html matrix: From c55ff4c289b2aec2444c82ba2cc7687cd6e770c5 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Wed, 12 Jun 2019 12:04:14 -0500 Subject: [PATCH 026/979] JAVA-2236: Add methods to set the auth provider programmatically --- changelog/README.md | 1 + .../core/session/ProgrammaticArguments.java | 21 +++++++- .../api/core/session/SessionBuilder.java | 33 +++++++++++++ .../ProgrammaticPlainTextAuthProvider.java | 46 ++++++++++++++++++ .../core/context/DefaultDriverContext.java | 27 +++++++---- core/src/main/resources/reference.conf | 3 ++ .../core/auth/PlainTextAuthProviderIT.java | 14 ++++++ manual/core/authentication/README.md | 48 +++++++++++++++++-- 8 files changed, 177 insertions(+), 16 deletions(-) create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java diff --git a/changelog/README.md b/changelog/README.md index 5a683c22fdf..f89ebf9d14a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2236: Add methods to set the auth provider programmatically - [improvement] JAVA-2369: Change mapper annotations retention to runtime - [improvement] JAVA-2365: Redeclare default constants when an enum is abstracted behind an interface diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index aeda89d3f2f..50f875c4141 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.core.session; +import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; @@ -48,6 +49,7 @@ public static Builder builder() { private final Map localDatacenters; private final Map> nodeFilters; private final ClassLoader classLoader; + private final AuthProvider authProvider; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -56,7 +58,8 @@ private ProgrammaticArguments( @Nullable RequestTracker requestTracker, @NonNull Map localDatacenters, @NonNull Map> nodeFilters, - @Nullable ClassLoader classLoader) { + @Nullable ClassLoader classLoader, + @Nullable AuthProvider authProvider) { this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; this.schemaChangeListener = schemaChangeListener; @@ -64,6 +67,7 @@ private ProgrammaticArguments( 
this.localDatacenters = localDatacenters; this.nodeFilters = nodeFilters; this.classLoader = classLoader; + this.authProvider = authProvider; } @NonNull @@ -101,6 +105,11 @@ public ClassLoader getClassLoader() { return classLoader; } + @Nullable + public AuthProvider getAuthProvider() { + return authProvider; + } + public static class Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -111,6 +120,7 @@ public static class Builder { private ImmutableMap.Builder> nodeFiltersBuilder = ImmutableMap.builder(); private ClassLoader classLoader; + private AuthProvider authProvider; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... typeCodecs) { @@ -172,6 +182,12 @@ public Builder withClassLoader(@Nullable ClassLoader classLoader) { return this; } + @NonNull + public Builder withAuthProvider(@Nullable AuthProvider authProvider) { + this.authProvider = authProvider; + return this; + } + @NonNull public ProgrammaticArguments build() { return new ProgrammaticArguments( @@ -181,7 +197,8 @@ public ProgrammaticArguments build() { requestTracker, localDatacentersBuilder.build(), nodeFiltersBuilder.build(), - classLoader); + classLoader, + authProvider); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 587f8c4e225..644dbb043d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -29,6 +30,7 
@@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.ContactPoints; +import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -217,6 +219,37 @@ public SelfT withRequestTracker(@Nullable RequestTracker requestTracker) { return self; } + /** + * Register an authentication provider to use with the session. + * + *

    If the provider is specified programmatically with this method, it overrides the + * configuration (that is, the {@code advanced.auth-provider.class} option will be ignored). + */ + @NonNull + public SelfT withAuthProvider(@Nullable AuthProvider authProvider) { + this.programmaticArgumentsBuilder.withAuthProvider(authProvider); + return self; + } + + /** + * Configures the session to use plaintext authentication with the given username and password. + * + *

    This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider + * implementation. Therefore calling it overrides the configuration (that is, the {@code + * advanced.auth-provider.class} option will be ignored). + * + *

    Note that this approach holds the credentials in clear text in memory, which makes them + * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for + * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code + * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link + * #withAuthProvider(AuthProvider)} or via the configuration ({@code + * advanced.auth-provider.class}). + */ + @NonNull + public SelfT withAuthCredentials(@NonNull String username, @NonNull String password) { + return withAuthProvider(new ProgrammaticPlainTextAuthProvider(username, password)); + } + /** * Specifies the datacenter that is considered "local" by the load balancing policy. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java new file mode 100644 index 00000000000..b6ade1c6c31 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.auth; + +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.ThreadSafe; + +/** + * Alternative plaintext auth provider that receives the credentials programmatically instead of + * pulling them from the configuration. + * + * @see SessionBuilder#withAuthCredentials(String, String) + */ +@ThreadSafe +public class ProgrammaticPlainTextAuthProvider extends PlainTextAuthProviderBase { + private final String username; + private final String password; + + public ProgrammaticPlainTextAuthProvider(String username, String password) { + // This will typically be built before the session so we don't know the log prefix yet. Pass an + // empty string, it's only used in one log message. + super(""); + this.username = username; + this.password = password; + } + + @NonNull + @Override + protected Credentials getCredentials() { + return new Credentials(username.toCharArray(), password.toCharArray()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index da99c1d9a87..e0636ef9438 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -126,8 +126,6 @@ public class DefaultDriverContext implements InternalDriverContext { new LazyReference<>("timestampGenerator", this::buildTimestampGenerator, cycleDetector); private final LazyReference addressTranslatorRef = new LazyReference<>("addressTranslator", this::buildAddressTranslator, cycleDetector); - private final LazyReference> authProviderRef = - new LazyReference<>("authProvider", this::buildAuthProvider, cycleDetector); private final LazyReference> sslEngineFactoryRef = new 
LazyReference<>("sslEngineFactory", this::buildSslEngineFactory, cycleDetector); @@ -185,6 +183,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final LazyReference nodeStateListenerRef; private final LazyReference schemaChangeListenerRef; private final LazyReference requestTrackerRef; + private final LazyReference> authProviderRef; private final DriverConfig config; private final DriverConfigLoader configLoader; @@ -194,6 +193,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final NodeStateListener nodeStateListenerFromBuilder; private final SchemaChangeListener schemaChangeListenerFromBuilder; private final RequestTracker requestTrackerFromBuilder; + private final AuthProvider authProviderFromBuilder; private final Map localDatacentersFromBuilder; private final Map> nodeFiltersFromBuilder; private final ClassLoader classLoader; @@ -226,6 +226,11 @@ public DefaultDriverContext( () -> buildSchemaChangeListener(schemaChangeListenerFromBuilder), cycleDetector); this.requestTrackerFromBuilder = programmaticArguments.getRequestTracker(); + this.authProviderFromBuilder = programmaticArguments.getAuthProvider(); + + this.authProviderRef = + new LazyReference<>( + "authProvider", () -> buildAuthProvider(authProviderFromBuilder), cycleDetector); this.requestTrackerRef = new LazyReference<>( "requestTracker", () -> buildRequestTracker(requestTrackerFromBuilder), cycleDetector); @@ -335,14 +340,6 @@ protected AddressTranslator buildAddressTranslator() { DefaultDriverOption.ADDRESS_TRANSLATOR_CLASS))); } - protected Optional buildAuthProvider() { - return Reflection.buildFromConfig( - this, - DefaultDriverOption.AUTH_PROVIDER_CLASS, - AuthProvider.class, - "com.datastax.oss.driver.internal.core.auth"); - } - protected Optional buildSslEngineFactory() { return Reflection.buildFromConfig( this, @@ -525,6 +522,16 @@ protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBu 
DefaultDriverOption.REQUEST_TRACKER_CLASS))); } + protected Optional buildAuthProvider(AuthProvider authProviderFromBuilder) { + return (authProviderFromBuilder != null) + ? Optional.of(authProviderFromBuilder) + : Reflection.buildFromConfig( + this, + DefaultDriverOption.AUTH_PROVIDER_CLASS, + AuthProvider.class, + "com.datastax.oss.driver.internal.core.auth"); + } + @NonNull @Override public String getSessionName() { diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 1252bec02f6..3ee70be5933 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -388,6 +388,9 @@ datastax-java-driver { # # The driver provides a single implementation out of the box: PlainTextAuthProvider, that uses # plain-text credentials. It requires the `username` and `password` options below. + # If storing clear text credentials in the configuration is not acceptable for you, consider + # providing them programmatically with SessionBuilder#withAuthCredentials, or writing your own + # provider implementation. # # You can also specify a custom class that implements AuthProvider and has a public # constructor with a DriverContext argument. 
diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java index d25e0b275ff..49db7e5cbf1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; @@ -59,6 +60,19 @@ public void should_connect_with_credentials() { } } + @Test + public void should_connect_with_programmatic_credentials() { + + SessionBuilder builder = + SessionUtils.baseBuilder() + .addContactEndPoints(ccm.getContactPoints()) + .withAuthCredentials("cassandra", "cassandra"); + + try (CqlSession session = (CqlSession) builder.build()) { + session.execute("select * from system.local"); + } + } + @Test(expected = AllNodesFailedException.class) public void should_not_connect_with_invalid_credentials() { DriverConfigLoader loader = diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index b79a0a28801..bcb421fc948 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -1,7 +1,13 @@ ## Authentication -Cassandra's binary protocol supports SASL-based authentication. To enable it, define an -`auth-provider` section in the [configuration](../configuration/) +Cassandra's binary protocol supports [SASL]-based authentication. 
To use it, you must provide an +*auth provider* that will authenticate with the server every time a new connection gets established. + +This can be done in two ways: + +### In the configuration + +Define an `auth-provider` section in the [configuration](../configuration/): ``` datastax-java-driver { @@ -15,16 +21,50 @@ datastax-java-driver { Authentication must be configured before opening a session, it cannot be changed at runtime. -[PlainTextAuthProvider] is provided out of the box, for simple username/password authentication +`PlainTextAuthProvider` is provided out of the box, for simple username/password authentication (intended to work with the server-side `PasswordAuthenticator`). The credentials can be changed at runtime, they will be used for new connection attempts once the configuration gets reloaded. You can also write your own provider; it must implement [AuthProvider] and declare a public constructor with a [DriverContext] argument. +``` +datastax-java-driver { + advanced.auth-provider { + class = com.mycompany.MyCustomAuthProvider + ... // any custom options your provider might use + } +} +``` + +### Programmatically + +You can also pass an authenticator instance while building the session: + +```java +CqlSession session = + CqlSession.builder() + .withAuthProvider(new MyCustomAuthProvider()) + .build(); +``` + +For convenience, there is a shortcut that takes the credentials directly. This is equivalent to +using `PlainTextAuthProvider` in the configuration: + +```java +CqlSession session = + CqlSession.builder() + .withAuthCredentials("user", "pass") + .build(); +``` + +One downside of `withAuthCredentials` is that the credentials are stored in clear text in memory; +this means they are vulnerable to an attacker who is able to perform memory dumps. If this is not +acceptable for you, consider writing your own [AuthProvider] implementation (the internal class +`PlainTextAuthProviderBase` is a good starting point). 
+ [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer [AuthProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/AuthProvider.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.html From 310cd3365fd7d17d1fbfb8f8d10ab5312950ee81 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 Jun 2019 12:59:26 +0200 Subject: [PATCH 027/979] JAVA-2303: Ignore peer rows matching the control host's RPC address --- changelog/README.md | 1 + .../core/metadata/DefaultTopologyMonitor.java | 53 ++++++-- .../metadata/DefaultTopologyMonitorTest.java | 125 ++++++++++++++++-- 3 files changed, 156 insertions(+), 23 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index f89ebf9d14a..b20ef0dcd31 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2303: Ignore peer rows matching the control host's RPC address - [improvement] JAVA-2236: Add methods to set the auth provider programmatically - [improvement] JAVA-2369: Change mapper annotations retention to runtime - [improvement] JAVA-2365: Redeclare default constants when an enum is abstracted behind an diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index dc7829ab3e7..5e448185fbd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -197,14 +197,18 @@ public CompletionStage> refreshNodeList() { (controlNodeResult, peersResult) -> { List nodeInfos = new ArrayList<>(); 
AdminRow localRow = controlNodeResult.iterator().next(); - InetSocketAddress localBroadcastRpcAddress = getBroadcastRpcAddress(localRow); + InetSocketAddress localBroadcastRpcAddress = + getBroadcastRpcAddress(localRow, localEndPoint); nodeInfos.add(nodeInfoBuilder(localRow, localBroadcastRpcAddress, localEndPoint).build()); for (AdminRow peerRow : peersResult) { if (isPeerValid(peerRow)) { - InetSocketAddress peerBroadcastRpcAddress = getBroadcastRpcAddress(peerRow); - NodeInfo nodeInfo = - nodeInfoBuilder(peerRow, peerBroadcastRpcAddress, localEndPoint).build(); - nodeInfos.add(nodeInfo); + InetSocketAddress peerBroadcastRpcAddress = + getBroadcastRpcAddress(peerRow, localEndPoint); + if (peerBroadcastRpcAddress != null) { + NodeInfo nodeInfo = + nodeInfoBuilder(peerRow, peerBroadcastRpcAddress, localEndPoint).build(); + nodeInfos.add(nodeInfo); + } } } return nodeInfos; @@ -263,8 +267,10 @@ private Optional firstPeerRowAsNodeInfo(AdminResult result, EndPoint l if (iterator.hasNext()) { AdminRow row = iterator.next(); if (isPeerValid(row)) { - InetSocketAddress peerBroadcastRpcAddress = getBroadcastRpcAddress(row); - return Optional.of(nodeInfoBuilder(row, peerBroadcastRpcAddress, localEndPoint).build()); + return Optional.ofNullable(getBroadcastRpcAddress(row, localEndPoint)) + .map( + broadcastRpcAddress -> + nodeInfoBuilder(row, broadcastRpcAddress, localEndPoint).build()); } } return Optional.empty(); @@ -364,7 +370,7 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( private Optional findInPeers( AdminResult result, InetSocketAddress broadcastRpcAddressToFind, EndPoint localEndPoint) { for (AdminRow row : result) { - InetSocketAddress broadcastRpcAddress = getBroadcastRpcAddress(row); + InetSocketAddress broadcastRpcAddress = getBroadcastRpcAddress(row, localEndPoint); if (broadcastRpcAddress != null && broadcastRpcAddress.equals(broadcastRpcAddressToFind) && isPeerValid(row)) { @@ -383,8 +389,10 @@ private Optional findInPeers( for (AdminRow row : 
result) { UUID hostId = row.getUuid("host_id"); if (hostId != null && hostId.equals(hostIdToFind) && isPeerValid(row)) { - InetSocketAddress broadcastRpcAddress = getBroadcastRpcAddress(row); - return Optional.of(nodeInfoBuilder(row, broadcastRpcAddress, localEndPoint).build()); + return Optional.ofNullable(getBroadcastRpcAddress(row, localEndPoint)) + .map( + broadcastRpcAddress -> + nodeInfoBuilder(row, broadcastRpcAddress, localEndPoint).build()); } } LOG.debug("[{}] Could not find any peer row matching {}", logPrefix, hostIdToFind); @@ -407,11 +415,17 @@ private void savePort(DriverChannel channel) { * Determines the broadcast RPC address of the node represented by the given row. * * @param row The row to inspect; can represent either a local (control) node or a peer node. + * @param localEndPoint the control node endpoint that was used to query the node's system tables. + * This is a parameter because it would be racy to call {@code + * controlConnection.channel().getEndPoint()} from within this method, as the control + * connection may have changed its channel since. So this parameter must be provided by the + * caller. * @return the broadcast RPC address of the node, if it could be determined; or {@code null} * otherwise. */ @Nullable - protected InetSocketAddress getBroadcastRpcAddress(@NonNull AdminRow row) { + protected InetSocketAddress getBroadcastRpcAddress( + @NonNull AdminRow row, @NonNull EndPoint localEndPoint) { // in system.peers or system.local InetAddress broadcastRpcInetAddress = row.getInetAddress("rpc_address"); if (broadcastRpcInetAddress == null) { @@ -434,7 +448,22 @@ protected InetSocketAddress getBroadcastRpcAddress(@NonNull AdminRow row) { broadcastRpcPort = port == -1 ? 
0 : port; } } - return new InetSocketAddress(broadcastRpcInetAddress, broadcastRpcPort); + InetSocketAddress broadcastRpcAddress = + new InetSocketAddress(broadcastRpcInetAddress, broadcastRpcPort); + if (row.contains("peer") && broadcastRpcAddress.equals(localEndPoint.resolve())) { + // JAVA-2303: if the peer is actually the control node, ignore that peer as it is likely + // a misconfiguration problem. + LOG.warn( + "[{}] Control node {} has an entry for itself in {}: this entry will be ignored. " + + "This is likely due to a misconfiguration; please verify your rpc_address " + + "configuration in cassandra.yaml on all nodes in your cluster.", + logPrefix, + localEndPoint, + retrievePeerTableName()); + return null; + } + + return broadcastRpcAddress; } /** diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java index 084e74b1f74..1b10a647611 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java @@ -18,7 +18,9 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.Assertions.filter; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; @@ -26,6 +28,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; import 
com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; @@ -62,16 +68,19 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.slf4j.LoggerFactory; @RunWith(DataProviderRunner.class) public class DefaultTopologyMonitorTest { - private static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); private static final InetSocketAddress ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); @Mock private InternalDriverContext context; @@ -81,11 +90,17 @@ public class DefaultTopologyMonitorTest { @Mock private DriverChannel channel; @Mock protected MetricsFactory metricsFactory; + @Mock private Appender appender; + @Captor private ArgumentCaptor loggingEventCaptor; + private DefaultNode node1; private DefaultNode node2; private TestTopologyMonitor topologyMonitor; + private Logger logger; + private Level initialLogLevel; + @Before public void setup() { MockitoAnnotations.initMocks(this); @@ -107,6 +122,17 @@ public void setup() { when(context.getControlConnection()).thenReturn(controlConnection); topologyMonitor = new TestTopologyMonitor(context); + + logger = (Logger) LoggerFactory.getLogger(DefaultTopologyMonitor.class); + initialLogLevel = logger.getLevel(); + logger.setLevel(Level.INFO); + logger.addAppender(appender); + } + + @After + public void teardown() { + logger.detachAppender(appender); + logger.setLevel(initialLogLevel); } @Test @@ -239,15 +265,15 @@ public void should_refresh_node_from_peers_if_broadcast_address_is_not_present_V @Test public void should_get_new_node_from_peers() { // Given 
- AdminRow peer3 = mockPeersRow(3, UUID.randomUUID()); - AdminRow peer2 = mockPeersRow(2, node2.getHostId()); - AdminRow peer1 = mockPeersRow(1, node1.getHostId()); + AdminRow peer3 = mockPeersRow(4, UUID.randomUUID()); + AdminRow peer2 = mockPeersRow(3, node2.getHostId()); + AdminRow peer1 = mockPeersRow(2, node1.getHostId()); topologyMonitor.isSchemaV2 = false; topologyMonitor.stubQueries( new StubbedQuery("SELECT * FROM system.peers", mockResult(peer3, peer2, peer1))); // When - CompletionStage> futureInfo = topologyMonitor.getNewNodeInfo(ADDRESS1); + CompletionStage> futureInfo = topologyMonitor.getNewNodeInfo(ADDRESS2); // Then assertThatStage(futureInfo) @@ -255,7 +281,7 @@ public void should_get_new_node_from_peers() { maybeInfo -> { assertThat(maybeInfo.isPresent()).isTrue(); NodeInfo info = maybeInfo.get(); - assertThat(info.getDatacenter()).isEqualTo("dc1"); + assertThat(info.getDatacenter()).isEqualTo("dc2"); }); // The rpc_address in each row should have been tried, only the last row should have been // converted @@ -272,15 +298,15 @@ public void should_get_new_node_from_peers() { @Test public void should_get_new_node_from_peers_v2() { // Given - AdminRow peer3 = mockPeersV2Row(3, UUID.randomUUID()); - AdminRow peer2 = mockPeersV2Row(2, node2.getHostId()); - AdminRow peer1 = mockPeersV2Row(1, node1.getHostId()); + AdminRow peer3 = mockPeersV2Row(4, UUID.randomUUID()); + AdminRow peer2 = mockPeersV2Row(3, node2.getHostId()); + AdminRow peer1 = mockPeersV2Row(2, node1.getHostId()); topologyMonitor.isSchemaV2 = true; topologyMonitor.stubQueries( new StubbedQuery("SELECT * FROM system.peers_v2", mockResult(peer3, peer2, peer1))); // When - CompletionStage> futureInfo = topologyMonitor.getNewNodeInfo(ADDRESS1); + CompletionStage> futureInfo = topologyMonitor.getNewNodeInfo(ADDRESS2); // Then assertThatStage(futureInfo) @@ -288,7 +314,7 @@ public void should_get_new_node_from_peers_v2() { maybeInfo -> { assertThat(maybeInfo.isPresent()).isTrue(); NodeInfo 
info = maybeInfo.get(); - assertThat(info.getDatacenter()).isEqualTo("dc1"); + assertThat(info.getDatacenter()).isEqualTo("dc2"); }); // The natove in each row should have been tried, only the last row should have been // converted @@ -358,6 +384,10 @@ public void should_skip_invalid_peers_row(String columnToCheck) { // Then assertThatStage(futureInfo).isSuccess(maybeInfo -> assertThat(maybeInfo).isEmpty()); assertThat(node2.broadcastAddress).isNotNull().isEqualTo(ADDRESS2); + assertLog( + Level.WARN, + "[null] Found invalid row in system.peers for peer: /127.0.0.2. " + + "This is likely a gossip or snitch issue, this node will be ignored."); } @Test @@ -390,6 +420,10 @@ public void should_skip_invalid_peers_row_v2(String columnToCheck) { // Then assertThatStage(futureInfo).isSuccess(maybeInfo -> assertThat(maybeInfo).isEmpty()); assertThat(node2.broadcastAddress).isNotNull().isEqualTo(ADDRESS2); + assertLog( + Level.WARN, + "[null] Found invalid row in system.peers_v2 for peer: /127.0.0.2. 
" + + "This is likely a gossip or snitch issue, this node will be ignored."); } @DataProvider @@ -415,6 +449,67 @@ public void should_stop_executing_queries_once_closed() { .isFailed(error -> assertThat(error).isInstanceOf(IllegalStateException.class)); } + @Test + public void should_warn_when_control_host_found_in_system_peers() { + // Given + AdminRow local = mockLocalRow(1, node1.getHostId()); + AdminRow peer3 = mockPeersRow(3, UUID.randomUUID()); + AdminRow peer2 = mockPeersRow(2, node2.getHostId()); + AdminRow peer1 = mockPeersRow(1, node2.getHostId()); // invalid + topologyMonitor.stubQueries( + new StubbedQuery("SELECT * FROM system.local", mockResult(local)), + new StubbedQuery("SELECT * FROM system.peers_v2", Collections.emptyMap(), null, true), + new StubbedQuery("SELECT * FROM system.peers", mockResult(peer3, peer2, peer1))); + + // When + CompletionStage> futureInfos = topologyMonitor.refreshNodeList(); + + // Then + assertThatStage(futureInfos) + .isSuccess( + infos -> + assertThat(infos) + .hasSize(3) + .extractingResultOf("getEndPoint") + .containsOnlyOnce(node1.getEndPoint())); + assertLog( + Level.WARN, + "[null] Control node /127.0.0.1:9042 has an entry for itself in system.peers: " + + "this entry will be ignored. 
This is likely due to a misconfiguration; " + + "please verify your rpc_address configuration in cassandra.yaml on " + + "all nodes in your cluster."); + } + + @Test + public void should_warn_when_control_host_found_in_system_peers_v2() { + // Given + AdminRow local = mockLocalRow(1, node1.getHostId()); + AdminRow peer3 = mockPeersRow(3, UUID.randomUUID()); + AdminRow peer2 = mockPeersRow(2, node2.getHostId()); + AdminRow peer1 = mockPeersRow(1, node2.getHostId()); // invalid + topologyMonitor.stubQueries( + new StubbedQuery("SELECT * FROM system.local", mockResult(local)), + new StubbedQuery("SELECT * FROM system.peers_v2", mockResult(peer3, peer2, peer1))); + + // When + CompletionStage> futureInfos = topologyMonitor.refreshNodeList(); + + // Then + assertThatStage(futureInfos) + .isSuccess( + infos -> + assertThat(infos) + .hasSize(3) + .extractingResultOf("getEndPoint") + .containsOnlyOnce(node1.getEndPoint())); + assertLog( + Level.WARN, + "[null] Control node /127.0.0.1:9042 has an entry for itself in system.peers_v2: " + + "this entry will be ignored. This is likely due to a misconfiguration; " + + "please verify your rpc_address configuration in cassandra.yaml on " + + "all nodes in your cluster."); + } + /** Mocks the query execution logic. */ private static class TestTopologyMonitor extends DefaultTopologyMonitor { @@ -539,4 +634,12 @@ private AdminResult mockResult(AdminRow... 
rows) { when(result.iterator()).thenReturn(Iterators.forArray(rows)); return result; } + + private void assertLog(Level level, String message) { + verify(appender, atLeast(1)).doAppend(loggingEventCaptor.capture()); + Iterable logs = + filter(loggingEventCaptor.getAllValues()).with("level", level).get(); + assertThat(logs).hasSize(1); + assertThat(logs.iterator().next().getFormattedMessage()).contains(message); + } } From 05a55cc2d1bd697b27eb6651cc58720ca22655b3 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 31 Jul 2019 16:54:57 -0700 Subject: [PATCH 028/979] Rename method in DefaultTopologyMonitor --- .../core/metadata/DefaultTopologyMonitor.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 5e448185fbd..2b146134532 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -120,7 +120,7 @@ public CompletionStage> refreshNode(Node node) { query( channel, "SELECT * FROM " - + retrievePeerTableName() + + getPeerTableName() + " WHERE peer = :address and peer_port = :port", ImmutableMap.of( "address", @@ -131,12 +131,12 @@ public CompletionStage> refreshNode(Node node) { query = query( channel, - "SELECT * FROM " + retrievePeerTableName() + " WHERE peer = :address", + "SELECT * FROM " + getPeerTableName() + " WHERE peer = :address", ImmutableMap.of("address", node.getBroadcastAddress().get().getAddress())); } return query.thenApply(result -> firstPeerRowAsNodeInfo(result, localEndPoint)); } else { - return query(channel, "SELECT * FROM " + retrievePeerTableName()) + return query(channel, "SELECT * FROM " + getPeerTableName()) .thenApply(result -> findInPeers(result, node.getHostId(), 
localEndPoint)); } } @@ -149,7 +149,7 @@ public CompletionStage> getNewNodeInfo(InetSocketAddress broa LOG.debug("[{}] Fetching info for new node {}", logPrefix, broadcastRpcAddress); DriverChannel channel = controlConnection.channel(); EndPoint localEndPoint = channel.getEndPoint(); - return query(channel, "SELECT * FROM " + retrievePeerTableName()) + return query(channel, "SELECT * FROM " + getPeerTableName()) .thenApply(result -> findInPeers(result, broadcastRpcAddress, localEndPoint)); } @@ -255,11 +255,8 @@ private CompletionStage query(DriverChannel channel, String querySt return query(channel, queryString, Collections.emptyMap()); } - private String retrievePeerTableName() { - if (isSchemaV2) { - return "system.peers_v2"; - } - return "system.peers"; + private String getPeerTableName() { + return isSchemaV2 ? "system.peers_v2" : "system.peers"; } private Optional firstPeerRowAsNodeInfo(AdminResult result, EndPoint localEndPoint) { @@ -459,7 +456,7 @@ protected InetSocketAddress getBroadcastRpcAddress( + "configuration in cassandra.yaml on all nodes in your cluster.", logPrefix, localEndPoint, - retrievePeerTableName()); + getPeerTableName()); return null; } @@ -485,7 +482,7 @@ protected boolean isPeerValid(AdminRow peerRow) { "[{}] Found invalid row in {} for peer: {}. 
" + "This is likely a gossip or snitch issue, this node will be ignored.", logPrefix, - retrievePeerTableName(), + getPeerTableName(), peerRow.getInetAddress("peer")); } return valid; From 7404f4956f2b4fdfab666671dc16ab46e7927b30 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 31 Jul 2019 16:45:13 -0700 Subject: [PATCH 029/979] Suppress atomic update warnings in IntelliJ IDEA --- .../oss/driver/internal/core/channel/InFlightHandler.java | 2 +- .../oss/driver/internal/core/channel/StreamIdGenerator.java | 5 +++-- .../oss/driver/internal/core/metadata/NodeStateManager.java | 2 +- .../internal/core/session/throttling/SettableNanoClock.java | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index d6d69306871..df34cdef58f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -227,7 +227,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception wasInFlight = false; callback = orphaned.get(streamId); if (callback == null) { - LOG.trace("[{}] Got response on unknown stream id {}, skipping", streamId); + LOG.trace("[{}] Got response on unknown stream id {}, skipping", logPrefix, streamId); return; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java index 77e985064b4..756112c8f77 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java @@ -39,7 +39,8 @@ class StreamIdGenerator { this.availableIds = this.maxAvailableIds; } - 
@SuppressWarnings("NonAtomicVolatileUpdate") // see explanation in class Javadoc + // safe because a given instance is always called from the same I/O thread + @SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) int acquire() { int id = ids.nextClearBit(0); if (id >= maxAvailableIds) { @@ -50,7 +51,7 @@ int acquire() { return id; } - @SuppressWarnings("NonAtomicVolatileUpdate") + @SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) void release(int id) { if (ids.get(id)) { availableIds++; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java index b2f264f30f9..1df418ca8de 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java @@ -124,7 +124,7 @@ private void markInitialized() { } // Updates to DefaultNode's volatile fields are confined to the admin thread - @SuppressWarnings("NonAtomicVolatileUpdate") + @SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) private void onChannelEvent(ChannelEvent event) { assert adminExecutor.inEventLoop(); if (closeWasCalled) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/SettableNanoClock.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/SettableNanoClock.java index b12fbf35582..51cbd666f51 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/SettableNanoClock.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/SettableNanoClock.java @@ -25,7 +25,7 @@ public long nanoTime() { } // This is racy, but in our tests it's never read concurrently - @SuppressWarnings("NonAtomicVolatileUpdate") + 
@SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) void add(long increment) { nanoTime += increment; } From 4c2d26d18af94c8a700f2611f00b1f3b2a2d0656 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 5 Jul 2019 17:13:50 +0200 Subject: [PATCH 030/979] JAVA-2323: Handle restart of a node with same host_id but a different address --- changelog/README.md | 1 + .../oss/driver/api/core/metadata/Node.java | 6 +++ .../core/metadata/AddNodeRefresh.java | 20 ++++++++-- .../internal/core/metadata/DefaultNode.java | 38 ++++++++----------- .../core/metadata/FullNodeListRefresh.java | 2 +- .../core/metadata/InitialNodeListRefresh.java | 4 +- .../core/metadata/MetadataManager.java | 2 +- .../internal/core/metadata/NodesRefresh.java | 10 ++++- .../core/metadata/AddNodeRefreshTest.java | 37 ++++++++++++++++-- .../metadata/FullNodeListRefreshTest.java | 17 +++++---- .../core/metadata/MetadataManagerTest.java | 1 + .../driver/api/core/metadata/NodeStateIT.java | 8 +++- 12 files changed, 102 insertions(+), 44 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index b20ef0dcd31..624822afa67 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [bug] JAVA-2323: Handle restart of a node with same host_id but a different address - [improvement] JAVA-2303: Ignore peer rows matching the control host's RPC address - [improvement] JAVA-2236: Add methods to set the auth provider programmatically - [improvement] JAVA-2369: Change mapper annotations retention to runtime diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java index 2f5d11c4071..69959473d83 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.Version; import 
com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.session.Session; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetSocketAddress; @@ -30,6 +31,11 @@ * *

    This object is mutable, all of its properties may be updated at runtime to reflect the latest * state of the node. + * + *

    Note that the default implementation returned by the driver uses reference equality. A + * {@link Session} will always return the same instance for a given {@link #getHostId() host id}. + * However, instances coming from different sessions will not be equal, even if they refer to the + * same host id. */ public interface Node { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java index 088d5d0ea68..19868e8dc04 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java @@ -37,11 +37,10 @@ public class AddNodeRefresh extends NodesRefresh { public Result compute( DefaultMetadata oldMetadata, boolean tokenMapEnabled, InternalDriverContext context) { Map oldNodes = oldMetadata.getNodes(); - if (oldNodes.containsKey(newNodeInfo.getHostId())) { - return new Result(oldMetadata); - } else { + Node existing = oldNodes.get(newNodeInfo.getHostId()); + if (existing == null) { DefaultNode newNode = new DefaultNode(newNodeInfo.getEndPoint(), context); - copyInfos(newNodeInfo, newNode, null, context.getSessionName()); + copyInfos(newNodeInfo, newNode, null, context); Map newNodes = ImmutableMap.builder() .putAll(oldNodes) @@ -50,6 +49,19 @@ public Result compute( return new Result( oldMetadata.withNodes(newNodes, tokenMapEnabled, false, null, context), ImmutableList.of(NodeStateEvent.added(newNode))); + } else { + // If a node is restarted after changing its broadcast RPC address, Cassandra considers that + // an addition, even though the host_id hasn't changed :( + // Update the existing instance and emit an UP event to trigger a pool reconnection. 
+ if (!existing.getEndPoint().equals(newNodeInfo.getEndPoint())) { + copyInfos(newNodeInfo, ((DefaultNode) existing), null, context); + assert newNodeInfo.getBroadcastRpcAddress().isPresent(); // always for peer nodes + return new Result( + oldMetadata, + ImmutableList.of(TopologyEvent.suggestUp(newNodeInfo.getBroadcastRpcAddress().get()))); + } else { + return new Result(oldMetadata); + } } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index a4858fcc03f..a9f80561438 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetSocketAddress; @@ -39,8 +40,8 @@ @ThreadSafe public class DefaultNode implements Node { - private final EndPoint endPoint; - private final NodeMetricUpdater metricUpdater; + private volatile EndPoint endPoint; + private volatile NodeMetricUpdater metricUpdater; volatile InetSocketAddress broadcastRpcAddress; volatile InetSocketAddress broadcastAddress; @@ -80,6 +81,18 @@ public EndPoint getEndPoint() { return endPoint; } + public void setEndPoint(@NonNull EndPoint newEndPoint, @NonNull InternalDriverContext context) { + if (!newEndPoint.equals(endPoint)) { + endPoint = newEndPoint; + + // The endpoint is also used to build metric names, so make sure they get updated + NodeMetricUpdater previousMetricUpdater = metricUpdater; + if (!(previousMetricUpdater instanceof 
NoopNodeMetricUpdater)) { + metricUpdater = context.getMetricsFactory().newNodeUpdater(this); + } + } + } + @NonNull @Override public Optional getBroadcastRpcAddress() { @@ -165,28 +178,9 @@ public NodeMetricUpdater getMetricUpdater() { return metricUpdater; } - @Override - public boolean equals(Object other) { - if (other == this) { - return true; - } else if (other instanceof Node) { - Node that = (Node) other; - // hostId is the natural identifier, but unfortunately we don't know it for contact points - // until the driver has opened the first connection. - return this.endPoint.equals(that.getEndPoint()); - } else { - return false; - } - } - - @Override - public int hashCode() { - return endPoint.hashCode(); - } - @Override public String toString() { - return endPoint.toString(); + return String.format("%s(%s,%s)", super.toString(), hostId, endPoint); } /** Note: deliberately not exposed by the public interface. */ diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java index 7b6aeae48e2..665d9b264d9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java @@ -72,7 +72,7 @@ public Result compute( if (tokenFactory == null && nodeInfo.getPartitioner() != null) { tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); } - tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, logPrefix); + tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, context); } Set removed = Sets.difference(oldNodes.keySet(), seen); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java index 96ed3b0d19e..92262b72a47 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java @@ -74,7 +74,7 @@ public Result compute( if (tokenFactory == null && nodeInfo.getPartitioner() != null) { tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); } - tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, logPrefix); + tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, context); newNodesBuilder.put(node.getHostId(), node); } @@ -82,7 +82,7 @@ public Result compute( ImmutableList.Builder eventsBuilder = ImmutableList.builder(); for (DefaultNode newNode : newNodes.values()) { - if (!contactPoints.contains(newNode)) { + if (findIn(contactPoints, newNode.getEndPoint()) == null) { eventsBuilder.add(NodeStateEvent.added(newNode)); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 2c4dcfa7f3e..d5acdbb6702 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -163,7 +163,7 @@ public CompletionStage refreshNode(Node node) { maybeInfo -> { if (maybeInfo.isPresent()) { boolean tokensChanged = - NodesRefresh.copyInfos(maybeInfo.get(), (DefaultNode) node, null, logPrefix); + NodesRefresh.copyInfos(maybeInfo.get(), (DefaultNode) node, null, context); if (tokensChanged) { apply(new TokensChangedRefresh()); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java index 664db194dcd..015ad1ad070 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metadata; import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.Collections; @@ -34,7 +35,12 @@ abstract class NodesRefresh implements MetadataRefresh { * mutate the tokens in-place, so there is no way to check this after the fact). */ protected static boolean copyInfos( - NodeInfo nodeInfo, DefaultNode node, TokenFactory tokenFactory, String logPrefix) { + NodeInfo nodeInfo, + DefaultNode node, + TokenFactory tokenFactory, + InternalDriverContext context) { + + node.setEndPoint(nodeInfo.getEndPoint(), context); node.broadcastRpcAddress = nodeInfo.getBroadcastRpcAddress().orElse(null); node.broadcastAddress = nodeInfo.getBroadcastAddress().orElse(null); node.listenAddress = nodeInfo.getListenAddress().orElse(null); @@ -48,7 +54,7 @@ protected static boolean copyInfos( } catch (IllegalArgumentException e) { LOG.warn( "[{}] Error converting Cassandra version '{}' for {}", - logPrefix, + context.getSessionName(), versionString, node.getEndPoint()); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java index 52d509ada88..c5186d527f4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java @@ -35,8 +35,6 @@ @RunWith(MockitoJUnitRunner.class) public class AddNodeRefreshTest { - private static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); - private static final InetSocketAddress 
ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); @Mock private InternalDriverContext context; @Mock protected MetricsFactory metricsFactory; @@ -84,7 +82,7 @@ public void should_add_new_node() { } @Test - public void should_not_add_existing_node() { + public void should_not_add_existing_node_with_same_id_and_endpoint() { // Given DefaultMetadata oldMetadata = new DefaultMetadata( @@ -108,4 +106,37 @@ public void should_not_add_existing_node() { assertThat(node1.getRack()).isNull(); assertThat(result.events).isEmpty(); } + + @Test + public void should_add_existing_node_with_same_id_but_different_endpoint() { + // Given + DefaultMetadata oldMetadata = + new DefaultMetadata( + ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null); + DefaultEndPoint newEndPoint = TestNodeFactory.newEndPoint(2); + InetSocketAddress newBroadcastRpcAddress = newEndPoint.resolve(); + UUID newSchemaVersion = Uuids.random(); + DefaultNodeInfo newNodeInfo = + DefaultNodeInfo.builder() + .withHostId(node1.getHostId()) + .withEndPoint(newEndPoint) + .withDatacenter("dc1") + .withRack("rack2") + .withSchemaVersion(newSchemaVersion) + .withBroadcastRpcAddress(newBroadcastRpcAddress) + .build(); + AddNodeRefresh refresh = new AddNodeRefresh(newNodeInfo); + + // When + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + + // Then + Map newNodes = result.newMetadata.getNodes(); + assertThat(newNodes).hasSize(1).containsEntry(node1.getHostId(), node1); + assertThat(node1.getEndPoint()).isEqualTo(newEndPoint); + assertThat(node1.getDatacenter()).isEqualTo("dc1"); + assertThat(node1.getRack()).isEqualTo("rack2"); + assertThat(node1.getSchemaVersion()).isEqualTo(newSchemaVersion); + assertThat(result.events).containsExactly(TopologyEvent.suggestUp(newBroadcastRpcAddress)); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java index 1d7b0b0d02f..bdf064ed528 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java @@ -18,6 +18,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; @@ -39,7 +40,8 @@ public class FullNodeListRefreshTest { private DefaultNode node1; private DefaultNode node2; - private DefaultNode node3; + private EndPoint endPoint3; + private UUID hostId3; @Before public void setup() { @@ -47,7 +49,9 @@ public void setup() { node1 = TestNodeFactory.newNode(1, context); node2 = TestNodeFactory.newNode(2, context); - node3 = TestNodeFactory.newNode(3, context); + + endPoint3 = TestNodeFactory.newEndPoint(3); + hostId3 = UUID.randomUUID(); } @Test @@ -64,18 +68,15 @@ public void should_add_and_remove_nodes() { .withEndPoint(node2.getEndPoint()) .withHostId(node2.getHostId()) .build(), - DefaultNodeInfo.builder() - .withEndPoint(node3.getEndPoint()) - .withHostId(node3.getHostId()) - .build()); + DefaultNodeInfo.builder().withEndPoint(endPoint3).withHostId(hostId3).build()); FullNodeListRefresh refresh = new FullNodeListRefresh(newInfos); // When MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); // Then - assertThat(result.newMetadata.getNodes()) - .containsOnlyKeys(node2.getHostId(), node3.getHostId()); + assertThat(result.newMetadata.getNodes()).containsOnlyKeys(node2.getHostId(), hostId3); + DefaultNode node3 = (DefaultNode) result.newMetadata.getNodes().get(hostId3); assertThat(result.events) 
.containsOnly(NodeStateEvent.removed(node1), NodeStateEvent.added(node3)); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java index 9e449fff95d..ca02ec2260b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java @@ -189,6 +189,7 @@ public void should_refresh_single_node() { NodeInfo info = mock(NodeInfo.class); when(info.getDatacenter()).thenReturn("dc1"); when(info.getHostId()).thenReturn(UUID.randomUUID()); + when(info.getEndPoint()).thenReturn(node.getEndPoint()); when(topologyMonitor.refreshNode(node)) .thenReturn(CompletableFuture.completedFuture(Optional.of(info))); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java index 7032f392853..0b41ece3ff6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java @@ -617,7 +617,13 @@ private void expect(NodeStateEvent... expectedEvents) { for (NodeStateEvent expected : expectedEvents) { try { NodeStateEvent actual = stateEvents.poll(10, TimeUnit.SECONDS); - assertThat(actual).isEqualTo(expected); + assertThat(actual).isNotNull(); + + // Don't compare events directly: some tests call this method with nodes obtained from + // another session instance, and nodes are compared by reference. 
+ assertThat(actual.oldState).isEqualTo(expected.oldState); + assertThat(actual.newState).isEqualTo(expected.newState); + assertThat(actual.node.getHostId()).isEqualTo(expected.node.getHostId()); } catch (InterruptedException e) { fail("Interrupted while waiting for event"); } From b9709a15e3e2e05a84348bd9480f80c820ca1cfc Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 1 Aug 2019 18:07:12 -0700 Subject: [PATCH 031/979] JAVA-2372: Add compatibility section to the root README --- README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/README.md b/README.md index 10ec520887e..380007e4356 100644 --- a/README.md +++ b/README.md @@ -48,6 +48,20 @@ builder](manual/query_builder/), [mapper](manual/mapper)). [com.datastax.oss]: http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.datastax.oss%22 +## Compatibility + +The driver is compatible with Apache Cassandra® 2.1 and higher, and DataStax Enterprise 4.7 and +higher. + +It requires Java 8 or higher. + +If using DataStax Enterprise, the [DataStax Enterprise Java +driver](http://docs.datastax.com/en/developer/java-driver-dse/latest) provides more features and +better compatibility. + +Disclaimer: Some DataStax/DataStax Enterprise products might partially work on big-endian systems, +but DataStax does not officially support these systems. + ## Migrating from previous versions Java driver 4 is **not binary compatible** with previous versions. 
However, most of the concepts From 017a4dff1b00d9f6e465814a3366e4215982df31 Mon Sep 17 00:00:00 2001 From: Nafer Sanabria Date: Tue, 9 Jul 2019 12:40:50 -0500 Subject: [PATCH 032/979] Bump Typesafe config to 1.3.4 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b3971df48af..31ee2e4f7dc 100644 --- a/pom.xml +++ b/pom.xml @@ -47,7 +47,7 @@ true UTF-8 UTF-8 - 1.3.3 + 1.3.4 25.1-jre 2.1.11 4.0.5 From d95f9654599970357f35dafe23b61e9ff0106341 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 2 Aug 2019 12:00:31 -0700 Subject: [PATCH 033/979] Update dependency versions --- core-shaded/pom.xml | 1 + pom.xml | 23 +++++++++++++---------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 5e76d2018b0..b22863c0156 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -264,6 +264,7 @@ !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, + !com.oracle.svm.core.annotate.*, * - 1.1.7.2 - 1.5.1 + 1.1.7.3 + 1.6.0 - 3.12.1 + 3.13.1 1.3 4.12 1.2.3 4.12.0 - 0.8.8 + 0.8.9 1.0 2.28 2.5.0 @@ -114,7 +114,7 @@ com.github.jnr jnr-ffi - 2.1.9 + 2.1.10 org.xerial.snappy @@ -129,7 +129,7 @@ com.github.jnr jnr-posix - 3.0.49 + 3.0.50 io.dropwizard.metrics @@ -179,7 +179,7 @@ org.mockito mockito-core - 2.25.0 + 2.28.2 com.datastax.oss.simulacron @@ -269,7 +269,7 @@ com.google.testing.compile compile-testing - 0.15 + 0.18 @@ -379,7 +379,7 @@ org.revapi revapi-java - 0.18.2 + 0.19.1 @@ -400,6 +400,9 @@ 1.8 -Xep:FutureReturnValueIgnored:OFF + -Xep:MockitoInternalUsage:OFF + -XepExcludedPaths:.*/target/generated-sources/.* + -XepExcludedPaths:.*/target/generated-test-sources/.* true true From ba5bf9a9b42e1126ce4b0ceb75bde3603f4f48bb Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 2 Aug 2019 12:01:13 -0700 Subject: [PATCH 034/979] Update ErrorProne version and fix new warnings --- .../oss/driver/api/core/PagingIterable.java | 1 + .../oss/driver/api/core/data/CqlDuration.java | 
2 +- .../oss/driver/api/core/uuid/Uuids.java | 1 + .../core/AsyncPagingIterableWrapper.java | 2 +- .../CassandraProtocolVersionRegistry.java | 2 +- .../internal/core/PagingIterableWrapper.java | 2 +- .../internal/core/cql/CqlRequestHandler.java | 6 +- .../core/cql/DefaultColumnDefinitions.java | 3 +- .../driver/internal/core/cql/DefaultRow.java | 3 +- .../internal/core/cql/QueryTraceFetcher.java | 2 +- .../driver/internal/core/cql/ResultSets.java | 2 +- .../internal/core/data/DefaultTupleValue.java | 3 +- .../internal/core/data/DefaultUdtValue.java | 3 +- .../internal/core/data/IdentifierIndex.java | 2 +- .../schema/ShallowUserDefinedType.java | 4 +- .../metadata/schema/parsing/RawColumn.java | 12 +- .../schema/parsing/SimpleJsonParser.java | 2 + .../metadata/schema/parsing/TableParser.java | 10 +- .../metadata/schema/parsing/ViewParser.java | 4 +- .../core/metadata/token/TokenRangeBase.java | 4 +- .../oss/driver/internal/core/os/Native.java | 2 + .../codec/registry/CachingCodecRegistry.java | 4 +- .../core/util/concurrent/RunOrSchedule.java | 4 +- .../core/channel/DriverChannelTest.java | 2 +- .../core/channel/EmbeddedEndPoint.java | 7 - .../core/cql/CqlRequestHandlerTest.java | 4 +- .../cql/PagingIterableSpliteratorTest.java | 5 +- .../core/cql/RequestHandlerTestHarness.java | 5 - .../internal/core/cql/StatementSizeTest.java | 2 +- .../core/data/AccessibleByIdTestBase.java | 4 +- .../core/session/DefaultSessionPoolsTest.java | 1 - .../type/codec/ZonedTimestampCodecTest.java | 2 +- .../basic/CreateAndPopulateKeyspace.java | 1 + .../concurrent/LimitConcurrencyCustom.java | 1 + .../oss/driver/examples/datatypes/Blobs.java | 1 + .../examples/paging/RandomPagingRestUi.java | 2 +- .../driver/api/core/cql/AsyncResultSetIT.java | 5 +- .../api/core/cql/SimpleStatementIT.java | 7 +- .../api/core/tracker/RequestLoggerIT.java | 3 +- .../type/codec/registry/CodecRegistryIT.java | 4 +- .../driver/api/querybuilder/JsonInsertIT.java | 14 +- 
.../oss/driver/mapper/DefaultKeyspaceIT.java | 48 ++++-- .../driver/mapper/EntityPolymorphismIT.java | 154 +++++++++++------- .../oss/driver/mapper/InventoryITBase.java | 80 +++++---- .../oss/driver/mapper/NestedUdtIT.java | 7 - .../driver/mapper/NullSavingStrategyIT.java | 17 +- .../processor/DefaultProcessorContext.java | 1 - .../processor/dao/DaoMethodGenerator.java | 2 +- .../dao/DaoQueryProviderMethodGenerator.java | 1 + ...lperDeleteByPrimaryKeyMethodGenerator.java | 10 -- ...eleteByPrimaryKeyPartsMethodGenerator.java | 6 +- ...ntityHelperDeleteStartMethodGenerator.java | 10 -- .../entity/EntityHelperGenerator.java | 22 +-- .../EntityHelperGetMethodGenerator.java | 5 +- .../EntityHelperInsertMethodGenerator.java | 6 +- ...lperSelectByPrimaryKeyMethodGenerator.java | 10 -- ...electByPrimaryKeyPartsMethodGenerator.java | 10 -- ...ntityHelperSelectStartMethodGenerator.java | 6 +- .../EntityHelperSetMethodGenerator.java | 7 +- ...lperUpdateByPrimaryKeyMethodGenerator.java | 6 +- ...ntityHelperUpdateStartMethodGenerator.java | 6 +- .../processor/util/HierarchyScanner.java | 10 +- .../processor/util/HierarchyScannerTest.java | 2 + .../oss/driver/internal/mapper/DaoBase.java | 2 +- pom.xml | 4 +- 65 files changed, 280 insertions(+), 302 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/PagingIterable.java b/core/src/main/java/com/datastax/oss/driver/api/core/PagingIterable.java index f84c74c8b6a..d2fca141005 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/PagingIterable.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/PagingIterable.java @@ -102,6 +102,7 @@ default ElementT one() { * reasonable number of results. 
*/ @NonNull + @SuppressWarnings("MixedMutabilityReturnType") default List all() { if (!iterator().hasNext()) { return Collections.emptyList(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java index 1db7e1d8d4f..c41e02e56e1 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java @@ -81,7 +81,7 @@ public final class CqlDuration implements TemporalAmount { private CqlDuration(int months, int days, long nanoseconds) { // Makes sure that all the values are negative if one of them is if ((months < 0 || days < 0 || nanoseconds < 0) - && ((months > 0 || days > 0 || nanoseconds > 0))) { + && (months > 0 || days > 0 || nanoseconds > 0)) { throw new IllegalArgumentException( String.format( "All values must be either negative or positive, got %d months, %d days, %d nanoseconds", diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 7e82b4c685f..0356f7e52f1 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -165,6 +165,7 @@ private static String getProcessPiece() { } if (pid == null) { try { + @SuppressWarnings("StringSplitter") String pidJmx = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; pid = Integer.parseInt(pidJmx); LOG.info("PID obtained through JMX: {}", pid); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapper.java index 77490e57416..fb69bbcfccf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapper.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapper.java @@ -44,7 +44,7 @@ public AsyncPagingIterableWrapper( new AbstractIterator() { @Override protected TargetT computeNext() { - return (sourceIterator.hasNext()) + return sourceIterator.hasNext() ? elementMapper.apply(sourceIterator.next()) : endOfData(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java index f76f2d9b0fa..d84b13cb72d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java @@ -101,7 +101,7 @@ public Optional downgrade(ProtocolVersion version) { } else { ProtocolVersion previousVersion = previousEntry.getValue(); // Beta versions are skipped during negotiation - return (previousVersion.isBeta()) ? downgrade(previousVersion) : Optional.of(previousVersion); + return previousVersion.isBeta() ? downgrade(previousVersion) : Optional.of(previousVersion); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/PagingIterableWrapper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/PagingIterableWrapper.java index ab8989ebbd3..55d12bb10ee 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/PagingIterableWrapper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/PagingIterableWrapper.java @@ -65,7 +65,7 @@ public PagingIterableWrapper( new AbstractIterator() { @Override protected TargetT computeNext() { - return (sourceIterator.hasNext()) + return sourceIterator.hasNext() ? 
elementMapper.apply(sourceIterator.next()) : endOfData(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index f01206e29a9..10cafc7b4bc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -231,7 +231,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { // If we raced with session shutdown the timer might be closed already, rethrow with a more // explicit message result.completeExceptionally( - ("cannot be started once stopped".equals(e.getMessage())) + "cannot be started once stopped".equals(e.getMessage()) ? new IllegalStateException("Session is closed") : e); } @@ -616,8 +616,8 @@ public void onResponse(Frame responseFrame) { }), (schemaInAgreement, metadata) -> schemaInAgreement) .whenComplete( - ((schemaInAgreement, error) -> - setFinalResult(schemaChange, responseFrame, schemaInAgreement, this))); + (schemaInAgreement, error) -> + setFinalResult(schemaChange, responseFrame, schemaInAgreement, this)); } else if (responseMessage instanceof SetKeyspace) { SetKeyspace setKeyspace = (SetKeyspace) responseMessage; session diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java index 74b345d79bb..87cc99066bd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java @@ -111,7 +111,8 @@ private Object writeReplace() { return new SerializationProxy(this); } - private void readObject(ObjectInputStream stream) throws InvalidObjectException { + private void readObject(@SuppressWarnings("unused") 
ObjectInputStream stream) + throws InvalidObjectException { // Should never be called since we serialized a proxy throw new InvalidObjectException("Proxy required"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java index 1b4db7968f6..4a1190a599f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java @@ -134,7 +134,8 @@ private Object writeReplace() { return new SerializationProxy(this); } - private void readObject(ObjectInputStream stream) throws InvalidObjectException { + private void readObject(@SuppressWarnings("unused") ObjectInputStream stream) + throws InvalidObjectException { // Should never be called since we serialized a proxy throw new InvalidObjectException("Proxy required"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java index ebe7f906c25..383f4f38487 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java @@ -58,7 +58,7 @@ class QueryTraceFetcher { String regularConsistency = config.getString(DefaultDriverOption.REQUEST_CONSISTENCY); String traceConsistency = config.getString(DefaultDriverOption.REQUEST_TRACE_CONSISTENCY); this.config = - (traceConsistency.equals(regularConsistency)) + traceConsistency.equals(regularConsistency) ? 
config : config.withString(DefaultDriverOption.REQUEST_CONSISTENCY, traceConsistency); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/ResultSets.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/ResultSets.java index dfd5fc8def1..50fe6127c5f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/ResultSets.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/ResultSets.java @@ -20,7 +20,7 @@ public class ResultSets { public static ResultSet newInstance(AsyncResultSet firstPage) { - return (firstPage.hasMorePages()) + return firstPage.hasMorePages() ? new MultiPageResultSet(firstPage) : new SinglePageResultSet(firstPage); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java index 15a36bbd9b7..e24f5c54ac2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java @@ -107,7 +107,8 @@ private Object writeReplace() { return new SerializationProxy(this); } - private void readObject(ObjectInputStream stream) throws InvalidObjectException { + private void readObject(@SuppressWarnings("unused") ObjectInputStream stream) + throws InvalidObjectException { // Should never be called since we serialized a proxy throw new InvalidObjectException("Proxy required"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java index b0528a4a195..24be5dd58ab 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java @@ -179,7 +179,8 @@ private Object writeReplace() { return new 
SerializationProxy(this); } - private void readObject(ObjectInputStream stream) throws InvalidObjectException { + private void readObject(@SuppressWarnings("unused") ObjectInputStream stream) + throws InvalidObjectException { // Should never be called since we serialized a proxy throw new InvalidObjectException("Proxy required"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 0d649220df5..24454f6e625 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -58,7 +58,7 @@ public IdentifierIndex(List ids) { */ public int firstIndexOf(String name) { Integer index = - (Strings.isDoubleQuoted(name)) + Strings.isDoubleQuoted(name) ? byCaseSensitiveName.get(Strings.unDoubleQuote(name)) : byCaseInsensitiveName.get(name.toLowerCase()); return (index == null) ? 
-1 : index; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java index 21d8124c3de..4e2975c6d8c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java @@ -137,12 +137,12 @@ public void attach(@NonNull AttachmentPoint attachmentPoint) { "This implementation should only be used internally, this is likely a driver bug"); } - private void readObject(ObjectInputStream s) throws IOException { + private void readObject(@SuppressWarnings("unused") ObjectInputStream s) throws IOException { throw new UnsupportedOperationException( "This implementation should only be used internally, this is likely a driver bug"); } - private void writeObject(ObjectOutputStream s) throws IOException { + private void writeObject(@SuppressWarnings("unused") ObjectOutputStream s) throws IOException { throw new UnsupportedOperationException( "This implementation should only be used internally, this is likely a driver bug"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java index c9f65de601d..49f31604cf6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java @@ -17,7 +17,6 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; -import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import 
com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.datastax.oss.driver.shaded.guava.common.primitives.Ints; @@ -51,8 +50,7 @@ public class RawColumn implements Comparable { public final String indexType; public final Map indexOptions; - private RawColumn( - AdminRow row, CqlIdentifier keyspaceId, Map userTypes) { + private RawColumn(AdminRow row) { // Cassandra < 3.0: // CREATE TABLE system.schema_columns ( // keyspace_name text, @@ -139,17 +137,15 @@ private static int rank(String kind) { } } - public static List toRawColumns( - Collection rows, - CqlIdentifier keyspaceId, - Map userTypes) { + @SuppressWarnings("MixedMutabilityReturnType") + public static List toRawColumns(Collection rows) { if (rows.isEmpty()) { return Collections.emptyList(); } else { // Use a mutable list, we might remove some elements later List result = Lists.newArrayListWithExpectedSize(rows.size()); for (AdminRow row : rows) { - result.add(new RawColumn(row, keyspaceId, userTypes)); + result.add(new RawColumn(row)); } return result; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SimpleJsonParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SimpleJsonParser.java index 8da63a9018c..097569830c0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SimpleJsonParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SimpleJsonParser.java @@ -45,6 +45,7 @@ private SimpleJsonParser(String input) { this.input = input; } + @SuppressWarnings("MixedMutabilityReturnType") public static List parseStringList(String input) { if (input == null || input.isEmpty()) { return Collections.emptyList(); @@ -73,6 +74,7 @@ public static List parseStringList(String input) { } } + @SuppressWarnings("MixedMutabilityReturnType") public static Map parseStringMap(String input) { if (input == null || input.isEmpty()) { return 
Collections.emptyMap(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParser.java index d7b09b7b11f..3a020ca26e1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParser.java @@ -121,13 +121,11 @@ public TableMetadata parseTable( tableRow.getString( tableRow.contains("table_name") ? "table_name" : "columnfamily_name")); - UUID uuid = (tableRow.contains("id")) ? tableRow.getUuid("id") : tableRow.getUuid("cf_id"); + UUID uuid = tableRow.contains("id") ? tableRow.getUuid("id") : tableRow.getUuid("cf_id"); List rawColumns = RawColumn.toRawColumns( - rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId), - keyspaceId, - userTypes); + rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId)); if (rawColumns.isEmpty()) { LOG.warn( "[{}] Processing TABLE refresh for {}.{} but found no matching rows, skipping", @@ -234,9 +232,7 @@ TableMetadata parseVirtualTable( List rawColumns = RawColumn.toRawColumns( - rows.virtualColumns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId), - keyspaceId, - userTypes); + rows.virtualColumns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId)); if (rawColumns.isEmpty()) { LOG.warn( "[{}] Processing TABLE refresh for {}.{} but found no matching rows, skipping", diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParser.java index 48bdac0a07e..06fc7d354fb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParser.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParser.java @@ -87,9 +87,7 @@ public ViewMetadata parseView( List rawColumns = RawColumn.toRawColumns( - rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(viewId), - keyspaceId, - userTypes); + rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(viewId)); if (rawColumns.isEmpty()) { LOG.warn( "[{}] Processing VIEW refresh for {}.{} but found no matching rows, skipping", diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/TokenRangeBase.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/TokenRangeBase.java index 4c3ffe21b50..92e2b55c43a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/TokenRangeBase.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/TokenRangeBase.java @@ -159,8 +159,8 @@ public List intersectWith(@NonNull TokenRange that) { if (t1.intersects(t2)) { intersected.add( newTokenRange( - (contains(t1, t2.getStart(), true)) ? t2.getStart() : t1.getStart(), - (contains(t1, t2.getEnd(), false)) ? t2.getEnd() : t1.getEnd())); + contains(t1, t2.getStart(), true) ? t2.getStart() : t1.getStart(), + contains(t1, t2.getEnd(), false) ? 
t2.getEnd() : t1.getEnd())); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java index 25df2d5d23a..02de90d3cf4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java @@ -156,7 +156,9 @@ private Timeval(Runtime runtime) { /** @see LibCLoader */ private static class PosixLoader { + @SuppressWarnings("VariableNameSameAsType") private static final jnr.posix.POSIX POSIX; + private static final boolean GET_PID_AVAILABLE; static { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index f85ccc40c3e..d0e22888717 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -249,9 +249,7 @@ protected TypeCodec codecFor( protected boolean matches( @NonNull TypeCodec codec, @NonNull GenericType javaType, boolean isJavaCovariant) { - return (isJavaCovariant) - ? codec.getJavaType().isSupertypeOf(javaType) - : codec.accepts(javaType); + return isJavaCovariant ? 
codec.getJavaType().isSupertypeOf(javaType) : codec.accepts(javaType); } @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/RunOrSchedule.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/RunOrSchedule.java index 3bf689a4670..daf2809b794 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/RunOrSchedule.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/RunOrSchedule.java @@ -81,13 +81,13 @@ public static CompletionStage on( executor .submit(task) .addListener( - ((Future> f) -> { + (Future> f) -> { if (f.isSuccess()) { CompletableFutures.completeFrom(f.getNow(), result); } else { result.completeExceptionally(f.cause()); } - })); + }); return result; } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/DriverChannelTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/DriverChannelTest.java index 75ebcab9efa..d543c83fe53 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/DriverChannelTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/DriverChannelTest.java @@ -62,7 +62,7 @@ public void setup() { writeCoalescer = new MockWriteCoalescer(); driverChannel = new DriverChannel( - new EmbeddedEndPoint(channel), channel, writeCoalescer, DefaultProtocolVersion.V3); + new EmbeddedEndPoint(), channel, writeCoalescer, DefaultProtocolVersion.V3); } /** diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/EmbeddedEndPoint.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/EmbeddedEndPoint.java index cb4b5071b18..d844bc35fb4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/EmbeddedEndPoint.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/EmbeddedEndPoint.java @@ -17,18 +17,11 @@ import 
com.datastax.oss.driver.api.core.metadata.EndPoint; import edu.umd.cs.findbugs.annotations.NonNull; -import io.netty.channel.embedded.EmbeddedChannel; import java.net.SocketAddress; /** Endpoint implementation for unit tests that use an embedded Netty channel. */ public class EmbeddedEndPoint implements EndPoint { - private final SocketAddress address; - - public EmbeddedEndPoint(EmbeddedChannel channel) { - this.address = channel.remoteAddress(); - } - @NonNull @Override public SocketAddress resolve() { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java index 78542f4adb5..ca1ff3d3639 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java @@ -194,11 +194,11 @@ public void should_reprepare_on_the_fly_if_not_prepared() throws InterruptedExce // Before we proceed, mock the PREPARE exchange that will occur as soon as we complete the // first response. node1Behavior.mockFollowupRequest( - Prepare.class, defaultFrameOf(new Prepared(mockId.array(), null, null, null))); + Prepare.class, defaultFrameOf(new Prepared(Bytes.getArray(mockId), null, null, null))); node1Behavior.setWriteSuccess(); node1Behavior.setResponseSuccess( - defaultFrameOf(new Unprepared("mock message", mockId.array()))); + defaultFrameOf(new Unprepared("mock message", Bytes.getArray(mockId)))); // Should now re-prepare, re-execute and succeed. 
assertThatStage(resultSetFuture).isSuccess(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java index a7d7e66ec81..da5cdb483e9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java @@ -21,7 +21,6 @@ import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; -import com.datastax.oss.driver.internal.core.cql.PagingIterableSpliterator.Builder; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -46,7 +45,7 @@ public class PagingIterableSpliteratorTest { public void should_split_with_estimated_size( int size, int chunkSize, List expectedLeft, List expectedRight) { // given - Builder builder = + PagingIterableSpliterator.Builder builder = PagingIterableSpliterator.builder(iterableOfSize(size)) .withEstimatedSize(size) .withChunkSize(chunkSize); @@ -110,7 +109,7 @@ public static Iterable splitsWithEstimatedSize() { public void should_split_with_unknown_size( int size, int chunkSize, List expectedLeft, List expectedRight) { // given - Builder builder = + PagingIterableSpliterator.Builder builder = PagingIterableSpliterator.builder(iterableOfSize(size)).withChunkSize(chunkSize); // when PagingIterableSpliterator right = builder.build(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java index 11ba0cd5f55..dfe90e9b8d8 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java @@ -53,7 +53,6 @@ import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; import com.datastax.oss.protocol.internal.Frame; import io.netty.channel.EventLoopGroup; -import io.netty.util.TimerTask; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; @@ -189,10 +188,6 @@ public CapturedTimeout nextScheduledTimeout() { return timer.getNextTimeout(); } - public void runNextTask() { - TimerTask task = timer.getNextTimeout().task(); - } - @Override public void close() { timer.stop(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java index 59ca780136f..726704844bf 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java @@ -170,7 +170,7 @@ public void should_measure_size_of_bound_statement() { + (2 + PREPARED_ID.length) + (2 + RESULT_METADATA_ID.length) + 2 // size of value list - + 2 * (4) // two null values (size = -1) + + 2 * 4 // two null values (size = -1) + 4 // fetch size ; assertThat(v5SizeOf(statement)).isEqualTo(expectedSize); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/data/AccessibleByIdTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/data/AccessibleByIdTestBase.java index ad3ee2f199e..9a480a5e21f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/data/AccessibleByIdTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/data/AccessibleByIdTestBase.java @@ -468,15 +468,15 @@ public void should_get_with_explicit_codec_by_name() { assertThat(s).isEqualTo("1"); } - 
@SuppressWarnings("UnusedAssignment") @Test(expected = IllegalArgumentException.class) + @SuppressWarnings("CheckReturnValue") public void should_fail_when_id_does_not_exists() { final CqlIdentifier invalidField = CqlIdentifier.fromInternal("invalidField"); // Given T t = newInstance(ImmutableList.of(DataTypes.INT), attachmentPoint); // When - t = t.setInt(invalidField, 1); + t.setInt(invalidField, 1); // Then the method will throw IllegalArgumentException up to the client. } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java index dfd616d1e54..838cc12a69c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java @@ -577,7 +577,6 @@ public void should_not_recreate_pool_if_node_is_forced_back_up_but_ignored() { when(node2.getDistance()).thenReturn(NodeDistance.IGNORED); ChannelPool pool1 = mockPool(node1); - ChannelPool pool2 = mockPool(node2); ChannelPool pool3 = mockPool(node3); MockChannelPoolFactoryHelper factoryHelper = MockChannelPoolFactoryHelper.builder(channelPoolFactory) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java index 5fb73d0ec76..0a93c44bffd 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java @@ -183,7 +183,7 @@ public void should_accept_raw_type() { @Test public void should_accept_object() { codec = new ZonedTimestampCodec(); - assertThat(codec.accepts(ZonedDateTime.now())).isTrue(); + 
assertThat(codec.accepts(ZonedDateTime.now(ZoneOffset.systemDefault()))).isTrue(); assertThat(codec.accepts(Integer.MIN_VALUE)).isFalse(); } } diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/basic/CreateAndPopulateKeyspace.java b/examples/src/main/java/com/datastax/oss/driver/examples/basic/CreateAndPopulateKeyspace.java index 7b6d75c53dd..3cf92424e9b 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/basic/CreateAndPopulateKeyspace.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/basic/CreateAndPopulateKeyspace.java @@ -42,6 +42,7 @@ * @see Java driver online * manual */ +@SuppressWarnings("CatchAndPrintStackTrace") public class CreateAndPopulateKeyspace { public static void main(String[] args) { diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java index 8569b1c0340..9962d414044 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java @@ -57,6 +57,7 @@ * @see Java driver online * manual */ +@SuppressWarnings("CatchAndPrintStackTrace") public class LimitConcurrencyCustom { private static final int CONCURRENCY_LEVEL = 32; private static final int TOTAL_NUMBER_OF_INSERTS = 10_000; diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java index 84cf419f96b..5dc5802018f 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java @@ -150,6 +150,7 @@ private static void retrieveSimpleColumn(CqlSession session) { } } + @SuppressWarnings("ByteBufferBackingArray") private static void 
retrieveMapColumn(CqlSession session) { Row row = session.execute("SELECT b, m FROM examples.blobs WHERE k = 1").one(); diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/paging/RandomPagingRestUi.java b/examples/src/main/java/com/datastax/oss/driver/examples/paging/RandomPagingRestUi.java index 7e0ed814ec4..14e66657e12 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/paging/RandomPagingRestUi.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/paging/RandomPagingRestUi.java @@ -255,7 +255,7 @@ public UserVideosResponse getUserVideos( URI previous = (page == 1) ? null : uri.getAbsolutePathBuilder().queryParam("page", page - 1).build(); - URI next = (empty) ? null : uri.getAbsolutePathBuilder().queryParam("page", page + 1).build(); + URI next = empty ? null : uri.getAbsolutePathBuilder().queryParam("page", page + 1).build(); return new UserVideosResponse(videos, previous, next); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java index 83749d5259e..aaff6449730 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java @@ -126,7 +126,7 @@ public void should_iterate_over_all_pages_asynchronously_single_partition() thro PageStatistics stats = result.toCompletableFuture().get(); assertThat(stats.rows).isEqualTo(ROWS_PER_PARTITION); - assertThat(stats.pages).isEqualTo((int) (Math.ceil(ROWS_PER_PARTITION / (double) PAGE_SIZE))); + assertThat(stats.pages).isEqualTo((int) Math.ceil(ROWS_PER_PARTITION / (double) PAGE_SIZE)); } @Test @@ -141,8 +141,7 @@ public void should_iterate_over_all_pages_asynchronously_cross_partition() throw PageStatistics stats = result.toCompletableFuture().get(); 
assertThat(stats.rows).isEqualTo(ROWS_PER_PARTITION * 2); - assertThat(stats.pages) - .isEqualTo((int) (Math.ceil(ROWS_PER_PARTITION * 2 / (double) PAGE_SIZE))); + assertThat(stats.pages).isEqualTo((int) Math.ceil(ROWS_PER_PARTITION * 2 / (double) PAGE_SIZE)); } private static class PageStatistics { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java index 2cc6d520da5..4fe30d29557 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java @@ -200,10 +200,9 @@ public void should_use_timestamp_when_set() { public void should_use_tracing_when_set() { // TODO currently there's no way to validate tracing was set since trace id is not set // also write test to verify it is not set. - ResultSet result = - sessionRule - .session() - .execute(SimpleStatement.builder("select * from test").setTracing().build()); + sessionRule + .session() + .execute(SimpleStatement.builder("select * from test").setTracing().build()); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java index 6ab1010c7ea..f40c5925efc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java @@ -34,7 +34,6 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import 
com.datastax.oss.driver.api.core.servererrors.ServerError; import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; @@ -374,7 +373,7 @@ public void should_log_successful_nodes_on_successful_request() { .prime(when(QUERY).then(rows().row("release_version", "3.0.0"))); // When - ResultSet set = sessionRuleNode.session().execute(QUERY); + sessionRuleNode.session().execute(QUERY); // Then verify(appender, new Timeout(500, VerificationModeFactory.times(2))) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java index e10dc9135cb..361b397a4ac 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java @@ -261,8 +261,8 @@ private static class OptionalCodec extends MappingCodec, T> { Predicate isAbsent = (i) -> i == null - || ((i instanceof Collection && ((Collection) i).isEmpty())) - || ((i instanceof Map) && ((Map) i).isEmpty()); + || (i instanceof Collection && ((Collection) i).isEmpty()) + || (i instanceof Map && ((Map) i).isEmpty()); OptionalCodec(TypeCodec innerCodec) { super( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java index e2140783549..1c80267ab4c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java @@ -247,11 +247,15 @@ public String toString() { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - User user = 
(User) o; - return id == user.id && age == user.age && Objects.equals(name, user.name); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof User) { + User that = (User) other; + return this.id == that.id && this.age == that.age && Objects.equals(this.name, that.name); + } else { + return false; + } } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java index 9f3c7238919..7f8b93cf961 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java @@ -294,12 +294,16 @@ public void setDescription(String description) { } @Override - public boolean equals(Object o) { - - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductSimpleDefaultKs that = (ProductSimpleDefaultKs) o; - return Objects.equals(id, that.id) && Objects.equals(description, that.description); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof ProductSimpleDefaultKs) { + ProductSimpleDefaultKs that = (ProductSimpleDefaultKs) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description); + } else { + return false; + } } @Override @@ -342,12 +346,16 @@ public void setDescription(String description) { } @Override - public boolean equals(Object o) { - - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductSimpleDefaultKsNotSet that = (ProductSimpleDefaultKsNotSet) o; - return Objects.equals(id, that.id) && Objects.equals(description, that.description); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof ProductSimpleDefaultKsNotSet) { + 
ProductSimpleDefaultKsNotSet that = (ProductSimpleDefaultKsNotSet) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description); + } else { + return false; + } } @Override @@ -390,12 +398,16 @@ public void setDescription(String description) { } @Override - public boolean equals(Object o) { - - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductSimpleWithoutKs that = (ProductSimpleWithoutKs) o; - return Objects.equals(id, that.id) && Objects.equals(description, that.description); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof ProductSimpleWithoutKs) { + ProductSimpleWithoutKs that = (ProductSimpleWithoutKs) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description); + } else { + return false; + } } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java index 2905f0d694d..e6ba38946e8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java @@ -444,11 +444,14 @@ public void setY(int y) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Point2D point2D = (Point2D) o; - return x == point2D.x && y == point2D.y; + public boolean equals(Object other) { + if (this == other) return true; + else if (other instanceof Point2D) { + Point2D that = (Point2D) other; + return this.x == that.x && this.y == that.y; + } else { + return false; + } } @Override @@ -478,12 +481,15 @@ public void setZ(int z) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != 
o.getClass()) return false; - if (!super.equals(o)) return false; - Point3D point3D = (Point3D) o; - return z == point3D.z; + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Point3D) { + Point3D that = (Point3D) other; + return super.equals(that) && this.z == that.z; + } else { + return false; + } } @Override @@ -535,11 +541,15 @@ public void setTags(Set tags) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Shape shape = (Shape) o; - return Objects.equals(id, shape.id) && Objects.equals(tags, shape.tags); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Shape) { + Shape that = (Shape) other; + return Objects.equals(id, that.id) && Objects.equals(tags, that.tags); + } else { + return false; + } } @Override @@ -579,7 +589,7 @@ public UUID getId() { @Override public double getArea() { - return Math.PI * (Math.pow(getRadius(), 2)); + return Math.PI * Math.pow(getRadius(), 2); } public double getRadius() { @@ -610,12 +620,17 @@ public void setCenter(Point2D center) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - Circle circle = (Circle) o; - return Double.compare(circle.radius, radius) == 0 && center.equals(circle.center); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Circle) { + Circle that = (Circle) other; + return super.equals(that) + && Double.compare(that.radius, radius) == 0 + && center.equals(that.center); + } else { + return false; + } } @Override @@ -679,12 +694,17 @@ public double getArea() { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return 
false; - Rectangle rectangle = (Rectangle) o; - return bottomLeft.equals(rectangle.bottomLeft) && topRight.equals(rectangle.topRight); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Rectangle) { + Rectangle that = (Rectangle) other; + return super.equals(that) + && bottomLeft.equals(that.bottomLeft) + && topRight.equals(that.topRight); + } else { + return false; + } } @Override @@ -697,8 +717,6 @@ public int hashCode() { @Entity static class Square extends Rectangle implements WriteTimeProvider { - private Point2D height; - @Computed("writetime(bottom_left)") private long writeTime; @@ -787,12 +805,15 @@ public double getVolume() { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - Sphere sphere = (Sphere) o; - return writeTime == sphere.writeTime; + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Sphere) { + Sphere that = (Sphere) other; + return super.equals(that) && writeTime == that.writeTime; + } else { + return false; + } } @Override @@ -820,11 +841,15 @@ public void setLocation(String location) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LocatableItem that = (LocatableItem) o; - return Objects.equals(location, that.location); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof LocatableItem) { + LocatableItem that = (LocatableItem) other; + return Objects.equals(this.location, that.location); + } else { + return false; + } } @Override @@ -855,12 +880,15 @@ public void setName(String name) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; 
- Asset asset = (Asset) o; - return Objects.equals(name, asset.name); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Asset) { + Asset that = (Asset) other; + return super.equals(that) && Objects.equals(this.name, that.name); + } else { + return false; + } } @Override @@ -894,12 +922,15 @@ public void setId(UUID id) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - Device device = (Device) o; - return id.equals(device.id); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof Device) { + Device that = (Device) other; + return super.equals(that) && this.id.equals(that.id); + } else { + return false; + } } @Override @@ -960,12 +991,15 @@ public void setId(UUID id) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; - SimpleDevice that = (SimpleDevice) o; - return inUse == that.inUse && id.equals(that.id); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof SimpleDevice) { + SimpleDevice that = (SimpleDevice) other; + return super.equals(that) && this.inUse == that.inUse && this.id.equals(that.id); + } else { + return false; + } } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java index 057e12af1fb..fad9b4d7f90 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java @@ -130,17 +130,17 @@ public void setDimensions(Dimensions dimensions) { } @Override - public boolean equals(Object 
o) { - if (this == o) { + public boolean equals(Object other) { + if (other == this) { return true; - } - if (o == null || getClass() != o.getClass()) { + } else if (other instanceof Product) { + Product that = (Product) other; + return Objects.equals(id, that.id) + && Objects.equals(description, that.description) + && Objects.equals(dimensions, that.dimensions); + } else { return false; } - Product product = (Product) o; - return Objects.equals(id, product.id) - && Objects.equals(description, product.description) - && Objects.equals(dimensions, product.dimensions); } @Override @@ -181,11 +181,15 @@ public void setDescription(String description) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductWithoutId that = (ProductWithoutId) o; - return Objects.equals(description, that.description); + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof ProductWithoutId) { + ProductWithoutId that = (ProductWithoutId) other; + return Objects.equals(description, that.description); + } else { + return false; + } } @Override @@ -239,15 +243,15 @@ public void setHeight(int height) { } @Override - public boolean equals(Object o) { - if (this == o) { + public boolean equals(Object other) { + if (this == other) { return true; - } - if (o == null || getClass() != o.getClass()) { + } else if (other instanceof Dimensions) { + Dimensions that = (Dimensions) other; + return this.length == that.length && this.width == that.width && this.height == that.height; + } else { return false; } - Dimensions that = (Dimensions) o; - return length == that.length && width == that.width && height == that.height; } @Override @@ -280,11 +284,15 @@ public void setId(UUID id) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OnlyPK onlyPK = (OnlyPK) o; - return 
Objects.equals(id, onlyPK.id); + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof OnlyPK) { + OnlyPK that = (OnlyPK) other; + return Objects.equals(this.id, that.id); + } else { + return false; + } } @Override @@ -374,16 +382,20 @@ public void setCount(int count) { } @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductSale that = (ProductSale) o; - return Double.compare(that.price, price) == 0 - && count == that.count - && id.equals(that.id) - && day.equals(that.day) - && ts.equals(that.ts) - && customerId == that.customerId; + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof ProductSale) { + ProductSale that = (ProductSale) other; + return Double.compare(this.price, that.price) == 0 + && this.count == that.count + && this.id.equals(that.id) + && this.day.equals(that.day) + && this.ts.equals(that.ts) + && this.customerId == that.customerId; + } else { + return false; + } } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java index 06c17f4edc7..0f7bad30c15 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java @@ -128,9 +128,6 @@ public void clearContainerData() { @Test public void should_insert_and_retrieve_entity_with_nested_udts() { - // Given - CqlSession session = sessionRule.session(); - // When containerDao.save(SAMPLE_CONTAINER); Container retrievedEntity = containerDao.loadByPk(SAMPLE_CONTAINER.getId()); @@ -142,8 +139,6 @@ public void should_insert_and_retrieve_entity_with_nested_udts() { @Test public void should_insert_do_not_set_to_null_udts() { // Given - CqlSession session = sessionRule.session(); 
- containerDao.save(SAMPLE_CONTAINER); Container retrievedEntity = containerDao.loadByPk(SAMPLE_CONTAINER.getId()); @@ -158,8 +153,6 @@ public void should_insert_do_not_set_to_null_udts() { @Test public void should_insert_set_to_null_udts() { // Given - CqlSession session = sessionRule.session(); - containerDao.save(SAMPLE_CONTAINER); Container retrievedEntity = containerDao.loadByPk(SAMPLE_CONTAINER.getId()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java index c3b1d5ddfbc..51cf85f549e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java @@ -229,12 +229,17 @@ public void setDescription(String description) { } @Override - public boolean equals(Object o) { - - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ProductSimple that = (ProductSimple) o; - return Objects.equals(id, that.id) && Objects.equals(description, that.description); + public boolean equals(Object other) { + + if (this == other) { + return true; + } else if (other instanceof ProductSimple) { + ProductSimple that = (ProductSimple) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description); + } else { + return false; + } } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java index fe27d1c78f8..e6d3062539c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java @@ 
-41,7 +41,6 @@ public class DefaultProcessorContext implements ProcessorContext { private final DecoratedMessager messager; private final Types typeUtils; private final Elements elementUtils; - private boolean logsEnabled; private final Classes classUtils; private final JavaPoetFiler filer; private final LoggingGenerator loggingGenerator; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index f39a97179c3..9e43425742d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -117,7 +117,7 @@ protected void maybeAddSimpleClause( ".$L($T.bindMarker($S))", dslMethodName, QueryBuilder.class, bindMarkerName); } else { try { - Number ignored = numberParser.apply(annotationValue); + Number unused = numberParser.apply(annotationValue); } catch (NumberFormatException ignored) { context .getMessager() diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java index 0278211a99d..00d77e0c6bd 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java @@ -99,6 +99,7 @@ private TypeMirror getProviderClass() { return context.getTypeUtils().getPrimitiveType(TypeKind.INT); } + @SuppressWarnings("MixedMutabilityReturnType") private List getEntityHelperTypes() { AnnotationMirror annotationMirror = 
getQueryProviderAnnotationMirror(); for (Map.Entry entry : diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyMethodGenerator.java index ca80fbfc2a2..ecaeffde411 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyMethodGenerator.java @@ -17,22 +17,12 @@ import com.datastax.oss.driver.api.querybuilder.delete.Delete; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import java.util.Optional; import javax.lang.model.element.Modifier; public class EntityHelperDeleteByPrimaryKeyMethodGenerator implements MethodGenerator { - private final EntityDefinition entityDefinition; - - public EntityHelperDeleteByPrimaryKeyMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { - this.entityDefinition = entityDefinition; - } - @Override public Optional generate() { MethodSpec.Builder deleteByPrimaryKeyBuilder = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyPartsMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyPartsMethodGenerator.java index 4770d60bd2b..ba15bc11456 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyPartsMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteByPrimaryKeyPartsMethodGenerator.java @@ -20,7 +20,6 @@ import com.datastax.oss.driver.api.querybuilder.delete.Delete; import com.datastax.oss.driver.api.querybuilder.delete.DeleteSelection; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; import java.util.Optional; @@ -30,10 +29,7 @@ public class EntityHelperDeleteByPrimaryKeyPartsMethodGenerator implements Metho private final EntityDefinition entityDefinition; - public EntityHelperDeleteByPrimaryKeyPartsMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { + public EntityHelperDeleteByPrimaryKeyPartsMethodGenerator(EntityDefinition entityDefinition) { this.entityDefinition = entityDefinition; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteStartMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteStartMethodGenerator.java index f1f497dacf9..9e574564b19 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteStartMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperDeleteStartMethodGenerator.java @@ -18,22 +18,12 @@ import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import com.datastax.oss.driver.api.querybuilder.delete.DeleteSelection; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import java.util.Optional; import 
javax.lang.model.element.Modifier; public class EntityHelperDeleteStartMethodGenerator implements MethodGenerator { - private final EntityDefinition entityDefinition; - - public EntityHelperDeleteStartMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { - this.entityDefinition = entityDefinition; - } - @Override public Optional generate() { MethodSpec.Builder deleteStartBuilder = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java index 63c29c79964..743b396ff2b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java @@ -106,17 +106,17 @@ protected JavaFile.Builder getContents() { for (MethodGenerator methodGenerator : ImmutableList.of( - new EntityHelperSetMethodGenerator(entityDefinition, this, context), - new EntityHelperGetMethodGenerator(entityDefinition, this, context), - new EntityHelperInsertMethodGenerator(entityDefinition, this, context), - new EntityHelperSelectByPrimaryKeyPartsMethodGenerator(entityDefinition, this, context), - new EntityHelperSelectByPrimaryKeyMethodGenerator(entityDefinition, this, context), - new EntityHelperSelectStartMethodGenerator(entityDefinition, this, context), - new EntityHelperDeleteStartMethodGenerator(entityDefinition, this, context), - new EntityHelperDeleteByPrimaryKeyPartsMethodGenerator(entityDefinition, this, context), - new EntityHelperDeleteByPrimaryKeyMethodGenerator(entityDefinition, this, context), - new EntityHelperUpdateStartMethodGenerator(entityDefinition, this, context), - new EntityHelperUpdateByPrimaryKeyMethodGenerator(entityDefinition, this, context))) { + 
new EntityHelperSetMethodGenerator(entityDefinition, this), + new EntityHelperGetMethodGenerator(entityDefinition, this), + new EntityHelperInsertMethodGenerator(entityDefinition), + new EntityHelperSelectByPrimaryKeyPartsMethodGenerator(), + new EntityHelperSelectByPrimaryKeyMethodGenerator(), + new EntityHelperSelectStartMethodGenerator(entityDefinition), + new EntityHelperDeleteStartMethodGenerator(), + new EntityHelperDeleteByPrimaryKeyPartsMethodGenerator(entityDefinition), + new EntityHelperDeleteByPrimaryKeyMethodGenerator(), + new EntityHelperUpdateStartMethodGenerator(entityDefinition), + new EntityHelperUpdateByPrimaryKeyMethodGenerator(entityDefinition))) { methodGenerator.generate().ifPresent(classContents::addMethod); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java index 2f73c2be27a..b6e4f67182c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java @@ -18,7 +18,6 @@ import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.util.generation.BindableHandlingSharedCode; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; import com.datastax.oss.driver.internal.mapper.processor.util.generation.PropertyType; @@ -41,9 +40,7 @@ public class EntityHelperGetMethodGenerator implements MethodGenerator { private final 
BindableHandlingSharedCode enclosingClass; public EntityHelperGetMethodGenerator( - EntityDefinition entityDefinition, - BindableHandlingSharedCode enclosingClass, - ProcessorContext context) { + EntityDefinition entityDefinition, BindableHandlingSharedCode enclosingClass) { this.entityDefinition = entityDefinition; this.enclosingClass = enclosingClass; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperInsertMethodGenerator.java index c21a8ba1bde..8026cc02f06 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperInsertMethodGenerator.java @@ -19,7 +19,6 @@ import com.datastax.oss.driver.api.querybuilder.insert.InsertInto; import com.datastax.oss.driver.api.querybuilder.insert.RegularInsert; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import java.util.Optional; import javax.lang.model.element.Modifier; @@ -28,10 +27,7 @@ public class EntityHelperInsertMethodGenerator implements MethodGenerator { private final EntityDefinition entityDefinition; - public EntityHelperInsertMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { + public EntityHelperInsertMethodGenerator(EntityDefinition entityDefinition) { this.entityDefinition = entityDefinition; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyMethodGenerator.java index fbb63329264..799302e5e1a 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyMethodGenerator.java @@ -17,22 +17,12 @@ import com.datastax.oss.driver.api.querybuilder.select.Select; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import java.util.Optional; import javax.lang.model.element.Modifier; public class EntityHelperSelectByPrimaryKeyMethodGenerator implements MethodGenerator { - private final EntityDefinition entityDefinition; - - public EntityHelperSelectByPrimaryKeyMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { - this.entityDefinition = entityDefinition; - } - @Override public Optional generate() { MethodSpec.Builder selectByPrimaryKeyBuilder = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyPartsMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyPartsMethodGenerator.java index 79c06fda321..2c0d11c6999 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyPartsMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectByPrimaryKeyPartsMethodGenerator.java @@ -18,7 +18,6 @@ import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import 
com.datastax.oss.driver.api.querybuilder.select.Select; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; import java.util.Optional; @@ -26,15 +25,6 @@ public class EntityHelperSelectByPrimaryKeyPartsMethodGenerator implements MethodGenerator { - private final EntityDefinition entityDefinition; - - public EntityHelperSelectByPrimaryKeyPartsMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { - this.entityDefinition = entityDefinition; - } - @Override public Optional generate() { MethodSpec.Builder selectByPrimaryKeyPartsBuilder = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectStartMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectStartMethodGenerator.java index 8f1601a569b..4abbabb6bd8 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectStartMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSelectStartMethodGenerator.java @@ -19,7 +19,6 @@ import com.datastax.oss.driver.api.querybuilder.select.Select; import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.MethodSpec; import java.util.Optional; import javax.lang.model.element.Modifier; @@ -28,10 +27,7 @@ public class EntityHelperSelectStartMethodGenerator implements MethodGenerator { private final EntityDefinition entityDefinition; - public EntityHelperSelectStartMethodGenerator( - 
EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { + public EntityHelperSelectStartMethodGenerator(EntityDefinition entityDefinition) { this.entityDefinition = entityDefinition; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java index 7daf8ad0b57..5e3042c10ac 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java @@ -18,7 +18,6 @@ import com.datastax.oss.driver.api.core.data.SettableByName; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.util.generation.BindableHandlingSharedCode; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; import com.squareup.javapoet.ClassName; @@ -34,15 +33,11 @@ public class EntityHelperSetMethodGenerator implements MethodGenerator { private final EntityDefinition entityDefinition; private final BindableHandlingSharedCode enclosingClass; - private final ProcessorContext context; public EntityHelperSetMethodGenerator( - EntityDefinition entityDefinition, - BindableHandlingSharedCode enclosingClass, - ProcessorContext context) { + EntityDefinition entityDefinition, BindableHandlingSharedCode enclosingClass) { this.entityDefinition = entityDefinition; this.enclosingClass = enclosingClass; - this.context = context; } @Override diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java index 400ea2313f9..289284ccf53 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateByPrimaryKeyMethodGenerator.java @@ -18,7 +18,6 @@ import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import com.datastax.oss.driver.api.querybuilder.relation.Relation; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.querybuilder.update.DefaultUpdate; import com.squareup.javapoet.MethodSpec; import java.util.Optional; @@ -28,10 +27,7 @@ public class EntityHelperUpdateByPrimaryKeyMethodGenerator implements MethodGene private final EntityDefinition entityDefinition; - EntityHelperUpdateByPrimaryKeyMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { + EntityHelperUpdateByPrimaryKeyMethodGenerator(EntityDefinition entityDefinition) { this.entityDefinition = entityDefinition; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateStartMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateStartMethodGenerator.java index e9b85820d79..ad2928e0606 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateStartMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperUpdateStartMethodGenerator.java @@ -20,7 +20,6 @@ import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import com.datastax.oss.driver.api.querybuilder.update.UpdateStart; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; -import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.querybuilder.update.DefaultUpdate; import com.squareup.javapoet.MethodSpec; import java.util.Optional; @@ -30,10 +29,7 @@ public class EntityHelperUpdateStartMethodGenerator implements MethodGenerator { private final EntityDefinition entityDefinition; - EntityHelperUpdateStartMethodGenerator( - EntityDefinition entityDefinition, - EntityHelperGenerator enclosingClass, - ProcessorContext context) { + EntityHelperUpdateStartMethodGenerator(EntityDefinition entityDefinition) { this.entityDefinition = entityDefinition; } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java index 3372ce8c996..65a3a1da580 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java @@ -140,8 +140,8 @@ private static class HierarchyScanStrategyOptions { } } - boolean atHighest(TypeMirror mirror) { - return highestAncestor != null && highestAncestor.equals(mirror); + boolean atHighest(TypeMirror mirror, ProcessorContext context) { + return highestAncestor != null && context.getTypeUtils().isSameType(mirror, highestAncestor); } } @@ -171,14 +171,14 @@ private static void traverseHierarchy( } Set interfacesToScan = Collections.emptySet(); - boolean atHighestClass = 
hierarchyScanStrategy.atHighest(classElement.asType()); + boolean atHighestClass = hierarchyScanStrategy.atHighest(classElement.asType(), context); while (!atHighestClass) { // add super class TypeMirror superClass = classElement.getSuperclass(); TypeElement superClassElement = null; if (superClass.getKind() == TypeKind.DECLARED) { superClassElement = (TypeElement) context.getTypeUtils().asElement(superClass); - atHighestClass = hierarchyScanStrategy.atHighest(superClass); + atHighestClass = hierarchyScanStrategy.atHighest(superClass, context); if (!atHighestClass || hierarchyScanStrategy.includeHighestAncestor) { if (!typeConsumer.apply(superClass)) { return; @@ -228,7 +228,7 @@ private static void scanInterfaces( TypeElement interfaceElement = (TypeElement) context.getTypeUtils().asElement(interfaceType); // skip if at highest ancestor. - boolean atHighest = hierarchyScanStrategy.atHighest(interfaceType); + boolean atHighest = hierarchyScanStrategy.atHighest(interfaceType, context); if (!atHighest || hierarchyScanStrategy.includeHighestAncestor) { if (!typeConsumer.apply(interfaceType)) { return; diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScannerTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScannerTest.java index 26a511bb429..61bab446f24 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScannerTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScannerTest.java @@ -59,6 +59,8 @@ public HierarchyScannerTest() { Mockito.when(classUtils.isSame(Mockito.any(Element.class), Mockito.any(Class.class))) .thenReturn(false); + Mockito.when(types.isSameType(Mockito.any(TypeMirror.class), Mockito.any(TypeMirror.class))) + .thenAnswer(invocation -> invocation.getArgument(0) == invocation.getArgument(1)); } @Test diff --git 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index 6ceb62799a0..e5f817ab43d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -163,7 +163,7 @@ public BoundStatementBuilder populateBoundStatementWithStatementAttributes( } private ConsistencyLevel getConsistencyLevelFromName(String name) { - InternalDriverContext idContext = (InternalDriverContext) (context.getSession().getContext()); + InternalDriverContext idContext = (InternalDriverContext) context.getSession().getContext(); ConsistencyLevelRegistry registry = idContext.getConsistencyLevelRegistry(); return registry.codeToLevel(registry.nameToCode(name)); } diff --git a/pom.xml b/pom.xml index 9d8b0a212a1..7a9292a846a 100644 --- a/pom.xml +++ b/pom.xml @@ -412,12 +412,12 @@ org.codehaus.plexus plexus-compiler-javac-errorprone - 2.8 + 2.8.5 com.google.errorprone error_prone_core - 2.2.0 + 2.3.3 From 1e08237899f971fc5722c552202039e97590194a Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 5 Aug 2019 15:12:34 -0700 Subject: [PATCH 035/979] Add missing call to request tracker --- .../datastax/oss/driver/internal/core/cql/CqlRequestHandler.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 10cafc7b4bc..4951697756c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -687,6 +687,7 @@ private void processErrorResponse(Error errorMessage) { } } } else if (exception instanceof RequestThrottlingException) { + trackNodeError(node, exception, 
NANOTIME_NOT_MEASURED_YET); setFinalError(exception, node, execution); return null; } From 4b83aa3b88bd4d067004e5dd00630427a1f9d5e9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 6 Aug 2019 15:35:25 -0700 Subject: [PATCH 036/979] JAVA-2378: Clarify javadocs of routing info in Request --- .../oss/driver/api/core/session/Request.java | 52 +++++++++++-------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/Request.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/Request.java index b9645fc7e43..b10bd74b4c6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/Request.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/Request.java @@ -78,47 +78,53 @@ public interface Request { CqlIdentifier getKeyspace(); /** - * The keyspace to use for token-aware routing, if no {@link #getKeyspace() per-request keyspace} - * is defined, or {@code null} if this request does not use token-aware routing. + * The keyspace to use for token-aware routing. * - *

    See {@link #getRoutingKey()} for a detailed explanation of token-aware routing. + *

    Note that if a {@linkplain #getKeyspace() per-request keyspace} is already defined for this + * request, it takes precedence over this method. * - *

    Note that this is the only way to define a routing keyspace for protocol v4 or lower. + *

    See {@link #getRoutingKey()} for a detailed explanation of token-aware routing. */ @Nullable CqlIdentifier getRoutingKeyspace(); /** - * The (encoded) partition key to use for token-aware routing, or {@code null} if this request - * does not use token-aware routing. + * The partition key to use for token-aware routing. * - *

    When the driver picks a coordinator to execute a request, it prioritizes the replicas of the - * partition that this query operates on, in order to avoid an extra network jump on the server - * side. To find these replicas, it needs a keyspace (which is where the replication settings are - * defined) and a key, that are computed the following way: + *

    For each request, the driver tries to determine a routing keyspace and a + * routing key by calling the following methods: * *

      - *
    • if a per-request keyspace is specified with {@link #getKeyspace()}, it is used as the - * keyspace; - *
    • otherwise, if {@link #getRoutingKeyspace()} is specified, it is used as the keyspace; - *
    • otherwise, if {@link Session#getKeyspace()} is not {@code null}, it is used as the - * keyspace; - *
    • if a routing token is defined with {@link #getRoutingToken()}, it is used as the key; - *
    • otherwise, the result of this method is used as the key. + *
    • routing keyspace: + *
        + *
      • the result of {@link #getKeyspace()}, if not null; + *
      • otherwise, the result of {@link #getRoutingKeyspace()}, if not null; + *
      • otherwise, the result of {@link Session#getKeyspace()}, if not empty; + *
      • otherwise, null. + *
      + *
    • routing key: + *
        + *
      • the result of {@link #getRoutingToken()}, if not null; + *
      • otherwise, the result of {@link #getRoutingKey()}, if not null; + *
      • otherwise, null. + *
      *
    * - * If either keyspace or key is {@code null} at the end of this process, then token-aware routing - * is disabled. + * This provides a hint of the partition that the request operates on. When the driver picks a + * coordinator for execution, it will prioritize the replicas that own that partition, in order to + * avoid an extra network jump on the server side. + * + *

    Routing information is optional: if either keyspace or key is null, token-aware routing is + * disabled for this request. */ @Nullable ByteBuffer getRoutingKey(); /** - * The token to use for token-aware routing, or {@code null} if this request does not use - * token-aware routing. + * The token to use for token-aware routing. * - *

    This is the same information as {@link #getRoutingKey()}, but already hashed in a token. It - * is probably more useful for analytics tools that "shard" a query on a set of token ranges. + *

    This is an alternative to {@link #getRoutingKey()}. Both methods represent the same + * information, a request can provide one or the other. * *

    See {@link #getRoutingKey()} for a detailed explanation of token-aware routing. */ From b9e44c0f3b82851d4324b638adc4308830f97d78 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 23 Jul 2019 08:13:27 -0700 Subject: [PATCH 037/979] JAVA-2351: Add a driver example for the object mapper --- changelog/README.md | 1 + examples/pom.xml | 25 +++ .../mapper/KillrVideoMapperExample.java | 192 ++++++++++++++++++ .../mapper/killrvideo/KillrVideoMapper.java | 37 ++++ .../user/CreateUserQueryProvider.java | 129 ++++++++++++ .../killrvideo/user/LoginQueryProvider.java | 72 +++++++ .../killrvideo/user/PasswordHashing.java | 37 ++++ .../examples/mapper/killrvideo/user/User.java | 95 +++++++++ .../killrvideo/user/UserCredentials.java | 61 ++++++ .../mapper/killrvideo/user/UserDao.java | 71 +++++++ .../video/CreateVideoQueryProvider.java | 134 ++++++++++++ .../mapper/killrvideo/video/LatestVideo.java | 63 ++++++ .../mapper/killrvideo/video/UserVideo.java | 54 +++++ .../mapper/killrvideo/video/Video.java | 91 +++++++++ .../mapper/killrvideo/video/VideoBase.java | 79 +++++++ .../mapper/killrvideo/video/VideoByTag.java | 69 +++++++ .../mapper/killrvideo/video/VideoDao.java | 70 +++++++ examples/src/main/resources/application.conf | 8 + .../src/main/resources/killrvideo_schema.cql | 137 +++++++++++++ 19 files changed, 1425 insertions(+) create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/KillrVideoMapper.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/PasswordHashing.java create mode 100644 
examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/User.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserCredentials.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserDao.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/LatestVideo.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/UserVideo.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/Video.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/VideoBase.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/VideoByTag.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/VideoDao.java create mode 100644 examples/src/main/resources/killrvideo_schema.cql diff --git a/changelog/README.md b/changelog/README.md index 624822afa67..da31d94f5b7 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2351: Add a driver example for the object mapper - [bug] JAVA-2323: Handle restart of a node with same host_id but a different address - [improvement] JAVA-2303: Ignore peer rows matching the control host's RPC address - [improvement] JAVA-2236: Add methods to set the auth provider programmatically diff --git a/examples/pom.xml b/examples/pom.xml index ba662d6dafa..c4500bc762e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -45,6 +45,11 @@ java-driver-query-builder ${project.version} + + ${project.groupId} + java-driver-mapper-runtime + ${project.version} + @@ -143,11 +148,31 @@ 
logback-classic runtime + + + at.favre.lib + bcrypt + 0.8.0 + + + maven-compiler-plugin + + 1.8 + 1.8 + + + com.datastax.oss + java-driver-mapper-processor + ${project.version} + + + + org.revapi revapi-maven-plugin diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java new file mode 100644 index 00000000000..2f8af1eb501 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java @@ -0,0 +1,192 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.mapper; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.PagingIterable; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.examples.mapper.killrvideo.KillrVideoMapper; +import com.datastax.oss.driver.examples.mapper.killrvideo.user.User; +import com.datastax.oss.driver.examples.mapper.killrvideo.user.UserDao; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.LatestVideo; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.UserVideo; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.Video; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.VideoByTag; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.VideoDao; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +/** + * Uses the driver's object mapper to interact with a schema. + * + *

    We use the data model of the KillrVideo sample + * application. The mapped entities and DAOs are in the {@link + * com.datastax.oss.driver.examples.mapper.killrvideo} package. We only cover a subset of the data + * model (ratings, stats, recommendations and comments are not covered). + * + *

    Preconditions: + * + *

      + *
    • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
    + * + *

    Side effects: + * + *

      + *
    • creates a new keyspace "killrvideo" in the session. If a keyspace with this name already + * exists, it will be reused; + *
    • creates the tables of the KillrVideo data model, if they don't already exist; + *
    • inserts a new user, or reuse the existing one if the email address is already taken; + *
    • inserts a video for that user. + *
    + * + * @see Java + * Driver Mapper manual + */ +public class KillrVideoMapperExample { + + private static final CqlIdentifier KEYSPACE_ID = CqlIdentifier.fromCql("killrvideo"); + + public static void main(String[] args) { + + try (CqlSession session = CqlSession.builder().build()) { + + maybeCreateSchema(session); + + KillrVideoMapper mapper = + KillrVideoMapper.builder(session).withDefaultKeyspace(KEYSPACE_ID).build(); + + // Create a new user + UserDao userDao = mapper.userDao(); + + User user = + new User(UUID.randomUUID(), "test", "user", "testuser@example.com", Instant.now()); + + if (userDao.create(user, "password123".toCharArray())) { + System.out.println("Created " + user); + } else { + user = userDao.getByEmail("testuser@example.com"); + System.out.println("Reusing existing " + user); + } + + // Creating another user with the same email should fail + assert !userDao.create( + new User(UUID.randomUUID(), "test2", "user", "testuser@example.com", Instant.now()), + "secret123".toCharArray()); + + // Simulate login attempts + tryLogin(userDao, "testuser@example.com", "password123"); + tryLogin(userDao, "testuser@example.com", "secret123"); + + // Insert a video + VideoDao videoDao = mapper.videoDao(); + + Video video = new Video(); + video.setUserid(user.getUserid()); + video.setName( + "Getting Started with DataStax Apache Cassandra as a Service on DataStax Constellation"); + video.setLocation("https://www.youtube.com/watch?v=68xzKpcZURA"); + Set tags = new HashSet<>(); + tags.add("apachecassandra"); + tags.add("nosql"); + tags.add("hybridcloud"); + video.setTags(tags); + + videoDao.create(video); + System.out.printf("Created video [%s] %s%n", video.getVideoid(), video.getName()); + + // Check that associated denormalized tables have also been updated: + PagingIterable userVideos = videoDao.getByUser(user.getUserid()); + System.out.printf("Videos for %s %s:%n", user.getFirstname(), user.getLastname()); + for (UserVideo userVideo : userVideos) { + 
System.out.printf(" [%s] %s%n", userVideo.getVideoid(), userVideo.getName()); + } + + PagingIterable latestVideos = videoDao.getLatest(todaysTimestamp()); + System.out.println("Latest videos:"); + for (LatestVideo latestVideo : latestVideos) { + System.out.printf(" [%s] %s%n", latestVideo.getVideoid(), latestVideo.getName()); + } + + PagingIterable videosByTag = videoDao.getByTag("apachecassandra"); + System.out.println("Videos tagged with apachecassandra:"); + for (VideoByTag videoByTag : videosByTag) { + System.out.printf(" [%s] %s%n", videoByTag.getVideoid(), videoByTag.getName()); + } + + // Update the existing video: + Video template = new Video(); + template.setVideoid(video.getVideoid()); + template.setName( + "Getting Started with DataStax Apache Cassandra® as a Service on DataStax Constellation"); + videoDao.update(template); + // Reload the whole entity and check the fields + video = videoDao.get(video.getVideoid()); + System.out.printf("Updated name for video %s: %s%n", video.getVideoid(), video.getName()); + } catch (Exception e) { + e.printStackTrace(); + } + } + + private static void tryLogin(UserDao userDao, String email, String password) { + Optional maybeUser = userDao.login(email, password.toCharArray()); + System.out.printf( + "Logging in with %s/%s: %s%n", + email, password, maybeUser.isPresent() ? 
"Success" : "Failure"); + } + + private static void maybeCreateSchema(CqlSession session) throws Exception { + session.execute( + SimpleStatement.newInstance( + "CREATE KEYSPACE IF NOT EXISTS killrvideo WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}") + .setExecutionProfileName("slow")); + session.execute("USE killrvideo"); + for (String statement : getStatements("killrvideo_schema.cql")) { + session.execute(SimpleStatement.newInstance(statement).setExecutionProfileName("slow")); + } + } + + private static List getStatements(String fileName) throws Exception { + Path path = Paths.get(ClassLoader.getSystemResource(fileName).toURI()); + String contents = new String(Files.readAllBytes(path), StandardCharsets.UTF_8); + return Arrays.stream(contents.split(";")) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .collect(Collectors.toList()); + } + + /** + * KillrVideo uses a textual timestamp to partition recent video. Build the timestamp for today to + * fetch our latest insertions. + */ + private static String todaysTimestamp() { + return DateTimeFormatter.ofPattern("yyyyMMdd").withZone(ZoneOffset.UTC).format(Instant.now()); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/KillrVideoMapper.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/KillrVideoMapper.java new file mode 100644 index 00000000000..81bbd0a1106 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/KillrVideoMapper.java @@ -0,0 +1,37 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.examples.mapper.killrvideo.user.UserDao; +import com.datastax.oss.driver.examples.mapper.killrvideo.video.VideoDao; + +@Mapper +public interface KillrVideoMapper { + + @DaoFactory + UserDao userDao(); + + @DaoFactory + VideoDao videoDao(); + + static MapperBuilder builder(CqlSession session) { + return new KillrVideoMapperBuilder(session); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java new file mode 100644 index 00000000000..dd70ce39f95 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java @@ -0,0 +1,129 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import java.time.Instant; +import java.util.Objects; +import java.util.UUID; + +/** + * Provides the implementation of {@link UserDao#create}. + * + *

    Package-private visibility is sufficient, this will be called only from the generated DAO + * implementation. + */ +class CreateUserQueryProvider { + + private final CqlSession session; + private final EntityHelper userHelper; + private final EntityHelper credentialsHelper; + private final PreparedStatement preparedInsertCredentials; + private final PreparedStatement preparedInsertUser; + private final PreparedStatement preparedDeleteCredentials; + private final PreparedStatement preparedDeleteUser; + + CreateUserQueryProvider( + MapperContext context, + EntityHelper userHelper, + EntityHelper credentialsHelper) { + + this.session = context.getSession(); + + this.userHelper = userHelper; + this.credentialsHelper = credentialsHelper; + + this.preparedInsertCredentials = + session.prepare(credentialsHelper.insert().ifNotExists().asCql()); + this.preparedInsertUser = session.prepare(userHelper.insert().asCql()); + this.preparedDeleteCredentials = + session.prepare( + credentialsHelper + .deleteByPrimaryKey() + .ifColumn("userid") + .isEqualTo(bindMarker("userid")) + .builder() + .setConsistencyLevel(DefaultConsistencyLevel.ANY) + .build()); + this.preparedDeleteUser = + session.prepare( + userHelper + .deleteByPrimaryKey() + .ifExists() + .builder() + .setConsistencyLevel(DefaultConsistencyLevel.ANY) + .build()); + } + + boolean create(User user, char[] password) { + Objects.requireNonNull(user.getUserid()); + Objects.requireNonNull(user.getEmail()); + if (user.getCreatedDate() == null) { + user.setCreatedDate(Instant.now()); + } + + try { + // Insert the user first: otherwise there would be a short window where we have credentials + // without a corresponding user in the database, and this is considered an error state in + // LoginQueryProvider + insertUser(user); + if (!insertCredentialsIfNotExists(user.getEmail(), password, user.getUserid())) { + // email already exists + session.execute(preparedDeleteUser.bind(user.getUserid())); + return false; + } + return 
true; + } catch (Exception insertException) { + // Clean up and rethrow + try { + session.execute(preparedDeleteUser.bind(user.getUserid())); + } catch (Exception e) { + insertException.addSuppressed(e); + } + try { + session.execute(preparedDeleteCredentials.bind(user.getEmail(), user.getUserid())); + } catch (Exception e) { + insertException.addSuppressed(e); + } + throw insertException; + } + } + + private boolean insertCredentialsIfNotExists(String email, char[] password, UUID userId) { + String passwordHash = PasswordHashing.hash(Objects.requireNonNull(password)); + UserCredentials credentials = + new UserCredentials(Objects.requireNonNull(email), passwordHash, userId); + BoundStatementBuilder insertCredentials = preparedInsertCredentials.boundStatementBuilder(); + credentialsHelper.set(credentials, insertCredentials, NullSavingStrategy.DO_NOT_SET); + ResultSet resultSet = session.execute(insertCredentials.build()); + return resultSet.wasApplied(); + } + + private void insertUser(User user) { + BoundStatementBuilder insertUser = preparedInsertUser.boundStatementBuilder(); + userHelper.set(user, insertUser, NullSavingStrategy.DO_NOT_SET); + session.execute(insertUser.build()); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java new file mode 100644 index 00000000000..7b968a65bc2 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java @@ -0,0 +1,72 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import java.util.Optional; +import java.util.UUID; + +/** + * Provides the implementation of {@link UserDao#login}. + * + *

    Package-private visibility is sufficient, this will be called only from the generated DAO + * implementation. + */ +class LoginQueryProvider { + + private final CqlSession session; + private final EntityHelper userHelper; + private final PreparedStatement preparedSelectCredentials; + private final PreparedStatement preparedSelectUser; + + LoginQueryProvider( + MapperContext context, + EntityHelper userHelper, + EntityHelper credentialsHelper) { + + this.session = context.getSession(); + + this.userHelper = userHelper; + + this.preparedSelectCredentials = + session.prepare(credentialsHelper.selectByPrimaryKey().asCql()); + this.preparedSelectUser = session.prepare(userHelper.selectByPrimaryKey().asCql()); + } + + Optional login(String email, char[] password) { + return Optional.ofNullable(session.execute(preparedSelectCredentials.bind(email)).one()) + .flatMap( + credentialsRow -> { + String hashedPassword = credentialsRow.getString("password"); + if (PasswordHashing.matches(password, hashedPassword)) { + UUID userid = credentialsRow.getUuid("userid"); + Row userRow = session.execute(preparedSelectUser.bind(userid)).one(); + if (userRow == null) { + throw new IllegalStateException( + "Should have found matching row for userid " + userid); + } else { + return Optional.of(userHelper.get(userRow)); + } + } else { + return Optional.empty(); + } + }); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/PasswordHashing.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/PasswordHashing.java new file mode 100644 index 00000000000..e20a628f594 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/PasswordHashing.java @@ -0,0 +1,37 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import at.favre.lib.crypto.bcrypt.BCrypt; + +/** + * Utility methods to safely store passwords in the database. + * + *

    We rely on a third-party implementation of the bcrypt password hash function. + * + * @see patrickfav/bcrypt + */ +public class PasswordHashing { + + public static String hash(char[] password) { + return BCrypt.withDefaults().hashToString(12, password); + } + + public static boolean matches(char[] password, String hash) { + BCrypt.Result result = BCrypt.verifyer().verify(password, hash); + return result.verified; + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/User.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/User.java new file mode 100644 index 00000000000..3f1a53e00c2 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/User.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import java.time.Instant; +import java.util.StringJoiner; +import java.util.UUID; + +@Entity +@CqlName("users") +public class User { + + @PartitionKey private UUID userid; + private String firstname; + private String lastname; + private String email; + private Instant createdDate; + + public User(UUID userid, String firstname, String lastname, String email, Instant createdDate) { + this.userid = userid; + this.firstname = firstname; + this.lastname = lastname; + this.email = email; + this.createdDate = createdDate; + } + + public User() {} + + public UUID getUserid() { + return userid; + } + + public void setUserid(UUID userid) { + this.userid = userid; + } + + public String getFirstname() { + return firstname; + } + + public void setFirstname(String firstname) { + this.firstname = firstname; + } + + public String getLastname() { + return lastname; + } + + public void setLastname(String lastname) { + this.lastname = lastname; + } + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public Instant getCreatedDate() { + return createdDate; + } + + public void setCreatedDate(Instant createdDate) { + this.createdDate = createdDate; + } + + @Override + public String toString() { + return new StringJoiner(", ", User.class.getSimpleName() + "[", "]") + .add("userid=" + userid) + .add("firstname='" + firstname + "'") + .add("lastname='" + lastname + "'") + .add("email='" + email + "'") + .add("createdDate=" + createdDate) + .toString(); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserCredentials.java 
b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserCredentials.java new file mode 100644 index 00000000000..d6ed8b4a4ab --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserCredentials.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import java.util.UUID; + +@Entity +public class UserCredentials { + @PartitionKey private String email; + + private String password; + + private UUID userid; + + public UserCredentials(String email, String password, UUID userid) { + this.email = email; + this.password = password; + this.userid = userid; + } + + public UserCredentials() {} + + public String getEmail() { + return email; + } + + public void setEmail(String email) { + this.email = email; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public UUID getUserid() { + return userid; + } + + public void setUserid(UUID userid) { + this.userid = userid; + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserDao.java 
b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserDao.java new file mode 100644 index 00000000000..f4aadc45a06 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/UserDao.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.mapper.killrvideo.user; + +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import java.util.Optional; +import java.util.UUID; + +@Dao +public interface UserDao { + + /** Simple selection by full primary key. */ + @Select + User get(UUID userid); + + @Select + UserCredentials getCredentials(String email); + + /** + * An alternative to query providers is default methods that call other methods on the DAO. + * + *

    The only drawback is that those other methods have to be part of the DAO's public API. + */ + default User getByEmail(String email) { + UserCredentials credentials = getCredentials(email); + return (credentials == null) ? null : get(credentials.getUserid()); + } + + /** + * Creating a user is more than a single insert: we have to update two different tables, check + * that the email is not used already, and handle password encryption. + * + *

    We use a query provider to wrap everything into a single method. + * + *

    Note that you could opt for a more layered approach: only expose basic operations on the DAO + * (insertCredentialsIfNotExists, insertUser...) and add a service layer on top for more complex + * logic. Both designs are valid, this is a matter of personal choice. + * + * @return {@code true} if the new user was created, or {@code false} if this email address was + * already taken. + */ + @QueryProvider( + providerClass = CreateUserQueryProvider.class, + entityHelpers = {User.class, UserCredentials.class}) + boolean create(User user, char[] password); + + /** + * Similar to {@link #create}, this encapsulates encryption so we use a query provider. + * + * @return the authenticated user, or {@link Optional#empty()} if the credentials are invalid. + */ + @QueryProvider( + providerClass = LoginQueryProvider.class, + entityHelpers = {User.class, UserCredentials.class}) + Optional login(String email, char[] password); +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java new file mode 100644 index 00000000000..85704290903 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.mapper.killrvideo.video; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.UUID; + +/** + * Provides the implementation of {@link VideoDao#create}. + * + *

    Package-private visibility is sufficient, this will be called only from the generated DAO + * implementation. + */ +class CreateVideoQueryProvider { + + private final CqlSession session; + private final EntityHelper

    Also, note that this queries a different table: DAOs are not limited to a single entity, the + * return type of the method dictates what rows will be mapped to. + */ + @Select + PagingIterable getByUser(UUID userid); + + /** Other selection by partial primary key, for another table. */ + @Select + PagingIterable getLatest(String yyyymmdd); + + /** Other selection by partial primary key, for yet another table. */ + @Select + PagingIterable getByTag(String tag); + + /** + * Creating a video is a bit more complex: because of denormalization, it involves multiple + * tables. + * + *

    A query provider is a nice way to wrap all the queries in a single operation, and hide the + * details from the DAO interface. + */ + @QueryProvider( + providerClass = CreateVideoQueryProvider.class, + entityHelpers = {Video.class, UserVideo.class, LatestVideo.class, VideoByTag.class}) + void create(Video video); + + /** + * Update using a template: the template must have its full primary key set; beyond that, any + * non-null field will be considered as a value to SET on the target row. + * + *

    Note that we specify the null saving strategy for emphasis, but this is the default. + */ + @Update(nullSavingStrategy = NullSavingStrategy.DO_NOT_SET) + void update(Video template); +} diff --git a/examples/src/main/resources/application.conf b/examples/src/main/resources/application.conf index 002018efc91..12cb19a84d0 100644 --- a/examples/src/main/resources/application.conf +++ b/examples/src/main/resources/application.conf @@ -11,4 +11,12 @@ datastax-java-driver { max-concurrent-requests = 32 max-queue-size = 10000 } + + advanced.request.warn-if-set-keyspace = false + + profiles { + slow { + basic.request.timeout = 10 seconds + } + } } \ No newline at end of file diff --git a/examples/src/main/resources/killrvideo_schema.cql b/examples/src/main/resources/killrvideo_schema.cql new file mode 100644 index 00000000000..24728d550d0 --- /dev/null +++ b/examples/src/main/resources/killrvideo_schema.cql @@ -0,0 +1,137 @@ +// User credentials, keyed by email address so we can authenticate +CREATE TABLE IF NOT EXISTS user_credentials ( + email text, + password text, + userid uuid, + PRIMARY KEY (email) +); + +// Users keyed by id +CREATE TABLE IF NOT EXISTS users ( + userid uuid, + firstname text, + lastname text, + email text, + created_date timestamp, + PRIMARY KEY (userid) +); + +// Videos by id +CREATE TABLE IF NOT EXISTS videos ( + videoid uuid, + userid uuid, + name text, + description text, + location text, + location_type int, + preview_image_location text, + tags set, + added_date timestamp, + PRIMARY KEY (videoid) +); + +// One-to-many from user point of view (lookup table) +CREATE TABLE IF NOT EXISTS user_videos ( + userid uuid, + added_date timestamp, + videoid uuid, + name text, + preview_image_location text, + PRIMARY KEY (userid, added_date, videoid) +) WITH CLUSTERING ORDER BY (added_date DESC, videoid ASC); + +// Track latest videos, grouped by day (if we ever develop a bad hotspot from the daily grouping here, we could mitigate by +// splitting the 
row using an arbitrary group number, making the partition key (yyyymmdd, group_number)) +CREATE TABLE IF NOT EXISTS latest_videos ( + yyyymmdd text, + added_date timestamp, + videoid uuid, + userid uuid, + name text, + preview_image_location text, + PRIMARY KEY (yyyymmdd, added_date, videoid) +) WITH CLUSTERING ORDER BY (added_date DESC, videoid ASC); + +// Video ratings (counter table) +CREATE TABLE IF NOT EXISTS video_ratings ( + videoid uuid, + rating_counter counter, + rating_total counter, + PRIMARY KEY (videoid) +); + +// Video ratings by user (to try and mitigate voting multiple times) +CREATE TABLE IF NOT EXISTS video_ratings_by_user ( + videoid uuid, + userid uuid, + rating int, + PRIMARY KEY (videoid, userid) +); + +// Records the number of views/playbacks of a video +CREATE TABLE IF NOT EXISTS video_playback_stats ( + videoid uuid, + views counter, + PRIMARY KEY (videoid) +); + +// Recommendations by user (powered by Spark), with the newest videos added to the site always first +CREATE TABLE IF NOT EXISTS video_recommendations ( + userid uuid, + added_date timestamp, + videoid uuid, + rating float, + authorid uuid, + name text, + preview_image_location text, + PRIMARY KEY(userid, added_date, videoid) +) WITH CLUSTERING ORDER BY (added_date DESC, videoid ASC); + +// Recommendations by video (powered by Spark) +CREATE TABLE IF NOT EXISTS video_recommendations_by_video ( + videoid uuid, + userid uuid, + rating float, + added_date timestamp STATIC, + authorid uuid STATIC, + name text STATIC, + preview_image_location text STATIC, + PRIMARY KEY(videoid, userid) +); + +// Index for tag keywords +CREATE TABLE IF NOT EXISTS videos_by_tag ( + tag text, + videoid uuid, + added_date timestamp, + userid uuid, + name text, + preview_image_location text, + tagged_date timestamp, + PRIMARY KEY (tag, videoid) +); + +// Index for tags by first letter in the tag +CREATE TABLE IF NOT EXISTS tags_by_letter ( + first_letter text, + tag text, + PRIMARY KEY (first_letter, tag) 
+); + +// Comments for a given video +CREATE TABLE IF NOT EXISTS comments_by_video ( + videoid uuid, + commentid timeuuid, + userid uuid, + comment text, + PRIMARY KEY (videoid, commentid) +) WITH CLUSTERING ORDER BY (commentid DESC); + +// Comments for a given user +CREATE TABLE IF NOT EXISTS comments_by_user ( + userid uuid, + commentid timeuuid, + videoid uuid, + comment text, + PRIMARY KEY (userid, commentid) +) WITH CLUSTERING ORDER BY (commentid DESC); From 5abbe0318e9cb2999bf64069d0e3e783c4e54edb Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 30 Jul 2019 14:55:08 -0700 Subject: [PATCH 038/979] JAVA-2371: Handle null elements in collections on the decode path --- changelog/README.md | 1 + .../internal/core/type/codec/ListCodec.java | 16 +++++++--- .../internal/core/type/codec/MapCodec.java | 30 ++++++++++++------- .../internal/core/type/codec/SetCodec.java | 16 +++++++--- .../core/type/codec/ListCodecTest.java | 14 +++++++++ .../core/type/codec/MapCodecTest.java | 10 +++++++ .../core/type/codec/SetCodecTest.java | 6 ++++ 7 files changed, 75 insertions(+), 18 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index da31d94f5b7..78b96f3403b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2371: Handle null elements in collections on the decode path - [improvement] JAVA-2351: Add a driver example for the object mapper - [bug] JAVA-2323: Handle restart of a node with same host_id but a different address - [improvement] JAVA-2303: Ignore peer rows matching the control host's RPC address diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java index dd4001e3930..749ba8493f1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java @@ -115,11 +115,19 @@ public List decode( int size = input.getInt(); List result = new ArrayList<>(size); for (int i = 0; i < size; i++) { + ElementT element; int elementSize = input.getInt(); - ByteBuffer encodedElement = input.slice(); - encodedElement.limit(elementSize); - input.position(input.position() + elementSize); - result.add(elementCodec.decode(encodedElement, protocolVersion)); + // Allow null elements on the decode path, because Cassandra might return such collections + // for some computed values in the future -- e.g. SELECT ttl(some_collection) + if (elementSize < 0) { + element = null; + } else { + ByteBuffer encodedElement = input.slice(); + encodedElement.limit(elementSize); + input.position(input.position() + elementSize); + element = elementCodec.decode(encodedElement, protocolVersion); + } + result.add(element); } return result; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java index 4f330b3ab59..b85b21522cf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java @@ -134,18 +134,28 @@ public Map decode( int size = input.getInt(); Map result = Maps.newLinkedHashMapWithExpectedSize(size); for (int i = 0; i < size; i++) { + KeyT key; int keySize = input.getInt(); - ByteBuffer encodedKey = input.slice(); - encodedKey.limit(keySize); - input.position(input.position() + keySize); - KeyT key = keyCodec.decode(encodedKey, protocolVersion); - + // Allow null elements on the decode path, because Cassandra might return such collections + // for some computed values in the future -- e.g. 
SELECT ttl(some_collection) + if (keySize < 0) { + key = null; + } else { + ByteBuffer encodedKey = input.slice(); + encodedKey.limit(keySize); + input.position(input.position() + keySize); + key = keyCodec.decode(encodedKey, protocolVersion); + } + ValueT value; int valueSize = input.getInt(); - ByteBuffer encodedValue = input.slice(); - encodedValue.limit(valueSize); - input.position(input.position() + valueSize); - ValueT value = valueCodec.decode(encodedValue, protocolVersion); - + if (valueSize < 0) { + value = null; + } else { + ByteBuffer encodedValue = input.slice(); + encodedValue.limit(valueSize); + input.position(input.position() + valueSize); + value = valueCodec.decode(encodedValue, protocolVersion); + } result.put(key, value); } return result; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java index 7dc0c930c6e..7f23f14e990 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java @@ -116,11 +116,19 @@ public Set decode( int size = input.getInt(); Set result = Sets.newLinkedHashSetWithExpectedSize(size); for (int i = 0; i < size; i++) { + ElementT element; int elementSize = input.getInt(); - ByteBuffer encodedElement = input.slice(); - encodedElement.limit(elementSize); - input.position(input.position() + elementSize); - result.add(elementCodec.decode(encodedElement, protocolVersion)); + // Allow null elements on the decode path, because Cassandra might return such collections + // for some computed values in the future -- e.g. 
SELECT ttl(some_collection) + if (elementSize < 0) { + element = null; + } else { + ByteBuffer encodedElement = input.slice(); + encodedElement.limit(elementSize); + input.position(input.position() + elementSize); + element = elementCodec.decode(encodedElement, protocolVersion); + } + result.add(element); } return result; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ListCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ListCodecTest.java index 7260a2ee3ac..34e870998ec 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ListCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ListCodecTest.java @@ -93,6 +93,20 @@ public void should_decode_non_empty_list() { .containsExactly(1, 2, 3); } + @Test + public void should_decode_list_with_null_elements() { + when(elementCodec.decode(Bytes.fromHexString("0x0002"), ProtocolVersion.DEFAULT)).thenReturn(2); + assertThat( + decode( + "0x" + + "00000002" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + + "00000002" // size of element 2 + + "0002" // contents of element 2 + )) + .containsExactly(null, 2); + } + @Test public void should_format_null_list() { assertThat(format(null)).isEqualTo("NULL"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MapCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MapCodecTest.java index 96de17f75e8..0fbeace16b1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MapCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MapCodecTest.java @@ -118,6 +118,16 @@ public void should_decode_non_empty_map() { .containsEntry("c", 3); } + @Test + public void should_decode_map_with_null_elements() { + when(keyCodec.decode(Bytes.fromHexString("0x10"), ProtocolVersion.DEFAULT)).thenReturn("a"); + 
when(valueCodec.decode(Bytes.fromHexString("0x0002"), ProtocolVersion.DEFAULT)).thenReturn(2); + assertThat(decode("0x" + "00000002" + "0000000110" + "FFFFFFFF" + "FFFFFFFF" + "000000020002")) + .containsOnlyKeys("a", null) + .containsEntry("a", null) + .containsEntry(null, 2); + } + @Test public void should_format_null_map() { assertThat(format(null)).isEqualTo("NULL"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SetCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SetCodecTest.java index 9e6b590d2f4..6f8f8878c65 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SetCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SetCodecTest.java @@ -93,6 +93,12 @@ public void should_decode_non_empty_set() { .containsExactly(1, 2, 3); } + @Test + public void should_decode_set_with_null_elements() { + when(elementCodec.decode(Bytes.fromHexString("0x01"), ProtocolVersion.DEFAULT)).thenReturn(1); + assertThat(decode("0x" + "00000002" + "0000000101" + "FFFFFFFF")).containsExactly(1, null); + } + @Test public void should_format_null_set() { assertThat(format(null)).isEqualTo("NULL"); From daeecd7d81f0b44731e3aa250d02cbbee13c4741 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 6 Aug 2019 15:54:36 -0700 Subject: [PATCH 039/979] Reorder instructions in collection codecs for clarity --- .../oss/driver/internal/core/type/codec/ListCodec.java | 2 +- .../oss/driver/internal/core/type/codec/MapCodec.java | 4 ++-- .../oss/driver/internal/core/type/codec/SetCodec.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java index 749ba8493f1..a84f666b7d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ListCodec.java @@ -124,8 +124,8 @@ public List decode( } else { ByteBuffer encodedElement = input.slice(); encodedElement.limit(elementSize); - input.position(input.position() + elementSize); element = elementCodec.decode(encodedElement, protocolVersion); + input.position(input.position() + elementSize); } result.add(element); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java index b85b21522cf..d5f16b5336c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/MapCodec.java @@ -143,8 +143,8 @@ public Map decode( } else { ByteBuffer encodedKey = input.slice(); encodedKey.limit(keySize); - input.position(input.position() + keySize); key = keyCodec.decode(encodedKey, protocolVersion); + input.position(input.position() + keySize); } ValueT value; int valueSize = input.getInt(); @@ -153,8 +153,8 @@ public Map decode( } else { ByteBuffer encodedValue = input.slice(); encodedValue.limit(valueSize); - input.position(input.position() + valueSize); value = valueCodec.decode(encodedValue, protocolVersion); + input.position(input.position() + valueSize); } result.put(key, value); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java index 7f23f14e990..41ddb40d6f3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SetCodec.java @@ -125,8 +125,8 @@ public Set decode( } else { ByteBuffer encodedElement = input.slice(); encodedElement.limit(elementSize); - input.position(input.position() + elementSize); element = 
elementCodec.decode(encodedElement, protocolVersion); + input.position(input.position() + elementSize); } result.add(element); } From d322a949e6229ff3aed0da93d4b4eaf04aa5b381 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 1 Aug 2019 18:20:01 -0700 Subject: [PATCH 040/979] JAVA-2377: Add a config option to make driver threads daemon --- changelog/README.md | 1 + .../oss/driver/api/core/config/DefaultDriverOption.java | 2 ++ .../internal/core/context/DefaultNettyOptions.java | 4 ++++ core/src/main/resources/reference.conf | 9 +++++++++ 4 files changed, 16 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 78b96f3403b..5f42611f4e9 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2377: Add a config option to make driver threads daemon - [improvement] JAVA-2371: Handle null elements in collections on the decode path - [improvement] JAVA-2351: Add a driver example for the object mapper - [bug] JAVA-2323: Handle restart of a node with same host_id but a different address diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 89d8365de78..863870f2feb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -173,6 +173,8 @@ public enum DefaultDriverOption implements DriverOption { NETTY_TIMER_TICKS_PER_WHEEL("advanced.netty.timer.ticks-per-wheel"), REQUEST_LOG_WARNINGS("advanced.request.log-warnings"), + + NETTY_DAEMON("advanced.netty.daemon"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java index c4badab96a3..10da06c8308 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java @@ -61,6 +61,7 @@ public class DefaultNettyOptions implements NettyOptions { public DefaultNettyOptions(InternalDriverContext context) { this.config = context.getConfig().getDefaultProfile(); + boolean daemon = config.getBoolean(DefaultDriverOption.NETTY_DAEMON); int ioGroupSize = config.getInt(DefaultDriverOption.NETTY_IO_SIZE); this.ioShutdownQuietPeriod = config.getInt(DefaultDriverOption.NETTY_IO_SHUTDOWN_QUIET_PERIOD); this.ioShutdownTimeout = config.getInt(DefaultDriverOption.NETTY_IO_SHUTDOWN_TIMEOUT); @@ -78,6 +79,7 @@ public DefaultNettyOptions(InternalDriverContext context) { new ThreadFactoryBuilder() .setThreadFactory(safeFactory) .setNameFormat(context.getSessionName() + "-io-%d") + .setDaemon(daemon) .build(); this.ioEventLoopGroup = new NioEventLoopGroup(ioGroupSize, ioThreadFactory); @@ -85,6 +87,7 @@ public DefaultNettyOptions(InternalDriverContext context) { new ThreadFactoryBuilder() .setThreadFactory(safeFactory) .setNameFormat(context.getSessionName() + "-admin-%d") + .setDaemon(daemon) .build(); this.adminEventLoopGroup = new DefaultEventLoopGroup(adminGroupSize, adminThreadFactory); // setup the Timer @@ -92,6 +95,7 @@ public DefaultNettyOptions(InternalDriverContext context) { new ThreadFactoryBuilder() .setThreadFactory(safeFactory) .setNameFormat(context.getSessionName() + "-timer-%d") + .setDaemon(daemon) .build(); Duration tickDuration = config.getDuration(DefaultDriverOption.NETTY_TIMER_TICK_DURATION); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 3ee70be5933..0c0aef5e6d7 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1381,6 +1381,15 @@ datastax-java-driver { # Options related to the Netty event loop groups used internally by the driver. 
advanced.netty { + + # Whether the threads created by the driver should be daemon threads. + # This will apply to the threads in io-group, admin-group, and the timer thread. + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: no + daemon = false + # The event loop group used for I/O operations (reading and writing to Cassandra nodes). # By default, threads in this group are named after the session name, "-io-" and an incrementing # counter, for example "s0-io-0". From 41521dd64ecd21493aaacff361da5e0aaf6ad084 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 7 Aug 2019 11:32:56 -0700 Subject: [PATCH 041/979] Fix ErrorProne warning in example --- .../oss/driver/examples/mapper/KillrVideoMapperExample.java | 1 + pom.xml | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java index 2f8af1eb501..9e80ae12014 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java @@ -70,6 +70,7 @@ * @see Java * Driver Mapper manual */ +@SuppressWarnings("CatchAndPrintStackTrace") public class KillrVideoMapperExample { private static final CqlIdentifier KEYSPACE_ID = CqlIdentifier.fromCql("killrvideo"); diff --git a/pom.xml b/pom.xml index 7a9292a846a..1af637768bf 100644 --- a/pom.xml +++ b/pom.xml @@ -401,8 +401,9 @@ -Xep:FutureReturnValueIgnored:OFF -Xep:MockitoInternalUsage:OFF - -XepExcludedPaths:.*/target/generated-sources/.* - -XepExcludedPaths:.*/target/generated-test-sources/.* + + -XepExcludedPaths:.*/target/(?:generated-sources|generated-test-sources)/.* + true true From 38cb05a1a00bb22f6de116111729b36bf9150321 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 6 Aug 2019 18:06:59 -0700 Subject: [PATCH 
042/979] JAVA-2370: Remove auto-service plugin from mapper processor --- changelog/README.md | 1 + mapper-processor/pom.xml | 11 ++++++----- .../internal/mapper/processor/MapperProcessor.java | 3 --- .../services/javax.annotation.processing.Processor | 1 + pom.xml | 5 ----- 5 files changed, 8 insertions(+), 13 deletions(-) create mode 100644 mapper-processor/src/main/resources/META-INF/services/javax.annotation.processing.Processor diff --git a/changelog/README.md b/changelog/README.md index 5f42611f4e9..6e4e84e6865 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2370: Remove auto-service plugin from mapper processor - [improvement] JAVA-2377: Add a config option to make driver threads daemon - [improvement] JAVA-2371: Handle null elements in collections on the decode path - [improvement] JAVA-2351: Add a driver example for the object mapper diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 3d3538f4e59..07b755f862c 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -42,11 +42,6 @@ com.squareup javapoet - - com.google.auto.service - auto-service - true - com.github.stephenc.jcip jcip-annotations @@ -84,6 +79,12 @@ + + maven-compiler-plugin + + none + + maven-jar-plugin diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index 95a5eb5c61d..c397d972c7a 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -21,7 +21,6 @@ import com.datastax.oss.driver.shaded.guava.common.base.Strings; import com.datastax.oss.driver.shaded.guava.common.base.Throwables; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; 
-import com.google.auto.service.AutoService; import java.lang.annotation.Annotation; import java.util.Map; import java.util.Set; @@ -29,7 +28,6 @@ import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.Filer; import javax.annotation.processing.ProcessingEnvironment; -import javax.annotation.processing.Processor; import javax.annotation.processing.RoundEnvironment; import javax.lang.model.SourceVersion; import javax.lang.model.element.Element; @@ -38,7 +36,6 @@ import javax.lang.model.util.Elements; import javax.lang.model.util.Types; -@AutoService(Processor.class) public class MapperProcessor extends AbstractProcessor { private static final boolean DEFAULT_MAPPER_LOGS_ENABLED = true; diff --git a/mapper-processor/src/main/resources/META-INF/services/javax.annotation.processing.Processor b/mapper-processor/src/main/resources/META-INF/services/javax.annotation.processing.Processor new file mode 100644 index 00000000000..a7ff54415a6 --- /dev/null +++ b/mapper-processor/src/main/resources/META-INF/services/javax.annotation.processing.Processor @@ -0,0 +1 @@ +com.datastax.oss.driver.internal.mapper.processor.MapperProcessor diff --git a/pom.xml b/pom.xml index 1af637768bf..67a1e09c5c3 100644 --- a/pom.xml +++ b/pom.xml @@ -156,11 +156,6 @@ javapoet 1.11.1 - - com.google.auto.service - auto-service - 1.0-rc4 - junit junit From bde0a900ce33df5b95a60c2b11b58501cc0d6166 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 1 Aug 2019 11:48:18 -0700 Subject: [PATCH 043/979] JAVA-2375: Use per-request keyspace when repreparing on the fly --- changelog/README.md | 1 + .../internal/core/cql/CqlRequestHandler.java | 2 +- .../core/session/RepreparePayload.java | 5 +++ .../api/core/cql/PerRequestKeyspaceIT.java | 41 +++++++++++++++++++ 4 files changed, 48 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 6e4e84e6865..8400dd924dc 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 
(in progress) +- [bug] JAVA-2375: Use per-request keyspace when repreparing on the fly - [improvement] JAVA-2370: Remove auto-service plugin from mapper processor - [improvement] JAVA-2377: Add a config option to make driver threads daemon - [improvement] JAVA-2371: Handle null elements in collections on the decode path diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 4951697756c..0b85aa68f70 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -654,7 +654,7 @@ private void processErrorResponse(Error errorMessage) { "Tried to execute unprepared query %s but we don't have the data to reprepare it", Bytes.toHexString(id))); } - Prepare reprepareMessage = new Prepare(repreparePayload.query); + Prepare reprepareMessage = repreparePayload.toMessage(); ThrottledAdminRequestHandler reprepareHandler = new ThrottledAdminRequestHandler( channel, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/RepreparePayload.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/RepreparePayload.java index eaa9541a59f..d1e09278a9f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/RepreparePayload.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/RepreparePayload.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.internal.core.cql.DefaultPreparedStatement; +import com.datastax.oss.protocol.internal.request.Prepare; import java.nio.ByteBuffer; import java.util.Map; import net.jcip.annotations.Immutable; @@ -45,4 +46,8 @@ public RepreparePayload( this.keyspace = keyspace; this.customPayload = customPayload; } + + public Prepare toMessage() { + return new 
Prepare(query, keyspace == null ? null : keyspace.asInternal()); + } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java index 1cd46307efe..88190a39e32 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java @@ -25,6 +25,8 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; +import java.nio.ByteBuffer; +import java.time.Duration; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -203,4 +205,43 @@ public void should_prepare_statement_with_keyspace() { .one(); assertThat(row.getInt(0)).isEqualTo(1); } + + @Test + @CassandraRequirement(min = "4.0") + public void should_reprepare_statement_with_keyspace_on_the_fly() { + // Create a separate session because we don't want it to have a default keyspace + try (CqlSession session = SessionUtils.newSession(ccmRule)) { + executeDdl( + session, + String.format( + "CREATE TABLE IF NOT EXISTS %s.bar (k int primary key)", sessionRule.keyspace())); + PreparedStatement pst = + session.prepare( + SimpleStatement.newInstance("SELECT * FROM bar WHERE k=?") + .setKeyspace(sessionRule.keyspace())); + + // Drop and re-create the table to invalidate the prepared statement server side + executeDdl(session, String.format("DROP TABLE %s.bar", sessionRule.keyspace())); + executeDdl( + session, + String.format("CREATE TABLE %s.bar (k int primary key)", sessionRule.keyspace())); + assertThat(preparedStatementExistsOnServer(session, pst.getId())).isFalse(); + + // This will re-prepare on the fly + session.execute(pst.bind(0)); + 
assertThat(preparedStatementExistsOnServer(session, pst.getId())).isTrue(); + } + } + + private void executeDdl(CqlSession session, String query) { + session.execute(SimpleStatement.builder(query).setTimeout(Duration.ofSeconds(30)).build()); + } + + private boolean preparedStatementExistsOnServer(CqlSession session, ByteBuffer id) { + ResultSet resultSet = + session.execute( + SimpleStatement.newInstance( + "SELECT * FROM system.prepared_statements WHERE prepared_id = ?", id)); + return resultSet.iterator().hasNext(); + } } From 8e42676d6a2d7db7b8912409f63a72e92cd7e151 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 7 Aug 2019 15:55:47 -0700 Subject: [PATCH 044/979] Bump version to 4.2.0-SNAPSHOT --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index b22863c0156..4867c6578c4 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index c436191ece9..9c925c913b4 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index a4e8fb60508..7918284c369 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index c4500bc762e..1c8053de782 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT 
java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 1d8ee49696f..b2ca1785468 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 07b755f862c..0a6aa020c81 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index da5be25bded..60e2481e104 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 67a1e09c5c3..a34fa29fcd3 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) diff --git a/query-builder/pom.xml b/query-builder/pom.xml index e2ac5398bd9..5fe6dd58261 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 828f24fc144..2c5c5de59d3 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.1.1-SNAPSHOT + 4.2.0-SNAPSHOT java-driver-test-infra From a3d2ef2a3bc1052cd0c09d6af415db20ce30f11c Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 5 Aug 2019 15:02:52 -0700 Subject: [PATCH 045/979] Don't wrap unchecked exceptions in DriverExecutionException Per DriverExecutionException's contract. This reverts 4d79ac8e09d4e1b08b0828aac931474554941b6e. 
--- .../util/concurrent/CompletableFutures.java | 2 ++ .../core/config/DriverConfigValidationIT.java | 4 +--- .../oss/driver/api/core/cql/QueryTraceIT.java | 23 ++----------------- 3 files changed, 5 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java index 26c244db7e3..bd3973ddfd9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.DriverExecutionException; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.base.Throwables; import java.util.List; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; @@ -147,6 +148,7 @@ public static T getUninterruptibly(CompletionStage stage) { if (cause instanceof DriverException) { throw ((DriverException) cause).copy(); } + Throwables.throwIfUnchecked(cause); throw new DriverExecutionException(cause); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java index 60fb91fe6b9..911122746e1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import 
com.datastax.oss.driver.api.core.DriverExecutionException; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -57,8 +56,7 @@ private void should_fail_to_init_with_invalid_policy(DefaultDriverOption option) assertThatThrownBy(() -> SessionUtils.newSession(simulacron, loader)) .satisfies( error -> { - assertThat(error).isInstanceOf(DriverExecutionException.class); - assertThat(error.getCause()) + assertThat(error) .isInstanceOf(IllegalArgumentException.class) .hasMessageContaining( "Can't find class AClassThatDoesNotExist " diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java index f01007ce3e1..1ff2f083c87 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java @@ -18,14 +18,11 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DriverExecutionException; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.net.InetSocketAddress; -import org.hamcrest.Description; -import org.hamcrest.TypeSafeMatcher; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; @@ -55,24 +52,8 @@ public void should_not_have_tracing_id_when_tracing_disabled() { assertThat(executionInfo.getTracingId()).isNull(); - // Should get a DriverExecutionException with an underlying IllegalStateException indicating - // Tracing was disabled. 
- thrown.expect(DriverExecutionException.class); - String expectedMessage = "Tracing was disabled for this request"; - thrown.expectCause( - new TypeSafeMatcher() { - @Override - public void describeTo(Description description) { - description.appendText( - "Expected IllegalStateException with message of '" + expectedMessage + "'"); - } - - @Override - protected boolean matchesSafely(Throwable item) { - return item instanceof IllegalStateException - && item.getMessage().equals(expectedMessage); - } - }); + thrown.expect(IllegalStateException.class); + thrown.expectMessage("Tracing was disabled for this request"); executionInfo.getQueryTrace(); } From d5ffa10709479371492b8df7cf19829d52fc8d6b Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 5 Aug 2019 11:39:10 -0700 Subject: [PATCH 046/979] Generify AdminRequestHandler Motivation: We now have a need to read the response for message types other than ROWS. AdminResult is not well-suited for this; we could introduce subclasses, but that would require awkward casting. Modifications: Generify AdminRequestHandler over the element type of the returned future. That type is provided at construction time (this is abstracted behind the static factory methods). Result: Clients use the factory method matching their expected response type; they get a future that contains the right object directly. 
--- .../adminrequest/AdminRequestHandler.java | 75 +++++++++++-------- .../ThrottledAdminRequestHandler.java | 58 ++++++++++++-- .../internal/core/cql/CqlPrepareHandler.java | 8 +- .../internal/core/cql/CqlRequestHandler.java | 7 +- .../internal/core/session/ReprepareOnUp.java | 18 +++-- .../core/session/ReprepareOnUpTest.java | 15 +++- 6 files changed, 127 insertions(+), 54 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java index 6ccf1651e1f..6ab32f1adc6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java @@ -28,6 +28,7 @@ import com.datastax.oss.protocol.internal.request.Query; import com.datastax.oss.protocol.internal.request.query.QueryOptions; import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.result.Prepared; import com.datastax.oss.protocol.internal.response.result.Rows; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.ScheduledFuture; @@ -46,15 +47,22 @@ /** Handles the lifecyle of an admin request (such as a node refresh or schema refresh query). 
*/ @ThreadSafe -public class AdminRequestHandler implements ResponseCallback { +public class AdminRequestHandler implements ResponseCallback { private static final Logger LOG = LoggerFactory.getLogger(AdminRequestHandler.class); - public static AdminRequestHandler query( + public static AdminRequestHandler call( DriverChannel channel, Query query, Duration timeout, String logPrefix) { - return createAdminRequestHandler(channel, query, Collections.emptyMap(), timeout, logPrefix); + return new AdminRequestHandler<>( + channel, + query, + Frame.NO_PAYLOAD, + timeout, + logPrefix, + "call '" + query.query + "'", + com.datastax.oss.protocol.internal.response.result.Void.class); } - public static AdminRequestHandler query( + public static AdminRequestHandler query( DriverChannel channel, String query, Map parameters, @@ -65,25 +73,15 @@ public static AdminRequestHandler query( new Query( query, buildQueryOptions(pageSize, serialize(parameters, channel.protocolVersion()), null)); - return createAdminRequestHandler(channel, message, parameters, timeout, logPrefix); - } - - private static AdminRequestHandler createAdminRequestHandler( - DriverChannel channel, - Query message, - Map parameters, - Duration timeout, - String logPrefix) { - String debugString = "query '" + message.query + "'"; if (!parameters.isEmpty()) { debugString += " with parameters " + parameters; } - return new AdminRequestHandler( - channel, message, Frame.NO_PAYLOAD, timeout, logPrefix, debugString); + return new AdminRequestHandler<>( + channel, message, Frame.NO_PAYLOAD, timeout, logPrefix, debugString, Rows.class); } - public static AdminRequestHandler query( + public static AdminRequestHandler query( DriverChannel channel, String query, Duration timeout, int pageSize, String logPrefix) { return query(channel, query, Collections.emptyMap(), timeout, pageSize, logPrefix); } @@ -94,27 +92,30 @@ public static AdminRequestHandler query( private final Duration timeout; private final String logPrefix; 
private final String debugString; - protected final CompletableFuture result = new CompletableFuture<>(); + private final Class expectedResponseType; + protected final CompletableFuture result = new CompletableFuture<>(); // This is only ever accessed on the channel's event loop, so it doesn't need to be volatile private ScheduledFuture timeoutFuture; - public AdminRequestHandler( + protected AdminRequestHandler( DriverChannel channel, Message message, Map customPayload, Duration timeout, String logPrefix, - String debugString) { + String debugString, + Class expectedResponseType) { this.channel = channel; this.message = message; this.customPayload = customPayload; this.timeout = timeout; this.logPrefix = logPrefix; this.debugString = debugString; + this.expectedResponseType = expectedResponseType; } - public CompletionStage start() { + public CompletionStage start() { LOG.debug("[{}] Executing {}", logPrefix, this); channel.write(message, false, customPayload, this).addListener(this::onWriteComplete); return result; @@ -158,22 +159,31 @@ public void onResponse(Frame responseFrame) { } Message message = responseFrame.message; LOG.debug("[{}] Got response {}", logPrefix, responseFrame.message); - if (message instanceof Rows) { + if (!expectedResponseType.isInstance(message)) { + // Note that this also covers error responses, no need to get too fancy here + setFinalError(new UnexpectedResponseException(debugString, message)); + } else if (expectedResponseType == Rows.class) { Rows rows = (Rows) message; ByteBuffer pagingState = rows.getMetadata().pagingState; AdminRequestHandler nextHandler = (pagingState == null) ? 
null : this.copy(pagingState); - setFinalResult(new AdminResult(rows, nextHandler, channel.protocolVersion())); - } else if (message instanceof Result) { - - // Internal prepares are only "reprepare on up" types of queries, where we only care about - // success, not the actual result, so this is good enough: + // The public factory methods guarantee that expectedResponseType and ResultT always match: + @SuppressWarnings("unchecked") + ResultT result = (ResultT) new AdminResult(rows, nextHandler, channel.protocolVersion()); + setFinalResult(result); + } else if (expectedResponseType == Prepared.class) { + Prepared prepared = (Prepared) message; + @SuppressWarnings("unchecked") + ResultT result = (ResultT) ByteBuffer.wrap(prepared.preparedQueryId); + setFinalResult(result); + } else if (expectedResponseType + == com.datastax.oss.protocol.internal.response.result.Void.class) { setFinalResult(null); } else { - setFinalError(new UnexpectedResponseException(debugString, message)); + setFinalError(new AssertionError("Unhandled response type" + expectedResponseType)); } } - protected boolean setFinalResult(AdminResult result) { + protected boolean setFinalResult(ResultT result) { return this.result.complete(result); } @@ -181,19 +191,20 @@ protected boolean setFinalError(Throwable error) { return result.completeExceptionally(error); } - private AdminRequestHandler copy(ByteBuffer pagingState) { + private AdminRequestHandler copy(ByteBuffer pagingState) { assert message instanceof Query; Query current = (Query) this.message; QueryOptions currentOptions = current.options; QueryOptions newOptions = buildQueryOptions(currentOptions.pageSize, currentOptions.namedValues, pagingState); - return new AdminRequestHandler( + return new AdminRequestHandler<>( channel, new Query(current.query, newOptions), customPayload, timeout, logPrefix, - debugString); + debugString, + expectedResponseType); } private static QueryOptions buildQueryOptions( diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java index a4d9809a32d..712f338c8da 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java @@ -23,6 +23,9 @@ import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.result.Prepared; +import com.datastax.oss.protocol.internal.response.result.Rows; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.ByteBuffer; import java.time.Duration; @@ -32,13 +35,55 @@ import net.jcip.annotations.ThreadSafe; @ThreadSafe -public class ThrottledAdminRequestHandler extends AdminRequestHandler implements Throttled { +public class ThrottledAdminRequestHandler extends AdminRequestHandler + implements Throttled { + + public static ThrottledAdminRequestHandler query( + DriverChannel channel, + Message message, + Map customPayload, + Duration timeout, + RequestThrottler throttler, + SessionMetricUpdater metricUpdater, + String logPrefix, + String debugString) { + return new ThrottledAdminRequestHandler<>( + channel, + message, + customPayload, + timeout, + throttler, + metricUpdater, + logPrefix, + debugString, + Rows.class); + } + + public static ThrottledAdminRequestHandler prepare( + DriverChannel channel, + Message message, + Map customPayload, + Duration timeout, + RequestThrottler throttler, + SessionMetricUpdater metricUpdater, + String logPrefix) { + return new ThrottledAdminRequestHandler<>( + channel, + message, + customPayload, + timeout, + 
throttler, + metricUpdater, + logPrefix, + message.toString(), + Prepared.class); + } private final long startTimeNanos; private final RequestThrottler throttler; private final SessionMetricUpdater metricUpdater; - public ThrottledAdminRequestHandler( + protected ThrottledAdminRequestHandler( DriverChannel channel, Message message, Map customPayload, @@ -46,15 +91,16 @@ public ThrottledAdminRequestHandler( RequestThrottler throttler, SessionMetricUpdater metricUpdater, String logPrefix, - String debugString) { - super(channel, message, customPayload, timeout, logPrefix, debugString); + String debugString, + Class expectedResponseType) { + super(channel, message, customPayload, timeout, logPrefix, debugString, expectedResponseType); this.startTimeNanos = System.nanoTime(); this.throttler = throttler; this.metricUpdater = metricUpdater; } @Override - public CompletionStage start() { + public CompletionStage start() { // Don't write request yet, wait for green light from throttler throttler.register(this); return result; @@ -79,7 +125,7 @@ public void onThrottleFailure(@NonNull RequestThrottlingException error) { } @Override - protected boolean setFinalResult(AdminResult result) { + protected boolean setFinalResult(ResultT result) { boolean wasSet = super.setFinalResult(result); if (wasSet) { throttler.signalSuccess(this); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index cc9c9ea0cfb..2caa19649d2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -55,6 +55,7 @@ import io.netty.util.Timer; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; +import java.nio.ByteBuffer; import java.time.Duration; import java.util.AbstractMap; import java.util.ArrayList; 
@@ -279,16 +280,15 @@ private CompletionStage prepareOnOtherNode(Node node) { LOG.trace("[{}] Could not get a channel to reprepare on {}, skipping", logPrefix, node); return CompletableFuture.completedFuture(null); } else { - ThrottledAdminRequestHandler handler = - new ThrottledAdminRequestHandler( + ThrottledAdminRequestHandler handler = + ThrottledAdminRequestHandler.prepare( channel, message, request.getCustomPayload(), timeout, throttler, session.getMetricUpdater(), - logPrefix, - message.toString()); + logPrefix); return handler .start() .handle( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 0b85aa68f70..d9f785ee5b5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -655,16 +655,15 @@ private void processErrorResponse(Error errorMessage) { Bytes.toHexString(id))); } Prepare reprepareMessage = repreparePayload.toMessage(); - ThrottledAdminRequestHandler reprepareHandler = - new ThrottledAdminRequestHandler( + ThrottledAdminRequestHandler reprepareHandler = + ThrottledAdminRequestHandler.prepare( channel, reprepareMessage, repreparePayload.customPayload, timeout, throttler, sessionMetricUpdater, - logPrefix, - "Reprepare " + reprepareMessage.toString()); + logPrefix); reprepareHandler .start() .handle( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java index bd65c045673..367314c5c6c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java @@ -220,11 +220,10 @@ private void startWorker() { } } else { RepreparePayload payload = 
toReprepare.poll(); - queryAsync( + prepareAsync( new Prepare( payload.query, (payload.keyspace == null ? null : payload.keyspace.asInternal())), - payload.customPayload, - String.format("Reprepare '%s'", payload.query)) + payload.customPayload) .handle( (result, error) -> { // Don't log, AdminRequestHandler does already @@ -237,8 +236,8 @@ private void startWorker() { @VisibleForTesting protected CompletionStage queryAsync( Message message, Map customPayload, String debugString) { - ThrottledAdminRequestHandler reprepareHandler = - new ThrottledAdminRequestHandler( + ThrottledAdminRequestHandler reprepareHandler = + ThrottledAdminRequestHandler.query( channel, message, customPayload, @@ -249,4 +248,13 @@ protected CompletionStage queryAsync( debugString); return reprepareHandler.start(); } + + @VisibleForTesting + protected CompletionStage prepareAsync( + Message message, Map customPayload) { + ThrottledAdminRequestHandler reprepareHandler = + ThrottledAdminRequestHandler.prepare( + channel, message, customPayload, timeout, throttler, metricUpdater, logPrefix); + return reprepareHandler.start(); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java index 6bb875d1dbd..3f13f91ea77 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java @@ -334,15 +334,24 @@ protected CompletionStage queryAsync( queries.add(new MockAdminQuery(message, resultFuture)); return resultFuture; } + + @Override + protected CompletionStage prepareAsync( + Message message, Map customPayload) { + CompletableFuture resultFuture = new CompletableFuture<>(); + queries.add(new MockAdminQuery(message, resultFuture)); + return resultFuture; + } } private static class MockAdminQuery { private final Message request; - private 
final CompletableFuture resultFuture; + private final CompletableFuture resultFuture; - public MockAdminQuery(Message request, CompletableFuture resultFuture) { + @SuppressWarnings("unchecked") + public MockAdminQuery(Message request, CompletableFuture resultFuture) { this.request = request; - this.resultFuture = resultFuture; + this.resultFuture = (CompletableFuture) resultFuture; } } From 1a5abb1036cb671a2e6874e13cda6c11c4720f51 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 5 Aug 2019 15:09:52 -0700 Subject: [PATCH 047/979] JAVA-2379: Fail fast if prepared id doesn't match when repreparing on the fly --- changelog/README.md | 1 + .../internal/core/cql/CqlRequestHandler.java | 27 ++++++++--- .../api/core/cql/PreparedStatementIT.java | 45 +++++++++++++++---- 3 files changed, 60 insertions(+), 13 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 8400dd924dc..c92d547b74a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2379: Fail fast if prepared id doesn't match when repreparing on the fly - [bug] JAVA-2375: Use per-request keyspace when repreparing on the fly - [improvement] JAVA-2370: Remove auto-service plugin from mapper processor - [improvement] JAVA-2377: Add a config option to make driver threads daemon diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index d9f785ee5b5..c5f3985957d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -645,14 +645,18 @@ public void onResponse(Frame responseFrame) { private void processErrorResponse(Error errorMessage) { if (errorMessage.code == ProtocolConstants.ErrorCode.UNPREPARED) { - LOG.trace("[{}] Statement is not prepared on {}, repreparing", logPrefix, 
node); - ByteBuffer id = ByteBuffer.wrap(((Unprepared) errorMessage).id); - RepreparePayload repreparePayload = session.getRepreparePayloads().get(id); + ByteBuffer idToReprepare = ByteBuffer.wrap(((Unprepared) errorMessage).id); + LOG.trace( + "[{}] Statement {} is not prepared on {}, repreparing", + logPrefix, + Bytes.toHexString(idToReprepare), + node); + RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); if (repreparePayload == null) { throw new IllegalStateException( String.format( "Tried to execute unprepared query %s but we don't have the data to reprepare it", - Bytes.toHexString(id))); + Bytes.toHexString(idToReprepare))); } Prepare reprepareMessage = repreparePayload.toMessage(); ThrottledAdminRequestHandler reprepareHandler = @@ -667,7 +671,7 @@ private void processErrorResponse(Error errorMessage) { reprepareHandler .start() .handle( - (result, exception) -> { + (repreparedId, exception) -> { if (exception != null) { // If the error is not recoverable, surface it to the client instead of retrying if (exception instanceof UnexpectedResponseException) { @@ -695,6 +699,19 @@ private void processErrorResponse(Error errorMessage) { LOG.trace("[{}] Reprepare failed, trying next node", logPrefix); sendRequest(null, queryPlan, execution, retryCount, false); } else { + if (!repreparedId.equals(idToReprepare)) { + IllegalStateException illegalStateException = + new IllegalStateException( + String.format( + "ID mismatch while trying to reprepare (expected %s, got %s). " + + "This prepared statement won't work anymore. " + + "This usually happens when you run a 'USE...' 
query after " + + "the statement was prepared.", + Bytes.toHexString(idToReprepare), + Bytes.toHexString(repreparedId))); + trackNodeError(node, illegalStateException, NANOTIME_NOT_MEASURED_YET); + setFinalError(illegalStateException, node, execution); + } LOG.trace("[{}] Reprepare sucessful, retrying", logPrefix); sendRequest(node, queryPlan, execution, retryCount, false); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java index 994890eeb81..386722be8f5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java @@ -15,8 +15,8 @@ */ package com.datastax.oss.driver.api.core.cql; -import static junit.framework.TestCase.fail; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.codahale.metrics.Gauge; import com.datastax.oss.driver.api.core.CqlSession; @@ -37,6 +37,7 @@ import java.nio.ByteBuffer; import java.time.Duration; import java.util.concurrent.CompletionStage; +import junit.framework.TestCase; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -83,15 +84,17 @@ public void setupSchema() { "INSERT INTO prepared_statement_test (a, b, c) VALUES (2, 2, 2)", "INSERT INTO prepared_statement_test (a, b, c) VALUES (3, 3, 3)", "INSERT INTO prepared_statement_test (a, b, c) VALUES (4, 4, 4)")) { - sessionRule - .session() - .execute( - SimpleStatement.builder(query) - .setExecutionProfile(sessionRule.slowProfile()) - .build()); + executeDdl(query); } } + private void executeDdl(String query) { + sessionRule + .session() + .execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + @Test public void 
should_have_empty_result_definitions_for_insert_query_without_bound_variable() { try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { @@ -194,7 +197,7 @@ public void should_update_metadata_when_schema_changed_across_pages() { for (Row row : rows.currentPage()) { try { row.getInt("d"); - fail("expected an error"); + TestCase.fail("expected an error"); } catch (IllegalArgumentException e) { /*expected*/ } @@ -417,6 +420,32 @@ public void should_create_separate_instances_for_different_statement_parameters( assertThat(firstPageOf(session.executeAsync(preparedStatement2.bind()))).hasSize(4); } + /** + * This test relies on CASSANDRA-15252 to reproduce the error condition. If the bug gets fixed in + * Cassandra, we'll need to add a version restriction. + * + * @see CASSANDRA-15252 + */ + @Test + public void should_fail_fast_if_id_changes_on_reprepare() { + try (CqlSession session = SessionUtils.newSession(ccmRule)) { + PreparedStatement preparedStatement = + session.prepare( + String.format( + "SELECT * FROM %s.prepared_statement_test WHERE a = ?", sessionRule.keyspace())); + + session.execute("USE " + sessionRule.keyspace().asCql(false)); + + // Drop and recreate the table to invalidate the prepared statement server-side + executeDdl("DROP TABLE prepared_statement_test"); + executeDdl("CREATE TABLE prepared_statement_test (a int PRIMARY KEY, b int, c int)"); + + assertThatThrownBy(() -> session.execute(preparedStatement.bind(1))) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("ID mismatch while trying to reprepare"); + } + } + private static Iterable firstPageOf(CompletionStage stage) { return CompletableFutures.getUninterruptibly(stage).currentPage(); } From 07550431e7b0628bdda0ac26f0fd8191fdf282c2 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 7 Aug 2019 18:19:29 -0700 Subject: [PATCH 048/979] JAVA-2135: Refactor integration test packages --- .../core/retry/DefaultRetryPolicy.java | 11 +-- .../oss/driver/{api => 
}/core/ConnectIT.java | 5 +- .../{api => }/core/ConnectKeyspaceIT.java | 5 +- .../ProtocolVersionInitialNegotiationIT.java | 6 +- .../core/ProtocolVersionMixedClusterIT.java | 5 +- .../core/auth/PlainTextAuthProviderIT.java | 2 +- .../core/compression/DirectCompressionIT.java | 2 +- .../core/compression/HeapCompressionIT.java | 2 +- .../core/config/DriverConfigValidationIT.java | 4 +- .../core/config/DriverExecutionProfileIT.java | 5 +- .../DriverExecutionProfileReloadIT.java | 2 +- .../connection/ChannelSocketOptionsIT.java | 2 +- .../core/connection/FrameLengthIT.java | 3 +- .../core/context/LifecycleListenerIT.java | 2 +- .../{api => }/core/cql/AsyncResultSetIT.java | 9 +- .../{api => }/core/cql/BatchStatementIT.java | 11 ++- .../{api => }/core/cql/BoundStatementIT.java | 11 ++- .../core/cql/ExecutionInfoWarningsIT.java | 6 +- .../core/cql/PagingIterableSpliteratorIT.java | 9 +- .../core/cql/PerRequestKeyspaceIT.java | 9 +- .../core/cql/PreparedStatementIT.java | 9 +- .../{api => }/core/cql/QueryTraceIT.java | 5 +- .../{api => }/core/cql/SimpleStatementIT.java | 7 +- .../{api => }/core/data/DataTypeIT.java | 7 +- .../core/heartbeat/HeartbeatDisabledIT.java | 2 +- .../{api => }/core/heartbeat/HeartbeatIT.java | 2 +- .../DefaultLoadBalancingPolicyIT.java | 3 +- .../core/loadbalancing/NodeTargetingIT.java | 2 +- .../PerProfileLoadBalancingPolicyIT.java | 4 +- .../core/metadata/ByteOrderedTokenIT.java | 2 +- .../metadata/ByteOrderedTokenVnodesIT.java | 2 +- .../core/metadata/CaseSensitiveUdtIT.java | 2 +- .../{api => }/core/metadata/DescribeIT.java | 2 +- .../core/metadata/Murmur3TokenIT.java | 2 +- .../core/metadata/Murmur3TokenVnodesIT.java | 4 +- .../core/metadata/NodeMetadataIT.java | 4 +- .../{api => }/core/metadata/NodeStateIT.java | 7 +- .../core/metadata/RandomTokenIT.java | 2 +- .../core/metadata/RandomTokenVnodesIT.java | 2 +- .../core/metadata/SchemaAgreementIT.java | 2 +- .../core/metadata/SchemaChangesIT.java | 90 ++++++++++--------- .../{api => 
}/core/metadata/SchemaIT.java | 4 +- .../core/metadata/TableOptionsIT.java | 2 +- .../{api => }/core/metadata/TokenITBase.java | 4 +- .../{api => }/core/metrics/MetricsIT.java | 4 +- .../core/retry/DefaultRetryPolicyIT.java | 3 +- .../core/retry/PerProfileRetryPolicyIT.java | 4 +- .../{api => }/core/session/ExceptionIT.java | 2 +- .../{api => }/core/session/RemovedNodeIT.java | 2 +- .../core/session/RequestProcessorIT.java | 2 +- .../{api => }/core/session/ShutdownIT.java | 2 +- .../core/specex/SpeculativeExecutionIT.java | 3 +- ...tSslEngineFactoryHostnameValidationIT.java | 2 +- .../core/ssl/DefaultSslEngineFactoryIT.java | 2 +- ...efaultSslEngineFactoryPropertyBasedIT.java | 2 +- ...eFactoryPropertyBasedWithClientAuthIT.java | 2 +- ...faultSslEngineFactoryWithClientAuthIT.java | 2 +- .../core/throttling/ThrottlingIT.java | 2 +- .../core/tracker/RequestLoggerIT.java | 2 +- .../tracker/RequestNodeLoggerExample.java | 2 +- .../core/type/codec/CqlIntToStringCodec.java | 2 +- .../type/codec/registry/CodecRegistryIT.java | 2 +- .../guava/internal/KeyRequestProcessor.java | 2 +- .../querybuilder/JacksonJsonCodec.java | 2 +- .../{api => }/querybuilder/JsonInsertIT.java | 2 +- 65 files changed, 220 insertions(+), 111 deletions(-) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ConnectIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ConnectKeyspaceIT.java (93%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ProtocolVersionInitialNegotiationIT.java (92%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ProtocolVersionMixedClusterIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/auth/PlainTextAuthProviderIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/compression/DirectCompressionIT.java (98%) rename 
integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/compression/HeapCompressionIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/config/DriverConfigValidationIT.java (94%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/config/DriverExecutionProfileIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/config/DriverExecutionProfileReloadIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/connection/ChannelSocketOptionsIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/connection/FrameLengthIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/context/LifecycleListenerIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/AsyncResultSetIT.java (94%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/BatchStatementIT.java (96%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/BoundStatementIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/ExecutionInfoWarningsIT.java (96%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/PagingIterableSpliteratorIT.java (92%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/PerRequestKeyspaceIT.java (95%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/PreparedStatementIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/QueryTraceIT.java (94%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/cql/SimpleStatementIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/data/DataTypeIT.java (98%) rename 
integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/heartbeat/HeartbeatDisabledIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/heartbeat/HeartbeatIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/loadbalancing/DefaultLoadBalancingPolicyIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/loadbalancing/NodeTargetingIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java (96%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/ByteOrderedTokenIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/ByteOrderedTokenVnodesIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/CaseSensitiveUdtIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/DescribeIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/Murmur3TokenIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/Murmur3TokenVnodesIT.java (95%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/NodeMetadataIT.java (95%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/NodeStateIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/RandomTokenIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/RandomTokenVnodesIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/SchemaAgreementIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/SchemaChangesIT.java (86%) rename 
integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/SchemaIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/TableOptionsIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metadata/TokenITBase.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/metrics/MetricsIT.java (96%) rename integration-tests/src/test/java/com/datastax/oss/driver/{internal => }/core/retry/DefaultRetryPolicyIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/retry/PerProfileRetryPolicyIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/session/ExceptionIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/session/RemovedNodeIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/session/RequestProcessorIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/session/ShutdownIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/specex/SpeculativeExecutionIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ssl/DefaultSslEngineFactoryIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => 
}/core/throttling/ThrottlingIT.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/tracker/RequestLoggerIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/tracker/RequestNodeLoggerExample.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{internal => }/core/type/codec/CqlIntToStringCodec.java (97%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/core/type/codec/registry/CodecRegistryIT.java (99%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/querybuilder/JacksonJsonCodec.java (98%) rename integration-tests/src/test/java/com/datastax/oss/driver/{api => }/querybuilder/JsonInsertIT.java (99%) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java index c15cfb41baa..b988f66fce8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java @@ -59,26 +59,27 @@ public class DefaultRetryPolicy implements RetryPolicy { private static final Logger LOG = LoggerFactory.getLogger(DefaultRetryPolicy.class); @VisibleForTesting - static final String RETRYING_ON_READ_TIMEOUT = + public static final String RETRYING_ON_READ_TIMEOUT = "[{}] Retrying on read timeout on same host (consistency: {}, required responses: {}, " + "received responses: {}, data retrieved: {}, retries: {})"; @VisibleForTesting - static final String RETRYING_ON_WRITE_TIMEOUT = + public static final String RETRYING_ON_WRITE_TIMEOUT = "[{}] Retrying on write timeout on same host (consistency: {}, write type: {}, " + "required acknowledgments: {}, received acknowledgments: {}, retries: {})"; @VisibleForTesting - static final String RETRYING_ON_UNAVAILABLE = + public static final String 
RETRYING_ON_UNAVAILABLE = "[{}] Retrying on unavailable exception on next host (consistency: {}, " + "required replica: {}, alive replica: {}, retries: {})"; @VisibleForTesting - static final String RETRYING_ON_ABORTED = + public static final String RETRYING_ON_ABORTED = "[{}] Retrying on aborted request on next host (retries: {})"; @VisibleForTesting - static final String RETRYING_ON_ERROR = "[{}] Retrying on node error on next host (retries: {})"; + public static final String RETRYING_ON_ERROR = + "[{}] Retrying on node error on next host (retries: {})"; private final String logPrefix; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 6d1b525f23a..995da1903b4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -13,13 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core; +package com.datastax.oss.driver.core; import static com.datastax.oss.driver.api.testinfra.utils.ConditionChecker.checkThat; import static java.util.concurrent.TimeUnit.SECONDS; import static junit.framework.TestCase.fail; import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.context.DriverContext; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java similarity index 93% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectKeyspaceIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java index 32347d57e0b..51c0280cd0a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ConnectKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java @@ -13,10 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core; +package com.datastax.oss.driver.core; import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.InvalidKeyspaceException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.session.Session; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java similarity index 92% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionInitialNegotiationIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index d1f228c803d..4f21a76fd6a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -13,11 +13,15 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core; +package com.datastax.oss.driver.core; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionMixedClusterIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionMixedClusterIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java index 2d8c08ddeb2..a2b1851b2d3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ProtocolVersionMixedClusterIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java @@ -13,11 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core; +package com.datastax.oss.driver.core; import static com.datastax.oss.driver.assertions.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java index 49db7e5cbf1..4453f069d5f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.auth; +package com.datastax.oss.driver.core.auth; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/DirectCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/DirectCompressionIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java index e788e5352f8..bc1ab888085 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/DirectCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.compression; +package com.datastax.oss.driver.core.compression; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.offset; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/HeapCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/HeapCompressionIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java index 809b0083ac3..10d320bb2b2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/compression/HeapCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.compression; +package com.datastax.oss.driver.core.compression; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.offset; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java index 911122746e1..7aeec29eee0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverConfigValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java @@ -13,11 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.config; +package com.datastax.oss.driver.core.config; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java index f261ac0d3c4..9ccf6557022 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.config; +package com.datastax.oss.driver.core.config; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.serverError; @@ -25,6 +25,9 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.BatchStatement; import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileReloadIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileReloadIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java index 8f03e8aad94..0a42e6df7da 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfileReloadIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.config; +package com.datastax.oss.driver.core.config; import static com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader.DEFAULT_CONFIG_SUPPLIER; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/ChannelSocketOptionsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/ChannelSocketOptionsIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java index 72ae2fdad61..e9dd65b9b18 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/ChannelSocketOptionsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.connection; +package com.datastax.oss.driver.core.connection; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SOCKET_KEEP_ALIVE; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SOCKET_LINGER_INTERVAL; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/FrameLengthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/FrameLengthIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java index 5366d76e31b..ff3eaf8379c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/connection/FrameLengthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.connection; +package com.datastax.oss.driver.core.connection; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.connection.FrameTooLongException; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/context/LifecycleListenerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/context/LifecycleListenerIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java index 58600623734..2434c1cf8a6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/context/LifecycleListenerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.context; +package com.datastax.oss.driver.core.context; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java index aaff6449730..87526e529c7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/AsyncResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java @@ -13,12 +13,19 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BatchStatementIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java similarity index 96% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BatchStatementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java index 85f21a7a79c..9246ce92780 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BatchStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java @@ -13,13 +13,22 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BoundStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java similarity index 97% rename from 
integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BoundStatementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java index 7ea0b4c154a..10ac76cd36d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/BoundStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.query; @@ -29,6 +29,13 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatementBuilder; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; @@ -37,10 +44,10 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.core.type.codec.CqlIntToStringCodec; import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; import 
com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.type.codec.CqlIntToStringCodec; import com.datastax.oss.driver.internal.core.util.RoutingKey; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/ExecutionInfoWarningsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java similarity index 96% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/ExecutionInfoWarningsIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java index 67d473b5c3e..778fbb94813 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/ExecutionInfoWarningsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.timeout; @@ -26,6 +26,10 @@ import ch.qos.logback.core.Appender; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PagingIterableSpliteratorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java similarity index 92% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PagingIterableSpliteratorIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java index 476510097b1..46c133331b7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PagingIterableSpliteratorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java @@ -13,13 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java similarity index 95% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java index 88190a39e32..32e9e331c52 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PerRequestKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java @@ -13,13 +13,20 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 386722be8f5..8aa04a35b0a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -23,6 +23,13 @@ import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.core.type.DataTypes; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index 1ff2f083c87..883e5df2eb1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -13,11 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.QueryTrace; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java index 4fe30d29557..a3cc9995113 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cql/SimpleStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.cql; +package com.datastax.oss.driver.core.cql; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; @@ -24,6 +24,11 @@ import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/data/DataTypeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/data/DataTypeIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java index 0510d95b96e..a71fdb3367a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/data/DataTypeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.data; +package com.datastax.oss.driver.core.data; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; @@ -29,6 +29,11 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.SettableByIndex; +import com.datastax.oss.driver.api.core.data.SettableByName; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.CustomType; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatDisabledIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatDisabledIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java index c6312e6484b..ca992b0bc3e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatDisabledIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.heartbeat; +package com.datastax.oss.driver.core.heartbeat; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java index febf21f93ff..dfee4c81fc8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/heartbeat/HeartbeatIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.heartbeat; +package com.datastax.oss.driver.core.heartbeat; import static com.datastax.oss.driver.api.testinfra.utils.ConditionChecker.checkThat; import static com.datastax.oss.driver.api.testinfra.utils.NodeUtils.waitForDown; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/DefaultLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/DefaultLoadBalancingPolicyIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java index 41309a36f8a..1370718a4e2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/DefaultLoadBalancingPolicyIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.loadbalancing; +package com.datastax.oss.driver.core.loadbalancing; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; @@ -26,6 +26,7 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeState; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/NodeTargetingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/NodeTargetingIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java index 0ee418acb1e..ed7898dd84d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/NodeTargetingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.loadbalancing; +package com.datastax.oss.driver.core.loadbalancing; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.unavailable; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java similarity index 96% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java index 88e2ee8b24c..58b918852a4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.loadbalancing; +package com.datastax.oss.driver.core.loadbalancing; import static com.datastax.oss.driver.assertions.Assertions.assertThat; @@ -25,6 +25,8 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java index 357c1078f99..b7e3aab8a5d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenVnodesIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java index 239b660345a..8ea6b65f58b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/ByteOrderedTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/CaseSensitiveUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/CaseSensitiveUdtIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java index 0587f29441e..a5d96084860 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/CaseSensitiveUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/DescribeIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index 94fb74ae1a8..660893c9f60 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java index 0009eb29323..e8dce5ce065 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java similarity index 95% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenVnodesIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java index ed384765f9d..d7439c92010 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/Murmur3TokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions 
and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -54,6 +54,6 @@ protected CqlSession session() { @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + createSchema(sessionRule.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java similarity index 95% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeMetadataIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index 0fc9bf3258a..2db23d1425b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -13,12 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeStateIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeStateIT.java index 0b41ece3ff6..9f47c212f1c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/NodeStateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeStateIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static com.datastax.oss.driver.assertions.Assertions.assertThat; import static com.datastax.oss.driver.assertions.Assertions.fail; @@ -32,6 +32,11 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.testinfra.session.SessionRule; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java index bdfad30aee0..97591689c48 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenVnodesIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java index 587b003b27f..bc74935e824 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/RandomTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaAgreementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaAgreementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java index 80934c5e129..a9977357b1e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaAgreementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java similarity index 86% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaChangesIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index be1d7ebf16b..6c81710552c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -85,9 +86,9 @@ public void should_handle_keyspace_creation() { newKeyspaceId), metadata -> metadata.getKeyspace(newKeyspaceId), keyspace -> { - assertThat(keyspace.getName()).isEqualTo(newKeyspaceId); - assertThat(keyspace.isDurableWrites()).isTrue(); - assertThat(keyspace.getReplication()) + Assertions.assertThat(keyspace.getName()).isEqualTo(newKeyspaceId); + Assertions.assertThat(keyspace.isDurableWrites()).isTrue(); + Assertions.assertThat(keyspace.getReplication()) .hasSize(2) .containsEntry("class", "org.apache.cassandra.locator.SimpleStrategy") .containsEntry("replication_factor", "1"); @@ -126,7 +127,7 @@ public void should_handle_keyspace_update() { + "AND durable_writes = 'false'", newKeyspaceId.asCql(true)), metadata -> metadata.getKeyspace(newKeyspaceId), - newKeyspace -> assertThat(newKeyspace.isDurableWrites()).isFalse(), + newKeyspace -> Assertions.assertThat(newKeyspace.isDurableWrites()).isFalse(), (listener, oldKeyspace, newKeyspace) -> verify(listener).onKeyspaceUpdated(newKeyspace, oldKeyspace), newKeyspaceId); @@ -143,17 +144,18 @@ public void should_handle_table_creation() { .orElseThrow(IllegalStateException::new) .getTable(CqlIdentifier.fromInternal("foo")), table -> { - assertThat(table.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - 
assertThat(table.getName().asInternal()).isEqualTo("foo"); - assertThat(table.getColumns()).containsOnlyKeys(CqlIdentifier.fromInternal("k")); - assertThat(table.getColumn(CqlIdentifier.fromInternal("k"))) + Assertions.assertThat(table.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); + Assertions.assertThat(table.getName().asInternal()).isEqualTo("foo"); + Assertions.assertThat(table.getColumns()) + .containsOnlyKeys(CqlIdentifier.fromInternal("k")); + Assertions.assertThat(table.getColumn(CqlIdentifier.fromInternal("k"))) .hasValueSatisfying( k -> { - assertThat(k.getType()).isEqualTo(DataTypes.INT); + Assertions.assertThat(k.getType()).isEqualTo(DataTypes.INT); Assertions.assertThat(table.getPartitionKey()) .containsExactly(k); }); - assertThat(table.getClusteringColumns()).isEmpty(); + Assertions.assertThat(table.getClusteringColumns()).isEmpty(); }, (listener, table) -> verify(listener).onTableCreated(table)); } @@ -179,7 +181,8 @@ public void should_handle_table_update() { metadata .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getTable(CqlIdentifier.fromInternal("foo"))), - newTable -> assertThat(newTable.getColumn(CqlIdentifier.fromInternal("v"))).isPresent(), + newTable -> + Assertions.assertThat(newTable.getColumn(CqlIdentifier.fromInternal("v"))).isPresent(), (listener, oldTable, newTable) -> verify(listener).onTableUpdated(newTable, oldTable)); } @@ -193,10 +196,11 @@ public void should_handle_type_creation() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("t"))), type -> { - assertThat(type.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - assertThat(type.getName().asInternal()).isEqualTo("t"); - assertThat(type.getFieldNames()).containsExactly(CqlIdentifier.fromInternal("i")); - assertThat(type.getFieldTypes()).containsExactly(DataTypes.INT); + Assertions.assertThat(type.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); + 
Assertions.assertThat(type.getName().asInternal()).isEqualTo("t"); + Assertions.assertThat(type.getFieldNames()) + .containsExactly(CqlIdentifier.fromInternal("i")); + Assertions.assertThat(type.getFieldTypes()).containsExactly(DataTypes.INT); }, (listener, type) -> verify(listener).onUserDefinedTypeCreated(type)); } @@ -223,7 +227,7 @@ public void should_handle_type_update() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("t"))), newType -> - assertThat(newType.getFieldNames()) + Assertions.assertThat(newType.getFieldNames()) .containsExactly(CqlIdentifier.fromInternal("i"), CqlIdentifier.fromInternal("j")), (listener, oldType, newType) -> verify(listener).onUserDefinedTypeUpdated(newType, oldType)); @@ -243,12 +247,13 @@ public void should_handle_view_creation() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getView(CqlIdentifier.fromInternal("highscores"))), view -> { - assertThat(view.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - assertThat(view.getName().asInternal()).isEqualTo("highscores"); - assertThat(view.getBaseTable().asInternal()).isEqualTo("scores"); - assertThat(view.includesAllColumns()).isFalse(); - assertThat(view.getWhereClause()).hasValue("game IS NOT NULL AND score IS NOT NULL"); - assertThat(view.getColumns()) + Assertions.assertThat(view.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); + Assertions.assertThat(view.getName().asInternal()).isEqualTo("highscores"); + Assertions.assertThat(view.getBaseTable().asInternal()).isEqualTo("scores"); + Assertions.assertThat(view.includesAllColumns()).isFalse(); + Assertions.assertThat(view.getWhereClause()) + .hasValue("game IS NOT NULL AND score IS NOT NULL"); + Assertions.assertThat(view.getColumns()) .containsOnlyKeys( CqlIdentifier.fromInternal("game"), CqlIdentifier.fromInternal("score"), @@ -291,7 +296,7 @@ public void should_handle_view_update() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> 
ks.getView(CqlIdentifier.fromInternal("highscores"))), newView -> - assertThat(newView.getOptions().get(CqlIdentifier.fromInternal("comment"))) + Assertions.assertThat(newView.getOptions().get(CqlIdentifier.fromInternal("comment"))) .isEqualTo("The best score for each game"), (listener, oldView, newView) -> verify(listener).onViewUpdated(newView, oldView)); } @@ -308,13 +313,14 @@ public void should_handle_function_creation() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getFunction(CqlIdentifier.fromInternal("id"), DataTypes.INT)), function -> { - assertThat(function.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - assertThat(function.getSignature().getName().asInternal()).isEqualTo("id"); - assertThat(function.getSignature().getParameterTypes()).containsExactly(DataTypes.INT); - assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); - assertThat(function.getLanguage()).isEqualTo("java"); - assertThat(function.isCalledOnNullInput()).isFalse(); - assertThat(function.getBody()).isEqualTo("return i;"); + Assertions.assertThat(function.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); + Assertions.assertThat(function.getSignature().getName().asInternal()).isEqualTo("id"); + Assertions.assertThat(function.getSignature().getParameterTypes()) + .containsExactly(DataTypes.INT); + Assertions.assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + Assertions.assertThat(function.getLanguage()).isEqualTo("java"); + Assertions.assertThat(function.isCalledOnNullInput()).isFalse(); + Assertions.assertThat(function.getBody()).isEqualTo("return i;"); }, (listener, function) -> verify(listener).onFunctionCreated(function)); } @@ -348,7 +354,7 @@ public void should_handle_function_update() { metadata .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getFunction(CqlIdentifier.fromInternal("id"), DataTypes.INT)), - newFunction -> assertThat(newFunction.getBody()).isEqualTo("return j;"), + newFunction -> 
Assertions.assertThat(newFunction.getBody()).isEqualTo("return j;"), (listener, oldFunction, newFunction) -> verify(listener).onFunctionUpdated(newFunction, oldFunction)); } @@ -365,15 +371,17 @@ public void should_handle_aggregate_creation() { .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getAggregate(CqlIdentifier.fromInternal("sum"), DataTypes.INT)), aggregate -> { - assertThat(aggregate.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - assertThat(aggregate.getSignature().getName().asInternal()).isEqualTo("sum"); - assertThat(aggregate.getSignature().getParameterTypes()).containsExactly(DataTypes.INT); - assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); - assertThat(aggregate.getStateFuncSignature().getName().asInternal()).isEqualTo("plus"); - assertThat(aggregate.getStateFuncSignature().getParameterTypes()) + Assertions.assertThat(aggregate.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); + Assertions.assertThat(aggregate.getSignature().getName().asInternal()).isEqualTo("sum"); + Assertions.assertThat(aggregate.getSignature().getParameterTypes()) + .containsExactly(DataTypes.INT); + Assertions.assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); + Assertions.assertThat(aggregate.getStateFuncSignature().getName().asInternal()) + .isEqualTo("plus"); + Assertions.assertThat(aggregate.getStateFuncSignature().getParameterTypes()) .containsExactly(DataTypes.INT, DataTypes.INT); - assertThat(aggregate.getFinalFuncSignature()).isEmpty(); - assertThat(aggregate.getInitCond()).hasValue(0); + Assertions.assertThat(aggregate.getFinalFuncSignature()).isEmpty(); + Assertions.assertThat(aggregate.getInitCond()).hasValue(0); }, (listener, aggregate) -> verify(listener).onAggregateCreated(aggregate)); } @@ -408,7 +416,7 @@ public void should_handle_aggregate_update() { metadata .getKeyspace(adminSessionRule.keyspace()) .flatMap(ks -> ks.getAggregate(CqlIdentifier.fromInternal("sum"), DataTypes.INT)), - newAggregate -> 
assertThat(newAggregate.getInitCond()).hasValue(1), + newAggregate -> Assertions.assertThat(newAggregate.getInitCond()).hasValue(1), (listener, oldAggregate, newAggregate) -> verify(listener).onAggregateUpdated(newAggregate, oldAggregate)); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 96f13aa8141..e9c227284d5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; @@ -24,6 +24,8 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.TokenMap; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TableOptionsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java similarity index 98% rename from 
integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TableOptionsIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java index 13e4e314df3..e26d3661bf4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TableOptionsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TokenITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TokenITBase.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java index 2fa682ccc2b..08ac446b22f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metadata/TokenITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metadata; +package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; @@ -25,6 +25,8 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.TokenMap; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.metadata.token.TokenRange; import com.datastax.oss.driver.api.core.session.Session; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metrics/MetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java similarity index 96% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/metrics/MetricsIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java index 5ac70b54b7b..0c577c8508a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/metrics/MetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.metrics; +package com.datastax.oss.driver.core.metrics; import static org.assertj.core.api.Assertions.assertThat; @@ -23,6 +23,8 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.utils.ConditionChecker; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicyIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java index 4527de4edf6..61039ddd642 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.internal.core.retry; +package com.datastax.oss.driver.core.retry; import static com.datastax.oss.simulacron.common.codec.ConsistencyLevel.LOCAL_QUORUM; import static com.datastax.oss.simulacron.common.codec.WriteType.BATCH_LOG; @@ -53,6 +53,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.QueryCounter; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.internal.core.retry.DefaultRetryPolicy; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.stubbing.CloseType; import com.datastax.oss.simulacron.common.stubbing.DisconnectAction; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/retry/PerProfileRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/retry/PerProfileRetryPolicyIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java index 69d0631e767..8c9d35f2b1e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/retry/PerProfileRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.retry; +package com.datastax.oss.driver.core.retry; import static com.datastax.oss.driver.assertions.Assertions.assertThat; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; @@ -29,6 +29,8 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.UnavailableException; import com.datastax.oss.driver.api.core.servererrors.WriteType; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ExceptionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ExceptionIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java index e0a26b93a2f..1c3b922a870 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ExceptionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.session; +package com.datastax.oss.driver.core.session; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.unavailable; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java index 4c3bb0b7fef..b9192786c9b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RemovedNodeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.session; +package com.datastax.oss.driver.core.session; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Node; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RequestProcessorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RequestProcessorIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java index e695e5a616a..dca70a5b0c2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/RequestProcessorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.session; +package com.datastax.oss.driver.core.session; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ShutdownIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ShutdownIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java index 7e24fac779e..9f10af80395 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/session/ShutdownIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.session; +package com.datastax.oss.driver.core.session; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java index fa36c143b42..cae21420723 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.specex; +package com.datastax.oss.driver.core.specex; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.isBootstrapping; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.QueryCounter; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java index eb594fcb22f..b3fc2644191 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.ssl; +package com.datastax.oss.driver.core.ssl; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java index cdc972f2dce..51409ccbb53 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.ssl; +package com.datastax.oss.driver.core.ssl; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java index 98eeff1cab8..927309eef00 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.ssl; +package com.datastax.oss.driver.core.ssl; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java index e0fcdb81503..dfe7674c15e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.ssl; +package com.datastax.oss.driver.core.ssl; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java index b0fd67b91ec..e9f4e5508b8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.ssl; +package com.datastax.oss.driver.core.ssl; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/throttling/ThrottlingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/throttling/ThrottlingIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java index 5e2acc06fd2..12d21dce299 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/throttling/ThrottlingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.core.throttling; +package com.datastax.oss.driver.core.throttling; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.RequestThrottlingException; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java index f40c5925efc..1686bbc67fe 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestLoggerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.tracker; +package com.datastax.oss.driver.core.tracker; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.serverError; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestNodeLoggerExample.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestNodeLoggerExample.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java index 77441987700..cf9ba4abd40 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/tracker/RequestNodeLoggerExample.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.tracker; +package com.datastax.oss.driver.core.tracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java similarity index 97% rename from integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java index f52d139f1b4..c133d3932b0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.internal.core.type.codec; +package com.datastax.oss.driver.core.type.codec; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index 361b397a4ac..dd07cd54b96 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.type.codec.registry; +package com.datastax.oss.driver.core.type.codec.registry; import static org.assertj.core.api.Assertions.assertThat; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/KeyRequestProcessor.java b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/KeyRequestProcessor.java index 949db88e389..6895e359e1d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/KeyRequestProcessor.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/KeyRequestProcessor.java @@ -18,8 +18,8 @@ import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.api.core.session.RequestProcessorIT; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.core.session.RequestProcessorIT; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.session.DefaultSession; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JacksonJsonCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java similarity index 98% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JacksonJsonCodec.java rename to integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java index 37fb471774b..f6ab774dfd3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JacksonJsonCodec.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java @@ -13,7 +13,7 @@ * See the License for the specific 
language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.querybuilder; +package com.datastax.oss.driver.querybuilder; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java index 1c80267ab4c..ef91c928a0e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/querybuilder/JsonInsertIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.api.querybuilder; +package com.datastax.oss.driver.querybuilder; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto; From fd665bcd0bfe98572e7e2fbaa929fe176b92b1e6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 06:54:36 -0700 Subject: [PATCH 049/979] JAVA-2153: Standardize test rule usage in ITs - favor class rules over method rules (when possible). - split tests that were using CcmRule and SimulacronRule within the same class. - use upper snake case for class rule names. - update contribution guidelines. 
--- CONTRIBUTING.md | 39 +++- .../datastax/oss/driver/core/ConnectIT.java | 22 +- .../oss/driver/core/ConnectKeyspaceIT.java | 15 +- .../core/auth/PlainTextAuthProviderIT.java | 12 +- .../core/compression/DirectCompressionIT.java | 13 +- .../core/compression/HeapCompressionIT.java | 13 +- .../core/config/DriverConfigValidationIT.java | 8 +- .../config/DriverExecutionProfileCcmIT.java | 97 +++++++++ .../DriverExecutionProfileReloadIT.java | 28 ++- ...> DriverExecutionProfileSimulacronIT.java} | 97 ++------- .../connection/ChannelSocketOptionsIT.java | 12 +- .../driver/core/connection/FrameLengthIT.java | 26 +-- .../core/context/LifecycleListenerIT.java | 8 +- .../oss/driver/core/cql/AsyncResultSetIT.java | 29 +-- .../oss/driver/core/cql/BatchStatementIT.java | 8 +- ...tementIT.java => BoundStatementCcmIT.java} | 176 ++-------------- .../core/cql/BoundStatementSimulacronIT.java | 188 ++++++++++++++++++ .../core/cql/ExecutionInfoWarningsIT.java | 15 +- .../core/cql/PagingIterableSpliteratorIT.java | 17 +- .../driver/core/cql/PreparedStatementIT.java | 106 +++++----- .../oss/driver/core/cql/QueryTraceIT.java | 13 +- ...ementIT.java => SimpleStatementCcmIT.java} | 129 +++--------- .../core/cql/SimpleStatementSimulacronIT.java | 103 ++++++++++ .../oss/driver/core/data/DataTypeIT.java | 53 ++--- .../core/heartbeat/HeartbeatDisabledIT.java | 7 +- .../driver/core/heartbeat/HeartbeatIT.java | 19 +- .../DefaultLoadBalancingPolicyIT.java | 33 +-- .../core/loadbalancing/NodeTargetingIT.java | 32 +-- .../PerProfileLoadBalancingPolicyIT.java | 21 +- .../core/metadata/ByteOrderedTokenIT.java | 13 +- .../metadata/ByteOrderedTokenVnodesIT.java | 13 +- .../core/metadata/CaseSensitiveUdtIT.java | 15 +- .../oss/driver/core/metadata/DescribeIT.java | 33 +-- .../driver/core/metadata/Murmur3TokenIT.java | 13 +- .../core/metadata/Murmur3TokenVnodesIT.java | 13 +- .../driver/core/metadata/NodeMetadataIT.java | 6 +- .../driver/core/metadata/RandomTokenIT.java | 13 +- 
.../core/metadata/RandomTokenVnodesIT.java | 13 +- .../core/metadata/SchemaAgreementIT.java | 27 +-- .../oss/driver/core/metrics/MetricsIT.java | 8 +- .../core/retry/DefaultRetryPolicyIT.java | 59 +++--- .../core/retry/PerProfileRetryPolicyIT.java | 24 ++- .../oss/driver/core/session/ExceptionIT.java | 22 +- .../driver/core/session/RemovedNodeIT.java | 5 +- .../core/session/RequestProcessorIT.java | 23 ++- .../oss/driver/core/session/ShutdownIT.java | 8 +- .../core/specex/SpeculativeExecutionIT.java | 13 +- ...tSslEngineFactoryHostnameValidationIT.java | 5 +- .../core/ssl/DefaultSslEngineFactoryIT.java | 10 +- ...efaultSslEngineFactoryPropertyBasedIT.java | 5 +- ...eFactoryPropertyBasedWithClientAuthIT.java | 5 +- ...faultSslEngineFactoryWithClientAuthIT.java | 7 +- .../type/codec/registry/CodecRegistryIT.java | 35 ++-- .../oss/driver/mapper/ComputedIT.java | 31 +-- .../oss/driver/mapper/DefaultKeyspaceIT.java | 45 +++-- .../mapper/DefaultNullSavingStrategyIT.java | 20 +- .../datastax/oss/driver/mapper/DeleteIT.java | 17 +- .../driver/mapper/EntityPolymorphismIT.java | 31 +-- .../oss/driver/mapper/GetEntityIT.java | 25 +-- .../datastax/oss/driver/mapper/InsertIT.java | 32 +-- .../oss/driver/mapper/NamingStrategyIT.java | 19 +- .../oss/driver/mapper/NestedUdtIT.java | 17 +- .../driver/mapper/NullSavingStrategyIT.java | 27 +-- .../oss/driver/mapper/PrimitivesIT.java | 13 +- .../mapper/QueryKeyspaceAndTableIT.java | 18 +- .../oss/driver/mapper/QueryProviderIT.java | 14 +- .../oss/driver/mapper/QueryReturnTypesIT.java | 14 +- .../mapper/SelectCustomWhereClauseIT.java | 16 +- .../datastax/oss/driver/mapper/SelectIT.java | 16 +- .../driver/mapper/SelectOtherClausesIT.java | 14 +- .../oss/driver/mapper/SetEntityIT.java | 28 +-- .../driver/mapper/StatementAttributesIT.java | 45 +++-- .../oss/driver/mapper/TransientIT.java | 20 +- .../driver/mapper/UpdateCustomIfClauseIT.java | 24 +-- .../datastax/oss/driver/mapper/UpdateIT.java | 38 ++-- 
.../oss/driver/mapper/UpdateNamingIT.java | 14 +- .../datastax/oss/driver/osgi/OsgiBaseIT.java | 5 +- .../oss/driver/querybuilder/JsonInsertIT.java | 26 +-- 78 files changed, 1277 insertions(+), 1019 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileCcmIT.java rename integration-tests/src/test/java/com/datastax/oss/driver/core/config/{DriverExecutionProfileIT.java => DriverExecutionProfileSimulacronIT.java} (59%) rename integration-tests/src/test/java/com/datastax/oss/driver/core/cql/{BoundStatementIT.java => BoundStatementCcmIT.java} (68%) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java rename integration-tests/src/test/java/com/datastax/oss/driver/core/cql/{SimpleStatementIT.java => SimpleStatementCcmIT.java} (74%) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b34db231955..2ee0f377742 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -258,9 +258,11 @@ process, which can be either one of: For an example of a CCM-based test, see `PlainTextAuthProviderIT`. +#### Categories + Integration tests are divided into three categories: -#### Parallelizable tests +##### Parallelizable tests These tests can be run in parallel, to speed up the build. They either use: * dedicated Simulacron instances. These are lightweight, and Simulacron will manage the ports to @@ -268,7 +270,9 @@ These tests can be run in parallel, to speed up the build. They either use: * a shared, one-node CCM cluster. Each test works in its own keyspace. The build runs them with a configurable degree of parallelism (currently 8). The shared CCM cluster -is initialized the first time it's used, and stopped before moving on to serial tests. +is initialized the first time it's used, and stopped before moving on to serial tests. 
Note that we +run with `parallel=classes`, which means methods within the same class never run concurrent to each +other. To make an integration test parallelizable, annotate it with `@Category(ParallelizableTests.class)`. If you use CCM, it **must** be with `CcmRule`. @@ -276,7 +280,7 @@ If you use CCM, it **must** be with `CcmRule`. For an example of a Simulacron-based parallelizable test, see `NodeTargetingIT`. For a CCM-based test, see `DirectCompressionIT`. -#### Serial tests +##### Serial tests These tests cannot run in parallel, in general because they require CCM clusters of different sizes, or with a specific configuration (we never run more than one CCM cluster simultaneously: it would be @@ -293,7 +297,7 @@ Note: if multiple serial tests have a common "base" class, do not pull up `Custo child class must have its own instance. Otherwise they share the same CCM instance, and the first one destroys it on teardown. See `TokenITBase` for how to organize code in those cases. -#### Isolated tests +##### Isolated tests Not only can those tests not run in parallel, they also require specific environment tweaks, typically system properties that need to be set before initialization. @@ -305,6 +309,33 @@ To isolate an integration test, annotate it with `@Category(IsolatedTests.class) For an example, see `HeapCompressionIT`. +#### About test rules + +Do not mix `CcmRule` and `SimulacronRule` in the same test. It makes things harder to follow, and +can be inefficient (if the `SimulacronRule` is method-level, it will create a Simulacron cluster for +every test method, even those that only need CCM). + +Try to use `@ClassRule` as much as possible: it's more efficient to reuse the same resource across +all test methods. The only exceptions are: +* CCM tests that use `@CassandraRequirement` restrictions at the method level (ex: + `BatchStatementIT`). +* tests where you *really* need to restart from a clean state for every method. 
+ +When you use `@ClassRule`, your rules need to be static; also make them final and use constant +naming conventions, like `CCM_RULE`. + +When you use a server rule (`CcmRule` or `SimulacronRule`) and a `SessionRule` at the same level, +wrap them into a rule chain to ensure proper initialization order: + +```java +private static final CcmRule CCM_RULE = CcmRule.getInstance(); +private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + +@ClassRule +public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); +``` + +This is not necessary if the server rule is a `@ClassRule` and the session rule is a `@Rule`. ## Running the tests diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 995da1903b4..79acda6dd3c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -56,27 +56,27 @@ public class ConnectIT { @ClassRule - public static SimulacronRule simulacronRule = + public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(2)); @Rule public ExpectedException thrown = ExpectedException.none(); @Before public void setup() { - simulacronRule.cluster().acceptConnections(); + SIMULACRON_RULE.cluster().acceptConnections(); } @Test public void should_fail_fast_if_contact_points_unreachable_and_reconnection_disabled() { // Given - simulacronRule.cluster().rejectConnections(0, RejectScope.STOP); + SIMULACRON_RULE.cluster().rejectConnections(0, RejectScope.STOP); thrown.expect(AllNodesFailedException.class); thrown.expectMessage( "Could not reach any contact point, make sure you've provided valid addresses"); // When - SessionUtils.newSession(simulacronRule); + SessionUtils.newSession(SIMULACRON_RULE); // Then the exception is thrown } @@ -84,7 
+84,7 @@ public void should_fail_fast_if_contact_points_unreachable_and_reconnection_disa @Test public void should_wait_for_contact_points_if_reconnection_enabled() throws Exception { // Given - simulacronRule.cluster().rejectConnections(0, RejectScope.STOP); + SIMULACRON_RULE.cluster().rejectConnections(0, RejectScope.STOP); // When DriverConfigLoader loader = @@ -96,7 +96,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce .withDuration(DefaultDriverOption.RECONNECTION_BASE_DELAY, Duration.ofMillis(500)) .build(); CompletableFuture sessionFuture = - newSessionAsync(simulacronRule, loader).toCompletableFuture(); + newSessionAsync(SIMULACRON_RULE, loader).toCompletableFuture(); // wait a bit to ensure we have a couple of reconnections, otherwise we might race and allow // reconnections before the initial attempt TimeUnit.SECONDS.sleep(2); @@ -105,7 +105,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce assertThat(sessionFuture).isNotCompleted(); // When - simulacronRule.cluster().acceptConnections(); + SIMULACRON_RULE.cluster().acceptConnections(); // Then this doesn't throw Session session = sessionFuture.get(2, TimeUnit.SECONDS); @@ -125,14 +125,14 @@ public void should_cleanup_on_lbp_init_failure() { .without(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER) .build(); CqlSession.builder() - .addContactEndPoints(simulacronRule.getContactPoints()) + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) .withConfigLoader(loader) .build(); fail("Should have thrown a DriverException for no DC with explicit contact point"); } catch (DriverException ignored) { } // One second should be plenty of time for connections to close server side - checkThat(() -> simulacronRule.cluster().getConnections().getConnections().isEmpty()) + checkThat(() -> SIMULACRON_RULE.cluster().getConnections().getConnections().isEmpty()) .before(1, SECONDS) .becomesTrue(); } @@ -144,10 +144,10 @@ public void 
should_cleanup_on_lbp_init_failure() { @Test public void should_mark_unreachable_contact_points_as_local_and_schedule_reconnections() { // Reject connections only on one node - BoundCluster boundCluster = simulacronRule.cluster(); + BoundCluster boundCluster = SIMULACRON_RULE.cluster(); boundCluster.node(0).rejectConnections(0, RejectScope.STOP); - try (CqlSession session = SessionUtils.newSession(simulacronRule)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { Map nodes = session.getMetadata().getNodes(); // Node states are updated asynchronously, so guard against race conditions ConditionChecker.checkThat( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java index 51c0280cd0a..03aa2c383ab 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectKeyspaceIT.java @@ -35,23 +35,24 @@ @Category(ParallelizableTests.class) public class ConnectKeyspaceIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Test public void should_connect_to_existing_keyspace() { - CqlIdentifier keyspace = sessionRule.keyspace(); - try (Session session = SessionUtils.newSession(ccm, keyspace)) { + CqlIdentifier keyspace = SESSION_RULE.keyspace(); + try (Session session = SessionUtils.newSession(CCM_RULE, keyspace)) { assertThat(session.getKeyspace()).hasValue(keyspace); } } @Test public void 
should_connect_with_no_keyspace() { - try (Session session = SessionUtils.newSession(ccm)) { + try (Session session = SessionUtils.newSession(CCM_RULE)) { assertThat(session.getKeyspace()).isEmpty(); } } @@ -72,6 +73,6 @@ public void should_fail_to_connect_to_non_existent_keyspace_when_reconnecting_on private void should_fail_to_connect_to_non_existent_keyspace(DriverConfigLoader loader) { CqlIdentifier keyspace = CqlIdentifier.fromInternal("does not exist"); - SessionUtils.newSession(ccm, keyspace, loader); + SessionUtils.newSession(CCM_RULE, keyspace, loader); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java index 4453f069d5f..8558f1d650c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java @@ -33,7 +33,7 @@ public class PlainTextAuthProviderIT { @ClassRule - public static CustomCcmRule ccm = + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder() .withCassandraConfiguration("authenticator", "PasswordAuthenticator") .withJvmArgs("-Dcassandra.superuser_setup_delay_ms=0") @@ -41,7 +41,7 @@ public class PlainTextAuthProviderIT { @BeforeClass public static void sleepForAuth() { - if (ccm.getCassandraVersion().compareTo(Version.V2_2_0) < 0) { + if (CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) < 0) { // Sleep for 1 second to allow C* auth to do its work. 
This is only needed for 2.1 Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); } @@ -55,7 +55,7 @@ public void should_connect_with_credentials() { .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @@ -65,7 +65,7 @@ public void should_connect_with_programmatic_credentials() { SessionBuilder builder = SessionUtils.baseBuilder() - .addContactEndPoints(ccm.getContactPoints()) + .addContactEndPoints(CCM_RULE.getContactPoints()) .withAuthCredentials("cassandra", "cassandra"); try (CqlSession session = (CqlSession) builder.build()) { @@ -81,14 +81,14 @@ public void should_not_connect_with_invalid_credentials() { .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "baduser") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "badpass") .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @Test(expected = AllNodesFailedException.class) public void should_not_connect_without_credentials() { - try (CqlSession session = SessionUtils.newSession(ccm)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java index bc1ab888085..ef2d9803369 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java @@ -40,21 
+40,22 @@ @Category(ParallelizableTests.class) public class DirectCompressionIT { - private static CcmRule ccmRule = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule schemaSessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SCHEMA_SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(schemaSessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SCHEMA_SESSION_RULE); @BeforeClass public static void setup() { - schemaSessionRule + SCHEMA_SESSION_RULE .session() .execute("CREATE TABLE test (k text PRIMARY KEY, t text, i int, f float)"); } @@ -89,7 +90,7 @@ private void createAndCheckCluster(String compressorOption) { .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, compressorOption) .build(); try (CqlSession session = - SessionUtils.newSession(ccmRule, schemaSessionRule.keyspace(), loader)) { + SessionUtils.newSession(CCM_RULE, SCHEMA_SESSION_RULE.keyspace(), loader)) { // Run a couple of simple test queries ResultSet rs = session.execute( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java index 10d320bb2b2..64b890fd7f6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java @@ -45,21 +45,22 @@ public class HeapCompressionIT { System.setProperty("io.netty.noUnsafe", "true"); } - private static CustomCcmRule ccmRule = CustomCcmRule.builder().build(); + private static final CustomCcmRule CCM_RULE = 
CustomCcmRule.builder().build(); - private static SessionRule schemaSessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SCHEMA_SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(schemaSessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SCHEMA_SESSION_RULE); @BeforeClass public static void setup() { - schemaSessionRule + SCHEMA_SESSION_RULE .session() .execute("CREATE TABLE test (k text PRIMARY KEY, t text, i int, f float)"); } @@ -92,7 +93,7 @@ private void createAndCheckCluster(String compressorOption) { .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, compressorOption) .build(); try (CqlSession session = - SessionUtils.newSession(ccmRule, schemaSessionRule.keyspace(), loader)) { + SessionUtils.newSession(CCM_RULE, SCHEMA_SESSION_RULE.keyspace(), loader)) { // Run a couple of simple test queries ResultSet rs = session.execute( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java index 7aeec29eee0..5e208eed657 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -32,7 +33,10 @@ 
@Category(ParallelizableTests.class) public class DriverConfigValidationIT { - @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + @Rule public ExpectedException thrown = ExpectedException.none(); @Test @@ -55,7 +59,7 @@ public void should_fail_to_init_with_invalid_policy() { private void should_fail_to_init_with_invalid_policy(DefaultDriverOption option) { DriverConfigLoader loader = SessionUtils.configLoaderBuilder().withString(option, "AClassThatDoesNotExist").build(); - assertThatThrownBy(() -> SessionUtils.newSession(simulacron, loader)) + assertThatThrownBy(() -> SessionUtils.newSession(SIMULACRON_RULE, loader)) .satisfies( error -> { assertThat(error) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileCcmIT.java new file mode 100644 index 00000000000..391ef7b5c39 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileCcmIT.java @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import java.util.concurrent.CompletionStage; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class DriverExecutionProfileCcmIT { + + @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); + + @Test + public void should_use_profile_page_size() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 100) + .startProfile("smallpages") + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 10) + .build(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { + + CqlIdentifier keyspace = SessionUtils.uniqueKeyspaceId(); + DriverExecutionProfile slowProfile = SessionUtils.slowProfile(session); + SessionUtils.createKeyspace(session, keyspace, slowProfile); + + session.execute(String.format("USE %s", 
keyspace.asCql(false))); + + // load 500 rows (value beyond page size). + session.execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test (k int, v int, PRIMARY KEY (k,v))") + .setExecutionProfile(slowProfile) + .build()); + PreparedStatement prepared = session.prepare("INSERT INTO test (k, v) values (0, ?)"); + BatchStatementBuilder bs = + BatchStatement.builder(DefaultBatchType.UNLOGGED).setExecutionProfile(slowProfile); + for (int i = 0; i < 500; i++) { + bs.addStatement(prepared.bind(i)); + } + session.execute(bs.build()); + + String query = "SELECT * FROM test where k=0"; + // Execute query without profile, should use global page size (100) + CompletionStage future = session.executeAsync(query); + AsyncResultSet result = CompletableFutures.getUninterruptibly(future); + assertThat(result.remaining()).isEqualTo(100); + result = CompletableFutures.getUninterruptibly(result.fetchNextPage()); + // next fetch should also be 100 pages. + assertThat(result.remaining()).isEqualTo(100); + + // Execute query with profile, should use profile page size + future = + session.executeAsync( + SimpleStatement.builder(query).setExecutionProfileName("smallpages").build()); + result = CompletableFutures.getUninterruptibly(future); + assertThat(result.remaining()).isEqualTo(10); + // next fetch should also be 10 pages. 
+ result = CompletableFutures.getUninterruptibly(result.fetchNextPage()); + assertThat(result.remaining()).isEqualTo(10); + + SessionUtils.dropKeyspace(session, keyspace, slowProfile); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java index 0a42e6df7da..c2f0971cedd 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java @@ -34,16 +34,26 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import org.junit.Before; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class DriverExecutionProfileReloadIT { - @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(3)); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); @Rule public ExpectedException thrown = ExpectedException.none(); + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } + @Test public void should_periodically_reload_configuration() throws Exception { String query = "mockquery"; @@ -61,9 +71,9 @@ public void should_periodically_reload_configuration() throws Exception { (CqlSession) SessionUtils.baseBuilder() .withConfigLoader(loader) - .addContactEndPoints(simulacron.getContactPoints()) + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) .build()) { - simulacron.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); + SIMULACRON_RULE.cluster().prime(when(query).then(noRows()).delay(4, 
TimeUnit.SECONDS)); // Expect timeout since default session timeout is 2s try { @@ -99,9 +109,9 @@ public void should_reload_configuration_when_event_fired() throws Exception { (CqlSession) SessionUtils.baseBuilder() .withConfigLoader(loader) - .addContactEndPoints(simulacron.getContactPoints()) + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) .build()) { - simulacron.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); + SIMULACRON_RULE.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); // Expect timeout since default session timeout is 2s try { @@ -136,9 +146,9 @@ public void should_not_allow_dynamically_adding_profile() throws Exception { (CqlSession) SessionUtils.baseBuilder() .withConfigLoader(loader) - .addContactEndPoints(simulacron.getContactPoints()) + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) .build()) { - simulacron.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); + SIMULACRON_RULE.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); // Expect failure because profile doesn't exist. try { @@ -178,9 +188,9 @@ public void should_reload_profile_config_when_reloading_config() throws Exceptio (CqlSession) SessionUtils.baseBuilder() .withConfigLoader(loader) - .addContactEndPoints(simulacron.getContactPoints()) + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) .build()) { - simulacron.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); + SIMULACRON_RULE.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); // Expect failure because profile doesn't exist. 
try { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java similarity index 59% rename from integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java index 9ccf6557022..763e816b746 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java @@ -22,49 +22,45 @@ import static org.junit.Assert.fail; import com.datastax.oss.driver.api.core.AllNodesFailedException; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.cql.AsyncResultSet; -import com.datastax.oss.driver.api.core.cql.BatchStatement; -import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; -import com.datastax.oss.driver.api.core.cql.DefaultBatchType; -import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.servererrors.ServerError; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; -import 
com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.cluster.QueryLog; import java.time.Duration; import java.util.Optional; -import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; @Category(ParallelizableTests.class) -public class DriverExecutionProfileIT { +public class DriverExecutionProfileSimulacronIT { - @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(3)); - - @Rule public CcmRule ccm = CcmRule.getInstance(); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); @Rule public ExpectedException thrown = ExpectedException.none(); - // TODO: Test with reprepare on all nodes profile configuration + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } @Test public void should_fail_if_config_profile_specified_doesnt_exist() { - try (CqlSession session = SessionUtils.newSession(simulacron)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { SimpleStatement statement = SimpleStatement.builder("select * from system.local") .setExecutionProfileName("IDONTEXIST") @@ -84,10 +80,10 @@ public void should_use_profile_request_timeout() { .startProfile("olap") .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(10)) .build(); - try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, loader)) { String query = "mockquery"; // configure query with delay of 4 seconds. 
- simulacron.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); + SIMULACRON_RULE.cluster().prime(when(query).then(noRows()).delay(4, TimeUnit.SECONDS)); // Execute query without profile, should timeout with default session timeout (2s). try { @@ -109,10 +105,10 @@ public void should_use_profile_default_idempotence() { .startProfile("idem") .withBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, true) .build(); - try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, loader)) { String query = "mockquery"; // configure query with server error which should invoke onRequestError in retry policy. - simulacron.cluster().prime(when(query).then(serverError("fail"))); + SIMULACRON_RULE.cluster().prime(when(query).then(serverError("fail"))); // Execute query without profile, should fail because couldn't be retried. try { @@ -136,14 +132,14 @@ public void should_use_profile_consistency() { .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM") .withString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY, "LOCAL_SERIAL") .build(); - try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, loader)) { String query = "mockquery"; // Execute query without profile, should use default CLs (LOCAL_ONE, SERIAL). 
session.execute(query); Optional log = - simulacron.cluster().getLogs().getQueryLogs().stream() + SIMULACRON_RULE.cluster().getLogs().getQueryLogs().stream() .filter(q -> q.getQuery().equals(query)) .findFirst(); @@ -155,13 +151,13 @@ public void should_use_profile_consistency() { assertThat(l.getSerialConsistency().toString()).isEqualTo("SERIAL"); }); - simulacron.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearLogs(); // Execute query with profile, should use profile CLs session.execute(SimpleStatement.builder(query).setExecutionProfileName("cl").build()); log = - simulacron.cluster().getLogs().getQueryLogs().stream() + SIMULACRON_RULE.cluster().getLogs().getQueryLogs().stream() .filter(q -> q.getQuery().equals(query)) .findFirst(); @@ -174,57 +170,4 @@ public void should_use_profile_consistency() { }); } } - - @Test - public void should_use_profile_page_size() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 100) - .startProfile("smallpages") - .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 10) - .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { - - CqlIdentifier keyspace = SessionUtils.uniqueKeyspaceId(); - DriverExecutionProfile slowProfile = SessionUtils.slowProfile(session); - SessionUtils.createKeyspace(session, keyspace, slowProfile); - - session.execute(String.format("USE %s", keyspace.asCql(false))); - - // load 500 rows (value beyond page size). 
- session.execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test (k int, v int, PRIMARY KEY (k,v))") - .setExecutionProfile(slowProfile) - .build()); - PreparedStatement prepared = session.prepare("INSERT INTO test (k, v) values (0, ?)"); - BatchStatementBuilder bs = - BatchStatement.builder(DefaultBatchType.UNLOGGED).setExecutionProfile(slowProfile); - for (int i = 0; i < 500; i++) { - bs.addStatement(prepared.bind(i)); - } - session.execute(bs.build()); - - String query = "SELECT * FROM test where k=0"; - // Execute query without profile, should use global page size (100) - CompletionStage future = session.executeAsync(query); - AsyncResultSet result = CompletableFutures.getUninterruptibly(future); - assertThat(result.remaining()).isEqualTo(100); - result = CompletableFutures.getUninterruptibly(result.fetchNextPage()); - // next fetch should also be 100 pages. - assertThat(result.remaining()).isEqualTo(100); - - // Execute query with profile, should use profile page size - future = - session.executeAsync( - SimpleStatement.builder(query).setExecutionProfileName("smallpages").build()); - result = CompletableFutures.getUninterruptibly(future); - assertThat(result.remaining()).isEqualTo(10); - // next fetch should also be 10 pages. 
- result = CompletableFutures.getUninterruptibly(result.fetchNextPage()); - assertThat(result.remaining()).isEqualTo(10); - - SessionUtils.dropKeyspace(session, keyspace, slowProfile); - } - } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java index e9dd65b9b18..5384a5b2ff3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/ChannelSocketOptionsIT.java @@ -49,7 +49,8 @@ @Category(ParallelizableTests.class) public class ChannelSocketOptionsIT { - private static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); private static DriverConfigLoader loader = SessionUtils.configLoaderBuilder() @@ -61,14 +62,15 @@ public class ChannelSocketOptionsIT { .withInt(DefaultDriverOption.SOCKET_SEND_BUFFER_SIZE, 123456) .build(); - private static SessionRule sessionRule = - SessionRule.builder(simulacron).withConfigLoader(loader).build(); + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE).withConfigLoader(loader).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); @Test public void should_report_socket_options() { - Session session = sessionRule.session(); + Session session = SESSION_RULE.session(); DriverExecutionProfile config = session.getContext().getConfig().getDefaultProfile(); assertThat(config.getBoolean(SOCKET_TCP_NODELAY)).isTrue(); assertThat(config.getBoolean(SOCKET_KEEP_ALIVE)).isFalse(); diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java index ff3eaf8379c..f8c5fb44d6f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java @@ -43,7 +43,7 @@ import java.nio.ByteBuffer; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; -import org.junit.Before; +import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -52,7 +52,8 @@ @Category(ParallelizableTests.class) public class FrameLengthIT { - private static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); private static DriverConfigLoader loader = SessionUtils.configLoaderBuilder() @@ -62,10 +63,11 @@ public class FrameLengthIT { .withBytes(DefaultDriverOption.PROTOCOL_MAX_FRAME_LENGTH, 100 * 1024) .build(); - private static SessionRule sessionRule = - SessionRule.builder(simulacron).withConfigLoader(loader).build(); + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE).withConfigLoader(loader).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); private static final SimpleStatement LARGE_QUERY = SimpleStatement.newInstance("select * from foo").setIdempotent(true); @@ -74,21 +76,21 @@ public class FrameLengthIT { private static final Buffer ONE_HUNDRED_KB = ByteBuffer.allocate(100 * 1024).limit(100 * 1024); - @Before - public void primeQueries() { - simulacron + @BeforeClass + public 
static void primeQueries() { + SIMULACRON_RULE .cluster() .prime( when(LARGE_QUERY.getQuery()) .then(rows().row("result", ONE_HUNDRED_KB).columnTypes("result", "blob").build())); - simulacron + SIMULACRON_RULE .cluster() .prime(when(SLOW_QUERY.getQuery()).then(noRows()).delay(60, TimeUnit.SECONDS)); } @Test(expected = FrameTooLongException.class) public void should_fail_if_request_exceeds_max_frame_length() { - sessionRule + SESSION_RULE .session() .execute(SimpleStatement.newInstance("insert into foo (k) values (?)", ONE_HUNDRED_KB)); } @@ -96,9 +98,9 @@ public void should_fail_if_request_exceeds_max_frame_length() { @Test public void should_fail_if_response_exceeds_max_frame_length() { CompletionStage slowResultFuture = - sessionRule.session().executeAsync(SLOW_QUERY); + SESSION_RULE.session().executeAsync(SLOW_QUERY); try { - sessionRule.session().execute(LARGE_QUERY); + SESSION_RULE.session().execute(LARGE_QUERY); fail("Expected a " + FrameTooLongException.class.getSimpleName()); } catch (FrameTooLongException e) { // expected diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java index 2434c1cf8a6..bf2be2b82cc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/context/LifecycleListenerIT.java @@ -44,7 +44,7 @@ public class LifecycleListenerIT { @ClassRule - public static SimulacronRule simulacronRule = + public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); @Test @@ -67,12 +67,12 @@ public void should_not_notify_listener_when_init_fails() { assertThat(listener.ready).isFalse(); assertThat(listener.closed).isFalse(); - simulacronRule.cluster().rejectConnections(0, RejectScope.STOP); + SIMULACRON_RULE.cluster().rejectConnections(0, 
RejectScope.STOP); try (CqlSession session = newSession(listener)) { fail("Expected AllNodesFailedException"); } catch (AllNodesFailedException ignored) { } finally { - simulacronRule.cluster().acceptConnections(); + SIMULACRON_RULE.cluster().acceptConnections(); } assertThat(listener.ready).isFalse(); ConditionChecker.checkThat(() -> listener.closed).before(1, SECONDS).becomesTrue(); @@ -81,7 +81,7 @@ public void should_not_notify_listener_when_init_fails() { private CqlSession newSession(TestLifecycleListener listener) { TestContext context = new TestContext(new DefaultDriverConfigLoader(), listener); return CompletableFutures.getUninterruptibly( - DefaultSession.init(context, simulacronRule.getContactPoints(), null)); + DefaultSession.init(context, SIMULACRON_RULE.getContactPoints(), null)); } public static class TestLifecycleListener implements LifecycleListener { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java index 87526e529c7..934bd6be6b1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java @@ -49,31 +49,32 @@ public class AsyncResultSetIT { private static final String PARTITION_KEY1 = "part"; private static final String PARTITION_KEY2 = "part2"; - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = - SessionRule.builder(ccm) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, PAGE_SIZE) .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { // create table and load data across two partitions so we can test paging across tokens. - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( "CREATE TABLE IF NOT EXISTS test (k0 text, k1 int, v int, PRIMARY KEY(k0, k1))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); PreparedStatement prepared = - sessionRule.session().prepare("INSERT INTO test (k0, k1, v) VALUES (?, ?, ?)"); + SESSION_RULE.session().prepare("INSERT INTO test (k0, k1, v) VALUES (?, ?, ?)"); BatchStatementBuilder batchPart1 = BatchStatement.builder(DefaultBatchType.UNLOGGED); BatchStatementBuilder batchPart2 = BatchStatement.builder(DefaultBatchType.UNLOGGED); @@ -83,12 +84,12 @@ public static void setupSchema() { prepared.bind(PARTITION_KEY2, i + ROWS_PER_PARTITION, i + ROWS_PER_PARTITION)); } - sessionRule + SESSION_RULE .session() - .execute(batchPart1.setExecutionProfile(sessionRule.slowProfile()).build()); - sessionRule + .execute(batchPart1.setExecutionProfile(SESSION_RULE.slowProfile()).build()); + SESSION_RULE .session() - .execute(batchPart2.setExecutionProfile(sessionRule.slowProfile()).build()); + .execute(batchPart2.setExecutionProfile(SESSION_RULE.slowProfile()).build()); } @Test @@ -96,7 +97,7 @@ public void should_only_iterate_over_rows_in_current_page() throws Exception { // very basic test that just ensures that iterating over an AsyncResultSet only visits the first // page. CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( SimpleStatement.builder("SELECT * FROM test where k0 = ?") @@ -122,7 +123,7 @@ public void should_only_iterate_over_rows_in_current_page() throws Exception { public void should_iterate_over_all_pages_asynchronously_single_partition() throws Exception { // Validates async paging behavior over single partition. 
CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( SimpleStatement.builder("SELECT * FROM test where k0 = ?") @@ -140,7 +141,7 @@ public void should_iterate_over_all_pages_asynchronously_single_partition() thro public void should_iterate_over_all_pages_asynchronously_cross_partition() throws Exception { // Validates async paging behavior over a range query. CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync("SELECT * FROM test") .thenCompose(new AsyncResultSetConsumingFunction()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java index 9246ce92780..358ed2b0f02 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java @@ -48,11 +48,11 @@ @Category(ParallelizableTests.class) public class BatchStatementIT { - private CcmRule ccm = CcmRule.getInstance(); + private CcmRule ccmRule = CcmRule.getInstance(); - private SessionRule sessionRule = SessionRule.builder(ccm).build(); + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); - @Rule public TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @Rule public TestName name = new TestName(); @@ -345,7 +345,7 @@ public void should_not_allow_unset_value_when_protocol_less_than_v4() { SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") .build(); - try (CqlSession v3Session = SessionUtils.newSession(ccm, sessionRule.keyspace(), loader)) { + try (CqlSession v3Session = SessionUtils.newSession(ccmRule, sessionRule.keyspace(), loader)) { PreparedStatement prepared = v3Session.prepare("INSERT INTO test (k0, k1, v) values (?, ?, ?)"); diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java similarity index 68% rename from integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 10ac76cd36d..1f04f6212d3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -15,9 +15,6 @@ */ package com.datastax.oss.driver.core.cql; -import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; -import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.query; -import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assumptions.assumeThat; @@ -25,7 +22,6 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; -import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -42,7 +38,6 @@ import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.core.type.codec.CqlIntToStringCodec; import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; @@ -51,20 +46,13 @@ 
import com.datastax.oss.driver.internal.core.util.RoutingKey; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.request.Execute; import com.datastax.oss.protocol.internal.util.Bytes; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; -import com.datastax.oss.simulacron.common.cluster.ClusterSpec; -import com.datastax.oss.simulacron.common.cluster.QueryLog; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import java.nio.ByteBuffer; import java.time.Duration; import java.util.List; import java.util.Map; import java.util.concurrent.CompletionStage; -import java.util.concurrent.TimeUnit; import java.util.function.Function; import org.junit.Before; import org.junit.Rule; @@ -76,23 +64,21 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -public class BoundStatementIT { +public class BoundStatementCcmIT { - @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + private CcmRule ccmRule = CcmRule.getInstance(); - private CcmRule ccm = CcmRule.getInstance(); - - private final boolean atLeastV4 = ccm.getHighestProtocolVersion().getCode() >= 4; + private final boolean atLeastV4 = ccmRule.getHighestProtocolVersion().getCode() >= 4; private SessionRule sessionRule = - SessionRule.builder(ccm) + SessionRule.builder(ccmRule) .withConfigLoader( SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 20) .build()) .build(); - @Rule public TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @Rule public TestName name = new TestName(); @@ -141,19 +127,13 @@ public void setupSchema() { .build()); } - @Before - public void 
clearPrimes() { - simulacron.cluster().clearLogs(); - simulacron.cluster().clearPrimes(true); - } - @Test(expected = IllegalStateException.class) public void should_not_allow_unset_value_when_protocol_less_than_v4() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") .build(); - try (CqlSession v3Session = SessionUtils.newSession(ccm, sessionRule.keyspace(), loader)) { + try (CqlSession v3Session = SessionUtils.newSession(ccmRule, sessionRule.keyspace(), loader)) { PreparedStatement prepared = v3Session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); BoundStatement boundStatement = @@ -166,7 +146,7 @@ public void should_not_allow_unset_value_when_protocol_less_than_v4() { @Test public void should_not_write_tombstone_if_value_is_implicitly_unset() { assumeThat(atLeastV4).as("unset values require protocol V4+").isTrue(); - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); session.execute(prepared.bind(name.getMethodName(), VALUE)); @@ -181,7 +161,7 @@ public void should_not_write_tombstone_if_value_is_implicitly_unset() { @Test public void should_write_tombstone_if_value_is_explicitly_unset() { assumeThat(atLeastV4).as("unset values require protocol V4+").isTrue(); - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); session.execute(prepared.bind(name.getMethodName(), VALUE)); @@ -200,7 +180,7 @@ public void should_write_tombstone_if_value_is_explicitly_unset() { @Test public void should_write_tombstone_if_value_is_explicitly_unset_on_builder() { assumeThat(atLeastV4).as("unset values 
require protocol V4+").isTrue(); - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); session.execute(prepared.bind(name.getMethodName(), VALUE)); @@ -219,7 +199,7 @@ public void should_write_tombstone_if_value_is_explicitly_unset_on_builder() { @Test public void should_have_empty_result_definitions_for_update_query() { - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); assertThat(prepared.getResultSetDefinitions()).hasSize(0); @@ -231,7 +211,7 @@ public void should_have_empty_result_definitions_for_update_query() { @Test public void should_bind_null_value_when_setting_values_in_bulk() { - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); BoundStatement boundStatement = prepared.bind(name.getMethodName(), null); assertThat(boundStatement.get(1, TypeCodecs.INT)).isNull(); @@ -261,7 +241,7 @@ public void should_allow_custom_codecs_when_setting_values_in_bulk() { @Test public void should_use_page_size_from_simple_statement() { - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { SimpleStatement st = SimpleStatement.builder("SELECT v FROM test").setPageSize(10).build(); PreparedStatement prepared = session.prepare(st); CompletionStage future = session.executeAsync(prepared.bind()); @@ -274,7 +254,7 @@ public void 
should_use_page_size_from_simple_statement() { @Test public void should_use_page_size() { - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { // set page size on simple statement, but will be unused since // overridden by bound statement. SimpleStatement st = SimpleStatement.builder("SELECT v FROM test").setPageSize(10).build(); @@ -288,132 +268,6 @@ public void should_use_page_size() { } } - @Test - public void should_use_consistencies_from_simple_statement() { - try (CqlSession session = SessionUtils.newSession(simulacron)) { - SimpleStatement st = - SimpleStatement.builder("SELECT * FROM test where k = ?") - .setConsistencyLevel(DefaultConsistencyLevel.TWO) - .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) - .build(); - PreparedStatement prepared = session.prepare(st); - simulacron.cluster().clearLogs(); - // since query is unprimed, we use a text value for bind parameter as this is - // what simulacron expects for unprimed statements. - session.execute(prepared.bind("0")); - - List logs = simulacron.cluster().getLogs().getQueryLogs(); - assertThat(logs).hasSize(1); - - QueryLog log = logs.get(0); - - Message message = log.getFrame().message; - assertThat(message).isInstanceOf(Execute.class); - Execute execute = (Execute) message; - assertThat(execute.options.consistency) - .isEqualTo(DefaultConsistencyLevel.TWO.getProtocolCode()); - assertThat(execute.options.serialConsistency) - .isEqualTo(DefaultConsistencyLevel.LOCAL_SERIAL.getProtocolCode()); - } - } - - @Test - public void should_use_consistencies() { - try (CqlSession session = SessionUtils.newSession(simulacron)) { - // set consistencies on simple statement, but they will be unused since - // overridden by bound statement. 
- SimpleStatement st = - SimpleStatement.builder("SELECT * FROM test where k = ?") - .setConsistencyLevel(DefaultConsistencyLevel.TWO) - .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) - .build(); - PreparedStatement prepared = session.prepare(st); - simulacron.cluster().clearLogs(); - // since query is unprimed, we use a text value for bind parameter as this is - // what simulacron expects for unprimed statements. - session.execute( - prepared - .boundStatementBuilder("0") - .setConsistencyLevel(DefaultConsistencyLevel.THREE) - .setSerialConsistencyLevel(DefaultConsistencyLevel.SERIAL) - .build()); - - List logs = simulacron.cluster().getLogs().getQueryLogs(); - assertThat(logs).hasSize(1); - - QueryLog log = logs.get(0); - - Message message = log.getFrame().message; - assertThat(message).isInstanceOf(Execute.class); - Execute execute = (Execute) message; - assertThat(execute.options.consistency) - .isEqualTo(DefaultConsistencyLevel.THREE.getProtocolCode()); - assertThat(execute.options.serialConsistency) - .isEqualTo(DefaultConsistencyLevel.SERIAL.getProtocolCode()); - } - } - - @Test - public void should_use_timeout_from_simple_statement() { - try (CqlSession session = SessionUtils.newSession(simulacron)) { - Map params = ImmutableMap.of("k", 0); - Map paramTypes = ImmutableMap.of("k", "int"); - simulacron - .cluster() - .prime( - when(query( - "mock query", - Lists.newArrayList( - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE), - params, - paramTypes)) - .then(noRows()) - .delay(1500, TimeUnit.MILLISECONDS)); - SimpleStatement st = - SimpleStatement.builder("mock query") - .setTimeout(Duration.ofSeconds(1)) - .setConsistencyLevel(DefaultConsistencyLevel.ONE) - .build(); - PreparedStatement prepared = session.prepare(st); - - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT1S"); - - session.execute(prepared.bind(0)); - } - } - - @Test - public void should_use_timeout() { - try 
(CqlSession session = SessionUtils.newSession(simulacron)) { - Map params = ImmutableMap.of("k", 0); - Map paramTypes = ImmutableMap.of("k", "int"); - // set timeout on simple statement, but will be unused since overridden by bound statement. - simulacron - .cluster() - .prime( - when(query( - "mock query", - Lists.newArrayList( - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE), - params, - paramTypes)) - .then(noRows()) - .delay(1500, TimeUnit.MILLISECONDS)); - SimpleStatement st = - SimpleStatement.builder("mock query") - .setTimeout(Duration.ofSeconds(1)) - .setConsistencyLevel(DefaultConsistencyLevel.ONE) - .build(); - PreparedStatement prepared = session.prepare(st); - - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT0.15S"); - - session.execute(prepared.bind(0).setTimeout(Duration.ofMillis(150))); - } - } - @Test public void should_propagate_attributes_when_preparing_a_simple_statement() { CqlSession session = sessionRule.session(); @@ -498,7 +352,7 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { @Test @CassandraRequirement(min = "2.2") public void should_compute_routing_key_when_indices_randomly_distributed() { - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace())) { + try (CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace())) { PreparedStatement ps = session.prepare("INSERT INTO test3 (v, pk2, pk1) VALUES (?,?,?)"); @@ -533,7 +387,7 @@ private static void verifyUnset( private CqlSession sessionWithCustomCodec(CqlIntToStringCodec codec) { return (CqlSession) SessionUtils.baseBuilder() - .addContactEndPoints(ccm.getContactPoints()) + .addContactEndPoints(ccmRule.getContactPoints()) .withKeyspace(sessionRule.keyspace()) .addTypeCodecs(codec) .build(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java new file mode 100644 index 00000000000..54b3333dbdf --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java @@ -0,0 +1,188 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.cql; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.query; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Execute; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.cluster.QueryLog; +import 
com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.ExpectedException; + +@Category(ParallelizableTests.class) +public class BoundStatementSimulacronIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + @Rule public ExpectedException thrown = ExpectedException.none(); + + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + @Test + public void should_use_consistencies_from_simple_statement() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + SimpleStatement st = + SimpleStatement.builder("SELECT * FROM test where k = ?") + .setConsistencyLevel(DefaultConsistencyLevel.TWO) + .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) + .build(); + PreparedStatement prepared = session.prepare(st); + SIMULACRON_RULE.cluster().clearLogs(); + // since query is unprimed, we use a text value for bind parameter as this is + // what simulacron expects for unprimed statements. 
+ session.execute(prepared.bind("0")); + + List logs = SIMULACRON_RULE.cluster().getLogs().getQueryLogs(); + assertThat(logs).hasSize(1); + + QueryLog log = logs.get(0); + + Message message = log.getFrame().message; + assertThat(message).isInstanceOf(Execute.class); + Execute execute = (Execute) message; + assertThat(execute.options.consistency) + .isEqualTo(DefaultConsistencyLevel.TWO.getProtocolCode()); + assertThat(execute.options.serialConsistency) + .isEqualTo(DefaultConsistencyLevel.LOCAL_SERIAL.getProtocolCode()); + } + } + + @Test + public void should_use_consistencies() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + // set consistencies on simple statement, but they will be unused since + // overridden by bound statement. + SimpleStatement st = + SimpleStatement.builder("SELECT * FROM test where k = ?") + .setConsistencyLevel(DefaultConsistencyLevel.TWO) + .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) + .build(); + PreparedStatement prepared = session.prepare(st); + SIMULACRON_RULE.cluster().clearLogs(); + // since query is unprimed, we use a text value for bind parameter as this is + // what simulacron expects for unprimed statements. 
+ session.execute( + prepared + .boundStatementBuilder("0") + .setConsistencyLevel(DefaultConsistencyLevel.THREE) + .setSerialConsistencyLevel(DefaultConsistencyLevel.SERIAL) + .build()); + + List logs = SIMULACRON_RULE.cluster().getLogs().getQueryLogs(); + assertThat(logs).hasSize(1); + + QueryLog log = logs.get(0); + + Message message = log.getFrame().message; + assertThat(message).isInstanceOf(Execute.class); + Execute execute = (Execute) message; + assertThat(execute.options.consistency) + .isEqualTo(DefaultConsistencyLevel.THREE.getProtocolCode()); + assertThat(execute.options.serialConsistency) + .isEqualTo(DefaultConsistencyLevel.SERIAL.getProtocolCode()); + } + } + + @Test + public void should_use_timeout_from_simple_statement() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + Map params = ImmutableMap.of("k", 0); + Map paramTypes = ImmutableMap.of("k", "int"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "mock query", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE), + params, + paramTypes)) + .then(noRows()) + .delay(1500, TimeUnit.MILLISECONDS)); + SimpleStatement st = + SimpleStatement.builder("mock query") + .setTimeout(Duration.ofSeconds(1)) + .setConsistencyLevel(DefaultConsistencyLevel.ONE) + .build(); + PreparedStatement prepared = session.prepare(st); + + thrown.expect(DriverTimeoutException.class); + thrown.expectMessage("Query timed out after PT1S"); + + session.execute(prepared.bind(0)); + } + } + + @Test + public void should_use_timeout() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + Map params = ImmutableMap.of("k", 0); + Map paramTypes = ImmutableMap.of("k", "int"); + // set timeout on simple statement, but will be unused since overridden by bound statement. 
+ SIMULACRON_RULE + .cluster() + .prime( + when(query( + "mock query", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE), + params, + paramTypes)) + .then(noRows()) + .delay(1500, TimeUnit.MILLISECONDS)); + SimpleStatement st = + SimpleStatement.builder("mock query") + .setTimeout(Duration.ofSeconds(1)) + .setConsistencyLevel(DefaultConsistencyLevel.ONE) + .build(); + PreparedStatement prepared = session.prepare(st); + + thrown.expect(DriverTimeoutException.class); + thrown.expectMessage("Query timed out after PT0.15S"); + + session.execute(prepared.bind(0).setTimeout(Duration.ofMillis(150))); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java index 778fbb94813..01b7cef5a97 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java @@ -57,13 +57,13 @@ public class ExecutionInfoWarningsIT { private static final String KEY = "test"; - private final CustomCcmRule ccm = + private CustomCcmRule ccmRule = new CustomCcmRule.Builder() // set the warn threshold to 5Kb (default is 64Kb in newer versions) .withCassandraConfiguration("batch_size_warn_threshold_in_kb", "5") .build(); - private final SessionRule sessionRule = - SessionRule.builder(ccm) + private SessionRule sessionRule = + SessionRule.builder(ccmRule) .withConfigLoader( SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 20) @@ -75,7 +75,7 @@ public class ExecutionInfoWarningsIT { .build()) .build(); - @Rule public final TestRule ccmRule = RuleChain.outerRule(ccm).around(sessionRule); + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @Mock private Appender appender; @Captor private ArgumentCaptor 
loggingEventCaptor; @@ -83,7 +83,7 @@ public class ExecutionInfoWarningsIT { private Level originalLoggerLevel; @Before - public void setupLogger() { + public void createSchema() { // table with simple primary key, single cell. sessionRule .session() @@ -99,7 +99,10 @@ public void setupLogger() { .addPositionalValues(KEY, i) .build()); } - // setup the log appender + } + + @Before + public void setupLogger() { logger = (Logger) LoggerFactory.getLogger(CqlRequestHandler.class); originalLoggerLevel = logger.getLevel(); logger.setLevel(Level.WARN); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java index 46c133331b7..0ea751ee41a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingIterableSpliteratorIT.java @@ -50,37 +50,38 @@ @Category(ParallelizableTests.class) public class PagingIterableSpliteratorIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( "CREATE TABLE IF NOT EXISTS test (k0 int, k1 int, v int, PRIMARY KEY(k0, k1))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); PreparedStatement prepared = - sessionRule.session().prepare("INSERT INTO test (k0, k1, v) 
VALUES (?, ?, ?)"); + SESSION_RULE.session().prepare("INSERT INTO test (k0, k1, v) VALUES (?, ?, ?)"); for (int i = 0; i < 20_000; i += 1_000) { BatchStatementBuilder batch = BatchStatement.builder(DefaultBatchType.UNLOGGED); for (int j = 0; j < 1_000; j++) { int n = i + j; batch.addStatement(prepared.bind(0, n, n)); } - sessionRule.session().execute(batch.setExecutionProfile(sessionRule.slowProfile()).build()); + SESSION_RULE.session().execute(batch.setExecutionProfile(SESSION_RULE.slowProfile()).build()); } } @Test @UseDataProvider("pageSizes") public void should_consume_spliterator(int pageSize, boolean parallel) throws Exception { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); DriverExecutionProfile profile = session .getContext() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 8aa04a35b0a..77876567c3a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -64,17 +64,14 @@ @Category(ParallelizableTests.class) public class PreparedStatementIT { - private final CcmRule ccmRule = CcmRule.getInstance(); + private CcmRule ccmRule = CcmRule.getInstance(); - private final SessionRule sessionRule = + private SessionRule sessionRule = SessionRule.builder(ccmRule) .withConfigLoader( SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 2) .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - ImmutableList.of(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath())) .build()) .build(); @@ -86,6 +83,7 @@ public class PreparedStatementIT { public void setupSchema() { for (String query : ImmutableList.of( + "DROP TABLE 
IF EXISTS prepared_statement_test", "CREATE TABLE prepared_statement_test (a int PRIMARY KEY, b int, c int)", "INSERT INTO prepared_statement_test (a, b, c) VALUES (1, 1, 1)", "INSERT INTO prepared_statement_test (a, b, c) VALUES (2, 2, 2)", @@ -375,56 +373,59 @@ private void should_not_store_metadata_for_conditional_updates(CqlSession sessio @Test public void should_return_same_instance_when_repreparing_query() { - // Given - CqlSession session = sessionRule.session(); - assertThat(getPreparedCacheSize(session)).isEqualTo(0); - String query = "SELECT * FROM prepared_statement_test WHERE a = ?"; - - // When - PreparedStatement preparedStatement1 = session.prepare(query); - PreparedStatement preparedStatement2 = session.prepare(query); - - // Then - assertThat(preparedStatement1).isSameAs(preparedStatement2); - assertThat(getPreparedCacheSize(session)).isEqualTo(1); + try (CqlSession session = sessionWithCacheSizeMetric()) { + // Given + assertThat(getPreparedCacheSize(session)).isEqualTo(0); + String query = "SELECT * FROM prepared_statement_test WHERE a = ?"; + + // When + PreparedStatement preparedStatement1 = session.prepare(query); + PreparedStatement preparedStatement2 = session.prepare(query); + + // Then + assertThat(preparedStatement1).isSameAs(preparedStatement2); + assertThat(getPreparedCacheSize(session)).isEqualTo(1); + } } /** Just to illustrate that the driver does not sanitize query strings. 
*/ @Test public void should_create_separate_instances_for_differently_formatted_queries() { - // Given - CqlSession session = sessionRule.session(); - assertThat(getPreparedCacheSize(session)).isEqualTo(0); - - // When - PreparedStatement preparedStatement1 = - session.prepare("SELECT * FROM prepared_statement_test WHERE a = ?"); - PreparedStatement preparedStatement2 = - session.prepare("select * from prepared_statement_test where a = ?"); - - // Then - assertThat(preparedStatement1).isNotSameAs(preparedStatement2); - assertThat(getPreparedCacheSize(session)).isEqualTo(2); + try (CqlSession session = sessionWithCacheSizeMetric()) { + // Given + assertThat(getPreparedCacheSize(session)).isEqualTo(0); + + // When + PreparedStatement preparedStatement1 = + session.prepare("SELECT * FROM prepared_statement_test WHERE a = ?"); + PreparedStatement preparedStatement2 = + session.prepare("select * from prepared_statement_test where a = ?"); + + // Then + assertThat(preparedStatement1).isNotSameAs(preparedStatement2); + assertThat(getPreparedCacheSize(session)).isEqualTo(2); + } } @Test public void should_create_separate_instances_for_different_statement_parameters() { - // Given - CqlSession session = sessionRule.session(); - assertThat(getPreparedCacheSize(session)).isEqualTo(0); - SimpleStatement statement = - SimpleStatement.newInstance("SELECT * FROM prepared_statement_test"); - - // When - PreparedStatement preparedStatement1 = session.prepare(statement.setPageSize(1)); - PreparedStatement preparedStatement2 = session.prepare(statement.setPageSize(4)); - - // Then - assertThat(preparedStatement1).isNotSameAs(preparedStatement2); - assertThat(getPreparedCacheSize(session)).isEqualTo(2); - // Each bound statement uses the page size it was prepared with - assertThat(firstPageOf(session.executeAsync(preparedStatement1.bind()))).hasSize(1); - assertThat(firstPageOf(session.executeAsync(preparedStatement2.bind()))).hasSize(4); + try (CqlSession session = 
sessionWithCacheSizeMetric()) { + // Given + assertThat(getPreparedCacheSize(session)).isEqualTo(0); + SimpleStatement statement = + SimpleStatement.newInstance("SELECT * FROM prepared_statement_test"); + + // When + PreparedStatement preparedStatement1 = session.prepare(statement.setPageSize(1)); + PreparedStatement preparedStatement2 = session.prepare(statement.setPageSize(4)); + + // Then + assertThat(preparedStatement1).isNotSameAs(preparedStatement2); + assertThat(getPreparedCacheSize(session)).isEqualTo(2); + // Each bound statement uses the page size it was prepared with + assertThat(firstPageOf(session.executeAsync(preparedStatement1.bind()))).hasSize(1); + assertThat(firstPageOf(session.executeAsync(preparedStatement2.bind()))).hasSize(4); + } } /** @@ -457,6 +458,19 @@ private static Iterable firstPageOf(CompletionStage stage) return CompletableFutures.getUninterruptibly(stage).currentPage(); } + private CqlSession sessionWithCacheSizeMetric() { + return SessionUtils.newSession( + ccmRule, + sessionRule.keyspace(), + SessionUtils.configLoaderBuilder() + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 2) + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + ImmutableList.of(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath())) + .build()); + } + @SuppressWarnings("unchecked") private static long getPreparedCacheSize(CqlSession session) { return session diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index 883e5df2eb1..f6c724e67bb 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -37,18 +37,19 @@ @Category(ParallelizableTests.class) public class QueryTraceIT { - private static CcmRule ccmRule = 
CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void should_not_have_tracing_id_when_tracing_disabled() { ExecutionInfo executionInfo = - sessionRule + SESSION_RULE .session() .execute("SELECT release_version FROM system.local") .getExecutionInfo(); @@ -63,7 +64,7 @@ public void should_not_have_tracing_id_when_tracing_disabled() { @Test public void should_fetch_trace_when_tracing_enabled() { ExecutionInfo executionInfo = - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("SELECT release_version FROM system.local") @@ -77,7 +78,7 @@ public void should_fetch_trace_when_tracing_enabled() { assertThat(queryTrace.getTracingId()).isEqualTo(executionInfo.getTracingId()); assertThat(queryTrace.getRequestType()).isEqualTo("Execute CQL3 query"); assertThat(queryTrace.getDurationMicros()).isPositive(); - EndPoint contactPoint = ccmRule.getContactPoints().iterator().next(); + EndPoint contactPoint = CCM_RULE.getContactPoints().iterator().next(); assertThat(queryTrace.getCoordinator()) .isEqualTo(((InetSocketAddress) contactPoint.resolve()).getAddress()); assertThat(queryTrace.getParameters()) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementCcmIT.java similarity index 74% rename from integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java rename to 
integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementCcmIT.java index a3cc9995113..a237c23a9ef 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementCcmIT.java @@ -15,14 +15,10 @@ */ package com.datastax.oss.driver.core.cql; -import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; -import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; -import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ -33,71 +29,53 @@ import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; -import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.request.Query; -import com.datastax.oss.simulacron.common.cluster.ClusterSpec; -import com.datastax.oss.simulacron.common.cluster.QueryLog; -import java.time.Duration; import java.util.List; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; -import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import 
org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestName; import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -public class SimpleStatementIT { +public class SimpleStatementCcmIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - - private static SessionRule sessionRule = - SessionRule.builder(ccm) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 20) .build()) .build(); - private static SessionRule simulacronSessionRule = - SessionRule.builder(simulacron).build(); - - @ClassRule public static TestRule ccmChain = RuleChain.outerRule(ccm).around(sessionRule); - @ClassRule - public static TestRule simulacronChain = - RuleChain.outerRule(simulacron).around(simulacronSessionRule); + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public TestName name = new TestName(); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static final String KEY = "test"; @BeforeClass public static void setupSchema() { // table where every column forms the primary key. - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( "CREATE TABLE IF NOT EXISTS test (k text, v int, PRIMARY KEY(k, v))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); for (int i = 0; i < 100; i++) { - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("INSERT INTO test (k, v) VALUES (?, ?)") @@ -106,32 +84,26 @@ public static void setupSchema() { } // table with simple primary key, single cell. 
- sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("CREATE TABLE IF NOT EXISTS test2 (k text primary key, v int)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } - @Before - public void clearPrimes() { - simulacron.cluster().clearLogs(); - simulacron.cluster().clearPrimes(true); - } - @Test public void should_use_paging_state_when_copied() { Statement st = SimpleStatement.builder(String.format("SELECT v FROM test WHERE k='%s'", KEY)).build(); - ResultSet result = sessionRule.session().execute(st); + ResultSet result = SESSION_RULE.session().execute(st); // given a query created from a copy of a previous query with paging state from previous queries // response. st = st.copy(result.getExecutionInfo().getPagingState()); // when executing that query. - result = sessionRule.session().execute(st); + result = SESSION_RULE.session().execute(st); // then the response should start on the page boundary. assertThat(result.iterator().next().getInt("v")).isEqualTo(20); @@ -141,7 +113,7 @@ public void should_use_paging_state_when_copied() { public void should_use_paging_state_when_provided_to_new_statement() { Statement st = SimpleStatement.builder(String.format("SELECT v FROM test WHERE k='%s'", KEY)).build(); - ResultSet result = sessionRule.session().execute(st); + ResultSet result = SESSION_RULE.session().execute(st); // given a query created from a copy of a previous query with paging state from previous queries // response. @@ -151,7 +123,7 @@ public void should_use_paging_state_when_provided_to_new_statement() { .build(); // when executing that query. - result = sessionRule.session().execute(st); + result = SESSION_RULE.session().execute(st); // then the response should start on the page boundary. 
assertThat(result.iterator().next().getInt("v")).isEqualTo(20); @@ -162,7 +134,7 @@ public void should_use_paging_state_when_provided_to_new_statement() { public void should_fail_if_using_paging_state_from_different_query() { Statement st = SimpleStatement.builder("SELECT v FROM test WHERE k=:k").addNamedValue("k", KEY).build(); - ResultSet result = sessionRule.session().execute(st); + ResultSet result = SESSION_RULE.session().execute(st); // TODO Expect PagingStateException @@ -183,7 +155,7 @@ public void should_use_timestamp_when_set() { .setQueryTimestamp(timestamp) .build(); - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // when retrieving writetime of cell from that insert. SimpleStatement select = @@ -191,7 +163,7 @@ public void should_use_timestamp_when_set() { .addPositionalValue(name.getMethodName()) .build(); - ResultSet result = sessionRule.session().execute(select); + ResultSet result = SESSION_RULE.session().execute(select); List rows = result.all(); assertThat(rows).hasSize(1); @@ -205,7 +177,7 @@ public void should_use_timestamp_when_set() { public void should_use_tracing_when_set() { // TODO currently there's no way to validate tracing was set since trace id is not set // also write test to verify it is not set. - sessionRule + SESSION_RULE .session() .execute(SimpleStatement.builder("select * from test").setTracing().build()); } @@ -220,7 +192,7 @@ public void should_use_positional_values() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then we should be able to retrieve the data as inserted. 
SimpleStatement select = @@ -228,7 +200,7 @@ public void should_use_positional_values() { .addPositionalValue(name.getMethodName()) .build(); - ResultSet result = sessionRule.session().execute(select); + ResultSet result = SESSION_RULE.session().execute(select); List rows = result.all(); assertThat(rows).hasSize(1); @@ -247,7 +219,7 @@ public void should_allow_nulls_in_positional_values() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then we should be able to retrieve the data as inserted. SimpleStatement select = @@ -255,7 +227,7 @@ public void should_allow_nulls_in_positional_values() { .addPositionalValue(name.getMethodName()) .build(); - ResultSet result = sessionRule.session().execute(select); + ResultSet result = SESSION_RULE.session().execute(select); List rows = result.all(); assertThat(rows).hasSize(1); @@ -273,7 +245,7 @@ public void should_fail_when_too_many_positional_values_provided() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then the server will throw an InvalidQueryException which is thrown up to the client. } @@ -287,7 +259,7 @@ public void should_fail_when_not_enough_positional_values_provided() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then the server will throw an InvalidQueryException which is thrown up to the client. } @@ -302,7 +274,7 @@ public void should_use_named_values() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then we should be able to retrieve the data as inserted. 
SimpleStatement select = @@ -310,7 +282,7 @@ public void should_use_named_values() { .addNamedValue("k", name.getMethodName()) .build(); - ResultSet result = sessionRule.session().execute(select); + ResultSet result = SESSION_RULE.session().execute(select); List rows = result.all(); assertThat(rows).hasSize(1); @@ -329,7 +301,7 @@ public void should_allow_nulls_in_named_values() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then we should be able to retrieve the data as inserted. SimpleStatement select = @@ -337,7 +309,7 @@ public void should_allow_nulls_in_named_values() { .addNamedValue("k", name.getMethodName()) .build(); - ResultSet result = sessionRule.session().execute(select); + ResultSet result = SESSION_RULE.session().execute(select); List rows = result.all(); assertThat(rows).hasSize(1); @@ -355,7 +327,7 @@ public void should_fail_when_named_value_missing() { .build(); // when executing that statement - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); // then the server will throw an InvalidQueryException which is thrown up to the client. } @@ -382,56 +354,17 @@ public void should_use_positional_value_with_case_sensitive_id() { SimpleStatement.builder("SELECT count(*) FROM test2 WHERE k=:\"theKey\"") .addNamedValue(CqlIdentifier.fromCql("\"theKey\""), 0) .build(); - Row row = sessionRule.session().execute(statement).one(); + Row row = SESSION_RULE.session().execute(statement).one(); assertThat(row.getLong(0)).isEqualTo(0); } @Test public void should_use_page_size() { Statement st = SimpleStatement.builder("SELECT v FROM test").setPageSize(10).build(); - CompletionStage future = sessionRule.session().executeAsync(st); + CompletionStage future = SESSION_RULE.session().executeAsync(st); AsyncResultSet result = CompletableFutures.getUninterruptibly(future); // Should have only fetched 10 (page size) rows. 
assertThat(result.remaining()).isEqualTo(10); } - - @Test - public void should_use_consistencies() { - SimpleStatement st = - SimpleStatement.builder("SELECT * FROM test where k = ?") - .setConsistencyLevel(DefaultConsistencyLevel.TWO) - .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) - .build(); - simulacronSessionRule.session().execute(st); - - List logs = simulacron.cluster().getLogs().getQueryLogs(); - assertThat(logs).hasSize(1); - - QueryLog log = logs.get(0); - - Message message = log.getFrame().message; - assertThat(message).isInstanceOf(Query.class); - Query query = (Query) message; - assertThat(query.options.consistency).isEqualTo(DefaultConsistencyLevel.TWO.getProtocolCode()); - assertThat(query.options.serialConsistency) - .isEqualTo(DefaultConsistencyLevel.LOCAL_SERIAL.getProtocolCode()); - } - - @Test - public void should_use_timeout() { - simulacron - .cluster() - .prime(when("mock query").then(noRows()).delay(1500, TimeUnit.MILLISECONDS)); - SimpleStatement st = - SimpleStatement.builder("mock query") - .setTimeout(Duration.ofSeconds(1)) - .setConsistencyLevel(DefaultConsistencyLevel.ONE) - .build(); - - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT1S"); - - simulacronSessionRule.session().execute(st); - } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java new file mode 100644 index 00000000000..1c6fa8f2737 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java @@ -0,0 +1,103 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.cql; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.cluster.QueryLog; +import java.time.Duration; +import java.util.List; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.ExpectedException; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class SimpleStatementSimulacronIT { + + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + private static final SessionRule SESSION_RULE = + 
SessionRule.builder(SIMULACRON_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); + + @Rule public ExpectedException thrown = ExpectedException.none(); + + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + @Test + public void should_use_consistencies() { + SimpleStatement st = + SimpleStatement.builder("SELECT * FROM test where k = ?") + .setConsistencyLevel(DefaultConsistencyLevel.TWO) + .setSerialConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) + .build(); + SESSION_RULE.session().execute(st); + + List logs = SIMULACRON_RULE.cluster().getLogs().getQueryLogs(); + assertThat(logs).hasSize(1); + + QueryLog log = logs.get(0); + + Message message = log.getFrame().message; + assertThat(message).isInstanceOf(Query.class); + Query query = (Query) message; + assertThat(query.options.consistency).isEqualTo(DefaultConsistencyLevel.TWO.getProtocolCode()); + assertThat(query.options.serialConsistency) + .isEqualTo(DefaultConsistencyLevel.LOCAL_SERIAL.getProtocolCode()); + } + + @Test + public void should_use_timeout() { + SIMULACRON_RULE + .cluster() + .prime(when("mock query").then(noRows()).delay(1500, TimeUnit.MILLISECONDS)); + SimpleStatement st = + SimpleStatement.builder("mock query") + .setTimeout(Duration.ofSeconds(1)) + .setConsistencyLevel(DefaultConsistencyLevel.ONE) + .build(); + + thrown.expect(DriverTimeoutException.class); + thrown.expectMessage("Query timed out after PT1S"); + + SESSION_RULE.session().execute(st); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java index a71fdb3367a..99fe014f180 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java @@ -87,11 +87,12 @@ @Category(ParallelizableTests.class) @RunWith(DataProviderRunner.class) public class DataTypeIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public TestName name = new TestName(); @@ -148,7 +149,7 @@ DataTypes.VARINT, new BigInteger(Integer.toString(Integer.MAX_VALUE) + "000") } }; - Version version = ccm.getCassandraVersion(); + Version version = CCM_RULE.getCassandraVersion(); // Filter types if they aren't supported by cassandra version in use. return Arrays.stream(samples) .filter( @@ -245,7 +246,7 @@ public static Object[][] typeSamples() { UserDefinedType udt = new DefaultUserDefinedType( - sessionRule.keyspace(), + SESSION_RULE.keyspace(), CqlIdentifier.fromCql(userTypeFor(types)), false, typeNames, @@ -286,14 +287,14 @@ public static void createTable() { } } - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( String.format( "CREATE TABLE IF NOT EXISTS %s (k int primary key, %s)", tableName, String.join(",", columnData))) - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } @@ -309,7 +310,7 @@ private static int nextKey() { @Test public void should_insert_non_primary_key_column_simple_statement_using_format( DataType dataType, K value, K expectedPrimitiveValue) { - TypeCodec codec = sessionRule.session().getContext().getCodecRegistry().codecFor(dataType); + TypeCodec codec = 
SESSION_RULE.session().getContext().getCodecRegistry().codecFor(dataType); int key = nextKey(); String columnName = columnNameFor(dataType); @@ -322,7 +323,7 @@ public void should_insert_non_primary_key_column_simple_statement_using_form .addPositionalValue(key) .build(); - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); SimpleStatement select = SimpleStatement.builder(String.format("SELECT %s FROM %s where k=?", columnName, tableName)) @@ -345,7 +346,7 @@ public void should_insert_non_primary_key_column_simple_statement_positional .addPositionalValues(key, value) .build(); - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); SimpleStatement select = SimpleStatement.builder(String.format("SELECT %s FROM %s where k=?", columnName, tableName)) @@ -369,7 +370,7 @@ public void should_insert_non_primary_key_column_simple_statement_named_valu .addNamedValue("v", value) .build(); - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); SimpleStatement select = SimpleStatement.builder(String.format("SELECT %s FROM %s where k=?", columnName, tableName)) @@ -391,18 +392,18 @@ public void should_insert_non_primary_key_column_bound_statement_positional_ String.format("INSERT INTO %s (k, %s) values (?, ?)", tableName, columnName)) .build(); - PreparedStatement preparedInsert = sessionRule.session().prepare(insert); + PreparedStatement preparedInsert = SESSION_RULE.session().prepare(insert); BoundStatementBuilder boundBuilder = preparedInsert.boundStatementBuilder(); boundBuilder = setValue(0, boundBuilder, DataTypes.INT, key); boundBuilder = setValue(1, boundBuilder, dataType, value); BoundStatement boundInsert = boundBuilder.build(); - sessionRule.session().execute(boundInsert); + SESSION_RULE.session().execute(boundInsert); SimpleStatement select = SimpleStatement.builder(String.format("SELECT %s FROM %s where k=?", columnName, tableName)) .build(); - PreparedStatement preparedSelect = 
sessionRule.session().prepare(select); + PreparedStatement preparedSelect = SESSION_RULE.session().prepare(select); BoundStatement boundSelect = setValue(0, preparedSelect.bind(), DataTypes.INT, key); readValue(boundSelect, dataType, value, expectedPrimitiveValue); @@ -420,19 +421,19 @@ public void should_insert_non_primary_key_column_bound_statement_named_value String.format("INSERT INTO %s (k, %s) values (:k, :v)", tableName, columnName)) .build(); - PreparedStatement preparedInsert = sessionRule.session().prepare(insert); + PreparedStatement preparedInsert = SESSION_RULE.session().prepare(insert); BoundStatementBuilder boundBuilder = preparedInsert.boundStatementBuilder(); boundBuilder = setValue("k", boundBuilder, DataTypes.INT, key); boundBuilder = setValue("v", boundBuilder, dataType, value); BoundStatement boundInsert = boundBuilder.build(); - sessionRule.session().execute(boundInsert); + SESSION_RULE.session().execute(boundInsert); SimpleStatement select = SimpleStatement.builder( String.format("SELECT %s FROM %s where k=:k", columnName, tableName)) .build(); - PreparedStatement preparedSelect = sessionRule.session().prepare(select); + PreparedStatement preparedSelect = SESSION_RULE.session().prepare(select); BoundStatement boundSelect = setValue("k", preparedSelect.bind(), DataTypes.INT, key); boundSelect = boundSelect.setInt("k", key); @@ -442,8 +443,8 @@ public void should_insert_non_primary_key_column_bound_statement_named_value private static > S setValue( int index, S bs, DataType dataType, Object value) { TypeCodec codec = - sessionRule.session() != null - ? sessionRule.session().getContext().getCodecRegistry().codecFor(dataType) + SESSION_RULE.session() != null + ? 
SESSION_RULE.session().getContext().getCodecRegistry().codecFor(dataType) : null; // set to null if value is null instead of getting possible NPE when casting from null to @@ -534,8 +535,8 @@ private static > S setValue( private static > S setValue( String name, S bs, DataType dataType, Object value) { TypeCodec codec = - sessionRule.session() != null - ? sessionRule.session().getContext().getCodecRegistry().codecFor(dataType) + SESSION_RULE.session() != null + ? SESSION_RULE.session().getContext().getCodecRegistry().codecFor(dataType) : null; // set to null if value is null instead of getting possible NPE when casting from null to @@ -626,8 +627,8 @@ private static > S setValue( private void readValue( Statement select, DataType dataType, K value, K expectedPrimitiveValue) { TypeCodec codec = - sessionRule.session().getContext().getCodecRegistry().codecFor(dataType); - ResultSet result = sessionRule.session().execute(select); + SESSION_RULE.session().getContext().getCodecRegistry().codecFor(dataType); + ResultSet result = SESSION_RULE.session().execute(select); String columnName = columnNameFor(dataType); @@ -750,7 +751,7 @@ private void readValue( } // Decode directly using the codec - ProtocolVersion protocolVersion = sessionRule.session().getContext().getProtocolVersion(); + ProtocolVersion protocolVersion = SESSION_RULE.session().getContext().getProtocolVersion(); assertThat(codec.decode(row.getBytesUnsafe(columnName), protocolVersion)).isEqualTo(value); assertThat(codec.decode(row.getBytesUnsafe(0), protocolVersion)).isEqualTo(value); } @@ -768,14 +769,14 @@ private static String typeFor(DataType dataType) { fieldParts.add(fieldName + " " + fieldType); } - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( String.format( "CREATE TYPE IF NOT EXISTS %s (%s)", udt.getName().asCql(false), String.join(",", fieldParts))) - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); // Chances 
are the UDT isn't labeled as frozen in the context we're given, so we add it as diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java index ca992b0bc3e..fd2f37d82af 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java @@ -33,7 +33,8 @@ public class HeartbeatDisabledIT { @ClassRule - public static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(2)); + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(2)); @Test public void should_not_send_heartbeat_when_disabled() throws InterruptedException { @@ -43,7 +44,7 @@ public void should_not_send_heartbeat_when_disabled() throws InterruptedExceptio SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.HEARTBEAT_INTERVAL, Duration.ofSeconds(0)) .build(); - try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, loader)) { AtomicInteger heartbeats = registerHeartbeatListener(); SECONDS.sleep(35); @@ -53,7 +54,7 @@ public void should_not_send_heartbeat_when_disabled() throws InterruptedExceptio private AtomicInteger registerHeartbeatListener() { AtomicInteger nonControlHeartbeats = new AtomicInteger(); - simulacron + SIMULACRON_RULE .cluster() .registerQueryListener( (n, l) -> nonControlHeartbeats.incrementAndGet(), diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java index dfee4c81fc8..ea3ebf661ae 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java @@ -50,14 +50,16 @@ import java.util.function.Predicate; import java.util.stream.Collectors; import org.junit.Before; -import org.junit.Rule; +import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(ParallelizableTests.class) public class HeartbeatIT { - @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); private static final String QUERY = "select * from foo"; private static final Predicate IS_OPTION_REQUEST = @@ -67,9 +69,10 @@ public class HeartbeatIT { @Before public void setUp() { - simulacron.cluster().clearLogs(); - simulacron.cluster().clearPrimes(true); - simulacronNode = simulacron.cluster().getNodes().iterator().next(); + SIMULACRON_RULE.cluster().acceptConnections(); + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + simulacronNode = SIMULACRON_RULE.cluster().getNodes().iterator().next(); } @Test @@ -151,7 +154,7 @@ public void should_send_heartbeat_when_requests_being_written_but_nothing_receiv throws InterruptedException { // Prime a query that will never return a response. 
String noResponseQueryStr = "delay"; - simulacron.cluster().prime(when(noResponseQueryStr).then(noResult())); + SIMULACRON_RULE.cluster().prime(when(noResponseQueryStr).then(noResult())); try (CqlSession session = newSession()) { AtomicInteger heartbeats = countHeartbeatsOnRegularConnection(); @@ -218,7 +221,7 @@ private CqlSession newSession(ProgrammaticDriverConfigLoaderBuilder loaderBuilde .withDuration(DefaultDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, Duration.ofSeconds(2)) .withDuration(DefaultDriverOption.RECONNECTION_MAX_DELAY, Duration.ofSeconds(1)) .build(); - return SessionUtils.newSession(simulacron, loader); + return SessionUtils.newSession(SIMULACRON_RULE, loader); } private AtomicInteger countHeartbeatsOnRegularConnection() { @@ -232,7 +235,7 @@ private AtomicInteger countHeartbeatsOnControlConnection() { private AtomicInteger countHeartbeats(boolean regularConnection) { SocketAddress controlConnectionAddress = findControlConnectionAddress(); AtomicInteger count = new AtomicInteger(); - simulacron + SIMULACRON_RULE .cluster() .registerQueryListener( (n, l) -> count.incrementAndGet(), diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java index 1370718a4e2..708219b1a61 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/DefaultLoadBalancingPolicyIT.java @@ -58,10 +58,10 @@ public class DefaultLoadBalancingPolicyIT { private static final String LOCAL_DC = "dc1"; - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withNodes(4, 1).build(); + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(4, 1).build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private 
static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -69,11 +69,12 @@ public class DefaultLoadBalancingPolicyIT { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( "CREATE KEYSPACE test " + "WITH replication = {'class': 'NetworkTopologyStrategy', 'dc1': 2, 'dc2': 1}"); @@ -82,7 +83,7 @@ public static void setup() { @Test public void should_ignore_remote_dcs() { - for (Node node : sessionRule.session().getMetadata().getNodes().values()) { + for (Node node : SESSION_RULE.session().getMetadata().getNodes().values()) { if (LOCAL_DC.equals(node.getDatacenter())) { assertThat(node.getDistance()).isEqualTo(NodeDistance.LOCAL); assertThat(node.getState()).isEqualTo(NodeState.UP); @@ -100,7 +101,7 @@ public void should_ignore_remote_dcs() { @Test public void should_use_round_robin_on_local_dc_when_not_enough_routing_information() { ByteBuffer routingKey = TypeCodecs.INT.encodePrimitive(1, ProtocolVersion.DEFAULT); - TokenMap tokenMap = sessionRule.session().getMetadata().getTokenMap().get(); + TokenMap tokenMap = SESSION_RULE.session().getMetadata().getTokenMap().get(); // TODO add statements with setKeyspace when that is supported List statements = ImmutableList.of( @@ -119,7 +120,7 @@ public void should_use_round_robin_on_local_dc_when_not_enough_routing_informati for (Statement statement : statements) { List coordinators = new ArrayList<>(); for (int i = 0; i < 12; i++) { - ResultSet rs = sessionRule.session().execute(statement); + ResultSet rs = SESSION_RULE.session().execute(statement); Node coordinator = rs.getExecutionInfo().getCoordinator(); 
assertThat(coordinator.getDatacenter()).isEqualTo(LOCAL_DC); coordinators.add(coordinator); @@ -136,7 +137,7 @@ public void should_use_round_robin_on_local_dc_when_not_enough_routing_informati public void should_prioritize_replicas_when_routing_information_present() { CqlIdentifier keyspace = CqlIdentifier.fromCql("test"); ByteBuffer routingKey = TypeCodecs.INT.encodePrimitive(1, ProtocolVersion.DEFAULT); - TokenMap tokenMap = sessionRule.session().getMetadata().getTokenMap().get(); + TokenMap tokenMap = SESSION_RULE.session().getMetadata().getTokenMap().get(); Set localReplicas = new HashSet<>(); for (Node replica : tokenMap.getReplicas(keyspace, routingKey)) { if (replica.getDatacenter().equals(LOCAL_DC)) { @@ -160,7 +161,7 @@ public void should_prioritize_replicas_when_routing_information_present() { // reasonable distribution: Map hits = new HashMap<>(); for (int i = 0; i < 2000; i++) { - ResultSet rs = sessionRule.session().execute(statement); + ResultSet rs = SESSION_RULE.session().execute(statement); Node coordinator = rs.getExecutionInfo().getCoordinator(); assertThat(localReplicas).contains(coordinator); assertThat(coordinator.getDatacenter()).isEqualTo(LOCAL_DC); @@ -177,9 +178,9 @@ public void should_prioritize_replicas_when_routing_information_present() { public void should_hit_non_replicas_when_routing_information_present_but_all_replicas_down() { CqlIdentifier keyspace = CqlIdentifier.fromCql("test"); ByteBuffer routingKey = TypeCodecs.INT.encodePrimitive(1, ProtocolVersion.DEFAULT); - TokenMap tokenMap = sessionRule.session().getMetadata().getTokenMap().get(); + TokenMap tokenMap = SESSION_RULE.session().getMetadata().getTokenMap().get(); - InternalDriverContext context = (InternalDriverContext) sessionRule.session().getContext(); + InternalDriverContext context = (InternalDriverContext) SESSION_RULE.session().getContext(); Set localReplicas = new HashSet<>(); for (Node replica : tokenMap.getReplicas(keyspace, routingKey)) { @@ -205,7 +206,7 @@ 
public void should_hit_non_replicas_when_routing_information_present_but_all_rep for (Statement statement : statements) { List coordinators = new ArrayList<>(); for (int i = 0; i < 6; i++) { - ResultSet rs = sessionRule.session().execute(statement); + ResultSet rs = SESSION_RULE.session().execute(statement); Node coordinator = rs.getExecutionInfo().getCoordinator(); coordinators.add(coordinator); assertThat(coordinator.getDatacenter()).isEqualTo(LOCAL_DC); @@ -229,7 +230,7 @@ public void should_hit_non_replicas_when_routing_information_present_but_all_rep @Test public void should_apply_node_filter() { Set localNodes = new HashSet<>(); - for (Node node : sessionRule.session().getMetadata().getNodes().values()) { + for (Node node : SESSION_RULE.session().getMetadata().getNodes().values()) { if (node.getDatacenter().equals(LOCAL_DC)) { localNodes.add(node); } @@ -243,8 +244,8 @@ public void should_apply_node_filter() { // Open a separate session with a filter try (CqlSession session = SessionUtils.newSession( - ccmRule, - sessionRule.keyspace(), + CCM_RULE, + SESSION_RULE.keyspace(), null, null, node -> !node.getEndPoint().equals(ignoredEndPoint))) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java index ed7898dd84d..0c9c61bb22a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java @@ -40,7 +40,7 @@ import java.net.InetSocketAddress; import java.util.concurrent.TimeUnit; import org.junit.Before; -import org.junit.Rule; +import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; @@ -49,17 +49,20 @@ @Category(ParallelizableTests.class) public class NodeTargetingIT { - private 
SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(5)); + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(5)); - private SessionRule sessionRule = SessionRule.builder(simulacron).build(); + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE).build(); - @Rule public TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); @Before public void clear() { - simulacron.cluster().clearLogs(); - simulacron.cluster().clearPrimes(true); - simulacron.cluster().node(4).stop(); + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + SIMULACRON_RULE.cluster().node(4).stop(); ConditionChecker.checkThat(() -> getNode(4).getState() == NodeState.DOWN) .before(5, TimeUnit.SECONDS); } @@ -74,7 +77,7 @@ public void should_use_node_on_statement() { Statement statement = SimpleStatement.newInstance("select * system.local").setNode(node); // when statement is executed - ResultSet result = sessionRule.session().execute(statement); + ResultSet result = SESSION_RULE.session().execute(statement); // then the query should have been sent to the configured node. assertThat(result.getExecutionInfo().getCoordinator()).isEqualTo(node); @@ -84,14 +87,17 @@ public void should_use_node_on_statement() { @Test public void should_fail_if_node_fails_query() { String query = "mock"; - simulacron.cluster().node(3).prime(when(query).then(unavailable(ConsistencyLevel.ALL, 1, 0))); + SIMULACRON_RULE + .cluster() + .node(3) + .prime(when(query).then(unavailable(ConsistencyLevel.ALL, 1, 0))); // given a statement with a node configured to fail the given query. Node node3 = getNode(3); Statement statement = SimpleStatement.newInstance(query).setNode(node3); // when statement is executed an error should be raised. 
try { - sessionRule.session().execute(statement); + SESSION_RULE.session().execute(statement); fail("Should have thrown AllNodesFailedException"); } catch (AllNodesFailedException e) { assertThat(e.getErrors().size()).isEqualTo(1); @@ -107,7 +113,7 @@ public void should_fail_if_node_is_not_connected() { Statement statement = SimpleStatement.newInstance("select * system.local").setNode(node4); try { // when statement is executed - sessionRule.session().execute(statement); + SESSION_RULE.session().execute(statement); fail("Query should have failed"); } catch (NoNodeAvailableException e) { assertThat(e.getErrors()).isEmpty(); @@ -121,10 +127,10 @@ public void should_fail_if_node_is_not_connected() { } private Node getNode(int id) { - BoundNode boundNode = simulacron.cluster().node(id); + BoundNode boundNode = SIMULACRON_RULE.cluster().node(id); assertThat(boundNode).isNotNull(); InetSocketAddress address = (InetSocketAddress) boundNode.getAddress(); - return sessionRule + return SESSION_RULE .session() .getMetadata() .findNode(address) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java index 58b918852a4..2ee5aca6aee 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java @@ -45,12 +45,12 @@ public class PerProfileLoadBalancingPolicyIT { // 3 2-node DCs - private static SimulacronRule simulacron = + private static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(2, 2, 2)); // default lb policy should consider dc1 local, profile1 dc3, profile2 empty. 
- private static SessionRule sessionRule = - SessionRule.builder(simulacron) + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, "dc1") @@ -61,20 +61,21 @@ public class PerProfileLoadBalancingPolicyIT { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); private static String QUERY_STRING = "select * from foo"; private static final SimpleStatement QUERY = SimpleStatement.newInstance(QUERY_STRING); @Before public void clear() { - simulacron.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearLogs(); } @BeforeClass public static void setup() { // sanity checks - DriverContext context = sessionRule.session().getContext(); + DriverContext context = SESSION_RULE.session().getContext(); DriverConfig config = context.getConfig(); assertThat(config.getProfiles()).containsKeys("profile1", "profile2"); @@ -89,7 +90,7 @@ public static void setup() { assertThat(defaultPolicy).isSameAs(policy2).isNotSameAs(policy1); - for (Node node : sessionRule.session().getMetadata().getNodes().values()) { + for (Node node : SESSION_RULE.session().getMetadata().getNodes().values()) { // if node is in dc2 it should be ignored, otherwise (dc1, dc3) it should be local. NodeDistance expectedDistance = node.getDatacenter().equals("dc2") ? NodeDistance.IGNORED : NodeDistance.LOCAL; @@ -102,7 +103,7 @@ public void should_use_policy_from_request_profile() { // Since profile1 uses dc3 as localDC, only those nodes should receive these queries. 
Statement statement = QUERY.setExecutionProfileName("profile1"); for (int i = 0; i < 10; i++) { - ResultSet result = sessionRule.session().execute(statement); + ResultSet result = SESSION_RULE.session().execute(statement); assertThat(result.getExecutionInfo().getCoordinator().getDatacenter()).isEqualTo("dc3"); } @@ -116,7 +117,7 @@ public void should_use_policy_from_config_when_not_configured_in_request_profile // Since profile2 does not define an lbp config, it should use default which uses dc1. Statement statement = QUERY.setExecutionProfileName("profile2"); for (int i = 0; i < 10; i++) { - ResultSet result = sessionRule.session().execute(statement); + ResultSet result = SESSION_RULE.session().execute(statement); assertThat(result.getExecutionInfo().getCoordinator().getDatacenter()).isEqualTo("dc1"); } @@ -128,7 +129,7 @@ public void should_use_policy_from_config_when_not_configured_in_request_profile private void assertQueryInDc(int dc, int expectedPerNode) { for (int i = 0; i < 2; i++) { assertThat( - simulacron.cluster().dc(dc).node(i).getLogs().getQueryLogs().stream() + SIMULACRON_RULE.cluster().dc(dc).node(i).getLogs().getQueryLogs().stream() .filter(l -> l.getQuery().equals(QUERY_STRING))) .as("Expected query count to be %d for dc %d", 5, i) .hasSize(expectedPerNode); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java index b7e3aab8a5d..dbc5dc06c2a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java @@ -29,11 +29,11 @@ public class ByteOrderedTokenIT extends TokenITBase { - private static CustomCcmRule ccmRule = + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).withCreateOption("-p ByteOrderedPartitioner").build(); - 
private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -41,7 +41,8 @@ public class ByteOrderedTokenIT extends TokenITBase { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public ByteOrderedTokenIT() { super("org.apache.cassandra.dht.ByteOrderedPartitioner", ByteOrderedToken.class, false); @@ -49,11 +50,11 @@ public ByteOrderedTokenIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + TokenITBase.createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java index 8ea6b65f58b..76e9e08fff4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java @@ -29,15 +29,15 @@ public class ByteOrderedTokenVnodesIT extends TokenITBase { - private static CustomCcmRule ccmRule = + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder() .withNodes(3) .withCreateOption("-p ByteOrderedPartitioner") .withCreateOption("--vnodes") .build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -45,7 +45,8 @@ public class ByteOrderedTokenVnodesIT 
extends TokenITBase { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public ByteOrderedTokenVnodesIT() { super("org.apache.cassandra.dht.ByteOrderedPartitioner", ByteOrderedToken.class, true); @@ -53,11 +54,11 @@ public ByteOrderedTokenVnodesIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + TokenITBase.createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java index a5d96084860..4e1fda2aa3c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/CaseSensitiveUdtIT.java @@ -46,23 +46,24 @@ @Category(ParallelizableTests.class) public class CaseSensitiveUdtIT { - private static CcmRule ccmRule = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Test public void should_expose_metadata_with_correct_case() { - boolean supportsFunctions = ccmRule.getCassandraVersion().compareTo(Version.V2_2_0) >= 0; 
+ boolean supportsFunctions = CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) >= 0; - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute("CREATE TYPE \"Address\"(street text)"); @@ -91,7 +92,7 @@ public void should_expose_metadata_with_correct_case() { KeyspaceMetadata keyspace = session .getMetadata() - .getKeyspace(sessionRule.keyspace()) + .getKeyspace(SESSION_RULE.keyspace()) .orElseThrow(() -> new AssertionError("Couldn't find rule's keyspace")); UserDefinedType addressType = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index 660893c9f60..a68408ee9af 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -46,13 +46,13 @@ @Category(ParallelizableTests.class) public class DescribeIT { - private static final Logger logger = LoggerFactory.getLogger(DescribeIT.class); + private static final Logger LOG = LoggerFactory.getLogger(DescribeIT.class); - private static CcmRule ccmRule = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); // disable debouncer to speed up test. 
- private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -61,7 +61,8 @@ public class DescribeIT { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); /** * Creates a keyspace using a variety of features and ensures {@link @@ -82,7 +83,7 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { String keyspaceAsCql = keyspace.asCql(true); String expectedCql = getExpectedCqlString(keyspaceAsCql); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); // create keyspace session.execute( @@ -128,7 +129,7 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { keyspaceAsCql)); // date type requries 2.2+ - if (ccmRule.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { + if (CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { // A table that will have materialized views (copied from mv docs) session.execute( "CREATE TABLE cyclist_mv(cid uuid, name text, age int, birthday date, country text, " @@ -138,7 +139,7 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { session.execute("CREATE INDEX cyclist_by_country ON cyclist_mv(country)"); // materialized views require 3.0+ - if (ccmRule.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { + if (CCM_RULE.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { // A materialized view for cyclist_mv, reverse clustering. created first to ensure creation // order does not matter, alphabetical does. 
session.execute( @@ -177,7 +178,7 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { session.execute("CREATE INDEX rrank ON rank_by_year_and_name(rank)"); // udfs and udas require 2.22+ - if (ccmRule.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { + if (CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { // UDFs session.execute( "CREATE OR REPLACE FUNCTION avgState ( state tuple, val int ) CALLED ON NULL INPUT RETURNS tuple LANGUAGE java AS \n" @@ -205,12 +206,12 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { assertThat(originalKsMeta.get().getUserDefinedTypes()).isEmpty(); // validate that the exported schema matches what was expected exactly. - Optional ks = sessionRule.session().getMetadata().getKeyspace(keyspace); + Optional ks = SESSION_RULE.session().getMetadata().getKeyspace(keyspace); assertThat(ks.get().describeWithChildren(true).trim()).isEqualTo(expectedCql); // Also validate that when you create a Session with schema already created that the exported // string is the same. - try (CqlSession newSession = SessionUtils.newSession(ccmRule)) { + try (CqlSession newSession = SessionUtils.newSession(CCM_RULE)) { ks = newSession.getMetadata().getKeyspace(keyspace); assertThat(ks.get().describeWithChildren(true).trim()).isEqualTo(expectedCql); } @@ -218,7 +219,7 @@ public void create_schema_and_ensure_exported_cql_is_as_expected() { private String getExpectedCqlString(String keyspace) { String majorMinor = - ccmRule.getCassandraVersion().getMajor() + "." + ccmRule.getCassandraVersion().getMinor(); + CCM_RULE.getCassandraVersion().getMajor() + "." 
+ CCM_RULE.getCassandraVersion().getMinor(); String resourceName = "/describe_it_test_" + majorMinor + ".cql"; Closer closer = Closer.create(); @@ -226,8 +227,8 @@ private String getExpectedCqlString(String keyspace) { InputStream is = DescribeIT.class.getResourceAsStream(resourceName); if (is == null) { // If no schema file is defined for tested cassandra version, just try 3.11. - if (ccmRule.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { - logger.warn("Could not find schema file for {}, assuming C* 3.11.x", majorMinor); + if (CCM_RULE.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { + LOG.warn("Could not find schema file for {}, assuming C* 3.11.x", majorMinor); is = DescribeIT.class.getResourceAsStream("/describe_it_test_3.11.cql"); if (is == null) { throw new IOException(); @@ -241,13 +242,13 @@ private String getExpectedCqlString(String keyspace) { ByteStreams.copy(is, ps); return baos.toString().replaceAll("ks_0", keyspace).trim(); } catch (IOException e) { - logger.warn("Failure to read {}", resourceName, e); + LOG.warn("Failure to read {}", resourceName, e); fail("Unable to read " + resourceName + " is it defined?", e); } finally { try { closer.close(); } catch (IOException e) { // no op - logger.warn("Failure closing streams", e); + LOG.warn("Failure closing streams", e); } } return ""; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java index e8dce5ce065..e3a6faaaa44 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java @@ -29,10 +29,10 @@ public class Murmur3TokenIT extends TokenITBase { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withNodes(3).build(); + private static final CustomCcmRule CCM_RULE = 
CustomCcmRule.builder().withNodes(3).build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -40,7 +40,8 @@ public class Murmur3TokenIT extends TokenITBase { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public Murmur3TokenIT() { super("org.apache.cassandra.dht.Murmur3Partitioner", Murmur3Token.class, false); @@ -48,11 +49,11 @@ public Murmur3TokenIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + TokenITBase.createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java index d7439c92010..54bb1d0db26 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java @@ -29,11 +29,11 @@ public class Murmur3TokenVnodesIT extends TokenITBase { - private static CustomCcmRule ccmRule = + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).withCreateOption("--vnodes").build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -41,7 +41,8 @@ public class Murmur3TokenVnodesIT extends TokenITBase { .build()) .build(); - 
@ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public Murmur3TokenVnodesIT() { super("org.apache.cassandra.dht.Murmur3Partitioner", Murmur3Token.class, true); @@ -49,11 +50,11 @@ public Murmur3TokenVnodesIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - createSchema(sessionRule.session()); + createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index 2db23d1425b..c236294e9ef 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -38,11 +38,11 @@ @Category(ParallelizableTests.class) public class NodeMetadataIT { - @ClassRule public static CcmRule ccmRule = CcmRule.getInstance(); + @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); @Test public void should_expose_node_metadata() { - try (CqlSession session = SessionUtils.newSession(ccmRule)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE)) { Node node = getUniqueNode(session); // Run a few basic checks given what we know about our test environment: assertThat(node.getEndPoint()).isNotNull(); @@ -56,7 +56,7 @@ public void should_expose_node_metadata() { assertThat(node.getRack()).isEqualTo("r1"); if (!CcmBridge.DSE_ENABLEMENT) { // CcmBridge does not report accurate C* versions for DSE, only approximated values - assertThat(node.getCassandraVersion()).isEqualTo(ccmRule.getCassandraVersion()); + assertThat(node.getCassandraVersion()).isEqualTo(CCM_RULE.getCassandraVersion()); } 
assertThat(node.getState()).isSameAs(NodeState.UP); assertThat(node.getDistance()).isSameAs(NodeDistance.LOCAL); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java index 97591689c48..4134fd8c8a2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java @@ -29,11 +29,11 @@ public class RandomTokenIT extends TokenITBase { - private static CustomCcmRule ccmRule = + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).withCreateOption("-p RandomPartitioner").build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -41,7 +41,8 @@ public class RandomTokenIT extends TokenITBase { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public RandomTokenIT() { super("org.apache.cassandra.dht.RandomPartitioner", RandomToken.class, false); @@ -49,11 +50,11 @@ public RandomTokenIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + TokenITBase.createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java index bc74935e824..924ed515169 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java @@ -29,15 +29,15 @@ public class RandomTokenVnodesIT extends TokenITBase { - private static CustomCcmRule ccmRule = + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder() .withNodes(3) .withCreateOption("-p RandomPartitioner") .withCreateOption("--vnodes") .build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() @@ -45,7 +45,8 @@ public class RandomTokenVnodesIT extends TokenITBase { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); public RandomTokenVnodesIT() { super("org.apache.cassandra.dht.RandomPartitioner", RandomToken.class, true); @@ -53,11 +54,11 @@ public RandomTokenVnodesIT() { @Override protected CqlSession session() { - return sessionRule.session(); + return SESSION_RULE.session(); } @BeforeClass public static void createSchema() { - TokenITBase.createSchema(sessionRule.session()); + TokenITBase.createSchema(SESSION_RULE.session()); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java index a9977357b1e..54751bc8065 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaAgreementIT.java @@ -35,9 +35,9 @@ public class SchemaAgreementIT { - private static CustomCcmRule ccm = 
CustomCcmRule.builder().withNodes(3).build(); - private static SessionRule sessionRule = - SessionRule.builder(ccm) + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).build(); + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) @@ -50,7 +50,8 @@ public class SchemaAgreementIT { .build()) .build(); - @ClassRule public static RuleChain ruleChain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final RuleChain CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public TestName name = new TestName(); @@ -59,34 +60,34 @@ public void should_succeed_when_all_nodes_agree() { ResultSet result = createTable(); assertThat(result.getExecutionInfo().isSchemaInAgreement()).isTrue(); - assertThat(sessionRule.session().checkSchemaAgreement()).isTrue(); + assertThat(SESSION_RULE.session().checkSchemaAgreement()).isTrue(); } @Test public void should_fail_on_timeout() { - ccm.getCcmBridge().pause(2); + CCM_RULE.getCcmBridge().pause(2); try { // Can't possibly agree since one node is paused. ResultSet result = createTable(); assertThat(result.getExecutionInfo().isSchemaInAgreement()).isFalse(); - assertThat(sessionRule.session().checkSchemaAgreement()).isFalse(); + assertThat(SESSION_RULE.session().checkSchemaAgreement()).isFalse(); } finally { - ccm.getCcmBridge().resume(2); + CCM_RULE.getCcmBridge().resume(2); } } @Test public void should_agree_when_up_nodes_agree() { - ccm.getCcmBridge().stop(2); + CCM_RULE.getCcmBridge().stop(2); try { // Should agree since up hosts should agree. 
ResultSet result = createTable(); assertThat(result.getExecutionInfo().isSchemaInAgreement()).isTrue(); - assertThat(sessionRule.session().checkSchemaAgreement()).isTrue(); + assertThat(SESSION_RULE.session().checkSchemaAgreement()).isTrue(); } finally { - ccm.getCcmBridge().start(2); + CCM_RULE.getCcmBridge().start(2); } } @@ -98,7 +99,7 @@ public void should_fail_if_timeout_is_zero() { .withDuration( DefaultDriverOption.CONTROL_CONNECTION_AGREEMENT_TIMEOUT, Duration.ofSeconds(0)) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, sessionRule.keyspace(), loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, SESSION_RULE.keyspace(), loader)) { ResultSet result = createTable(session); // Should not agree because schema metadata is disabled @@ -108,7 +109,7 @@ public void should_fail_if_timeout_is_zero() { } private ResultSet createTable() { - return createTable(sessionRule.session()); + return createTable(SESSION_RULE.session()); } private final AtomicInteger tableCounter = new AtomicInteger(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java index 0c577c8508a..fbe958e8e72 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java @@ -39,7 +39,7 @@ @Category(ParallelizableTests.class) public class MetricsIT { - @ClassRule public static CcmRule ccmRule = CcmRule.getInstance(); + @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); @Test public void should_expose_metrics() { @@ -49,7 +49,7 @@ public void should_expose_metrics() { DefaultDriverOption.METRICS_SESSION_ENABLED, Collections.singletonList("cql-requests")) .build(); - try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, 
loader)) { for (int i = 0; i < 10; i++) { session.execute("SELECT release_version FROM system.local"); } @@ -86,7 +86,7 @@ public void should_expose_bytes_sent_and_received() { DefaultDriverOption.METRICS_NODE_ENABLED, Lists.newArrayList("bytes-sent", "bytes-received")) .build(); - try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { for (int i = 0; i < 10; i++) { session.execute("SELECT release_version FROM system.local"); } @@ -121,7 +121,7 @@ public void should_not_expose_metrics_if_disabled() { .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, Collections.emptyList()) .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()) .build(); - try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { for (int i = 0; i < 10; i++) { session.execute("SELECT release_version FROM system.local"); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java index 61039ddd642..66531b19d50 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java @@ -74,12 +74,12 @@ @RunWith(DataProviderRunner.class) public class DefaultRetryPolicyIT { - - public static @ClassRule SimulacronRule simulacron = + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(3)); public @Rule SessionRule sessionRule = - SessionRule.builder(simulacron) + SessionRule.builder(SIMULACRON_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, true) @@ -104,7 +104,7 @@ public class 
DefaultRetryPolicyIT { @SuppressWarnings("deprecation") private final QueryCounter counter = - QueryCounter.builder(simulacron.cluster()) + QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(queryStr)) .build(); @@ -117,8 +117,8 @@ public void setup() { // the log prefix we expect in retry logging messages. logPrefix = sessionRule.session().getName() + "|default"; // clear activity logs and primes between tests since simulacron instance is shared. - simulacron.cluster().clearLogs(); - simulacron.cluster().clearPrimes(true); + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); } @After @@ -130,7 +130,10 @@ public void teardown() { @Test public void should_not_retry_on_read_timeout_when_data_present() { // given a node that will respond to query with a read timeout where data is present. - simulacron.cluster().node(0).prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 1, 3, true))); + SIMULACRON_RULE + .cluster() + .node(0) + .prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 1, 3, true))); try { // when executing a query @@ -156,7 +159,10 @@ public void should_not_retry_on_read_timeout_when_less_than_blockFor_received() // given a node that will respond to a query with a read timeout where 2 out of 3 responses are // received. // in this case, digest requests succeeded, but not the data request. - simulacron.cluster().node(0).prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 2, 3, false))); + SIMULACRON_RULE + .cluster() + .node(0) + .prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 2, 3, false))); try { // when executing a query @@ -182,7 +188,10 @@ public void should_retry_on_read_timeout_when_enough_responses_and_data_not_pres // given a node that will respond to a query with a read timeout where 3 out of 3 responses are // received, // but data is not present. 
- simulacron.cluster().node(0).prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 3, 3, false))); + SIMULACRON_RULE + .cluster() + .node(0) + .prime(when(queryStr).then(readTimeout(LOCAL_QUORUM, 3, 3, false))); try { // when executing a query. @@ -217,7 +226,7 @@ public void should_retry_on_read_timeout_when_enough_responses_and_data_not_pres @Test public void should_retry_on_next_host_on_connection_error_if_idempotent() { // given a node that will close its connection as result of receiving a query. - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime( @@ -232,11 +241,11 @@ public void should_retry_on_next_host_on_connection_error_if_idempotent() { assertThat(result.getExecutionInfo().getErrors()).hasSize(1); Map.Entry error = result.getExecutionInfo().getErrors().get(0); assertThat(error.getKey().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(0).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(0).inetSocketAddress()); assertThat(error.getValue()).isInstanceOf(ClosedConnectionException.class); // the host that returned the response should be node 1. assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(1).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(1).inetSocketAddress()); // should have been retried. counter.assertTotalCount(2); @@ -252,7 +261,7 @@ public void should_retry_on_next_host_on_connection_error_if_idempotent() { @Test public void should_keep_retrying_on_next_host_on_connection_error() { // given a request for which every node will close its connection upon receiving it. - simulacron + SIMULACRON_RULE .cluster() .prime( when(queryStr) @@ -284,7 +293,7 @@ public void should_keep_retrying_on_next_host_on_connection_error() { @Test public void should_not_retry_on_connection_error_if_non_idempotent() { // given a node that will close its connection as result of receiving a query. 
- simulacron + SIMULACRON_RULE .cluster() .node(0) .prime( @@ -316,7 +325,7 @@ public void should_not_retry_on_connection_error_if_non_idempotent() { @Test public void should_retry_on_write_timeout_if_write_type_batch_log() { // given a node that will respond to query with a write timeout with write type of batch log. - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime(when(queryStr).then(writeTimeout(LOCAL_QUORUM, 1, 3, BATCH_LOG))); @@ -368,7 +377,7 @@ public void should_not_retry_on_write_timeout_if_write_type_non_batch_log( com.datastax.oss.simulacron.common.codec.WriteType writeType) { // given a node that will respond to query with a write timeout with write type that is not // batch log. - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime(when(queryStr).then(writeTimeout(LOCAL_QUORUM, 1, 3, writeType))); @@ -394,7 +403,7 @@ public void should_not_retry_on_write_timeout_if_write_type_non_batch_log( @Test public void should_not_retry_on_write_timeout_if_write_type_batch_log_but_non_idempotent() { // given a node that will respond to query with a write timeout with write type of batch log. - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime(when(queryStr).then(writeTimeout(LOCAL_QUORUM, 1, 3, BATCH_LOG))); @@ -423,7 +432,7 @@ public void should_not_retry_on_write_timeout_if_write_type_batch_log_but_non_id @Test public void should_retry_on_next_host_on_unavailable() { // given a node that will respond to a query with an unavailable. - simulacron.cluster().node(0).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); + SIMULACRON_RULE.cluster().node(0).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); // when executing a query. 
ResultSet result = sessionRule.session().execute(queryStr); @@ -433,11 +442,11 @@ public void should_retry_on_next_host_on_unavailable() { assertThat(result.getExecutionInfo().getErrors()).hasSize(1); Map.Entry error = result.getExecutionInfo().getErrors().get(0); assertThat(error.getKey().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(0).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(0).inetSocketAddress()); assertThat(error.getValue()).isInstanceOf(UnavailableException.class); // the host that returned the response should be node 1. assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(1).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(1).inetSocketAddress()); // should have been retried on another host. counter.assertTotalCount(2); @@ -454,8 +463,8 @@ public void should_retry_on_next_host_on_unavailable() { @Test public void should_only_retry_once_on_unavailable() { // given two nodes that will respond to a query with an unavailable. - simulacron.cluster().node(0).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); - simulacron.cluster().node(1).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); + SIMULACRON_RULE.cluster().node(0).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); + SIMULACRON_RULE.cluster().node(1).prime(when(queryStr).then(unavailable(LOCAL_QUORUM, 3, 0))); try { // when executing a query. @@ -465,7 +474,7 @@ public void should_only_retry_once_on_unavailable() { // then we should get an unavailable exception with the host being node 1 (since it was second // tried). 
assertThat(ue.getCoordinator().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(1).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(1).inetSocketAddress()); assertThat(ue.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); assertThat(ue.getRequired()).isEqualTo(3); assertThat(ue.getAlive()).isEqualTo(0); @@ -479,7 +488,7 @@ public void should_only_retry_once_on_unavailable() { @Test public void should_keep_retrying_on_next_host_on_error_response() { // given every node responding with a server error. - simulacron.cluster().prime(when(queryStr).then(serverError("this is a server error"))); + SIMULACRON_RULE.cluster().prime(when(queryStr).then(serverError("this is a server error"))); try { // when executing a query. @@ -507,7 +516,7 @@ public void should_keep_retrying_on_next_host_on_error_response() { @Test public void should_not_retry_on_next_host_on_error_response_if_non_idempotent() { // given every node responding with a server error. - simulacron.cluster().prime(when(queryStr).then(serverError("this is a server error"))); + SIMULACRON_RULE.cluster().prime(when(queryStr).then(serverError("this is a server error"))); try { // when executing a query that is not idempotent diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java index 8c9d35f2b1e..0b851f4b7f6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java @@ -58,10 +58,11 @@ public class PerProfileRetryPolicyIT { // Shared across all tests methods. 
- private static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(2)); + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(2)); - private static SessionRule sessionRule = - SessionRule.builder(simulacron) + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withClass( @@ -75,26 +76,27 @@ public class PerProfileRetryPolicyIT { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); private static String QUERY_STRING = "select * from foo"; private static final SimpleStatement QUERY = SimpleStatement.newInstance(QUERY_STRING); @SuppressWarnings("deprecation") private final QueryCounter counter = - QueryCounter.builder(simulacron.cluster()) + QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(QUERY_STRING)) .build(); @Before public void clear() { - simulacron.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearLogs(); } @BeforeClass public static void setup() { // node 0 will return an unavailable to query. - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime( @@ -103,10 +105,10 @@ public static void setup() { unavailable( com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, 1, 0))); // node 1 will return a valid empty rows response. 
- simulacron.cluster().node(1).prime(when(QUERY_STRING).then(noRows())); + SIMULACRON_RULE.cluster().node(1).prime(when(QUERY_STRING).then(noRows())); // sanity checks - DriverContext context = sessionRule.session().getContext(); + DriverContext context = SESSION_RULE.session().getContext(); DriverConfig config = context.getConfig(); assertThat(config.getProfiles()).containsKeys("profile1", "profile2"); @@ -127,14 +129,14 @@ public static void setup() { @Test(expected = UnavailableException.class) public void should_use_policy_from_request_profile() { // since profile1 uses a NoRetryPolicy, UnavailableException should surface to client. - sessionRule.session().execute(QUERY.setExecutionProfileName("profile1")); + SESSION_RULE.session().execute(QUERY.setExecutionProfileName("profile1")); } @Test public void should_use_policy_from_config_when_not_configured_in_request_profile() { // since profile2 has no configured retry policy, it should defer to configuration which uses // DefaultRetryPolicy, which should try request on next host (host 1). - ResultSet result = sessionRule.session().execute(QUERY.setExecutionProfileName("profile2")); + ResultSet result = SESSION_RULE.session().execute(QUERY.setExecutionProfileName("profile2")); // expect an unavailable exception to be present in errors. 
List> errors = result.getExecutionInfo().getErrors(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java index 1c3b922a870..52802ca98cc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java @@ -47,10 +47,11 @@ @Category(ParallelizableTests.class) public class ExceptionIT { - private static SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(2)); + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(2)); - private static SessionRule sessionRule = - SessionRule.builder(simulacron) + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withClass( @@ -60,19 +61,20 @@ public class ExceptionIT { .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacron).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); private static String QUERY_STRING = "select * from foo"; @Before public void clear() { - simulacron.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearLogs(); } @Test public void should_expose_execution_info_on_exceptions() { // Given - simulacron + SIMULACRON_RULE .cluster() .node(0) .prime( @@ -80,20 +82,20 @@ public void should_expose_execution_info_on_exceptions() { .then( unavailable( com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, 1, 0))); - simulacron + SIMULACRON_RULE .cluster() .node(1) .prime(when(QUERY_STRING).then(PrimeDsl.invalid("Mock error message"))); // Then - assertThatThrownBy(() -> sessionRule.session().execute(QUERY_STRING)) + assertThatThrownBy(() -> 
SESSION_RULE.session().execute(QUERY_STRING)) .isInstanceOf(InvalidQueryException.class) .satisfies( exception -> { ExecutionInfo info = ((InvalidQueryException) exception).getExecutionInfo(); assertThat(info).isNotNull(); assertThat(info.getCoordinator().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(1).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(1).inetSocketAddress()); assertThat(((SimpleStatement) info.getStatement()).getQuery()) .isEqualTo(QUERY_STRING); @@ -114,7 +116,7 @@ public void should_expose_execution_info_on_exceptions() { assertThat(errors).hasSize(1); Map.Entry entry0 = errors.get(0); assertThat(entry0.getKey().getEndPoint().resolve()) - .isEqualTo(simulacron.cluster().node(0).inetSocketAddress()); + .isEqualTo(SIMULACRON_RULE.cluster().node(0).inetSocketAddress()); Throwable node0Exception = entry0.getValue(); assertThat(node0Exception).isInstanceOf(UnavailableException.class); // ExecutionInfo is not exposed for retried errors diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java index b9192786c9b..acabf3a59c7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java @@ -29,13 +29,14 @@ public class RemovedNodeIT { - @ClassRule public static CustomCcmRule ccmRule = CustomCcmRule.builder().withNodes(2).build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(2).build(); @Test public void should_signal_and_destroy_pool_when_node_gets_removed() { RemovalListener removalListener = new RemovalListener(); try (CqlSession session = CqlSession.builder().withNodeStateListener(removalListener).build()) { - ccmRule.getCcmBridge().nodetool(2, "decommission"); + CCM_RULE.getCcmBridge().nodetool(2, 
"decommission"); ConditionChecker.checkThat(() -> removalListener.removedNode != null).becomesTrue(); Map pools = ((DefaultSession) session).getPools(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java index dca70a5b0c2..7fe18e3044f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java @@ -65,11 +65,12 @@ @Category(ParallelizableTests.class) public class RequestProcessorIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -78,15 +79,15 @@ public class RequestProcessorIT { @BeforeClass public static void setupSchema() { // table with clustering key where v1 == v0 * 2. 
- sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( "CREATE TABLE IF NOT EXISTS test (k text, v0 int, v1 int, PRIMARY KEY(k, v0))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); for (int i = 0; i < 100; i++) { - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("INSERT INTO test (k, v0, v1) VALUES (?, ?, ?)") @@ -97,14 +98,14 @@ public static void setupSchema() { private GuavaSession newSession(CqlIdentifier keyspace) { return GuavaSessionUtils.builder() - .addContactEndPoints(ccm.getContactPoints()) + .addContactEndPoints(CCM_RULE.getContactPoints()) .withKeyspace(keyspace) .build(); } @Test public void should_use_custom_request_processor_for_prepareAsync() throws Exception { - try (GuavaSession session = newSession(sessionRule.keyspace())) { + try (GuavaSession session = newSession(SESSION_RULE.keyspace())) { ListenableFuture preparedFuture = session.prepareAsync("select * from test"); @@ -123,7 +124,7 @@ public void should_use_custom_request_processor_for_prepareAsync() throws Except @Test public void should_use_custom_request_processor_for_handling_special_request_type() throws Exception { - try (GuavaSession session = newSession(sessionRule.keyspace())) { + try (GuavaSession session = newSession(SESSION_RULE.keyspace())) { // RequestProcessor executes "select v from test where k = " and returns v as Integer. 
int v1 = session.execute(new KeyRequest(5), KeyRequestProcessor.INT_TYPE); assertThat(v1).isEqualTo(10); // v1 = v0 * 2 @@ -136,7 +137,7 @@ public void should_use_custom_request_processor_for_handling_special_request_typ @Test public void should_use_custom_request_processor_for_executeAsync() throws Exception { - try (GuavaSession session = newSession(sessionRule.keyspace())) { + try (GuavaSession session = newSession(SESSION_RULE.keyspace())) { ListenableFuture future = session.executeAsync("select * from test"); AsyncResultSet result = Uninterruptibles.getUninterruptibly(future); assertThat(Iterables.size(result.currentPage())).isEqualTo(100); @@ -150,7 +151,7 @@ public void should_throw_illegal_argument_exception_if_no_matching_processor_fou // IllegalArgumentException // should be thrown. thrown.expect(IllegalArgumentException.class); - sessionRule + SESSION_RULE .session() .execute(SimpleStatement.newInstance("select * from test"), GuavaSession.ASYNC); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java index 9f10af80395..96d8ca1b68e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java @@ -42,7 +42,7 @@ public class ShutdownIT { @ClassRule - public static SimulacronRule simulacronRule = + public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); private static final String QUERY_STRING = "select * from foo"; @@ -52,10 +52,10 @@ public void should_fail_requests_when_session_is_closed() throws Exception { // Given // Prime with a bit of delay to increase the chance that a query will be aborted in flight when // we force-close the session - simulacronRule + SIMULACRON_RULE .cluster() .prime(when(QUERY_STRING).then(noRows()).delay(20, 
TimeUnit.MILLISECONDS)); - CqlSession session = SessionUtils.newSession(simulacronRule); + CqlSession session = SessionUtils.newSession(SIMULACRON_RULE); // When // Max out the in-flight requests on the connection (from a separate thread pool to get a bit of @@ -131,7 +131,7 @@ public void should_fail_requests_when_session_is_closed() throws Exception { @Test public void should_handle_getting_closed_twice() { - CqlSession session = SessionUtils.newSession(simulacronRule); + CqlSession session = SessionUtils.newSession(SIMULACRON_RULE); session.close(); session.close(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java index cae21420723..f1cae68f0b0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java @@ -56,18 +56,19 @@ public class SpeculativeExecutionIT { private static final SimpleStatement QUERY = SimpleStatement.newInstance(QUERY_STRING); // Shared across all tests methods. 
- public static @ClassRule SimulacronRule simulacron = + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(3)); @SuppressWarnings("deprecation") private final QueryCounter counter = - QueryCounter.builder(simulacron.cluster()) + QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(QUERY_STRING)) .build(); @Before public void clear() { - simulacron.cluster().clearPrimes(true); + SIMULACRON_RULE.cluster().clearPrimes(true); } @Test @@ -308,7 +309,7 @@ public void should_not_speculatively_execute_when_defined_in_profile() { // Build a new Cluster instance for each test, because we need different configurations private CqlSession buildSession(int maxSpeculativeExecutions, long speculativeDelayMs) { return SessionUtils.newSession( - simulacron, + SIMULACRON_RULE, SessionUtils.configLoaderBuilder() .withDuration( DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(SPECULATIVE_DELAY * 10)) @@ -390,7 +391,7 @@ private CqlSession buildSessionWithProfile( builder = builder.startProfile("profile2").withString(DefaultDriverOption.REQUEST_CONSISTENCY, "ONE"); - CqlSession session = SessionUtils.newSession(simulacron, builder.build()); + CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, builder.build()); // validate profile data DriverContext context = session.getContext(); @@ -429,6 +430,6 @@ private CqlSession buildSessionWithProfile( } private void primeNode(int id, PrimeDsl.PrimeBuilder primeBuilder) { - simulacron.cluster().node(id).prime(primeBuilder); + SIMULACRON_RULE.cluster().node(id).prime(primeBuilder); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java index b3fc2644191..e6f8260736f 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryHostnameValidationIT.java @@ -27,7 +27,8 @@ public class DefaultSslEngineFactoryHostnameValidationIT { - @ClassRule public static CustomCcmRule ccm = CustomCcmRule.builder().withSslLocalhostCn().build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withSslLocalhostCn().build(); /** * Ensures that SSL connectivity can be established with hostname validation enabled when the @@ -48,7 +49,7 @@ public void should_connect_if_hostname_validation_enabled_and_hostname_matches() DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java index 51409ccbb53..8fd8567559b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java @@ -28,7 +28,7 @@ public class DefaultSslEngineFactoryIT { - @ClassRule public static CustomCcmRule ccm = CustomCcmRule.builder().withSsl().build(); + @ClassRule public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withSsl().build(); @Test public void should_connect_with_ssl() { @@ -44,7 +44,7 @@ public void should_connect_with_ssl() { CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = 
SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @@ -63,7 +63,7 @@ public void should_not_connect_if_hostname_validation_enabled_and_hostname_does_ DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @@ -75,14 +75,14 @@ public void should_not_connect_if_truststore_not_provided() { .withClass(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, DefaultSslEngineFactory.class) .withBoolean(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, false) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @Test(expected = AllNodesFailedException.class) public void should_not_connect_if_not_using_ssl() { - try (CqlSession session = SessionUtils.newSession(ccm)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java index 927309eef00..2c62cfdbc6e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedIT.java @@ -30,7 +30,8 @@ @Category(IsolatedTests.class) public class DefaultSslEngineFactoryPropertyBasedIT { - @ClassRule public static CustomCcmRule ccm = CustomCcmRule.builder().withSslLocalhostCn().build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = 
CustomCcmRule.builder().withSslLocalhostCn().build(); @Test public void should_connect_with_ssl() { @@ -42,7 +43,7 @@ public void should_connect_with_ssl() { SessionUtils.configLoaderBuilder() .withClass(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, DefaultSslEngineFactory.class) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java index dfe7674c15e..ab6a38a9832 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryPropertyBasedWithClientAuthIT.java @@ -30,7 +30,8 @@ @Category(IsolatedTests.class) public class DefaultSslEngineFactoryPropertyBasedWithClientAuthIT { - @ClassRule public static CustomCcmRule ccm = CustomCcmRule.builder().withSslAuth().build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withSslAuth().build(); @Test public void should_connect_with_ssl_using_client_auth() { @@ -47,7 +48,7 @@ public void should_connect_with_ssl_using_client_auth() { .withClass(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, DefaultSslEngineFactory.class) .withBoolean(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, false) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java index e9f4e5508b8..bd66fd8b04b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryWithClientAuthIT.java @@ -28,7 +28,8 @@ public class DefaultSslEngineFactoryWithClientAuthIT { - @ClassRule public static CustomCcmRule ccm = CustomCcmRule.builder().withSslAuth().build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withSslAuth().build(); @Test public void should_connect_with_ssl_using_client_auth() { @@ -49,7 +50,7 @@ public void should_connect_with_ssl_using_client_auth() { DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } @@ -67,7 +68,7 @@ public void should_not_connect_with_ssl_using_client_auth_if_keystore_not_set() DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { session.execute("select * from system.local"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index dd07cd54b96..3bcc8b76a83 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -59,11 +59,12 @@ 
@Category(ParallelizableTests.class) public class CodecRegistryIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public TestName name = new TestName(); @@ -72,19 +73,19 @@ public class CodecRegistryIT { @BeforeClass public static void createSchema() { // table with simple primary key, single cell. - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("CREATE TABLE IF NOT EXISTS test (k text primary key, v int)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); // table with map value - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder( "CREATE TABLE IF NOT EXISTS test2 (k0 text, k1 int, v map, primary key (k0, k1))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } @@ -130,7 +131,7 @@ public Float parse(String value) { @Test public void should_throw_exception_if_no_codec_registered_for_type_set() { PreparedStatement prepared = - sessionRule.session().prepare("INSERT INTO test (k, v) values (?, ?)"); + SESSION_RULE.session().prepare("INSERT INTO test (k, v) values (?, ?)"); thrown.expect(CodecNotFoundException.class); @@ -141,14 +142,14 @@ public void should_throw_exception_if_no_codec_registered_for_type_set() { @Test public void should_throw_exception_if_no_codec_registered_for_type_get() { PreparedStatement prepared = - sessionRule.session().prepare("INSERT INTO test (k, v) values (?, ?)"); + SESSION_RULE.session().prepare("INSERT INTO test (k, v) values (?, ?)"); 
BoundStatement insert = prepared.boundStatementBuilder().setString(0, name.getMethodName()).setInt(1, 2).build(); - sessionRule.session().execute(insert); + SESSION_RULE.session().execute(insert); ResultSet result = - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.builder("SELECT v from test where k = ?") @@ -173,8 +174,8 @@ public void should_be_able_to_register_and_use_custom_codec() { (CqlSession) SessionUtils.baseBuilder() .addTypeCodecs(new FloatCIntCodec()) - .addContactEndPoints(ccm.getContactPoints()) - .withKeyspace(sessionRule.keyspace()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) .build()) { PreparedStatement prepared = session.prepare("INSERT INTO test (k, v) values (?, ?)"); @@ -294,8 +295,8 @@ public void should_be_able_to_register_and_use_custom_codec_with_generic_type() (CqlSession) SessionUtils.baseBuilder() .addTypeCodecs(optionalMapCodec, mapWithOptionalValueCodec) - .addContactEndPoints(ccm.getContactPoints()) - .withKeyspace(sessionRule.keyspace()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) .build()) { PreparedStatement prepared = session.prepare("INSERT INTO test2 (k0, k1, v) values (?, ?, ?)"); @@ -381,8 +382,8 @@ public void should_be_able_to_handle_empty_collections() { try (CqlSession session = (CqlSession) SessionUtils.baseBuilder() - .addContactEndPoints(ccm.getContactPoints()) - .withKeyspace(sessionRule.keyspace()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) .build()) { // Using prepared statements (CQL type is known) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java index 6ce28ec2419..80d2c08a15c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java @@ -59,11 +59,12 @@ @Category(ParallelizableTests.class) public class ComputedIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -73,13 +74,13 @@ public class ComputedIT { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of( "CREATE TABLE computed_entity(id int, c_id int, v int, primary key (id, c_id))")) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } mapper = new ComputedIT_TestMapperBuilder(session).build(); @@ -87,7 +88,7 @@ public static void setup() { @Test public void should_not_include_computed_values_in_insert() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); ComputedEntity entity = new ComputedEntity(key, 1, 2); @@ -101,7 +102,7 @@ public void should_not_include_computed_values_in_insert() { @Test public void should_return_computed_values_in_select() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); long time = System.currentTimeMillis() - 
1000; @@ -119,7 +120,7 @@ public void should_return_computed_values_in_select() { @Test public void should_not_include_computed_values_in_delete() { // should not be the case since delete operates on primary key.. - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); ComputedEntity entity = new ComputedEntity(key, 1, 2); @@ -135,10 +136,10 @@ public void should_not_include_computed_values_in_delete() { @Test public void should_not_include_computed_values_in_SetEntity() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); PreparedStatement preparedStatement = session.prepare("INSERT INTO computed_entity (id, c_id, v) VALUES (?, ?, ?)"); BoundStatementBuilder builder = preparedStatement.boundStatementBuilder(); @@ -156,14 +157,14 @@ public void should_not_include_computed_values_in_SetEntity() { @Test public void should_return_computed_values_in_GetEntity() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); long time = System.currentTimeMillis() - 1000; ComputedEntity entity = new ComputedEntity(key, 1, 2); computedDao.saveWithTime(entity, 3600, time); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); /* * Query with the computed values included. 
@@ -197,14 +198,14 @@ public void should_return_computed_values_in_GetEntity() { @Test public void should_fail_if_alias_does_not_match_cqlName() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); long time = System.currentTimeMillis() - 1000; ComputedEntity entity = new ComputedEntity(key, 1, 2); computedDao.saveWithTime(entity, 3600, time); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); /* * Query with the computed values included. @@ -230,7 +231,7 @@ public void should_fail_if_alias_does_not_match_cqlName() { @Test public void should_return_computed_values_in_query() { - ComputedDao computedDao = mapper.computedDao(sessionRule.keyspace()); + ComputedDao computedDao = mapper.computedDao(SESSION_RULE.keyspace()); int key = keyProvider.incrementAndGet(); long time = System.currentTimeMillis() - 1000; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java index 7f8b93cf961..236425af51a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java @@ -53,28 +53,28 @@ @Category(ParallelizableTests.class) public class DefaultKeyspaceIT { private static final String DEFAULT_KEYSPACE = "default_keyspace"; - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionWithNoKeyspaceRule = - SessionRule.builder(ccm).withKeyspace(false).build(); - - private static InventoryMapper 
mapper; + private static final SessionRule SESSION_WITH_NO_KEYSPACE_RULE = + SessionRule.builder(CCM_RULE).withKeyspace(false).build(); @ClassRule - public static TestRule chain = - RuleChain.outerRule(ccm).around(sessionRule).around(sessionWithNoKeyspaceRule); + public static final TestRule chain = + RuleChain.outerRule(CCM_RULE).around(SESSION_RULE).around(SESSION_WITH_NO_KEYSPACE_RULE); + + private static InventoryMapper mapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder( String.format( "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}", DEFAULT_KEYSPACE)) - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); session.execute( @@ -82,19 +82,19 @@ public static void setup() { String.format( "CREATE TABLE %s.product_simple_default_ks(id uuid PRIMARY KEY, description text)", DEFAULT_KEYSPACE)) - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); session.execute( SimpleStatement.builder( "CREATE TABLE product_simple_without_ks(id uuid PRIMARY KEY, description text)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); session.execute( SimpleStatement.builder( "CREATE TABLE product_simple_default_ks(id uuid PRIMARY KEY, description text)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); mapper = new DefaultKeyspaceIT_InventoryMapperBuilder(session).build(); @@ -120,7 +120,7 @@ public void should_fail_to_insert_if_default_ks_and_dao_ks_not_provided() { assertThatThrownBy( () -> { InventoryMapperKsNotSet mapper = - new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(sessionRule.session()) + new 
DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(SESSION_RULE.session()) .build(); mapper.productDaoDefaultKsNotSet(); }) @@ -147,7 +147,7 @@ public void should_insert_preferring_dao_factory_ks_over_entity_default_ks() { // Given ProductSimpleDefaultKs product = new ProductSimpleDefaultKs(UUID.randomUUID(), "desc_1"); ProductSimpleDaoDefaultKs dao = - mapper.productDaoEntityDefaultOverridden(sessionRule.keyspace()); + mapper.productDaoEntityDefaultOverridden(SESSION_RULE.keyspace()); assertThat(dao.findById(product.id)).isNull(); // When @@ -167,7 +167,7 @@ public void should_fail_dao_initialization_if_keyspace_not_specified() { // entity has no keyspace InventoryMapperKsNotSet mapper = new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder( - sessionWithNoKeyspaceRule.session()) + SESSION_WITH_NO_KEYSPACE_RULE.session()) .build(); mapper.productDaoDefaultKsNotSet(); }) @@ -186,7 +186,8 @@ public void should_initialize_dao_if_keyspace_not_specified_but_not_needed() { // entity has no keyspace // but dao methods don't require keyspace (GetEntity, SetEntity) InventoryMapperKsNotSet mapper = - new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(sessionWithNoKeyspaceRule.session()) + new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder( + SESSION_WITH_NO_KEYSPACE_RULE.session()) .build(); mapper.productDaoGetAndSetOnly(); } @@ -194,20 +195,22 @@ public void should_initialize_dao_if_keyspace_not_specified_but_not_needed() { @Test public void should_initialize_dao_if_default_ks_provided() { InventoryMapper mapper = - new DefaultKeyspaceIT_InventoryMapperBuilder(sessionWithNoKeyspaceRule.session()).build(); + new DefaultKeyspaceIT_InventoryMapperBuilder(SESSION_WITH_NO_KEYSPACE_RULE.session()) + .build(); // session has no keyspace, but entity does mapper.productDaoDefaultKs(); - mapper.productDaoEntityDefaultOverridden(sessionRule.keyspace()); + mapper.productDaoEntityDefaultOverridden(SESSION_RULE.keyspace()); } @Test public void 
should_initialize_dao_if_dao_ks_provided() { InventoryMapperKsNotSet mapper = - new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(sessionWithNoKeyspaceRule.session()) + new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder( + SESSION_WITH_NO_KEYSPACE_RULE.session()) .build(); // session has no keyspace, but dao has parameter mapper.productDaoDefaultKsNotSetOverridden( - sessionRule.keyspace(), CqlIdentifier.fromCql("product_simple_default_ks")); + SESSION_RULE.keyspace(), CqlIdentifier.fromCql("product_simple_default_ks")); } @Mapper diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java index dc2b707e3d6..cbcf0cc4f5e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java @@ -56,24 +56,26 @@ @CassandraRequirement(min = "2.2", description = "support for unset values") public class DefaultNullSavingStrategyIT { - private static CcmRule ccm = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static TestMapper mapper; private static PreparedStatement prepared; @BeforeClass public static void createSchema() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("CREATE TABLE foo(k int PRIMARY KEY, v int)") - .setExecutionProfile(sessionRule.slowProfile()) + 
.setExecutionProfile(SESSION_RULE.slowProfile()) .build()); mapper = new DefaultNullSavingStrategyIT_TestMapperBuilder(session).build(); - prepared = sessionRule.session().prepare("INSERT INTO foo (k, v) values (:k, :v)"); + prepared = SESSION_RULE.session().prepare("INSERT INTO foo (k, v) values (:k, :v)"); } @Test @@ -142,17 +144,17 @@ private void assertSetEntityStrategy( Foo foo = new Foo(1, null); BoundStatementBuilder builder = prepared.boundStatementBuilder(); daoMethod.accept(builder, foo); - sessionRule.session().execute(builder.build()); + SESSION_RULE.session().execute(builder.build()); validateData(expectedStrategy); } private void reset() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute("INSERT INTO foo (k, v) VALUES (1, 1)"); } private void validateData(NullSavingStrategy expectedStrategy) { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session.execute("SELECT v FROM foo WHERE k = 1").one(); switch (expectedStrategy) { case DO_NOT_SET: diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 087d1f5ae50..5b78e3d58e9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -53,11 +53,12 @@ @Category(ParallelizableTests.class) public class DeleteIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; @@ -65,16 +66,16 @@ public class DeleteIT extends InventoryITBase { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } InventoryMapper inventoryMapper = new DeleteIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); - saleDao = inventoryMapper.productSaleDao(sessionRule.keyspace()); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); + saleDao = inventoryMapper.productSaleDao(SESSION_RULE.keyspace()); } @Before diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java index e6ba38946e8..fadbee7816e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java @@ -69,17 +69,18 @@ @Category(ParallelizableTests.class) @RunWith(DataProviderRunner.class) public class EntityPolymorphismIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static TestMapper mapper; + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @ClassRule public static TestRule chain = 
RuleChain.outerRule(ccm).around(sessionRule); + private static TestMapper mapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of( "CREATE TYPE point2d (\"X\" int, \"Y\" int)", @@ -94,7 +95,7 @@ public static void setup() { "CREATE TABLE tracked_devices (device_id uuid PRIMARY KEY, name text, location text)", "CREATE TABLE simple_devices (id uuid PRIMARY KEY, in_use boolean)")) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } mapper = new EntityPolymorphismIT_TestMapperBuilder(session).build(); } @@ -247,8 +248,8 @@ public void should_set_and_get_entity_then_update_then_delete( Consumer updater, SimpleStatement insertStatement, SimpleStatement selectStatement) { - BaseDao dao = daoProvider.apply(sessionRule.keyspace()); - CqlSession session = sessionRule.session(); + BaseDao dao = daoProvider.apply(SESSION_RULE.keyspace()); + CqlSession session = SESSION_RULE.session(); PreparedStatement prepared = session.prepare(insertStatement); BoundStatementBuilder bs = prepared.boundStatementBuilder(); @@ -280,7 +281,7 @@ public void should_save_and_retrieve_circle() { // * annotations, but these are primarily used for // verifying inheritance behavior in Sphere. // * verifies writeTime is set. - CircleDao dao = mapper.circleDao(sessionRule.keyspace()); + CircleDao dao = mapper.circleDao(SESSION_RULE.keyspace()); long writeTime = System.currentTimeMillis() - 1000; Circle circle = new Circle(new Point2D(11, 22), 12.34); @@ -297,7 +298,7 @@ public void should_save_and_retrieve_rectangle() { // * CqlName("rect_id") on getId renames id property to rect_id // * annotations work, but these are primarily used for // verifying inheritance behavior in Square. 
- RectangleDao dao = mapper.rectangleDao(sessionRule.keyspace()); + RectangleDao dao = mapper.rectangleDao(SESSION_RULE.keyspace()); Rectangle rectangle = new Rectangle(new Point2D(20, 30), new Point2D(50, 60)); dao.save(rectangle); @@ -312,7 +313,7 @@ public void should_save_and_retrieve_square() { // * height remains transient even though we define field/getter/setter // * getBottomLeft() retains CqlName from parent. // * verifies writeTime is set. - SquareDao dao = mapper.squareDao(sessionRule.keyspace()); + SquareDao dao = mapper.squareDao(SESSION_RULE.keyspace()); long writeTime = System.currentTimeMillis() - 1000; Square square = new Square(new Point2D(20, 30), new Point2D(50, 60)); @@ -333,7 +334,7 @@ public void should_save_and_retrieve_sphere() { // * Override setRadius to return Sphere causes no issues. // * Interface method getVolume() is skipped because no field exists. // * WriteTime is inherited, so queried and set. - SphereDao dao = mapper.sphereDao(sessionRule.keyspace()); + SphereDao dao = mapper.sphereDao(SESSION_RULE.keyspace()); long writeTime = System.currentTimeMillis() - 1000; Sphere sphere = new Sphere(new Point3D(11, 22, 33), 34.56); @@ -349,7 +350,7 @@ public void should_save_and_retrieve_device() throws Exception { // verifies the hierarchy scanner behavior around Device: // * by virtue of Assert setting highestAncestor to Asset.class, location property from // LocatableItem should not be included - DeviceDao dao = mapper.deviceDao(sessionRule.keyspace(), CqlIdentifier.fromCql("devices")); + DeviceDao dao = mapper.deviceDao(SESSION_RULE.keyspace(), CqlIdentifier.fromCql("devices")); // save should be successful as location property omitted. Device device = new Device("my device", "New York"); @@ -377,7 +378,7 @@ public void should_save_and_retrieve_tracked_device() throws Exception { // include LocatableItem's location property, even though Asset defines // a strategy that excludes it. 
TrackedDeviceDao dao = - mapper.trackedDeviceDao(sessionRule.keyspace(), CqlIdentifier.fromCql("tracked_devices")); + mapper.trackedDeviceDao(SESSION_RULE.keyspace(), CqlIdentifier.fromCql("tracked_devices")); TrackedDevice device = new TrackedDevice("my device", "New York"); dao.save(device); @@ -400,7 +401,7 @@ public void should_save_and_retrieve_simple_device() { // verifies the hierarchy scanner behavior around SimpleDevice: // * Since SimpleDevice defines a @HierarchyScanStrategy that prevents // scanning of ancestors, only its properties (id, inUse) should be included. - SimpleDeviceDao dao = mapper.simpleDeviceDao(sessionRule.keyspace()); + SimpleDeviceDao dao = mapper.simpleDeviceDao(SESSION_RULE.keyspace()); SimpleDevice device = new SimpleDevice(true); dao.save(device); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java index a3ea7b53517..dd789a70925 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java @@ -48,25 +48,26 @@ @Category(ParallelizableTests.class) public class GetEntityIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : 
createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } InventoryMapper inventoryMapper = new GetEntityIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); dao.save(FLAMETHROWER); dao.save(MP3_DOWNLOAD); @@ -74,7 +75,7 @@ public static void setup() { @Test public void should_get_entity_from_row() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); ResultSet rs = session.execute( SimpleStatement.newInstance( @@ -88,7 +89,7 @@ public void should_get_entity_from_row() { @Test public void should_get_entity_from_first_row_of_result_set() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); ResultSet rs = session.execute("SELECT * FROM product"); Product product = dao.getOne(rs); @@ -98,7 +99,7 @@ public void should_get_entity_from_first_row_of_result_set() { @Test public void should_get_entity_from_first_row_of_async_result_set() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); AsyncResultSet rs = CompletableFutures.getUninterruptibly(session.executeAsync("SELECT * FROM product")); @@ -109,7 +110,7 @@ public void should_get_entity_from_first_row_of_async_result_set() { @Test public void should_get_iterable_from_result_set() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); ResultSet rs = session.execute("SELECT * FROM product"); PagingIterable products = dao.get(rs); assertThat(Sets.newHashSet(products)).containsOnly(FLAMETHROWER, MP3_DOWNLOAD); @@ -117,7 +118,7 @@ public void should_get_iterable_from_result_set() { @Test public void should_get_async_iterable_from_async_result_set() { - CqlSession session = 
sessionRule.session(); + CqlSession session = SESSION_RULE.session(); AsyncResultSet rs = CompletableFutures.getUninterruptibly(session.executeAsync("SELECT * FROM product")); MappedAsyncPagingIterable products = dao.get(rs); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java index c0f9129fc3b..da7cade385b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java @@ -50,34 +50,34 @@ @Category(ParallelizableTests.class) public class InsertIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; - private static InventoryMapper inventoryMapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } - inventoryMapper = new InsertIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + InventoryMapper inventoryMapper = new InsertIT_InventoryMapperBuilder(session).build(); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); } @Before public 
void clearProductData() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("TRUNCATE product") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } @@ -123,7 +123,7 @@ public void should_insert_entity_with_bound_timestamp() { long timestamp = 1234; dao.saveWithBoundTimestamp(FLAMETHROWER, timestamp); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -141,7 +141,7 @@ public void should_insert_entity_with_literal_timestamp() { dao.saveWithLiteralTimestamp(FLAMETHROWER); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -160,7 +160,7 @@ public void should_insert_entity_with_bound_ttl() { int insertedTtl = 86400; dao.saveWithBoundTtl(FLAMETHROWER, insertedTtl); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -177,7 +177,7 @@ public void should_insert_entity_with_literal_ttl() { dao.saveWithLiteralTtl(FLAMETHROWER); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -195,7 +195,7 @@ public void should_insert_entity_with_bound_timestamp_asynchronously() { long timestamp = 1234; CompletableFutures.getUninterruptibly(dao.saveAsyncWithBoundTimestamp(FLAMETHROWER, timestamp)); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NamingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NamingStrategyIT.java index 095ea3113e1..038f3e11761 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NamingStrategyIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NamingStrategyIT.java @@ -61,17 +61,18 @@ @Category(ParallelizableTests.class) public class NamingStrategyIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static TestMapper mapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of( @@ -80,7 +81,7 @@ public static void setup() { "CREATE TABLE test_NameConverterEntity(test_entityId int primary key)", "CREATE TABLE custom_entity(custom_id int primary key)")) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } mapper = new NamingStrategyIT_TestMapperBuilder(session).build(); @@ -88,7 +89,7 @@ public static void setup() { @Test public void should_map_entity_with_default_naming_strategy() { - DefaultStrategyEntityDao dao = mapper.defaultStrategyEntityDao(sessionRule.keyspace()); + DefaultStrategyEntityDao dao = mapper.defaultStrategyEntityDao(SESSION_RULE.keyspace()); DefaultStrategyEntity entity = new DefaultStrategyEntity(1); dao.save(entity); @@ -98,7 +99,7 @@ public void should_map_entity_with_default_naming_strategy() { @Test public void should_map_entity_with_non_default_convention() { - UpperSnakeCaseEntityDao dao = mapper.upperSnakeCaseEntityDao(sessionRule.keyspace()); + UpperSnakeCaseEntityDao dao = 
mapper.upperSnakeCaseEntityDao(SESSION_RULE.keyspace()); UpperSnakeCaseEntity entity = new UpperSnakeCaseEntity(1); dao.save(entity); @@ -108,7 +109,7 @@ public void should_map_entity_with_non_default_convention() { @Test public void should_map_entity_with_name_converter() { - NameConverterEntityDao dao = mapper.nameConverterEntityDao(sessionRule.keyspace()); + NameConverterEntityDao dao = mapper.nameConverterEntityDao(SESSION_RULE.keyspace()); NameConverterEntity entity = new NameConverterEntity(1); dao.save(entity); @@ -118,7 +119,7 @@ public void should_map_entity_with_name_converter() { @Test public void should_map_entity_with_custom_names() { - CustomNamesEntityDao dao = mapper.customNamesEntityDao(sessionRule.keyspace()); + CustomNamesEntityDao dao = mapper.customNamesEntityDao(SESSION_RULE.keyspace()); CustomNamesEntity entity = new CustomNamesEntity(1); dao.save(entity); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java index 0f7bad30c15..6bcbde6ffff 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java @@ -56,11 +56,12 @@ @CassandraRequirement(min = "2.2", description = "support for unset values") public class NestedUdtIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static UUID CONTAINER_ID = UUID.randomUUID(); private static final Container SAMPLE_CONTAINER = @@ -97,7 +98,7 
@@ public class NestedUdtIT { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of( @@ -110,19 +111,19 @@ public static void setup() { + "map3 frozen>>>" + ")")) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } UdtsMapper udtsMapper = new NestedUdtIT_UdtsMapperBuilder(session).build(); - containerDao = udtsMapper.containerDao(sessionRule.keyspace()); + containerDao = udtsMapper.containerDao(SESSION_RULE.keyspace()); } @Before public void clearContainerData() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("TRUNCATE container") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java index 51cf85f549e..32f71041b19 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java @@ -49,27 +49,28 @@ @Category(ParallelizableTests.class) public class NullSavingStrategyIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = - SessionRule.builder(ccm) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( DriverConfigLoader.programmaticBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") .build()) .build(); - private static InventoryMapper mapper; + @ClassRule + public static 
final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + private static InventoryMapper mapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder( "CREATE TABLE product_simple(id uuid PRIMARY KEY, description text)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); mapper = new NullSavingStrategyIT_InventoryMapperBuilder(session).build(); @@ -77,7 +78,7 @@ public static void setup() { @Test public void should_throw_when_try_to_construct_dao_with_DO_NOT_SET_strategy_for_V3_protocol() { - assertThatThrownBy(() -> mapper.productDao(sessionRule.keyspace())) + assertThatThrownBy(() -> mapper.productDao(SESSION_RULE.keyspace())) .isInstanceOf(MapperException.class) .hasMessage("You cannot use NullSavingStrategy.DO_NOT_SET for protocol version V3."); } @@ -85,7 +86,7 @@ public void should_throw_when_try_to_construct_dao_with_DO_NOT_SET_strategy_for_ @Test public void should_throw_when_try_to_construct_dao_with_DO_NOT_SET_implicit_strategy_for_V3_protocol() { - assertThatThrownBy(() -> mapper.productDaoImplicit(sessionRule.keyspace())) + assertThatThrownBy(() -> mapper.productDaoImplicit(SESSION_RULE.keyspace())) .isInstanceOf(MapperException.class) .hasMessage("You cannot use NullSavingStrategy.DO_NOT_SET for protocol version V3."); } @@ -93,27 +94,27 @@ public void should_throw_when_try_to_construct_dao_with_DO_NOT_SET_strategy_for_ @Test public void should_throw_when_try_to_construct_dao_with_DO_NOT_SET_strategy_set_globally_for_V3_protocol() { - assertThatThrownBy(() -> mapper.productDaoDefault(sessionRule.keyspace())) + assertThatThrownBy(() -> mapper.productDaoDefault(SESSION_RULE.keyspace())) .isInstanceOf(MapperException.class) .hasMessage("You cannot use 
NullSavingStrategy.DO_NOT_SET for protocol version V3."); } @Test public void should_do_not_throw_when_construct_dao_with_global_level_SET_TO_NULL() { - assertThatCode(() -> mapper.productDaoGlobalLevelSetToNull(sessionRule.keyspace())) + assertThatCode(() -> mapper.productDaoGlobalLevelSetToNull(SESSION_RULE.keyspace())) .doesNotThrowAnyException(); } @Test public void should_do_not_throw_when_construct_dao_with_parent_interface_SET_TO_NULL() { - assertThatCode(() -> mapper.productDaoSetToNullFromParentInterface(sessionRule.keyspace())) + assertThatCode(() -> mapper.productDaoSetToNullFromParentInterface(SESSION_RULE.keyspace())) .doesNotThrowAnyException(); } @Test public void should_do_not_throw_when_construct_dao_with_global_level_DO_NOT_SET_and_local_override_to_SET_TO_NULL() { - assertThatCode(() -> mapper.productDaoLocalOverride(sessionRule.keyspace())) + assertThatCode(() -> mapper.productDaoLocalOverride(SESSION_RULE.keyspace())) .doesNotThrowAnyException(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java index 024cff2e172..55d4c6c2f00 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java @@ -45,11 +45,12 @@ @Category(ParallelizableTests.class) public class PrimitivesIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = 
ExpectedException.none(); @@ -57,7 +58,7 @@ public class PrimitivesIT { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder( "CREATE TABLE primitives_entity(" @@ -68,14 +69,14 @@ public static void setup() { + "long_col bigint, " + "float_col float," + "double_col double)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); mapper = new PrimitivesIT_TestMapperBuilder(session).build(); } @Test public void should_not_include_computed_values_in_insert() { - PrimitivesDao primitivesDao = mapper.primitivesDao(sessionRule.keyspace()); + PrimitivesDao primitivesDao = mapper.primitivesDao(SESSION_RULE.keyspace()); PrimitivesEntity expected = new PrimitivesEntity(0, true, (byte) 2, (short) 3, 4L, 5.0f, 6.0d); primitivesDao.save(expected); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java index 1cdb28ddedf..571852f09d5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java @@ -46,9 +46,11 @@ @Category(ParallelizableTests.class) public class QueryKeyspaceAndTableIT { - private static CcmRule ccm = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static final CqlIdentifier FOO_TABLE_ID = 
CqlIdentifier.fromCql("foo"); private static final CqlIdentifier OTHER_KEYSPACE = @@ -60,7 +62,7 @@ public class QueryKeyspaceAndTableIT { @BeforeClass public static void createSchema() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of( @@ -70,7 +72,7 @@ public static void createSchema() { OTHER_KEYSPACE.asCql(false)), String.format("CREATE TABLE %s.foo(k int PRIMARY KEY)", OTHER_KEYSPACE.asCql(false)))) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } session.execute("INSERT INTO foo (k) VALUES (1)"); @@ -85,7 +87,7 @@ public static void createSchema() { @Test public void should_substitute_keyspaceId_and_tableId() { DaoWithKeyspaceAndTableId dao = - mapper.daoWithKeyspaceAndTableId(sessionRule.keyspace(), FOO_TABLE_ID); + mapper.daoWithKeyspaceAndTableId(SESSION_RULE.keyspace(), FOO_TABLE_ID); assertThat(dao.count()).isEqualTo(1); } @@ -106,7 +108,7 @@ public void should_fail_to_substitute_tableId_if_dao_has_no_table() { "Cannot substitute ${tableId} in query " + "'SELECT count(*) FROM ${keyspaceId}.${tableId}': " + "the DAO wasn't built with a table"); - mapper.daoWithKeyspaceAndTableId(sessionRule.keyspace(), null); + mapper.daoWithKeyspaceAndTableId(SESSION_RULE.keyspace(), null); } @Test @@ -128,7 +130,7 @@ public void should_fail_to_substitute_qualifiedTableId_if_dao_has_no_table() { "Cannot substitute ${qualifiedTableId} in query " + "'SELECT count(*) FROM ${qualifiedTableId}': " + "the DAO wasn't built with a table"); - mapper.daoWithQualifiedTableId(sessionRule.keyspace(), null); + mapper.daoWithQualifiedTableId(SESSION_RULE.keyspace(), null); } @Dao diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java index 2b8d887927b..6cb5c37f2ac 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java @@ -53,9 +53,11 @@ @Category(ParallelizableTests.class) public class QueryProviderIT { - private static CcmRule ccm = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); // Dummy counter to exercize the "custom state" feature: it gets incremented each time the query // provider is called. @@ -65,21 +67,21 @@ public class QueryProviderIT { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder( "CREATE TABLE sensor_reading(id int, month int, day int, value double, " + "PRIMARY KEY (id, month, day)) " + "WITH CLUSTERING ORDER BY (month DESC, day DESC)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); SensorMapper mapper = new QueryProviderIT_SensorMapperBuilder(session) .withCustomState("executionCount", executionCount) .build(); - dao = mapper.sensorDao(sessionRule.keyspace()); + dao = mapper.sensorDao(SESSION_RULE.keyspace()); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java index 0b156611347..bf2091ef56e 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java @@ -60,9 +60,11 @@ @Category(ParallelizableTests.class) public class QueryReturnTypesIT { - private static CcmRule ccm = CcmRule.getInstance(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -70,16 +72,16 @@ public class QueryReturnTypesIT { @BeforeClass public static void createSchema() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder( "CREATE TABLE test_entity(id int, rank int, value int, PRIMARY KEY(id, rank))") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); TestMapper mapper = new QueryReturnTypesIT_TestMapperBuilder(session).build(); - dao = mapper.dao(sessionRule.keyspace(), CqlIdentifier.fromCql("test_entity")); + dao = mapper.dao(SESSION_RULE.keyspace(), CqlIdentifier.fromCql("test_entity")); } @Before diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java index 1d8336053cb..dcf1cc4e0f3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java @@ -48,26 +48,26 @@ @CassandraRequirement(min = "3.4", 
description = "Creates a SASI index") public class SelectCustomWhereClauseIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } InventoryMapper inventoryMapper = new SelectCustomWhereClauseIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); } @Before diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java index cc1543cd56c..e47b86f74f3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java @@ -46,11 +46,11 @@ @Category(ParallelizableTests.class) public class SelectIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule 
sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; @@ -58,16 +58,16 @@ public class SelectIT extends InventoryITBase { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } InventoryMapper inventoryMapper = new SelectIT_InventoryMapperBuilder(session) - .withDefaultKeyspace(sessionRule.keyspace()) + .withDefaultKeyspace(SESSION_RULE.keyspace()) .build(); dao = inventoryMapper.productDao(); saleDao = inventoryMapper.productSaleDao(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java index 2bbcf3bef98..5b479a13f55 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java @@ -52,26 +52,26 @@ @CassandraRequirement(min = "3.6", description = "Uses PER PARTITION LIMIT") public class SelectOtherClausesIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public 
static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static SimpleDao dao; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); for (String query : ImmutableList.of("CREATE TABLE simple (k int, cc int, v int, PRIMARY KEY (k, cc))")) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } TestMapper mapper = TestMapper.builder(session).build(); - dao = mapper.simpleDao(sessionRule.keyspace()); + dao = mapper.simpleDao(SESSION_RULE.keyspace()); for (int k = 0; k < 2; k++) { for (int cc = 0; cc < 10; cc++) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java index b382dc56349..32be286325f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java @@ -45,11 +45,11 @@ @Category(ParallelizableTests.class) public class SetEntityIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; @@ -57,20 +57,20 @@ public class SetEntityIT extends InventoryITBase { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for 
(String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } inventoryMapper = new SetEntityIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); } @Test public void should_set_entity_on_bound_statement() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); PreparedStatement preparedStatement = session.prepare("INSERT INTO product (id, description, dimensions) VALUES (?, ?, ?)"); BoundStatement boundStatement = preparedStatement.bind(); @@ -82,7 +82,7 @@ public void should_set_entity_on_bound_statement() { @Test public void should_set_entity_on_bound_statement_builder() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); PreparedStatement preparedStatement = session.prepare("INSERT INTO product (id, description, dimensions) VALUES (?, ?, ?)"); BoundStatementBuilder builder = preparedStatement.boundStatementBuilder(); @@ -95,7 +95,7 @@ public void should_set_entity_on_bound_statement_builder() { @Test public void should_set_entity_on_bound_statement_setting_null() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); PreparedStatement preparedStatement = session.prepare("INSERT INTO product (id, description, dimensions) VALUES (?, ?, ?)"); BoundStatementBuilder builder = preparedStatement.boundStatementBuilder(); @@ -110,7 +110,7 @@ public void should_set_entity_on_bound_statement_setting_null() { @Test public void should_set_entity_on_bound_statement_without_setting_null() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); PreparedStatement preparedStatement = 
session.prepare("INSERT INTO product (id, description, dimensions) VALUES (?, ?, ?)"); BoundStatementBuilder builder = preparedStatement.boundStatementBuilder(); @@ -126,11 +126,11 @@ public void should_set_entity_on_bound_statement_without_setting_null() { @Test public void should_set_entity_on_udt_value() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); UserDefinedType udtType = session .getMetadata() - .getKeyspace(sessionRule.keyspace()) + .getKeyspace(SESSION_RULE.keyspace()) .orElseThrow(AssertionError::new) .getUserDefinedType("dimensions") .orElseThrow(AssertionError::new); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java index ed5990bf542..bc19efbe52f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java @@ -62,10 +62,13 @@ public class StatementAttributesIT { - private static SimulacronRule simulacronRule = + private static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - private static SessionRule sessionRule = SessionRule.builder(simulacronRule).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(simulacronRule).around(sessionRule); + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); @Rule public ExpectedException thrown = ExpectedException.none(); @@ -98,90 +101,90 @@ public static void setupClass() { primeUpdateQuery(); InventoryMapper inventoryMapper = - new StatementAttributesIT_InventoryMapperBuilder(sessionRule.session()).build(); + new 
StatementAttributesIT_InventoryMapperBuilder(SESSION_RULE.session()).build(); dao = inventoryMapper.simpleDao(); } @Before public void setup() { - simulacronRule.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearLogs(); } @Test public void should_honor_runtime_attributes_on_insert() { dao.save(simple, statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), true); } @Test public void should_honor_annotation_attributes_on_insert() { dao.save2(simple); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @Test public void should_use_runtime_attributes_over_annotation_attributes() { dao.save3(simple, statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @Test public void should_honor_runtime_attributes_on_delete() { dao.delete(simple, statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), true); } @Test public void should_honor_annotation_attributes_on_delete() { dao.delete2(simple); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @Test public void should_honor_runtime_attributes_on_select() { dao.findByPk(simple.getPk(), statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); 
validateQueryOptions(report.getQueryLogs().get(0), true); } @Test public void should_honor_annotation_attributes_on_select() { dao.findByPk2(simple.getPk()); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @Test public void should_honor_runtime_attributes_on_query() { dao.count(simple.getPk(), statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), true); } @Test public void should_honor_annotation_attributes_on_query() { dao.count2(simple.getPk()); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @Test public void should_honor_runtime_attributes_on_update() { dao.update(simple, statementFunction); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), true); } @Test public void should_honor_annotation_attributes_on_update() { dao.update2(simple); - ClusterQueryLogReport report = simulacronRule.cluster().getLogs(); + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); validateQueryOptions(report.getQueryLogs().get(0), false); } @@ -195,7 +198,7 @@ public void should_fail_runtime_attributes_bad() { private static void primeInsertQuery() { Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); - simulacronRule + SIMULACRON_RULE .cluster() .prime( when(query( @@ -211,7 +214,7 @@ private static void primeInsertQuery() { private static void primeDeleteQuery() { Map params = 
ImmutableMap.of("pk", simple.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); - simulacronRule + SIMULACRON_RULE .cluster() .prime( when(query( @@ -228,7 +231,7 @@ private static void primeDeleteQuery() { private static void primeSelectQuery() { Map params = ImmutableMap.of("pk", simple.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); - simulacronRule + SIMULACRON_RULE .cluster() .prime( when(query( @@ -245,7 +248,7 @@ private static void primeSelectQuery() { private static void primeCountQuery() { Map params = ImmutableMap.of("pk", simple.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); - simulacronRule + SIMULACRON_RULE .cluster() .prime( when(query( @@ -262,7 +265,7 @@ private static void primeCountQuery() { private static void primeUpdateQuery() { Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); - simulacronRule + SIMULACRON_RULE .cluster() .prime( when(query( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/TransientIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/TransientIT.java index 9dd2ff85d7b..0200a463da3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/TransientIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/TransientIT.java @@ -47,11 +47,11 @@ @Category(ParallelizableTests.class) public class TransientIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static TestMapper mapper; @@ -59,11 +59,11 
@@ public class TransientIT { @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("CREATE TABLE entity(id int primary key, v int)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); mapper = new TransientIT_TestMapperBuilder(session).build(); @@ -73,7 +73,7 @@ public static void setup() { public void should_ignore_field_with_transient_annotated_field() { EntityWithTransientAnnotatedFieldDao dao = mapper.entityWithTransientAnnotatedFieldDao( - sessionRule.keyspace(), CqlIdentifier.fromCql("entity")); + SESSION_RULE.keyspace(), CqlIdentifier.fromCql("entity")); int key = keyProvider.incrementAndGet(); EntityWithTransientAnnotatedField entity = new EntityWithTransientAnnotatedField(key, 1, 7); @@ -90,7 +90,7 @@ public void should_ignore_field_with_transient_annotated_field() { public void should_ignore_field_with_transient_annotated_getter() { EntityWithTransientAnnotatedGetterDao dao = mapper.entityWithTransientAnnotatedGetterDao( - sessionRule.keyspace(), CqlIdentifier.fromCql("entity")); + SESSION_RULE.keyspace(), CqlIdentifier.fromCql("entity")); int key = keyProvider.incrementAndGet(); EntityWithTransientAnnotatedGetter entity = new EntityWithTransientAnnotatedGetter(key, 1, 7); @@ -107,7 +107,7 @@ public void should_ignore_field_with_transient_annotated_getter() { public void should_ignore_field_with_transient_keyword() { EntityWithTransientKeywordDao dao = mapper.entityWithTransientKeywordDao( - sessionRule.keyspace(), CqlIdentifier.fromCql("entity")); + SESSION_RULE.keyspace(), CqlIdentifier.fromCql("entity")); int key = keyProvider.incrementAndGet(); EntityWithTransientKeyword entity = new EntityWithTransientKeyword(key, 1, 7); @@ -124,7 +124,7 @@ public void should_ignore_field_with_transient_keyword() { public void 
should_ignore_properties_included_in_transient_properties_keyword() { EntityWithTransientPropertiesAnnotationDao dao = mapper.entityWithTransientPropertiesAnnotation( - sessionRule.keyspace(), CqlIdentifier.fromCql("entity")); + SESSION_RULE.keyspace(), CqlIdentifier.fromCql("entity")); int key = keyProvider.incrementAndGet(); EntityWithTransientPropertiesAnnotation entity = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java index eb0fa32e3fa..11636cd6887 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java @@ -48,34 +48,34 @@ @CassandraRequirement(min = "3.11.0", description = "UDT fields in IF clause") public class UpdateCustomIfClauseIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; - private static InventoryMapper inventoryMapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } - inventoryMapper = new 
UpdateCustomIfClauseIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + InventoryMapper inventoryMapper = + new UpdateCustomIfClauseIT_InventoryMapperBuilder(session).build(); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); } @Before public void clearProductData() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("TRUNCATE product") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index d81627fda5a..878d1fa1db7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -50,34 +50,34 @@ @Category(ParallelizableTests.class) public class UpdateIT extends InventoryITBase { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; private static InventoryMapper inventoryMapper; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(ccm)) { + for (String query : createStatements(CCM_RULE)) { session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + 
SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } inventoryMapper = new UpdateIT_InventoryMapperBuilder(session).build(); - dao = inventoryMapper.productDao(sessionRule.keyspace()); + dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); } @Before public void clearProductData() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("TRUNCATE product") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); } @@ -148,7 +148,7 @@ public void should_update_entity_with_timestamp() { long timestamp = 1234; dao.updateWithBoundTimestamp(FLAMETHROWER, timestamp); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -166,7 +166,7 @@ public void should_update_entity_with_timestamp_literal() { dao.updateWithTimestampLiteral(FLAMETHROWER); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -185,7 +185,7 @@ public void should_update_entity_with_ttl() { int ttl = 100_000; dao.updateWithBoundTtl(FLAMETHROWER, ttl); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -202,7 +202,7 @@ public void should_update_entity_with_ttl_literal() { dao.updateWithTtlLiteral(FLAMETHROWER); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -221,7 +221,7 @@ public void should_update_entity_with_timestamp_asynchronously() { CompletableFutures.getUninterruptibly( dao.updateAsyncWithBoundTimestamp(FLAMETHROWER, timestamp)); - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); Row row = session .execute( @@ -279,7 +279,7 @@ public void should_not_update_entity_if_not_exists_asynchronously() { 
@Test public void should_throw_when_try_to_use_dao_with_update_only_pk() { - assertThatThrownBy(() -> inventoryMapper.onlyPkDao(sessionRule.keyspace())) + assertThatThrownBy(() -> inventoryMapper.onlyPkDao(SESSION_RULE.keyspace())) .isInstanceOf(MapperException.class) .hasMessageContaining("Entity OnlyPK does not have any non PK columns."); } @@ -323,10 +323,10 @@ public void should_not_update_entity_and_return_was_not_applied_async() { @Test public void should_update_entity_without_pk_placeholders_matching_custom_where_in_clause() { // given - ProductWithoutIdDao dao = inventoryMapper.productWithoutIdDao(sessionRule.keyspace()); + ProductWithoutIdDao dao = inventoryMapper.productWithoutIdDao(SESSION_RULE.keyspace()); UUID idOne = UUID.randomUUID(); UUID idTwo = UUID.randomUUID(); - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.newInstance( @@ -334,7 +334,7 @@ public void should_update_entity_without_pk_placeholders_matching_custom_where_i idOne, 1, "a")); - sessionRule + SESSION_RULE .session() .execute( SimpleStatement.newInstance( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java index c3676206bfb..4281d392a84 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java @@ -45,24 +45,24 @@ */ @Category(ParallelizableTests.class) public class UpdateNamingIT { - private static CcmRule ccm = CcmRule.getInstance(); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); - private static SessionRule sessionRule = SessionRule.builder(ccm).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static TestDao dao; @BeforeClass public static void setup() { - CqlSession session = sessionRule.session(); + CqlSession session = SESSION_RULE.session(); session.execute( SimpleStatement.builder("CREATE TABLE foo(mykey int PRIMARY KEY, value int)") - .setExecutionProfile(sessionRule.slowProfile()) + .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); TestMapper mapper = - TestMapper.builder(session).withDefaultKeyspace(sessionRule.keyspace()).build(); + TestMapper.builder(session).withDefaultKeyspace(SESSION_RULE.keyspace()).build(); dao = mapper.dao(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiBaseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiBaseIT.java index 934368d6168..2ead9e6faef 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiBaseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiBaseIT.java @@ -45,7 +45,8 @@ @Category(IsolatedTests.class) public abstract class OsgiBaseIT { - @ClassRule public static CustomCcmRule ccmRule = CustomCcmRule.builder().withNodes(1).build(); + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); /** @return config loader to be used to create session. */ protected abstract DriverConfigLoader configLoader(); @@ -58,7 +59,7 @@ public abstract class OsgiBaseIT { public void should_connect_and_query() { SessionBuilder builder = CqlSession.builder() - .addContactEndPoints(ccmRule.getContactPoints()) + .addContactEndPoints(CCM_RULE.getContactPoints()) // use the driver's ClassLoader instead of the OSGI application thread's. 
.withClassLoader(CqlSession.class.getClassLoader()) .withConfigLoader(configLoader()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java index ef91c928a0e..8e70627462e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java @@ -49,30 +49,32 @@ @Category(ParallelizableTests.class) @CassandraRequirement(min = "2.2", description = "JSON support in Cassandra was added in 2.2") public class JsonInsertIT { - private static final CcmRule ccmRule = CcmRule.getInstance(); - private static final JacksonJsonCodec JACKSON_JSON_CODEC = - new JacksonJsonCodec<>(User.class); + private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) .build()) .build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static final JacksonJsonCodec JACKSON_JSON_CODEC = + new JacksonJsonCodec<>(User.class); @BeforeClass public static void setup() { - sessionRule + SESSION_RULE .session() .execute("CREATE TABLE json_jackson_row(id int PRIMARY KEY, name text, age int)"); } @After public void clearTable() { - sessionRule.session().execute("TRUNCATE TABLE json_jackson_row"); + SESSION_RULE.session().execute("TRUNCATE TABLE json_jackson_row"); } @Test @@ -195,8 +197,8 @@ public void should_insert_json_using_simple_statement_with_codec_registry() { private CqlSession sessionWithCustomCodec() 
{ return (CqlSession) SessionUtils.baseBuilder() - .addContactEndPoints(ccmRule.getContactPoints()) - .withKeyspace(sessionRule.keyspace()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) .addTypeCodecs(JACKSON_JSON_CODEC) .build(); } @@ -205,8 +207,8 @@ private CqlSession sessionWithCustomCodec() { private CqlSession sessionWithoutCustomCodec() { return (CqlSession) SessionUtils.baseBuilder() - .addContactEndPoints(ccmRule.getContactPoints()) - .withKeyspace(sessionRule.keyspace()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) .build(); } From f6052fe388c89c7e8681f5a134eb96c104cfce24 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 14:33:13 -0700 Subject: [PATCH 050/979] Adjust ConnectIT now that thrown exception is not wrapped --- .../datastax/oss/driver/core/ConnectIT.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 79acda6dd3c..6475fab9f84 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -17,12 +17,11 @@ import static com.datastax.oss.driver.api.testinfra.utils.ConditionChecker.checkThat; import static java.util.concurrent.TimeUnit.SECONDS; -import static junit.framework.TestCase.fail; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import 
com.datastax.oss.driver.api.core.context.DriverContext; @@ -119,18 +118,19 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce */ @Test public void should_cleanup_on_lbp_init_failure() { - try { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .without(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER) - .build(); - CqlSession.builder() - .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) - .withConfigLoader(loader) - .build(); - fail("Should have thrown a DriverException for no DC with explicit contact point"); - } catch (DriverException ignored) { - } + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .without(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER) + .build(); + assertThatThrownBy( + () -> + CqlSession.builder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withConfigLoader(loader) + .build()) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "You provided explicit contact points, the local DC must be specified"); // One second should be plenty of time for connections to close server side checkThat(() -> SIMULACRON_RULE.cluster().getConnections().getConnections().isEmpty()) .before(1, SECONDS) From 0902662d418fa3fe6ac256831faecdf4754589d1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 14:33:37 -0700 Subject: [PATCH 051/979] Add version requirement on DeleteIT --- .../test/java/com/datastax/oss/driver/mapper/DeleteIT.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 5b78e3d58e9..5fb3d49ba60 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -35,6 +35,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import 
com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -51,6 +52,9 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) +@CassandraRequirement( + min = "3.0", + description = ">= in WHERE clause not supported in legacy versions") public class DeleteIT extends InventoryITBase { private static final CcmRule CCM_RULE = CcmRule.getInstance(); From 340f3505108911feff2deb911387bee1c06bcb74 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 14:36:27 -0700 Subject: [PATCH 052/979] Adjust null saving strategy in UpdateNamingIT --- .../java/com/datastax/oss/driver/mapper/UpdateNamingIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java index 4281d392a84..740ef6c53de 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateNamingIT.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.mapper; +import static com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy.SET_TO_NULL; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; @@ -22,6 +23,7 @@ import com.datastax.oss.driver.api.mapper.MapperBuilder; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Entity; 
import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.NamingStrategy; @@ -85,6 +87,7 @@ static MapperBuilder builder(CqlSession session) { } @Dao + @DefaultNullSavingStrategy(SET_TO_NULL) public interface TestDao { @Select Foo get(int key); From 23fd104c6b8d11424a05a941c98143866ba9d7a8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 15:11:17 -0700 Subject: [PATCH 053/979] Add version requirement in PrimitivesIT --- .../test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java index 55d4c6c2f00..73896740ca1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -43,6 +44,7 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) +@CassandraRequirement(min = "2.2", description = "smallint is a reserved keyword in 2.1") public class PrimitivesIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); From bfce1bec6f00c9f52a4680a083e2a3f5e4faef58 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 8 Aug 2019 15:26:11 -0700 Subject: [PATCH 054/979] Don't use exact match to check TTL in ComputedIT --- .../java/com/datastax/oss/driver/mapper/ComputedIT.java | 7 ++++--- 
1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java index 80d2c08a15c..48952b0c823 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static com.datastax.oss.driver.assertions.Assertions.assertThat; +import static org.assertj.core.data.Offset.offset; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; @@ -113,7 +114,7 @@ public void should_return_computed_values_in_select() { assertThat(retrievedValue.getId()).isEqualTo(key); assertThat(retrievedValue.getcId()).isEqualTo(1); assertThat(retrievedValue.getV()).isEqualTo(2); - assertThat(retrievedValue.getTtl()).isEqualTo(3600); + assertThat(retrievedValue.getTtl()).isCloseTo(3600, offset(10)); assertThat(retrievedValue.getWritetime()).isEqualTo(time); } @@ -192,7 +193,7 @@ public void should_return_computed_values_in_GetEntity() { assertThat(retrievedValue.getV()).isEqualTo(2); // these should be set - assertThat(retrievedValue.getTtl()).isEqualTo(3600); + assertThat(retrievedValue.getTtl()).isCloseTo(3600, offset(10)); assertThat(retrievedValue.getWritetime()).isEqualTo(time); } @@ -244,7 +245,7 @@ public void should_return_computed_values_in_query() { assertThat(retrievedValue.getV()).isEqualTo(2); // these should be set - assertThat(retrievedValue.getTtl()).isEqualTo(3600); + assertThat(retrievedValue.getTtl()).isCloseTo(3600, offset(10)); assertThat(retrievedValue.getWritetime()).isEqualTo(time); } From 67f650040872fe9dd6d2cb46a6f6e737cd5c044a Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 12 Aug 2019 15:28:36 -0700 Subject: [PATCH 055/979] Fix minor details around programmatic authentication --- 
.../oss/driver/api/core/session/SessionBuilder.java | 4 ++-- .../driver/internal/core/context/DefaultDriverContext.java | 6 +++--- core/src/main/resources/reference.conf | 3 +++ 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 644dbb043d7..e0e36bfeb8f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -208,7 +208,7 @@ public SelfT withSchemaChangeListener(@Nullable SchemaChangeListener schemaChang } /** - * Register a request tracker to use with the session. + * Registers a request tracker to use with the session. * *

    <p>If the tracker is specified programmatically with this method, it overrides the * configuration (that is, the {@code request.tracker.class} option will be ignored). @@ -220,7 +220,7 @@ public SelfT withRequestTracker(@Nullable RequestTracker requestTracker) { } /** - * Register an authentication provider to use with the session. + * Registers an authentication provider to use with the session. * *

    If the provider is specified programmatically with this method, it overrides the * configuration (that is, the {@code advanced.auth-provider.class} option will be ignored). diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index e0636ef9438..c0687b65ad4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -193,7 +193,6 @@ public class DefaultDriverContext implements InternalDriverContext { private final NodeStateListener nodeStateListenerFromBuilder; private final SchemaChangeListener schemaChangeListenerFromBuilder; private final RequestTracker requestTrackerFromBuilder; - private final AuthProvider authProviderFromBuilder; private final Map localDatacentersFromBuilder; private final Map> nodeFiltersFromBuilder; private final ClassLoader classLoader; @@ -226,11 +225,12 @@ public DefaultDriverContext( () -> buildSchemaChangeListener(schemaChangeListenerFromBuilder), cycleDetector); this.requestTrackerFromBuilder = programmaticArguments.getRequestTracker(); - this.authProviderFromBuilder = programmaticArguments.getAuthProvider(); this.authProviderRef = new LazyReference<>( - "authProvider", () -> buildAuthProvider(authProviderFromBuilder), cycleDetector); + "authProvider", + () -> buildAuthProvider(programmaticArguments.getAuthProvider()), + cycleDetector); this.requestTrackerRef = new LazyReference<>( "requestTracker", () -> buildRequestTracker(requestTrackerFromBuilder), cycleDetector); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 0c0aef5e6d7..ed8a0c1b997 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -382,6 +382,9 @@ datastax-java-driver { # Required: no. 
If the 'class' child option is absent, no authentication will occur. # Modifiable at runtime: no # Overridable in a profile: no + # + # Note that the contents of this section can be overridden programmatically with + # SessionBuilder.withAuthProvider or SessionBuilder.withAuthCredentials. advanced.auth-provider { # The class of the provider. If it is not qualified, the driver assumes that it resides in the # package com.datastax.oss.driver.internal.core.auth. From 64cfb8df92d33ece21593037d2a22225ed183a28 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 13 Aug 2019 16:43:38 -0700 Subject: [PATCH 056/979] Move PlainTextAuthProviderBase to the public API --- .../core/auth/PlainTextAuthProviderBase.java | 5 +---- .../datastax/oss/driver/api/core/session/SessionBuilder.java | 3 ++- .../oss/driver/internal/core/auth/PlainTextAuthProvider.java | 1 + .../core/auth/ProgrammaticPlainTextAuthProvider.java | 1 + 4 files changed, 5 insertions(+), 5 deletions(-) rename core/src/main/java/com/datastax/oss/driver/{internal => api}/core/auth/PlainTextAuthProviderBase.java (95%) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java similarity index 95% rename from core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java rename to core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index 55a5999d05e..3f0a649c7a1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -13,11 +13,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.internal.core.auth; +package com.datastax.oss.driver.api.core.auth; -import com.datastax.oss.driver.api.core.auth.AuthProvider; -import com.datastax.oss.driver.api.core.auth.Authenticator; -import com.datastax.oss.driver.api.core.auth.SyncAuthenticator; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index e0e36bfeb8f..cb3f355f929 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -240,7 +241,7 @@ public SelfT withAuthProvider(@Nullable AuthProvider authProvider) { * *

    Note that this approach holds the credentials in clear text in memory, which makes them * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for - * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code + * you, consider writing your own {@link AuthProvider} implementation ({@link * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link * #withAuthProvider(AuthProvider)} or via the configuration ({@code * advanced.auth-provider.class}). diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java index 2946a89b71a..cb0e40d9600 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.auth; +import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java index b6ade1c6c31..dd0b7d4a188 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.auth; +import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import 
com.datastax.oss.driver.api.core.session.SessionBuilder; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.ThreadSafe; From 8da400d1769fae375fc5ff373e566d18598ac378 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 14 Aug 2019 10:00:27 -0700 Subject: [PATCH 057/979] Make PlainTextAuthProviderBase.Credentials public This will be slightly more flexible if custom implementations want to create instances outside of the provider subclass. --- .../oss/driver/api/core/auth/PlainTextAuthProviderBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index 3f0a649c7a1..a82b6ec0bfb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -79,7 +79,7 @@ public void close() { // nothing to do } - protected static class Credentials { + public static class Credentials { private final char[] username; private final char[] password; From 4c25704f06b46a044ac2d7db1e235198be99d641 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 12 Aug 2019 15:06:22 -0700 Subject: [PATCH 058/979] JAVA-2390: Add methods to set the SSL engine factory programmatically --- changelog/README.md | 1 + .../core/session/ProgrammaticArguments.java | 21 ++++- .../api/core/session/SessionBuilder.java | 33 +++++++ .../ssl/ProgrammaticSslEngineFactory.java | 67 +++++++++++++ .../core/context/DefaultDriverContext.java | 26 +++-- core/src/main/resources/reference.conf | 3 + .../driver/core/ssl/ProgrammaticSslIT.java | 94 +++++++++++++++++++ manual/core/ssl/README.md | 23 ++++- 8 files changed, 255 insertions(+), 13 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java create mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/ProgrammaticSslIT.java diff --git a/changelog/README.md b/changelog/README.md index c92d547b74a..76fe25c8c4e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.0 (in progress) +- [improvement] JAVA-2390: Add methods to set the SSL engine factory programmatically - [improvement] JAVA-2379: Fail fast if prepared id doesn't match when repreparing on the fly - [bug] JAVA-2375: Use per-request keyspace when repreparing on the fly - [improvement] JAVA-2370: Remove auto-service plugin from mapper processor diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 50f875c4141..34f002f04e9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -50,6 +51,7 @@ public static Builder builder() { private final Map> nodeFilters; private final ClassLoader classLoader; private final AuthProvider authProvider; + private final SslEngineFactory sslEngineFactory; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -59,7 +61,8 @@ private ProgrammaticArguments( @NonNull Map localDatacenters, @NonNull Map> nodeFilters, @Nullable ClassLoader classLoader, - @Nullable AuthProvider authProvider) { + @Nullable AuthProvider authProvider, + 
@Nullable SslEngineFactory sslEngineFactory) { this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; this.schemaChangeListener = schemaChangeListener; @@ -68,6 +71,7 @@ private ProgrammaticArguments( this.nodeFilters = nodeFilters; this.classLoader = classLoader; this.authProvider = authProvider; + this.sslEngineFactory = sslEngineFactory; } @NonNull @@ -110,6 +114,11 @@ public AuthProvider getAuthProvider() { return authProvider; } + @Nullable + public SslEngineFactory getSslEngineFactory() { + return sslEngineFactory; + } + public static class Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -121,6 +130,7 @@ public static class Builder { ImmutableMap.builder(); private ClassLoader classLoader; private AuthProvider authProvider; + private SslEngineFactory sslEngineFactory; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... typeCodecs) { @@ -188,6 +198,12 @@ public Builder withAuthProvider(@Nullable AuthProvider authProvider) { return this; } + @NonNull + public Builder withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { + this.sslEngineFactory = sslEngineFactory; + return this; + } + @NonNull public ProgrammaticArguments build() { return new ProgrammaticArguments( @@ -198,7 +214,8 @@ public ProgrammaticArguments build() { localDatacentersBuilder.build(), nodeFiltersBuilder.build(), classLoader, - authProvider); + authProvider, + sslEngineFactory); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index cb3f355f929..890d3cf41d1 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -28,6 +28,8 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import 
com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.ssl.ProgrammaticSslEngineFactory; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.ContactPoints; @@ -51,6 +53,7 @@ import java.util.concurrent.CompletionStage; import java.util.function.Predicate; import java.util.function.Supplier; +import javax.net.ssl.SSLContext; import net.jcip.annotations.NotThreadSafe; /** @@ -251,6 +254,36 @@ public SelfT withAuthCredentials(@NonNull String username, @NonNull String passw return withAuthProvider(new ProgrammaticPlainTextAuthProvider(username, password)); } + /** + * Registers an SSL engine factory for the session. + * + *

    <p>If the factory is provided programmatically with this method, it overrides the configuration + * (that is, the {@code advanced.ssl-engine-factory} option will be ignored). + */ + @NonNull + public SelfT withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { + this.programmaticArgumentsBuilder.withSslEngineFactory(sslEngineFactory); + return self; + } + + /** + * Configures the session to use SSL with the given context. + * + *

    <p>This is a convenience method for clients that already have an {@link SSLContext} instance. + * It wraps its argument into a {@link ProgrammaticSslEngineFactory}, and passes it to {@link + * #withSslEngineFactory(SslEngineFactory)}. + * + *

    If you use this method, there is no way to customize cipher suites, or turn on host name + * validation. Also, note that SSL engines will be created with advisory peer information ({@link + * SSLContext#createSSLEngine(String, int)}) whenever possible. If you need finer control, write + * your own factory. + */ + @NonNull + public SelfT withSslContext(@Nullable SSLContext sslContext) { + return withSslEngineFactory( + sslContext == null ? null : new ProgrammaticSslEngineFactory(sslContext)); + } + /** * Specifies the datacenter that is considered "local" by the load balancing policy. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java new file mode 100644 index 00000000000..01afac32c7e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.ssl; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLEngine; + +/** + * An SSL engine factory that allows you to configure the driver programmatically, by passing your + * own {@link SSLContext}. + * + *

    <p>Unlike the configuration-based approach, this class does not allow you to customize cipher + * suites, or turn on host name validation. Also, note that it will create SSL engines with advisory + * peer information ({@link SSLContext#createSSLEngine(String, int)}) whenever possible. + * + *

    If those defaults do not work for you, it should be pretty straightforward to write your own + * implementation by extending or duplicating this class. + * + * @see SessionBuilder#withSslEngineFactory(SslEngineFactory) + * @see SessionBuilder#withSslContext(SSLContext) + */ +public class ProgrammaticSslEngineFactory implements SslEngineFactory { + + protected final SSLContext sslContext; + + public ProgrammaticSslEngineFactory(SSLContext sslContext) { + this.sslContext = sslContext; + } + + @NonNull + @Override + public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { + SSLEngine engine; + SocketAddress remoteAddress = remoteEndpoint.resolve(); + if (remoteAddress instanceof InetSocketAddress) { + InetSocketAddress socketAddress = (InetSocketAddress) remoteAddress; + engine = sslContext.createSSLEngine(socketAddress.getHostName(), socketAddress.getPort()); + } else { + engine = sslContext.createSSLEngine(); + } + engine.setUseClientMode(true); + return engine; + } + + @Override + public void close() { + // nothing to do + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index c0687b65ad4..e4c2df05461 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -126,8 +126,7 @@ public class DefaultDriverContext implements InternalDriverContext { new LazyReference<>("timestampGenerator", this::buildTimestampGenerator, cycleDetector); private final LazyReference addressTranslatorRef = new LazyReference<>("addressTranslator", this::buildAddressTranslator, cycleDetector); - private final LazyReference> sslEngineFactoryRef = - new LazyReference<>("sslEngineFactory", this::buildSslEngineFactory, cycleDetector); + private final LazyReference> sslEngineFactoryRef; 
private final LazyReference eventBusRef = new LazyReference<>("eventBus", this::buildEventBus, cycleDetector); @@ -234,6 +233,11 @@ public DefaultDriverContext( this.requestTrackerRef = new LazyReference<>( "requestTracker", () -> buildRequestTracker(requestTrackerFromBuilder), cycleDetector); + this.sslEngineFactoryRef = + new LazyReference<>( + "sslEngineFactory", + () -> buildSslEngineFactory(programmaticArguments.getSslEngineFactory()), + cycleDetector); this.nodeFiltersFromBuilder = programmaticArguments.getNodeFilters(); this.classLoader = programmaticArguments.getClassLoader(); } @@ -340,12 +344,14 @@ protected AddressTranslator buildAddressTranslator() { DefaultDriverOption.ADDRESS_TRANSLATOR_CLASS))); } - protected Optional buildSslEngineFactory() { - return Reflection.buildFromConfig( - this, - DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, - SslEngineFactory.class, - "com.datastax.oss.driver.internal.core.ssl"); + protected Optional buildSslEngineFactory(SslEngineFactory factoryFromBuilder) { + return (factoryFromBuilder != null) + ? 
Optional.of(factoryFromBuilder) + : Reflection.buildFromConfig( + this, + DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, + SslEngineFactory.class, + "com.datastax.oss.driver.internal.core.ssl"); } protected EventBus buildEventBus() { @@ -394,8 +400,8 @@ protected NettyOptions buildNettyOptions() { } protected Optional buildSslHandlerFactory() { - // If a JDK-based factory was provided through the public API, syncWrapper it - return buildSslEngineFactory().map(JdkSslHandlerFactory::new); + // If a JDK-based factory was provided through the public API, wrap it + return getSslEngineFactory().map(JdkSslHandlerFactory::new); // For more advanced options (like using Netty's native OpenSSL support instead of the JDK), // extend DefaultDriverContext and override this method diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index ed8a0c1b997..d80d9762010 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -409,6 +409,9 @@ datastax-java-driver { # Required: no. If the 'class' child option is absent, SSL won't be activated. # Modifiable at runtime: no # Overridable in a profile: no + # + # Note that the contents of this section can be overridden programmatically with + # SessionBuilder.withSslEngineFactory or SessionBuilder#withSslContext. advanced.ssl-engine-factory { # The class of the factory. If it is not qualified, the driver assumes that it resides in the # package com.datastax.oss.driver.internal.core.ssl. diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/ProgrammaticSslIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/ProgrammaticSslIT.java new file mode 100644 index 00000000000..78d1d629df0 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/ProgrammaticSslIT.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.ssl; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.ssl.ProgrammaticSslEngineFactory; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.KeyStore; +import java.security.SecureRandom; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import org.junit.ClassRule; +import org.junit.Test; + +public class ProgrammaticSslIT { + + @ClassRule public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withSsl().build(); + + @Test + public void should_connect_with_programmatic_factory() { + SslEngineFactory factory = new ProgrammaticSslEngineFactory(createSslContext()); + try (CqlSession session = + (CqlSession) + SessionUtils.baseBuilder() + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withSslEngineFactory(factory) + .build()) { + session.execute("select * from system.local"); + } + } + + @Test + public void should_connect_with_programmatic_ssl_context() { + SSLContext sslContext = createSslContext(); + try (CqlSession session = + 
(CqlSession) + SessionUtils.baseBuilder() + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withSslContext(sslContext) + .build()) { + session.execute("select * from system.local"); + } + } + + private static SSLContext createSslContext() { + try { + SSLContext context = SSLContext.getInstance("SSL"); + TrustManagerFactory tmf = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + try (InputStream tsf = + Files.newInputStream( + Paths.get(CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_FILE.getAbsolutePath()))) { + KeyStore ts = KeyStore.getInstance("JKS"); + char[] password = CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD.toCharArray(); + ts.load(tsf, password); + tmf.init(ts); + } + KeyManagerFactory kmf = + KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + try (InputStream ksf = + Files.newInputStream( + Paths.get(CcmBridge.DEFAULT_CLIENT_KEYSTORE_FILE.getAbsolutePath()))) { + KeyStore ks = KeyStore.getInstance("JKS"); + char[] password = CcmBridge.DEFAULT_CLIENT_KEYSTORE_PASSWORD.toCharArray(); + ks.load(ksf, password); + kmf.init(ks, password); + } + context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + return context; + } catch (Exception e) { + throw new AssertionError("Unexpected error while creating SSL context", e); + } + } +} diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 9d28cf315e1..88c34aa3a94 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -110,7 +110,7 @@ use [JSSE system properties]: -Djavax.net.ssl.keyStorePassword=password123 ``` -#### JSSE, programmatic +#### JSSE, custom factory If you need more control than what system properties allow, you need to write your own engine factory. If you just need specific configuration on the `SSLEngine`, you can extend the default @@ -145,6 +145,27 @@ datastax-java-driver { } ``` +#### JSSE, programmatic + +You can also provide a factory instance programmatically. 
This will take precedence over the +configuration: + +```java +SslEngineFactory yourFactory = ... +CqlSession session = CqlSession.builder() + .withSslEngineFactory(yourFactory) + .build(); +``` + +There is also a convenience shortcut if you just want to use an existing `javax.net.ssl.SSLContext`: + +```java +SSLContext sslContext = ... +CqlSession session = CqlSession.builder() + .withSslContext(sslContext) + .build(); +``` + #### Netty Netty provides a more efficient SSL implementation based on native OpenSSL support. It's possible to From d14cbbac8aeb4815917d90305476e9c826327fa0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 14 Aug 2019 11:17:24 -0700 Subject: [PATCH 059/979] Pass endpoint and authenticator name to PlainTextAuthProviderBase.getCredentials Even though they aren't used by the current implementations, they could theoretically influence the generation of the credentials. --- .../oss/driver/api/core/auth/PlainTextAuthProviderBase.java | 5 +++-- .../oss/driver/internal/core/auth/PlainTextAuthProvider.java | 4 +++- .../core/auth/ProgrammaticPlainTextAuthProvider.java | 4 +++- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index a82b6ec0bfb..b44e6f8765c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -56,13 +56,14 @@ protected PlainTextAuthProviderBase(@NonNull String logPrefix) { *

    This is invoked every time the driver opens a new connection. */ @NonNull - protected abstract Credentials getCredentials(); + protected abstract Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator); @NonNull @Override public Authenticator newAuthenticator( @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - return new PlainTextAuthenticator(getCredentials()); + return new PlainTextAuthenticator(getCredentials(endPoint, serverAuthenticator)); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java index cb0e40d9600..2d664063933 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.EndPoint; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.ThreadSafe; @@ -53,7 +54,8 @@ public PlainTextAuthProvider(DriverContext context) { @NonNull @Override - protected Credentials getCredentials() { + protected Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { return new Credentials( config.getString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME).toCharArray(), config.getString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD).toCharArray()); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java index dd0b7d4a188..0b395240f53 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.auth; import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; +import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.session.SessionBuilder; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.ThreadSafe; @@ -41,7 +42,8 @@ public ProgrammaticPlainTextAuthProvider(String username, String password) { @NonNull @Override - protected Credentials getCredentials() { + protected Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { return new Credentials(username.toCharArray(), password.toCharArray()); } } From 061266109b95ea33da76d23e6994c9d001108362 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 15 Aug 2019 09:23:47 -0700 Subject: [PATCH 060/979] Link to PlainTextAuthProviderBase javadocs in manual --- manual/core/authentication/README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index bcb421fc948..a5ab8816870 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -60,11 +60,12 @@ CqlSession session = One downside of `withAuthCredentials` is that the credentials are stored in clear text in memory; this means they are vulnerable to an attacker who is able to perform memory dumps. If this is not -acceptable for you, consider writing your own [AuthProvider] implementation (the internal class -`PlainTextAuthProviderBase` is a good starting point). +acceptable for you, consider writing your own [AuthProvider] implementation +([PlainTextAuthProviderBase] is a good starting point). 
[SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html \ No newline at end of file From 7f02d20b137bf2bb330509372212eab9d9dc04fe Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 15 Aug 2019 09:29:32 -0700 Subject: [PATCH 061/979] Upgrade Netty to 4.1.39.Final This version fixes multiple HTTP/2 security issues. We don't use HTTP/2 in the driver, but this will avoid triggering CVE warnings.
--- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index a34fa29fcd3..22b9362f1e6 100644 --- a/pom.xml +++ b/pom.xml @@ -52,7 +52,7 @@ 2.1.11 4.0.5 1.4.6-SNAPSHOT - 4.1.38.Final + 4.1.39.Final 1.7.26 1.1.7.3 From 94aa9913b03e192abe7627999897263542f60572 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 15 Aug 2019 09:41:46 -0700 Subject: [PATCH 062/979] JAVA-2329: Fix mapper message to reference primary key instead of partition key --- .../internal/mapper/processor/dao/DaoDeleteMethodGenerator.java | 2 +- .../mapper/processor/dao/DaoDeleteMethodGeneratorTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 5cf2c7f26c5..53e68c514e5 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -101,7 +101,7 @@ public Optional generate() { methodElement, processedType, "Wrong number of parameters: %s methods with no custom clause " - + "must take either an entity instance, or the partition key components", + + "must take either an entity instance, or the primary key components", Delete.class.getSimpleName()); return Optional.empty(); } diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java index 637aad8be3d..52688dca0d6 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java +++ 
b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java @@ -57,7 +57,7 @@ public static Object[][] invalidSignatures() { }, { "Wrong number of parameters: Delete methods with no custom clause " - + "must take either an entity instance, or the partition key components", + + "must take either an entity instance, or the primary key components", MethodSpec.methodBuilder("delete") .addAnnotation(Delete.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) From 5ae010b6984dc8708cc7871dd92b54a3622e0ef2 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 19 Aug 2019 08:57:58 +0200 Subject: [PATCH 063/979] JAVA-2394: BaseCcmRule DseRequirement max should use DseVersion, not Cassandra version (#1317) --- changelog/README.md | 1 + .../com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 76fe25c8c4e..cde9d37bf60 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -33,6 +33,7 @@ - [bug] JAVA-2312: Handle UDTs with names that clash with collection types - [improvement] JAVA-2307: Improve `@Select` and `@Delete` by not requiring full primary key - [improvement] JAVA-2315: Improve extensibility of session builder +- [bug] JAVA-2394: BaseCcmRule DseRequirement max should use DseVersion, not Cassandra version ### 4.1.0 diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java index 42c754f0dba..c902434aac2 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java @@ -122,7 +122,7 @@ public void evaluate() { if (!dseRequirement.max().isEmpty()) { Version maxVersion = Version.parse(dseRequirement.max()); - if
(maxVersion.compareTo(ccmBridge.getCassandraVersion()) <= 0) { + if (maxVersion.compareTo(dseVersion) <= 0) { return buildErrorStatement(maxVersion, dseRequirement.description(), true, true); } } From f6980df6e065bca4ed2db28297d56e8f55c8d514 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 20 Aug 2019 19:01:45 +0200 Subject: [PATCH 064/979] Upgrade jackson-databind to 2.9.9.3 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 22b9362f1e6..65514184b68 100644 --- a/pom.xml +++ b/pom.xml @@ -70,7 +70,7 @@ 2.0.1 1.1.4 2.9.9 - 2.9.9.1 + 2.9.9.3 From 02e5d50a197551fec8e7d53c7a5c01a097bef435 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 20 Aug 2019 19:03:25 +0200 Subject: [PATCH 065/979] OSGi tests should use distinct versions for Jackson core and databind --- integration-tests/pom.xml | 2 ++ .../java/com/datastax/oss/driver/osgi/BundleOptions.java | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index b2ca1785468..ac3bb257b1f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -150,6 +150,7 @@ ${guava.version} ${hdrhistogram.version} ${jackson.version} + ${jackson-databind.version} ${logback.version} ${lz4.version} ${metrics.version} @@ -174,6 +175,7 @@ ${guava.version} ${hdrhistogram.version} ${jackson.version} + ${jackson-databind.version} ${logback.version} ${lz4.version} ${metrics.version} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java index 1a259d04466..7facd7afb68 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java @@ -29,6 +29,8 @@ public class BundleOptions { public static CompositeOption baseOptions() { + // These options should only include 
dependencies that must be present in both the non-shaded + // and shaded driver versions. // Note: the bundles below include Netty; these bundles are not required by // the shaded core driver bundle, but they need to be present in all cases because // the test-infra bundle requires the (non-shaded) Netty bundle. @@ -119,9 +121,10 @@ public static CompositeOption logbackBundles() { public static CompositeOption jacksonBundles() { String jacksonVersion = getVersion("jackson.version"); + String jacksonDatabindVersion = getVersion("jackson-databind.version"); return () -> options( - mavenBundle("com.fasterxml.jackson.core", "jackson-databind", jacksonVersion), + mavenBundle("com.fasterxml.jackson.core", "jackson-databind", jacksonDatabindVersion), mavenBundle("com.fasterxml.jackson.core", "jackson-core", jacksonVersion), mavenBundle("com.fasterxml.jackson.core", "jackson-annotations", jacksonVersion)); } From 2e248a9a8b7a7f2aad7aad4bf09e982b785c1aa7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 21 Aug 2019 12:40:45 +0200 Subject: [PATCH 066/979] Clarify the contents of bundles included in methods baseOptions() and testBundles() --- .../datastax/oss/driver/osgi/BundleOptions.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java index 7facd7afb68..0a083364a63 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/BundleOptions.java @@ -29,14 +29,14 @@ public class BundleOptions { public static CompositeOption baseOptions() { - // These options should only include dependencies that must be present in both the non-shaded - // and shaded driver versions. 
- // Note: the bundles below include Netty; these bundles are not required by - // the shaded core driver bundle, but they need to be present in all cases because - // the test-infra bundle requires the (non-shaded) Netty bundle. + // In theory, the options declared here should only include bundles that must be present + // in order for both the non-shaded and shaded driver versions to work properly. + // Bundles that should be present only for the non-shaded driver version should be declared + // elsewhere. + // However we have two exceptions: Netty and FasterXML Jackson; both need to be present in all + // cases because the test bundles requires their presence (see #testBundles method). return () -> options( - nettyBundles(), mavenBundle( "com.datastax.oss", "java-driver-shaded-guava", getVersion("guava.version")), mavenBundle("io.dropwizard.metrics", "metrics-core", getVersion("metrics.version")), @@ -92,7 +92,8 @@ public static CompositeOption testBundles() { options( driverTestInfraBundle(), simulacronBundles(), - jacksonBundles(), + nettyBundles(), // required by the test infra bundle, even for the shaded jar + jacksonBundles(), // required by the Simulacron bundle, even for the shaded jar mavenBundle( "org.apache.commons", "commons-exec", System.getProperty("commons-exec.version")), mavenBundle("org.assertj", "assertj-core", System.getProperty("assertj.version")), From d91d7419ae1e6e3a74464f92155e6d4a90bcd10b Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 21 Aug 2019 12:35:18 -0700 Subject: [PATCH 067/979] Improve javadocs to indicate which objects are mutable --- .../java/com/datastax/oss/driver/api/core/CqlSession.java | 6 +++++- .../datastax/oss/driver/api/core/CqlSessionBuilder.java | 6 +++++- .../datastax/oss/driver/api/core/cql/BatchStatement.java | 8 +++++++- .../oss/driver/api/core/cql/BatchStatementBuilder.java | 5 +++++ .../oss/driver/api/core/cql/BoundStatementBuilder.java | 5 +++++ .../oss/driver/api/core/cql/PreparedStatement.java | 2 ++ 
.../datastax/oss/driver/api/core/cql/SimpleStatement.java | 8 +++++++- .../oss/driver/api/core/cql/SimpleStatementBuilder.java | 5 +++++ .../com/datastax/oss/driver/api/core/data/TupleValue.java | 6 +++--- .../com/datastax/oss/driver/api/core/data/UdtValue.java | 6 +++--- .../oss/driver/api/core/session/SessionBuilder.java | 2 ++ 11 files changed, 49 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java index 04a98054dc0..8af93ca6ddd 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java @@ -31,7 +31,11 @@ /** A specialized session with convenience methods to execute CQL statements. */ public interface CqlSession extends Session { - /** Returns a builder to create a new instance. */ + /** + * Returns a builder to create a new instance. + * + *

    Note that this builder is mutable and not thread-safe. + */ @NonNull static CqlSessionBuilder builder() { return new CqlSessionBuilder(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSessionBuilder.java index 064b6b12779..19ac5a6fc60 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSessionBuilder.java @@ -19,7 +19,11 @@ import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.NotThreadSafe; -/** Helper class to build a {@link CqlSession} instance. */ +/** + * Helper class to build a {@link CqlSession} instance. + * + *

    This class is mutable and not thread-safe. + */ @NotThreadSafe public class CqlSessionBuilder extends SessionBuilder { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java index 1219dad4475..41e72aa0dfe 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java @@ -128,7 +128,11 @@ static BatchStatement newInstance( null); } - /** Returns a builder to create an instance of the default implementation. */ + /** + * Returns a builder to create an instance of the default implementation. + * + *

    Note that this builder is mutable and not thread-safe. + */ @NonNull static BatchStatementBuilder builder(@NonNull BatchType batchType) { return new BatchStatementBuilder(batchType); @@ -137,6 +141,8 @@ static BatchStatementBuilder builder(@NonNull BatchType batchType) { /** * Returns a builder to create an instance of the default implementation, copying the fields of * the given statement. + * + *

    Note that this builder is mutable and not thread-safe. */ @NonNull static BatchStatementBuilder builder(@NonNull BatchStatement template) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java index de3283b4a36..373be6ac57b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java @@ -24,6 +24,11 @@ import java.util.Arrays; import net.jcip.annotations.NotThreadSafe; +/** + * A builder to create a batch statement. + * + *

    This class is mutable and not thread-safe. + */ @NotThreadSafe public class BatchStatementBuilder extends StatementBuilder { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java index 579dd8e399b..6ccfedb1608 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java @@ -30,6 +30,11 @@ import java.util.Map; import net.jcip.annotations.NotThreadSafe; +/** + * A builder to create a bound statement. + * + *

    This class is mutable and not thread-safe. + */ @NotThreadSafe public class BoundStatementBuilder extends StatementBuilder implements Bindable { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/PreparedStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PreparedStatement.java index b9f9a0fdccf..f412b9a3bc4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/PreparedStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PreparedStatement.java @@ -134,6 +134,8 @@ void setResultMetadata( /** * Returns a builder to construct an executable statement. * + *

    Note that this builder is mutable and not thread-safe. + * * @see #bind(Object...) */ @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java index f15efc7df7c..cf7bec7fa7a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java @@ -146,7 +146,11 @@ static SimpleStatement newInstance( null); } - /** Returns a builder to create an instance of the default implementation. */ + /** + * Returns a builder to create an instance of the default implementation. + * + *

    Note that this builder is mutable and not thread-safe. + */ @NonNull static SimpleStatementBuilder builder(@NonNull String query) { return new SimpleStatementBuilder(query); @@ -155,6 +159,8 @@ static SimpleStatementBuilder builder(@NonNull String query) { /** * Returns a builder to create an instance of the default implementation, copying the fields of * the given statement. + * + *

    Note that this builder is mutable and not thread-safe. */ @NonNull static SimpleStatementBuilder builder(@NonNull SimpleStatement template) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java index 4a1a9e32233..e66a6711041 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java @@ -26,6 +26,11 @@ import java.util.Map; import net.jcip.annotations.NotThreadSafe; +/** + * A builder to create a simple statement. + * + *

    This class is mutable and not thread-safe. + */ @NotThreadSafe public class SimpleStatementBuilder extends StatementBuilder { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java index e9e9c91be00..4a5727cdda0 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java @@ -26,9 +26,9 @@ * *

    A tuple value is attached if and only if its type is attached (see {@link Detachable}). * - *

    The default implementation returned by the driver is immutable and serializable. If you write - * your own implementation, it should at least be thread-safe; serializability is not mandatory, but - * recommended for use with some 3rd-party tools like Apache Spark ™. + *

    The default implementation returned by the driver is mutable and serializable. If you write + * your own implementation, serializability is not mandatory, but recommended for use with some + * 3rd-party tools like Apache Spark ™. */ public interface TupleValue extends GettableByIndex, SettableByIndex { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java index 41f5f0361de..df5c6c0cc97 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java @@ -26,9 +26,9 @@ * *

    A tuple value is attached if and only if its type is attached (see {@link Detachable}). * - *

    The default implementation returned by the driver is immutable and serializable. If you write - * your own implementation, it should at least be thread-safe; serializability is not mandatory, but - * recommended for use with some 3rd-party tools like Apache Spark ™. + *

    The default implementation returned by the driver is mutable and serializable. If you write + * your own implementation, serializability is not mandatory, but recommended for use with some + * 3rd-party tools like Apache Spark ™. */ public interface UdtValue extends GettableById, GettableByName, SettableById, SettableByName { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 890d3cf41d1..40f0e95fcab 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -61,6 +61,8 @@ * *

    You only need to deal with this directly if you use custom driver extensions. For the default * session implementation, see {@link CqlSession#builder()}. + * + *

    This class is mutable and not thread-safe. */ @NotThreadSafe public abstract class SessionBuilder { From ab4302910937cb729a86ac5e8b211fd82c7d9420 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 26 Aug 2019 11:00:23 +0200 Subject: [PATCH 068/979] Remove redundant escape characters in regular expressions --- .../main/java/com/datastax/oss/driver/api/core/Version.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index f70db10c252..5c4a5e8de2c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -39,7 +39,8 @@ public class Version implements Comparable { private static final String VERSION_REGEXP = - "(\\d+)\\.(\\d+)(\\.\\d+)?(\\.\\d+)?([~\\-]\\w[.\\w]*(?:\\-\\w[.\\w]*)*)?(\\+[.\\w]+)?"; + "(\\d+)\\.(\\d+)(\\.\\d+)?(\\.\\d+)?([~\\-]\\w[.\\w]*(?:-\\w[.\\w]*)*)?(\\+[.\\w]+)?"; + private static final Pattern pattern = Pattern.compile(VERSION_REGEXP); public static final Version V2_1_0 = parse("2.1.0"); @@ -111,7 +112,7 @@ public static Version parse(@Nullable String version) { pr == null || pr.isEmpty() ? null : pr.substring(1) - .split("\\-"); // drop initial '-' or '~' then split on the remaining ones + .split("-"); // drop initial '-' or '~' then split on the remaining ones String bl = matcher.group(6); String build = bl == null || bl.isEmpty() ? 
null : bl.substring(1); // drop the initial '+' From a3b2a28d1d0f6afa16f4666aac695f2046194e9e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 26 Aug 2019 11:00:51 +0200 Subject: [PATCH 069/979] Annotate constants in Version class with `@NonNull` --- .../java/com/datastax/oss/driver/api/core/Version.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index 5c4a5e8de2c..0e86cd8f077 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -43,10 +43,10 @@ public class Version implements Comparable { private static final Pattern pattern = Pattern.compile(VERSION_REGEXP); - public static final Version V2_1_0 = parse("2.1.0"); - public static final Version V2_2_0 = parse("2.2.0"); - public static final Version V3_0_0 = parse("3.0.0"); - public static final Version V4_0_0 = parse("4.0.0"); + @NonNull public static final Version V2_1_0 = Objects.requireNonNull(parse("2.1.0")); + @NonNull public static final Version V2_2_0 = Objects.requireNonNull(parse("2.2.0")); + @NonNull public static final Version V3_0_0 = Objects.requireNonNull(parse("3.0.0")); + @NonNull public static final Version V4_0_0 = Objects.requireNonNull(parse("4.0.0")); private final int major; private final int minor; From 50dbe1e7c4ce917f5511361168b1a71de74569d9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 26 Aug 2019 11:01:15 +0200 Subject: [PATCH 070/979] Use Integer.compare to simplify Version comparisons --- .../main/java/com/datastax/oss/driver/api/core/Version.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index 0e86cd8f077..c1ba41c7345 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -252,9 +252,7 @@ public int compareTo(@NonNull Version other) { } } - return preReleases.length == other.preReleases.length - ? 0 - : (preReleases.length < other.preReleases.length ? -1 : 1); + return Integer.compare(preReleases.length, other.preReleases.length); } @Override From a3e5e320a801b636a8e1c8243668920b50fe2f8e Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 10:36:53 -0700 Subject: [PATCH 071/979] Add revapi ignores for new annotations on Version constants See a3b2a28d1d0f6afa16f4666aac695f2046194e9e. --- core/revapi.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/core/revapi.json b/core/revapi.json index 295110bafd3..82b812baa93 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4809,6 +4809,13 @@ "code": "java.method.addedToInterface", "new": "method java.lang.String com.datastax.oss.driver.api.core.data.UdtValue::toString()", "justification": "False positive -- all objects implicitly have toString()" + }, + { + "regex": true, + "code": "java.annotation.added", + "old": "field com\\.datastax\\.oss\\.driver\\.api\\.core\\.Version.V.*", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "Marking constants as non-null doesn't break existing code" } ] } From 6c2df6ed499c637bc3a4829c72ee6c5f58e33e2c Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 10:38:34 -0700 Subject: [PATCH 072/979] Bump native-protocol to 1.4.6 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 65514184b68..9389b5085e6 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,7 @@ 25.1-jre 2.1.11 4.0.5 - 1.4.6-SNAPSHOT + 1.4.6 4.1.39.Final 1.7.26 From 6bbc525380963c8d210c135820ee321c5785725b Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 10:47:04 -0700 Subject: [PATCH 073/979] Fix install-snapshots.sh to not error out when 
no snapshots --- install-snapshots.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/install-snapshots.sh b/install-snapshots.sh index 893e92af9a6..76b0ad15166 100755 --- a/install-snapshots.sh +++ b/install-snapshots.sh @@ -17,5 +17,7 @@ install_snapshot() } } -grep -q '.*-SNAPSHOT' pom.xml && \ +grep -q '.*-SNAPSHOT' pom.xml +if [ $? -eq 0 ] ; then install_snapshot https://github.com/datastax/native-protocol.git native-protocol +fi From dcdcbeae99092b53d54852415a1c9cc245db33fb Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 11:10:52 -0700 Subject: [PATCH 074/979] Update version in docs --- README.md | 4 +-- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 4 +-- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 6 ++-- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 6 ++-- manual/core/detachable_types/README.md | 14 ++++---- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 18 +++++----- manual/core/metadata/schema/README.md | 12 +++---- manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 4 +-- manual/core/paging/README.md | 4 +-- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 2 +- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 2 +- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/statements/README.md | 8 ++--- manual/core/statements/batch/README.md | 2 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 4 +-- 
manual/core/statements/simple/README.md | 2 +- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 4 +-- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 6 ++-- manual/mapper/daos/delete/README.md | 14 ++++---- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 10 +++--- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 16 ++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 24 ++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 8 ++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 2 +- manual/query_builder/README.md | 6 ++-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- 67 files changed, 248 insertions(+), 248 deletions(-) diff --git a/README.md b/README.md index 380007e4356..b649c0b54cb 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the 
development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.1.0](https://github.com/datastax/java-driver/tree/4.1.0).* +[4.2.0](https://github.com/datastax/java-driver/tree/4.2.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), using exclusively Cassandra's binary protocol and Cassandra Query @@ -80,7 +80,7 @@ See the [upgrade guide](upgrade_guide/) for details. * [Changelog] * [FAQ] -[API docs]: http://www.datastax.com/drivers/java/4.0 +[API docs]: https://docs.datastax.com/en/drivers/java/4.2 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index cde9d37bf60..5a52f55e787 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.2.0 (in progress) +### 4.2.0 - [improvement] JAVA-2390: Add methods to set the SSL engine factory programmatically - [improvement] JAVA-2379: Fail fast if prepared id doesn't match when repreparing on the fly diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 98fa681e718..d1d5244b51e 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 33b34b4ad7b..25111876103 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 7ed2d77c8f4..cde5535232a 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -113,8 +113,8 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html -[Ec2MultiRegionAddressTranslator]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/internal/core/addresstranslation/Ec2MultiRegionAddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[Ec2MultiRegionAddressTranslator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/addresstranslation/Ec2MultiRegionAddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index e6625704d83..53344a4e087 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -193,4 +193,4 @@ documentation for more details and an example. 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index a5ab8816870..243aaa03eb7 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -66,6 +66,6 @@ acceptable for you, consider writing your own [AuthProvider] implementation [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html -([PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html \ No newline at end of file +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html \ No newline at end of file diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index e87b0ed25fe..5838360f89f 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -495,16 +495,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` 
-[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 0c53a34a27e..2549ed66327 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. 
-[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 76edfcc9cb7..5fcbca3f997 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -223,6 +223,6 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 9d406be586f..63f32507819 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -130,13 +130,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 
7ed04f40527..1dc77bb5d3c 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -419,6 +419,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index e7dd4d8962e..c660a9d6e65 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -274,8 +274,8 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- \ No newline at end of file +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- \ No newline at end of file diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index de47cd19b4a..470634ad3bc 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -42,6 +42,6 @@ Set tokenRanges = tokenMap.getTokenRanges(keyspace.getName(), node); This is a big improvement over previous versions of the driver, where it was possible to observe a new keyspace in the schema metadata before the token metadata was updated. 
-[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index a0d281ffcfd..a4a93cd2039 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -73,12 +73,12 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/NodeState.html \ No newline at end of file +[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- 
+[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeState.html \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index c6308209669..b8a166fa157 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -207,11 +207,11 @@ Some of the data in the [token map](../token/) relies on keyspace metadata (any unavailable for the excluded keyspaces. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 9934400b2ef..f2827ee9f43 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -159,5 +159,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. 
-[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index ff48e2b6252..ae948cdc9af 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -106,5 +106,5 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 444f249ff5f..a2b15586df7 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -177,5 +177,5 @@ think you can get away with the performance hit. 
We recommend that you: * set a hard limit on the highest possible page number, to prevent malicious clients from triggering queries that would skip a huge amount of rows. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 34635c9ed5f..f07287570c5 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index e734deebdbb..e840c6c185a 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -144,5 +144,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. 
-[CqlSession]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 615bd282c7a..8e203c45f17 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -174,7 +174,7 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-ffi diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 97f342c5017..59e93480255 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -56,7 +56,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 2498b07c7ad..50e4be58698 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -106,4 +106,4 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/tracker/RequestTracker.html diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index cc6ed2f2fde..60042f5e72d 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -163,20 +163,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 22d71264acd..d23cb8b423b 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -238,4 +238,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. 
If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 2ea78442155..0544d4d16b0 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -49,7 +49,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 2d66fb0a92e..f082e984058 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -51,6 +51,6 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 \ No newline at end of file diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 01ace009512..66f1c7e3be6 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -115,6 +115,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 6157cb8358e..7026c7c4824 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -312,8 +312,8 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction \ No newline at end of file diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index e8f066d89de..6e84dc7e33e 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -170,4 +170,4 @@ session.execute( Or you 
could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 9d690ad9fab..386b3e6a4e5 100644 --- a/manual/core/temporal_types/README.md +++ b/manual/core/temporal_types/README.md @@ -135,7 +135,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 81628e9c2ee..1d628bd746e 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -138,6 +138,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). -[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 512860a156a..1de5f559df9 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -100,5 +100,5 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/QueryTrace.html diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index fd90c13b8bf..94f38dd8858 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -127,5 +127,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 63767f321a6..e0d7e60ea1b 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -119,5 +119,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index e5183d1178e..5714d010b6c 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -135,7 +135,7 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. -[@Dao]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 3dc1766dc83..614b7a519c0 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -134,13 +134,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html 
[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 73b9701e4e4..06b87db4138 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html 
+[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 35fa18bcf22..a6d909a4a03 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -93,11 +93,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index cfb60c4f445..008545d278e 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 9660add5b22..ec1d4e4369b 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -103,14 +103,14 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/Row.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index f4daea12bd2..12c1c7561f3 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. 
-[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index aea98135b6a..c41dcf41bf5 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -133,18 +133,18 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- 
-[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 8817f9c4ef2..f714e8c7c3e 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index e41ab7f1d63..ab5a6160147 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index f1b06b42249..1af7fb2d4c3 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -127,11 +127,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 1a4ff2a6e8e..8582bc05351 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -445,21 +445,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. 
-[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 1e3ae140fea..c9552590e2a 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -149,8 +149,8 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 7ee77bfec24..57e93f88f8e 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -71,5 +71,5 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-ffi -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 \ No newline at end of file diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index a2c1618fce8..b4977fd6332 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -184,6 +184,6 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/CqlIdentifier.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 
1c5af6551c6..ce88cd1bdd7 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 1beb1f91fb4..a952ef00a25 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 15e1701d2d2..2db2ef18462 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 24a6d07ed6a..06581110e20 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 56c4fc86784..a5bd0234528 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 2b52470b590..2ec960ea136 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 5ae566bc0b6..a55de4a2366 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 9ab07315e67..fabd578de88 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index dc2cb73eb28..1a96cc3643b 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 4da83286a3a..c88acc11822 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 3ca6ab0c1fe..6113aefedf4 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 34c548cea12..4ee1bf1a8f2 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index aa08c4c848f..d0c77fa048b 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 34b88d5f609..017f0d8eea8 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 2ac35b71983..f4ed51babb5 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index c4422673877..8761b0dc239 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.1/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From 650a8011b2dc4c36320408e863b058ac49257c11 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 11:14:20 -0700 Subject: [PATCH 075/979] [maven-release-plugin] prepare release 4.2.0 --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4867c6578c4..420d1aa6c94 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 9c925c913b4..2c4b7980ef8 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 7918284c369..b12d642aa59 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1c8053de782..f4d6a9f358e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.0-SNAPSHOT + 4.2.0 java-driver-examples diff --git a/integration-tests/pom.xml 
b/integration-tests/pom.xml index ac3bb257b1f..fc2fae5d6bb 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 0a6aa020c81..f8d89306973 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 60e2481e104..8ad16760e9f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 9389b5085e6..cc53e65abaa 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.2.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 5fe6dd58261..23184e659fa 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 2c5c5de59d3..3604d8cbdb0 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0-SNAPSHOT + 4.2.0 java-driver-test-infra From 3b98df2f1c83a93b3dd4d6641d8a43c84cdae335 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 11:16:29 -0700 Subject: [PATCH 076/979] [maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- 
examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 420d1aa6c94..993f17b7929 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 2c4b7980ef8..d18fd716e9e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index b12d642aa59..4e842cd385b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index f4d6a9f358e..c51e7d216c8 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.0 + 4.2.1-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index fc2fae5d6bb..b5f01caae6c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f8d89306973..2a6a73f7ed8 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 8ad16760e9f..ea8526e6d04 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT 
java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index cc53e65abaa..6da8c641f30 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.2.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 23184e659fa..d6d1754e237 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3604d8cbdb0..c063276b1f6 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-test-infra From 82a8c06850ed3b721646aaabe7fd81813fbd42c5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 26 Aug 2019 16:15:45 -0700 Subject: [PATCH 077/979] Fix minor typo in changelog --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 5a52f55e787..6bb7046240f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -33,7 +33,7 @@ - [bug] JAVA-2312: Handle UDTs with names that clash with collection types - [improvement] JAVA-2307: Improve `@Select` and `@Delete` by not requiring full primary key - [improvement] JAVA-2315: Improve extensibility of session builder -- [buh] JAVA-2394: BaseCcmRule DseRequirement max should use DseVersion, not Cassandra version +- [bug] JAVA-2394: BaseCcmRule DseRequirement max should use DseVersion, not Cassandra version ### 4.1.0 From 74d6ef58d218758ef271b3cf170fbf78429d0600 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Aug 2019 15:27:11 +0200 Subject: [PATCH 078/979] Remove references to deprecated 
DriverOptionConfigBuilder --- .../internal/core/config/DriverOptionConfigBuilder.java | 3 +-- .../typesafe/DefaultDriverConfigLoaderBuilder.java | 9 ++++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/DriverOptionConfigBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/DriverOptionConfigBuilder.java index 65a3fa4b97b..2bf872e8159 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/DriverOptionConfigBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/DriverOptionConfigBuilder.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.config; import com.datastax.oss.driver.api.core.config.DriverOption; -import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoaderBuilder; import edu.umd.cs.findbugs.annotations.CheckReturnValue; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -24,7 +23,7 @@ import java.util.List; import java.util.Map; -/** @see DefaultDriverConfigLoaderBuilder */ +/** @see com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoaderBuilder */ @Deprecated public interface DriverOptionConfigBuilder { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderBuilder.java index 233df8e31ab..33d6d409bf1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderBuilder.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.config.typesafe; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import 
com.datastax.oss.driver.internal.core.config.DriverOptionConfigBuilder; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; @@ -33,7 +32,8 @@ @NotThreadSafe @Deprecated public class DefaultDriverConfigLoaderBuilder - implements DriverOptionConfigBuilder { + implements com.datastax.oss.driver.internal.core.config.DriverOptionConfigBuilder< + DefaultDriverConfigLoaderBuilder> { private NullAllowingImmutableMap.Builder values = NullAllowingImmutableMap.builder(); @@ -86,7 +86,10 @@ public DefaultDriverConfigLoaderBuilder with(@NonNull String path, @Nullable Obj } /** A builder for specifying options at a profile level using {@code withXXX} methods. */ - public static final class ProfileBuilder implements DriverOptionConfigBuilder { + @Deprecated + public static final class ProfileBuilder + implements com.datastax.oss.driver.internal.core.config.DriverOptionConfigBuilder< + ProfileBuilder> { final NullAllowingImmutableMap.Builder values = NullAllowingImmutableMap.builder(); From 2f2e69a6b443c55111deeb32f8068ab26324f2e0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 30 Aug 2019 15:29:06 -0700 Subject: [PATCH 079/979] Bump version to 4.3.0-SNAPSHOT --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 993f17b7929..4777f67e870 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index d18fd716e9e..a4ff504af86 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ 
com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 4e842cd385b..14426961fcf 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index c51e7d216c8..ce01e7e506e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index b5f01caae6c..a00d9457e6d 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 2a6a73f7ed8..35487a7a773 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ea8526e6d04..39e670ee09c 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 6da8c641f30..083c1e601aa 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d6d1754e237..c3f78d2c6d7 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 
c063276b1f6..08ca57cde35 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.3.0-SNAPSHOT java-driver-test-infra From 57aaa73da3fdbfe5ccec2375f9dc889caa9484c0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 30 Aug 2019 15:28:23 -0700 Subject: [PATCH 080/979] Prepare changelog for next iteration --- changelog/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 6bb7046240f..1852708b98a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.3.0 (in progress) + + ### 4.2.0 - [improvement] JAVA-2390: Add methods to set the SSL engine factory programmatically From 7655e03324b5b046339e92b1dd1c12bb775d0b48 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 16 Sep 2019 10:48:48 -0700 Subject: [PATCH 081/979] Prepare 4.2.x branch for new iteration --- changelog/README.md | 3 +++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 13 insertions(+), 10 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 5a52f55e787..38b14da483c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.2.1 (in progress) + + ### 4.2.0 - [improvement] JAVA-2390: Add methods to set the SSL engine factory programmatically diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 420d1aa6c94..993f17b7929 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 2c4b7980ef8..d18fd716e9e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT 
java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index b12d642aa59..4e842cd385b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index f4d6a9f358e..c51e7d216c8 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.0 + 4.2.1-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index fc2fae5d6bb..b5f01caae6c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f8d89306973..2a6a73f7ed8 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 8ad16760e9f..ea8526e6d04 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index cc53e65abaa..cc253960fd8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 23184e659fa..d6d1754e237 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.0 + 4.2.1-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3604d8cbdb0..c063276b1f6 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 
4.2.0 + 4.2.1-SNAPSHOT java-driver-test-infra From 03d97598d8bd20f31856016615c881d1c06b4acb Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 16 Sep 2019 11:19:15 -0700 Subject: [PATCH 082/979] JAVA-2430: Use variable metadata to infer the routing keyspace on bound statements The previous version was erroneously using the result metadata, which is empty for non-conditional mutations. --- changelog/README.md | 1 + .../core/cql/DefaultBoundStatement.java | 2 +- .../driver/core/cql/PreparedStatementIT.java | 34 +++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 38b14da483c..297a5278127 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [bug] JAVA-2430: Use variable metadata to infer the routing keyspace on bound statements ### 4.2.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java index b0842670f06..66bd1dd87ba 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java @@ -258,7 +258,7 @@ public CqlIdentifier getRoutingKeyspace() { if (routingKeyspace != null) { return routingKeyspace; } else { - ColumnDefinitions definitions = preparedStatement.getResultSetDefinitions(); + ColumnDefinitions definitions = preparedStatement.getVariableDefinitions(); return (definitions.size() == 0) ? 
null : definitions.get(0).getKeyspace(); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 77876567c3a..298987becab 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -38,6 +39,8 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenMap; +import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.protocol.internal.util.Bytes; import com.google.common.collect.ImmutableList; @@ -454,6 +457,37 @@ public void should_fail_fast_if_id_changes_on_reprepare() { } } + @Test + public void should_infer_routing_information_when_partition_key_is_bound() { + should_infer_routing_information_when_partition_key_is_bound( + "SELECT a FROM prepared_statement_test WHERE a = ?"); + should_infer_routing_information_when_partition_key_is_bound( + "INSERT INTO prepared_statement_test (a) VALUES (?)"); + should_infer_routing_information_when_partition_key_is_bound( + "UPDATE prepared_statement_test SET b = 1 WHERE a = 
?"); + should_infer_routing_information_when_partition_key_is_bound( + "DELETE FROM prepared_statement_test WHERE a = ?"); + } + + private void should_infer_routing_information_when_partition_key_is_bound(String queryString) { + CqlSession session = sessionRule.session(); + TokenFactory tokenFactory = + ((DefaultTokenMap) session.getMetadata().getTokenMap().orElseThrow(AssertionError::new)) + .getTokenFactory(); + + // We'll bind a=1 in the query, check what token this is supposed to produce + Token expectedToken = + session + .execute("SELECT token(a) FROM prepared_statement_test WHERE a = 1") + .one() + .getToken(0); + + BoundStatement boundStatement = session.prepare(queryString).bind().setInt("a", 1); + + assertThat(boundStatement.getRoutingKeyspace()).isEqualTo(sessionRule.keyspace()); + assertThat(tokenFactory.hash(boundStatement.getRoutingKey())).isEqualTo(expectedToken); + } + private static Iterable firstPageOf(CompletionStage stage) { return CompletableFutures.getUninterruptibly(stage).currentPage(); } From 60dd30073ee0ff2b7662657975f67a0cd90e8703 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 16 Sep 2019 16:41:40 -0700 Subject: [PATCH 083/979] JAVA-2443: Compute prepared statement PK indices for protocol v3 --- changelog/README.md | 1 + .../driver/internal/core/cql/Conversions.java | 52 +++++++++-- .../internal/core/cql/ConversionsTest.java | 88 +++++++++++++++++++ 3 files changed, 135 insertions(+), 6 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/cql/ConversionsTest.java diff --git a/changelog/README.md b/changelog/README.md index 297a5278127..92a0ea0cc02 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [bug] JAVA-2443: Compute prepared statement PK indices for protocol v3 - [bug] JAVA-2430: Use variable metadata to infer the routing keyspace on bound statements ### 4.2.0 diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 652092e0cff..4826e31e9d6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -35,6 +35,8 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.servererrors.AlreadyExistsException; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; @@ -63,6 +65,7 @@ import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedToken; import com.datastax.oss.driver.internal.core.metadata.token.Murmur3Token; import com.datastax.oss.driver.internal.core.metadata.token.RandomToken; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.primitives.Ints; import com.datastax.oss.protocol.internal.Message; @@ -358,11 +361,21 @@ public static ColumnDefinitions getResultDefinitions( public static DefaultPreparedStatement toPreparedStatement( Prepared response, PrepareRequest request, InternalDriverContext context) { + ColumnDefinitions variableDefinitions = + toColumnDefinitions(response.variablesMetadata, context); + + int[] pkIndicesInResponse = response.variablesMetadata.pkIndices; + // null means a legacy protocol version that doesn't provide the info, try to compute it + List pkIndices = + (pkIndicesInResponse == null) + ? 
computePkIndices(variableDefinitions, context) + : Ints.asList(pkIndicesInResponse); + return new DefaultPreparedStatement( ByteBuffer.wrap(response.preparedQueryId).asReadOnlyBuffer(), request.getQuery(), - toColumnDefinitions(response.variablesMetadata, context), - asList(response.variablesMetadata.pkIndices), + variableDefinitions, + pkIndices, (response.resultMetadataId == null) ? null : ByteBuffer.wrap(response.resultMetadataId).asReadOnlyBuffer(), @@ -396,12 +409,39 @@ public static ColumnDefinitions toColumnDefinitions( return DefaultColumnDefinitions.valueOf(ImmutableList.copyOf(values)); } - public static List asList(int[] pkIndices) { - if (pkIndices == null || pkIndices.length == 0) { + public static List computePkIndices( + ColumnDefinitions variables, InternalDriverContext context) { + if (variables.size() == 0) { return Collections.emptyList(); - } else { - return Ints.asList(pkIndices); } + // The rest of the computation relies on the fact that CQL does not have joins: all variables + // belong to the same keyspace and table. 
+ ColumnDefinition firstVariable = variables.get(0); + return context + .getMetadataManager() + .getMetadata() + .getKeyspace(firstVariable.getKeyspace()) + .flatMap(ks -> ks.getTable(firstVariable.getTable())) + .map(RelationMetadata::getPartitionKey) + .map(pk -> findIndices(pk, variables)) + .orElse(Collections.emptyList()); + } + + // Find at which position in `variables` each element of `partitionKey` appears + @VisibleForTesting + static List findIndices(List partitionKey, ColumnDefinitions variables) { + ImmutableList.Builder result = + ImmutableList.builderWithExpectedSize(partitionKey.size()); + for (ColumnMetadata pkColumn : partitionKey) { + int firstIndex = variables.firstIndexOf(pkColumn.getName()); + if (firstIndex < 0) { + // If a single column is missing, we can abort right away + return Collections.emptyList(); + } else { + result.add(firstIndex); + } + } + return result.build(); } public static CoordinatorException toThrowable( diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/ConversionsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/ConversionsTest.java new file mode 100644 index 00000000000..4d52ab2323a --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/ConversionsTest.java @@ -0,0 +1,88 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.cql.ColumnDefinition; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.List; +import org.junit.Test; + +public class ConversionsTest { + @Test + public void should_find_pk_indices_if_all_bound() { + assertThat(Conversions.findIndices(partitionKey("pk"), variables("pk"))).containsExactly(0); + assertThat(Conversions.findIndices(partitionKey("pk"), variables("pk", "c"))) + .containsExactly(0); + assertThat(Conversions.findIndices(partitionKey("pk"), variables("c", "pk"))) + .containsExactly(1); + assertThat( + Conversions.findIndices( + partitionKey("pk1", "pk2", "pk3"), + variables("c1", "pk2", "pk3", "c2", "pk1", "c3"))) + .containsExactly(4, 1, 2); + } + + @Test + public void should_use_first_pk_index_if_bound_multiple_times() { + assertThat(Conversions.findIndices(partitionKey("pk"), variables("pk", "pk"))) + .containsExactly(0); + assertThat(Conversions.findIndices(partitionKey("pk"), variables("pk", "c1", "pk", "c2"))) + .containsExactly(0); + assertThat( + Conversions.findIndices( + partitionKey("pk1", "pk2", "pk3"), + variables("c1", "pk2", "pk3", "c2", "pk1", "c3", "pk1", "pk2"))) + .containsExactly(4, 1, 2); + } + + @Test + public void should_return_empty_pk_indices_if_at_least_one_component_not_bound() { + assertThat(Conversions.findIndices(partitionKey("pk"), variables("c1", "c2"))).isEmpty(); + assertThat( + Conversions.findIndices( + partitionKey("pk1", "pk2", "pk3"), variables("c1", "pk2", "c2", "pk1", "c3"))) + .isEmpty(); + } + + private List partitionKey(String... 
columnNames) { + ImmutableList.Builder columns = + ImmutableList.builderWithExpectedSize(columnNames.length); + for (String columnName : columnNames) { + ColumnMetadata column = mock(ColumnMetadata.class); + when(column.getName()).thenReturn(CqlIdentifier.fromInternal(columnName)); + columns.add(column); + } + return columns.build(); + } + + private ColumnDefinitions variables(String... columnNames) { + ImmutableList.Builder columns = + ImmutableList.builderWithExpectedSize(columnNames.length); + for (String columnName : columnNames) { + ColumnDefinition column = mock(ColumnDefinition.class); + when(column.getName()).thenReturn(CqlIdentifier.fromInternal(columnName)); + columns.add(column); + } + return DefaultColumnDefinitions.valueOf(columns.build()); + } +} From ccb47943ea2d41b0ae1b9417cbae2ea7b39b30e9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 12 Sep 2019 10:17:23 -0700 Subject: [PATCH 084/979] Improve comments about ShallowUserDefinedType --- .../metadata/schema/ShallowUserDefinedType.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java index 4e2975c6d8c..44852689a68 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.UserDefinedTypeParser; import com.datastax.oss.driver.internal.core.type.DefaultUserDefinedType; import edu.umd.cs.findbugs.annotations.NonNull; import 
edu.umd.cs.findbugs.annotations.Nullable; @@ -33,11 +34,16 @@ /** * A temporary UDT implementation that only contains the keyspace and name. * - *

    When we process a schema refresh that spans multiple UDTs, we can't fully materialize them - * right away, because they might depend on each other and the system table query does not return - * them in topological order. So we do a first pass where UDT field that are also UDTs are resolved - * as instances of this class, then a topological sort, then a second pass to replace all shallow - * definitions by the actual instance (which will be a {@link DefaultUserDefinedType}). + *

    When we refresh a keyspace's UDTs, we can't fully materialize them right away, because they + might depend on each other and the system table query does not return them in topological order. + So we do a first pass where UDTs that are nested into other UDTs are resolved as instances of this + class, then a topological sort, then a second pass to replace all shallow definitions by the + actual instance (which will be a {@link DefaultUserDefinedType}). + * + *

    This type is also used in the schema builder's internal representation: the keyspace, name and + * frozen-ness are the only things we need to generate a query string. + * + * @see UserDefinedTypeParser */ @Immutable public class ShallowUserDefinedType implements UserDefinedType, Serializable { From b5d2127be49c65f9d3fd5e763fc70838a7698fff Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 12 Sep 2019 16:45:22 -0700 Subject: [PATCH 085/979] JAVA-2436: Extract simplified parse method in DataTypeCqlNameParser --- .../parsing/DataTypeClassNameParser.java | 47 ++++++++++++++----- .../core/type/UserDefinedTypeBuilder.java | 9 +++- 2 files changed, 44 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java index d2470c0d48a..6b4a5ec11fe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.UserDefinedType; @@ -60,6 +61,30 @@ public DataType parse( String toParse, Map userTypes, InternalDriverContext context) { + // We take keyspaceId as a parameter because of the parent interface, but it's actually unused + // by this implementation. + return parse(toParse, userTypes, context, context.getSessionName()); + } + + /** + * Simplified parse method for external use. + * + *

    This is intended for use in Cassandra's UDF implementation (the current version uses the + * similar method from driver 3). + */ + public DataType parse(String toParse, AttachmentPoint attachmentPoint) { + return parse( + toParse, + null, // No caching of user types: nested types will always be fully re-parsed + attachmentPoint, + "parser"); + } + + private DataType parse( + String toParse, + Map userTypes, + AttachmentPoint attachmentPoint, + String logPrefix) { boolean frozen = false; if (isReversed(toParse)) { // Just skip the ReversedType part, we don't care @@ -74,20 +99,20 @@ public DataType parse( if (next.startsWith("org.apache.cassandra.db.marshal.ListType")) { DataType elementType = - parse(keyspaceId, parser.getTypeParameters().get(0), userTypes, context); + parse(parser.getTypeParameters().get(0), userTypes, attachmentPoint, logPrefix); return DataTypes.listOf(elementType, frozen); } if (next.startsWith("org.apache.cassandra.db.marshal.SetType")) { DataType elementType = - parse(keyspaceId, parser.getTypeParameters().get(0), userTypes, context); + parse(parser.getTypeParameters().get(0), userTypes, attachmentPoint, logPrefix); return DataTypes.setOf(elementType, frozen); } if (next.startsWith("org.apache.cassandra.db.marshal.MapType")) { List parameters = parser.getTypeParameters(); - DataType keyType = parse(keyspaceId, parameters.get(0), userTypes, context); - DataType valueType = parse(keyspaceId, parameters.get(1), userTypes, context); + DataType keyType = parse(parameters.get(0), userTypes, attachmentPoint, logPrefix); + DataType valueType = parse(parameters.get(1), userTypes, attachmentPoint, logPrefix); return DataTypes.mapOf(keyType, valueType, frozen); } @@ -95,7 +120,7 @@ public DataType parse( LOG.warn( "[{}] Got o.a.c.db.marshal.FrozenType for something else than a collection, " + "this driver version might be too old for your version of Cassandra", - context.getSessionName()); + logPrefix); if 
(next.startsWith("org.apache.cassandra.db.marshal.UserType")) { ++parser.idx; // skipping '(' @@ -104,7 +129,7 @@ public DataType parse( parser.skipBlankAndComma(); String typeName = TypeCodecs.TEXT.decode( - Bytes.fromHexString("0x" + parser.readOne()), context.getProtocolVersion()); + Bytes.fromHexString("0x" + parser.readOne()), attachmentPoint.getProtocolVersion()); if (typeName == null) { throw new AssertionError("Type name cannot be null, this is a server bug"); } @@ -120,11 +145,11 @@ public DataType parse( parser.skipBlankAndComma(); for (Map.Entry entry : nameAndTypeParameters.entrySet()) { CqlIdentifier fieldName = CqlIdentifier.fromInternal(entry.getKey()); - DataType fieldType = parse(keyspaceId, entry.getValue(), userTypes, context); + DataType fieldType = parse(entry.getValue(), userTypes, attachmentPoint, logPrefix); builder.withField(fieldName, fieldType); } - // create a frozen UserType since C* 2.x UDTs are always frozen. - return builder.frozen().build(); + // Create a frozen UserType since C* 2.x UDTs are always frozen. 
+ return builder.frozen().withAttachmentPoint(attachmentPoint).build(); } } @@ -132,9 +157,9 @@ public DataType parse( List rawTypes = parser.getTypeParameters(); ImmutableList.Builder componentTypesBuilder = ImmutableList.builder(); for (String rawType : rawTypes) { - componentTypesBuilder.add(parse(keyspaceId, rawType, userTypes, context)); + componentTypesBuilder.add(parse(rawType, userTypes, attachmentPoint, logPrefix)); } - return new DefaultTupleType(componentTypesBuilder.build(), context); + return new DefaultTupleType(componentTypesBuilder.build(), attachmentPoint); } DataType type = NATIVE_TYPES_BY_CLASS_NAME.get(next); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/UserDefinedTypeBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/UserDefinedTypeBuilder.java index 1bd04ad005d..7b3657da6fd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/UserDefinedTypeBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/UserDefinedTypeBuilder.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.type; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -37,6 +38,7 @@ public class UserDefinedTypeBuilder { private boolean frozen; private final ImmutableList.Builder fieldNames; private final ImmutableList.Builder fieldTypes; + private AttachmentPoint attachmentPoint = AttachmentPoint.NONE; public UserDefinedTypeBuilder(CqlIdentifier keyspaceName, CqlIdentifier typeName) { this.keyspaceName = keyspaceName; @@ -69,8 +71,13 @@ public UserDefinedTypeBuilder frozen() { return this; } + public UserDefinedTypeBuilder withAttachmentPoint(AttachmentPoint attachmentPoint) { + this.attachmentPoint = 
attachmentPoint; + return this; + } + public UserDefinedType build() { return new DefaultUserDefinedType( - keyspaceName, typeName, frozen, fieldNames.build(), fieldTypes.build()); + keyspaceName, typeName, frozen, fieldNames.build(), fieldTypes.build(), attachmentPoint); } } From 944445aaf551ecf4fdc1fec9229a61725a1e4074 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 20 Sep 2019 16:11:22 +0300 Subject: [PATCH 086/979] JAVA-2451: Make zero a valid estimated size for PagingIterableSpliterator --- changelog/README.md | 1 + .../oss/driver/internal/core/cql/PagingIterableSpliterator.java | 2 +- .../driver/internal/core/cql/PagingIterableSpliteratorTest.java | 2 ++ 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 92a0ea0cc02..e7906d4db42 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [bug] JAVA-2451: Make zero a valid estimated size for PagingIterableSpliterator - [bug] JAVA-2443: Compute prepared statement PK indices for protocol v3 - [bug] JAVA-2430: Use variable metadata to infer the routing keyspace on bound statements diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java index 977b1521f6b..1a17f26ec46 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java @@ -140,7 +140,7 @@ public static class Builder { @NonNull public Builder withEstimatedSize(long estimatedSize) { - Preconditions.checkArgument(estimatedSize > 0, "estimatedSize must be > 0"); + Preconditions.checkArgument(estimatedSize >= 0, "estimatedSize must be >= 0"); this.estimatedSize = estimatedSize; return this; } diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java index da5cdb483e9..2e1356f5a99 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliteratorTest.java @@ -86,6 +86,7 @@ public void should_split_with_estimated_size( @DataProvider public static Iterable splitsWithEstimatedSize() { List> arguments = new ArrayList<>(); + arguments.add(Lists.newArrayList(0, 1, ImmutableList.of(), ImmutableList.of())); arguments.add(Lists.newArrayList(1, 1, ImmutableList.of(), ImmutableList.of(0))); arguments.add(Lists.newArrayList(1, 2, ImmutableList.of(), ImmutableList.of(0))); arguments.add(Lists.newArrayList(2, 1, ImmutableList.of(0), ImmutableList.of(1))); @@ -144,6 +145,7 @@ public void should_split_with_unknown_size( @DataProvider public static Iterable splitsWithUnknownSize() { List> arguments = new ArrayList<>(); + arguments.add(Lists.newArrayList(0, 1, ImmutableList.of(), ImmutableList.of())); arguments.add(Lists.newArrayList(1, 1, ImmutableList.of(0), ImmutableList.of())); arguments.add(Lists.newArrayList(1, 2, ImmutableList.of(0), ImmutableList.of())); arguments.add(Lists.newArrayList(2, 1, ImmutableList.of(0), ImmutableList.of(1))); From ed2ee0477f9fde8342ec945c5455ef35f3fb2282 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 20 Sep 2019 16:12:53 +0300 Subject: [PATCH 087/979] Fix typo in comment --- .../oss/driver/internal/core/cql/PagingIterableSpliterator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java index 1a17f26ec46..7ed10778f56 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/PagingIterableSpliterator.java @@ -89,7 +89,7 @@ public boolean tryAdvance(Consumer action) { @Nullable public Spliterator trySplit() { if (estimatedSize != Long.MAX_VALUE && estimatedSize <= chunkSize) { - // There is not point in splitting if the number of remaining elements is below the chunk size + // There is no point in splitting if the number of remaining elements is below the chunk size return null; } ElementT row = iterable.one(); From 6bd4991048e0afb3cc1eb221677b8344caec4b3e Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Mon, 9 Sep 2019 09:23:12 -0500 Subject: [PATCH 088/979] JAVA-2426: Fix month pattern in CqlDuration documentation --- changelog/README.md | 1 + .../java/com/datastax/oss/driver/api/core/data/CqlDuration.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index e7906d4db42..053db990e52 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [documentation] JAVA-2426: Fix month pattern in CqlDuration documentation - [bug] JAVA-2451: Make zero a valid estimated size for PagingIterableSpliterator - [bug] JAVA-2443: Compute prepared statement PK indices for protocol v3 - [bug] JAVA-2430: Use variable metadata to infer the routing keyspace on bound statements diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java index c41e02e56e1..40b6b41ee60 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java @@ -115,7 +115,7 @@ public static CqlDuration newInstance(int months, int days, long nanoseconds) { *

  • multiple digits followed by a time unit like: 12h30m where the time unit can be: *
      *
    • {@code y}: years - *
    • {@code m}: months + *
    • {@code mo}: months *
    • {@code w}: weeks *
    • {@code d}: days *
    • {@code h}: hours From 6254d23474dc30f3188803b61289dd5586ca4f59 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 13 Sep 2019 14:01:50 -0500 Subject: [PATCH 089/979] JAVA-2429: Document expected types on DefaultDriverOption --- changelog/README.md | 1 + .../api/core/config/DefaultDriverOption.java | 607 ++++++++++++++++++ 2 files changed, 608 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 053db990e52..30a01695afd 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [documentation] JAVA-2429: Document expected types on DefaultDriverOption - [documentation] JAVA-2426: Fix month pattern in CqlDuration documentation - [bug] JAVA-2451: Make zero a valid estimated size for PagingIterableSpliterator - [bug] JAVA-2443: Compute prepared statement PK indices for protocol v3 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 863870f2feb..2ad96a8aea1 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -23,157 +23,764 @@ *

      Refer to {@code reference.conf} in the driver codebase for a full description of each option. */ public enum DefaultDriverOption implements DriverOption { + /** + * The contact points to use for the initial connection to the cluster. + * + *

      Value type: {@link java.util.List List}<{@link String}> + */ CONTACT_POINTS("basic.contact-points"), + /** + * A name that uniquely identifies the driver instance. + * + *

      Value-type: {@link String} + */ SESSION_NAME("basic.session-name"), + /** + * The name of the keyspace that the session should initially be connected to. + * + *

      Value-type: {@link String} + */ SESSION_KEYSPACE("basic.session-keyspace"), + /** + * How often the driver tries to reload the configuration. + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONFIG_RELOAD_INTERVAL("basic.config-reload-interval"), + /** + * How long the driver waits for a request to complete. + * + *

      Value-type: {@link java.time.Duration Duration} + */ REQUEST_TIMEOUT("basic.request.timeout"), + /** + * The consistency level. + * + *

      Value-Type: {@link String} + */ REQUEST_CONSISTENCY("basic.request.consistency"), + /** + * The page size. + * + *

      Value-Type: int + */ REQUEST_PAGE_SIZE("basic.request.page-size"), + /** + * The serial consistency level. + * + *

      Value-type: {@link String} + */ REQUEST_SERIAL_CONSISTENCY("basic.request.serial-consistency"), + /** + * The default idempotence of a request. + * + *

      Value-type: boolean + */ REQUEST_DEFAULT_IDEMPOTENCE("basic.request.default-idempotence"), + // LOAD_BALANCING_POLICY is a collection of sub-properties LOAD_BALANCING_POLICY("basic.load-balancing-policy"), + /** + * The class of the load balancing policy. + * + *

      Value-type: {@link String} + */ LOAD_BALANCING_POLICY_CLASS("basic.load-balancing-policy.class"), + /** + * The datacenter that is considered "local". + * + *

      Value-type: {@link String} + */ LOAD_BALANCING_LOCAL_DATACENTER("basic.load-balancing-policy.local-datacenter"), + /** + * A custom filter to include/exclude nodes. + * + *

      Value-Type: {@link String} + */ LOAD_BALANCING_FILTER_CLASS("basic.load-balancing-policy.filter.class"), + /** + * The timeout to use for internal queries that run as part of the initialization process + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONNECTION_INIT_QUERY_TIMEOUT("advanced.connection.init-query-timeout"), + /** + * The timeout to use when the driver changes the keyspace on a connection at runtime. + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONNECTION_SET_KEYSPACE_TIMEOUT("advanced.connection.set-keyspace-timeout"), + /** + * The maximum number of requests that can be executed concurrently on a connection + * + *

      Value-type: int + */ CONNECTION_MAX_REQUESTS("advanced.connection.max-requests-per-connection"), + /** + * The maximum number of "orphaned" requests before a connection gets closed automatically. + * + *

      Value-type: int + */ CONNECTION_MAX_ORPHAN_REQUESTS("advanced.connection.max-orphan-requests"), + /** + * Whether to log non-fatal errors when the driver tries to open a new connection. + * + *

      Value-type: boolean + */ CONNECTION_WARN_INIT_ERROR("advanced.connection.warn-on-init-error"), + /** + * The number of connections in the LOCAL pool. + * + *

      Value-type: int + */ CONNECTION_POOL_LOCAL_SIZE("advanced.connection.pool.local.size"), + /** + * The number of connections in the REMOTE pool. + * + *

      Value-type: int + */ CONNECTION_POOL_REMOTE_SIZE("advanced.connection.pool.remote.size"), + /** + * Whether to schedule reconnection attempts if all contact points are unreachable on the first + * initialization attempt. + * + *

      Value-type: boolean + */ RECONNECT_ON_INIT("advanced.reconnect-on-init"), + /** + * The class of the reconnection policy. + * + *

      Value-type: {@link String} + */ RECONNECTION_POLICY_CLASS("advanced.reconnection-policy.class"), + /** + * Base delay for computing time between reconnection attempts. + * + *

      Value-type: {@link java.time.Duration Duration} + */ RECONNECTION_BASE_DELAY("advanced.reconnection-policy.base-delay"), + /** + * Maximum delay between reconnection attempts. + * + *

      Value-type: {@link java.time.Duration Duration} + */ RECONNECTION_MAX_DELAY("advanced.reconnection-policy.max-delay"), + // RETRY_POLICY is a collection of sub-properties RETRY_POLICY("advanced.retry-policy"), + /** + * The class of the retry policy. + * + *

      Value-type: {@link String} + */ RETRY_POLICY_CLASS("advanced.retry-policy.class"), + // SPECULATIVE_EXECUTION_POLICY is a collection of sub-properties SPECULATIVE_EXECUTION_POLICY("advanced.speculative-execution-policy"), + /** + * The class of the speculative execution policy. + * + *

      Value-type: {@link String} + */ SPECULATIVE_EXECUTION_POLICY_CLASS("advanced.speculative-execution-policy.class"), + /** + * The maximum number of executions. + * + *

      Value-type: int + */ SPECULATIVE_EXECUTION_MAX("advanced.speculative-execution-policy.max-executions"), + /** + * The delay between each execution. + * + *

      Value-type: {@link java.time.Duration Duration} + */ SPECULATIVE_EXECUTION_DELAY("advanced.speculative-execution-policy.delay"), + /** + * The class of the authentication provider. + * + *

      Value-type: {@link String} + */ AUTH_PROVIDER_CLASS("advanced.auth-provider.class"), + /** + * Plain text auth provider username. + * + *

      Value-type: {@link String} + */ AUTH_PROVIDER_USER_NAME("advanced.auth-provider.username"), + /** + * Plain text auth provider password. + * + *

      Value-type: {@link String} + */ AUTH_PROVIDER_PASSWORD("advanced.auth-provider.password"), + /** + * The class of the SSL Engine Factory. + * + *

      Value-type: {@link String} + */ SSL_ENGINE_FACTORY_CLASS("advanced.ssl-engine-factory.class"), + /** + * The cipher suites to enable when creating an SSLEngine for a connection. + * + *

      Value type: {@link java.util.List List}<{@link String}> + */ SSL_CIPHER_SUITES("advanced.ssl-engine-factory.cipher-suites"), + /** + * Whether or not to require validation that the hostname of the server certificate's common name + * matches the hostname of the server being connected to. + * + *

      Value-type: boolean + */ SSL_HOSTNAME_VALIDATION("advanced.ssl-engine-factory.hostname-validation"), + /** + * The location of the keystore file. + * + *

      Value-type: {@link String} + */ SSL_KEYSTORE_PATH("advanced.ssl-engine-factory.keystore-path"), + /** + * The keystore password. + * + *

      Value-type: {@link String} + */ SSL_KEYSTORE_PASSWORD("advanced.ssl-engine-factory.keystore-password"), + /** + * The location of the truststore file. + * + *

      Value-type: {@link String} + */ SSL_TRUSTSTORE_PATH("advanced.ssl-engine-factory.truststore-path"), + /** + * The truststore password. + * + *

      Value-type: {@link String} + */ SSL_TRUSTSTORE_PASSWORD("advanced.ssl-engine-factory.truststore-password"), + /** + * The class of the generator that assigns a microsecond timestamp to each request. + * + *

      Value-type: {@link String} + */ TIMESTAMP_GENERATOR_CLASS("advanced.timestamp-generator.class"), + /** + * Whether to force the driver to use Java's millisecond-precision system clock. + * + *

      Value-type: boolean + */ TIMESTAMP_GENERATOR_FORCE_JAVA_CLOCK("advanced.timestamp-generator.force-java-clock"), + /** + * How far in the future timestamps are allowed to drift before the warning is logged. + * + *

      Value-type: {@link java.time.Duration Duration} + */ TIMESTAMP_GENERATOR_DRIFT_WARNING_THRESHOLD( "advanced.timestamp-generator.drift-warning.threshold"), + /** + * How often the warning will be logged if timestamps keep drifting above the threshold. + * + *

      Value-type: {@link java.time.Duration Duration} + */ TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL("advanced.timestamp-generator.drift-warning.interval"), + /** + * The class of a session-wide component that tracks the outcome of requests. + * + *

      Value-type: {@link String} + */ REQUEST_TRACKER_CLASS("advanced.request-tracker.class"), + /** + * Whether to log successful requests. + * + *

      Value-type: boolean + */ REQUEST_LOGGER_SUCCESS_ENABLED("advanced.request-tracker.logs.success.enabled"), + /** + * The threshold to classify a successful request as "slow". + * + *

      Value-type: {@link java.time.Duration Duration} + */ REQUEST_LOGGER_SLOW_THRESHOLD("advanced.request-tracker.logs.slow.threshold"), + /** + * Whether to log slow requests. + * + *

      Value-type: boolean + */ REQUEST_LOGGER_SLOW_ENABLED("advanced.request-tracker.logs.slow.enabled"), + /** + * Whether to log failed requests. + * + *

      Value-type: boolean + */ REQUEST_LOGGER_ERROR_ENABLED("advanced.request-tracker.logs.error.enabled"), + /** + * The maximum length of the query string in the log message. + * + *

      Value-type: int + */ REQUEST_LOGGER_MAX_QUERY_LENGTH("advanced.request-tracker.logs.max-query-length"), + /** + * Whether to log bound values in addition to the query string. + * + *

      Value-type: boolean + */ REQUEST_LOGGER_VALUES("advanced.request-tracker.logs.show-values"), + /** + * The maximum length for bound values in the log message. + * + *

      Value-type: int + */ REQUEST_LOGGER_MAX_VALUE_LENGTH("advanced.request-tracker.logs.max-value-length"), + /** + * The maximum number of bound values to log. + * + *

      Value-type: int + */ REQUEST_LOGGER_MAX_VALUES("advanced.request-tracker.logs.max-values"), + /** + * Whether to log stack traces for failed queries. + * + *

      Value-type: boolean + */ REQUEST_LOGGER_STACK_TRACES("advanced.request-tracker.logs.show-stack-traces"), + /** + * The class of a session-wide component that controls the rate at which requests are executed. + * + *

      Value-type: {@link String} + */ REQUEST_THROTTLER_CLASS("advanced.throttler.class"), + /** + * The maximum number of requests that are allowed to execute in parallel. + * + *

      Value-type: int + */ REQUEST_THROTTLER_MAX_CONCURRENT_REQUESTS("advanced.throttler.max-concurrent-requests"), + /** + * The maximum allowed request rate. + * + *

      Value-type: int + */ REQUEST_THROTTLER_MAX_REQUESTS_PER_SECOND("advanced.throttler.max-requests-per-second"), + /** + * The maximum number of requests that can be enqueued when the throttling threshold is exceeded. + * + *

      Value-type: int + */ REQUEST_THROTTLER_MAX_QUEUE_SIZE("advanced.throttler.max-queue-size"), + /** + * How often the throttler attempts to dequeue requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ REQUEST_THROTTLER_DRAIN_INTERVAL("advanced.throttler.drain-interval"), + /** + * The class of a session-wide component that listens for node state changes. + * + *

      Value-type: {@link String} + */ METADATA_NODE_STATE_LISTENER_CLASS("advanced.node-state-listener.class"), + /** + * The class of a session-wide component that listens for schema changes. + * + *

      Value-type: {@link String} + */ METADATA_SCHEMA_CHANGE_LISTENER_CLASS("advanced.schema-change-listener.class"), + /** + * The class of the address translator to use to convert the addresses sent by Cassandra nodes + * into ones that the driver uses to connect. + * + *

      Value-type: {@link String} + */ ADDRESS_TRANSLATOR_CLASS("advanced.address-translator.class"), + /** + * The native protocol version to use. + * + *

      Value-type: {@link String} + */ PROTOCOL_VERSION("advanced.protocol.version"), + /** + * The name of the algorithm used to compress protocol frames. + * + *

      Value-type: {@link String} + */ PROTOCOL_COMPRESSION("advanced.protocol.compression"), + /** + * The maximum length, in bytes, of the frames supported by the driver. + * + *

      Value-type: long + */ PROTOCOL_MAX_FRAME_LENGTH("advanced.protocol.max-frame-length"), + /** + * Whether a warning is logged when a request (such as a CQL `USE ...`) changes the active + * keyspace. + * + *

      Value-type: boolean + */ REQUEST_WARN_IF_SET_KEYSPACE("advanced.request.warn-if-set-keyspace"), + /** + * How many times the driver will attempt to fetch the query trace if it is not ready yet. + * + *

      Value-type: int + */ REQUEST_TRACE_ATTEMPTS("advanced.request.trace.attempts"), + /** + * The interval between each attempt. + * + *

      Value-type: {@link java.time.Duration Duration} + */ REQUEST_TRACE_INTERVAL("advanced.request.trace.interval"), + /** + * The consistency level to use for trace queries. + * + *

      Value-type: {@link String} + */ REQUEST_TRACE_CONSISTENCY("advanced.request.trace.consistency"), + /** + * List of enabled session-level metrics. + * + *

      Value type: {@link java.util.List List}<{@link String}> + */ METRICS_SESSION_ENABLED("advanced.metrics.session.enabled"), + /** + * List of enabled node-level metrics. + * + *

      Value type: {@link java.util.List List}<{@link String}> + */ METRICS_NODE_ENABLED("advanced.metrics.node.enabled"), + /** + * The largest latency that we expect to record for requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_SESSION_CQL_REQUESTS_HIGHEST("advanced.metrics.session.cql-requests.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for + * requests. + * + *

      Value-type: int + */ METRICS_SESSION_CQL_REQUESTS_DIGITS("advanced.metrics.session.cql-requests.significant-digits"), + /** + * The interval at which percentile data is refreshed for requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_SESSION_CQL_REQUESTS_INTERVAL("advanced.metrics.session.cql-requests.refresh-interval"), + /** + * The largest latency that we expect to record for throttling. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_SESSION_THROTTLING_HIGHEST("advanced.metrics.session.throttling.delay.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for + * throttling. + * + *

      Value-type: int + */ METRICS_SESSION_THROTTLING_DIGITS("advanced.metrics.session.throttling.delay.significant-digits"), + /** + * The interval at which percentile data is refreshed for throttling. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_SESSION_THROTTLING_INTERVAL("advanced.metrics.session.throttling.delay.refresh-interval"), + /** + * The largest latency that we expect to record for requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_NODE_CQL_MESSAGES_HIGHEST("advanced.metrics.node.cql-messages.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for + * requests. + * + *

      Value-type: int + */ METRICS_NODE_CQL_MESSAGES_DIGITS("advanced.metrics.node.cql-messages.significant-digits"), + /** + * The interval at which percentile data is refreshed for requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METRICS_NODE_CQL_MESSAGES_INTERVAL("advanced.metrics.node.cql-messages.refresh-interval"), + /** + * Whether or not to disable the Nagle algorithm. + * + *

      Value-type: boolean + */ SOCKET_TCP_NODELAY("advanced.socket.tcp-no-delay"), + /** + * Whether or not to enable TCP keep-alive probes. + * + *

      Value-type: boolean + */ SOCKET_KEEP_ALIVE("advanced.socket.keep-alive"), + /** + * Whether or not to allow address reuse. + * + *

      Value-type: boolean + */ SOCKET_REUSE_ADDRESS("advanced.socket.reuse-address"), + /** + * Sets the linger interval. + * + *

      Value-type: int + */ SOCKET_LINGER_INTERVAL("advanced.socket.linger-interval"), + /** + * Sets a hint to the size of the underlying buffers for incoming network I/O. + * + *

      Value-type: int + */ SOCKET_RECEIVE_BUFFER_SIZE("advanced.socket.receive-buffer-size"), + /** + * Sets a hint to the size of the underlying buffers for outgoing network I/O. + * + *

      Value-type: int + */ SOCKET_SEND_BUFFER_SIZE("advanced.socket.send-buffer-size"), + /** + * The connection heartbeat interval. + * + *

      Value-type: {@link java.time.Duration Duration} + */ HEARTBEAT_INTERVAL("advanced.heartbeat.interval"), + /** + * How long the driver waits for the response to a heartbeat. + * + *

      Value-type: {@link java.time.Duration Duration} + */ HEARTBEAT_TIMEOUT("advanced.heartbeat.timeout"), + /** + * How long the driver waits to propagate a Topology event. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METADATA_TOPOLOGY_WINDOW("advanced.metadata.topology-event-debouncer.window"), + /** + * The maximum number of events that can accumulate. + * + *

      Value-type: int + */ METADATA_TOPOLOGY_MAX_EVENTS("advanced.metadata.topology-event-debouncer.max-events"), + /** + * Whether schema metadata is enabled. + * + *

      Value-type: boolean + */ METADATA_SCHEMA_ENABLED("advanced.metadata.schema.enabled"), + /** + * The timeout for the requests to the schema tables. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METADATA_SCHEMA_REQUEST_TIMEOUT("advanced.metadata.schema.request-timeout"), + /** + * The page size for the requests to the schema tables. + * + *

      Value-type: int + */ METADATA_SCHEMA_REQUEST_PAGE_SIZE("advanced.metadata.schema.request-page-size"), + /** + * The list of keyspaces for which schema and token metadata should be maintained. + * + *

      Value type: {@link java.util.List List}<{@link String}> + */ METADATA_SCHEMA_REFRESHED_KEYSPACES("advanced.metadata.schema.refreshed-keyspaces"), + /** + * How long the driver waits to apply a refresh. + * + *

      Value-type: {@link java.time.Duration Duration} + */ METADATA_SCHEMA_WINDOW("advanced.metadata.schema.debouncer.window"), + /** + * The maximum number of refreshes that can accumulate. + * + *

      Value-type: int + */ METADATA_SCHEMA_MAX_EVENTS("advanced.metadata.schema.debouncer.max-events"), + /** + * Whether token metadata is enabled. + * + *

      Value-type: boolean + */ METADATA_TOKEN_MAP_ENABLED("advanced.metadata.token-map.enabled"), + /** + * How long the driver waits for responses to control queries. + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONTROL_CONNECTION_TIMEOUT("advanced.control-connection.timeout"), + /** + * The interval between each schema agreement check attempt. + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONTROL_CONNECTION_AGREEMENT_INTERVAL("advanced.control-connection.schema-agreement.interval"), + /** + * The timeout after which schema agreement fails. + * + *

      Value-type: {@link java.time.Duration Duration} + */ CONTROL_CONNECTION_AGREEMENT_TIMEOUT("advanced.control-connection.schema-agreement.timeout"), + /** + * Whether to log a warning if schema agreement fails. + * + *

      Value-type: boolean + */ CONTROL_CONNECTION_AGREEMENT_WARN("advanced.control-connection.schema-agreement.warn-on-failure"), + /** + * Whether `Session.prepare` calls should be sent to all nodes in the cluster. + * + *

      Value-type: boolean + */ PREPARE_ON_ALL_NODES("advanced.prepared-statements.prepare-on-all-nodes"), + /** + * Whether the driver tries to prepare on new nodes at all. + * + *

      Value-type: boolean + */ REPREPARE_ENABLED("advanced.prepared-statements.reprepare-on-up.enabled"), + /** + * Whether to check `system.prepared_statements` on the target node before repreparing. + * + *

      Value-type: boolean + */ REPREPARE_CHECK_SYSTEM_TABLE("advanced.prepared-statements.reprepare-on-up.check-system-table"), + /** + * The maximum number of statements that should be reprepared. + * + *

      Value-type: int + */ REPREPARE_MAX_STATEMENTS("advanced.prepared-statements.reprepare-on-up.max-statements"), + /** + * The maximum number of concurrent requests when repreparing. + * + *

      Value-type: int + */ REPREPARE_MAX_PARALLELISM("advanced.prepared-statements.reprepare-on-up.max-parallelism"), + /** + * The request timeout when repreparing. + * + *

      Value-type: {@link java.time.Duration Duration} + */ REPREPARE_TIMEOUT("advanced.prepared-statements.reprepare-on-up.timeout"), + /** + * The number of threads in the I/O group. + * + *

      Value-type: int + */ NETTY_IO_SIZE("advanced.netty.io-group.size"), + /** + * Quiet period for I/O group shutdown. + * + *

      Value-type: int + */ NETTY_IO_SHUTDOWN_QUIET_PERIOD("advanced.netty.io-group.shutdown.quiet-period"), + /** + * Max time to wait for I/O group shutdown. + * + *

      Value-type: int + */ NETTY_IO_SHUTDOWN_TIMEOUT("advanced.netty.io-group.shutdown.timeout"), + /** + * Units for I/O group quiet period and timeout. + * + *

      Value-type: {@link String} + */ NETTY_IO_SHUTDOWN_UNIT("advanced.netty.io-group.shutdown.unit"), + /** + * The number of threads in the Admin group. + * + *

      Value-type: int + */ NETTY_ADMIN_SIZE("advanced.netty.admin-group.size"), + /** + * Quiet period for admin group shutdown. + * + *

      Value-type: int + */ NETTY_ADMIN_SHUTDOWN_QUIET_PERIOD("advanced.netty.admin-group.shutdown.quiet-period"), + /** + * Units for admin group quiet period and timeout. + * + *

      Value-type: {@link String} + */ NETTY_ADMIN_SHUTDOWN_TIMEOUT("advanced.netty.admin-group.shutdown.timeout"), + /** + * Units for admin group quite period and timeout. + * + *

      Value-type: {@link String} + */ NETTY_ADMIN_SHUTDOWN_UNIT("advanced.netty.admin-group.shutdown.unit"), + /** + * How many times the coalescer is allowed to reschedule itself when it did no work. + * + *

      Value-type: int + */ COALESCER_MAX_RUNS("advanced.coalescer.max-runs-with-no-work"), + /** + * The coalescer reschedule interval. + * + *

      Value-type: {@link java.time.Duration Duration} + */ COALESCER_INTERVAL("advanced.coalescer.reschedule-interval"), + /** + * Whether to resolve the addresses passed to `basic.contact-points`. + * + *

      Value-type: boolean + */ RESOLVE_CONTACT_POINTS("advanced.resolve-contact-points"), + /** + * This is how frequent the timer should wake up to check for timed-out tasks or speculative + * executions. + * + *

      Value-type: {@link java.time.Duration Duration} + */ NETTY_TIMER_TICK_DURATION("advanced.netty.timer.tick-duration"), + /** + * Number of ticks in the Timer wheel. + * + *

      Value-type: int + */ NETTY_TIMER_TICKS_PER_WHEEL("advanced.netty.timer.ticks-per-wheel"), + /** + * Whether logging of server warnings generated during query execution should be disabled by the + * driver. + * + *

      Value-type: boolean + */ REQUEST_LOG_WARNINGS("advanced.request.log-warnings"), + /** + * Whether the threads created by the driver should be daemon threads. + * + *

      Value-type: boolean + */ NETTY_DAEMON("advanced.netty.daemon"), ; From 8354f22c42412e7c5ad7235485efc9fa801f9cdb Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 24 Sep 2019 09:52:25 -0500 Subject: [PATCH 090/979] JAVA-2402 (#1330) JAVA-2402: Add setTracing(boolean) to StatementBuilder --- changelog/README.md | 1 + .../driver/api/core/cql/StatementBuilder.java | 15 +++- .../api/core/cql/StatementBuilderTest.java | 76 +++++++++++++++++++ 3 files changed, 90 insertions(+), 2 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java diff --git a/changelog/README.md b/changelog/README.md index 1852708b98a..1179d01dcbc 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2402: Add setTracing(boolean) to StatementBuilder ### 4.2.0 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 209672fa412..0db0754b937 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -154,10 +154,21 @@ public SelfT setIdempotence(@Nullable Boolean idempotent) { return self; } - /** @see Statement#setTracing(boolean) */ + /** + * This method is a shortcut to {@link #setTracing(boolean)} with an argument of true. It is + * preserved to maintain API compatibility. 
+ * + * @see Statement#setTracing(boolean) + */ @NonNull public SelfT setTracing() { - this.tracing = true; + return setTracing(true); + } + + /** @see Statement#setTracing(boolean) */ + @NonNull + public SelfT setTracing(boolean tracing) { + this.tracing = tracing; return self; } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java new file mode 100644 index 00000000000..93745f155da --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.Test; + +public class StatementBuilderTest { + + private static class NullStatementBuilder + extends StatementBuilder { + + public NullStatementBuilder() { + super(); + } + + public NullStatementBuilder(SimpleStatement template) { + super(template); + } + + @Override + public SimpleStatement build() { + return null; + } + } + + @Test + public void should_handle_set_tracing_without_args() { + + NullStatementBuilder builder = new NullStatementBuilder(); + assertThat(builder.tracing).isFalse(); + builder.setTracing(); + assertThat(builder.tracing).isTrue(); + } + + @Test + public void should_handle_set_tracing_with_args() { + + NullStatementBuilder builder = new NullStatementBuilder(); + assertThat(builder.tracing).isFalse(); + builder.setTracing(true); + assertThat(builder.tracing).isTrue(); + builder.setTracing(false); + assertThat(builder.tracing).isFalse(); + } + + @Test + public void should_override_template() { + + SimpleStatement template = SimpleStatement.builder("select * from system.peers").build(); + NullStatementBuilder builder = new NullStatementBuilder(template); + assertThat(builder.tracing).isFalse(); + builder.setTracing(true); + assertThat(builder.tracing).isTrue(); + + template = SimpleStatement.builder("select * from system.peers").setTracing().build(); + builder = new NullStatementBuilder(template); + assertThat(builder.tracing).isTrue(); + builder.setTracing(false); + assertThat(builder.tracing).isFalse(); + } +} From a645d499630243c7caca815a026f411f89b24bf5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 23 Sep 2019 11:41:04 -0700 Subject: [PATCH 091/979] JAVA-2455: Improve logging of schema refresh errors Motivation: While debugging JAVA-2454, we noticed that the error wasn't surfaced in the logs as expected. 
MetadataManager.startSchemaRequest has a listener for that purpose, but it's on the wrong future; parsing errors will surface on currentSchemaRefresh. Modifications: Don't try to log errors in MetadataManager. Instead, handle it on all call sites of MetadataManager.refreshSchema, *if* the future is not returned to the client. Result: If a user-initiated refresh fails, they will get back a failed future. If an internal refresh fails, it will be logged. --- changelog/README.md | 1 + .../core/control/ControlConnection.java | 30 +++++++++++++++++-- .../internal/core/cql/CqlRequestHandler.java | 4 +-- .../core/metadata/MetadataManager.java | 19 ++++++++---- .../internal/core/session/DefaultSession.java | 14 ++++++++- .../control/ControlConnectionTestBase.java | 4 +++ .../core/session/DefaultSessionPoolsTest.java | 2 ++ 7 files changed, 63 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 30a01695afd..11848026b71 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [improvement] JAVA-2455: Improve logging of schema refresh errors - [documentation] JAVA-2429: Document expected types on DefaultDriverOption - [documentation] JAVA-2426: Fix month pattern in CqlDuration documentation - [bug] JAVA-2451: Make zero a valid estimated size for PagingIterableSpliterator diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index 50b6ffe90f0..193121fc08b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -226,7 +226,20 @@ private void processStatusChange(Event event) { private void processSchemaChange(Event event) { SchemaChangeEvent sce = (SchemaChangeEvent) event; - 
context.getMetadataManager().refreshSchema(sce.keyspace, false, false); + context + .getMetadataManager() + .refreshSchema(sce.keyspace, false, false) + .whenComplete( + (metadata, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema for a SCHEMA_CHANGE event, " + + "keeping previous version", + logPrefix, + error); + } + }); } private class SingleThreaded { @@ -467,7 +480,20 @@ private void onSuccessfulReconnect() { // first successful refresh; make sure the LBP gets initialized (this is a no-op // if it was initialized already). context.getLoadBalancingPolicyWrapper().init(); - context.getMetadataManager().refreshSchema(null, false, true); + context + .getMetadataManager() + .refreshSchema(null, false, true) + .whenComplete( + (metadata, schemaError) -> { + if (schemaError != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema after a " + + "successful reconnection, keeping previous version", + logPrefix, + schemaError); + } + }); } catch (Throwable t) { Loggers.warnWithException( LOG, "[{}] Unexpected error on control connection reconnect", logPrefix, t); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index c5f3985957d..a658a571655 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -608,8 +608,8 @@ public void onResponse(Frame responseFrame) { error -> { Loggers.warnWithException( LOG, - "[{}] Error while refreshing schema after DDL query, " - + "new metadata might be incomplete", + "[{}] Unexpected error while refreshing schema after DDL query, " + + "keeping previous version", logPrefix, error); return null; diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index d5acdbb6702..a066518ebab 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -106,7 +106,18 @@ private void onConfigChanged(@SuppressWarnings("unused") ConfigChangeEvent event || !keyspacesBefore.equals(refreshedKeyspaces) || (!tokenMapEnabledBefore && tokenMapEnabled)) && isSchemaEnabled()) { - refreshSchema(null, false, true); + refreshSchema(null, false, true) + .whenComplete( + (metadata, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema after it was re-enabled " + + "in the configuration, keeping previous version", + logPrefix, + error); + } + }); } } @@ -404,11 +415,7 @@ private void startSchemaRequest(CompletableFuture future) { .whenComplete( (v, error) -> { if (error != null) { - Loggers.warnWithException( - LOG, - "[{}] Unexpected error while refreshing schema, skipping", - logPrefix, - error); + currentSchemaRefresh.completeExceptionally(error); } singleThreaded.firstSchemaRefreshFuture.complete(null); }); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 8f2dbcc59e7..3c7bde85e5b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -387,7 +387,19 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { } } if (needSchemaRefresh) { - metadataManager.refreshSchema(null, false, true); + metadataManager + .refreshSchema(null, false, true) + .whenComplete( + (metadata, error) -> { 
+ if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema during intialization, " + + "keeping previous version", + logPrefix, + error); + } + }); } metadataManager .firstSchemaRefreshFuture() diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java index a25b7c97f52..7e0ee752d0e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java @@ -17,6 +17,8 @@ import static org.assertj.core.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -120,6 +122,8 @@ public void setup() { mockQueryPlan(node1, node2); when(metadataManager.refreshNodes()).thenReturn(CompletableFuture.completedFuture(null)); + when(metadataManager.refreshSchema(anyString(), anyBoolean(), anyBoolean())) + .thenReturn(CompletableFuture.completedFuture(null)); when(context.getMetadataManager()).thenReturn(metadataManager); when(context.getConfig()).thenReturn(config); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java index 838cc12a69c..f80813efe72 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java @@ -132,6 +132,8 @@ public void setup() { 
when(metadataManager.refreshNodes()).thenReturn(CompletableFuture.completedFuture(null)); when(metadataManager.firstSchemaRefreshFuture()) .thenReturn(CompletableFuture.completedFuture(null)); + when(metadataManager.refreshSchema(null, false, true)) + .thenReturn(CompletableFuture.completedFuture(null)); when(context.getMetadataManager()).thenReturn(metadataManager); when(topologyMonitor.init()).thenReturn(CompletableFuture.completedFuture(null)); From ff0764eab63a4dbf5378e55e22b8f4ee5e16f33e Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 23 Sep 2019 10:13:11 -0700 Subject: [PATCH 092/979] JAVA-2454: Handle empty CQL type while parsing schema --- changelog/README.md | 1 + .../schema/parsing/DataTypeCqlNameParser.java | 4 ++++ .../metadata/schema/parsing/RawColumn.java | 20 ++++++++++--------- 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 11848026b71..8afa95b565e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 (in progress) +- [bug] JAVA-2454: Handle "empty" CQL type while parsing schema - [improvement] JAVA-2455: Improve logging of schema refresh errors - [documentation] JAVA-2429: Document expected types on DefaultDriverOption - [documentation] JAVA-2426: Fix month pattern in CqlDuration documentation diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java index 21c492a1231..b511c8cadc1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java @@ -64,6 +64,10 @@ private DataType parse( Parser parser = new Parser(toParse, 0); String type = parser.parseTypeName(); + if 
(type.equalsIgnoreCase(RawColumn.THRIFT_EMPTY_TYPE)) { + return DataTypes.custom(type); + } + DataType nativeType = NATIVE_TYPES_BY_NAME.get(type.toLowerCase()); if (nativeType != null) { return nativeType; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java index 49f31604cf6..1d221cae20e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RawColumn.java @@ -41,6 +41,14 @@ public class RawColumn implements Comparable { public static final String KIND_COMPACT_VALUE = "compact_value"; public static final String KIND_STATIC = "static"; + /** + * Upon migration from thrift to CQL, Cassandra internally creates a surrogate column "value" of + * type {@code EmptyType} for dense tables. This resolves into this CQL type name. + * + *

      This column shouldn't be exposed to the user but is currently exposed in system tables. + */ + public static final String THRIFT_EMPTY_TYPE = "empty"; + public final CqlIdentifier name; public String kind; public final int position; @@ -154,7 +162,7 @@ public static List toRawColumns(Collection rows) { /** * Helper method to filter columns while parsing a table's metadata. * - *

      Upon migration from thrift to CQL, we internally create a pair of surrogate + *

      Upon migration from thrift to CQL, Cassandra internally creates a pair of surrogate * clustering/regular columns for compact static tables. These columns shouldn't be exposed to the * user but are currently returned by C*. We also need to remove the static keyword for all other * columns in the table. @@ -177,18 +185,12 @@ public static void pruneStaticCompactTableColumns(List columns) { } } - /** - * Helper method to filter columns while parsing a table's metadata. - * - *

      Upon migration from thrift to CQL, we internally create a surrogate column "value" of type - * EmptyType for dense tables. This column shouldn't be exposed to the user but is currently - * returned by C*. - */ + /** Helper method to filter columns while parsing a table's metadata. */ public static void pruneDenseTableColumnsV3(List columns) { ListIterator iterator = columns.listIterator(); while (iterator.hasNext()) { RawColumn column = iterator.next(); - if (column.kind.equals(KIND_REGULAR) && "empty".equals(column.dataType)) { + if (column.kind.equals(KIND_REGULAR) && THRIFT_EMPTY_TYPE.equals(column.dataType)) { iterator.remove(); } } From b2d5f1460c979c15426221254118d1f163faef0c Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 24 Sep 2019 09:48:48 -0700 Subject: [PATCH 093/979] Prepare changelog for release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 8afa95b565e..921ba9f6791 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.2.1 (in progress) +### 4.2.1 - [bug] JAVA-2454: Handle "empty" CQL type while parsing schema - [improvement] JAVA-2455: Improve logging of schema refresh errors From 5291c56199144d1c57a8072f8e632ac2cea55941 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 24 Sep 2019 09:51:43 -0700 Subject: [PATCH 094/979] [maven-release-plugin] prepare release 4.2.1 --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 993f17b7929..a58aee7fe1e 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-core-shaded 
diff --git a/core/pom.xml b/core/pom.xml index d18fd716e9e..6382a5806af 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 4e842cd385b..41434beb0c5 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index c51e7d216c8..67f550f4eeb 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.1-SNAPSHOT + 4.2.1 java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index b5f01caae6c..a50150c7222 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 2a6a73f7ed8..773a6373bdc 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ea8526e6d04..a0f9a8b1ecf 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index cc253960fd8..23521a714ec 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.2.0 + 4.2.1 diff --git a/query-builder/pom.xml 
b/query-builder/pom.xml index d6d1754e237..cb25ec7a50c 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index c063276b1f6..1dcb0dc735a 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1-SNAPSHOT + 4.2.1 java-driver-test-infra From 060af118ae3a41a7df7cc136a8c2d1685f524cca Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 24 Sep 2019 09:53:38 -0700 Subject: [PATCH 095/979] [maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index a58aee7fe1e..6fab61b1d90 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 6382a5806af..6b90832da81 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 41434beb0c5..2ad184dd3e6 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 67f550f4eeb..128fa81ebcb 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.1 + 4.2.2-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml 
b/integration-tests/pom.xml index a50150c7222..e0af9ac8a62 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 773a6373bdc..03f7d48cc17 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index a0f9a8b1ecf..0e68830ca8f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 23521a714ec..e6540f657b8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.2.1 + 4.2.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index cb25ec7a50c..6a3c0664030 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 1dcb0dc735a..e8043809721 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.1 + 4.2.2-SNAPSHOT java-driver-test-infra From ca0a7c8eb3df73dfe5b5974a80a32033b2b1ae0b Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Thu, 26 Sep 2019 14:21:29 +0200 Subject: [PATCH 096/979] JAVA-2466: Set idempotence to null in BatchStatement.newInstance (#1334) --- changelog/README.md | 1 + 
.../driver/api/core/cql/BatchStatement.java | 6 ++--- .../core/cql/CqlRequestHandlerRetryTest.java | 26 +++++++++---------- ...equestHandlerSpeculativeExecutionTest.java | 18 ++++++------- .../core/cql/CqlRequestHandlerTestBase.java | 15 +++++++++++ 5 files changed, 41 insertions(+), 25 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index a6079cfdb9d..15cecc666c5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -5,6 +5,7 @@ ### 4.3.0 (in progress) - [improvement] JAVA-2402: Add setTracing(boolean) to StatementBuilder +- [bug] JAVA-2466: Set idempotence to null in BatchStatement.newInstance ### 4.2.1 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java index 41e72aa0dfe..81bdb23db47 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java @@ -57,7 +57,7 @@ static BatchStatement newInstance(@NonNull BatchType batchType) { null, null, Collections.emptyMap(), - false, + null, false, Long.MIN_VALUE, null, @@ -87,7 +87,7 @@ static BatchStatement newInstance( null, null, Collections.emptyMap(), - false, + null, false, Long.MIN_VALUE, null, @@ -117,7 +117,7 @@ static BatchStatement newInstance( null, null, Collections.emptyMap(), - false, + null, false, Long.MIN_VALUE, null, diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java index 0e503a134c8..ef04f814e94 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java @@ -34,7 +34,7 @@ import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import 
com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.retry.RetryDecision; @@ -63,7 +63,7 @@ public class CqlRequestHandlerRetryTest extends CqlRequestHandlerTestBase { @Test @UseDataProvider("allIdempotenceConfigs") public void should_always_try_next_node_if_bootstrapping( - boolean defaultIdempotence, SimpleStatement statement) { + boolean defaultIdempotence, Statement statement) { try (RequestHandlerTestHarness harness = RequestHandlerTestHarness.builder() .withDefaultIdempotence(defaultIdempotence) @@ -105,7 +105,7 @@ public void should_always_try_next_node_if_bootstrapping( @Test @UseDataProvider("allIdempotenceConfigs") public void should_always_rethrow_query_validation_error( - boolean defaultIdempotence, SimpleStatement statement) { + boolean defaultIdempotence, Statement statement) { try (RequestHandlerTestHarness harness = RequestHandlerTestHarness.builder() .withDefaultIdempotence(defaultIdempotence) @@ -145,7 +145,7 @@ public void should_always_rethrow_query_validation_error( @Test @UseDataProvider("failureAndIdempotent") public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( - FailureScenario failureScenario, boolean defaultIdempotence, SimpleStatement statement) { + FailureScenario failureScenario, boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); failureScenario.mockRequestError(harnessBuilder, node1); @@ -196,7 +196,7 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( @Test @UseDataProvider("failureAndIdempotent") public void 
should_try_same_node_if_idempotent_and_retry_policy_decides_so( - FailureScenario failureScenario, boolean defaultIdempotence, SimpleStatement statement) { + FailureScenario failureScenario, boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); failureScenario.mockRequestError(harnessBuilder, node1); @@ -247,7 +247,7 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( @Test @UseDataProvider("failureAndIdempotent") public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( - FailureScenario failureScenario, boolean defaultIdempotence, SimpleStatement statement) { + FailureScenario failureScenario, boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); failureScenario.mockRequestError(harnessBuilder, node1); @@ -295,7 +295,7 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( @Test @UseDataProvider("failureAndIdempotent") public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( - FailureScenario failureScenario, boolean defaultIdempotence, SimpleStatement statement) { + FailureScenario failureScenario, boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); failureScenario.mockRequestError(harnessBuilder, node1); @@ -333,7 +333,7 @@ public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( @Test @UseDataProvider("failureAndNotIdempotent") public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_rethrows( - FailureScenario failureScenario, boolean defaultIdempotence, SimpleStatement statement) { + FailureScenario failureScenario, boolean defaultIdempotence, Statement 
statement) { // For two of the possible exceptions, the retry policy is called even if the statement is not // idempotent @@ -428,7 +428,7 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod @Override public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { when(policy.onReadTimeout( - any(SimpleStatement.class), + any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(2), eq(1), @@ -458,7 +458,7 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod @Override public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { when(policy.onWriteTimeout( - any(SimpleStatement.class), + any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(DefaultWriteType.SIMPLE), eq(2), @@ -484,7 +484,7 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod @Override public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { when(policy.onUnavailable( - any(SimpleStatement.class), + any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(2), eq(1), @@ -507,7 +507,7 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod @Override public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onErrorResponse(any(SimpleStatement.class), any(ServerError.class), eq(0))) + when(policy.onErrorResponse(any(Statement.class), any(ServerError.class), eq(0))) .thenReturn(decision); } }, @@ -524,7 +524,7 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod @Override public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { when(policy.onRequestAborted( - any(SimpleStatement.class), any(HeartbeatException.class), eq(0))) + any(Statement.class), any(HeartbeatException.class), eq(0))) .thenReturn(decision); } }); diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java index 2eca70f1dc2..6be5a39d7c1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java @@ -28,7 +28,7 @@ import com.datastax.oss.driver.api.core.NoNodeAvailableException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; @@ -47,7 +47,7 @@ public class CqlRequestHandlerSpeculativeExecutionTest extends CqlRequestHandler @Test @UseDataProvider("nonIdempotentConfig") public void should_not_schedule_speculative_executions_if_not_idempotent( - boolean defaultIdempotence, SimpleStatement statement) { + boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -71,7 +71,7 @@ public void should_not_schedule_speculative_executions_if_not_idempotent( @Test @UseDataProvider("idempotentConfig") public void should_schedule_speculative_executions( - boolean defaultIdempotence, SimpleStatement statement) throws Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = 
RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -133,7 +133,7 @@ public void should_schedule_speculative_executions( @Test @UseDataProvider("idempotentConfig") public void should_not_start_execution_if_result_complete( - boolean defaultIdempotence, SimpleStatement statement) throws Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -191,7 +191,7 @@ public void should_not_start_execution_if_result_complete( @Test @UseDataProvider("idempotentConfig") - public void should_fail_if_no_nodes(boolean defaultIdempotence, SimpleStatement statement) { + public void should_fail_if_no_nodes(boolean defaultIdempotence, Statement statement) { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); // No configured behaviors => will yield an empty query plan @@ -218,7 +218,7 @@ public void should_fail_if_no_nodes(boolean defaultIdempotence, SimpleStatement @Test @UseDataProvider("idempotentConfig") public void should_fail_if_no_more_nodes_and_initial_execution_is_last( - boolean defaultIdempotence, SimpleStatement statement) throws Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -272,7 +272,7 @@ public void should_fail_if_no_more_nodes_and_initial_execution_is_last( @Test @UseDataProvider("idempotentConfig") public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( - boolean defaultIdempotence, SimpleStatement statement) throws 
Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -326,7 +326,7 @@ public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( @Test @UseDataProvider("idempotentConfig") public void should_retry_in_speculative_executions( - boolean defaultIdempotence, SimpleStatement statement) throws Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); @@ -375,7 +375,7 @@ public void should_retry_in_speculative_executions( @Test @UseDataProvider("idempotentConfig") public void should_stop_retrying_other_executions_if_result_complete( - boolean defaultIdempotence, SimpleStatement statement) throws Exception { + boolean defaultIdempotence, Statement statement) throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java index 54fb1e3a7b3..da7a0f607d4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java @@ -21,6 +21,8 @@ import com.datastax.oss.driver.TestDataProviders; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import 
com.datastax.oss.driver.api.core.cql.BatchType; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; @@ -56,6 +58,13 @@ public abstract class CqlRequestHandlerTestBase { SimpleStatement.builder("mock query").setIdempotence(true).build(); protected static final SimpleStatement NON_IDEMPOTENT_STATEMENT = SimpleStatement.builder("mock query").setIdempotence(false).build(); + protected static final BatchStatement UNDEFINED_IDEMPOTENCE_BATCH_STATEMENT = + BatchStatement.newInstance(BatchType.LOGGED, UNDEFINED_IDEMPOTENCE_STATEMENT); + protected static final BatchStatement IDEMPOTENT_BATCH_STATEMENT = + BatchStatement.newInstance(BatchType.LOGGED, IDEMPOTENT_STATEMENT).setIdempotent(true); + protected static final BatchStatement NON_IDEMPOTENT_BATCH_STATEMENT = + BatchStatement.newInstance(BatchType.LOGGED, NON_IDEMPOTENT_STATEMENT).setIdempotent(false); + protected static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); protected static final InetSocketAddress ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); protected static final InetSocketAddress ADDRESS3 = new InetSocketAddress("127.0.0.3", 9042); @@ -118,6 +127,9 @@ public static Object[][] idempotentConfig() { new Object[] {true, UNDEFINED_IDEMPOTENCE_STATEMENT}, new Object[] {false, IDEMPOTENT_STATEMENT}, new Object[] {true, IDEMPOTENT_STATEMENT}, + new Object[] {true, UNDEFINED_IDEMPOTENCE_BATCH_STATEMENT}, + new Object[] {false, IDEMPOTENT_BATCH_STATEMENT}, + new Object[] {true, IDEMPOTENT_BATCH_STATEMENT}, }; } @@ -131,6 +143,9 @@ public static Object[][] nonIdempotentConfig() { new Object[] {false, UNDEFINED_IDEMPOTENCE_STATEMENT}, new Object[] {true, NON_IDEMPOTENT_STATEMENT}, new Object[] {false, NON_IDEMPOTENT_STATEMENT}, + new Object[] {false, UNDEFINED_IDEMPOTENCE_BATCH_STATEMENT}, + new Object[] {true, NON_IDEMPOTENT_BATCH_STATEMENT}, + new 
Object[] {false, NON_IDEMPOTENT_BATCH_STATEMENT}, }; } From 3cb761e85e20b0eed813b5f92d44cea3f2d82d19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduard=20Tudenh=C3=B6fner?= Date: Fri, 27 Sep 2019 10:49:03 +0200 Subject: [PATCH 097/979] Introduce shallowEquals() to KeyspaceMetadata (#1335) This would allow subclasses to implement their own version of a shallow comparison --- .../api/core/metadata/schema/KeyspaceMetadata.java | 14 ++++++++++++++ .../metadata/schema/refresh/SchemaRefresh.java | 9 +-------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/KeyspaceMetadata.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/KeyspaceMetadata.java index abda435ba02..9b3461f62a8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/KeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/KeyspaceMetadata.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; +import java.util.Objects; import java.util.Optional; /** A keyspace in the schema metadata. 
*/ @@ -245,4 +246,17 @@ default String describeWithChildren(boolean pretty) { return builder.build(); } + + default boolean shallowEquals(Object other) { + if (other == this) { + return true; + } else if (other instanceof KeyspaceMetadata) { + KeyspaceMetadata that = (KeyspaceMetadata) other; + return Objects.equals(this.getName(), that.getName()) + && this.isDurableWrites() == that.isDurableWrites() + && Objects.equals(this.getReplication(), that.getReplication()); + } else { + return false; + } + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefresh.java index 0838b26e728..16d07e421ff 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefresh.java @@ -30,7 +30,6 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; import java.util.Map; -import java.util.Objects; import java.util.function.BiFunction; import java.util.function.Function; import net.jcip.annotations.ThreadSafe; @@ -62,12 +61,6 @@ public Result compute( oldMetadata.withSchema(this.newKeyspaces, tokenMapEnabled, context), events.build()); } - private static boolean shallowEquals(KeyspaceMetadata keyspace1, KeyspaceMetadata keyspace2) { - return Objects.equals(keyspace1.getName(), keyspace2.getName()) - && keyspace1.isDurableWrites() == keyspace2.isDurableWrites() - && Objects.equals(keyspace1.getReplication(), keyspace2.getReplication()); - } - /** * Computes the exact set of events to emit when a keyspace has changed. 
* @@ -83,7 +76,7 @@ private void computeEvents( if (oldKeyspace == null) { events.add(KeyspaceChangeEvent.created(newKeyspace)); } else { - if (!shallowEquals(oldKeyspace, newKeyspace)) { + if (!oldKeyspace.shallowEquals(newKeyspace)) { events.add(KeyspaceChangeEvent.updated(oldKeyspace, newKeyspace)); } computeChildEvents(oldKeyspace, newKeyspace, events); From c410e11fffb2f01b816223100c2773fb09791d8a Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 30 Aug 2019 15:27:27 -0700 Subject: [PATCH 098/979] JAVA-2416: Update paging section in the manual --- changelog/README.md | 1 + manual/core/paging/README.md | 40 ++++++++++++++++-------------------- 2 files changed, 19 insertions(+), 22 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 15cecc666c5..b4dd8e222e3 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2416: Update paging section in the manual - [improvement] JAVA-2402: Add setTracing(boolean) to StatementBuilder - [bug] JAVA-2466: Set idempotence to null in BatchStatement.newInstance diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index a2b15586df7..04ce8029556 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -64,26 +64,6 @@ for (Row row : rs) { |<------------------------ | | ``` -By default, the background fetch happens at the last moment, when there are no more "local" rows -available. If you need finer control, [ResultSet] provides the following methods: - -* `getAvailableWithoutFetching()` and `isFullyFetched()` to check the current state; -* `fetchMoreResults()` to force a page fetch. 
- -Here's how you could use these methods to pre-fetch the next page in advance, in order to avoid the -performance hit at the end of each page: - -```java -ResultSet rs = session.execute("your query"); -for (Row row : rs) { - // Fetch when there's only half a page left: - if (rs.getAvailableWithoutFetching() == 10 && !rs.isFullyFetched()) { - rs.fetchMoreResults(); // this is asynchronous - } - // Process the row... -} -``` - ### Asynchronous paging @@ -93,8 +73,8 @@ accidentally trigger background synchronous queries (which would defeat the whol or potentially introduce deadlocks). To avoid this problem, the driver's asynchronous API now returns a dedicated [AsyncResultSet]; -iteration only yields the current page, and the next page must be explicitly fetched. Here's how -that translates to our example: +iteration only yields the current page, and the next page must be explicitly fetched. Here's the +idiomatic way to process a result set asynchronously: ```java CompletionStage futureRs = @@ -115,6 +95,7 @@ void processRows(AsyncResultSet rs, Throwable error) { } ``` +See [Asynchronous programming](../async/) for more tips about the async API. ### Saving and reusing the paging state @@ -129,12 +110,21 @@ The driver exposes a *paging state* for that: ResultSet rs = session.execute("your query"); ByteBuffer pagingState = rs.getExecutionInfo().getPagingState(); +// Finish processing the current page +while (rs.getAvailableWithoutFetching() > 0) { + Row row = rs.one(); + // process the row +} + // Later: SimpleStatement statement = SimpleStatement.builder("your query").setPagingState(pagingState).build(); session.execute(statement); ``` +Note the loop to finish the current page after we extract the state. The new statement will start at +the beginning of the next page, so we want to make sure we don't leave a gap of unprocessed rows. + The paging state can only be reused with the exact same statement (same query string, same parameters). 
It is an opaque value that is only meant to be collected, stored and re-used. If you try to modify its contents or reuse it with a different statement, the results are unpredictable. @@ -177,5 +167,11 @@ think you can get away with the performance hit. We recommend that you: * set a hard limit on the highest possible page number, to prevent malicious clients from triggering queries that would skip a huge amount of rows. + +The [driver examples] include two complete web service implementations demonstrating forward-only +and random (offset-based) paging. + [ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html [AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html + +[driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging From 40f5b592efe50b588997fe3e3b3ccbf9967fb922 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 3 Sep 2019 11:49:01 -0700 Subject: [PATCH 099/979] JAVA-2412: Cover DDL query debouncing in FAQ and upgrade guide --- changelog/README.md | 1 + faq/README.md | 14 +++++++++++++- upgrade_guide/README.md | 3 +++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index b4dd8e222e3..9cde3860e7b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2412: Cover DDL query debouncing in FAQ and upgrade guide - [documentation] JAVA-2416: Update paging section in the manual - [improvement] JAVA-2402: Add setTracing(boolean) to StatementBuilder - [bug] JAVA-2466: Set idempotence to null in BatchStatement.newInstance diff --git a/faq/README.md b/faq/README.md index fdf42c85723..1b00a971567 100644 --- a/faq/README.md +++ b/faq/README.md @@ -67,4 +67,16 @@ application code. The driver now uses Java 8's improved date and time API. 
CQL type `timestamp` is mapped to `java.time.Instant`, and the corresponding getter and setter are `getInstant` and `setInstant`. -See [Temporal types](../manual/core/temporal_types/) for more details. \ No newline at end of file +See [Temporal types](../manual/core/temporal_types/) for more details. + +### Why do DDL queries have a higher latency than driver 3? + +If you benchmark DDL queries such as `session.execute("CREATE TABLE ...")`, you will observe a +noticeably higher latency than driver 3 (about 1 second). + +This is because those queries are now *debounced*: the driver adds a short wait in an attempt to +group multiple schema changes into a single metadata refresh. If you want to mitigate this, you can +either adjust the debouncing settings, or group your schema updates while temporarily disabling the +metadata; see the [performance](../manual/core/performance/#debouncing) page. + +This only applies to DDL queries; DML statements (`SELECT`, `INSERT`...) are not debounced. diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 56c91124b96..e3d315f9180 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -280,6 +280,9 @@ BoundStatement bs2 = ps2.bind(); assert bs2.getConsistencyLevel() == DefaultConsistencyLevel.TWO; ``` +DDL statements are now debounced; see [Why do DDL queries have a higher latency than driver +3?](../faq/#why-do-ddl-queries-have-a-higher-latency-than-driver-3) in the FAQ. 
+ #### Dual result set APIs In 3.x, both synchronous and asynchronous execution models shared a common result set From 14a063a004c7dfcf3586528881cfbe310e1f936a Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 3 Sep 2019 11:50:00 -0700 Subject: [PATCH 100/979] Add links between metadata and performance pages in manual --- manual/core/metadata/README.md | 3 +++ manual/core/metadata/schema/README.md | 6 +++++- manual/core/performance/README.md | 6 +++--- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 470634ad3bc..9465404c0b6 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -42,6 +42,9 @@ Set tokenRanges = tokenMap.getTokenRanges(keyspace.getName(), node); This is a big improvement over previous versions of the driver, where it was possible to observe a new keyspace in the schema metadata before the token metadata was updated. +Schema and node state events are debounced. This allows you to control how often the metadata gets +refreshed. See the [Performance](../performance/#debouncing) page for more details. + [Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- [Metadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html [Node]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index b8a166fa157..1e19381fcb6 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -200,12 +200,16 @@ practice anyway: if you're in the middle of a rolling upgrade, you're probably n changes at the same time. 
-#### Relation to token metadata +### Relation to token metadata Some of the data in the [token map](../token/) relies on keyspace metadata (any method that takes a `CqlIdentifier` argument). If schema metadata is disabled or filtered, token metadata will also be unavailable for the excluded keyspaces. +### Performing schema updates from the client + +If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), +take a look at the [Performance](../../performance/#schema-updates) page for a few tips. [Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- [SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index f07287570c5..08e8980f4b5 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -300,9 +300,9 @@ You should group your schema changes as much as possible. Every change made from a client will be pushed to all other clients, causing them to refresh their metadata. If you have multiple client instances, it might be a good idea to -[deactivate the metadata](../metadata/schema/#enabling-disabling) on other clients while you apply -the updates, and reactivate it at the end. Reactivating will trigger an immediate refresh, so you -can even ramp this up to avoid a "thundering herd" effect. +[deactivate the metadata](../metadata/schema/#enabling-disabling) on all clients while you apply the +updates, and reactivate it at the end (reactivating will trigger an immediate refresh, so you might +want to ramp up clients to avoid a "thundering herd" effect). Schema changes have to replicate to all nodes in the cluster. 
To minimize the chance of schema disagreement errors: From 3f6320767bf296d31eb8155e04da0a0a6592c29f Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 7 Aug 2019 10:30:36 -0700 Subject: [PATCH 101/979] JAVA-2384: Add quick overview section to each manual page --- changelog/README.md | 1 + manual/core/address_resolution/README.md | 14 ++++++- manual/core/async/README.md | 10 +++++ manual/core/authentication/README.md | 14 ++++++- manual/core/compression/README.md | 10 +++++ manual/core/configuration/README.md | 14 +++++-- manual/core/custom_codecs/README.md | 14 +++++++ manual/core/detachable_types/README.md | 7 ++++ manual/core/idempotence/README.md | 13 ++++++- manual/core/integration/README.md | 7 +++- manual/core/load_balancing/README.md | 11 ++++++ manual/core/logging/README.md | 7 ++++ manual/core/metadata/README.md | 11 ++++++ manual/core/metadata/node/README.md | 37 ++++++++++++++++++- manual/core/metadata/schema/README.md | 32 ++++++++++++---- manual/core/metadata/token/README.md | 10 +++++ manual/core/metrics/README.md | 8 ++++ manual/core/native_protocol/README.md | 17 ++++++++- manual/core/paging/README.md | 17 +++++++++ manual/core/pooling/README.md | 14 +++++++ manual/core/query_timestamps/README.md | 14 +++++++ manual/core/reconnection/README.md | 10 +++++ manual/core/request_tracker/README.md | 12 ++++++ manual/core/retries/README.md | 11 ++++++ manual/core/speculative_execution/README.md | 12 ++++++ manual/core/ssl/README.md | 18 ++++++++- manual/core/statements/README.md | 10 +++++ manual/core/statements/batch/README.md | 14 ++++++- .../statements/per_query_keyspace/README.md | 9 +++++ manual/core/statements/prepared/README.md | 26 +++++++++++-- manual/core/statements/simple/README.md | 14 +++++++ manual/core/temporal_types/README.md | 11 ++++++ manual/core/throttling/README.md | 19 +++++++--- manual/core/tracing/README.md | 17 +++++++++ manual/core/tuples/README.md | 12 ++++++ manual/core/udts/README.md | 16 ++++++++ manual/mapper/daos/README.md | 
13 +++++++ manual/mapper/entities/README.md | 23 ++++++++++++ manual/mapper/mapper/README.md | 10 +++++ 39 files changed, 507 insertions(+), 32 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 9cde3860e7b..82798b1e4cf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2384: Add quick overview section to each manual page - [documentation] JAVA-2412: Cover DDL query debouncing in FAQ and upgrade guide - [documentation] JAVA-2416: Update paging section in the manual - [improvement] JAVA-2402: Add setTracing(boolean) to StatementBuilder diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index cde5535232a..00c31bec714 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -1,5 +1,16 @@ ## Address resolution +### Quick overview + +The driver uses `system.peers.rpc-address` to connect to newly discovered nodes. For special network +topologies, an address translation component can be plugged in. + +* `advanced.address-translator` in the configuration. +* none by default. Also available: EC2-specific (for deployments that span multiple regions), or + write your own. + +----- + Each node in the Cassandra cluster is uniquely identified by an IP address that the driver will use to establish connections. @@ -96,7 +107,7 @@ to configure your Cassandra nodes to broadcast public RPC addresses. However, this is not always the most cost-effective: if a client and a node are in the same region, it would be cheaper to connect over the private IP. Ideally, you'd want to pick the best address in each case. -The driver provides [Ec2MultiRegionAddressTranslator] which does exactly that. To use it, specify the following in +The driver provides `Ec2MultiRegionAddressTranslator` which does exactly that. 
To use it, specify the following in the [configuration](../configuration/): ``` @@ -114,7 +125,6 @@ domain name of the target instance. Then it performs a forward DNS lookup of the private/public switch automatically based on location). [AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html -[Ec2MultiRegionAddressTranslator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/addresstranslation/Ec2MultiRegionAddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 53344a4e087..a2a86b42a57 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -1,5 +1,15 @@ ## Asynchronous programming +### Quick overview + +Async driver methods return Java 8's [CompletionStage]. + +* don't call synchronous methods from asynchronous callbacks (the driver detects that and throws). +* callbacks execute on I/O threads: consider providing your own executor for expensive computations. +* be careful not to accidentally ignore errors thrown from callbacks. + +----- + The driver exposes an asynchronous API that allows you to write fully non-blocking programs. Asynchronous methods return instances of the JDK's [CompletionStage], that can be conveniently chained and composed. diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 243aaa03eb7..3b162b901aa 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -1,5 +1,15 @@ ## Authentication +### Quick overview + +* `advanced.auth-provider` in the configuration. +* disabled by default. 
Also available: plain-text credentials, or write your own. +* can also be defined programmatically: + [CqlSession.builder().withAuthCredentials][SessionBuilder.withAuthCredentials] or + [CqlSession.builder().withAuthProvider][SessionBuilder.withAuthProvider]. + +----- + Cassandra's binary protocol supports [SASL]-based authentication. To use it, you must provide an *auth provider* that will authenticate with the server every time a new connection gets established. @@ -68,4 +78,6 @@ acceptable for you, consider writing your own [AuthProvider] implementation [AuthProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/AuthProvider.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html \ No newline at end of file +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- \ No newline at end of file diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index 25a5c1d4c91..d71cb130fa7 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -1,5 +1,15 @@ ## Compression +### Quick overview + +Compress request and response bodies to save bandwidth. + +* `advanced.protocol.compression` in the configuration. +* disabled by default. Also available: LZ4, Snappy. 
+* your application **must** re-declare an explicit dependency to the compression library. + +----- + Cassandra's binary protocol supports optional compression of requests and responses. This reduces network traffic at the cost of a slight CPU overhead, therefore it will likely be beneficial when you have larger payloads, such as: diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 5838360f89f..983c6c4f2ac 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -1,13 +1,19 @@ ## Configuration +### Quick overview + The driver's configuration is composed of options, organized in a hierarchical manner. Optionally, it can define *profiles* that customize a set of options for a particular kind of request. -The default implementation is based on the Typesafe Config framework. It can be completely -overridden if needed. - -For a complete list of built-in options, see the [reference configuration][reference.conf]. +* the default implementation is based on the Typesafe Config framework: + * the driver JAR comes with a [reference.conf] file that defines the defaults. + * you can add an `application.conf` file in the classpath (or an absolute path, or an URL). It + only needs to contain the options that you override. + * hot reloading is supported out of the box. +* the config mechanism can be completely overridden by implementing a set of driver interfaces + ([DriverConfig], [DriverExecutionProfile] and [DriverConfigLoader]) +----- ### Concepts diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 5fcbca3f997..d65fba6334d 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -1,5 +1,18 @@ ## Custom codecs +### Quick overview + +Define custom Java to CQL mappings. + +* implement the [TypeCodec] interface. 
+* registering a codec: + * at init time: [CqlSession.builder().addTypeCodecs()][SessionBuilder.addTypeCodecs] +* using a codec: + * if already registered: `row.get("columnName", MyCustomType.class)` + * otherwise: `row.get("columnName", myCodec)` + +----- + Out of the box, the driver comes with [default CQL to Java mappings](../#cql-to-java-type-mapping). For example, if you read a CQL `text` column, it is mapped to its natural counterpart `java.lang.String`: @@ -226,3 +239,4 @@ private static String formatRow(Row row) { [CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html [GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html [TypeCodec]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 63f32507819..241846f50ab 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -1,5 +1,12 @@ ## Detachable types +### Quick overview + +Advanced topic, only needed if you use Java serialization with driver rows or data types, or create +tuple or UDT types manually. + +----- + Some driver components need to keep an internal reference to their originating [Session]. Under specific circumstances, they can lose that reference, and you might need to reattach them. 
diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 59d45d5113f..a0fe424558b 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -1,8 +1,16 @@ ## Query idempotence +### Quick overview + A request is *idempotent* if executing it multiple times leaves the database in the same state as executing it only once. +* `basic.request.default-idempotence` in the configuration (defaults to false). +* can be overridden per statement [Statement.setIdempotent] or [StatementBuilder.setIdempotence]. +* retries and speculative executions only happen for idempotent statements. + +----- + For example: * `update my_table set list_col = [1] where pk = 1` is idempotent: no matter how many times it gets @@ -37,7 +45,7 @@ SimpleStatement statement = ``` If you don't, they default to the value defined in the [configuration](../configuration/) by the -`request.default-idempotence` option; out of the box, it is set to `false`. +`basic.request.default-idempotence` option; out of the box, it is set to `false`. When you prepare a statement, its idempotence carries over to bound statements: @@ -51,3 +59,6 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. + +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 1dc77bb5d3c..42e9ab4f63b 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -1,6 +1,11 @@ ## Integration -This page contains various information on how to integrate the driver in your application. 
+### Quick overview + +* sample project structures for Maven and Gradle. +* explanations about driver dependencies and when they can be manually excluded. + +----- ### Minimal project structure diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index c660a9d6e65..de4511fb8bc 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -1,5 +1,16 @@ ## Load balancing +### Quick overview + +Which nodes the driver talks to, and in which order they are tried. + +* `basic.load-balancing-policy` in the configuration. +* defaults to `DefaultLoadBalancingPolicy` (opinionated best practices). No out-of-the-box + alternatives, but you can write your own. +* can have per-profile policies. + +----- + A Cassandra cluster is typically composed of multiple nodes; the *load balancing policy* (sometimes abbreviated LBP) is a central component that determines: diff --git a/manual/core/logging/README.md b/manual/core/logging/README.md index 5d190373ec5..39f2c7a320a 100644 --- a/manual/core/logging/README.md +++ b/manual/core/logging/README.md @@ -1,5 +1,12 @@ ## Logging +### Quick overview + +* based on SLF4J. +* config file examples for Logback and Log4J. + +----- + The driver uses [SLF4J] as a logging facade. This allows you to plug in your preferred logging framework (java.util.logging, logback, log4j...) at deployment time. diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 9465404c0b6..45ceb71229c 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -1,5 +1,16 @@ ## Metadata +### Quick overview + +[session.getMetadata()][Session#getMetadata]: node states, schema and token map. + +* immutable, provides a consistent view at a given point in time (e.g. token map always matches + schema). +* pitfall: holding onto a stale instance; must call `session.getMetadata()` again to observe + changes. 
+ +----- + The driver exposes metadata about the Cassandra cluster via the [Session#getMetadata] method. It returns a [Metadata] object, which contains three types of information: diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index a4a93cd2039..9b43bd2963b 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -1,5 +1,16 @@ ## Node metadata +### Quick overview + +[session.getMetadata().getNodes()][Metadata#getNodes]: all nodes known to the driver (even if not +actively connected). + +* [Node] instances are mutable, the fields will update in real time. +* getting notifications: + [CqlSession.builder().withNodeStateListener][SessionBuilder.withNodeStateListener]. + +----- + [Metadata#getNodes] returns all the nodes known to the driver when the metadata was retrieved; this includes down and ignored nodes (see below), so the fact that a node is in this list does not necessarily mean that the driver is connected to it. @@ -37,7 +48,26 @@ client. [Node#getDistance()] is set by the load balancing policy. The driver does not connect to `IGNORED` nodes. The exact definition of `LOCAL` and `REMOTE` is left to the interpretation of each policy, but in general it represents the proximity to the client, and `LOCAL` nodes will be prioritized as -coordinators. They also influence pooling options. +coordinators. They also influence pooling options. + +If you need to follow node state changes, you don't need to poll the metadata manually; instead, +you can register a listener to get notified when changes occur: + +```java +NodeStateListener listener = + new NodeStateListenerBase() { + @Override + public void onUp(@NonNull Node node) { + System.out.printf("%s went UP%n", node); + } + }; +CqlSession session = CqlSession.builder() + .withNodeStateListener(listener) + .build(); +``` + +See [NodeStateListener] for the list of available methods. 
[NodeStateListenerBase] is a +convenience implementation with empty methods, for when you only need to override a few of them. ### Advanced topics @@ -81,4 +111,7 @@ the source code. [Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- [Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- [Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeState.html \ No newline at end of file +[NodeState]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 1e19381fcb6..7697ab17be4 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -1,5 +1,20 @@ ## Schema metadata +### Quick overview + +[session.getMetadata().getKeyspaces()][Metadata#getKeyspaces] + +* immutable (must invoke again to observe changes). +* getting notifications: + [CqlSession.builder().withSchemaChangeListener][SessionBuilder#withSchemaChangeListener]. 
+* enabling/disabling: `advanced.metadata.schema.enabled` in the configuration, or + [session.setSchemaMetadataEnabled()][Session#setSchemaMetadataEnabled]. +* filtering: `advanced.metadata.schema.refreshed-keyspaces` in the configuration. +* schema agreement: wait for the schema to replicate to all nodes (may add latency to DDL + statements). + +----- + [Metadata#getKeyspaces] returns a client-side representation of the database schema: ```java @@ -27,13 +42,15 @@ you can register a listener to get notified when changes occur: ```java SchemaChangeListener listener = - new SchemaChangeListenerBase() { - @Override - public void onTableCreated(TableMetadata table) { - System.out.println("New table: " + table.getName().asCql(true)); - } - }; -session.register(listener); + new SchemaChangeListenerBase() { + @Override + public void onTableCreated(TableMetadata table) { + System.out.println("New table: " + table.getName().asCql(true)); + } + }; +CqlSession session = CqlSession.builder() + .withSchemaChangeListener(listener) + .build(); session.execute("CREATE TABLE test.foo (k int PRIMARY KEY)"); ``` @@ -216,6 +233,7 @@ take a look at the [Performance](../../performance/#schema-updates) page for a f [SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html [Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- [Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- [ExecutionInfo#isSchemaInAgreement]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index f2827ee9f43..2481472e7bd 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -1,5 +1,15 @@ ## Token metadata +### Quick overview + +[session.getMetadata().getTokenMap()][Metadata#getTokenMap] + +* used for token-aware routing or analytics clients. +* immutable (must invoke again to observe changes). +* `advanced.metadata.token-map.enabled` in the configuration (defaults to true). + +----- + [Metadata#getTokenMap] returns information about the tokens used for data replication. It is used internally by the driver to send requests to the optimal coordinator when token-aware routing is enabled. Another typical use case is data analytics clients, for example fetching a large range of diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 468ea436bbe..73ebbb73263 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -1,5 +1,13 @@ ## Metrics +### Quick overview + +* `advanced.metrics` in the configuration. All disabled by default, can be selected individually. +* some metrics are per node, others global to the session, or both. +* unlike driver 3, JMX is not provided out of the box. You need to add the dependency manually. + +----- + The driver exposes measurements of its internal behavior through the popular [Dropwizard Metrics] library. Application developers can select which metrics are enabled, and export them to a monitoring tool. 
diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index ae948cdc9af..01bcf7e261d 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -1,5 +1,19 @@ ## Native protocol +### Quick overview + +Low-level binary format. Mostly irrelevant for everyday use, only governs whether certain features +are available. + +* setting the version: + * automatically negotiated during the connection (improved algorithm in driver 4, no longer an + issue in mixed clusters). + * or force with `advanced.protocol.version` in the configuration. +* reading the version: + [session.getContext().getProtocolVersion()][AttachmentPoint.getProtocolVersion]. + +----- + The native protocol defines the format of the binary messages exchanged between the driver and Cassandra over TCP. As a driver user, you don't need to know the fine details (although the [protocol spec] is available if you're curious); the most visible aspect is that some features are @@ -107,4 +121,5 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ [ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- \ No newline at end of file +[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 04ce8029556..c8cf5c53d03 100644 --- 
a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -1,5 +1,20 @@ ## Paging +### Quick overview + +How the server splits large result sets into multiple network responses. + +* `basic.request.page-size` in the configuration. +* transparent in the synchronous API (`session.execute`): the driver fetches new pages in the + background as you iterate. +* explicit in the asynchronous API (`session.executeAsync`): + [AsyncResultSet.hasMorePages()][AsyncPagingIterable.hasMorePages] and + [AsyncResultSet.fetchNextPage()][AsyncPagingIterable.fetchNextPage]. +* paging state: record the current position and reuse it later (forward only). +* offset queries: not supported natively, but can be emulated client-side. + +----- + When a query returns many rows, it would be inefficient to return them as a single response message. Instead, the driver breaks the results into *pages* which get returned as they are needed. @@ -173,5 +188,7 @@ and random (offset-based) paging. [ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html [AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index e840c6c185a..4db5d9afe87 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -1,5 +1,19 @@ ## Connection pooling +### Quick overview + +One connection pool per node. 
**Many concurrent requests** per connection (don't tune like a JDBC +pool). + +* `advanced.connection` in the configuration: `max-requests-per-connection`, `pool.local.size`, + `pool.remote.size`. +* metrics (per node): `pool.open-connections`, `pool.in-flight`, `pool.available-streams`, + `pool.orphaned-streams`. +* heartbeat: driver-level keepalive, prevents idle connections from being dropped; + `advanced.heartbeat` in the configuration. + +----- + ### Basics The driver communicates with Cassandra over TCP, using the Cassandra binary protocol. This protocol diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 8e203c45f17..12439dba4e8 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -1,5 +1,18 @@ ## Query timestamps +### Quick overview + +Defines the order in which mutations are applied on the server. Ways to set it (by order of +precedence, higher priority first): + +* `USING TIMESTAMP` in the query string. +* programmatically with [Statement.setQueryTimestamp()]. +* timestamp generator: `advanced.timestamp-generator` in the configuration. Defaults to session-wide + monotonic, also available: per-thread monotonic, server-side, or write your own. +* if the generator didn't set it, assigned server-side. + +----- + In Cassandra, each mutation has a microsecond-precision timestamp, which is used to order operations relative to each other. 
@@ -179,3 +192,4 @@ Here is the order of precedence of all the methods described so far: [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-ffi [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 59e93480255..1ac677a6ea8 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -1,5 +1,15 @@ ## Reconnection +### Quick overview + +When a connection is lost, try to reestablish it at configured intervals. + +* `advanced.reconnection-policy` in the configuration; defaults to exponential backoff, also + available: constant delay, write your own. +* applies to connection pools and the control connection. + +----- + If the driver loses a connection to a node, it tries to re-establish it according to a configurable policy. This is used in two places: diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 50e4be58698..23b29c154a5 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -1,5 +1,16 @@ ## Request tracker +### Quick overview + +Callback that gets invoked for every request: success or error, globally and for every tried node. + +* `advanced.request-tracker` in the configuration; defaults to none, also available: request logger, + or write your own. +* or programmatically: + [CqlSession.builder().withRequestTracker()][SessionBuilder.withRequestTracker]. + +----- + The request tracker is a session-wide component that gets notified of the latency and outcome of every application request. The driver comes with an optional implementation that logs requests. 
@@ -107,3 +118,4 @@ com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined c ``` [RequestTracker]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 60042f5e72d..b8d3d034df0 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -1,5 +1,16 @@ ## Retries +### Quick overview + +What to do when a request failed on a node: retry (same or other node), rethrow, or ignore. + +* `advanced.retry-policy` in the configuration. Default policy retries at most once, in cases that + have a high chance of success; you can also write your own. +* can have per-profile policies. +* only kicks in if the query is idempotent. + +----- + When a query fails, it sometimes makes sense to retry it: the error might be temporary, or the query might work on a different node. The driver uses a *retry policy* to determine when and how to retry. It is defined in the [configuration](../configuration/): diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index d23cb8b423b..c9382f96e7d 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -1,5 +1,17 @@ ## Speculative query execution +### Quick overview + +Pre-emptively query another node if the current one takes too long to respond. + +* `advanced.speculative-execution-policy` in the configuration. +* disabled by default. Also available: constant delay, or write your own policy. +* can have per-profile policies. +* only kicks in if the query is idempotent. 
+* creates more traffic: tune your pool and provision your cluster accordingly. + +----- + Sometimes a Cassandra node might be experiencing difficulties (ex: long GC pause) and take longer than usual to reply. Queries sent to that node will experience bad latency. diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 88c34aa3a94..1525f1975d0 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -1,6 +1,18 @@ ## SSL -You can secure traffic between the driver and Cassandra with SSL. There are two aspects to that: +### Quick overview + +Secure the traffic between the driver and Cassandra. + +* `advanced.ssl-engine-factory` in the configuration; defaults to none, also available: JSSE, or + write your own. +* or programmatically: + [CqlSession.builder().withSslEngineFactory()][SessionBuilder.withSslEngineFactory] or + [CqlSession.builder().withSslContext()][SessionBuilder.withSslContext]. + +----- + +There are two aspects to SSL: * **client-to-node encryption**, where the traffic is encrypted, and the client verifies the identity of the Cassandra nodes it connects to; @@ -189,4 +201,6 @@ Note that this approach relies on the driver's [internal API](../../api_conventi [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html -[JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization \ No newline at end of file +[JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 0544d4d16b0..9f55eddd59e 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -1,5 +1,15 @@ ## Statements +### Quick overview + +What you pass to `session.execute()`. + +* three types: simple (textual query), bound (prepared) and batch. +* built-in implementations are **immutable**. Setters always return a new object, don't ignore the + result. + +----- + To execute a CQL query, you create a [Statement] instance and pass it to [Session#execute][execute] or [Session#executeAsync][executeAsync]. The driver provides various implementations: diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index f082e984058..60208588a38 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -1,5 +1,15 @@ ## Batch statements +### Quick overview + +Group a set of statements into an atomic operation. + +* create with [BatchStatement.newInstance()] or [BatchStatement.builder()]. +* built-in implementation is **immutable**. Setters always return a new object, don't ignore the + result. + +----- + Use [BatchStatement] to execute a set of queries as an atomic operation (refer to [Batching inserts, updates and deletes][batch_dse] to understand how to use batching effectively): @@ -52,5 +62,7 @@ due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future v to execute such a batch, an `IllegalArgumentException` is thrown. 
[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html -[CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 \ No newline at end of file +[CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 66f1c7e3be6..c1ffa2582ef 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -1,5 +1,14 @@ ## Per-query keyspace +### Quick overview + +Specify the keyspace separately instead of hardcoding it in the query string. + +* Cassandra 4+ / DSE 6+. +* only works with simple statements. + +----- + Sometimes it is convenient to send the keyspace separately from the query string, and without switching the whole session to that keyspace either. For example, you might have a multi-tenant setup where identical requests are executed against different keyspaces. diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 7026c7c4824..5de7a732bac 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -1,5 +1,21 @@ ## Prepared statements +### Quick overview + +Prepare a query string once, reuse with different values. More efficient than simple statements for +queries that are used often. 
+ +* create the prepared statement with `session.prepare()`, call [bind()][PreparedStatement.bind] or + [boundStatementBuilder()][PreparedStatement.boundStatementBuilder] on it to create executable + statements. +* the session has a built-in cache, it's OK to prepare the same string twice. +* values: `?` or `:name`, fill with `setXxx(int, ...)` or `setXxx(String, ...)` respectively. +* some values can be left unset with Cassandra 2.2+ / DSE 5+. +* built-in implementation is **immutable**. Setters always return a new object, don't ignore the + result. + +----- + Use prepared statements for queries that are executed multiple times in your application: ```java @@ -181,9 +197,9 @@ parameters. #### Unset values With [native protocol](../../native_protocol/) V3, all variables must be bound. With native protocol -V4 or above, variables can be left unset, in which case they will be ignored (no tombstones will be -generated). If you're reusing a bound statement, you can use the `unset` method to unset variables -that were previously set: +V4 (Cassandra 2.2 / DSE 5) or above, variables can be left unset, in which case they will be ignored +(no tombstones will be generated). If you're reusing a bound statement, you can use the `unset` +method to unset variables that were previously set: ```java BoundStatement bound = ps1.bind() @@ -316,4 +332,6 @@ observe the new columns in the result set. 
[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 -[guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction \ No newline at end of file +[guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 6e84dc7e33e..7ca92408891 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -1,5 +1,17 @@ ## Simple statements +### Quick overview + +For one-off executions of a raw query string. + +* create with [SimpleStatement.newInstance()] or [SimpleStatement.builder()]. +* values: `?` or `:name`, fill with `setPositionalValues()` or `setNamedValues()` respectively. + Driver has to guess target CQL types, this can lead to ambiguities. +* built-in implementation is **immutable**. Setters always return a new object, don't ignore the + result. + +----- + Use [SimpleStatement] for queries that will be executed only once (or just a few times): ```java @@ -171,3 +183,5 @@ Or you could also use [prepared statements](../prepared/), which don't have this parameter types are known in advance. 
[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 386b3e6a4e5..af8e95bb7c0 100644 --- a/manual/core/temporal_types/README.md +++ b/manual/core/temporal_types/README.md @@ -1,8 +1,19 @@ ## Temporal types +### Quick overview + This page provides more details about the various CQL time types, and the Java types they are mapped to in the driver. +| CQL | Java | | +|---|---|---| +|`date` | `java.time.LocalDate` || +|`time` | `java.time.LocalTime` || +|`timestamp` | `java.time.Instant` | No time zone. Use `Instant.atZone` or register [TypeCodecs.ZONED_TIMESTAMP_SYSTEM], [TypeCodecs.ZONED_TIMESTAMP_UTC] or [TypeCodecs.zonedTimestampAt()] | +|`duration` | [CqlDuration] | Custom driver type; can't be accurately represented by any of the `java.time` types. | + +----- + ### Date and time CQL types `date` and `time` map directly to `java.time.LocalDate` and `java.time.LocalTime`. diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 1d628bd746e..f776cb46bdd 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -1,5 +1,16 @@ ## Request throttling +### Quick overview + +Limit session throughput. + +* `advanced.throttler` in the configuration; defaults to pass-through (no throttling), also + available: concurrency-based (max simultaneous requests), rate-based (max requests per time unit), + or write your own. +* metrics: `throttling.delay`, `throttling.queue-size`, `throttling.errors`. 
+ +----- + Throttling allows you to limit how many requests a session can execute concurrently. This is useful if you have multiple applications connecting to the same Cassandra cluster, and want to enforce some kind of SLA to ensure fair resource allocation. @@ -47,9 +58,7 @@ This is a no-op implementation: requests are simply allowed to proceed all the t Note that you will still hit a limit if all your connections run out of stream ids. In that case, requests will fail with an [AllNodesFailedException], with the `getErrors()` method returning a -[BusyConnectionException] for each node. - - +[BusyConnectionException] for each node. See the [connection pooling](../pooling/) page. #### Concurrency-based @@ -76,9 +85,7 @@ function of the number of connected nodes and the `connection.pool.*.size` and `connection.max-requests-per-connection` configuration options. Keep in mind that aggressive speculative executions and timeout options can inflate stream id consumption, so keep a safety margin. One good way to get this right is to track the `pool.available-streams` [metric](../metrics) -on every node, and make sure it never reaches 0. - - +on every node, and make sure it never reaches 0. See the [connection pooling](../pooling/) page. #### Rate-based diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 1de5f559df9..6466e177276 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -1,5 +1,18 @@ ## Query tracing +### Quick overview + +Detailed information about the server-side internals for a given query. + +* disabled by default, must enable per statement with [Statement.setTracing()] or + [StatementBuilder.setTracing()]. +* retrieve with [ResultSet.getExecutionInfo().getTracingId()][ExecutionInfo.getTracingId()] and + [getQueryTrace()][ExecutionInfo.getQueryTrace()]. +* `advanced.request.trace` in the configuration: fine-grained control over how the driver fetches + the trace data. 
+ +----- + To help troubleshooting performance, Cassandra offers the ability to *trace* a query, in other words capture detailed information about the the internal operations performed by all nodes in the cluster in order to build the response. @@ -102,3 +115,7 @@ thrown. [ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html [QueryTrace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 94f38dd8858..e5cc5564fdc 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -1,5 +1,17 @@ ## Tuples +### Quick overview + +Ordered set of anonymous, typed fields, e.g. `tuple`, `(1, 'a', 1.0)`. + +* `row.getTupleValue()` / `boundStatement.setTupleValue()`. +* positional getters and setters: `tupleValue.getInt(0)`, `tupleValue.setString(1, "a")`... +* getting hold of the [TupleType]: statement or session metadata, `tupleValue.getType()`, or + `DataTypes.tupleOf()`. +* creating a value from a type: `tupleType.newValue()`. + +----- + [CQL tuples][cql_doc] are ordered sets of anonymous, typed fields. 
They can be used as a column type in tables, or a field type in [user-defined types](../udts/): diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index e0d7e60ea1b..26a2b38b661 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -1,5 +1,21 @@ ## User-defined types +### Quick overview + +Ordered set of named, typed fields, e.g. `{ street: '1 Main St', zip: 12345}`. + +* `row.getUdtValue()` / `boundStatement.setUdtValue()`. +* positional or named getters and setters: `udtValue.getString("street")`, + `udtValue.setInt(1, 12345)`... +* getting hold of the [UserDefinedType]: + * statement or session metadata, or `udtValue.getType()`. + * `UserDefinedTypeBuilder` (not recommended, dangerous if you build a type that doesn't match the + database schema). +* creating a value from a type: `userDefinedType.newValue()`. + +----- + + [CQL user-defined types][cql_doc] are ordered sets of named, typed fields. They must be defined in a keyspace: diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 5714d010b6c..213dfdce0eb 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -1,5 +1,17 @@ ## DAOs +### Quick overview + +Interface annotated with [@Dao]. + +* interface-level annotations: + * [@DefaultNullSavingStrategy] + * [@HierarchyScanStrategy] +* method-level annotations: query methods (see child pages). +* instantiated from a [@DaoFactory] method on the mapper. + +----- + A DAO is an interface that defines a set of query methods. In general, those queries will relate to the same [entity](../entities/) (although that is not a requirement). @@ -136,6 +148,7 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
[@Dao]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html [@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html [@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 8582bc05351..15be2c1f346 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -1,5 +1,23 @@ ## Entities +### Quick overview + +POJO annotated with [@Entity], must expose a no-arg constructor. + +* class-level annotations: + * [@NamingStrategy] + * [@CqlName] + * [@HierarchyScanStrategy] +* field/method-level annotations: + * [@PartitionKey], [@ClusteringColumn] + * [@Computed] + * [@Transient] + * [@CqlName] +* can inherit annotated fields/methods and [@NamingStrategy]. Only use [@Entity] on concrete + classes. + +----- + An entity is a Java class that will be mapped to a Cassandra table or [UDT](../../core/udts). Entities are used as arguments or return types of [DAO](../daos/) methods; they can also be nested inside other entities (to map UDT columns). @@ -251,6 +269,11 @@ i.e.: private transient int notAColumn; ``` +#### Custom column name + +Override the CQL name manually with [@CqlName], see [User-provided names](#user-provided-names) +above. 
+ ### Default keyspace You can specify a default keyspace to use when doing operations on a given entity: diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index c9552590e2a..f230dff9b88 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -1,5 +1,15 @@ ## Mapper interface +### Quick overview + +Interface annotated with [@Mapper], entry point to mapper features. + +* a corresponding builder gets generated (default: `[YourInterfacesName]Builder`). +* defines [@DaoFactory] methods that provide DAO instances. They can be parameterized by keyspace + and/or table. + +----- + The mapper interface is the top-level entry point to mapping features. It wraps a core driver session, and acts as a factory of [DAO](../daos/) objects that will be used to execute requests. From 7a27ab1f99e62b9ca61ae0c7a48968b131446b59 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 5 Sep 2019 11:43:09 -0700 Subject: [PATCH 102/979] JAVA-2196: Convert TableOptionsIT into a unit test Table options vary across server versions, it's hard to write robust integration tests. TableOptionsIT only covered two basic text options added in Cassandra 4 (JAVA-2090), this can be addressed equally well by a unit test. 
--- .../schema/parsing/TableParserTest.java | 20 +++++ .../driver/core/metadata/TableOptionsIT.java | 77 ------------------- 2 files changed, 20 insertions(+), 77 deletions(-) delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index e361fb8a39d..3fab5fc11b1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; @@ -24,6 +25,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.metadata.schema.queries.CassandraSchemaRows; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; @@ -114,6 +116,24 @@ public void should_parse_modern_tables() { .containsEntry("rows_per_partition", "NONE"); } + /** Covers two additional Cassandra 4.0 options added in JAVA-2090. 
*/ + @Test + public void should_parse_read_repair_and_additional_write_policy() { + AdminRow tableRow40 = mockModernTableRow("ks", "foo"); + when(tableRow40.get("read_repair", TypeCodecs.TEXT)).thenReturn("NONE"); + when(tableRow40.get("additional_write_policy", TypeCodecs.TEXT)).thenReturn("40p"); + + SchemaRows rows = modernRows(tableRow40, COLUMN_ROWS_3_0, INDEX_ROWS_3_0); + TableParser parser = new TableParser(rows, context); + TableMetadata table = parser.parseTable(tableRow40, KEYSPACE_ID, Collections.emptyMap()); + + checkTable(table); + + assertThat(table.getOptions()) + .containsEntry(CqlIdentifier.fromInternal("read_repair"), "NONE") + .containsEntry(CqlIdentifier.fromInternal("additional_write_policy"), "40p"); + } + // Shared between 2.2 and 3.0 tests, all expected values are the same except the 'caching' option private void checkTable(TableMetadata table) { assertThat(table.getKeyspace().asInternal()).isEqualTo("ks"); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java deleted file mode 100644 index e26d3661bf4..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TableOptionsIT.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.core.metadata; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.categories.ParallelizableTests; -import java.time.Duration; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; - -@Category(ParallelizableTests.class) -public class TableOptionsIT { - private static final CqlIdentifier READ_REPAIR_KEY = CqlIdentifier.fromCql("read_repair"); - private static final CqlIdentifier ADDITIONAL_WRITE_POLICY_KEY = - CqlIdentifier.fromCql("additional_write_policy"); - - private static CcmRule ccmRule = CcmRule.getInstance(); - // disable debouncer to speed up test. 
- private static SessionRule sessionRule = - SessionRule.builder(ccmRule) - .withConfigLoader( - SessionUtils.configLoaderBuilder() - .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) - .withDuration(DefaultDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(0)) - .build()) - .build(); - - @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - - @Test - @CassandraRequirement(min = "4.0", description = "This test covers Cassandra 4+ features") - public void should_handle_cassandra4_table_options() { - CqlSession session = sessionRule.session(); - - // A simple table with read_repair and additional_write_policy options - session.execute( - "CREATE TABLE foo(k int, a text, PRIMARY KEY(k)) " - + "WITH read_repair='NONE' AND additional_write_policy='40p'"); - - TableMetadata fooMetadata = - session - .getMetadata() - .getKeyspace(sessionRule.keyspace()) - .orElseThrow(AssertionError::new) - .getTable("foo") - .orElseThrow(AssertionError::new); - - assertThat(fooMetadata.getOptions()) - .containsEntry(READ_REPAIR_KEY, "NONE") - .containsEntry(ADDITIONAL_WRITE_POLICY_KEY, "40p"); - } -} From 797c608c54896fdc6a58dd4dbc9409e7ffc9e449 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 4 Sep 2019 12:55:04 -0700 Subject: [PATCH 103/979] JAVA-2419: Allow registration of user codecs at runtime --- changelog/README.md | 1 + .../api/core/detach/AttachmentPoint.java | 7 ++ .../type/codec/registry/CodecRegistry.java | 17 +++- .../codec/registry/MutableCodecRegistry.java | 58 +++++++++++++ .../core/context/DefaultDriverContext.java | 6 +- .../codec/registry/CachingCodecRegistry.java | 81 +++++++++++++++++-- .../registry/CodecRegistryConstants.java | 2 +- .../codec/registry/DefaultCodecRegistry.java | 35 ++++---- .../core/cql/RequestHandlerTestHarness.java | 4 +- .../schema/parsing/AggregateParserTest.java | 4 +- .../schema/parsing/SchemaParserTest.java | 4 +- .../registry/CachingCodecRegistryTest.java | 80 +++++++++++++++--- 
.../type/codec/registry/CodecRegistryIT.java | 38 +++++++++ manual/core/custom_codecs/README.md | 19 +++++ .../driver/api/querybuilder/CharsetCodec.java | 10 ++- 15 files changed, 323 insertions(+), 43 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.java diff --git a/changelog/README.md b/changelog/README.md index 82798b1e4cf..59059a92a4e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2419: Allow registration of user codecs at runtime - [documentation] JAVA-2384: Add quick overview section to each manual page - [documentation] JAVA-2412: Cover DDL query debouncing in FAQ and upgrade guide - [documentation] JAVA-2416: Update paging section in the manual diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/detach/AttachmentPoint.java b/core/src/main/java/com/datastax/oss/driver/api/core/detach/AttachmentPoint.java index 930a72a35fd..a8151da3079 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/detach/AttachmentPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/detach/AttachmentPoint.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import edu.umd.cs.findbugs.annotations.NonNull; /** @see Detachable */ @@ -39,6 +40,12 @@ public CodecRegistry getCodecRegistry() { @NonNull ProtocolVersion getProtocolVersion(); + /** + * Note that the default registry implementation returned by the driver also implements {@link + * MutableCodecRegistry}, which allows you to register new codecs at runtime. You can safely cast + * the result of this method (as long as you didn't extend the driver context to plug a custom + * registry implementation). 
+ */ @NonNull CodecRegistry getCodecRegistry(); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.java index 246bd3eeba1..9eddfe6bb3a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.java @@ -40,13 +40,28 @@ * *

      They may also provide additional mappings to other Java types (for use with methods such as * {@link Row#get(int, Class)}, {@link TupleValue#set(int, Object, Class)}, etc.) + * + *

      The default implementation returned by the driver also implements {@link + * MutableCodecRegistry}, and we strongly recommend that custom implementations do as well. The two + * interfaces are only separate for backward compatibility, because mutability was introduced in + * 4.3.0. */ public interface CodecRegistry { /** * An immutable instance, that only handles built-in driver types (that is, primitive types, and * collections, tuples, and user defined types thereof). + * + *

      Note that, due to implementation details, this instance is a {@link MutableCodecRegistry}, + * but any attempt to {@linkplain MutableCodecRegistry#register(TypeCodec) register new codecs} + * will throw {@link UnsupportedOperationException}. */ - CodecRegistry DEFAULT = new DefaultCodecRegistry("default"); + CodecRegistry DEFAULT = + new DefaultCodecRegistry("default") { + @Override + public void register(TypeCodec newCodec) { + throw new UnsupportedOperationException("CodecRegistry.DEFAULT is immutable"); + } + }; /** * Returns a codec to handle the conversion between the given types. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.java new file mode 100644 index 00000000000..67d79cc138f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.type.codec.registry; + +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; + +/** + * A codec registry that can be extended with new user codecs at runtime. + * + *

      This interface only exists to preserve backward compatibility. In practice, the default {@link + * CodecRegistry} implementation returned by the driver implements this interface, so it can safely + * be cast. + * + *

      However {@link CodecRegistry#DEFAULT} is immutable. It implements this interface, but {@link + * #register(TypeCodec)} throws an {@link UnsupportedOperationException}. + * + * @since 4.3.0 + */ +public interface MutableCodecRegistry extends CodecRegistry { + + /** + * Adds the given codec to the registry. + * + *

      This method will log a warning and ignore the codec if it collides with one already present + * in the registry. Note that the driver's built-in implementation uses internal synchronization + * to guarantee that two threads cannot register colliding codecs concurrently; registration is + * not expected to happen in a very concurrent manner, so this should not pose a performance + * issue. + */ + void register(TypeCodec codec); + + /** Invokes {@link #register(TypeCodec)} for every codec in the given list. */ + default void register(TypeCodec... codecs) { + for (TypeCodec codec : codecs) { + register(codec); + } + } + + /** Invokes {@link #register(TypeCodec)} for every codec in the given list. */ + default void register(Iterable> codecs) { + for (TypeCodec codec : codecs) { + register(codec); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index e4c2df05461..f948110ab52 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -36,6 +36,7 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; @@ -436,8 +437,9 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { } protected CodecRegistry buildCodecRegistry(String logPrefix, List> codecs) { - TypeCodec[] array = new TypeCodec[codecs.size()]; 
- return new DefaultCodecRegistry(logPrefix, codecs.toArray(array)); + MutableCodecRegistry registry = new DefaultCodecRegistry(logPrefix); + registry.register(codecs); + return registry; } protected SchemaQueriesFactory buildSchemaQueriesFactory() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index d0e22888717..049e79869e5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -27,7 +27,7 @@ import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.reflect.TypeToken; @@ -41,6 +41,9 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -57,7 +60,7 @@ * implement {@link #getCachedCodec(DataType, GenericType, boolean)}. 
*/ @ThreadSafe -public abstract class CachingCodecRegistry implements CodecRegistry { +public abstract class CachingCodecRegistry implements MutableCodecRegistry { private static final Logger LOG = LoggerFactory.getLogger(CachingCodecRegistry.class); @@ -68,19 +71,83 @@ public abstract class CachingCodecRegistry implements CodecRegistry { protected final String logPrefix; private final TypeCodec[] primitiveCodecs; - private final TypeCodec[] userCodecs; + private final CopyOnWriteArrayList> userCodecs = new CopyOnWriteArrayList<>(); private final IntMap primitiveCodecsByCode; + private final Lock registerLock = new ReentrantLock(); protected CachingCodecRegistry( - @NonNull String logPrefix, - @NonNull TypeCodec[] primitiveCodecs, - @NonNull TypeCodec[] userCodecs) { + @NonNull String logPrefix, @NonNull TypeCodec[] primitiveCodecs) { this.logPrefix = logPrefix; this.primitiveCodecs = primitiveCodecs; - this.userCodecs = userCodecs; this.primitiveCodecsByCode = sortByProtocolCode(primitiveCodecs); } + /** + * @deprecated this constructor calls an overridable method ({@link #register(TypeCodec[])}), + * which is a bad practice. The recommended alternative is to use {@link + * #CachingCodecRegistry(String, TypeCodec[])}, then add the codecs with one of the {@link + * #register} methods. + */ + @Deprecated + protected CachingCodecRegistry( + @NonNull String logPrefix, + @NonNull TypeCodec[] primitiveCodecs, + @NonNull TypeCodec[] userCodecs) { + this(logPrefix, primitiveCodecs); + register(userCodecs); + } + + @Override + public void register(TypeCodec newCodec) { + // This method could work without synchronization, but there is a tiny race condition that would + // allow two threads to register colliding codecs (the last added codec would later be ignored, + // but without any warning). 
Serialize calls to avoid that: + registerLock.lock(); + try { + for (TypeCodec primitiveCodec : primitiveCodecs) { + if (collides(newCodec, primitiveCodec)) { + LOG.warn( + "[{}] Ignoring codec {} because it collides with built-in primitive codec {}", + logPrefix, + newCodec, + primitiveCodec); + return; + } + } + for (TypeCodec userCodec : userCodecs) { + if (collides(newCodec, userCodec)) { + LOG.warn( + "[{}] Ignoring codec {} because it collides with previously registered codec {}", + logPrefix, + newCodec, + userCodec); + return; + } + } + // Technically this would cover the two previous cases as well, but we want precise messages. + try { + TypeCodec cachedCodec = + getCachedCodec(newCodec.getCqlType(), newCodec.getJavaType(), false); + LOG.warn( + "[{}] Ignoring codec {} because it collides with previously generated codec {}", + logPrefix, + newCodec, + cachedCodec); + return; + } catch (CodecNotFoundException ignored) { + // Catching the exception is ugly, but it avoids breaking the internal API (e.g. by adding a + // getCachedCodecIfExists) + } + userCodecs.add(newCodec); + } finally { + registerLock.unlock(); + } + } + + private boolean collides(TypeCodec newCodec, TypeCodec oldCodec) { + return oldCodec.accepts(newCodec.getCqlType()) && oldCodec.accepts(newCodec.getJavaType()); + } + /** * Gets a complex codec from the cache. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CodecRegistryConstants.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CodecRegistryConstants.java index d52f79cc9f4..075b72d6cbf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CodecRegistryConstants.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CodecRegistryConstants.java @@ -28,7 +28,7 @@ public class CodecRegistryConstants { * *

      This is exposed in case you want to call {@link * DefaultCodecRegistry#DefaultCodecRegistry(String, int, BiFunction, int, BiConsumer, - * TypeCodec[], TypeCodec[])} but only customize the caching options. + * TypeCodec[])} but only customize the caching options. */ public static final TypeCodec[] PRIMITIVE_CODECS = new TypeCodec[] { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java index 26a93837026..126059c4c7a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java @@ -48,7 +48,24 @@ public class DefaultCodecRegistry extends CachingCodecRegistry { private final LoadingCache> cache; /** - * Creates a new instance, with some amount of control over the cache behavior. + * Creates a new instance that accepts user codecs, with the default built-in codecs and the + * default cache behavior. + */ + public DefaultCodecRegistry(@NonNull String logPrefix) { + this(logPrefix, CodecRegistryConstants.PRIMITIVE_CODECS); + } + + /** + * Creates a new instance that accepts user codecs, with the given built-in codecs and the default + * cache behavior. + */ + public DefaultCodecRegistry(@NonNull String logPrefix, @NonNull TypeCodec... primitiveCodecs) { + this(logPrefix, 0, null, 0, null, primitiveCodecs); + } + + /** + * Same as {@link #DefaultCodecRegistry(String, TypeCodec[])}, but with some amount of control + * over cache behavior. * *

      Giving full access to the Guava cache API would be too much work, since it is shaded and we * have to wrap everything. If you need something that's not available here, it's easy enough to @@ -61,10 +78,9 @@ public DefaultCodecRegistry( @Nullable BiFunction, Integer> cacheWeigher, int maximumCacheWeight, @Nullable BiConsumer> cacheRemovalListener, - @NonNull TypeCodec[] primitiveCodecs, - @NonNull TypeCodec[] userCodecs) { + @NonNull TypeCodec... primitiveCodecs) { - super(logPrefix, primitiveCodecs, userCodecs); + super(logPrefix, primitiveCodecs); CacheBuilder cacheBuilder = CacheBuilder.newBuilder(); if (initialCacheCapacity > 0) { cacheBuilder.initialCapacity(initialCacheCapacity); @@ -93,17 +109,6 @@ public TypeCodec load(@NonNull CacheKey key) throws Exception { } } - public DefaultCodecRegistry(@NonNull String logPrefix, @NonNull TypeCodec... userCodecs) { - this(logPrefix, CodecRegistryConstants.PRIMITIVE_CODECS, userCodecs); - } - - public DefaultCodecRegistry( - @NonNull String logPrefix, - @NonNull TypeCodec[] primitiveCodecs, - @NonNull TypeCodec... 
userCodecs) { - this(logPrefix, 0, null, 0, null, primitiveCodecs, userCodecs); - } - @Override protected TypeCodec getCachedCodec( @Nullable DataType cqlType, @Nullable GenericType javaType, boolean isJavaCovariant) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java index dfe90e9b8d8..bbc5ba381ee 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.time.TimestampGenerator; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.ProtocolFeature; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; @@ -48,7 +49,6 @@ import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.session.throttling.PassThroughRequestThrottler; import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; -import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer; import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; import com.datastax.oss.protocol.internal.Frame; @@ -128,7 +128,7 @@ protected RequestHandlerTestHarness(Builder builder) { .thenReturn(-1L); when(context.getSpeculativeExecutionPolicy(anyString())).thenReturn(speculativeExecutionPolicy); - when(context.getCodecRegistry()).thenReturn(new DefaultCodecRegistry("test")); + 
when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); when(timestampGenerator.next()).thenReturn(Long.MIN_VALUE); when(context.getTimestampGenerator()).thenReturn(timestampGenerator); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java index 14dfe6bfb4e..ee26b25c95d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java @@ -22,8 +22,8 @@ import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; -import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.util.Bytes; import java.util.Collections; @@ -57,7 +57,7 @@ public class AggregateParserTest extends SchemaParserTestBase { @Before public void setup() { - when(context.getCodecRegistry()).thenReturn(new DefaultCodecRegistry("test")); + when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); when(context.getProtocolVersion()).thenReturn(ProtocolVersion.DEFAULT); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java index 7e030628bf4..7109910705f 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java @@ -22,11 +22,11 @@ import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.metadata.MetadataRefresh; import com.datastax.oss.driver.internal.core.metadata.schema.queries.CassandraSchemaRows; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; import com.datastax.oss.driver.internal.core.metadata.schema.refresh.SchemaRefresh; -import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Map; import java.util.function.Consumer; @@ -59,7 +59,7 @@ public void should_parse_legacy_keyspace_row() { @Test public void should_parse_keyspace_with_all_children() { // Needed to parse the aggregate - when(context.getCodecRegistry()).thenReturn(new DefaultCodecRegistry("test")); + when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); SchemaRefresh refresh = (SchemaRefresh) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java index 271bb74a2be..e5ef922ea19 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import 
static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; import com.datastax.oss.driver.api.core.CqlIdentifier; @@ -40,6 +41,7 @@ import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.type.UserDefinedTypeBuilder; import com.datastax.oss.driver.internal.core.type.codec.CqlIntToStringCodec; +import com.datastax.oss.driver.internal.core.type.codec.IntCodec; import com.datastax.oss.driver.internal.core.type.codec.ListCodec; import com.datastax.oss.driver.internal.core.type.codec.registry.CachingCodecRegistryTest.TestCachingCodecRegistry.MockCache; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -173,8 +175,9 @@ public void should_find_user_codec_for_built_in_java_type() { CqlIntToStringCodec intToStringCodec1 = new CqlIntToStringCodec(); // register a second codec to also check that the first one is preferred CqlIntToStringCodec intToStringCodec2 = new CqlIntToStringCodec(); - TestCachingCodecRegistry registry = - new TestCachingCodecRegistry(mockCache, intToStringCodec1, intToStringCodec2); + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(intToStringCodec1, intToStringCodec2); + verify(mockCache).lookup(DataTypes.INT, GenericType.STRING, false); // When the mapping is not ambiguous, the user type should be returned assertThat(registry.codecFor(DataTypes.INT, GenericType.STRING)).isSameAs(intToStringCodec1); @@ -192,8 +195,9 @@ public void should_find_user_codec_for_built_in_java_type() { public void should_find_user_codec_for_custom_java_type() { TextToPeriodCodec textToPeriodCodec1 = new TextToPeriodCodec(); TextToPeriodCodec textToPeriodCodec2 = new TextToPeriodCodec(); - TestCachingCodecRegistry registry = - new TestCachingCodecRegistry(mockCache, textToPeriodCodec1, textToPeriodCodec2); + 
TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(textToPeriodCodec1, textToPeriodCodec2); + verify(mockCache).lookup(DataTypes.TEXT, GenericType.of(Period.class), false); assertThat(registry.codecFor(DataTypes.TEXT, GenericType.of(Period.class))) .isSameAs(textToPeriodCodec1); @@ -865,7 +869,8 @@ public void should_not_find_codec_if_java_type_unknown() { @Test public void should_not_allow_covariance_for_lookups_by_java_type() { - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache, new ACodec()); + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(new ACodec()); InOrder inOrder = inOrder(mockCache); // covariance not allowed @@ -887,8 +892,10 @@ public void should_not_allow_covariance_for_lookups_by_java_type() { @Test public void should_allow_covariance_for_lookups_by_cql_type_and_value() { - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache, new ACodec()); + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(new ACodec()); InOrder inOrder = inOrder(mockCache); + inOrder.verify(mockCache).lookup(DataTypes.INT, GenericType.of(A.class), false); // covariance allowed @@ -914,8 +921,10 @@ public void should_allow_covariance_for_lookups_by_cql_type_and_value() { @Test public void should_allow_covariance_for_lookups_by_value() { - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache, new ACodec()); + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(new ACodec()); InOrder inOrder = inOrder(mockCache); + inOrder.verify(mockCache).lookup(DataTypes.INT, GenericType.of(A.class), false); // covariance allowed @@ -937,13 +946,66 @@ public void should_allow_covariance_for_lookups_by_value() { inOrder.verifyNoMoreInteractions(); } + @Test + public void should_register_user_codec_at_runtime() { + CqlIntToStringCodec 
intToStringCodec = new CqlIntToStringCodec(); + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + registry.register(intToStringCodec); + // register checks the cache for collisions + verify(mockCache).lookup(DataTypes.INT, GenericType.STRING, false); + + // When the mapping is not ambiguous, the user type should be returned + assertThat(registry.codecFor(DataTypes.INT, GenericType.STRING)).isSameAs(intToStringCodec); + assertThat(registry.codecFor(DataTypes.INT, String.class)).isSameAs(intToStringCodec); + assertThat(registry.codecFor(DataTypes.INT, "")).isSameAs(intToStringCodec); + + // When there is an ambiguity with a built-in codec, the built-in codec should have priority + assertThat(registry.codecFor(DataTypes.INT)).isSameAs(TypeCodecs.INT); + assertThat(registry.codecFor("")).isSameAs(TypeCodecs.TEXT); + + verifyZeroInteractions(mockCache); + } + + @Test + public void should_ignore_user_codec_if_collides_with_builtin_codec() { + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + + IntCodec userIntCodec = new IntCodec(); + registry.register(userIntCodec); + + assertThat(registry.codecFor(DataTypes.INT, Integer.class)).isNotSameAs(userIntCodec); + } + + @Test + public void should_ignore_user_codec_if_collides_with_other_user_codec() { + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + CqlIntToStringCodec intToStringCodec1 = new CqlIntToStringCodec(); + CqlIntToStringCodec intToStringCodec2 = new CqlIntToStringCodec(); + + registry.register(intToStringCodec1, intToStringCodec2); + + assertThat(registry.codecFor(DataTypes.INT, GenericType.STRING)).isSameAs(intToStringCodec1); + } + + @Test + public void should_ignore_user_codec_if_collides_with_generated_codec() { + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + + TypeCodec> userListOfIntCodec = TypeCodecs.listOf(TypeCodecs.INT); + registry.register(userListOfIntCodec); + + assertThat( + 
registry.codecFor(DataTypes.listOf(DataTypes.INT), GenericType.listOf(Integer.class))) + .isNotSameAs(userListOfIntCodec); + } + // Our intent is not to test Guava cache, so we don't need an actual cache here. // The only thing we want to check in our tests is if getCachedCodec was called. public static class TestCachingCodecRegistry extends CachingCodecRegistry { private final MockCache cache; - public TestCachingCodecRegistry(MockCache cache, TypeCodec... userCodecs) { - super("test", CodecRegistryConstants.PRIMITIVE_CODECS, userCodecs); + public TestCachingCodecRegistry(MockCache cache) { + super("test", CodecRegistryConstants.PRIMITIVE_CODECS); this.cache = cache; } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index 3bcc8b76a83..d3f266f330e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.core.type.reflect.GenericTypeParameter; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; @@ -205,6 +206,43 @@ public void should_be_able_to_register_and_use_custom_codec() { } } + @Test + public void should_register_custom_codec_at_runtime() { + // Still create a separate session because we don't want to interfere with other tests + try (CqlSession session = SessionUtils.newSession(CCM_RULE, SESSION_RULE.keyspace())) { + + 
MutableCodecRegistry registry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + registry.register(new FloatCIntCodec()); + + PreparedStatement prepared = session.prepare("INSERT INTO test (k, v) values (?, ?)"); + + // float value for int column should work. + BoundStatement insert = + prepared + .boundStatementBuilder() + .setString(0, name.getMethodName()) + .setFloat(1, 3.14f) + .build(); + session.execute(insert); + + ResultSet result = + session.execute( + SimpleStatement.builder("SELECT v from test where k = ?") + .addPositionalValue(name.getMethodName()) + .build()); + + List rows = result.all(); + assertThat(rows).hasSize(1); + + // should be able to retrieve value back as float, some precision is lost due to going from + // int -> float. + Row row = rows.iterator().next(); + assertThat(row.getFloat("v")).isEqualTo(3.0f); + assertThat(row.getFloat(0)).isEqualTo(3.0f); + } + } + // TODO: consider moving this into source as it could be generally useful. private abstract static class MappingCodec implements TypeCodec { diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index d65fba6334d..447750d5b2d 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -7,6 +7,13 @@ Define custom Java to CQL mappings. * implement the [TypeCodec] interface. 
* registering a codec: * at init time: [CqlSession.builder().addTypeCodecs()][SessionBuilder.addTypeCodecs] + * at runtime: + + ```java + MutableCodecRegistry registry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + registry.register(myCodec); + ``` * using a codec: * if already registered: `row.get("columnName", MyCustomType.class)` * otherwise: `row.get("columnName", myCodec)` @@ -100,6 +107,18 @@ CqlSession session = CqlSession.builder() .build(); ``` +You may also add codecs to an existing session at runtime: + +```java +// The cast is required for backward compatibility reasons (registry mutability was introduced in +// 4.3.0). It is safe as long as you didn't hack the driver internals to plug a custom registry +// implementation. +MutableCodecRegistry registry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + +registry.register(new CqlIntToStringCodec()); +``` + You can now use the new mapping in your code: ```java diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/CharsetCodec.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/CharsetCodec.java index 5b16cc80f9b..640bcd52e12 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/CharsetCodec.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/CharsetCodec.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.internal.querybuilder.DefaultLiteral; @@ -31,8 +32,13 @@ public class CharsetCodec implements TypeCodec { /** A registry that 
contains an instance of this codec. */ - public static final CodecRegistry TEST_REGISTRY = - new DefaultCodecRegistry("test", new CharsetCodec()); + public static final CodecRegistry TEST_REGISTRY; + + static { + MutableCodecRegistry registry = new DefaultCodecRegistry("test"); + registry.register(new CharsetCodec()); + TEST_REGISTRY = registry; + } @NonNull @Override From a4e8df4131571abf888eb8e5861c81be08bdf752 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 26 Sep 2019 13:28:44 -0500 Subject: [PATCH 104/979] JAVA-2452: Allow "none" as a compression option --- changelog/README.md | 1 + .../core/context/DefaultDriverContext.java | 17 ++-- core/src/main/resources/reference.conf | 2 + .../context/DefaultDriverContextTest.java | 80 +++++++++++++++ .../context/MockedDriverContextFactory.java | 68 +++++++++++++ .../context/StartupOptionsBuilderTest.java | 99 ++++++++----------- 6 files changed, 202 insertions(+), 65 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java diff --git a/changelog/README.md b/changelog/README.md index 59059a92a4e..fba8f7b90f7 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2452: Allow "none" as a compression option - [improvement] JAVA-2419: Allow registration of user codecs at runtime - [documentation] JAVA-2384: Add quick overview section to each manual page - [documentation] JAVA-2412: Cover DDL query debouncing in FAQ and upgrade guide diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index f948110ab52..0d51c15e45f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -359,23 +359,22 @@ protected EventBus buildEventBus() { return new EventBus(getSessionName()); } - @SuppressWarnings("unchecked") protected Compressor buildCompressor() { DriverExecutionProfile defaultProfile = getConfig().getDefaultProfile(); - if (defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_COMPRESSION)) { - String name = defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION); - if (name.equalsIgnoreCase("lz4")) { + String name = defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none"); + assert name != null : "should use default value"; + switch (name.toLowerCase()) { + case "lz4": return new Lz4Compressor(this); - } else if (name.equalsIgnoreCase("snappy")) { + case "snappy": return new SnappyCompressor(this); - } else { + case "none": + return Compressor.none(); + default: throw new IllegalArgumentException( String.format( "Unsupported compression algorithm '%s' (from configuration option %s)", name, DefaultDriverOption.PROTOCOL_COMPRESSION.getPath())); - } - } else { - return Compressor.none(); } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index d80d9762010..2cb30241f30 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -715,6 +715,8 @@ datastax-java-driver { # The possible values are: # - lz4: requires net.jpountz.lz4:lz4 in the classpath. # - snappy: requires org.xerial.snappy:snappy-java in the classpath. + # - the string "none" to indicate no compression (this is functionally equivalent to omitting + # the option). # # The driver depends on the compression libraries, but they are optional. Make sure you # redeclare an explicit dependency in your project. 
Refer to the driver's POM or manual for the diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java new file mode 100644 index 00000000000..c87263871be --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java @@ -0,0 +1,80 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.context; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.protocol.Lz4Compressor; +import com.datastax.oss.driver.internal.core.protocol.SnappyCompressor; +import com.datastax.oss.protocol.internal.Compressor; +import com.datastax.oss.protocol.internal.NoopCompressor; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import io.netty.buffer.ByteBuf; +import java.util.Optional; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DefaultDriverContextTest { + + private DefaultDriverContext buildMockedContext(Optional compressionOption) { + + DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn(compressionOption.orElse("none")); + return MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); + } + + private void doCreateCompressorTest(Optional configVal, Class expectedClz) { + + DefaultDriverContext ctx = buildMockedContext(configVal); + Compressor compressor = ctx.getCompressor(); + assertThat(compressor).isNotNull(); + assertThat(compressor).isInstanceOf(expectedClz); + } + + @Test + @DataProvider({"lz4", "lZ4", "Lz4", "LZ4"}) + public void should_create_lz4_compressor(String name) { + + doCreateCompressorTest(Optional.of(name), Lz4Compressor.class); + } + + @Test + @DataProvider({"snappy", "SNAPPY", "sNaPpY", "SNapPy"}) + public void should_create_snappy_compressor(String name) { + + doCreateCompressorTest(Optional.of(name), SnappyCompressor.class); + } + + @Test + 
public void should_create_noop_compressor_if_undefined() { + + doCreateCompressorTest(Optional.empty(), NoopCompressor.class); + } + + @Test + @DataProvider({"none", "NONE", "NoNe", "nONe"}) + public void should_create_noop_compressor_if_defined_as_none(String name) { + + doCreateCompressorTest(Optional.of(name), NoopCompressor.class); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java new file mode 100644 index 00000000000..c32dfe10e3e --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.context; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import java.util.Optional; + +class MockedDriverContextFactory { + + public static DefaultDriverContext defaultDriverContext() { + return defaultDriverContext(Optional.empty()); + } + + public static DefaultDriverContext defaultDriverContext( + Optional profileOption) { + + /* If the caller provided a profile use that, otherwise make a new one */ + final DriverExecutionProfile profile = + profileOption.orElseGet( + () -> { + DriverExecutionProfile blankProfile = mock(DriverExecutionProfile.class); + when(blankProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + return blankProfile; + }); + + /* Setup machinery to connect the input DriverExecutionProfile to the config loader */ + final DriverConfig driverConfig = mock(DriverConfig.class); + final DriverConfigLoader configLoader = mock(DriverConfigLoader.class); + when(configLoader.getInitialConfig()).thenReturn(driverConfig); + when(driverConfig.getDefaultProfile()).thenReturn(profile); + + ProgrammaticArguments args = + ProgrammaticArguments.builder() + .withNodeStateListener(mock(NodeStateListener.class)) + .withSchemaChangeListener(mock(SchemaChangeListener.class)) + .withRequestTracker(mock(RequestTracker.class)) + 
.withLocalDatacenters(Maps.newHashMap()) + .withNodeFilters(Maps.newHashMap()) + .withClassLoader(mock(ClassLoader.class)) + .build(); + return new DefaultDriverContext(configLoader, args); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java index 21eea2aa331..c9f1952d83f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java @@ -16,67 +16,34 @@ package com.datastax.oss.driver.internal.core.context; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfig; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.shaded.guava.common.collect.Lists; -import com.datastax.oss.driver.shaded.guava.common.collect.Maps; import com.datastax.oss.protocol.internal.request.Startup; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; -import org.junit.Before; +import 
com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.util.Optional; import org.junit.Test; -import org.mockito.Mock; -import org.mockito.MockitoAnnotations; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class StartupOptionsBuilderTest { - private DefaultDriverContext defaultDriverContext; + private DefaultDriverContext buildMockedContext(String compression) { - // Mocks for instantiating the default driver context - @Mock private DriverConfigLoader configLoader; - private List> typeCodecs = Lists.newArrayList(); - @Mock private NodeStateListener nodeStateListener; - @Mock private SchemaChangeListener schemaChangeListener; - @Mock private RequestTracker requestTracker; - private Map localDatacenters = Maps.newHashMap(); - private Map> nodeFilters = Maps.newHashMap(); - @Mock private ClassLoader classLoader; - @Mock private DriverConfig driverConfig; - @Mock private DriverExecutionProfile defaultProfile; - - @Before - public void before() { - MockitoAnnotations.initMocks(this); - when(configLoader.getInitialConfig()).thenReturn(driverConfig); - when(driverConfig.getDefaultProfile()).thenReturn(defaultProfile); - } - - private void buildDriverContext() { - defaultDriverContext = - new DefaultDriverContext( - configLoader, - typeCodecs, - nodeStateListener, - schemaChangeListener, - requestTracker, - localDatacenters, - nodeFilters, - classLoader); + DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn(compression); + return MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); } private void assertDefaultStartupOptions(Startup startup) { + assertThat(startup.options).containsEntry(Startup.CQL_VERSION_KEY, "3.0.0"); assertThat(startup.options) .containsEntry( @@ -87,22 +54,42 @@ private void 
assertDefaultStartupOptions(Startup startup) { } @Test - public void should_build_minimal_startup_options() { - buildDriverContext(); - Startup startup = new Startup(defaultDriverContext.getStartupOptions()); + public void should_build_startup_options_with_no_compression_if_undefined() { + + DefaultDriverContext ctx = MockedDriverContextFactory.defaultDriverContext(); + Startup startup = new Startup(ctx.getStartupOptions()); + assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); + assertDefaultStartupOptions(startup); + } + + @Test + public void should_build_startup_options_with_no_compression_if_defined_as_none() { + + DefaultDriverContext ctx = buildMockedContext("none"); + Startup startup = new Startup(ctx.getStartupOptions()); assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); assertDefaultStartupOptions(startup); } @Test - public void should_build_startup_options_with_compression() { - when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_COMPRESSION)) - .thenReturn(Boolean.TRUE); - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION)).thenReturn("lz4"); - buildDriverContext(); - Startup startup = new Startup(defaultDriverContext.getStartupOptions()); + @DataProvider({"lz4", "snappy"}) + public void should_build_startup_options(String compression) { + + DefaultDriverContext ctx = buildMockedContext(compression); + Startup startup = new Startup(ctx.getStartupOptions()); // assert the compression option is present - assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, "lz4"); + assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, compression); assertDefaultStartupOptions(startup); } + + @Test + public void should_fail_to_build_startup_options_with_invalid_compression() { + + assertThatIllegalArgumentException() + .isThrownBy( + () -> { + DefaultDriverContext ctx = buildMockedContext("foobar"); + new Startup(ctx.getStartupOptions()); + }); + } } From 
6b4ebea645f7ffb90f4a79d18b07c93d7e6a99d7 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 2 Oct 2019 08:21:59 -0700 Subject: [PATCH 105/979] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR --- changelog/README.md | 1 + .../oss/driver/internal/core/DefaultMavenCoordinates.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 921ba9f6791..264d70dee79 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.1 +- [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR - [bug] JAVA-2454: Handle "empty" CQL type while parsing schema - [improvement] JAVA-2455: Improve logging of schema refresh errors - [documentation] JAVA-2429: Document expected types on DefaultDriverOption diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultMavenCoordinates.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultMavenCoordinates.java index b24a12cb940..a3d529a23cf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultMavenCoordinates.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultMavenCoordinates.java @@ -51,7 +51,7 @@ public static DefaultMavenCoordinates buildFromResource(URL resource) { String artifactId = props.getProperty("driver.artifactId"); String version = props.getProperty("driver.version"); if (ByteBuf.class.getPackage().getName().contains("com.datastax.oss.driver.shaded")) { - version += "-shaded"; + artifactId += "-shaded"; } return new DefaultMavenCoordinates(name, groupId, artifactId, Version.parse(version)); } catch (IOException e) { From 15d737ba9f4fbc79b2220d87f9184272370a6ba3 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 2 Oct 2019 08:35:23 -0700 Subject: [PATCH 106/979] Fix changelog --- changelog/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 264d70dee79..092022ae842 100644 --- 
a/changelog/README.md +++ b/changelog/README.md @@ -2,9 +2,12 @@ -### 4.2.1 +### 4.2.2 (in progress) - [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR + +### 4.2.1 + - [bug] JAVA-2454: Handle "empty" CQL type while parsing schema - [improvement] JAVA-2455: Improve logging of schema refresh errors - [documentation] JAVA-2429: Document expected types on DefaultDriverOption From 16209514c210fd71a4397745f86d5b0d726941e3 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 2 Oct 2019 15:19:15 -0700 Subject: [PATCH 107/979] Make ChannelFactory.protocolVersion volatile --- .../oss/driver/internal/core/channel/ChannelFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index cd936bd4b0e..2dabeb5204d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -58,7 +58,7 @@ public class ChannelFactory { protected final InternalDriverContext context; /** either set from the configuration, or null and will be negotiated */ - @VisibleForTesting ProtocolVersion protocolVersion; + @VisibleForTesting volatile ProtocolVersion protocolVersion; @VisibleForTesting volatile String clusterName; From 5c29269b96e57c09224e0cde19a57d17d5761e6c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 3 Oct 2019 15:16:52 +0200 Subject: [PATCH 108/979] JAVA-2398: Improve support for optional dependencies in OSGi --- changelog/README.md | 1 + core-shaded/pom.xml | 28 +++++++++++++++++++++------- core/pom.xml | 18 +++++++++++++----- manual/osgi/README.md | 6 ++++++ 4 files changed, 41 insertions(+), 12 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 3e87aa70311..3d9b53b3c93 100644 --- a/changelog/README.md +++ 
b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2398: Improve support for optional dependencies in OSGi - [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR - [improvement] JAVA-2452: Allow "none" as a compression option - [improvement] JAVA-2419: Allow registration of user codecs at runtime diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4777f67e870..8255077794f 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -239,17 +239,31 @@ (so reflection-based loading of policies works) --> * - + !com.datastax.oss.driver.shaded.netty.*, - !jnr.*, + !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, + + jnr.*;resolution:=optional, + !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, diff --git a/core/pom.xml b/core/pom.xml index a4ff504af86..d63e89028b9 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -180,14 +180,22 @@ (so reflection-based loading of policies works) --> * - + !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, - !jnr.*, + + jnr.*;resolution:=optional, * diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 57e93f88f8e..98a8716d75f 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -7,6 +7,12 @@ valid OSGi bundles: - `java-driver-query-builder` - `java-driver-core-shaded` +Note: some of the driver dependencies are not valid OSGi bundles. Most of them are optional, and the +driver can work properly without them; in such cases, the corresponding packages are declared with +optional resolution in `Import-Package` directives. However, if you need to access such packages in +an OSGi container you MUST wrap the corresponding jar in a valid OSGi bundle and make it available +for provisioning to the OSGi runtime. 
+ ## Using the shaded jar `java-driver-core-shaded` shares the same bundle name as `java-driver-core` From 90964438ff3170b42fdc473fea27606f4ede6184 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 4 Oct 2019 13:27:16 +0200 Subject: [PATCH 109/979] Remove references to deprecated Java6Assertions class --- .../com/datastax/oss/driver/internal/core/os/NativeTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java index b34015f31aa..56e7763486b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java @@ -15,7 +15,7 @@ */ package com.datastax.oss.driver.internal.core.os; -import static org.assertj.core.api.Java6Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; From 7d1dbeb1ca73afe67be1c12c8059272d77a34f79 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 4 Oct 2019 14:28:13 -0500 Subject: [PATCH 110/979] JAVA-2444: Add method setRoutingKey(ByteBuffer...) to StatementBuilder (#1338) JAVA-2444: Add method setRoutingKey(ByteBuffer...) to StatementBuilder --- changelog/README.md | 1 + .../driver/api/core/cql/StatementBuilder.java | 7 ++ .../api/core/cql/StatementBuilderTest.java | 64 +++++++++++++------ 3 files changed, 53 insertions(+), 19 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 3d9b53b3c93..c047470d476 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2444: Add method setRoutingKey(ByteBuffer...) 
to StatementBuilder - [improvement] JAVA-2398: Improve support for optional dependencies in OSGi - [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR - [improvement] JAVA-2452: Allow "none" as a compression option diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 0db0754b937..362a63b51d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.internal.core.util.RoutingKey; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -123,6 +124,12 @@ public SelfT setRoutingKey(@Nullable ByteBuffer routingKey) { return self; } + /** @see Statement#setRoutingKey(ByteBuffer...) */ + @NonNull + public SelfT setRoutingKey(@NonNull ByteBuffer... 
newRoutingKeyComponents) { + return setRoutingKey(RoutingKey.compose(newRoutingKeyComponents)); + } + /** @see Statement#setRoutingToken(Token) */ @NonNull public SelfT setRoutingToken(@Nullable Token routingToken) { diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java index 93745f155da..fd5830cfd38 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementBuilderTest.java @@ -16,61 +16,87 @@ package com.datastax.oss.driver.api.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import java.nio.ByteBuffer; import org.junit.Test; public class StatementBuilderTest { - private static class NullStatementBuilder - extends StatementBuilder { + private static class MockSimpleStatementBuilder + extends StatementBuilder { - public NullStatementBuilder() { + public MockSimpleStatementBuilder() { super(); } - public NullStatementBuilder(SimpleStatement template) { + public MockSimpleStatementBuilder(SimpleStatement template) { super(template); } @Override public SimpleStatement build() { - return null; + + SimpleStatement rv = mock(SimpleStatement.class); + when(rv.isTracing()).thenReturn(this.tracing); + when(rv.getRoutingKey()).thenReturn(this.routingKey); + return rv; } } @Test public void should_handle_set_tracing_without_args() { - NullStatementBuilder builder = new NullStatementBuilder(); - assertThat(builder.tracing).isFalse(); + MockSimpleStatementBuilder builder = new MockSimpleStatementBuilder(); + assertThat(builder.build().isTracing()).isFalse(); builder.setTracing(); - assertThat(builder.tracing).isTrue(); + assertThat(builder.build().isTracing()).isTrue(); } @Test 
public void should_handle_set_tracing_with_args() { - NullStatementBuilder builder = new NullStatementBuilder(); - assertThat(builder.tracing).isFalse(); + MockSimpleStatementBuilder builder = new MockSimpleStatementBuilder(); + assertThat(builder.build().isTracing()).isFalse(); builder.setTracing(true); - assertThat(builder.tracing).isTrue(); + assertThat(builder.build().isTracing()).isTrue(); builder.setTracing(false); - assertThat(builder.tracing).isFalse(); + assertThat(builder.build().isTracing()).isFalse(); } @Test - public void should_override_template() { + public void should_override_set_tracing_in_template() { SimpleStatement template = SimpleStatement.builder("select * from system.peers").build(); - NullStatementBuilder builder = new NullStatementBuilder(template); - assertThat(builder.tracing).isFalse(); + MockSimpleStatementBuilder builder = new MockSimpleStatementBuilder(template); + assertThat(builder.build().isTracing()).isFalse(); builder.setTracing(true); - assertThat(builder.tracing).isTrue(); + assertThat(builder.build().isTracing()).isTrue(); template = SimpleStatement.builder("select * from system.peers").setTracing().build(); - builder = new NullStatementBuilder(template); - assertThat(builder.tracing).isTrue(); + builder = new MockSimpleStatementBuilder(template); + assertThat(builder.build().isTracing()).isTrue(); builder.setTracing(false); - assertThat(builder.tracing).isFalse(); + assertThat(builder.build().isTracing()).isFalse(); + } + + @Test + public void should_match_set_routing_key_vararg() { + + ByteBuffer buff1 = ByteBuffer.wrap("the quick brown fox".getBytes(Charsets.UTF_8)); + ByteBuffer buff2 = ByteBuffer.wrap("jumped over the lazy dog".getBytes(Charsets.UTF_8)); + + Statement expectedStmt = + SimpleStatement.builder("select * from system.peers").build().setRoutingKey(buff1, buff2); + + MockSimpleStatementBuilder builder = new MockSimpleStatementBuilder(); + Statement builderStmt = builder.setRoutingKey(buff1, buff2).build(); + 
assertThat(expectedStmt.getRoutingKey()).isEqualTo(builderStmt.getRoutingKey()); + + /* Confirm that order matters here */ + builderStmt = builder.setRoutingKey(buff2, buff1).build(); + assertThat(expectedStmt.getRoutingKey()).isNotEqualTo(builderStmt.getRoutingKey()); } } From b0d2fcaa42c28762ff5af0cde787f6e4016b4cb6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 7 Oct 2019 16:31:54 -0700 Subject: [PATCH 111/979] JAVA-2475: Fix message size when query string contains Unicode surrogates --- changelog/README.md | 1 + pom.xml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 092022ae842..4c052b7f97a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.2.2 (in progress) +- [bug] JAVA-2475: Fix message size when query string contains Unicode surrogates - [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR ### 4.2.1 diff --git a/pom.xml b/pom.xml index e6540f657b8..428be76f3e7 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,7 @@ 25.1-jre 2.1.11 4.0.5 - 1.4.6 + 1.4.7 4.1.39.Final 1.7.26 From 817c8d6abd90d43772a9f6fbed5089c668378451 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 8 Oct 2019 08:50:58 -0700 Subject: [PATCH 112/979] Prepare changelog for release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 4c052b7f97a..74f2791fe59 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.2.2 (in progress) +### 4.2.2 - [bug] JAVA-2475: Fix message size when query string contains Unicode surrogates - [bug] JAVA-2470: Fix Session.OSS_DRIVER_COORDINATES for shaded JAR From 99d18779ab58db596f65e5b2eec1a7eb341b3313 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 8 Oct 2019 08:54:44 -0700 Subject: [PATCH 113/979] [maven-release-plugin] prepare release 4.2.2 --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 
+- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 6fab61b1d90..be04ab8e8cb 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 6b90832da81..5185df95e8d 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 2ad184dd3e6..7424cd07327 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 128fa81ebcb..2536d345ce2 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.2-SNAPSHOT + 4.2.2 java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index e0af9ac8a62..a621672b062 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 03f7d48cc17..0056d94ae27 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 0e68830ca8f..9902e5690f0 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-mapper-runtime diff --git 
a/pom.xml b/pom.xml index 428be76f3e7..8b9f564d02a 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.2.0 + 4.2.2 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 6a3c0664030..f885f2bbcb8 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e8043809721..92378f32900 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2-SNAPSHOT + 4.2.2 java-driver-test-infra From 4d380600bbfff362150a8706ebe9e6c0ce890a6b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 8 Oct 2019 08:56:22 -0700 Subject: [PATCH 114/979] [maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index be04ab8e8cb..d75a4f0e216 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 5185df95e8d..eb409ace8f3 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 7424cd07327..3cdb45e1b86 
100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 2536d345ce2..f462993ad8d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.2.2 + 4.2.3-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a621672b062..8960ad12911 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 0056d94ae27..27927cab52e 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 9902e5690f0..3bf82791cc2 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 8b9f564d02a..2da4ed88065 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) @@ -697,7 +697,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.2.2 + 4.2.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index f885f2bbcb8..0caba2a80be 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 92378f32900..4ac4ecb5be2 100644 --- 
a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.2.2 + 4.2.3-SNAPSHOT java-driver-test-infra From 07238d39aa2427550d1ba497933feeb7a2e80fce Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 09:46:54 -0700 Subject: [PATCH 115/979] JAVA-2488: Create dedicated section for prepared statement advantages in manual --- manual/core/statements/prepared/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 5de7a732bac..dd9e337bca7 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -71,6 +71,8 @@ client driver Cassandra |<--------------------------------| | ``` +### Advantages of prepared statements + Beyond saving a bit of parsing overhead on the server, prepared statements have other advantages; the `PREPARED` response also contains useful metadata about the CQL query: From b5919b6318956b13b46adff0d4cafec91795f85c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Oct 2019 14:26:27 +0300 Subject: [PATCH 116/979] Fix typo in field name --- .../driver/internal/core/channel/ProtocolInitHandler.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index 54cb427e365..c9f1993fc34 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -129,7 +129,7 @@ private class InitRequest extends ChannelHandlerRequest { // in the initialization sequence. 
private Step step; private Authenticator authenticator; - private ByteBuffer authReponseToken; + private ByteBuffer authResponseToken; InitRequest(ChannelHandlerContext ctx) { super(ctx, timeoutMillis); @@ -151,7 +151,7 @@ Message getRequest() { case SET_KEYSPACE: return new Query("USE " + options.keyspace.asCql(false)); case AUTH_RESPONSE: - return new AuthResponse(authReponseToken); + return new AuthResponse(authResponseToken); case REGISTER: return new Register(options.eventTypes); default: @@ -184,7 +184,7 @@ void onResponse(Message response) { endPoint, "authenticator threw an exception", error)); } else { step = Step.AUTH_RESPONSE; - authReponseToken = token; + authResponseToken = token; send(); } }, @@ -202,7 +202,7 @@ void onResponse(Message response) { endPoint, "authenticator threw an exception", error)); } else { step = Step.AUTH_RESPONSE; - authReponseToken = token; + authResponseToken = token; send(); } }, From 5409d14ce6360ba2a60e18e014cbb8f607eff8bf Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 11 Oct 2019 09:23:52 -0500 Subject: [PATCH 117/979] JAVA-2438: Improve log messages that format Node instances (#1340) JAVA-2438: Improve log messages that format Node instances (#1340) --- .../internal/core/metadata/DefaultNode.java | 2 +- .../context/MockedDriverContextFactory.java | 2 +- .../core/metadata/DefaultNodeTest.java | 40 +++++++++++++++++++ 3 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index a9f80561438..41fc3a17a6e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -180,7 +180,7 @@ public NodeMetricUpdater 
getMetricUpdater() { @Override public String toString() { - return String.format("%s(%s,%s)", super.toString(), hostId, endPoint); + return String.join("@", getHostId().toString().trim(), getEndPoint().toString().trim()); } /** Note: deliberately not exposed by the public interface. */ diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index c32dfe10e3e..776b227fe8c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -29,7 +29,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.Maps; import java.util.Optional; -class MockedDriverContextFactory { +public class MockedDriverContextFactory { public static DefaultDriverContext defaultDriverContext() { return defaultDriverContext(Optional.empty()); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java new file mode 100644 index 00000000000..66199134c12 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.internal.core.context.MockedDriverContextFactory; +import java.net.InetSocketAddress; +import java.util.UUID; +import org.junit.Test; + +public class DefaultNodeTest { + + @Test + public void should_have_expected_string_representation() { + + String uuidStr = "1e4687e6-f94e-432e-a792-216f89ef265f"; + UUID hostId = UUID.fromString(uuidStr); + EndPoint endPoint = new DefaultEndPoint(new InetSocketAddress("localhost", 9042)); + DefaultNode node = new DefaultNode(endPoint, MockedDriverContextFactory.defaultDriverContext()); + node.hostId = hostId; + + String expected = uuidStr + "@" + "localhost/127.0.0.1:9042"; + assertThat(node.toString()).isEqualTo(expected); + } +} From 972bd0bb3d0bc2f56511dcc3475da90b4a21c235 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 14 Oct 2019 15:25:55 -0700 Subject: [PATCH 118/979] Revisit DefaultNode.toString to avoid NPE hostId can be null in the early stages of initialization. See also JAVA-2505. 
--- .../oss/driver/internal/core/metadata/DefaultNode.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index 41fc3a17a6e..cb6b4648868 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -180,7 +180,10 @@ public NodeMetricUpdater getMetricUpdater() { @Override public String toString() { - return String.join("@", getHostId().toString().trim(), getEndPoint().toString().trim()); + // Include the hash code because this class uses reference equality + return String.format( + "Node(endPoint=%s, hostId=%s, hashCode=%s)", + getEndPoint(), getHostId(), Integer.toHexString(hashCode())); } /** Note: deliberately not exposed by the public interface. */ From c1aea83765b5971510cc44da32334f977fbd4dee Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 14 Oct 2019 15:32:34 -0700 Subject: [PATCH 119/979] Fix unit test --- .../oss/driver/internal/core/metadata/DefaultNodeTest.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java index 66199134c12..21a68f0df9d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java @@ -34,7 +34,10 @@ public void should_have_expected_string_representation() { DefaultNode node = new DefaultNode(endPoint, MockedDriverContextFactory.defaultDriverContext()); node.hostId = hostId; - String expected = uuidStr + "@" + "localhost/127.0.0.1:9042"; + String expected = + String.format( + 
"Node(endPoint=localhost/127.0.0.1:9042, hostId=%s, hashCode=%s)", + uuidStr, Integer.toHexString(node.hashCode())); assertThat(node.toString()).isEqualTo(expected); } } From 92098f335a00ce64c0e4f6e06aaafe0bc91c6427 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 8 Oct 2019 15:01:42 -0500 Subject: [PATCH 120/979] JAVA-2471: Consider DSE version when parsing the schema --- changelog/README.md | 1 + .../datastax/oss/driver/api/core/Version.java | 2 + .../core/metadata/DefaultTopologyMonitor.java | 30 ++-- .../core/metadata/NodeProperties.java | 29 ++++ .../queries/DefaultSchemaQueriesFactory.java | 14 ++ .../DefaultSchemaQueriesFactoryTest.java | 135 ++++++++++++++++++ 6 files changed, 199 insertions(+), 12 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java diff --git a/changelog/README.md b/changelog/README.md index d27d9ebdd0b..2feadd62d82 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2471: Consider DSE version when parsing the schema - [improvement] JAVA-2444: Add method setRoutingKey(ByteBuffer...) 
to StatementBuilder - [improvement] JAVA-2398: Improve support for optional dependencies in OSGi - [improvement] JAVA-2452: Allow "none" as a compression option diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index c1ba41c7345..f8585fb9123 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -48,6 +48,8 @@ public class Version implements Comparable { @NonNull public static final Version V3_0_0 = Objects.requireNonNull(parse("3.0.0")); @NonNull public static final Version V4_0_0 = Objects.requireNonNull(parse("4.0.0")); + @NonNull public static final Version V6_7_0 = Objects.requireNonNull(parse("6.7.0")); + private final int major; private final int minor; private final int patch; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 2b146134532..459f36d4027 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.metadata; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.EndPoint; @@ -347,18 +348,23 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( listenAddress = new InetSocketAddress(listenInetAddress, listenPort); } - return DefaultNodeInfo.builder() - .withEndPoint(endPoint) - .withBroadcastRpcAddress(broadcastRpcAddress) - .withBroadcastAddress(broadcastAddress) - .withListenAddress(listenAddress) - 
.withDatacenter(row.getString("data_center")) - .withRack(row.getString("rack")) - .withCassandraVersion(row.getString("release_version")) - .withTokens(row.getSetOfString("tokens")) - .withPartitioner(row.getString("partitioner")) - .withHostId(Objects.requireNonNull(row.getUuid("host_id"))) - .withSchemaVersion(row.getUuid("schema_version")); + DefaultNodeInfo.Builder rv = + DefaultNodeInfo.builder() + .withEndPoint(endPoint) + .withBroadcastRpcAddress(broadcastRpcAddress) + .withBroadcastAddress(broadcastAddress) + .withListenAddress(listenAddress) + .withDatacenter(row.getString("data_center")) + .withRack(row.getString("rack")) + .withCassandraVersion(row.getString("release_version")) + .withTokens(row.getSetOfString("tokens")) + .withPartitioner(row.getString("partitioner")) + .withHostId(Objects.requireNonNull(row.getUuid("host_id"))) + .withSchemaVersion(row.getUuid("schema_version")); + + return row.contains("dse_version") + ? rv.withExtra(NodeProperties.DSE_VERSION, Version.parse(row.getString("dse_version"))) + : rv; } // Called when a new node is being added; the peers table is keyed by broadcast_address, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java new file mode 100644 index 00000000000..b079b1df897 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.api.core.Version; + +/** The keys for the additional properties stored in {@link Node#getExtras()}. */ +public class NodeProperties { + + /** + * The DSE version that the node is running. + * + *

      The associated value in {@link Node#getExtras()} is a {@link Version}). + */ + public static final String DSE_VERSION = "DSE_VERSION"; +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java index aee7ccaa5cb..801d9931998 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.NodeProperties; import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; @@ -81,6 +82,19 @@ protected SchemaQueries newInstance( } else if (version.compareTo(Version.V4_0_0) < 0) { return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); } else { + + // A bit of custom logic for DSE 6.0.x. These versions report a Cassandra version of 4.0.0 + // but don't have support for system_virtual_schema tables supported by that version. 
To + // compensate we return the Cassandra 3 schema queries here for those versions + if (node.getExtras().containsKey(NodeProperties.DSE_VERSION)) { + + Object dseVersionObj = node.getExtras().get(NodeProperties.DSE_VERSION); + assert (dseVersionObj instanceof Version); + if (((Version) dseVersionObj).compareTo(Version.V6_7_0) < 0) { + + return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); + } + } return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java new file mode 100644 index 00000000000..1c221d448e1 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java @@ -0,0 +1,135 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.schema.queries; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.NodeProperties; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DefaultSchemaQueriesFactoryTest { + + enum Expected { + CASS_21(Cassandra21SchemaQueries.class), + CASS_22(Cassandra22SchemaQueries.class), + CASS_3(Cassandra3SchemaQueries.class), + CASS_4(Cassandra4SchemaQueries.class); + + final Class clz; + + Expected(Class clz) { + this.clz = clz; + } + + public Class getClz() { + return clz; + } + } + + private static ImmutableList> cassandraVersions = + ImmutableList.>builder() + .add(ImmutableList.of("2.1.0", Optional.empty(), Expected.CASS_21)) + .add(ImmutableList.of("2.2.0", Optional.empty(), Expected.CASS_22)) + .add(ImmutableList.of("2.2.1", Optional.empty(), Expected.CASS_22)) + // Not a real version, just documenting behaviour of existing impl + .add(ImmutableList.of("2.3.0", Optional.empty(), Expected.CASS_22)) + // We now return you to real 
versions + .add(ImmutableList.of("3.0.0", Optional.empty(), Expected.CASS_3)) + .add(ImmutableList.of("3.0.1", Optional.empty(), Expected.CASS_3)) + .add(ImmutableList.of("3.1.0", Optional.empty(), Expected.CASS_3)) + .add(ImmutableList.of("4.0.0", Optional.empty(), Expected.CASS_4)) + .add(ImmutableList.of("4.0.1", Optional.empty(), Expected.CASS_4)) + .add(ImmutableList.of("4.1.0", Optional.empty(), Expected.CASS_4)) + .build(); + + private static ImmutableList> dseVersions = + ImmutableList.>builder() + // DSE 6.0.0 + .add(ImmutableList.of("4.0.0.2284", Optional.of("6.0.0"), Expected.CASS_3)) + // DSE 6.0.1 + .add(ImmutableList.of("4.0.0.2349", Optional.of("6.0.1"), Expected.CASS_3)) + // DSE 6.0.2 moved to DSE version (minus dots) in an extra element + .add(ImmutableList.of("4.0.0.602", Optional.of("6.0.2"), Expected.CASS_3)) + // DSE 6.7.0 continued with the same idea + .add(ImmutableList.of("4.0.0.670", Optional.of("6.7.0"), Expected.CASS_4)) + // DSE 6.8.0 does the same + .add(ImmutableList.of("4.0.0.680", Optional.of("6.8.0"), Expected.CASS_4)) + .build(); + + private static ImmutableList> allVersions = + ImmutableList.>builder() + .addAll(cassandraVersions) + .addAll(dseVersions) + .build(); + + @DataProvider(format = "%m %p[1] => %p[0]") + public static Iterable expected() { + + return allVersions; + } + + @Test + @UseDataProvider("expected") + public void should_return_correct_schema_queries_impl( + String cassandraVersion, Optional dseVersion, Expected expected) { + + final Node mockNode = mock(Node.class); + when(mockNode.getCassandraVersion()).thenReturn(Version.parse(cassandraVersion)); + dseVersion.ifPresent( + versionStr -> { + when(mockNode.getExtras()) + .thenReturn( + ImmutableMap.of( + NodeProperties.DSE_VERSION, Version.parse(versionStr))); + }); + + DefaultSchemaQueriesFactory factory = buildFactory(); + + @SuppressWarnings("unchecked") + SchemaQueries queries = + factory.newInstance(mockNode, mock(DriverChannel.class), 
mock(CompletableFuture.class)); + + assertThat(queries.getClass()).isEqualTo(expected.getClz()); + } + + private DefaultSchemaQueriesFactory buildFactory() { + + final DriverExecutionProfile mockProfile = mock(DriverExecutionProfile.class); + final DriverConfig mockConfig = mock(DriverConfig.class); + when(mockConfig.getDefaultProfile()).thenReturn(mockProfile); + final InternalDriverContext mockInternalCtx = mock(InternalDriverContext.class); + when(mockInternalCtx.getConfig()).thenReturn(mockConfig); + + return new DefaultSchemaQueriesFactory(mockInternalCtx); + } +} From 8527344d01e0167015caa78a61d150e350dd9be3 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 9 Oct 2019 16:29:22 -0500 Subject: [PATCH 121/979] JAVA-1708: Support DSE everywhere replication strategy --- changelog/README.md | 1 + .../DefaultReplicationStrategyFactory.java | 2 + .../token/EverywhereReplicationStrategy.java | 40 +++++++++++++++++++ 3 files changed, 43 insertions(+) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java diff --git a/changelog/README.md b/changelog/README.md index 2feadd62d82..9e29df1be65 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-1708: Support DSE "everywhere" replication strategy - [improvement] JAVA-2471: Consider DSE version when parsing the schema - [improvement] JAVA-2444: Add method setRoutingKey(ByteBuffer...) 
to StatementBuilder - [improvement] JAVA-2398: Improve support for optional dependencies in OSGi diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/DefaultReplicationStrategyFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/DefaultReplicationStrategyFactory.java index 603d1af07bf..014dde6c9c4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/DefaultReplicationStrategyFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/DefaultReplicationStrategyFactory.java @@ -41,6 +41,8 @@ public ReplicationStrategy newInstance(Map replicationConfig) { return new SimpleReplicationStrategy(replicationConfig); case "org.apache.cassandra.locator.NetworkTopologyStrategy": return new NetworkTopologyReplicationStrategy(replicationConfig, logPrefix); + case "org.apache.cassandra.locator.EverywhereStrategy": + return new EverywhereReplicationStrategy(); default: throw new IllegalArgumentException("Unsupported replication strategy: " + strategyClass); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java new file mode 100644 index 00000000000..5cce9dacc70 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata.token; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class EverywhereReplicationStrategy implements ReplicationStrategy { + + @Override + public SetMultimap computeReplicasByToken( + Map tokenToPrimary, List ring) { + ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); + Collection nodes = tokenToPrimary.values(); + for (Token token : tokenToPrimary.keySet()) { + result = result.putAll(token, nodes); + } + return result.build(); + } +} From 7b00931257f7af43cb5459ebfc605a607f9e7a10 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 15 Oct 2019 09:44:25 +0300 Subject: [PATCH 122/979] Fix failing tests in PreparedStatementIT under C* 2.1 (JAVA-2443 follow-up) --- .../driver/core/cql/PreparedStatementIT.java | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 298987becab..2e25f600f03 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java 
+++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -20,7 +20,6 @@ import com.codahale.metrics.Gauge; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; @@ -120,13 +119,7 @@ public void should_have_non_empty_result_definitions_for_insert_query_with_bound PreparedStatement prepared = session.prepare("INSERT INTO prepared_statement_test (a, b, c) VALUES (?, ?, ?)"); assertThat(prepared.getVariableDefinitions()).hasSize(3); - if (sessionRule.session().getContext().getProtocolVersion().getCode() - >= DefaultProtocolVersion.V4.getCode()) { - // partition key indices were introduced in V4 - assertThat(prepared.getPartitionKeyIndices()).hasSize(1); - } else { - assertThat(prepared.getPartitionKeyIndices()).isEmpty(); - } + assertThat(prepared.getPartitionKeyIndices()).hasSize(1); assertThat(prepared.getResultSetDefinitions()).isEmpty(); } } @@ -148,13 +141,7 @@ public void should_have_non_empty_variable_definitions_for_select_query_with_bou PreparedStatement prepared = session.prepare("SELECT a,b,c FROM prepared_statement_test WHERE a = ?"); assertThat(prepared.getVariableDefinitions()).hasSize(1); - if (sessionRule.session().getContext().getProtocolVersion().getCode() - >= DefaultProtocolVersion.V4.getCode()) { - // partition key indices were introduced in V4 - assertThat(prepared.getPartitionKeyIndices()).hasSize(1); - } else { - assertThat(prepared.getPartitionKeyIndices()).isEmpty(); - } + assertThat(prepared.getPartitionKeyIndices()).hasSize(1); assertThat(prepared.getResultSetDefinitions()).hasSize(3); } } From 37545e6ef5863947b63333f9fe95f405802731cd Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 14 Oct 2019 11:24:34 -0700 Subject: [PATCH 123/979] Fix error 
handling in ConnectIT Also increase the timeout for CI. --- .../src/test/java/com/datastax/oss/driver/core/ConnectIT.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 6475fab9f84..2a639ac70e5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -107,9 +107,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce SIMULACRON_RULE.cluster().acceptConnections(); // Then this doesn't throw - Session session = sessionFuture.get(2, TimeUnit.SECONDS); - - session.close(); + try (Session session = sessionFuture.get(30, TimeUnit.SECONDS)) {} } /** From beaad5f4bc669b6099c7e6c21f73be4a542023fe Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Mon, 21 Oct 2019 00:57:12 -0700 Subject: [PATCH 124/979] JAVA-2505: Annotate Node.getHostId() as nullable (#1350) --- changelog/README.md | 1 + core/revapi.json | 14 +++++++++++++ .../oss/driver/api/core/metadata/Node.java | 21 +++++++++++++++++-- .../internal/core/metadata/DefaultNode.java | 2 +- 4 files changed, 35 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 9e29df1be65..3aae6c57055 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2505: Annotate Node.getHostId() as nullable - [improvement] JAVA-1708: Support DSE "everywhere" replication strategy - [improvement] JAVA-2471: Consider DSE version when parsing the schema - [improvement] JAVA-2444: Add method setRoutingKey(ByteBuffer...) 
to StatementBuilder diff --git a/core/revapi.json b/core/revapi.json index 82b812baa93..a2e32e304e5 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4816,6 +4816,20 @@ "old": "field com\\.datastax\\.oss\\.driver\\.api\\.core\\.Version.V.*", "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", "justification": "Marking constants as non-null doesn't break existing code" + }, + { + "code": "java.annotation.removed", + "old": "method java.util.UUID com.datastax.oss.driver.api.core.metadata.Node::getHostId()", + "new": "method java.util.UUID com.datastax.oss.driver.api.core.metadata.Node::getHostId()", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "JAVA-2505: Annotate Node.getHostId() as nullable" + }, + { + "code": "java.annotation.added", + "old": "method java.util.UUID com.datastax.oss.driver.api.core.metadata.Node::getHostId()", + "new": "method java.util.UUID com.datastax.oss.driver.api.core.metadata.Node::getHostId()", + "annotation": "@edu.umd.cs.findbugs.annotations.Nullable", + "justification": "JAVA-2505: Annotate Node.getHostId() as nullable" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java index 69959473d83..9b67e616a5a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Node.java @@ -184,9 +184,26 @@ public interface Node { * The host ID that is assigned to this node by Cassandra. This value can be used to uniquely * identify a node even when the underling IP address changes. * - *

      This information is always present. + *

      This information is always present once the session has initialized. However, there is a + * narrow corner case where a driver client can observe a null value: if a {@link + * NodeStateListener} is registered, the very first {@code onUp} call will reference a node + * that has a null id (that node is the initial contact point, and the driver hasn't read host ids + * from {@code system.local} and {@code system.peers} yet). Beyond that point — including + * any other {@code onUp} call — the host id will always be present. + * + *

      +   * CqlSession session = CqlSession.builder()
      +   *     .withNodeStateListener(
      +   *         new NodeStateListenerBase() {
      +   *           @Override
      +   *           public void onUp(@NonNull Node node) {
      +   *             // node.getHostId() == null for the first invocation only
      +   *           }
      +   *         })
      +   *     .build();
      +   * 
      */ - @NonNull + @Nullable UUID getHostId(); /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index cb6b4648868..dc7036a8777 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -129,7 +129,7 @@ public Version getCassandraVersion() { return cassandraVersion; } - @NonNull + @Nullable @Override public UUID getHostId() { return hostId; From 54a67856444eed35b106fbb4a267d5a542a8cf39 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Mon, 21 Oct 2019 03:35:18 -0500 Subject: [PATCH 125/979] JAVA-2480: Upgrade Jackson to 2.10.0 (#1349) --- changelog/README.md | 1 + .../json/codecs/JacksonJsonCodec.java | 22 +++++++++---------- .../driver/querybuilder/JacksonJsonCodec.java | 18 +++++++-------- pom.xml | 4 ++-- 4 files changed, 23 insertions(+), 22 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 3aae6c57055..a9079650d2c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2480: Upgrade Jackson to 2.10.0 - [documentation] JAVA-2505: Annotate Node.getHostId() as nullable - [improvement] JAVA-1708: Support DSE "everywhere" replication strategy - [improvement] JAVA-2471: Consider DSE version when parsing the schema diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java index 763d325f7c9..a9e4d2c8078 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java @@ -24,7 +24,7 @@ import com.datastax.oss.protocol.internal.util.Bytes; import 
com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.type.TypeFactory; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -52,28 +52,28 @@ */ public class JacksonJsonCodec implements TypeCodec { - private final ObjectMapper objectMapper; + private final JsonMapper jsonMapper; private final GenericType javaType; /** * Creates a new instance for the provided {@code javaClass}, using a default, newly-allocated - * {@link ObjectMapper}. + * {@link JsonMapper}. * * @param javaClass the Java class this codec maps to. */ public JacksonJsonCodec(Class javaClass) { - this(javaClass, new ObjectMapper()); + this(javaClass, JsonMapper.builder().build()); } /** * Creates a new instance for the provided {@code javaClass}, and using the provided {@link - * ObjectMapper}. + * JsonMapper}. * * @param javaClass the Java class this codec maps to. 
*/ - public JacksonJsonCodec(Class javaClass, ObjectMapper objectMapper) { + public JacksonJsonCodec(Class javaClass, JsonMapper jsonMapper) { this.javaType = GenericType.of(javaClass); - this.objectMapper = objectMapper; + this.jsonMapper = jsonMapper; } @NonNull @@ -95,7 +95,7 @@ public ByteBuffer encode(@Nullable T value, @NonNull ProtocolVersion protocolVer return null; } try { - return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value)); + return ByteBuffer.wrap(jsonMapper.writeValueAsBytes(value)); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -108,7 +108,7 @@ public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVer return null; } try { - return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); + return jsonMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -122,7 +122,7 @@ public String format(T value) { } String json; try { - json = objectMapper.writeValueAsString(value); + json = jsonMapper.writeValueAsString(value); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -141,7 +141,7 @@ public T parse(String value) { } String json = Strings.unquote(value); try { - return (T) objectMapper.readValue(json, toJacksonJavaType()); + return (T) jsonMapper.readValue(json, toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java index f6ab774dfd3..64f8b6ddcb3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java @@ -24,7 +24,7 @@ import 
com.datastax.oss.protocol.internal.util.Bytes; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.type.TypeFactory; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -33,16 +33,16 @@ public class JacksonJsonCodec implements TypeCodec { - private final ObjectMapper objectMapper; + private final JsonMapper jsonMapper; private final GenericType javaType; JacksonJsonCodec(Class javaClass) { - this(javaClass, new ObjectMapper()); + this(javaClass, JsonMapper.builder().build()); } - private JacksonJsonCodec(Class javaClass, ObjectMapper objectMapper) { + private JacksonJsonCodec(Class javaClass, JsonMapper objectMapper) { this.javaType = GenericType.of(javaClass); - this.objectMapper = objectMapper; + this.jsonMapper = objectMapper; } @NonNull @@ -64,7 +64,7 @@ public ByteBuffer encode(@Nullable T value, @NonNull ProtocolVersion protocolVer return null; } try { - return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value)); + return ByteBuffer.wrap(jsonMapper.writeValueAsBytes(value)); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -77,7 +77,7 @@ public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVer return null; } try { - return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); + return jsonMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -91,7 +91,7 @@ public String format(T value) { } String json; try { - json = objectMapper.writeValueAsString(value); + json = jsonMapper.writeValueAsString(value); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -110,7 +110,7 @@ public T parse(String value) { 
} String json = Strings.unquote(value); try { - return (T) objectMapper.readValue(json, toJacksonJavaType()); + return (T) jsonMapper.readValue(json, toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } diff --git a/pom.xml b/pom.xml index 44828c22df3..730d66a7fd5 100644 --- a/pom.xml +++ b/pom.xml @@ -69,8 +69,8 @@ 2.5.0 2.0.1 1.1.4 - 2.9.9 - 2.9.9.3 + 2.10.0 + 2.10.0 From cf82b59f9377349f504913724bdc14dc74b7fdd4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 12:57:16 +0300 Subject: [PATCH 126/979] JAVA-2434: Add support for custom cipher suites and host name validation to ProgrammaticSslEngineFactory (#1347) --- changelog/README.md | 1 + core/revapi.json | 7 +++ .../api/core/session/SessionBuilder.java | 11 ++-- .../ssl/ProgrammaticSslEngineFactory.java | 54 +++++++++++++++++-- 4 files changed, 66 insertions(+), 7 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index a9079650d2c..7e22ebcd652 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2434: Add support for custom cipher suites and host name validation to ProgrammaticSslEngineFactory - [improvement] JAVA-2480: Upgrade Jackson to 2.10.0 - [documentation] JAVA-2505: Annotate Node.getHostId() as nullable - [improvement] JAVA-1708: Support DSE "everywhere" replication strategy diff --git a/core/revapi.json b/core/revapi.json index a2e32e304e5..478a972595d 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4830,6 +4830,13 @@ "new": "method java.util.UUID com.datastax.oss.driver.api.core.metadata.Node::getHostId()", "annotation": "@edu.umd.cs.findbugs.annotations.Nullable", "justification": "JAVA-2505: Annotate Node.getHostId() as nullable" + }, + { + "code": "java.annotation.added", + "old": "parameter void com.datastax.oss.driver.api.core.ssl.ProgrammaticSslEngineFactory::(===javax.net.ssl.SSLContext===)", + "new": "parameter void 
com.datastax.oss.driver.api.core.ssl.ProgrammaticSslEngineFactory::(===javax.net.ssl.SSLContext===)", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "JAVA-2434: added @NonNull to ProgrammaticSslEngineFactory(SSLContext) constructor" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 40f0e95fcab..e6b16f2716d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -261,6 +261,8 @@ public SelfT withAuthCredentials(@NonNull String username, @NonNull String passw * *

      If the factory is provided programmatically with this method, it overrides the configuration * (that is, the {@code advanced.ssl-engine-factory} option will be ignored). + * + * @see ProgrammaticSslEngineFactory */ @NonNull public SelfT withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { @@ -276,9 +278,12 @@ public SelfT withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { * #withSslEngineFactory(SslEngineFactory)}. * *

      If you use this method, there is no way to customize cipher suites, or turn on host name - * validation. Also, note that SSL engines will be created with advisory peer information ({@link - * SSLContext#createSSLEngine(String, int)}) whenever possible. If you need finer control, write - * your own factory. + * validation. If you need finer control, use {@link #withSslEngineFactory(SslEngineFactory)} + * directly and pass either your own implementation of {@link SslEngineFactory}, or a {@link + * ProgrammaticSslEngineFactory} created with custom cipher suites and/or host name validation. + * + *

      Also, note that SSL engines will be created with advisory peer information ({@link + * SSLContext#createSSLEngine(String, int)}) whenever possible. */ @NonNull public SelfT withSslContext(@Nullable SSLContext sslContext) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java index 01afac32c7e..ba99f3bf7fa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java @@ -18,18 +18,19 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.session.SessionBuilder; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetSocketAddress; import java.net.SocketAddress; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLParameters; /** * An SSL engine factory that allows you to configure the driver programmatically, by passing your * own {@link SSLContext}. * - *

      Unlike the configuration-based approach, this class does not allow you to customize cipher - * suites, or turn on host name validation. Also, note that it will create SSL engines with advisory - * peer information ({@link SSLContext#createSSLEngine(String, int)}) whenever possible. + *

      Note that this class will create SSL engines with advisory peer information ({@link + * SSLContext#createSSLEngine(String, int)}) whenever possible. * *

      If those defaults do not work for you, it should be pretty straightforward to write your own * implementation by extending or duplicating this class. @@ -40,9 +41,46 @@ public class ProgrammaticSslEngineFactory implements SslEngineFactory { protected final SSLContext sslContext; + protected final String[] cipherSuites; + protected final boolean requireHostnameValidation; - public ProgrammaticSslEngineFactory(SSLContext sslContext) { + /** + * Creates an instance with the given {@link SSLContext}, default cipher suites and no host name + * validation. + * + * @param sslContext the {@link SSLContext} to use. + */ + public ProgrammaticSslEngineFactory(@NonNull SSLContext sslContext) { + this(sslContext, null); + } + + /** + * Creates an instance with the given {@link SSLContext} and cipher suites, and no host name + * validation. + * + * @param sslContext the {@link SSLContext} to use. + * @param cipherSuites the cipher suites to use, or null to use the default ones. + */ + public ProgrammaticSslEngineFactory( + @NonNull SSLContext sslContext, @Nullable String[] cipherSuites) { + this(sslContext, cipherSuites, false); + } + + /** + * Creates an instance with the given {@link SSLContext}, cipher suites and host name validation. + * + * @param sslContext the {@link SSLContext} to use. + * @param cipherSuites the cipher suites to use, or null to use the default ones. + * @param requireHostnameValidation whether to enable host name validation. If enabled, host name + * validation will be done using HTTPS algorithm. 
+ */ + public ProgrammaticSslEngineFactory( + @NonNull SSLContext sslContext, + @Nullable String[] cipherSuites, + boolean requireHostnameValidation) { this.sslContext = sslContext; + this.cipherSuites = cipherSuites; + this.requireHostnameValidation = requireHostnameValidation; } @NonNull @@ -57,6 +95,14 @@ public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { engine = sslContext.createSSLEngine(); } engine.setUseClientMode(true); + if (cipherSuites != null) { + engine.setEnabledCipherSuites(cipherSuites); + } + if (requireHostnameValidation) { + SSLParameters parameters = engine.getSSLParameters(); + parameters.setEndpointIdentificationAlgorithm("HTTPS"); + engine.setSSLParameters(parameters); + } return engine; } From 56c16384c318ea9459656d9e9024e52cce73fbda Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 13:39:25 +0300 Subject: [PATCH 127/979] JAVA-2407: Improve handling of logback configuration files in IDEs (#1319) --- changelog/README.md | 1 + mapper-processor/pom.xml | 5 ++++ .../src/test/resources/logback-test.xml | 28 +++++++++++++++++++ pom.xml | 11 ++++++++ 4 files changed, 45 insertions(+) create mode 100644 mapper-processor/src/test/resources/logback-test.xml diff --git a/changelog/README.md b/changelog/README.md index 7e22ebcd652..68a74566269 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2407: Improve handling of logback configuration files in IDEs - [improvement] JAVA-2434: Add support for custom cipher suites and host name validation to ProgrammaticSslEngineFactory - [improvement] JAVA-2480: Upgrade Jackson to 2.10.0 - [documentation] JAVA-2505: Annotate Node.getHostId() as nullable diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 35487a7a773..eede75e8eee 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -75,6 +75,11 @@ mockito-core test + + ch.qos.logback + logback-classic + test + diff --git 
a/mapper-processor/src/test/resources/logback-test.xml b/mapper-processor/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..e1309c63da5 --- /dev/null +++ b/mapper-processor/src/test/resources/logback-test.xml @@ -0,0 +1,28 @@ + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index 730d66a7fd5..4927ff085d6 100644 --- a/pom.xml +++ b/pom.xml @@ -494,6 +494,9 @@ limitations under the License.]]> maven-surefire-plugin + + ${project.basedir}/src/test/resources/logback-test.xml + usedefaultlisteners @@ -502,6 +505,14 @@ limitations under the License.]]> + + maven-failsafe-plugin + + + ${project.basedir}/src/test/resources/logback-test.xml + + + org.sonatype.plugins nexus-staging-maven-plugin From e4da25d0f3103a2d6b16f83ed23d9b82cf73b8cd Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 13:47:45 +0300 Subject: [PATCH 128/979] Replace deprecated method --- .../oss/driver/internal/core/channel/ConnectInitHandler.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java index 4c7b7f642fe..3194a95557c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java @@ -19,6 +19,7 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.ImmediateEventExecutor; import io.netty.util.concurrent.PromiseCombiner; import java.net.SocketAddress; import net.jcip.annotations.NotThreadSafe; @@ -58,7 +59,7 @@ public void connect( realConnectPromise.addListener(future -> onRealConnect(ctx)); // Make the caller's promise wait on 
the other two: - PromiseCombiner combiner = new PromiseCombiner(); + PromiseCombiner combiner = new PromiseCombiner(ImmediateEventExecutor.INSTANCE); combiner.addAll(new Future[] {realConnectPromise, initPromise}); combiner.finish(callerPromise); } From fcbfef812bd226657b72f4bdfb65663ef77af192 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Thu, 1 Aug 2019 16:13:11 -0500 Subject: [PATCH 129/979] JAVA-2356: Migrate Cloud API to OSS driver --- build.yaml | 1 + core-shaded/pom.xml | 15 ++ core/pom.xml | 4 + .../api/core/config/DefaultDriverOption.java | 2 + .../core/session/ProgrammaticArguments.java | 21 ++- .../api/core/session/SessionBuilder.java | 42 ++++- .../core/config/cloud/DbaasConfig.java | 129 +++++++++++++++ .../core/config/cloud/DbaasConfigUtil.java | 154 ++++++++++++++++++ .../core/context/DefaultDriverContext.java | 9 +- .../core/metadata/DbaasTopologyMonitor.java | 73 +++++++++ .../internal/core/metadata/SniEndPoint.java | 110 +++++++++++++ .../core/ssl/SniSslEngineFactory.java | 62 +++++++ core/src/main/resources/reference.conf | 5 + .../config/cloud/DbaasConfigUtilTest.java | 57 +++++++ .../src/test/resources/config/cloud/creds.zip | Bin 0 -> 389 bytes .../test/resources/config/cloud/metadata.json | 1 + .../oss/driver/api/core/cloud/DbaasIT.java | 54 ++++++ .../driver/api/core/cloud/SniProxyRule.java | 40 +++++ .../driver/api/core/cloud/SniProxyServer.java | 104 ++++++++++++ manual/core/integration/README.md | 23 +++ 20 files changed, 902 insertions(+), 4 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java create mode 100644 core/src/test/resources/config/cloud/creds.zip create mode 100644 core/src/test/resources/config/cloud/metadata.json create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java diff --git a/build.yaml b/build.yaml index 79dd3b2c84e..d9608044a0a 100644 --- a/build.yaml +++ b/build.yaml @@ -13,6 +13,7 @@ build: goals: verify --batch-mode properties: | ccm.version=$CCM_CASSANDRA_VERSION + proxy.path=$HOME/proxy - xunit: - "**/target/surefire-reports/TEST-*.xml" - "**/target/failsafe-reports/TEST-*.xml" diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 8255077794f..fd7291d4be2 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -128,6 +128,7 @@ --> com.datastax.oss:java-driver-core io.netty:* + com.fasterxml.jackson.core:* @@ -135,7 +136,21 @@ io.netty com.datastax.oss.driver.shaded.netty + + com.fasterxml.jackson + com.datastax.oss.driver.shaded.fasterxml.jackson + + + + + com.fasterxml.jackson.core:* + + META-INF/** + + + diff --git a/core/pom.xml b/core/pom.xml index d63e89028b9..2620b75bbc1 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -91,6 +91,10 @@ com.github.spotbugs spotbugs-annotations + + com.fasterxml.jackson.core + jackson-databind + ch.qos.logback logback-classic diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 2ad96a8aea1..18d708fce55 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -782,6 +782,8 @@ public enum DefaultDriverOption implements DriverOption { *

      Value-type: boolean */ NETTY_DAEMON("advanced.netty.daemon"), + + CLOUD_SECURE_CONNECT_BUNDLE("basic.cloud.secure-connect-bundle"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 34f002f04e9..75d0c3be1b3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; import java.util.List; import java.util.Map; import java.util.function.Predicate; @@ -52,6 +53,7 @@ public static Builder builder() { private final ClassLoader classLoader; private final AuthProvider authProvider; private final SslEngineFactory sslEngineFactory; + private final InetSocketAddress cloudAddress; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -62,7 +64,8 @@ private ProgrammaticArguments( @NonNull Map> nodeFilters, @Nullable ClassLoader classLoader, @Nullable AuthProvider authProvider, - @Nullable SslEngineFactory sslEngineFactory) { + @Nullable SslEngineFactory sslEngineFactory, + @Nullable InetSocketAddress cloudAddress) { this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; this.schemaChangeListener = schemaChangeListener; @@ -72,6 +75,7 @@ private ProgrammaticArguments( this.classLoader = classLoader; this.authProvider = authProvider; this.sslEngineFactory = sslEngineFactory; + this.cloudAddress = cloudAddress; } @NonNull @@ -119,6 +123,11 @@ public SslEngineFactory getSslEngineFactory() { return sslEngineFactory; } + @Nullable + public InetSocketAddress getCloudAddress() { + return cloudAddress; + } + public static class 
Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -131,6 +140,7 @@ public static class Builder { private ClassLoader classLoader; private AuthProvider authProvider; private SslEngineFactory sslEngineFactory; + private InetSocketAddress cloudAddress; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... typeCodecs) { @@ -192,6 +202,12 @@ public Builder withClassLoader(@Nullable ClassLoader classLoader) { return this; } + @NonNull + public Builder withCloudAddress(@Nullable InetSocketAddress cloudAddress) { + this.cloudAddress = cloudAddress; + return this; + } + @NonNull public Builder withAuthProvider(@Nullable AuthProvider authProvider) { this.authProvider = authProvider; @@ -215,7 +231,8 @@ public ProgrammaticArguments build() { nodeFiltersBuilder.build(), classLoader, authProvider, - sslEngineFactory); + sslEngineFactory, + cloudAddress); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index e6b16f2716d..8f01aabdfaf 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -34,11 +34,15 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.ContactPoints; import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; +import com.datastax.oss.driver.internal.core.config.cloud.DbaasConfig; +import com.datastax.oss.driver.internal.core.config.cloud.DbaasConfigUtil; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; +import 
com.datastax.oss.driver.internal.core.metadata.SniEndPoint; import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.ssl.SniSslEngineFactory; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import edu.umd.cs.findbugs.annotations.NonNull; @@ -73,6 +77,7 @@ public abstract class SessionBuilder { protected DriverConfigLoader configLoader; protected Set programmaticContactPoints = new HashSet<>(); protected CqlIdentifier keyspace; + protected String cloudConfigPath; protected ProgrammaticArguments.Builder programmaticArgumentsBuilder = ProgrammaticArguments.builder(); @@ -377,6 +382,20 @@ public SelfT withClassLoader(@Nullable ClassLoader classLoader) { return self; } + /** + * Creates a SessionBuilder pre-configured for a specific Cloud endpoint or configuration file. + * Currently this supports only a path on the local filesystem pointing to the secure connect + * bundle zip file. In the future this will be extended to work with CaaS service provider + * endpoints. + * + * @param cloudConfigPath Absolute path to the secure connect bundle zip file. + */ + @NonNull + public SelfT withCloudSecureConnectBundle(@NonNull String cloudConfigPath) { + this.cloudConfigPath = cloudConfigPath; + return self; + } + /** * Creates the session with the options set by this builder. * @@ -390,7 +409,6 @@ public CompletionStage buildAsync() { CompletableFutures.propagateCancellation(wrapStage, buildStage); return wrapStage; } - /** * Convenience method to call {@link #buildAsync()} and block on the result. 
* @@ -410,6 +428,28 @@ protected final CompletionStage buildDefaultSessionAsync() { DriverConfigLoader configLoader = buildIfNull(this.configLoader, this::defaultConfigLoader); DriverExecutionProfile defaultConfig = configLoader.getInitialConfig().getDefaultProfile(); + if (cloudConfigPath == null) { + cloudConfigPath = + defaultConfig.getString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, null); + } + if (cloudConfigPath != null) { + DbaasConfig dbaasConfig = DbaasConfigUtil.getConfig(cloudConfigPath); + for (String hostID : dbaasConfig.getHostIds()) { + programmaticContactPoints.add( + new SniEndPoint( + new InetSocketAddress(dbaasConfig.getSniHost(), dbaasConfig.getSniPort()), + hostID)); + } + withLocalDatacenter(dbaasConfig.getLocalDataCenter()); + if (dbaasConfig.getUsername() != null && dbaasConfig.getPassword() != null) { + withAuthCredentials(dbaasConfig.getUsername(), dbaasConfig.getPassword()); + } + SSLContext sslContext = DbaasConfigUtil.getSSLContext(dbaasConfig); + withSslEngineFactory(new SniSslEngineFactory(sslContext)); + programmaticArgumentsBuilder.withCloudAddress( + new InetSocketAddress(dbaasConfig.getSniHost(), dbaasConfig.getSniPort())); + } + List configContactPoints = defaultConfig.getStringList(DefaultDriverOption.CONTACT_POINTS, Collections.emptyList()); boolean resolveAddresses = diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java new file mode 100644 index 00000000000..b9da82a0033 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java @@ -0,0 +1,129 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import java.nio.file.Path; +import java.util.List; + +/** + * The POJO representation of the config.json that is distributed as part of the creds.zip. It is + * populated mostly by the config.json. With the hostIds, and localDc being filled in by the + * metadata service. + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class DbaasConfig { + + private String username; + private String password; + private String host; + private int port; + private String sniHost; + private int sniPort; + private List hostIds; + private String localDC; + private String keyStorePassword; + private String trustStorePassword; + private Path secureConnectBundlePath; + + public void setUsername(String username) { + this.username = username; + } + + public void setPassword(String password) { + this.password = password; + } + + public void setHost(String host) { + this.host = host; + } + + public void setPort(int port) { + this.port = port; + } + + public void setLocalDC(String localDC) { + this.localDC = localDC; + } + + public void setKeyStorePassword(String keyStorePassword) { + this.keyStorePassword = keyStorePassword; + } + + public void setTrustStorePassword(String trustStorePassword) { + this.trustStorePassword = trustStorePassword; + } + + public String getUsername() { + return username; + } + + public String getPassword() { + return password; + } + + public String getHost() { + return host; + } + + public int getPort() { + 
return port; + } + + public String getLocalDataCenter() { + return localDC; + } + + public String getKeyStorePassword() { + return keyStorePassword; + } + + public String getTrustStorePassword() { + return trustStorePassword; + } + + public String getSniHost() { + return sniHost; + } + + public void setSniHost(String sniHost) { + this.sniHost = sniHost; + } + + public int getSniPort() { + return sniPort; + } + + public void setSniPort(int sniPort) { + this.sniPort = sniPort; + } + + public List getHostIds() { + return hostIds; + } + + public void setHostIds(List hostIds) { + this.hostIds = hostIds; + } + + public Path getSecureConnectBundlePath() { + return secureConnectBundlePath; + } + + public void setSecureConnectBundlePath(Path secureConnectBundlePath) { + this.secureConnectBundlePath = secureConnectBundlePath; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java new file mode 100644 index 00000000000..8fbe7b86edd --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java @@ -0,0 +1,154 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.KeyStore; +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.List; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; + +public class DbaasConfigUtil { + + private static final String CONFIG_FILE = "config.json"; + public static final String CONFIG_TRUSTSTORE_FILE = "trustStore.jks"; + public static final String CONFIG_KEYSTORE_FILE = "identity.jks"; + + private static final String METADATA_CONTACT_INFO = "contact_info"; + private static final String METADATA_CONTACT_POINTS = "contact_points"; + private static final String METADATA_LOCAL_DC = "local_dc"; + private static final String METADATA_PROXY_ADDRESS = "sni_proxy_address"; + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @NonNull + public static DbaasConfig getConfig(@NonNull String secureConnectBundlePath) { + try { + DbaasConfig config = getBaseConfig(Paths.get(secureConnectBundlePath)); + return getProxyMetadata(config); + } catch (Exception exception) { + throw new IllegalStateException( + "Unable to construct cloud configuration from path " + secureConnectBundlePath, + exception); + } + } + + @NonNull + public static SSLContext getSSLContext(@NonNull 
DbaasConfig config) throws Exception { + SSLContext context = SSLContext.getInstance("SSL"); + TrustManagerFactory tmf; + try (InputStream trustStoreStream = + openZippedFileInputStream(config.getSecureConnectBundlePath(), CONFIG_TRUSTSTORE_FILE)) { + KeyStore ts = KeyStore.getInstance("JKS"); + char[] trustPassword = config.getTrustStorePassword().toCharArray(); + ts.load(trustStoreStream, trustPassword); + tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(ts); + } + // initialize keystore. + KeyManagerFactory kmf; + try (InputStream keyStoreStream = + openZippedFileInputStream(config.getSecureConnectBundlePath(), CONFIG_KEYSTORE_FILE)) { + KeyStore ks = KeyStore.getInstance("JKS"); + char[] keyStorePassword = config.getKeyStorePassword().toCharArray(); + ks.load(keyStoreStream, keyStorePassword); + kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + kmf.init(ks, keyStorePassword); + } + context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + return context; + } + + @VisibleForTesting + @NonNull + static DbaasConfig getBaseConfig(@NonNull Path secureConnectBundlePath) throws Exception { + try (InputStream jsonConfigInputStream = + openZippedFileInputStream(secureConnectBundlePath, CONFIG_FILE)) { + ObjectMapper mapper = new ObjectMapper(); + DbaasConfig config = mapper.readValue(jsonConfigInputStream, DbaasConfig.class); + config.setSecureConnectBundlePath(secureConnectBundlePath); + return config; + } + } + + @NonNull + private static InputStream openZippedFileInputStream( + @NonNull Path zipFile, @NonNull String innerFileName) throws IOException { + ZipFile zip = new ZipFile(zipFile.toFile()); + ZipEntry configEntry = zip.getEntry(innerFileName); + return zip.getInputStream(configEntry); + } + + @NonNull + private static DbaasConfig getProxyMetadata(@NonNull DbaasConfig dbaasConfig) throws Exception { + SSLContext sslContext = getSSLContext(dbaasConfig); + 
StringBuilder result = new StringBuilder(); + URL metaDataServiceUrl = + new URL("https://" + dbaasConfig.getHost() + ":" + dbaasConfig.getPort() + "/metadata"); + HttpsURLConnection connection = (HttpsURLConnection) metaDataServiceUrl.openConnection(); + connection.setSSLSocketFactory(sslContext.getSocketFactory()); + connection.setRequestMethod("GET"); + connection.setRequestProperty("host", "localhost"); + try (BufferedReader rd = + new BufferedReader( + new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) { + + String line; + while ((line = rd.readLine()) != null) { + result.append(line); + } + } + return getConfigFromMetadataJson(dbaasConfig, result.toString()); + } + + @VisibleForTesting + @NonNull + static DbaasConfig getConfigFromMetadataJson( + @NonNull DbaasConfig dbaasConfig, @NonNull String jsonString) throws Exception { + JsonNode json = OBJECT_MAPPER.readTree(jsonString); + JsonNode contactInfo = json.get(METADATA_CONTACT_INFO); + dbaasConfig.setLocalDC(contactInfo.get(METADATA_LOCAL_DC).asText()); + List hostIds = new ArrayList<>(); + JsonNode hostIdsJSON = contactInfo.get(METADATA_CONTACT_POINTS); + for (int i = 0; i < hostIdsJSON.size(); i++) { + hostIds.add(hostIdsJSON.get(i).asText()); + } + dbaasConfig.setHostIds(hostIds); + List sniHostComplete = + Splitter.on(":").splitToList(contactInfo.get(METADATA_PROXY_ADDRESS).asText()); + dbaasConfig.setSniHost(sniHostComplete.get(0)); + dbaasConfig.setSniPort(Integer.parseInt(sniHostComplete.get(1))); + return dbaasConfig; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 0d51c15e45f..3ad13f17f51 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -45,6 +45,7 @@ import 
com.datastax.oss.driver.internal.core.channel.DefaultWriteCoalescer; import com.datastax.oss.driver.internal.core.channel.WriteCoalescer; import com.datastax.oss.driver.internal.core.control.ControlConnection; +import com.datastax.oss.driver.internal.core.metadata.DbaasTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; @@ -79,6 +80,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; +import java.net.InetSocketAddress; import java.util.List; import java.util.Map; import java.util.Optional; @@ -196,6 +198,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final Map localDatacentersFromBuilder; private final Map> nodeFiltersFromBuilder; private final ClassLoader classLoader; + private final InetSocketAddress cloudAddress; private final LazyReference requestLogFormatterRef = new LazyReference<>("requestLogFormatter", this::buildRequestLogFormatter, cycleDetector); @@ -241,6 +244,7 @@ public DefaultDriverContext( cycleDetector); this.nodeFiltersFromBuilder = programmaticArguments.getNodeFilters(); this.classLoader = programmaticArguments.getClassLoader(); + this.cloudAddress = programmaticArguments.getCloudAddress(); } /** @@ -416,7 +420,10 @@ protected ChannelFactory buildChannelFactory() { } protected TopologyMonitor buildTopologyMonitor() { - return new DefaultTopologyMonitor(this); + if (cloudAddress == null) { + return new DefaultTopologyMonitor(this); + } + return new DbaasTopologyMonitor(this, cloudAddress); } protected MetadataManager buildMetadataManager() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java new 
file mode 100644 index 00000000000..486e8ed0e96 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.UUID; + +public class DbaasTopologyMonitor extends DefaultTopologyMonitor { + private final InetSocketAddress proxyAddr; + + public DbaasTopologyMonitor(InternalDriverContext context, InetSocketAddress proxyAddr) { + super(context); + this.proxyAddr = proxyAddr; + } + + @NonNull + @Override + protected DefaultNodeInfo.Builder nodeInfoBuilder( + @NonNull AdminRow row, + @Nullable InetSocketAddress broadcastRpcAddress, + @NonNull EndPoint localEndPoint) { + UUID uuid = row.getUuid("host_id"); + + EndPoint endPoint = new SniEndPoint(proxyAddr, uuid.toString()); + + DefaultNodeInfo.Builder builder = + DefaultNodeInfo.builder() + .withEndPoint(endPoint) + .withBroadcastRpcAddress(broadcastRpcAddress); + InetAddress broadcastAddress = 
row.getInetAddress("broadcast_address"); // in system.local + if (broadcastAddress == null) { + broadcastAddress = row.getInetAddress("peer"); // in system.peers + } + int broadcastPort = 0; + if (row.contains("peer_port")) { + broadcastPort = row.getInteger("peer_port"); + } + builder.withBroadcastAddress(new InetSocketAddress(broadcastAddress, broadcastPort)); + InetAddress listenAddress = row.getInetAddress("listen_address"); + int listen_port = 0; + if (row.contains("listen_port")) { + listen_port = row.getInteger("listen_port"); + } + return builder + .withListenAddress(new InetSocketAddress(listenAddress, listen_port)) + .withDatacenter(row.getString("data_center")) + .withRack(row.getString("rack")) + .withCassandraVersion(row.getString("release_version")) + .withTokens(row.getSetOfString("tokens")) + .withPartitioner(row.getString("partitioner")) + .withHostId(row.getUuid("host_id")) + .withSchemaVersion(row.getUuid("schema_version")); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java new file mode 100644 index 00000000000..bfc9752d02c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java @@ -0,0 +1,110 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.shaded.guava.common.primitives.UnsignedBytes; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicLong; + +public class SniEndPoint implements EndPoint { + private static final AtomicLong OFFSET = new AtomicLong(); + + private final InetSocketAddress proxyAddress; + private final String serverName; + + /** + * @param proxyAddress the address of the proxy. If it is {@linkplain + * InetSocketAddress#isUnresolved() unresolved}, each call to {@link #resolve()} will + * re-resolve it, fetch all of its A-records, and if there are more than 1 pick one in a + * round-robin fashion. + * @param serverName the SNI server name. In the context of DBaas, this is the string + * representation of the host id. + */ + public SniEndPoint(InetSocketAddress proxyAddress, String serverName) { + this.proxyAddress = Objects.requireNonNull(proxyAddress, "SNI address cannot be null"); + this.serverName = Objects.requireNonNull(serverName, "SNI Server name cannot be null"); + } + + public String getServerName() { + return serverName; + } + + @Override + public InetSocketAddress resolve() { + try { + InetAddress[] aRecords = InetAddress.getAllByName(proxyAddress.getHostName()); + if (aRecords.length == 0) { + // Probably never happens, but the JDK docs don't explicitly say so + throw new IllegalArgumentException( + "Could not resolve proxy address " + proxyAddress.getHostName()); + } + // The order of the returned address is unspecified. Sort by IP to make sure we get a true + // round-robin + Arrays.sort(aRecords, IP_COMPARATOR); + int index = (aRecords.length == 1) ? 
0 : (int) OFFSET.getAndIncrement() % aRecords.length; + return new InetSocketAddress(aRecords[index], proxyAddress.getPort()); + } catch (UnknownHostException e) { + throw new IllegalArgumentException( + "Could not resolve proxy address " + proxyAddress.getHostName(), e); + } + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof SniEndPoint) { + SniEndPoint that = (SniEndPoint) other; + return this.proxyAddress.equals(that.proxyAddress) && this.serverName.equals(that.serverName); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(proxyAddress, serverName); + } + + @Override + public String toString() { + // Note that this uses the original proxy address, so if there are multiple A-records it won't + // show which one was selected. If that turns out to be a problem for debugging, we might need + // to store the result of resolve() in Connection and log that instead of the endpoint. 
+ return proxyAddress.toString() + ":" + serverName; + } + + @Override + public String asMetricPrefix() { + String hostString = proxyAddress.getHostString(); + if (hostString == null) { + throw new IllegalArgumentException( + "Could not extract a host string from provided proxy address " + proxyAddress); + } + return hostString.replace('.', '_') + ':' + proxyAddress.getPort() + '_' + serverName; + } + + private static final Comparator IP_COMPARATOR = + (InetAddress address1, InetAddress address2) -> + UnsignedBytes.lexicographicalComparator() + .compare(address1.getAddress(), address2.getAddress()); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java new file mode 100644 index 00000000000..ed459d9aa14 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.datastax.oss.driver.internal.core.ssl; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import com.datastax.oss.driver.internal.core.metadata.SniEndPoint; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import javax.net.ssl.SNIHostName; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLParameters; + +public class SniSslEngineFactory implements SslEngineFactory { + + private final SSLContext sslContext; + + /** Builds a new instance from the driver configuration. */ + public SniSslEngineFactory(SSLContext sslContext) { + this.sslContext = sslContext; + } + + @NonNull + @Override + public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { + if (!(remoteEndpoint instanceof SniEndPoint)) { + throw new IllegalArgumentException( + String.format( + "Configuration error: can only use %s with SNI end points", + this.getClass().getSimpleName())); + } + SSLEngine engine; + SniEndPoint sniEndPoint = (SniEndPoint) remoteEndpoint; + + engine = sslContext.createSSLEngine(); + engine.setUseClientMode(true); + SSLParameters parameters = engine.getSSLParameters(); + parameters.setServerNames(ImmutableList.of(new SNIHostName(sniEndPoint.getServerName()))); + engine.setSSLParameters(parameters); + return engine; + } + + @Override + public void close() { + // nothing to do + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 2cb30241f30..db4da4eba6b 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -188,6 +188,11 @@ datastax-java-driver { # (meaning the driver won't ever connect to it), and never included in any query plan. // filter.class= } + basic.cloud { + # The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + # service. 
+ // secure-connect-bundle = /location/of/secure/connect/bundle + } # ADVANCED OPTIONS ------------------------------------------------------------------------------- diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java new file mode 100644 index 00000000000..7decefd1993 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java @@ -0,0 +1,57 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.shaded.guava.common.io.Resources; +import java.net.URL; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.junit.Test; + +public class DbaasConfigUtilTest { + + @Test + public void should_load_config_from_json() throws Exception { + + URL url = getClass().getResource("/config/cloud/creds.zip"); + Path configFile = Paths.get(url.toURI()); + + DbaasConfig config = DbaasConfigUtil.getBaseConfig(configFile); + assertThat(config.getHost()).isEqualTo("127.0.0.1"); + assertThat(config.getUsername()).isEqualTo("driversuser"); + assertThat(config.getPassword()).isEqualTo("driverspass"); + assertThat(config.getPort()).isEqualTo(30443); + assertThat(config.getLocalDataCenter()).isEqualTo("selfservicedc"); + assertThat(config.getKeyStorePassword()).isEqualTo("keystorepass"); + assertThat(config.getTrustStorePassword()).isEqualTo("trustpass"); + + String jsonMetadata = + Resources.toString(getClass().getResource("/config/cloud/metadata.json"), UTF_8); + + config = DbaasConfigUtil.getConfigFromMetadataJson(config, jsonMetadata); + assertThat(config.getLocalDataCenter()).isEqualTo("dc1"); + // Test metadata parsing + assertThat(config.getHostIds()).contains("4ac06655-f861-49f9-881e-3fee22e69b94"); + assertThat(config.getHostIds()).contains("2af7c253-3394-4a0d-bfac-f1ad81b5154d"); + assertThat(config.getHostIds()).contains("b17b6e2a-3f48-4d6a-81c1-20a0a1f3192a"); + assertThat(config.getHostIds().size()).isEqualTo(3); + assertThat(config.getSniHost()).isEqualTo("localhost"); + assertThat(config.getSniPort()).isEqualTo(30002); + } +} diff --git a/core/src/test/resources/config/cloud/creds.zip b/core/src/test/resources/config/cloud/creds.zip new file mode 100644 index 0000000000000000000000000000000000000000..9a11b1126efd49c5e932730f03da247512fbf3a2 GIT 
binary patch literal 389 zcmWIWW@Zs#-~hr?#qa$XpkO^C0|PgM0z-0sURq|lURH5_UT6p}1G~=^omdzyt>9*0 zWO>2NzyKz4CmiHDWWdw*zPjt;<}<$RP2sjx0yRyS+}4~6os^^WX!q@$<}!`FwekP# zP2TIA4|Q)?yRZ4yvr~&+GkQ*l>Zr+BwA_8RhvVWs6-k0uvx28Q3=KZE_(5sp_1BDd z9*731-8~?%s5xiP#j|zleG4qx&+au&IGfRXkMWb@nGK)U@`!pb=57wQ3OS$35d6uU z-SgEExx{yKTc^qH?|R+)toZm7u1}JQA9;FYKhEEGA~`uF^SMLv!i9DBQcSa_*6s`N zX6I0P5yY|==zoyk1H2iTM3@obf-DCL7Z_O52x5^CN&()iY#=R+Ko|+6#ekMEFaQ7y Cmx~kt literal 0 HcmV?d00001 diff --git a/core/src/test/resources/config/cloud/metadata.json b/core/src/test/resources/config/cloud/metadata.json new file mode 100644 index 00000000000..35aa26f67f1 --- /dev/null +++ b/core/src/test/resources/config/cloud/metadata.json @@ -0,0 +1 @@ +{"region":"local","contact_info":{"type":"sni_proxy","local_dc":"dc1","contact_points":["4ac06655-f861-49f9-881e-3fee22e69b94","2af7c253-3394-4a0d-bfac-f1ad81b5154d","b17b6e2a-3f48-4d6a-81c1-20a0a1f3192a"],"sni_proxy_address":"localhost:30002"}} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java new file mode 100644 index 00000000000..ca1fa85100a --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java @@ -0,0 +1,54 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.cloud; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(IsolatedTests.class) +public class DbaasIT { + + @ClassRule public static SniProxyRule proxyRule = new SniProxyRule(); + + @Test + public void should_connect_to_proxy() { + CqlSession session = + CqlSession.builder() + .withCloudSecureConnectBundle(proxyRule.getProxy().getSecureBundlePath()) + .build(); + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_not_connect_to_proxy() { + try (CqlSession session = + CqlSession.builder() + .withCloudSecureConnectBundle(proxyRule.getProxy().getSecureBundleUnreachable()) + .build()) { + fail("Expected an IllegalStateException"); + } catch (IllegalStateException e) { + assertThat(e).hasMessageStartingWith("Unable to construct cloud configuration"); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java new file mode 100644 index 00000000000..3548e945aa1 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.cloud; + +import org.junit.rules.ExternalResource; + +public class SniProxyRule extends ExternalResource { + private final SniProxyServer proxy; + + public SniProxyRule() { + proxy = new SniProxyServer(); + } + + @Override + protected void before() { + proxy.startProxy(); + } + + @Override + protected void after() { + proxy.stopProxy(); + } + + public SniProxyServer getProxy() { + return proxy; + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java new file mode 100644 index 00000000000..6b9ca029a65 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java @@ -0,0 +1,104 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.cloud; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.util.concurrent.TimeUnit; +import org.apache.commons.exec.CommandLine; +import org.apache.commons.exec.DefaultExecutor; +import org.apache.commons.exec.ExecuteStreamHandler; +import org.apache.commons.exec.ExecuteWatchdog; +import org.apache.commons.exec.Executor; +import org.apache.commons.exec.LogOutputStream; +import org.apache.commons.exec.PumpStreamHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SniProxyServer { + private static final Logger logger = LoggerFactory.getLogger(SniProxyServer.class); + private final String proxyPath; + private boolean isRunning = false; + + public SniProxyServer() { + proxyPath = System.getProperty("proxy.path", "./"); + } + + public void startProxy() { + CommandLine run = CommandLine.parse(proxyPath + "/run.sh"); + execute(run); + isRunning = true; + } + + public void stopProxy() { + if (isRunning) { + CommandLine findImageId = + CommandLine.parse("docker ps -a -q --filter ancestor=single_endpoint"); + String id = execute(findImageId); + CommandLine stop = CommandLine.parse("docker kill " + id); + execute(stop); + isRunning = false; + } + } + + public boolean isRunning() { + return isRunning; + } + + public String getSecureBundlePath() { + return proxyPath + "/certs/bundles/creds-v1.zip"; + } + + public String getSecureBundleNoCredsPath() { + return proxyPath + "/certs/bundles/creds-v1-wo-creds.zip"; + } + + public String getSecureBundleUnreachable() { + return proxyPath + "/certs/bundles/creds-v1-unreachable.zip"; + } + + private String execute(CommandLine cli) { + logger.debug("Executing: " + cli); + ExecuteWatchdog watchDog = new ExecuteWatchdog(TimeUnit.MINUTES.toMillis(10)); + ByteArrayOutputStream outStream = new ByteArrayOutputStream(); + try (LogOutputStream errStream = + new LogOutputStream() { + @Override + protected void 
processLine(String line, int logLevel) { + logger.error("sniendpointerr> {}", line); + } + }) { + Executor executor = new DefaultExecutor(); + ExecuteStreamHandler streamHandler = new PumpStreamHandler(outStream, errStream); + executor.setStreamHandler(streamHandler); + executor.setWatchdog(watchDog); + executor.setWorkingDirectory(new File(proxyPath)); + int retValue = executor.execute(cli); + if (retValue != 0) { + logger.error( + "Non-zero exit code ({}) returned from executing ccm command: {}", retValue, cli); + } + return outStream.toString(); + } catch (IOException ex) { + if (watchDog.killedProcess()) { + throw new RuntimeException("The command '" + cli + "' was killed after 10 minutes"); + } else { + throw new RuntimeException("The command '" + cli + "' failed to execute", ex); + } + } + } +} diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 42e9ab4f63b..4713b19cb31 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -358,6 +358,29 @@ If all of these metrics are disabled, you can remove the dependency: ``` +#### Jackson + +[Jackson](https://github.com/FasterXML/jackson) is used to parse configuration files when connecting +to DataStax Apache Cassandra® as a Service. 
+ +If you don't use that feature (that is, if you neither call +`SessionBuilder.withCloudSecureConnectBundle()` nor set the `basic.cloud.secure-connect-bundle` +configuration option), you can safely exclude the dependency: + +```xml + + com.datastax.oss + java-driver-core + ${driver.version} + + + com.fasterxml.jackson.core + jackson-databind + + + +``` + #### Documenting annotations The driver team uses annotations to document certain aspects of the code: From 0239e485428898e68e3162e2d7bf58aebae4806f Mon Sep 17 00:00:00 2001 From: tomekl007 Date: Fri, 4 Oct 2019 11:56:44 +0200 Subject: [PATCH 130/979] JAVA-2478: Allow to provide secure connect bundle via URL --- changelog/README.md | 3 + core/pom.xml | 5 + .../api/core/session/SessionBuilder.java | 56 ++++++- .../core/config/cloud/DbaasConfig.java | 12 +- .../core/config/cloud/DbaasConfigUtil.java | 36 +++-- core/src/main/resources/reference.conf | 5 + .../config/cloud/DbaasConfigUtilTest.java | 43 +++++- integration-tests/pom.xml | 5 + .../oss/driver/api/core/cloud/DbaasIT.java | 140 +++++++++++++++++- .../driver/api/core/cloud/SniProxyServer.java | 9 +- pom.xml | 6 + 11 files changed, 284 insertions(+), 36 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 68a74566269..29fd73afa39 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### DSOD (in progress) +- [new feature] JAVA-2478: Allow to provide secureBundle via URL + ### 4.3.0 (in progress) - [improvement] JAVA-2407: Improve handling of logback configuration files in IDEs diff --git a/core/pom.xml b/core/pom.xml index 2620b75bbc1..65642fde00f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -120,6 +120,11 @@ mockito-core test + + com.github.tomakehurst + wiremock + test + diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 8f01aabdfaf..42b17d068d5 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -48,6 +48,10 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -77,7 +81,7 @@ public abstract class SessionBuilder { protected DriverConfigLoader configLoader; protected Set programmaticContactPoints = new HashSet<>(); protected CqlIdentifier keyspace; - protected String cloudConfigPath; + protected URL cloudConfigUrl; protected ProgrammaticArguments.Builder programmaticArgumentsBuilder = ProgrammaticArguments.builder(); @@ -388,11 +392,26 @@ public SelfT withClassLoader(@Nullable ClassLoader classLoader) { * bundle zip file. In the future this will be extended to work with CaaS service provider * endpoints. * - * @param cloudConfigPath Absolute path to the secure connect bundle zip file. + * @param cloudConfigPath Path to the secure connect bundle zip file. */ @NonNull - public SelfT withCloudSecureConnectBundle(@NonNull String cloudConfigPath) { - this.cloudConfigPath = cloudConfigPath; + public SelfT withCloudSecureConnectBundle(@NonNull Path cloudConfigPath) { + try { + this.cloudConfigUrl = cloudConfigPath.toAbsolutePath().normalize().toUri().toURL(); + } catch (MalformedURLException e) { + throw new IllegalArgumentException("Incorrect format of cloudConfigUrl.", e); + } + return self; + } + + /** + * Creates a SessionBuilder pre-configured for a specific Cloud endpoint or configuration file. + * + * @param cloudConfigUrl URL from which the secure connect bundle zip could be retrieved. 
+ */ + @NonNull + public SelfT withCloudSecureConnectBundle(@NonNull URL cloudConfigUrl) { + this.cloudConfigUrl = cloudConfigUrl; return self; } @@ -428,12 +447,16 @@ protected final CompletionStage buildDefaultSessionAsync() { DriverConfigLoader configLoader = buildIfNull(this.configLoader, this::defaultConfigLoader); DriverExecutionProfile defaultConfig = configLoader.getInitialConfig().getDefaultProfile(); - if (cloudConfigPath == null) { - cloudConfigPath = + if (cloudConfigUrl == null) { + + String configUrlString = defaultConfig.getString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, null); + if (configUrlString != null) { + cloudConfigUrl = getURL(configUrlString); + } } - if (cloudConfigPath != null) { - DbaasConfig dbaasConfig = DbaasConfigUtil.getConfig(cloudConfigPath); + if (cloudConfigUrl != null) { + DbaasConfig dbaasConfig = DbaasConfigUtil.getConfig(cloudConfigUrl); for (String hostID : dbaasConfig.getHostIds()) { programmaticContactPoints.add( new SniEndPoint( @@ -475,6 +498,23 @@ protected final CompletionStage buildDefaultSessionAsync() { } } + /** + * Returns URL based on the configUrl setting. If the configUrl has no protocol provided, the + * method will fallback to file:// protocol and return URL that has file protocol specified. + * + * @param configUrl url to config secure bundle + * @return URL with file protocol if there was not explicit protocol provided in the configUrl + * setting + */ + private URL getURL(String configUrl) throws MalformedURLException { + + try { + return new URL(configUrl); + } catch (MalformedURLException e) { + return Paths.get(configUrl).toAbsolutePath().normalize().toUri().toURL(); + } + } + /** * This must return an instance of {@code InternalDriverContext} (it's not expressed * directly in the signature to avoid leaking that type through the protected API). 
diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java index b9da82a0033..e9bbef60e5a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java @@ -16,7 +16,7 @@ package com.datastax.oss.driver.internal.core.config.cloud; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import java.nio.file.Path; +import java.net.URL; import java.util.List; /** @@ -37,7 +37,7 @@ public class DbaasConfig { private String localDC; private String keyStorePassword; private String trustStorePassword; - private Path secureConnectBundlePath; + private URL secureConnectBundleUrl; public void setUsername(String username) { this.username = username; @@ -119,11 +119,11 @@ public void setHostIds(List hostIds) { this.hostIds = hostIds; } - public Path getSecureConnectBundlePath() { - return secureConnectBundlePath; + public URL getSecureConnectBundleUrl() { + return secureConnectBundleUrl; } - public void setSecureConnectBundlePath(Path secureConnectBundlePath) { - this.secureConnectBundlePath = secureConnectBundlePath; + public void setSecureConnectBundleUrl(URL secureConnectBundleUrl) { + this.secureConnectBundleUrl = secureConnectBundleUrl; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java index 8fbe7b86edd..2e010bfd9e0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java @@ -26,14 +26,12 @@ import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.StandardCharsets; -import java.nio.file.Path; 
-import java.nio.file.Paths; import java.security.KeyStore; import java.security.SecureRandom; import java.util.ArrayList; import java.util.List; import java.util.zip.ZipEntry; -import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; @@ -53,14 +51,13 @@ public class DbaasConfigUtil { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @NonNull - public static DbaasConfig getConfig(@NonNull String secureConnectBundlePath) { + public static DbaasConfig getConfig(@NonNull URL secureConnectBundleUrl) { try { - DbaasConfig config = getBaseConfig(Paths.get(secureConnectBundlePath)); + DbaasConfig config = getBaseConfig(secureConnectBundleUrl); return getProxyMetadata(config); } catch (Exception exception) { throw new IllegalStateException( - "Unable to construct cloud configuration from path " + secureConnectBundlePath, - exception); + "Unable to construct cloud configuration from url " + secureConnectBundleUrl, exception); } } @@ -69,7 +66,7 @@ public static SSLContext getSSLContext(@NonNull DbaasConfig config) throws Excep SSLContext context = SSLContext.getInstance("SSL"); TrustManagerFactory tmf; try (InputStream trustStoreStream = - openZippedFileInputStream(config.getSecureConnectBundlePath(), CONFIG_TRUSTSTORE_FILE)) { + openZippedFileInputStream(config.getSecureConnectBundleUrl(), CONFIG_TRUSTSTORE_FILE)) { KeyStore ts = KeyStore.getInstance("JKS"); char[] trustPassword = config.getTrustStorePassword().toCharArray(); ts.load(trustStoreStream, trustPassword); @@ -79,7 +76,7 @@ public static SSLContext getSSLContext(@NonNull DbaasConfig config) throws Excep // initialize keystore. 
KeyManagerFactory kmf; try (InputStream keyStoreStream = - openZippedFileInputStream(config.getSecureConnectBundlePath(), CONFIG_KEYSTORE_FILE)) { + openZippedFileInputStream(config.getSecureConnectBundleUrl(), CONFIG_KEYSTORE_FILE)) { KeyStore ks = KeyStore.getInstance("JKS"); char[] keyStorePassword = config.getKeyStorePassword().toCharArray(); ks.load(keyStoreStream, keyStorePassword); @@ -92,22 +89,29 @@ public static SSLContext getSSLContext(@NonNull DbaasConfig config) throws Excep @VisibleForTesting @NonNull - static DbaasConfig getBaseConfig(@NonNull Path secureConnectBundlePath) throws Exception { + static DbaasConfig getBaseConfig(@NonNull URL secureConnectBundleUrl) throws Exception { try (InputStream jsonConfigInputStream = - openZippedFileInputStream(secureConnectBundlePath, CONFIG_FILE)) { + openZippedFileInputStream(secureConnectBundleUrl, CONFIG_FILE)) { ObjectMapper mapper = new ObjectMapper(); DbaasConfig config = mapper.readValue(jsonConfigInputStream, DbaasConfig.class); - config.setSecureConnectBundlePath(secureConnectBundlePath); + config.setSecureConnectBundleUrl(secureConnectBundleUrl); return config; } } @NonNull private static InputStream openZippedFileInputStream( - @NonNull Path zipFile, @NonNull String innerFileName) throws IOException { - ZipFile zip = new ZipFile(zipFile.toFile()); - ZipEntry configEntry = zip.getEntry(innerFileName); - return zip.getInputStream(configEntry); + @NonNull URL zipFileUrl, @NonNull String innerFileName) throws IOException { + ZipInputStream zipInputStream = new ZipInputStream(zipFileUrl.openStream()); + ZipEntry entry; + while ((entry = zipInputStream.getNextEntry()) != null) { + if (entry.getName().equals(innerFileName)) { + return zipInputStream; + } + } + throw new IllegalArgumentException( + String.format( + "Unable to find innerFileName: %s in the zipFileUrl: %s", innerFileName, zipFileUrl)); } @NonNull diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 
db4da4eba6b..b4bff43744b 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -191,6 +191,11 @@ datastax-java-driver { basic.cloud { # The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a # service. + # This setting must be a valid URL. + # If the protocol is not specified, it is implicitly assumed to be the `file://` protocol, + # in which case the value is expected to be a valid path on the local filesystem. + # For example, `/a/path/to/bundle` will be interpreted as `file:///a/path/to/bunde`. + # If the protocol is provided explicitly, then the value will be used as is. // secure-connect-bundle = /location/of/secure/connect/bundle } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java index 7decefd1993..0839d6d97be 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java @@ -15,24 +15,58 @@ */ package com.datastax.oss.driver.internal.core.config.cloud; +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static java.nio.charset.StandardCharsets.UTF_8; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.shaded.guava.common.io.Resources; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import java.net.URISyntaxException; import java.net.URL; +import java.nio.file.Files; import java.nio.file.Path; import 
java.nio.file.Paths; +import org.junit.Rule; import org.junit.Test; public class DbaasConfigUtilTest { + @Rule + public WireMockRule wireMockRule = + new WireMockRule(wireMockConfig().dynamicPort().dynamicHttpsPort()); + @Test public void should_load_config_from_json() throws Exception { - URL url = getClass().getResource("/config/cloud/creds.zip"); - Path configFile = Paths.get(url.toURI()); + URL configFile = getClass().getResource("/config/cloud/creds.zip"); + DbaasConfig config = DbaasConfigUtil.getBaseConfig(configFile); + assertDbaasConfig(config); + } + @Test + public void should_load_config_from_http_service_using_external_URL() throws Exception { + // given + stubFor( + any(urlEqualTo("/config/cloud/creds.zip")) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody(Files.readAllBytes(path("/config/cloud/creds.zip"))))); + // when + URL configFile = + new URL(String.format("http://localhost:%d/config/cloud/creds.zip", wireMockRule.port())); DbaasConfig config = DbaasConfigUtil.getBaseConfig(configFile); + + // then + assertDbaasConfig(config); + } + + private void assertDbaasConfig(DbaasConfig config) throws Exception { assertThat(config.getHost()).isEqualTo("127.0.0.1"); assertThat(config.getUsername()).isEqualTo("driversuser"); assertThat(config.getPassword()).isEqualTo("driverspass"); @@ -54,4 +88,9 @@ public void should_load_config_from_json() throws Exception { assertThat(config.getSniHost()).isEqualTo("localhost"); assertThat(config.getSniPort()).isEqualTo(30002); } + + private static Path path(@SuppressWarnings("SameParameterValue") String resource) + throws URISyntaxException { + return Paths.get(DbaasConfigUtilTest.class.getResource(resource).toURI()); + } } diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a00d9457e6d..a2393914d0f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -117,6 +117,11 @@ org.apache.felix.framework test + + 
com.github.tomakehurst + wiremock + test + diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java index ca1fa85100a..2e3af8b4b0c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java @@ -15,27 +15,160 @@ */ package com.datastax.oss.driver.api.core.cloud; +import static com.datastax.oss.driver.api.core.cloud.SniProxyServer.CERTS_BUNDLE_SUFFIX; +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.categories.IsolatedTests; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(IsolatedTests.class) public class DbaasIT { + @Rule + public WireMockRule wireMockRule = + new WireMockRule(wireMockConfig().dynamicPort().dynamicHttpsPort()); + @ClassRule public static SniProxyRule proxyRule = new SniProxyRule(); @Test - public void 
should_connect_to_proxy() { + public void should_connect_to_proxy_using_absolute_path() { + CqlSession session = + CqlSession.builder() + .withCloudSecureConnectBundle(Paths.get(proxyRule.getProxy().getSecureBundlePath())) + .build(); + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_relative_path() { CqlSession session = CqlSession.builder() - .withCloudSecureConnectBundle(proxyRule.getProxy().getSecureBundlePath()) + .withCloudSecureConnectBundle( + Paths.get(proxyRule.getProxy().getSecureBundleRelativePath())) + .build(); + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_file_provided_by_the_http_URL() throws IOException { + // given + stubFor( + any(urlEqualTo(CERTS_BUNDLE_SUFFIX)) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody( + Files.readAllBytes( + Paths.get(proxyRule.getProxy().getSecureBundlePath()))))); + + URL configFile = + new URL(String.format("http://localhost:%d%s", wireMockRule.port(), CERTS_BUNDLE_SUFFIX)); + + // when + CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(configFile).build(); + + // then + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_absolute_path_provided_in_the_session_setting() { + // given + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString( + DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, + proxyRule.getProxy().getSecureBundlePath()) .build(); + // when + CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); + + // then + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void 
should_connect_to_proxy_using_relative_path_provided_in_the_session_setting() { + // given + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString( + DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, + proxyRule.getProxy().getSecureBundleRelativePath()) + .build(); + // when + CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); + + // then + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_url_with_file_protocol_provided_in_the_session_setting() + throws MalformedURLException { + // given + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString( + DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, + Paths.get(proxyRule.getProxy().getSecureBundlePath()).toUri().toURL().toString()) + .build(); + // when + CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); + + // then + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the_session_setting() + throws IOException { + // given + stubFor( + any(urlEqualTo(CERTS_BUNDLE_SUFFIX)) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody( + Files.readAllBytes( + Paths.get(proxyRule.getProxy().getSecureBundlePath()))))); + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString( + DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, + String.format("http://localhost:%d%s", wireMockRule.port(), CERTS_BUNDLE_SUFFIX)) + .build(); + // when + CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); + + // then ResultSet set = session.execute("select * from system.local"); assertThat(set).isNotNull(); } @@ -44,7 +177,8 @@ public void should_connect_to_proxy() { public void 
should_not_connect_to_proxy() { try (CqlSession session = CqlSession.builder() - .withCloudSecureConnectBundle(proxyRule.getProxy().getSecureBundleUnreachable()) + .withCloudSecureConnectBundle( + Paths.get(proxyRule.getProxy().getSecureBundleUnreachable())) .build()) { fail("Expected an IllegalStateException"); } catch (IllegalStateException e) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java index 6b9ca029a65..18abe340edf 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java @@ -31,6 +31,9 @@ public class SniProxyServer { private static final Logger logger = LoggerFactory.getLogger(SniProxyServer.class); + static final String CERTS_BUNDLE_SUFFIX = "/certs/bundles/creds-v1.zip"; + private static final String CERTS_BUNDLE_SUFFIX_RELATIVE = + "/certs/bundles/../bundles/creds-v1.zip"; private final String proxyPath; private boolean isRunning = false; @@ -60,7 +63,11 @@ public boolean isRunning() { } public String getSecureBundlePath() { - return proxyPath + "/certs/bundles/creds-v1.zip"; + return proxyPath + CERTS_BUNDLE_SUFFIX; + } + + public String getSecureBundleRelativePath() { + return proxyPath + CERTS_BUNDLE_SUFFIX_RELATIVE; } public String getSecureBundleNoCredsPath() { diff --git a/pom.xml b/pom.xml index 4927ff085d6..5de5c9cee07 100644 --- a/pom.xml +++ b/pom.xml @@ -266,6 +266,12 @@ compile-testing 0.18 + + com.github.tomakehurst + wiremock + 2.25.0 + test + From c790361deaa6c62318567e2f819e97cdc13e9718 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Oct 2019 09:50:48 +0300 Subject: [PATCH 131/979] JAVA-2490: Allow to read the secure bundle from an InputStream (#14) --- changelog/README.md | 6 +- .../core/session/ProgrammaticArguments.java | 18 +- 
.../api/core/session/SessionBuilder.java | 129 +++++--- .../core/config/cloud/CloudConfig.java | 75 +++++ .../core/config/cloud/CloudConfigFactory.java | 290 ++++++++++++++++++ .../core/config/cloud/DbaasConfig.java | 129 -------- .../core/config/cloud/DbaasConfigUtil.java | 158 ---------- .../core/context/DefaultDriverContext.java | 10 +- .../core/metadata/CloudTopologyMonitor.java | 45 +++ .../core/metadata/DbaasTopologyMonitor.java | 73 ----- .../core/metadata/DefaultTopologyMonitor.java | 53 +++- .../internal/core/metadata/SniEndPoint.java | 2 +- core/src/main/resources/reference.conf | 4 + .../config/cloud/CloudConfigFactoryTest.java | 233 ++++++++++++++ .../config/cloud/DbaasConfigUtilTest.java | 96 ------ .../src/test/resources/config/cloud/creds.zip | Bin 389 -> 9805 bytes .../test/resources/config/cloud/identity.jks | Bin 0 -> 2413 bytes .../resources/config/cloud/trustStore.jks | Bin 0 -> 956 bytes .../oss/driver/api/core/cloud/CloudIT.java | 204 ++++++++++++ .../oss/driver/api/core/cloud/DbaasIT.java | 188 ------------ .../driver/api/core/cloud/SniProxyRule.java | 1 + .../driver/api/core/cloud/SniProxyServer.java | 89 ++++-- 22 files changed, 1065 insertions(+), 738 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/CloudTopologyMonitor.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactoryTest.java delete mode 
100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java create mode 100644 core/src/test/resources/config/cloud/identity.jks create mode 100644 core/src/test/resources/config/cloud/trustStore.jks create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java diff --git a/changelog/README.md b/changelog/README.md index 29fd73afa39..76f6456142a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,10 @@ -### DSOD (in progress) -- [new feature] JAVA-2478: Allow to provide secureBundle via URL +### DataStax Cloud (in progress) + +- [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream +- [new feature] JAVA-2478: Allow to provide the secure bundle via URL ### 4.3.0 (in progress) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 75d0c3be1b3..03ab781ca42 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -53,7 +53,7 @@ public static Builder builder() { private final ClassLoader classLoader; private final AuthProvider authProvider; private final SslEngineFactory sslEngineFactory; - private final InetSocketAddress cloudAddress; + private final InetSocketAddress cloudProxyAddress; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -65,7 +65,7 @@ private ProgrammaticArguments( @Nullable ClassLoader classLoader, @Nullable AuthProvider authProvider, @Nullable SslEngineFactory sslEngineFactory, - @Nullable InetSocketAddress cloudAddress) { + @Nullable InetSocketAddress cloudProxyAddress) { this.typeCodecs = typeCodecs; this.nodeStateListener = 
nodeStateListener; this.schemaChangeListener = schemaChangeListener; @@ -75,7 +75,7 @@ private ProgrammaticArguments( this.classLoader = classLoader; this.authProvider = authProvider; this.sslEngineFactory = sslEngineFactory; - this.cloudAddress = cloudAddress; + this.cloudProxyAddress = cloudProxyAddress; } @NonNull @@ -124,8 +124,8 @@ public SslEngineFactory getSslEngineFactory() { } @Nullable - public InetSocketAddress getCloudAddress() { - return cloudAddress; + public InetSocketAddress getCloudProxyAddress() { + return cloudProxyAddress; } public static class Builder { @@ -140,7 +140,7 @@ public static class Builder { private ClassLoader classLoader; private AuthProvider authProvider; private SslEngineFactory sslEngineFactory; - private InetSocketAddress cloudAddress; + private InetSocketAddress cloudProxyAddress; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... typeCodecs) { @@ -203,8 +203,8 @@ public Builder withClassLoader(@Nullable ClassLoader classLoader) { } @NonNull - public Builder withCloudAddress(@Nullable InetSocketAddress cloudAddress) { - this.cloudAddress = cloudAddress; + public Builder withCloudProxyAddress(@Nullable InetSocketAddress cloudAddress) { + this.cloudProxyAddress = cloudAddress; return this; } @@ -232,7 +232,7 @@ public ProgrammaticArguments build() { classLoader, authProvider, sslEngineFactory, - cloudAddress); + cloudProxyAddress); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 42b17d068d5..a59a3269f16 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -34,19 +34,18 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.ContactPoints; import 
com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; -import com.datastax.oss.driver.internal.core.config.cloud.DbaasConfig; -import com.datastax.oss.driver.internal.core.config.cloud.DbaasConfigUtil; +import com.datastax.oss.driver.internal.core.config.cloud.CloudConfig; +import com.datastax.oss.driver.internal.core.config.cloud.CloudConfigFactory; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; -import com.datastax.oss.driver.internal.core.metadata.SniEndPoint; import com.datastax.oss.driver.internal.core.session.DefaultSession; -import com.datastax.oss.driver.internal.core.ssl.SniSslEngineFactory; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.InputStream; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URL; @@ -58,6 +57,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; import java.util.concurrent.CompletionStage; import java.util.function.Predicate; import java.util.function.Supplier; @@ -81,7 +81,7 @@ public abstract class SessionBuilder { protected DriverConfigLoader configLoader; protected Set programmaticContactPoints = new HashSet<>(); protected CqlIdentifier keyspace; - protected URL cloudConfigUrl; + protected Callable cloudConfigInputStream; protected ProgrammaticArguments.Builder programmaticArgumentsBuilder = ProgrammaticArguments.builder(); @@ -387,31 +387,93 @@ public SelfT withClassLoader(@Nullable ClassLoader classLoader) { } /** - * Creates 
a SessionBuilder pre-configured for a specific Cloud endpoint or configuration file. - * Currently this supports only a path on the local filesystem pointing to the secure connect - * bundle zip file. In the future this will be extended to work with CaaS service provider - * endpoints. + * Configures this SessionBuilder for Cloud deployments by retrieving connection information from + * the provided {@link Path}. + * + *

      To connect to a Cloud database, you must first download the secure database bundle from the + * DataStax Constellation console that contains the connection information, then instruct the + * driver to read its contents using either this method or one if its variants. + * + *

      For more information, please refer to the DataStax Constellation documentation. * * @param cloudConfigPath Path to the secure connect bundle zip file. + * @see #withCloudSecureConnectBundle(URL) + * @see #withCloudSecureConnectBundle(InputStream) */ @NonNull public SelfT withCloudSecureConnectBundle(@NonNull Path cloudConfigPath) { try { - this.cloudConfigUrl = cloudConfigPath.toAbsolutePath().normalize().toUri().toURL(); + URL cloudConfigUrl = cloudConfigPath.toAbsolutePath().normalize().toUri().toURL(); + this.cloudConfigInputStream = cloudConfigUrl::openStream; } catch (MalformedURLException e) { - throw new IllegalArgumentException("Incorrect format of cloudConfigUrl.", e); + throw new IllegalArgumentException("Incorrect format of cloudConfigPath", e); } return self; } /** - * Creates a SessionBuilder pre-configured for a specific Cloud endpoint or configuration file. + * Configures this SessionBuilder for Cloud deployments by retrieving connection information from + * the provided {@link URL}. + * + *

      To connect to a Cloud database, you must first download the secure database bundle from the + * DataStax Constellation console that contains the connection information, then instruct the + * driver to read its contents using either this method or one if its variants. + * + *

      For more information, please refer to the DataStax Constellation documentation. * - * @param cloudConfigUrl URL from which the secure connect bundle zip could be retrieved. + * @param cloudConfigUrl URL to the secure connect bundle zip file. + * @see #withCloudSecureConnectBundle(Path) + * @see #withCloudSecureConnectBundle(InputStream) */ @NonNull public SelfT withCloudSecureConnectBundle(@NonNull URL cloudConfigUrl) { - this.cloudConfigUrl = cloudConfigUrl; + this.cloudConfigInputStream = cloudConfigUrl::openStream; + return self; + } + + /** + * Configures this SessionBuilder for Cloud deployments by retrieving connection information from + * the provided {@link InputStream}. + * + *

      To connect to a Cloud database, you must first download the secure database bundle from the + * DataStax Constellation console that contains the connection information, then instruct the + * driver to read its contents using either this method or one if its variants. + * + *

      For more information, please refer to the DataStax Constellation documentation. + * + *

      Note that the provided stream will be consumed and closed when either {@link + * #build()} or {@link #buildAsync()} are called; attempting to reuse it afterwards will result in + * an error being thrown. + * + * @param cloudConfigInputStream A stream containing the secure connect bundle zip file. + * @see #withCloudSecureConnectBundle(Path) + * @see #withCloudSecureConnectBundle(URL) + */ + @NonNull + public SelfT withCloudSecureConnectBundle(@NonNull InputStream cloudConfigInputStream) { + this.cloudConfigInputStream = () -> cloudConfigInputStream; + return self; + } + + /** + * Configures this SessionBuilder to use the provided Cloud proxy endpoint. + * + *

      Normally, this method should not be called directly; the normal and easiest way to configure + * the driver for Cloud deployments is through a {@linkplain #withCloudSecureConnectBundle(URL) + * secure connect bundle}. + * + *

      Setting this option to any non-null address will make the driver use a special topology + * monitor tailored for Cloud deployments. This topology monitor assumes that the target cluster + * should be contacted through the proxy specified here, using SNI routing. + * + *

      For more information, please refer to the DataStax Constellation documentation. + * + * @param cloudProxyAddress The address of the Cloud proxy to use. + * @see Server Name Indication + */ + @NonNull + public SelfT withCloudProxyAddress(@Nullable InetSocketAddress cloudProxyAddress) { + this.programmaticArgumentsBuilder.withCloudProxyAddress(cloudProxyAddress); return self; } @@ -447,30 +509,23 @@ protected final CompletionStage buildDefaultSessionAsync() { DriverConfigLoader configLoader = buildIfNull(this.configLoader, this::defaultConfigLoader); DriverExecutionProfile defaultConfig = configLoader.getInitialConfig().getDefaultProfile(); - if (cloudConfigUrl == null) { - + if (cloudConfigInputStream == null) { String configUrlString = defaultConfig.getString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, null); if (configUrlString != null) { - cloudConfigUrl = getURL(configUrlString); + cloudConfigInputStream = () -> getURL(configUrlString).openStream(); } } - if (cloudConfigUrl != null) { - DbaasConfig dbaasConfig = DbaasConfigUtil.getConfig(cloudConfigUrl); - for (String hostID : dbaasConfig.getHostIds()) { - programmaticContactPoints.add( - new SniEndPoint( - new InetSocketAddress(dbaasConfig.getSniHost(), dbaasConfig.getSniPort()), - hostID)); - } - withLocalDatacenter(dbaasConfig.getLocalDataCenter()); - if (dbaasConfig.getUsername() != null && dbaasConfig.getPassword() != null) { - withAuthCredentials(dbaasConfig.getUsername(), dbaasConfig.getPassword()); + if (cloudConfigInputStream != null) { + CloudConfig cloudConfig = + new CloudConfigFactory().createCloudConfig(cloudConfigInputStream.call()); + addContactEndPoints(cloudConfig.getEndPoints()); + withLocalDatacenter(cloudConfig.getLocalDatacenter()); + withSslEngineFactory(cloudConfig.getSslEngineFactory()); + withCloudProxyAddress(cloudConfig.getProxyAddress()); + if (cloudConfig.getAuthProvider().isPresent()) { + withAuthProvider(cloudConfig.getAuthProvider().get()); } - SSLContext sslContext = 
DbaasConfigUtil.getSSLContext(dbaasConfig); - withSslEngineFactory(new SniSslEngineFactory(sslContext)); - programmaticArgumentsBuilder.withCloudAddress( - new InetSocketAddress(dbaasConfig.getSniHost(), dbaasConfig.getSniPort())); } List configContactPoints = @@ -507,11 +562,15 @@ protected final CompletionStage buildDefaultSessionAsync() { * setting */ private URL getURL(String configUrl) throws MalformedURLException { - try { return new URL(configUrl); - } catch (MalformedURLException e) { - return Paths.get(configUrl).toAbsolutePath().normalize().toUri().toURL(); + } catch (MalformedURLException e1) { + try { + return Paths.get(configUrl).toAbsolutePath().normalize().toUri().toURL(); + } catch (MalformedURLException e2) { + e2.addSuppressed(e1); + throw e2; + } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java new file mode 100644 index 00000000000..c85d7a33b8c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java @@ -0,0 +1,75 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; +import java.util.List; +import java.util.Optional; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class CloudConfig { + + private final InetSocketAddress proxyAddress; + private final List endPoints; + private final String localDatacenter; + private final SslEngineFactory sslEngineFactory; + @Nullable private final AuthProvider authProvider; + + CloudConfig( + @NonNull InetSocketAddress proxyAddress, + @NonNull List endPoints, + @NonNull String localDatacenter, + @NonNull SslEngineFactory sslEngineFactory, + @Nullable AuthProvider authProvider) { + this.proxyAddress = proxyAddress; + this.endPoints = ImmutableList.copyOf(endPoints); + this.localDatacenter = localDatacenter; + this.sslEngineFactory = sslEngineFactory; + this.authProvider = authProvider; + } + + @NonNull + public InetSocketAddress getProxyAddress() { + return proxyAddress; + } + + @NonNull + public List getEndPoints() { + return endPoints; + } + + @NonNull + public String getLocalDatacenter() { + return localDatacenter; + } + + @NonNull + public SslEngineFactory getSslEngineFactory() { + return sslEngineFactory; + } + + @NonNull + public Optional getAuthProvider() { + return Optional.ofNullable(authProvider); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java new file mode 100644 index 00000000000..9caffd95926 --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java @@ -0,0 +1,290 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; +import com.datastax.oss.driver.internal.core.metadata.SniEndPoint; +import com.datastax.oss.driver.internal.core.ssl.SniSslEngineFactory; +import com.datastax.oss.driver.shaded.guava.common.io.ByteStreams; +import com.datastax.oss.driver.shaded.guava.common.net.HostAndPort; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.SecureRandom; +import java.util.ArrayList; 
+import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; +import javax.net.ssl.HttpsURLConnection; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class CloudConfigFactory { + + /** + * Creates a {@link CloudConfig} with information fetched from the specified Cloud configuration + * URL. + * + *

      The target URL must point to a valid secure connect bundle archive in ZIP format. + * + * @param cloudConfigUrl the URL to fetch the Cloud configuration from; cannot be null. + * @throws IOException If the Cloud configuration cannot be read. + * @throws GeneralSecurityException If the Cloud SSL context cannot be created. + */ + @NonNull + public CloudConfig createCloudConfig(@NonNull URL cloudConfigUrl) + throws IOException, GeneralSecurityException { + Objects.requireNonNull(cloudConfigUrl, "cloudConfigUrl cannot be null"); + return createCloudConfig(cloudConfigUrl.openStream()); + } + + /** + * Creates a {@link CloudConfig} with information fetched from the specified {@link InputStream}. + * + *

      The stream must contain a valid secure connect bundle archive in ZIP format. Note that the + * stream will be closed after a call to that method and cannot be used anymore. + * + * @param cloudConfig the stream to read the Cloud configuration from; cannot be null. + * @throws IOException If the Cloud configuration cannot be read. + * @throws GeneralSecurityException If the Cloud SSL context cannot be created. + */ + @NonNull + public CloudConfig createCloudConfig(@NonNull InputStream cloudConfig) + throws IOException, GeneralSecurityException { + Objects.requireNonNull(cloudConfig, "cloudConfig cannot be null"); + JsonNode configJson = null; + ByteArrayOutputStream keyStoreOutputStream = null; + ByteArrayOutputStream trustStoreOutputStream = null; + ObjectMapper mapper = new ObjectMapper().configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, false); + try (ZipInputStream zipInputStream = new ZipInputStream(cloudConfig)) { + ZipEntry entry; + while ((entry = zipInputStream.getNextEntry()) != null) { + String fileName = entry.getName(); + switch (fileName) { + case "config.json": + configJson = mapper.readTree(zipInputStream); + break; + case "identity.jks": + keyStoreOutputStream = new ByteArrayOutputStream(); + ByteStreams.copy(zipInputStream, keyStoreOutputStream); + break; + case "trustStore.jks": + trustStoreOutputStream = new ByteArrayOutputStream(); + ByteStreams.copy(zipInputStream, trustStoreOutputStream); + break; + } + } + } + if (configJson == null) { + throw new IllegalStateException("Invalid bundle: missing file config.json"); + } + if (keyStoreOutputStream == null) { + throw new IllegalStateException("Invalid bundle: missing file identity.jks"); + } + if (trustStoreOutputStream == null) { + throw new IllegalStateException("Invalid bundle: missing file trustStore.jks"); + } + char[] keyStorePassword = getKeyStorePassword(configJson); + char[] trustStorePassword = getTrustStorePassword(configJson); + ByteArrayInputStream keyStoreInputStream = + new 
ByteArrayInputStream(keyStoreOutputStream.toByteArray()); + ByteArrayInputStream trustStoreInputStream = + new ByteArrayInputStream(trustStoreOutputStream.toByteArray()); + SSLContext sslContext = + createSslContext( + keyStoreInputStream, keyStorePassword, trustStoreInputStream, trustStorePassword); + URL metadataServiceUrl = getMetadataServiceUrl(configJson); + JsonNode proxyMetadataJson; + try (BufferedReader proxyMetadata = fetchProxyMetadata(metadataServiceUrl, sslContext)) { + proxyMetadataJson = mapper.readTree(proxyMetadata); + } + InetSocketAddress sniProxyAddress = getSniProxyAddress(proxyMetadataJson); + List endPoints = getEndPoints(proxyMetadataJson, sniProxyAddress); + String localDatacenter = getLocalDatacenter(proxyMetadataJson); + SniSslEngineFactory sslEngineFactory = new SniSslEngineFactory(sslContext); + AuthProvider authProvider = getAuthProvider(configJson); + return new CloudConfig( + sniProxyAddress, endPoints, localDatacenter, sslEngineFactory, authProvider); + } + + @NonNull + protected char[] getKeyStorePassword(JsonNode configFile) { + if (configFile.has("keyStorePassword")) { + return configFile.get("keyStorePassword").asText().toCharArray(); + } else { + throw new IllegalStateException("Invalid config.json: missing field keyStorePassword"); + } + } + + @NonNull + protected char[] getTrustStorePassword(JsonNode configFile) { + if (configFile.has("trustStorePassword")) { + return configFile.get("trustStorePassword").asText().toCharArray(); + } else { + throw new IllegalStateException("Invalid config.json: missing field trustStorePassword"); + } + } + + @NonNull + protected URL getMetadataServiceUrl(JsonNode configFile) throws MalformedURLException { + if (configFile.has("host")) { + String metadataServiceHost = configFile.get("host").asText(); + if (configFile.has("port")) { + int metadataServicePort = configFile.get("port").asInt(); + return new URL("https", metadataServiceHost, metadataServicePort, "/metadata"); + } else { + throw new 
IllegalStateException("Invalid config.json: missing field port"); + } + } else { + throw new IllegalStateException("Invalid config.json: missing field host"); + } + } + + @Nullable + protected AuthProvider getAuthProvider(JsonNode configFile) { + if (configFile.has("username")) { + String username = configFile.get("username").asText(); + if (configFile.has("password")) { + String password = configFile.get("password").asText(); + return new ProgrammaticPlainTextAuthProvider(username, password); + } + } + return null; + } + + @NonNull + protected SSLContext createSslContext( + @NonNull ByteArrayInputStream keyStoreInputStream, + @NonNull char[] keyStorePassword, + @NonNull ByteArrayInputStream trustStoreInputStream, + @NonNull char[] trustStorePassword) + throws IOException, GeneralSecurityException { + KeyManagerFactory kmf = createKeyManagerFactory(keyStoreInputStream, keyStorePassword); + TrustManagerFactory tmf = createTrustManagerFactory(trustStoreInputStream, trustStorePassword); + SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + return sslContext; + } + + @NonNull + protected KeyManagerFactory createKeyManagerFactory( + @NonNull InputStream keyStoreInputStream, @NonNull char[] keyStorePassword) + throws IOException, GeneralSecurityException { + KeyStore ks = KeyStore.getInstance("JKS"); + ks.load(keyStoreInputStream, keyStorePassword); + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + kmf.init(ks, keyStorePassword); + Arrays.fill(keyStorePassword, (char) 0); + return kmf; + } + + @NonNull + protected TrustManagerFactory createTrustManagerFactory( + @NonNull InputStream trustStoreInputStream, @NonNull char[] trustStorePassword) + throws IOException, GeneralSecurityException { + KeyStore ts = KeyStore.getInstance("JKS"); + ts.load(trustStoreInputStream, trustStorePassword); + TrustManagerFactory tmf = + 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(ts); + Arrays.fill(trustStorePassword, (char) 0); + return tmf; + } + + @NonNull + protected BufferedReader fetchProxyMetadata( + @NonNull URL metadataServiceUrl, @NonNull SSLContext sslContext) throws IOException { + HttpsURLConnection connection = (HttpsURLConnection) metadataServiceUrl.openConnection(); + connection.setSSLSocketFactory(sslContext.getSocketFactory()); + connection.setRequestMethod("GET"); + connection.setRequestProperty("host", "localhost"); + return new BufferedReader( + new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8)); + } + + @NonNull + protected String getLocalDatacenter(@NonNull JsonNode proxyMetadata) { + JsonNode contactInfo = getContactInfo(proxyMetadata); + if (contactInfo.has("local_dc")) { + return contactInfo.get("local_dc").asText(); + } else { + throw new IllegalStateException("Invalid proxy metadata: missing field local_dc"); + } + } + + @NonNull + protected InetSocketAddress getSniProxyAddress(@NonNull JsonNode proxyMetadata) { + JsonNode contactInfo = getContactInfo(proxyMetadata); + if (contactInfo.has("sni_proxy_address")) { + HostAndPort sniProxyHostAndPort = + HostAndPort.fromString(contactInfo.get("sni_proxy_address").asText()); + if (!sniProxyHostAndPort.hasPort()) { + throw new IllegalStateException( + "Invalid proxy metadata: missing port from field sni_proxy_address"); + } + return InetSocketAddress.createUnresolved( + sniProxyHostAndPort.getHost(), sniProxyHostAndPort.getPort()); + } else { + throw new IllegalStateException("Invalid proxy metadata: missing field sni_proxy_address"); + } + } + + @NonNull + protected List getEndPoints( + @NonNull JsonNode proxyMetadata, @NonNull InetSocketAddress sniProxyAddress) { + JsonNode contactInfo = getContactInfo(proxyMetadata); + if (contactInfo.has("contact_points")) { + List endPoints = new ArrayList<>(); + JsonNode hostIdsJson = contactInfo.get("contact_points"); + 
for (int i = 0; i < hostIdsJson.size(); i++) { + endPoints.add(new SniEndPoint(sniProxyAddress, hostIdsJson.get(i).asText())); + } + return endPoints; + } else { + throw new IllegalStateException("Invalid proxy metadata: missing field contact_points"); + } + } + + @NonNull + protected JsonNode getContactInfo(@NonNull JsonNode proxyMetadata) { + if (proxyMetadata.has("contact_info")) { + return proxyMetadata.get("contact_info"); + } else { + throw new IllegalStateException("Invalid proxy metadata: missing field contact_info"); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java deleted file mode 100644 index e9bbef60e5a..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfig.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.config.cloud; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import java.net.URL; -import java.util.List; - -/** - * The POJO representation of the config.json that is distributed as part of the creds.zip. It is - * populated mostly by the config.json. With the hostIds, and localDc being filled in by the - * metadata service. 
- */ -@JsonIgnoreProperties(ignoreUnknown = true) -public class DbaasConfig { - - private String username; - private String password; - private String host; - private int port; - private String sniHost; - private int sniPort; - private List hostIds; - private String localDC; - private String keyStorePassword; - private String trustStorePassword; - private URL secureConnectBundleUrl; - - public void setUsername(String username) { - this.username = username; - } - - public void setPassword(String password) { - this.password = password; - } - - public void setHost(String host) { - this.host = host; - } - - public void setPort(int port) { - this.port = port; - } - - public void setLocalDC(String localDC) { - this.localDC = localDC; - } - - public void setKeyStorePassword(String keyStorePassword) { - this.keyStorePassword = keyStorePassword; - } - - public void setTrustStorePassword(String trustStorePassword) { - this.trustStorePassword = trustStorePassword; - } - - public String getUsername() { - return username; - } - - public String getPassword() { - return password; - } - - public String getHost() { - return host; - } - - public int getPort() { - return port; - } - - public String getLocalDataCenter() { - return localDC; - } - - public String getKeyStorePassword() { - return keyStorePassword; - } - - public String getTrustStorePassword() { - return trustStorePassword; - } - - public String getSniHost() { - return sniHost; - } - - public void setSniHost(String sniHost) { - this.sniHost = sniHost; - } - - public int getSniPort() { - return sniPort; - } - - public void setSniPort(int sniPort) { - this.sniPort = sniPort; - } - - public List getHostIds() { - return hostIds; - } - - public void setHostIds(List hostIds) { - this.hostIds = hostIds; - } - - public URL getSecureConnectBundleUrl() { - return secureConnectBundleUrl; - } - - public void setSecureConnectBundleUrl(URL secureConnectBundleUrl) { - this.secureConnectBundleUrl = secureConnectBundleUrl; - } -} diff 
--git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java deleted file mode 100644 index 2e010bfd9e0..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtil.java +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.config.cloud; - -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.base.Splitter; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.security.KeyStore; -import java.security.SecureRandom; -import java.util.ArrayList; -import java.util.List; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import javax.net.ssl.HttpsURLConnection; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; - -public class DbaasConfigUtil { - - private static final String CONFIG_FILE = "config.json"; - public static 
final String CONFIG_TRUSTSTORE_FILE = "trustStore.jks"; - public static final String CONFIG_KEYSTORE_FILE = "identity.jks"; - - private static final String METADATA_CONTACT_INFO = "contact_info"; - private static final String METADATA_CONTACT_POINTS = "contact_points"; - private static final String METADATA_LOCAL_DC = "local_dc"; - private static final String METADATA_PROXY_ADDRESS = "sni_proxy_address"; - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - @NonNull - public static DbaasConfig getConfig(@NonNull URL secureConnectBundleUrl) { - try { - DbaasConfig config = getBaseConfig(secureConnectBundleUrl); - return getProxyMetadata(config); - } catch (Exception exception) { - throw new IllegalStateException( - "Unable to construct cloud configuration from url " + secureConnectBundleUrl, exception); - } - } - - @NonNull - public static SSLContext getSSLContext(@NonNull DbaasConfig config) throws Exception { - SSLContext context = SSLContext.getInstance("SSL"); - TrustManagerFactory tmf; - try (InputStream trustStoreStream = - openZippedFileInputStream(config.getSecureConnectBundleUrl(), CONFIG_TRUSTSTORE_FILE)) { - KeyStore ts = KeyStore.getInstance("JKS"); - char[] trustPassword = config.getTrustStorePassword().toCharArray(); - ts.load(trustStoreStream, trustPassword); - tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(ts); - } - // initialize keystore. 
- KeyManagerFactory kmf; - try (InputStream keyStoreStream = - openZippedFileInputStream(config.getSecureConnectBundleUrl(), CONFIG_KEYSTORE_FILE)) { - KeyStore ks = KeyStore.getInstance("JKS"); - char[] keyStorePassword = config.getKeyStorePassword().toCharArray(); - ks.load(keyStoreStream, keyStorePassword); - kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(ks, keyStorePassword); - } - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - return context; - } - - @VisibleForTesting - @NonNull - static DbaasConfig getBaseConfig(@NonNull URL secureConnectBundleUrl) throws Exception { - try (InputStream jsonConfigInputStream = - openZippedFileInputStream(secureConnectBundleUrl, CONFIG_FILE)) { - ObjectMapper mapper = new ObjectMapper(); - DbaasConfig config = mapper.readValue(jsonConfigInputStream, DbaasConfig.class); - config.setSecureConnectBundleUrl(secureConnectBundleUrl); - return config; - } - } - - @NonNull - private static InputStream openZippedFileInputStream( - @NonNull URL zipFileUrl, @NonNull String innerFileName) throws IOException { - ZipInputStream zipInputStream = new ZipInputStream(zipFileUrl.openStream()); - ZipEntry entry; - while ((entry = zipInputStream.getNextEntry()) != null) { - if (entry.getName().equals(innerFileName)) { - return zipInputStream; - } - } - throw new IllegalArgumentException( - String.format( - "Unable to find innerFileName: %s in the zipFileUrl: %s", innerFileName, zipFileUrl)); - } - - @NonNull - private static DbaasConfig getProxyMetadata(@NonNull DbaasConfig dbaasConfig) throws Exception { - SSLContext sslContext = getSSLContext(dbaasConfig); - StringBuilder result = new StringBuilder(); - URL metaDataServiceUrl = - new URL("https://" + dbaasConfig.getHost() + ":" + dbaasConfig.getPort() + "/metadata"); - HttpsURLConnection connection = (HttpsURLConnection) metaDataServiceUrl.openConnection(); - 
connection.setSSLSocketFactory(sslContext.getSocketFactory()); - connection.setRequestMethod("GET"); - connection.setRequestProperty("host", "localhost"); - try (BufferedReader rd = - new BufferedReader( - new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) { - - String line; - while ((line = rd.readLine()) != null) { - result.append(line); - } - } - return getConfigFromMetadataJson(dbaasConfig, result.toString()); - } - - @VisibleForTesting - @NonNull - static DbaasConfig getConfigFromMetadataJson( - @NonNull DbaasConfig dbaasConfig, @NonNull String jsonString) throws Exception { - JsonNode json = OBJECT_MAPPER.readTree(jsonString); - JsonNode contactInfo = json.get(METADATA_CONTACT_INFO); - dbaasConfig.setLocalDC(contactInfo.get(METADATA_LOCAL_DC).asText()); - List hostIds = new ArrayList<>(); - JsonNode hostIdsJSON = contactInfo.get(METADATA_CONTACT_POINTS); - for (int i = 0; i < hostIdsJSON.size(); i++) { - hostIds.add(hostIdsJSON.get(i).asText()); - } - dbaasConfig.setHostIds(hostIds); - List sniHostComplete = - Splitter.on(":").splitToList(contactInfo.get(METADATA_PROXY_ADDRESS).asText()); - dbaasConfig.setSniHost(sniHostComplete.get(0)); - dbaasConfig.setSniPort(Integer.parseInt(sniHostComplete.get(1))); - return dbaasConfig; - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 3ad13f17f51..4a20833d860 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -45,7 +45,7 @@ import com.datastax.oss.driver.internal.core.channel.DefaultWriteCoalescer; import com.datastax.oss.driver.internal.core.channel.WriteCoalescer; import com.datastax.oss.driver.internal.core.control.ControlConnection; -import 
com.datastax.oss.driver.internal.core.metadata.DbaasTopologyMonitor; +import com.datastax.oss.driver.internal.core.metadata.CloudTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; @@ -198,7 +198,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final Map localDatacentersFromBuilder; private final Map> nodeFiltersFromBuilder; private final ClassLoader classLoader; - private final InetSocketAddress cloudAddress; + private final InetSocketAddress cloudProxyAddress; private final LazyReference requestLogFormatterRef = new LazyReference<>("requestLogFormatter", this::buildRequestLogFormatter, cycleDetector); @@ -244,7 +244,7 @@ public DefaultDriverContext( cycleDetector); this.nodeFiltersFromBuilder = programmaticArguments.getNodeFilters(); this.classLoader = programmaticArguments.getClassLoader(); - this.cloudAddress = programmaticArguments.getCloudAddress(); + this.cloudProxyAddress = programmaticArguments.getCloudProxyAddress(); } /** @@ -420,10 +420,10 @@ protected ChannelFactory buildChannelFactory() { } protected TopologyMonitor buildTopologyMonitor() { - if (cloudAddress == null) { + if (cloudProxyAddress == null) { return new DefaultTopologyMonitor(this); } - return new DbaasTopologyMonitor(this, cloudAddress); + return new CloudTopologyMonitor(this, cloudProxyAddress); } protected MetadataManager buildMetadataManager() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/CloudTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/CloudTopologyMonitor.java new file mode 100644 index 00000000000..6df0bf3e055 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/CloudTopologyMonitor.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; +import java.util.Objects; +import java.util.UUID; + +public class CloudTopologyMonitor extends DefaultTopologyMonitor { + + private final InetSocketAddress cloudProxyAddress; + + public CloudTopologyMonitor(InternalDriverContext context, InetSocketAddress cloudProxyAddress) { + super(context); + this.cloudProxyAddress = cloudProxyAddress; + } + + @NonNull + @Override + protected EndPoint buildNodeEndPoint( + @NonNull AdminRow row, + @Nullable InetSocketAddress broadcastRpcAddress, + @NonNull EndPoint localEndPoint) { + UUID hostId = Objects.requireNonNull(row.getUuid("host_id")); + return new SniEndPoint(cloudProxyAddress, hostId.toString()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java deleted file mode 100644 index 486e8ed0e96..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DbaasTopologyMonitor.java +++ /dev/null @@ 
-1,73 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.metadata; - -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.util.UUID; - -public class DbaasTopologyMonitor extends DefaultTopologyMonitor { - private final InetSocketAddress proxyAddr; - - public DbaasTopologyMonitor(InternalDriverContext context, InetSocketAddress proxyAddr) { - super(context); - this.proxyAddr = proxyAddr; - } - - @NonNull - @Override - protected DefaultNodeInfo.Builder nodeInfoBuilder( - @NonNull AdminRow row, - @Nullable InetSocketAddress broadcastRpcAddress, - @NonNull EndPoint localEndPoint) { - UUID uuid = row.getUuid("host_id"); - - EndPoint endPoint = new SniEndPoint(proxyAddr, uuid.toString()); - - DefaultNodeInfo.Builder builder = - DefaultNodeInfo.builder() - .withEndPoint(endPoint) - .withBroadcastRpcAddress(broadcastRpcAddress); - InetAddress broadcastAddress = row.getInetAddress("broadcast_address"); // in system.local - if (broadcastAddress == null) { - broadcastAddress = row.getInetAddress("peer"); // in system.peers - } - int broadcastPort = 0; - 
if (row.contains("peer_port")) { - broadcastPort = row.getInteger("peer_port"); - } - builder.withBroadcastAddress(new InetSocketAddress(broadcastAddress, broadcastPort)); - InetAddress listenAddress = row.getInetAddress("listen_address"); - int listen_port = 0; - if (row.contains("listen_port")) { - listen_port = row.getInteger("listen_port"); - } - return builder - .withListenAddress(new InetSocketAddress(listenAddress, listen_port)) - .withDatacenter(row.getString("data_center")) - .withRack(row.getString("rack")) - .withCassandraVersion(row.getString("release_version")) - .withTokens(row.getSetOfString("tokens")) - .withPartitioner(row.getString("partitioner")) - .withHostId(row.getUuid("host_id")) - .withSchemaVersion(row.getUuid("schema_version")); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 459f36d4027..7d87a33ffc7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -295,22 +295,7 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( @Nullable InetSocketAddress broadcastRpcAddress, @NonNull EndPoint localEndPoint) { - boolean peer = row.contains("peer"); - - EndPoint endPoint; - if (peer) { - // If this node is a peer, its broadcast RPC address must be present. - Objects.requireNonNull( - broadcastRpcAddress, "broadcastRpcAddress cannot be null for a peer row"); - // Deployments that use a custom EndPoint implementation will need their own TopologyMonitor. - // One simple approach is to extend this class and override this method. 
- endPoint = new DefaultEndPoint(context.getAddressTranslator().translate(broadcastRpcAddress)); - } else { - // Don't rely on system.local.rpc_address for the control node, because it mistakenly - // reports the normal RPC address instead of the broadcast one (CASSANDRA-11181). We - // already know the endpoint anyway since we've just used it to query. - endPoint = localEndPoint; - } + EndPoint endPoint = buildNodeEndPoint(row, broadcastRpcAddress, localEndPoint); // in system.local InetAddress broadcastInetAddress = row.getInetAddress("broadcast_address"); @@ -367,6 +352,42 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( : rv; } + /** + * Builds the node's endpoint from the given row. + * + * @param broadcastRpcAddress this is a parameter only because we already have it when we come + * from {@link #findInPeers(AdminResult, InetSocketAddress, EndPoint)}. Callers that don't + * already have it can use {@link #getBroadcastRpcAddress}. For the control host, this can be + * null; if this node is a peer however, this cannot be null, since we use that address to + * create the node's endpoint. Callers can use {@link #isPeerValid(AdminRow)} to check that + * before calling this method. + * @param localEndPoint the control node endpoint that was used to query the node's system tables. + * This is a parameter because it would be racy to call {@code + * controlConnection.channel().getEndPoint()} from within this method, as the control + * connection may have changed its channel since. So this parameter must be provided by the + * caller. + */ + @NonNull + protected EndPoint buildNodeEndPoint( + @NonNull AdminRow row, + @Nullable InetSocketAddress broadcastRpcAddress, + @NonNull EndPoint localEndPoint) { + boolean peer = row.contains("peer"); + if (peer) { + // If this node is a peer, its broadcast RPC address must be present. 
+ Objects.requireNonNull( + broadcastRpcAddress, "broadcastRpcAddress cannot be null for a peer row"); + // Deployments that use a custom EndPoint implementation will need their own TopologyMonitor. + // One simple approach is to extend this class and override this method. + return new DefaultEndPoint(context.getAddressTranslator().translate(broadcastRpcAddress)); + } else { + // Don't rely on system.local.rpc_address for the control node, because it mistakenly + // reports the normal RPC address instead of the broadcast one (CASSANDRA-11181). We + // already know the endpoint anyway since we've just used it to query. + return localEndPoint; + } + } + // Called when a new node is being added; the peers table is keyed by broadcast_address, // but the received event only contains broadcast_rpc_address, so // we have to traverse the whole table and check the rows one by one. diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java index bfc9752d02c..d3f9e5b78f7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java @@ -36,7 +36,7 @@ public class SniEndPoint implements EndPoint { * InetSocketAddress#isUnresolved() unresolved}, each call to {@link #resolve()} will * re-resolve it, fetch all of its A-records, and if there are more than 1 pick one in a * round-robin fashion. - * @param serverName the SNI server name. In the context of DBaas, this is the string + * @param serverName the SNI server name. In the context of Cloud, this is the string * representation of the host id. 
*/ public SniEndPoint(InetSocketAddress proxyAddress, String serverName) { diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index b4bff43744b..43a317662c3 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -196,6 +196,10 @@ datastax-java-driver { # in which case the value is expected to be a valid path on the local filesystem. # For example, `/a/path/to/bundle` will be interpreted as `file:///a/path/to/bunde`. # If the protocol is provided explicitly, then the value will be used as is. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no // secure-connect-bundle = /location/of/secure/connect/bundle } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactoryTest.java new file mode 100644 index 00000000000..56b8392190c --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactoryTest.java @@ -0,0 +1,233 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.cloud; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.assertj.core.api.Assertions.catchThrowable; + +import com.datastax.oss.driver.internal.core.ssl.SniSslEngineFactory; +import com.fasterxml.jackson.core.JsonParseException; +import com.github.tomakehurst.wiremock.common.JettySettings; +import com.github.tomakehurst.wiremock.core.Options; +import com.github.tomakehurst.wiremock.http.AdminRequestHandler; +import com.github.tomakehurst.wiremock.http.HttpServer; +import com.github.tomakehurst.wiremock.http.HttpServerFactory; +import com.github.tomakehurst.wiremock.http.StubRequestHandler; +import com.github.tomakehurst.wiremock.jetty9.JettyHttpServer; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import com.google.common.base.Joiner; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.eclipse.jetty.io.NetworkTrafficListener; +import org.eclipse.jetty.server.ConnectionFactory; +import org.eclipse.jetty.server.ServerConnector; +import org.eclipse.jetty.server.SslConnectionFactory; +import org.eclipse.jetty.util.ssl.SslContextFactory; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + 
+@RunWith(MockitoJUnitRunner.class) +public class CloudConfigFactoryTest { + + private static final String BUNDLE_PATH = "/config/cloud/creds.zip"; + + @Rule + public WireMockRule wireMockRule = + new WireMockRule( + wireMockConfig() + .httpsPort(30443) + .dynamicPort() + .httpServerFactory(new HttpsServerFactory()) + .needClientAuth(true) + .keystorePath(path("/config/cloud/identity.jks").toString()) + .keystorePassword("XS78x3GuBWas1OoA5") + .trustStorePath(path("/config/cloud/trustStore.jks").toString()) + .trustStorePassword("48ZY5r06BmpVLKxPg")); + + public CloudConfigFactoryTest() throws URISyntaxException {} + + @Test + public void should_load_config_from_local_filesystem() throws Exception { + // given + URL configFile = getClass().getResource(BUNDLE_PATH); + mockProxyMetadataService(jsonMetadata()); + // when + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + CloudConfig cloudConfig = cloudConfigFactory.createCloudConfig(configFile); + // then + assertCloudConfig(cloudConfig); + } + + @Test + public void should_load_config_from_external_location() throws Exception { + // given + mockHttpSecureBundle(secureBundle()); + mockProxyMetadataService(jsonMetadata()); + // when + URL configFile = new URL("http", "localhost", wireMockRule.port(), BUNDLE_PATH); + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + CloudConfig cloudConfig = cloudConfigFactory.createCloudConfig(configFile); + // then + assertCloudConfig(cloudConfig); + } + + @Test + public void should_throw_when_bundle_not_found() throws Exception { + // given + stubFor(any(urlEqualTo(BUNDLE_PATH)).willReturn(aResponse().withStatus(404))); + // when + URL configFile = new URL("http", "localhost", wireMockRule.port(), BUNDLE_PATH); + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + Throwable t = catchThrowable(() -> cloudConfigFactory.createCloudConfig(configFile)); + assertThat(t) + .isInstanceOf(FileNotFoundException.class) + 
.hasMessageContaining(configFile.toExternalForm()); + } + + @Test + public void should_throw_when_bundle_not_readable() throws Exception { + // given + mockHttpSecureBundle("not a zip file".getBytes(StandardCharsets.UTF_8)); + // when + URL configFile = new URL("http", "localhost", wireMockRule.port(), BUNDLE_PATH); + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + Throwable t = catchThrowable(() -> cloudConfigFactory.createCloudConfig(configFile)); + assertThat(t) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Invalid bundle: missing file config.json"); + } + + @Test + public void should_throw_when_metadata_not_found() throws Exception { + // given + mockHttpSecureBundle(secureBundle()); + stubFor(any(urlPathEqualTo("/metadata")).willReturn(aResponse().withStatus(404))); + // when + URL configFile = new URL("http", "localhost", wireMockRule.port(), BUNDLE_PATH); + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + Throwable t = catchThrowable(() -> cloudConfigFactory.createCloudConfig(configFile)); + assertThat(t).isInstanceOf(FileNotFoundException.class).hasMessageContaining("metadata"); + } + + @Test + public void should_throw_when_metadata_not_readable() throws Exception { + // given + mockHttpSecureBundle(secureBundle()); + mockProxyMetadataService("not a valid json payload"); + // when + URL configFile = new URL("http", "localhost", wireMockRule.port(), BUNDLE_PATH); + CloudConfigFactory cloudConfigFactory = new CloudConfigFactory(); + Throwable t = catchThrowable(() -> cloudConfigFactory.createCloudConfig(configFile)); + assertThat(t).isInstanceOf(JsonParseException.class).hasMessageContaining("Unrecognized token"); + } + + private void mockHttpSecureBundle(byte[] body) { + stubFor( + any(urlEqualTo(BUNDLE_PATH)) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody(body))); + } + + private void mockProxyMetadataService(String 
jsonMetadata) { + stubFor( + any(urlPathEqualTo("/metadata")) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/json") + .withBody(jsonMetadata))); + } + + private byte[] secureBundle() throws IOException, URISyntaxException { + return Files.readAllBytes(path(BUNDLE_PATH)); + } + + private String jsonMetadata() throws IOException, URISyntaxException { + return Joiner.on('\n') + .join(Files.readAllLines(path("/config/cloud/metadata.json"), StandardCharsets.UTF_8)); + } + + private Path path(String resource) throws URISyntaxException { + return Paths.get(getClass().getResource(resource).toURI()); + } + + private void assertCloudConfig(CloudConfig config) { + InetSocketAddress expectedProxyAddress = InetSocketAddress.createUnresolved("localhost", 30002); + assertThat(config.getLocalDatacenter()).isEqualTo("dc1"); + assertThat(config.getProxyAddress()).isEqualTo(expectedProxyAddress); + assertThat(config.getEndPoints()).extracting("proxyAddress").containsOnly(expectedProxyAddress); + assertThat(config.getEndPoints()) + .extracting("serverName") + .containsExactly( + "4ac06655-f861-49f9-881e-3fee22e69b94", + "2af7c253-3394-4a0d-bfac-f1ad81b5154d", + "b17b6e2a-3f48-4d6a-81c1-20a0a1f3192a"); + assertThat(config.getSslEngineFactory()).isNotNull().isInstanceOf(SniSslEngineFactory.class); + } + + static { + javax.net.ssl.HttpsURLConnection.setDefaultHostnameVerifier( + (hostname, sslSession) -> hostname.equals("localhost")); + } + + // see https://github.com/tomakehurst/wiremock/issues/874 + private static class HttpsServerFactory implements HttpServerFactory { + @Override + public HttpServer buildHttpServer( + Options options, + AdminRequestHandler adminRequestHandler, + StubRequestHandler stubRequestHandler) { + return new JettyHttpServer(options, adminRequestHandler, stubRequestHandler) { + @Override + protected ServerConnector createServerConnector( + String bindAddress, + JettySettings jettySettings, + int port, + 
NetworkTrafficListener listener, + ConnectionFactory... connectionFactories) { + if (port == options.httpsSettings().port()) { + SslConnectionFactory sslConnectionFactory = + (SslConnectionFactory) connectionFactories[0]; + SslContextFactory sslContextFactory = sslConnectionFactory.getSslContextFactory(); + sslContextFactory.setKeyStorePassword(options.httpsSettings().keyStorePassword()); + connectionFactories = + new ConnectionFactory[] {sslConnectionFactory, connectionFactories[1]}; + } + return super.createServerConnector( + bindAddress, jettySettings, port, listener, connectionFactories); + } + }; + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java deleted file mode 100644 index 0839d6d97be..00000000000 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/cloud/DbaasConfigUtilTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.core.config.cloud; - -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.any; -import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.oss.driver.shaded.guava.common.io.Resources; -import com.github.tomakehurst.wiremock.junit.WireMockRule; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import org.junit.Rule; -import org.junit.Test; - -public class DbaasConfigUtilTest { - - @Rule - public WireMockRule wireMockRule = - new WireMockRule(wireMockConfig().dynamicPort().dynamicHttpsPort()); - - @Test - public void should_load_config_from_json() throws Exception { - - URL configFile = getClass().getResource("/config/cloud/creds.zip"); - DbaasConfig config = DbaasConfigUtil.getBaseConfig(configFile); - assertDbaasConfig(config); - } - - @Test - public void should_load_config_from_http_service_using_external_URL() throws Exception { - // given - stubFor( - any(urlEqualTo("/config/cloud/creds.zip")) - .willReturn( - aResponse() - .withStatus(200) - .withHeader("Content-Type", "application/octet-stream") - .withBody(Files.readAllBytes(path("/config/cloud/creds.zip"))))); - // when - URL configFile = - new URL(String.format("http://localhost:%d/config/cloud/creds.zip", wireMockRule.port())); - DbaasConfig config = DbaasConfigUtil.getBaseConfig(configFile); - - // then - assertDbaasConfig(config); - } - - private void assertDbaasConfig(DbaasConfig config) throws Exception { - assertThat(config.getHost()).isEqualTo("127.0.0.1"); - 
assertThat(config.getUsername()).isEqualTo("driversuser"); - assertThat(config.getPassword()).isEqualTo("driverspass"); - assertThat(config.getPort()).isEqualTo(30443); - assertThat(config.getLocalDataCenter()).isEqualTo("selfservicedc"); - assertThat(config.getKeyStorePassword()).isEqualTo("keystorepass"); - assertThat(config.getTrustStorePassword()).isEqualTo("trustpass"); - - String jsonMetadata = - Resources.toString(getClass().getResource("/config/cloud/metadata.json"), UTF_8); - - config = DbaasConfigUtil.getConfigFromMetadataJson(config, jsonMetadata); - assertThat(config.getLocalDataCenter()).isEqualTo("dc1"); - // Test metadata parsing - assertThat(config.getHostIds()).contains("4ac06655-f861-49f9-881e-3fee22e69b94"); - assertThat(config.getHostIds()).contains("2af7c253-3394-4a0d-bfac-f1ad81b5154d"); - assertThat(config.getHostIds()).contains("b17b6e2a-3f48-4d6a-81c1-20a0a1f3192a"); - assertThat(config.getHostIds().size()).isEqualTo(3); - assertThat(config.getSniHost()).isEqualTo("localhost"); - assertThat(config.getSniPort()).isEqualTo(30002); - } - - private static Path path(@SuppressWarnings("SameParameterValue") String resource) - throws URISyntaxException { - return Paths.get(DbaasConfigUtilTest.class.getResource(resource).toURI()); - } -} diff --git a/core/src/test/resources/config/cloud/creds.zip b/core/src/test/resources/config/cloud/creds.zip index 9a11b1126efd49c5e932730f03da247512fbf3a2..3b5d1cb1cbd9d1d805eb3aaf06ace9b224d45b87 100644 GIT binary patch literal 9805 zcmaiabx>UWw(Q^*oFKuS;4Z-f3GNQT-Q8V-6EwJMu;A|QI@sXu4ukW^Iq%;0o%5ah zZcY8FX8zb+d+n)SyLp|D}4MJ^3*vigABbd^|T&p-c3Kr zKKTJguMiP&29mSA#;dJK^|K7X*6gvRiYuNYUmU0oA1X^_4V$Fdh+Y(62uP(Nank$1 z-yN!m35hff2o~i)CJ7QZ9z{@l*s{tFs3>H$o(h8e{rGV*mP^&0^Lm|-EQc&`z2yg# zFf^tiT`!u^r!8tG07l77*eI!7!0z>&&Ty3X!AyAhY<(T(s)vhPzYpo3UINt6h zmAPE*t#jYS!Hxh`b41t(Q7?E@4W8gD5e50TSE(((&nS7_Mr04qJC5y<*_%vQI@PJo zj~u&QqI{Rr=W!`t@qW0utY=^)4VvD>dk36eW@FgGuJ6~i&D8E!=lT&ZAf&nbLC3HFw3fM_ 
zHmH2jmO6#=Z*q>nmBN-7CyfGU&hIbdJ$+~T18x;L%RF_J_R^WIfBf7~9CN9% zdrEuM(HjhY)wOs9$p1G@N8V_zj;i~`tv@k~lVd41 zca6SlT90z&EannIWQHb|?XB_mEBYy095@;hr&JK7g?{{QYN*T#hpo3Ah8~)B^b2(Z zB31(}+A7HJ8QWkq`;u=*Y`9)!cfn`uRzSgOEr~!>XMpMWAu4P+Yh9)C)#B{6aqDGl z)weNi%2aOImIE|*HvWU4$23|1=>}eeowb~k3Yfs-XP{jKNTd|-W6G>QpYiSQ_d8gs<@P*itc8j=^Yar8Js zoxr8Ppn71ElQCNKTDz3Yy`yBcj*D$;;m%_=jQ)i12GIOf6loPr-%<9gg3F8IC@ z=ne0Zl!vG263eDKd@jw(dhrEBSgCACivhzWd^3~QHfv*gF)Nh*2|;bz z{Q4$&p8DLwZ_jZV?|VjPa_0=!z<0}&B;8qcrgY0$4N{(%Uh|($6gBzE5T75}q=7e* zc4~;k3{VSnff%L|oFm{xAWwHI^H1cZ0->s>yxPXwKtIF%Ygd0h!#g~iq)!V9D>c8C zIba_>1|1?g5c5QrO$QL>3OV9f6~?pB+%?11Br4b~gqcQq`b16dInorWxk;vQ#ryk_YBE9G>K!_*;R0D$kX007h*y%;&`aiX8jSUlG8h0R#vRl%1tPul=mmH zI?#+&xL)n~H;6$%sIH%{EZcX2y~NuEN0%h~xg@x8>x13Fo-3sEva$D8kw3Y&Rp85q z9YWjm3!OjKo|$FRc{_bPm#7!IHa&0FnaKMH_|$!cX|1lEuxC%-$X#Am+-xGdw0jjaP6Em_PFQT zHwlk>PUPk8O1Bw6w45&4Hf7Z)srcUdz3aW{Qt*e28aqijQ@lZPM8@Q0jL@uQaIr#a zNy{uWmpU3~FR|;=tl0e9od=}D@c;2F(m7WU|d$GauK8xF9lQWM}{_q+mUyvPK*QCU6 z*iZ$dq9aCeAyxJ@?%qO*cs^0U4D9t1t-8tLGqt#)=c6-zURSU`>qkML^g>jv+*jRc zdfN_1PX)ov!yy*Cy5QSYAx3K3h9XdL>-CB5L!$HWj^puj$^%ob5<@wj^J(!>Ty=)m zN7qEtD&=qF2y01UB~n!`yYOI(EUBMl?OQ(#vkb8_WLm)3EUZV;z+{({^bmbA9B8o0 z@{e*?kv;1K>ABdN0CTuaTP~R-Z#zOIqgScO3z6tzi>fpS?gM@eDF>m{QRYvanJ+6e zL%s|uyY~ahoFo2@a{WjLHm;YM(bb6!Ao<+Ks*tn4gm3nFDT&w@k7KG#ik(Ug)m@*s_dplbWun`| zA%Edx+jLOyoYrTL6@xCA(+LzE&RxnkVh#%LqMOu<&Jc)NLSrrBwMfZApm9s?V);(H zz0!9H0$KE*-HCJyAqA7Cd}ql{JT}{O4NcmXpF|;!d>BA2Av5Txv`j0{=oiXvbbH-k zQ~sgt#5#lBg(6axKysr@E!DZv@T|i=APykEJR%?HOfQKiTnRh~;4*YMTI|*OVvik`kW;dkEDM*bcCb$+m=v)UKiR((rI-tAWxflBE=ASe>da>Ha{( zN~SIVaq5ykQ(!!jk8Kr|$v=|0J16nGN~*%A%Z~a~Ag(_ABWcXq6BoB>l7Ekgz^ z!{dp|GDGTw^5d4#NN;!aVa>UIR#FaJ(jcReI7$snGLLGRATE>03_ey2%Icd^M<3AR zR;K6aUgqQ8o+pcF5HUP_KN@Rg>D$WZ^3}N@EtW2?RL-F7Q~UJfX9pKf>@a%vu!1eu7HnIdUJMg{Bq5# zMaxj}b$h=#RO#VJ{6?x$pR@1<(0WRb1F>w)4YGFu!$n_qHaERud4s1-1(tT9!7KJ> z26^8JXJWiAy!9l;xUTj9xpI$30XWapN+M_+UtxM#gH)eW1U87>^s0*}CD@Z_>W-}# zH4uE%3^G!{5b2IQ0^%yK{`P&ygBx3gO^|7kBKt;td4)-t0PbY$gK7&Utmi%s$dbsF 
ziK6I7)<~A*epO9W-ts;@-Qm_GgjB`ak5wq7k*_;#dGaoar}9>#j+j9D7#3k~h7%E9 zU$c*AoFKMKba};3Zx2Lvvz!u}t$JUcmq!BQT_)NbE6G?#_Hmg#VttITT8}X5n+DMR z_+lCn4gL7r9^Mj$0r4Ob!QrGO^Pr+iBO7W@;USGJ_h&<}7{b297+D}B`KqEBq zy;k(hZI^wvRCzerrugt=ru`LawXvL!nStTQYKIA@I`{K354~aLko(awpgz6l1GLQt z;KKd0IHSX{A&w|brmT;7V;vXB zHVGZ_no8KgYA`$Q89_h1C|t|f1zI*3GlYL<61G8_zmr_ht#$0EekCDP%QIoBOsM3* z4`FhOJ5mAKcB7GMzWyd6>-iic{?cn8C6f<7rmu)~cLK`wzeT_QO)j==DB@O3w zy$HNF=0S=r*sI88e|CKqlv2kg#5_RiLa;LmS2!KLy~LjpG=Ul5Q>J>XNr~zU&Yo z9zK}Qak6ThkxT-c4lLw^!?B}`D4VL`(K|=mf~~iVmL-Y(S^W}xeJy$c)lUIj2(6fj z=TnAjX&~aWpX|jIfj>O6V4M=#Ix3!S4=4C{%6qsaK*N?`MNMTpA1GDcb{hD{c8q&Y zczsiR9sG`_xs)Qk%PNqk;Zn&Kr+!N0^qFhR*}3K%ZrkcQ3QyhFPVY}P_D|!+5k#Y% z1X^-34`vfEro?~;_tbf0S&A)5ND+zU3HP{5h3iJ;;ybB?3Zi2TJL6pGrk8FNpdiTN zsD*zDdm0L0mwra}G=xHL3OjZm^amitZlC>z{ z+S{V*x20Zv2lU>w_@b3xp>nH}8G{Bp2Jb1LvOAg%v@r^1pjf;dGC5$#T8KlfOsgBu zz~@V`^=Oj{McZ%mB63$&!oK)rJ$RI)c@5#-lt_1dnrY%o>gfsTY_kR1ZM6=>V z;InIP9PJ0wN+LEt%}{%DhN}*HD>FO83&&U!@W9Y-6FC46riL}OfQrSGzh znRqY<)ccF5Xk5xj8d!+n!RkO^8hF28kll1{ah)jCbO!qF@!f$fqN7}G8caoXF>zh$ z@2LC4=vT;p6>K-->J-O0kd4huB+?nKI{CFUGCdw{vL$kAGTqz3z+q-%VP&yrrT20^KdW?a z9)fMPO4qa|vR-q?!)wyOdl!J*gv`B|ETXXxl=69l0ELLkR7?aNiWKd8#&=$BmP!hR zE*|PCnko57KVpjEzMhRZ^*G#baq9Ymrkzk2o4fkX<3mg`EI%;!IBMB{v$Jxsb8@iM zMq=!7K-$Y>e_K*t>MagB=J~1$yG!-e2R54 z=AEx){CtZAsJ-bliJ$95d+qnZT`&2o?HbMk7A86m|K(P%tMCogz-pTdkEiDKx;9sm z*bPSv)63Qr*J&EYk1-uzCi{!p!KDZ53~eN-xYsecViaSamxoKLXUx@iS3dL$thSdq zwS(bL7RC+IV6N^90$!gKhoBTKY8Z3xWT!Ee?t^g@m%a~uXO~4mNzY1Z$eSzbh9ob~ zeiIM7{Su{7Zr8-4pcdPsYfEoD#+tx0{FW^B=KHp*YKM^CXi6SK@F8(W`(dVfA~&6F?QLS+ywkn)Wqh&8`I zFtf3~h)KzhW@qa-57B5H%+I|99H2^hxdvr4@WoP3>=f82JQ*CO8_~5xH9XD_-5Wh? 
zWcwV306V} zPyDK>)W!-w;#UuRMtMa)lS2h1r#MX-*Dq;JEh$No#WrM#xp167{bkPk$2F;c=W>aA z3upG<4$_}FudR)VxhbQCqpi(9Ow%9sm81MElPOiDWsqeUmKv2&saBMil%;2s9hLeh zsXVP9PYZAuzKT#}reM8{kP-^uxgQGS(L)h;_CXh)TI z0A}K|a6hNtedh1SOTJLvMU}z)rL57n@inolEHu%AppEE_Ur~u5OrRh>n&6^fL7xiKAD~d zj3h-qn2qxqaaUp_|H?EA1%3XkN%#s5`mu%c049C6E*9zsBYI8DTs!)+z+VVLfDBKR zR5OIAg$56(W{CAEFm|rP+E;I~pWx<4+$Xn``LPJ!K7_{VEW4^N+Mc@DHBFGf1LM9D zj6BTe5m$9BqYXDg%}>Ty;c(PbT+HQ~sF|I5c)mNKbPRXJ*jBL z)ztM6RrFNQ4)>v1u~Wjk;vO9HoFe})ok`|C*L2C=BCh4a+BT0Uh?tg6A-h z9}x3!SlDTn6+OJ2AR1$kAtz!x`)*HCY{p*BuS4OyGM7iONEG zKHm>9*B6kdAe`t$I^_Um)jIV>R~lr2CH&U zx^fGmu^hov=0>%nvwP&EkO-I1Vl+lhw77kROnDdX0sPfa_j&_|QTLw+*C9a=v`5hucXn1`oG%ck3gN#3C6p^`-ih0LwUwh?CeXryP1zl-q$S@n^Es$}%RP#~RDYL3dPy1OIj{7K`+z zcH}-w!1~xdb28;~cM193B~tzIWuIWr1C-zq`k4XC!WncyTLZbq&6V{yju|Cp;Jl&6 z96d;RWdQ2~|E0E!(w+OG)J&GKd%Td&qK3kqs58>JbUJ1F_L{pSmD>yS(QE8ScCx~5Xg`H4BKG>ATVQ`U&i z864Ni0b45hmkf87@t4c&IDuHR##aAJ_enF;n=q;qDT(H1uesvC7`xj`z{8#=d8bk6)mMWw*L9 z(=0EJp>S8bPlPcn4R-CN9`5hxaBRx@E)0fdsHXl_$*ma-szd-UVU zK275xZ*uRaZ}f$PrLJ(#gNz)Pxx~s+C>SX6gS7w64no4klm6=XJzc4=z6K`F2nGnz z;X#`fvQCBt2AyQ?@5-;vmdTo9UM&T`R`21OjOZb+_JC5 z?3Ap!JQWTs(hJ|&gN9sE;50r3qzrW^`W=TbTe zH||ZYND`fBj?0C5hc~ih9Rm6lxr+L;9Zv9tZdclOcnY8eKWN6U&&Zo1puzLoWh1ni zig=W%_DA$};Hcm@T9Azfztwcu6O|KFa3t1Il4D=4RDS^SM|0`t$t3VXU)Ex|S1nHufVg+AS8Hddbs3ul_o3e#Tr7(@)c*>F}q7!l+ zpI5CeC$KF%2#cZrT%Jj#b1z%?jh?m} zk-7e&cW_IaXYoant8ZB^QW(Fnd7ulmhn@C{q_8euEbln%v#nJH^HW|3^-hH$*D+Cx z#NvR_r{db?-bSI5zO*5moVGxJ`wd<*wJ^qK?DZVt--Cs}vRAUdlFSL5Eju9`UU z5MyjH*p#MP*Wx4=zOrF*4wJBuFB^{E^Vd3-;?dgBtC437L^^(58Y5&@i}3LJI+|%B z%Jy^Yb4rD^M&n+e;bfL8sK&c$L$flF0|H@SiUZ<%SQIXw&9}_OBTse1rv0TGIP8?1 z(REtfE0YpX+!8<67bV<~JUU^n)V$V-DRXQqT=u)mDSy8Pk@;q{4ec#cXP{c{0YleO z#Dg+^ciya|64Tj|MXXVQ8(iUy4s#_HU7qDx8-@RS&fQ6Q( z3EqP6kThAH4CY=@v7FYg~?O65jHlYQE18CQOi5AOy;;*4Vg1|3@;waXj zG03wo3#4_3?}cnf5sV7J2D3MKmh$c@SNAC^ndzRdQY5rKDCw{f!&S=OtH(QYI>L%j zRg|#sPrm1mnTrTFH#uLqRR~NVAs_2%hfV9SEF#s2wd*{WExA70S9+;zqn}?AC0wfw 
zA@m8;5{P3jcwoFhHEA>ae*F0~X}&fuf7dbkYp>SRMLGM!=O?&wt8P4sI$WXdavYGP zL&gzsfWm4IZeY3yBga*z-l&rbzIrXV-;1__vUn@cdAU3h90`NT&6@@MY|wX)e;v*8 z`oTCf#?4Pybs+RgzNO~tGWzzqbr@SjXK*}A1dRwkkAc1#3GM!+(GOMzUVF?QCKJ5e zU%srU+eKm8m2E!~NY1ai)$+y_y&+PF^95>9TWD#4sGiydOj(^@V|rN4B1>txQfHQJ z7wzDG-jKL#RGStr$6bGx<4O+cC$h57Q;s;NHQXky9VnS5qE z{u1~+&pO|zm?M4a)b~}p)=V&h-BNk`S6uhaNa-Qon)1p?Ha!6oQt2vCMVR2(mJU(! zkO)$$HJVP`wI!=#CthCdCxkWhe!6KRi4%G_ys%GbAv`<{)CQE4*Gxqv1e50q+^skh zbS0G-LcJen7i6)TAi=J=vwjEY;w|`ufFy$YpO%7f-?S(I@V5Q~$oK#A?Y~xpVE(xx z^l$4yf5rOy=E7gGir$RoKld2kkpAp3{4YrV8p!??=>r_#pYiM)(4Tnr{{!^*XyTth ze+tX@*LVl`x1iz;@jrwVe=YJ4;=g+5zak=H1ODcu-!T7!i~e`af3*UCEjI@5?ZN%k v82s;me;46@y-AY~|NJNZDaqf^{z&q_Jw%wdARGXId;3s80sxp4e}4TRshKzN literal 389 zcmWIWW@Zs#-~hr?#qa$XpkO^C0|PgM0z-0sURq|lURH5_UT6p}1G~=^omdzyt>9*0 zWO>2NzyKz4CmiHDWWdw*zPjt;<}<$RP2sjx0yRyS+}4~6os^^WX!q@$<}!`FwekP# zP2TIA4|Q)?yRZ4yvr~&+GkQ*l>Zr+BwA_8RhvVWs6-k0uvx28Q3=KZE_(5sp_1BDd z9*731-8~?%s5xiP#j|zleG4qx&+au&IGfRXkMWb@nGK)U@`!pb=57wQ3OS$35d6uU z-SgEExx{yKTc^qH?|R+)toZm7u1}JQA9;FYKhEEGA~`uF^SMLv!i9DBQcSa_*6s`N zX6I0P5yY|==zoyk1H2iTM3@obf-DCL7Z_O52x5^CN&()iY#=R+Ko|+6#ekMEFaQ7y Cmx~kt diff --git a/core/src/test/resources/config/cloud/identity.jks b/core/src/test/resources/config/cloud/identity.jks new file mode 100644 index 0000000000000000000000000000000000000000..8aac969133a5c9da7aa5b6b1f9ed25bb6df3650f GIT binary patch literal 2413 zcmY+FX*3iH8^>o~nz5CwG4^Y@*0B@HR`y+E#=hJLWi4xCnUETc5JIwL$&#!I5nWj$ z++mE}*T!h@WM5a6}F3fItX9 zEE$}ZD5+B%p17ZLD5{;J4H@!MsLQpD^!JfaH9a7s*G@<)rLO19(YE2D+vO@Vr=8_{ z&ySN<-x^JrNIueA%`p?Pr-Ens+YUlvDwmGtQ6{b8Vnx>bI8i9#hHsTQ6(*8h(d6A% zKw(`>-qZ+~thE#;Q?)Z6SF3I?D2144t9ONe#jpRB8f5|Ho$xD@4Si1ptZ8`Z_*&RM zLQB9sJr=xqc(F06{ALa^&yp)`CL&UM>ZkQADlbojs!tZy1jJUhM1oa@;k}0AAjX!U z)5dr;#^K5xuG3p3puBza7de?(5}YoRS-JVJIt{_xl{F$#Iiy>1KXS&JLTbMeA<3B$ z6k;`?M@n%t?eCIWxaSs0JcIerf4`=)MH(YEAjr60iNoX!`ldd5(CDc*ylHbuh~q?d ze!OV-J+MRmKqqX6CB}Zi$0**aN*w2c%15?S*Gu4p}Wduag(3v{e;4{i#w_2&i-wCU98Whl$tnTOjaYbmbd zVbS{|BWv4Baq#+X 
z1BXXEeX#y2TjceI(9PvZ#*^6ZS}YBYN>j}Wznz>!f*-9(sUhmO75kyUFMy*JYI8&Cgw=?jHbMPWomK#s>r5XDP;$3pG13*pikB>L>g6=;1x z-2S4C>y`Znse~#OTUmu%joA=lzJpA+vTg&84AuA_feXn{DF_)V{u7J*903^WKRTZR zf(pry6&x9|^naFve_4*LoA&$YwX5~ZatImHbyr@J{=4p|Hg ze)6sl)exm5j}K9`(}*Y|pOP4R&1FeTeKjgrMR$v|(Y?X0(x9)7&5}MS8+H9L#kXq= zy=*Vazt`-P6xxahOGtm^jU>S_&u?O1(&>u&v4uw*P9~H?8H&lpegd=LShhy?!)woy zrfNyzX@b~Oq7yD4!zXZ)#64b)7&-R7x&85eQJwM{2(|Z_PfOABEAzKkf9@%69ma@T zmtVV=)TBi4>9g4J#z0y36{xpCNE_?W>e$WI_8d8*kDZ?D(++L7Ny}7>Zm-LIyVuf& z*tI%W=&4l_;nQ&w{yT>?wm%JbY&kjMr8yUvc>w6;YFyJh5sxYI52kQF=Ss-M&|{Ke zqAxMpI`C(K}TgO)~MnXgV#hsObXme z7BMi&)Uefc_DADpwiMI_9FJ=?tCa6Km?>b(7|~bNwgt4X<}eVSH_u&!dj2O2S^X40zm9i9Db#H**Bv z-BZu3uMmg1Jsrg0heEX;_erQ|ZjHvQcV`)~ zFZw(NudeqzlJ=WY_`tqXI~;^!7Z~4JBz)QmXWE$P7+&qpi#pRJC>>0DlP|1Na;M6P z4WTqz7OTMQDC^Jj`94u<3en=V{^-F3}%M{gKYFBQwf|onZR2xPIe9Oj?qZ+wDTNZ6@ z7vcJDjHvj8vAgy`Mo|U6R{@oYPjrdJYJ5E#?N{h0v>Y|{Ubc&qOVNLSGdz`zTJv0nnO`WO_x%i6 zYT3tmKM#k{x2S%d2@|3w$PW*Yf^i}?9P@pl&Xx7#j{@C0(J9Y{n0Jnw%=%`NxHiXc zV}7{p&Gmr^YhSNpob=xD^Ar?+fVD0L;yaZVQ8I?t*;xZVHzl_)E)YBy16J8g^9Vd# z3R(@?qN;{GLByEyp}~!tXtofSu6pm>^oSUz&xh7KnDM=>ThObP#78`F^~( zP{;e(V>Aj@Y8gNbR_M98b>10t-5W|W7 zw-u3G3fvIR>01vegrc0Uf84ICONKWB<%#upoB~c5hlD|;S-~I-6aeAWpD-(EE>eSb driD1-Km9$tu_xRKK`1TiMy$kb<9@Z{zW{*EV7veT literal 0 HcmV?d00001 diff --git a/core/src/test/resources/config/cloud/trustStore.jks b/core/src/test/resources/config/cloud/trustStore.jks new file mode 100644 index 0000000000000000000000000000000000000000..8c389a5dd00557119a0dbf55f383c04057875832 GIT binary patch literal 956 zcmezO_TO6u1_mY|W(3o0MX9;@C8*41x{04LI4DLs{5_nL^Yb53GWqM@JxKS-2|hr=bYBr&)ovBHqgfCnVR&BN-PlV6%*AScdi zXlY<+WNv6|YGP;^CC+OE;+jCYw6U~_Q3=^|jI0dIO^o~u22G4yOihf842K?hSg%`} zw}<(RS%&xQNN(Mj4|gtmosjsp!I^)B%aP7)ocAUvoOl{*y=sXfLr8+ci7UnS-NKv_ zTjnPgXh&bq`#LR7GyJVhUhk@H`{ue^_T2b)eZ!{X6Oa7AeP3WxOhn}L$d41>1=X7z zd-vMq>}k=1cimVX)z>_}_>V{5xNk~8HhcZO#KqGhYwNdlb}cMP{x?6NI+XeO>&u(; zPk1xT`>|llyvc{sA}jK%mEAJk-w4NZ?D%EM5oT_k6r}ZG(lu8Jr#m~?6>Ai~%byZj 
z_D@|kh<9U+f%x|WKI@wsC#^Q((SI;+@4l_muk~fP%nrV9?0#bD=J(Fa$}QJFeH0qN zv(u@I1X?Rz=hsX1Qm7_;+e> zWQ4pAe6Z!Ee3;Z3`L7|h46nk2X5Q(s z@l$obwD?J}SL-IMZRGk{b@sK3LUFwZYp~zpKjQuAVH-1(4!X=%dn;OdN=YR0_VdWD GrhNbrbz1xY literal 0 HcmV?d00001 diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java new file mode 100644 index 00000000000..c4d2d0161b9 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -0,0 +1,204 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.cloud; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(IsolatedTests.class) +public class CloudIT { + + private static final String BUNDLE_URL_PATH = "/certs/bundles/creds.zip"; + + @ClassRule public static SniProxyRule proxyRule = new SniProxyRule(); + + // Used only to host the secure connect bundle, for tests that require external URLs + @Rule + public WireMockRule wireMockRule = + new WireMockRule(wireMockConfig().dynamicPort().dynamicHttpsPort()); + + @Test + public void should_connect_to_proxy_using_path() { + ResultSet set; + Path bundle = proxyRule.getProxy().getDefaultBundlePath(); + try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_without_credentials() { + ResultSet set; + Path bundle = 
proxyRule.getProxy().getBundleWithoutCredentialsPath(); + try (CqlSession session = + CqlSession.builder() + .withCloudSecureConnectBundle(bundle) + .withAuthCredentials("cassandra", "cassandra") + .build()) { + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_non_normalized_path() { + Path bundle = proxyRule.getProxy().getBundlesRootPath().resolve("../bundles/creds-v1.zip"); + ResultSet set; + try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_input_stream() throws IOException { + InputStream bundle = Files.newInputStream(proxyRule.getProxy().getDefaultBundlePath()); + ResultSet set; + try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_URL() throws IOException { + // given + byte[] bundle = Files.readAllBytes(proxyRule.getProxy().getDefaultBundlePath()); + stubFor( + any(urlEqualTo(BUNDLE_URL_PATH)) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody(bundle))); + URL bundleUrl = + new URL(String.format("http://localhost:%d%s", wireMockRule.port(), BUNDLE_URL_PATH)); + + // when + ResultSet set; + try (CqlSession session = + CqlSession.builder().withCloudSecureConnectBundle(bundleUrl).build()) { + + // then + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_absolute_path_provided_in_the_session_setting() { + // given + String bundle = proxyRule.getProxy().getDefaultBundlePath().toString(); + DriverConfigLoader loader = + 
DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, bundle) + .build(); + // when + ResultSet set; + try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + + // then + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_non_normalized_path_provided_in_the_session_setting() { + // given + String bundle = + proxyRule.getProxy().getBundlesRootPath().resolve("../bundles/creds-v1.zip").toString(); + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, bundle) + .build(); + // when + ResultSet set; + try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + + // then + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void + should_connect_to_proxy_using_url_with_file_protocol_provided_in_the_session_setting() { + // given + String bundle = proxyRule.getProxy().getDefaultBundlePath().toString(); + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, bundle) + .build(); + // when + ResultSet set; + try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + + // then + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } + + @Test + public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the_session_setting() + throws IOException { + // given + byte[] bundle = Files.readAllBytes(proxyRule.getProxy().getDefaultBundlePath()); + stubFor( + any(urlEqualTo(BUNDLE_URL_PATH)) + .willReturn( + aResponse() + .withStatus(200) + .withHeader("Content-Type", "application/octet-stream") + .withBody(bundle))); + String bundleUrl = String.format("http://localhost:%d%s", 
wireMockRule.port(), BUNDLE_URL_PATH); + DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, bundleUrl) + .build(); + // when + ResultSet set; + try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + + // then + set = session.execute("select * from system.local"); + } + assertThat(set).isNotNull(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java deleted file mode 100644 index 2e3af8b4b0c..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/DbaasIT.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.api.core.cloud; - -import static com.datastax.oss.driver.api.core.cloud.SniProxyServer.CERTS_BUNDLE_SUFFIX; -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.any; -import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.cql.ResultSet; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.github.tomakehurst.wiremock.junit.WireMockRule; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Paths; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(IsolatedTests.class) -public class DbaasIT { - - @Rule - public WireMockRule wireMockRule = - new WireMockRule(wireMockConfig().dynamicPort().dynamicHttpsPort()); - - @ClassRule public static SniProxyRule proxyRule = new SniProxyRule(); - - @Test - public void should_connect_to_proxy_using_absolute_path() { - CqlSession session = - CqlSession.builder() - .withCloudSecureConnectBundle(Paths.get(proxyRule.getProxy().getSecureBundlePath())) - .build(); - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_connect_to_proxy_using_relative_path() { - CqlSession session = - CqlSession.builder() - .withCloudSecureConnectBundle( 
- Paths.get(proxyRule.getProxy().getSecureBundleRelativePath())) - .build(); - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_connect_to_proxy_using_file_provided_by_the_http_URL() throws IOException { - // given - stubFor( - any(urlEqualTo(CERTS_BUNDLE_SUFFIX)) - .willReturn( - aResponse() - .withStatus(200) - .withHeader("Content-Type", "application/octet-stream") - .withBody( - Files.readAllBytes( - Paths.get(proxyRule.getProxy().getSecureBundlePath()))))); - - URL configFile = - new URL(String.format("http://localhost:%d%s", wireMockRule.port(), CERTS_BUNDLE_SUFFIX)); - - // when - CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(configFile).build(); - - // then - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_connect_to_proxy_using_absolute_path_provided_in_the_session_setting() { - // given - DriverConfigLoader loader = - DriverConfigLoader.programmaticBuilder() - .withString( - DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, - proxyRule.getProxy().getSecureBundlePath()) - .build(); - // when - CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); - - // then - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_connect_to_proxy_using_relative_path_provided_in_the_session_setting() { - // given - DriverConfigLoader loader = - DriverConfigLoader.programmaticBuilder() - .withString( - DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, - proxyRule.getProxy().getSecureBundleRelativePath()) - .build(); - // when - CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); - - // then - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void 
should_connect_to_proxy_using_url_with_file_protocol_provided_in_the_session_setting() - throws MalformedURLException { - // given - DriverConfigLoader loader = - DriverConfigLoader.programmaticBuilder() - .withString( - DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, - Paths.get(proxyRule.getProxy().getSecureBundlePath()).toUri().toURL().toString()) - .build(); - // when - CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); - - // then - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the_session_setting() - throws IOException { - // given - stubFor( - any(urlEqualTo(CERTS_BUNDLE_SUFFIX)) - .willReturn( - aResponse() - .withStatus(200) - .withHeader("Content-Type", "application/octet-stream") - .withBody( - Files.readAllBytes( - Paths.get(proxyRule.getProxy().getSecureBundlePath()))))); - DriverConfigLoader loader = - DriverConfigLoader.programmaticBuilder() - .withString( - DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, - String.format("http://localhost:%d%s", wireMockRule.port(), CERTS_BUNDLE_SUFFIX)) - .build(); - // when - CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); - - // then - ResultSet set = session.execute("select * from system.local"); - assertThat(set).isNotNull(); - } - - @Test - public void should_not_connect_to_proxy() { - try (CqlSession session = - CqlSession.builder() - .withCloudSecureConnectBundle( - Paths.get(proxyRule.getProxy().getSecureBundleUnreachable())) - .build()) { - fail("Expected an IllegalStateException"); - } catch (IllegalStateException e) { - assertThat(e).hasMessageStartingWith("Unable to construct cloud configuration"); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java index 
3548e945aa1..706f337d39c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyRule.java @@ -18,6 +18,7 @@ import org.junit.rules.ExternalResource; public class SniProxyRule extends ExternalResource { + private final SniProxyServer proxy; public SniProxyRule() { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java index 18abe340edf..af137f2bb70 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/SniProxyServer.java @@ -16,8 +16,9 @@ package com.datastax.oss.driver.api.core.cloud; import java.io.ByteArrayOutputStream; -import java.io.File; import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.concurrent.TimeUnit; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; @@ -30,74 +31,110 @@ import org.slf4j.LoggerFactory; public class SniProxyServer { - private static final Logger logger = LoggerFactory.getLogger(SniProxyServer.class); - static final String CERTS_BUNDLE_SUFFIX = "/certs/bundles/creds-v1.zip"; - private static final String CERTS_BUNDLE_SUFFIX_RELATIVE = - "/certs/bundles/../bundles/creds-v1.zip"; - private final String proxyPath; - private boolean isRunning = false; + + private static final Logger LOG = LoggerFactory.getLogger(SniProxyServer.class); + + private final Path proxyPath; + private final Path bundlesRootPath; + private final Path defaultBundlePath; + private final Path bundleWithoutCredentialsPath; + private final Path bundleWithoutClientCertificatesPath; + private final Path bundleWithInvalidCAPath; + private final Path 
bundleWithUnreachableMetadataServicePath; + + private volatile boolean running = false; public SniProxyServer() { - proxyPath = System.getProperty("proxy.path", "./"); + this(Paths.get(System.getProperty("proxy.path", "./"))); + } + + public SniProxyServer(Path proxyPath) { + this.proxyPath = proxyPath.normalize().toAbsolutePath(); + bundlesRootPath = proxyPath.resolve("certs/bundles/"); + defaultBundlePath = bundlesRootPath.resolve("creds-v1.zip"); + bundleWithoutCredentialsPath = bundlesRootPath.resolve("creds-v1-wo-creds.zip"); + bundleWithoutClientCertificatesPath = bundlesRootPath.resolve("creds-v1-wo-cert.zip"); + bundleWithInvalidCAPath = bundlesRootPath.resolve("creds-v1-invalid-ca.zip"); + bundleWithUnreachableMetadataServicePath = bundlesRootPath.resolve("creds-v1-unreachable.zip"); } public void startProxy() { CommandLine run = CommandLine.parse(proxyPath + "/run.sh"); execute(run); - isRunning = true; + running = true; } public void stopProxy() { - if (isRunning) { + if (running) { CommandLine findImageId = CommandLine.parse("docker ps -a -q --filter ancestor=single_endpoint"); String id = execute(findImageId); CommandLine stop = CommandLine.parse("docker kill " + id); execute(stop); - isRunning = false; + running = false; } } - public boolean isRunning() { - return isRunning; + /** @return The root folder of the SNI proxy server docker image. */ + public Path getProxyPath() { + return proxyPath; + } + + /** + * @return The root folder where secure connect bundles exposed by this SNI proxy for testing + * purposes can be found. + */ + public Path getBundlesRootPath() { + return bundlesRootPath; + } + + /** + * @return The default secure connect bundle. It contains credentials and all certificates + * required to connect. + */ + public Path getDefaultBundlePath() { + return defaultBundlePath; } - public String getSecureBundlePath() { - return proxyPath + CERTS_BUNDLE_SUFFIX; + /** @return A secure connect bundle without credentials in config.json. 
*/ + public Path getBundleWithoutCredentialsPath() { + return bundleWithoutCredentialsPath; } - public String getSecureBundleRelativePath() { - return proxyPath + CERTS_BUNDLE_SUFFIX_RELATIVE; + /** @return A secure connect bundle without client certificates (no identity.jks). */ + public Path getBundleWithoutClientCertificatesPath() { + return bundleWithoutClientCertificatesPath; } - public String getSecureBundleNoCredsPath() { - return proxyPath + "/certs/bundles/creds-v1-wo-creds.zip"; + /** @return A secure connect bundle with an invalid Certificate Authority. */ + public Path getBundleWithInvalidCAPath() { + return bundleWithInvalidCAPath; } - public String getSecureBundleUnreachable() { - return proxyPath + "/certs/bundles/creds-v1-unreachable.zip"; + /** @return A secure connect bundle with an invalid address for the Proxy Metadata Service. */ + public Path getBundleWithUnreachableMetadataServicePath() { + return bundleWithUnreachableMetadataServicePath; } private String execute(CommandLine cli) { - logger.debug("Executing: " + cli); + LOG.debug("Executing: " + cli); ExecuteWatchdog watchDog = new ExecuteWatchdog(TimeUnit.MINUTES.toMillis(10)); ByteArrayOutputStream outStream = new ByteArrayOutputStream(); try (LogOutputStream errStream = new LogOutputStream() { @Override protected void processLine(String line, int logLevel) { - logger.error("sniendpointerr> {}", line); + LOG.error("sniendpointerr> {}", line); } }) { Executor executor = new DefaultExecutor(); ExecuteStreamHandler streamHandler = new PumpStreamHandler(outStream, errStream); executor.setStreamHandler(streamHandler); executor.setWatchdog(watchDog); - executor.setWorkingDirectory(new File(proxyPath)); + executor.setWorkingDirectory(proxyPath.toFile()); int retValue = executor.execute(cli); if (retValue != 0) { - logger.error( - "Non-zero exit code ({}) returned from executing ccm command: {}", retValue, cli); + LOG.error("Non-zero exit code ({}) returned from executing ccm command: {}", 
retValue, cli); } return outStream.toString(); } catch (IOException ex) { From 067f1d97a1892bb78fdd2a68fa473cc54039601a Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Fri, 11 Oct 2019 09:38:19 -0500 Subject: [PATCH 132/979] JAVA-2456: Detect CaaS and change default consistency level (#11) --- .../internal/core/channel/ChannelFactory.java | 48 ++++- .../internal/core/channel/DriverChannel.java | 8 + .../core/channel/ProtocolInitHandler.java | 36 +++- .../config/typesafe/TypesafeDriverConfig.java | 58 +++++- core/src/main/resources/reference.conf | 2 +- .../driver/internal/core/TestResponses.java | 8 + .../ChannelFactoryClusterNameTest.java | 4 + ...ChannelFactoryProtocolNegotiationTest.java | 25 +++ .../ChannelFactorySupportedOptionsTest.java | 66 +++++++ .../core/channel/ChannelFactoryTestBase.java | 16 +- .../core/channel/ProtocolInitHandlerTest.java | 84 +++++++-- ...eSafeDriverConfigOverrideDefaultsTest.java | 177 ++++++++++++++++++ .../driver/core/heartbeat/HeartbeatIT.java | 7 +- pom.xml | 1 - 14 files changed, 512 insertions(+), 28 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactorySupportedOptionsTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypeSafeDriverConfigOverrideDefaultsTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 2dabeb5204d..4ab862785b3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -15,14 +15,17 @@ */ package com.datastax.oss.driver.internal.core.channel; +import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.ProtocolVersion; import 
com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.config.typesafe.TypesafeDriverConfig; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.NettyOptions; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; @@ -33,6 +36,7 @@ import com.datastax.oss.driver.internal.core.protocol.FrameEncoder; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; @@ -40,6 +44,7 @@ import io.netty.channel.ChannelOption; import io.netty.channel.ChannelPipeline; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -54,6 +59,15 @@ public class ChannelFactory { private static final Logger LOG = LoggerFactory.getLogger(ChannelFactory.class); + /** A value for {@link #productType} that indicates that we are connected to Datastax Cloud. */ + private static final String DATASTAX_CLOUD_PRODUCT_TYPE = "DATASTAX_APOLLO"; + + /** + * A value for {@link #productType} that indicates that the server does not report any product + * type. 
+ */ + private static final String UNKNOWN_PRODUCT_TYPE = "UNKNOWN"; + private final String logPrefix; protected final InternalDriverContext context; @@ -62,6 +76,14 @@ public class ChannelFactory { @VisibleForTesting volatile String clusterName; + /** + * The value of the {@code PRODUCT_TYPE} option reported by the first channel we opened, in + * response to a {@code SUPPORTED} request. + * + *

      If the server does not return that option, the value will be {@link #UNKNOWN_PRODUCT_TYPE}. + */ + @VisibleForTesting volatile String productType; + public ChannelFactory(InternalDriverContext context) { this.logPrefix = context.getSessionName(); this.context = context; @@ -166,6 +188,24 @@ private void connect( if (ChannelFactory.this.clusterName == null) { ChannelFactory.this.clusterName = driverChannel.getClusterName(); } + Map> supportedOptions = driverChannel.getOptions(); + if (ChannelFactory.this.productType == null && supportedOptions != null) { + List productTypes = supportedOptions.get("PRODUCT_TYPE"); + String productType = + productTypes != null && !productTypes.isEmpty() + ? productTypes.get(0) + : UNKNOWN_PRODUCT_TYPE; + ChannelFactory.this.productType = productType; + DriverConfig driverConfig = context.getConfig(); + if (driverConfig instanceof TypesafeDriverConfig + && productType.equals(DATASTAX_CLOUD_PRODUCT_TYPE)) { + ((TypesafeDriverConfig) driverConfig) + .overrideDefaults( + ImmutableMap.of( + DefaultDriverOption.REQUEST_CONSISTENCY, + ConsistencyLevel.LOCAL_QUORUM.name())); + } + } resultFuture.complete(driverChannel); } else { Throwable error = connectFuture.cause(); @@ -237,7 +277,13 @@ protected void initChannel(Channel channel) { HeartbeatHandler heartbeatHandler = new HeartbeatHandler(defaultConfig); ProtocolInitHandler initHandler = new ProtocolInitHandler( - context, protocolVersion, clusterName, endPoint, options, heartbeatHandler); + context, + protocolVersion, + clusterName, + endPoint, + options, + heartbeatHandler, + productType == null); ChannelPipeline pipeline = channel.pipeline(); context diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java index 59978777b98..d9ace3bae51 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java @@ -29,6 +29,7 @@ import io.netty.util.concurrent.Promise; import java.net.SocketAddress; import java.nio.ByteBuffer; +import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import net.jcip.annotations.ThreadSafe; @@ -39,7 +40,10 @@ */ @ThreadSafe public class DriverChannel { + static final AttributeKey CLUSTER_NAME_KEY = AttributeKey.newInstance("cluster_name"); + static final AttributeKey>> OPTIONS_KEY = + AttributeKey.newInstance("options"); @SuppressWarnings("RedundantStringConstructorCall") static final Object GRACEFUL_CLOSE_MESSAGE = new String("GRACEFUL_CLOSE_MESSAGE"); @@ -120,6 +124,10 @@ public String getClusterName() { return channel.attr(CLUSTER_NAME_KEY).get(); } + public Map> getOptions() { + return channel.attr(OPTIONS_KEY).get(); + } + /** * @return the number of available stream ids on the channel. This is used to weigh channels in * pools that have a size bigger than 1, in the load balancing policy, and for monitoring diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index c9f1993fc34..b3662ee2419 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -31,7 +31,9 @@ import com.datastax.oss.driver.internal.core.util.concurrent.UncaughtExceptions; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.ProtocolConstants.ErrorCode; import com.datastax.oss.protocol.internal.request.AuthResponse; +import com.datastax.oss.protocol.internal.request.Options; import com.datastax.oss.protocol.internal.request.Query; import 
com.datastax.oss.protocol.internal.request.Register; import com.datastax.oss.protocol.internal.request.Startup; @@ -40,6 +42,7 @@ import com.datastax.oss.protocol.internal.response.Authenticate; import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Ready; +import com.datastax.oss.protocol.internal.response.Supported; import com.datastax.oss.protocol.internal.response.result.Rows; import com.datastax.oss.protocol.internal.response.result.SetKeyspace; import io.netty.channel.ChannelHandlerContext; @@ -69,14 +72,21 @@ class ProtocolInitHandler extends ConnectInitHandler { private final HeartbeatHandler heartbeatHandler; private String logPrefix; private ChannelHandlerContext ctx; + private boolean querySupportedOptions; + /** + * @param querySupportedOptions whether to send OPTIONS as the first message, to request which + * protocol options the channel supports. If this is true, the options will be stored as a + * channel attribute, and exposed via {@link DriverChannel#getOptions()}. 
+ */ ProtocolInitHandler( InternalDriverContext context, ProtocolVersion protocolVersion, String expectedClusterName, EndPoint endPoint, DriverChannelOptions options, - HeartbeatHandler heartbeatHandler) { + HeartbeatHandler heartbeatHandler, + boolean querySupportedOptions) { this.context = context; this.endPoint = endPoint; @@ -89,6 +99,7 @@ class ProtocolInitHandler extends ConnectInitHandler { this.expectedClusterName = expectedClusterName; this.options = options; this.heartbeatHandler = heartbeatHandler; + this.querySupportedOptions = querySupportedOptions; this.logPrefix = options.ownerLogPrefix + "|connecting..."; } @@ -117,6 +128,7 @@ protected boolean setConnectSuccess() { } private enum Step { + OPTIONS, STARTUP, GET_CLUSTER_NAME, SET_KEYSPACE, @@ -133,7 +145,7 @@ private class InitRequest extends ChannelHandlerRequest { InitRequest(ChannelHandlerContext ctx) { super(ctx, timeoutMillis); - this.step = Step.STARTUP; + this.step = querySupportedOptions ? Step.OPTIONS : Step.STARTUP; } @Override @@ -144,6 +156,8 @@ String describe() { @Override Message getRequest() { switch (step) { + case OPTIONS: + return Options.INSTANCE; case STARTUP: return new Startup(context.getStartupOptions()); case GET_CLUSTER_NAME: @@ -167,7 +181,11 @@ void onResponse(Message response) { step, ProtocolUtils.opcodeString(response.opcode)); try { - if (step == Step.STARTUP && response instanceof Ready) { + if (step == Step.OPTIONS && response instanceof Supported) { + channel.attr(DriverChannel.OPTIONS_KEY).set(((Supported) response).options); + step = Step.STARTUP; + send(); + } else if (step == Step.STARTUP && response instanceof Ready) { context.getAuthProvider().ifPresent(provider -> provider.onMissingChallenge(endPoint)); step = Step.GET_CLUSTER_NAME; send(); @@ -265,12 +283,14 @@ void onResponse(Message response) { } else if (response instanceof Error) { Error error = (Error) response; // Testing for a specific string is a tad fragile but Cassandra doesn't give us a more - // 
precise error - // code. + // precise error code. // C* 2.1 reports a server error instead of protocol error, see CASSANDRA-9451. - if (step == Step.STARTUP - && (error.code == ProtocolConstants.ErrorCode.PROTOCOL_ERROR - || error.code == ProtocolConstants.ErrorCode.SERVER_ERROR) + boolean firstRequest = + (step == Step.OPTIONS && querySupportedOptions) || step == Step.STARTUP; + boolean serverOrProtocolError = + error.code == ErrorCode.PROTOCOL_ERROR || error.code == ErrorCode.SERVER_ERROR; + if (firstRequest + && serverOrProtocolError && error.message.contains("Invalid or unsupported protocol version")) { fail( UnsupportedProtocolVersionException.forSingleAttempt( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java index 8ed6b80dfd2..cc3f841436b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java @@ -19,14 +19,20 @@ import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigObject; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigOriginFactory; import com.typesafe.config.ConfigValue; +import com.typesafe.config.ConfigValueFactory; import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.URL; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; import net.jcip.annotations.ThreadSafe; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,14 +41,17 @@ public class TypesafeDriverConfig implements DriverConfig { private static final Logger LOG = LoggerFactory.getLogger(TypesafeDriverConfig.class); + private static final ConfigOrigin DEFAULT_OVERRIDES_ORIGIN = + ConfigOriginFactory.newSimple("default was overridden programmatically"); private final ImmutableMap profiles; // Only used to detect if reload saw any change private volatile Config lastLoadedConfig; + private final Map defaultOverrides = new ConcurrentHashMap<>(); + public TypesafeDriverConfig(Config config) { this.lastLoadedConfig = config; - Map profileConfigs = extractProfiles(config); ImmutableMap.Builder builder = @@ -57,6 +66,7 @@ public TypesafeDriverConfig(Config config) { /** @return whether the configuration changed */ public boolean reload(Config config) { + config = applyDefaultOverrides(config); if (config.equals(lastLoadedConfig)) { return false; } else { @@ -141,4 +151,50 @@ public DriverExecutionProfile getProfile(@NonNull String profileName) { public Map getProfiles() { return profiles; } + + /** + * Replace the given options, only if the original values came from {@code + * reference.conf}: if the option was set explicitly in {@code application.conf}, then the + * override is ignored. + * + *

      The overrides are also taken into account in profiles, and survive reloads. If this method + * is invoked multiple times, the last value for each option will be used. Note that it is + * currently not possible to use {@code null} as a value. + */ + public void overrideDefaults(@NonNull Map overrides) { + defaultOverrides.putAll(overrides); + reload(lastLoadedConfig); + } + + private Config applyDefaultOverrides(Config source) { + Config result = source; + for (Map.Entry entry : defaultOverrides.entrySet()) { + String path = entry.getKey().getPath(); + Object value = entry.getValue(); + if (isDefault(source, path)) { + LOG.debug("Replacing default value for {} by {}", path, value); + result = + result.withValue( + path, ConfigValueFactory.fromAnyRef(value).withOrigin(DEFAULT_OVERRIDES_ORIGIN)); + } else { + LOG.debug( + "Ignoring default override for {} because the user has overridden the value", path); + } + } + return result; + } + + // Whether the value in the given path comes from the reference.conf in the driver JAR. + private static boolean isDefault(Config config, String path) { + if (!config.hasPath(path)) { + return false; + } + ConfigOrigin origin = config.getValue(path).origin(); + if (origin.equals(DEFAULT_OVERRIDES_ORIGIN)) { + // Same default was overridden twice, should use the last value + return true; + } + URL url = origin.url(); + return url != null && url.toString().endsWith("reference.conf"); + } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 43a317662c3..b50a5193dc6 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -194,7 +194,7 @@ datastax-java-driver { # This setting must be a valid URL. # If the protocol is not specified, it is implicitly assumed to be the `file://` protocol, # in which case the value is expected to be a valid path on the local filesystem. 
- # For example, `/a/path/to/bundle` will be interpreted as `file:///a/path/to/bunde`. + # For example, `/a/path/to/bundle` will be interpreted as `file:/a/path/to/bunde`. # If the protocol is provided explicitly, then the value will be used as is. # # Required: no diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/TestResponses.java b/core/src/test/java/com/datastax/oss/driver/internal/core/TestResponses.java index ecb84c0aced..0b22b1aa067 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/TestResponses.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/TestResponses.java @@ -17,8 +17,10 @@ import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.Supported; import com.datastax.oss.protocol.internal.response.result.ColumnSpec; import com.datastax.oss.protocol.internal.response.result.DefaultRows; import com.datastax.oss.protocol.internal.response.result.RawType; @@ -26,6 +28,7 @@ import com.datastax.oss.protocol.internal.response.result.RowsMetadata; import java.nio.ByteBuffer; import java.util.List; +import java.util.Map; import java.util.Queue; public class TestResponses { @@ -43,4 +46,9 @@ public static Rows clusterNameResponse(String actualClusterName) { data.add(Lists.newArrayList(ByteBuffer.wrap(actualClusterName.getBytes(Charsets.UTF_8)))); return new DefaultRows(metadata, data); } + + public static Supported supportedResponse(String key, String value) { + Map> options = ImmutableMap.of(key, ImmutableList.of(value)); + return new Supported(options); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java index f61b0501c61..5fc9dcea1e6 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java @@ -41,6 +41,8 @@ public void should_set_cluster_name_from_first_connection() { factory.connect( SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); + writeInboundFrame( + readOutboundFrame(), TestResponses.supportedResponse("mock_key", "mock_value")); writeInboundFrame(readOutboundFrame(), new Ready()); writeInboundFrame(readOutboundFrame(), TestResponses.clusterNameResponse("mockClusterName")); @@ -61,6 +63,8 @@ public void should_check_cluster_name_for_next_connections() throws Throwable { factory.connect( SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); // open a first connection that will define the cluster name + writeInboundFrame( + readOutboundFrame(), TestResponses.supportedResponse("mock_key", "mock_value")); writeInboundFrame(readOutboundFrame(), new Ready()); writeInboundFrame(readOutboundFrame(), TestResponses.clusterNameResponse("mockClusterName")); assertThatStage(channelFuture).isSuccess(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java index 500c665cdd7..189561c161b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; import com.datastax.oss.protocol.internal.Frame; import 
com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.request.Options; import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Ready; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -72,6 +73,10 @@ public void should_fail_if_version_specified_and_not_supported_by_server(int err SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode()); // Server does not support v4 writeInboundFrame( @@ -102,6 +107,10 @@ public void should_succeed_if_version_not_specified_and_server_supports_latest_s SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode()); writeInboundFrame(requestFrame, new Ready()); @@ -130,6 +139,10 @@ public void should_negotiate_if_version_not_specified_and_server_supports_legacy SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode()); // Server does not support v4 writeInboundFrame( @@ -137,6 +150,10 @@ public void 
should_negotiate_if_version_not_specified_and_server_supports_legacy // Then // Factory should initialize a new connection, that retries with the lower version + requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V3.getCode()); writeInboundFrame(requestFrame, new Ready()); @@ -165,12 +182,20 @@ public void should_fail_if_negotiation_finds_no_matching_version(int errorCode) SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V4.getCode()); // Server does not support v4 writeInboundFrame( requestFrame, new Error(errorCode, "Invalid or unsupported protocol version")); // Client retries with v3 + requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + requestFrame = readOutboundFrame(); assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V3.getCode()); // Server does not support v3 diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactorySupportedOptionsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactorySupportedOptionsTest.java new file mode 100644 index 00000000000..81246d782a1 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactorySupportedOptionsTest.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.channel; + +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.internal.core.TestResponses; +import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; +import com.datastax.oss.protocol.internal.response.Ready; +import java.util.concurrent.CompletionStage; +import org.junit.Test; + +public class ChannelFactorySupportedOptionsTest extends ChannelFactoryTestBase { + + @Test + public void should_query_supported_options_on_first_channel() throws Throwable { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(false); + when(protocolVersionRegistry.highestNonBeta()).thenReturn(DefaultProtocolVersion.V4); + ChannelFactory factory = newChannelFactory(); + + // When + CompletionStage channelFuture1 = + factory.connect( + SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); + writeInboundFrame( + readOutboundFrame(), TestResponses.supportedResponse("mock_key", "mock_value")); + writeInboundFrame(readOutboundFrame(), new Ready()); + writeInboundFrame(readOutboundFrame(), 
TestResponses.clusterNameResponse("mockClusterName")); + + // Then + assertThatStage(channelFuture1).isSuccess(); + DriverChannel channel1 = channelFuture1.toCompletableFuture().get(); + assertThat(channel1.getOptions()).containsKey("mock_key"); + assertThat(channel1.getOptions().get("mock_key")).containsOnly("mock_value"); + + // When + CompletionStage channelFuture2 = + factory.connect( + SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); + writeInboundFrame(readOutboundFrame(), new Ready()); + writeInboundFrame(readOutboundFrame(), TestResponses.clusterNameResponse("mockClusterName")); + + // Then + assertThatStage(channelFuture2).isSuccess(); + DriverChannel channel2 = channelFuture2.toCompletableFuture().get(); + assertThat(channel2.getOptions()).isNull(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java index afcb507bfad..8508fbae46b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.channel; import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.when; @@ -35,6 +36,8 @@ import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.FrameCodec; import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Options; +import com.datastax.oss.protocol.internal.request.Startup; import com.datastax.oss.protocol.internal.response.Ready; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import io.netty.bootstrap.ServerBootstrap; @@ -200,6 +203,11 
@@ private void writeInboundFrame(Frame requestFrame, Message response, int protoco */ protected void completeSimpleChannelInit() { Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Startup.class); writeInboundFrame(requestFrame, new Ready()); requestFrame = readOutboundFrame(); @@ -252,7 +260,13 @@ protected void initChannel(Channel channel) throws Exception { HeartbeatHandler heartbeatHandler = new HeartbeatHandler(defaultProfile); ProtocolInitHandler initHandler = new ProtocolInitHandler( - context, protocolVersion, clusterName, endPoint, options, heartbeatHandler); + context, + protocolVersion, + clusterName, + endPoint, + options, + heartbeatHandler, + productType == null); channel.pipeline().addLast("inflight", inFlightHandler).addLast("init", initHandler); } catch (Throwable t) { resultFuture.completeExceptionally(t); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java index 5b134f9bc26..9a2ff781b23 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java @@ -38,6 +38,7 @@ import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.request.AuthResponse; +import com.datastax.oss.protocol.internal.request.Options; import com.datastax.oss.protocol.internal.request.Query; import com.datastax.oss.protocol.internal.request.Register; import com.datastax.oss.protocol.internal.request.Startup; @@ -52,6 +53,7 @@ import java.net.InetSocketAddress; 
import java.time.Duration; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; import org.junit.Before; @@ -115,7 +117,8 @@ public void should_initialize() { null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -136,6 +139,52 @@ public void should_initialize() { assertThat(connectFuture).isSuccess(); } + @Test + public void should_query_supported_options() { + channel + .pipeline() + .addLast( + "init", + new ProtocolInitHandler( + internalDriverContext, + DefaultProtocolVersion.V4, + null, + END_POINT, + DriverChannelOptions.DEFAULT, + heartbeatHandler, + true)); + + ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); + + // It should send an OPTIONS message + Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + assertThat(connectFuture).isNotDone(); + + // Simulate the SUPPORTED response + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + Map> supportedOptions = channel.attr(DriverChannel.OPTIONS_KEY).get(); + assertThat(supportedOptions).containsKey("mock_key"); + assertThat(supportedOptions.get("mock_key")).containsOnly("mock_value"); + + // It should send a STARTUP message + requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Startup.class); + assertThat(connectFuture).isNotDone(); + + // Simulate a READY response + writeInboundFrame(buildInboundFrame(requestFrame, new Ready())); + + // Simulate the cluster name check + requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Query.class); + writeInboundFrame(requestFrame, TestResponses.clusterNameResponse("someClusterName")); + + // Init should complete + assertThat(connectFuture).isSuccess(); + } + @Test public void 
should_add_heartbeat_handler_to_pipeline_on_success() { ProtocolInitHandler protocolInitHandler = @@ -145,7 +194,8 @@ public void should_add_heartbeat_handler_to_pipeline_on_success() { null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler); + heartbeatHandler, + false); channel.pipeline().addLast("init", protocolInitHandler); @@ -188,7 +238,8 @@ public void should_fail_to_initialize_if_init_query_times_out() throws Interrupt null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -213,7 +264,8 @@ public void should_initialize_with_authentication() { null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); String serverAuthenticator = "mockServerAuthenticator"; AuthProvider authProvider = mock(AuthProvider.class); @@ -277,7 +329,8 @@ public void should_invoke_auth_provider_when_server_does_not_send_challenge() { null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); AuthProvider authProvider = mock(AuthProvider.class); when(internalDriverContext.getAuthProvider()).thenReturn(Optional.of(authProvider)); @@ -310,7 +363,8 @@ public void should_fail_to_initialize_if_server_sends_auth_error() throws Throwa null, END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); String serverAuthenticator = "mockServerAuthenticator"; AuthProvider authProvider = mock(AuthProvider.class); @@ -356,7 +410,8 @@ public void should_check_cluster_name_if_provided() { "expectedClusterName", END_POINT, DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -386,7 +441,8 @@ public void should_fail_to_initialize_if_cluster_name_does_not_match() throws Th "expectedClusterName", END_POINT, 
DriverChannelOptions.DEFAULT, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -419,7 +475,8 @@ public void should_initialize_with_keyspace() { null, END_POINT, options, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -450,7 +507,8 @@ public void should_initialize_with_events() { null, END_POINT, driverChannelOptions, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -484,7 +542,8 @@ public void should_initialize_with_keyspace_and_events() { null, END_POINT, driverChannelOptions, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); @@ -518,7 +577,8 @@ public void should_fail_to_initialize_if_keyspace_is_invalid() { null, END_POINT, driverChannelOptions, - heartbeatHandler)); + heartbeatHandler, + false)); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypeSafeDriverConfigOverrideDefaultsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypeSafeDriverConfigOverrideDefaultsTest.java new file mode 100644 index 00000000000..9541bae4bf7 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypeSafeDriverConfigOverrideDefaultsTest.java @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.typesafe; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import java.time.Duration; +import java.util.Map; +import org.junit.Test; + +/** Focuses on {@link TypesafeDriverConfig#overrideDefaults(Map)}. 
*/ +public class TypeSafeDriverConfigOverrideDefaultsTest { + + @Test + public void should_replace_if_value_comes_from_reference() { + // Given + TypesafeDriverConfig config = config(""); + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); + + // When + config.overrideDefaults( + ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM")); + + // Then + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_QUORUM"); + } + + @Test + public void should_replace_multiple_times() { + // Given + TypesafeDriverConfig config = config(""); + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); + + // When + config.overrideDefaults( + ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM")); + config.overrideDefaults(ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "TWO")); + + // Then + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("TWO"); + } + + @Test + public void should_not_replace_if_overridden_from_application() { + // Given + TypesafeDriverConfig config = + config("datastax-java-driver.basic.request.consistency = LOCAL_ONE"); + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); + + // When + config.overrideDefaults( + ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM")); + + // Then + // not replaced because it was set explictly in application.conf + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); + } + + @Test + public void should_handle_reloads() { + // Given + TypesafeDriverConfig config = config(""); + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); + + // When + 
config.overrideDefaults( + ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM")); + reload(config, ""); + + // Then + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_QUORUM"); + + // When + reload(config, "datastax-java-driver.basic.request.consistency = ONE"); + + // Then + // overridden default not used anymore if the reload detected a user change + assertThat(config.getDefaultProfile().getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("ONE"); + } + + @Test + public void should_ignore_non_existent_option() { + // Given + TypesafeDriverConfig config = config(""); + DriverOption nonExistent = () -> "non existent"; + + // When + config.overrideDefaults(ImmutableMap.of(nonExistent, "IRRELEVANT")); + + // Then + assertThat(config.getDefaultProfile().isDefined(nonExistent)).isFalse(); + } + + @Test + public void should_handle_profiles() { + // Given + TypesafeDriverConfig config = + config( + "datastax-java-driver.profiles.profile1.basic.request.consistency = TWO\n" + + "datastax-java-driver.profiles.profile2.basic.request.timeout = 5 seconds"); + DriverExecutionProfile profile1 = config.getProfile("profile1"); + DriverExecutionProfile profile2 = config.getProfile("profile2"); + DriverExecutionProfile derivedProfile21 = + profile2.withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(10)); + DriverExecutionProfile derivedProfile22 = + profile2.withString(DefaultDriverOption.REQUEST_CONSISTENCY, "QUORUM"); + assertThat(profile1.getString(DefaultDriverOption.REQUEST_CONSISTENCY)).isEqualTo("TWO"); + assertThat(profile2.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); // inherited from default profile in reference.conf + assertThat(derivedProfile21.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_ONE"); // inherited from default profile in reference.conf + 
assertThat(derivedProfile22.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("QUORUM"); // overridden programmatically + + // When + config.overrideDefaults( + ImmutableMap.of(DefaultDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM")); + + // Then + // Unaffected because it was set manually in application.conf: + assertThat(profile1.getString(DefaultDriverOption.REQUEST_CONSISTENCY)).isEqualTo("TWO"); + // Affected because it was using the default from reference.conf: + assertThat(profile2.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_QUORUM"); + // Same: + assertThat(derivedProfile21.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("LOCAL_QUORUM"); + // Unaffected because it was overridden programmatically: + assertThat(derivedProfile22.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo("QUORUM"); + } + + // Builds a config based on reference.conf + the given application.conf overrides + private TypesafeDriverConfig config(String application) { + return new TypesafeDriverConfig(rawConfig(application)); + } + + private boolean reload(TypesafeDriverConfig config, String newApplication) { + return config.reload(rawConfig(newApplication)); + } + + private Config rawConfig(String application) { + ConfigFactory.invalidateCaches(); + return ConfigFactory.parseString(application) + .withFallback(ConfigFactory.defaultReference()) + .resolve() + .getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java index ea3ebf661ae..34c04dc8f4a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java @@ -107,11 +107,12 @@ public void 
should_not_send_heartbeat_during_protocol_initialization() { // Try to create a session. Note that the init query timeout is twice the heartbeat interval, so // we're sure that at least one heartbeat would be sent if it was not properly disabled during // init. - try (CqlSession session = newSession()) { + try (CqlSession ignored = newSession()) { fail("Expected session creation to fail"); } catch (Exception expected) { - // no heartbeats should have been sent while protocol was initializing. - assertThat(getHeartbeatsForNode()).isEmpty(); + // no heartbeats should have been sent while protocol was initializing, but one OPTIONS + // message is expected to be sent as part of the initialization process. + assertThat(getHeartbeatsForNode()).hasSize(1); } } diff --git a/pom.xml b/pom.xml index 5de5c9cee07..1dc9e80ab83 100644 --- a/pom.xml +++ b/pom.xml @@ -270,7 +270,6 @@ com.github.tomakehurst wiremock 2.25.0 - test From 563289f7fa89a4e0ea703cca57d427c2ca95602d Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Fri, 18 Oct 2019 15:08:04 -0500 Subject: [PATCH 133/979] JAVA-2484: Add errors for cloud misconfiguration (#20) * JAVA-2484: Add errors for cloud misconfiguration --- changelog/README.md | 1 + .../api/core/session/SessionBuilder.java | 8 ++++++-- .../oss/driver/api/core/cloud/CloudIT.java | 18 ++++++++++++++++++ 3 files changed, 25 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 76f6456142a..d60166ad63a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -6,6 +6,7 @@ - [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream - [new feature] JAVA-2478: Allow to provide the secure bundle via URL +- [improvement] JAVA-2484: Add errors for cloud misconfiguration ### 4.3.0 (in progress) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index a59a3269f16..3f3a2251474 
100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -516,7 +516,13 @@ protected final CompletionStage buildDefaultSessionAsync() { cloudConfigInputStream = () -> getURL(configUrlString).openStream(); } } + List configContactPoints = + defaultConfig.getStringList(DefaultDriverOption.CONTACT_POINTS, Collections.emptyList()); if (cloudConfigInputStream != null) { + if (!programmaticContactPoints.isEmpty() || !configContactPoints.isEmpty()) { + throw new IllegalStateException( + "Can't use withCloudSecureConnectBundle and addContactPoint(s). They are mutually exclusive."); + } CloudConfig cloudConfig = new CloudConfigFactory().createCloudConfig(cloudConfigInputStream.call()); addContactEndPoints(cloudConfig.getEndPoints()); @@ -528,8 +534,6 @@ protected final CompletionStage buildDefaultSessionAsync() { } } - List configContactPoints = - defaultConfig.getStringList(DefaultDriverOption.CONTACT_POINTS, Collections.emptyList()); boolean resolveAddresses = defaultConfig.getBoolean(DefaultDriverOption.RESOLVE_CONTACT_POINTS, true); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java index c4d2d0161b9..37bd772360b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -21,8 +21,10 @@ import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; import 
com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ -30,6 +32,7 @@ import com.github.tomakehurst.wiremock.junit.WireMockRule; import java.io.IOException; import java.io.InputStream; +import java.net.InetSocketAddress; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -201,4 +204,19 @@ public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the } assertThat(set).isNotNull(); } + + @Test + public void should_error_when_contact_points_and_secure_bundle_used() { + // given + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + CqlSessionBuilder builder = + CqlSession.builder() + .withCloudSecureConnectBundle(bundle) + .addContactPoint(new InetSocketAddress("127.0.0.1", 9042)) + .withAuthCredentials("cassandra", "cassandra"); + assertThatThrownBy(() -> builder.build()) + .isInstanceOf(IllegalStateException.class) + .hasMessage( + "Can't use withCloudSecureConnectBundle and addContactPoint(s). 
They are mutually exclusive."); + } } From 29898be4014d222b213badc7b1d4ed081a7d53f9 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Mon, 21 Oct 2019 06:27:09 -0500 Subject: [PATCH 134/979] JAVA-2503: Migrate Cloud "getting started" page to driver manual (#17) --- changelog/README.md | 1 + .../apollo/ApolloReadCassandraVersion.java | 77 +++++++++++ manual/.nav | 3 +- manual/README.md | 3 +- manual/cloud/README.md | 123 ++++++++++++++++++ 5 files changed, 205 insertions(+), 2 deletions(-) create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java create mode 100644 manual/cloud/README.md diff --git a/changelog/README.md b/changelog/README.md index d60166ad63a..4d27b1f9ace 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### DataStax Cloud (in progress) +- [documentation] JAVA-2503: Migrate Cloud "getting started" page to driver manual - [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream - [new feature] JAVA-2478: Allow to provide the secure bundle via URL - [improvement] JAVA-2484: Add errors for cloud misconfiguration diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java b/examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java new file mode 100644 index 00000000000..0124a5e95ee --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.apollo; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import java.nio.file.Paths; + +/** + * Connects to a DataStax Apollo cluster and extracts basic information from it. + * + *

      Preconditions: + * + *

        + *
      • A DataStax Apollo cluster is running and accessible. + *
      • A DataStax Apollo secure connect bundle for the running cluster. + *
      + * + *

      Side effects: none. + * + * @see + * Creating an Apollo Database + * @see + * Providing access to Apollo databases + * @see + * Obtaining Apollo secure connect bundle + * @see Java driver online + * manual + */ +public class ApolloReadCassandraVersion { + + public static void main(String[] args) { + + // The Session is what you use to execute queries. It is thread-safe and should be + // reused. + try (CqlSession session = + CqlSession.builder() + // Change the path here to the secure connect bundle location (see javadocs above) + .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) + // Change the user_name and password here for the Apollo instance + .withAuthCredentials("user_name", "password") + // Uncomment the next line to use a specific keyspace + // .withKeyspace("keyspace_name") + .build()) { + + // We use execute to send a query to Cassandra. This returns a ResultSet, which + // is essentially a collection of Row objects. + ResultSet rs = session.execute("select release_version from system.local"); + // Extract the first row (which is the only one in this case). + Row row = rs.one(); + + // Extract the value of the first (and only) column from the row. + assert row != null; + String releaseVersion = row.getString("release_version"); + System.out.printf("Cassandra version is: %s%n", releaseVersion); + } + // The try-with-resources block automatically close the session after we’re done with it. + // This step is important because it frees underlying resources (TCP connections, thread + // pools...). In a real application, you would typically do this at shutdown + // (for example, when undeploying your webapp). 
+ } +} diff --git a/manual/.nav b/manual/.nav index f99a21741c8..89d20c57055 100644 --- a/manual/.nav +++ b/manual/.nav @@ -3,4 +3,5 @@ query_builder mapper api_conventions case_sensitivity -osgi \ No newline at end of file +osgi +cloud \ No newline at end of file diff --git a/manual/README.md b/manual/README.md index d4ed76039ce..4938dbcf541 100644 --- a/manual/README.md +++ b/manual/README.md @@ -11,4 +11,5 @@ Common topics: * [API conventions](api_conventions/) * [Case sensitivity](case_sensitivity/) -* [OSGi](osgi/) \ No newline at end of file +* [OSGi](osgi/) +* [Cloud](cloud/) \ No newline at end of file diff --git a/manual/cloud/README.md b/manual/cloud/README.md new file mode 100644 index 00000000000..cd016363116 --- /dev/null +++ b/manual/cloud/README.md @@ -0,0 +1,123 @@ +## Connecting to Apollo (Cloud) + +Using the DataStax Java Driver to connect to a DataStax Apollo database is almost identical to using +the driver to connect to any normal Apache Cassandra® database. The only differences are in how the +driver is configured in an application and that you will need to obtain a `secure connect bundle`. + +The following is a Quick Start guide to writing a simple application that can connect to an Apollo +database. + + **Tip**: DataStax recommends using the DataStax Java Driver for Apache Cassandra. You can also + use the DataStax Enterprise (DSE) Java Driver, which exposes the same API for connecting to + Cassandra databases. + +### Prerequisites + +1. [Download][Download Maven] and [install][Install Maven] Maven. +1. Create an Apollo database on [GCP][Create an Apollo database - GCP] or + [AWS][Create an Apollo database - AWS]; alternatively, have a team member provide access to their + Apollo database (instructions for [GCP][Access an Apollo database - GCP] and + [AWS][Access an Apollo database - AWS]) to obtain database connection details. +1. 
Download the secure connect bundle (instructions for + [GCP][Download the secure connect bundle - GCP] and + [AWS][Download the secure connect bundle - AWS]) to obtain connection credentials for your + database. + +### Procedure + +1. Edit the `pom.xml` file at the root of your and according to this [Example pom.xml file]. + +1. Initialize the DataStax Java Driver. + + a. Create a `ConnectDatabase.java` file in the `/src/main/java` directory for your Java project. + + ```sh + $ cd javaProject/src/main/java + ``` + ```sh + $ touch ConnectDatabase.java + ``` + + b. Copy the following code for your DataStax Driver into the `ConnectDatabase.java` file. + The following example implements a `ConnectDatabase` class to connect to your Apollo database, + runs a CQL query, and prints the output to the console. + + **Note:** With the `CqlSession.builder()` object, make sure to set the path to the secure + connect bundle for your Apollo database (**"/path/to/secure-connect-database_name.zip"**) in + the `withCloudSecureConnectBundle()` method as shown in the following example. + If converting from using the open source Cassandra Java Driver to the DSE Java Driver, ensure + that you change `CqlSession` to `DseSession`. 
+ * DataStax Java Driver for Apache Cassandra 4.x (recommended) + + ```java + import com.datastax.oss.driver.api.core.CqlSession; + import com.datastax.oss.driver.api.core.cql.ResultSet; + import com.datastax.oss.driver.api.core.cql.Row; + import java.nio.file.Paths; + + public class ConnectDatabase { + + public static void main(String[] args) { + // Create the CqlSession object: + try (CqlSession session = CqlSession.builder() + // make sure you change the path to the secure connect bundle below + .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) + .withAuthCredentials("user_name","password") + .withKeyspace("keyspace_name") + .build()) { + // Select the release_version from the system.local table: + ResultSet rs = session.execute("select release_version from system.local"); + Row row = rs.one(); + //Print the results of the CQL query to the console: + if (row != null) { + System.out.println(row.getString("release_version")); + } else { + System.out.println("An error occurred."); + } + } + } + } + ``` + * DSE Java 2.x + + ```java + import com.datastax.dse.driver.api.core.DseSession; + import com.datastax.oss.driver.api.core.cql.ResultSet; + import com.datastax.oss.driver.api.core.cql.Row; + import java.nio.file.Paths; + + public class ConnectDatabase { + + public static void main(String[] args) { + // Create the DseSession object: + try (DseSession session = DseSession.builder() + // make sure you change the path to the secure connect bundle below + .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) + .withAuthCredentials("user_name","password") + .withKeyspace("keyspace_name") + .build()) { + // Select the release_version from the system.local table: + ResultSet rs = session.execute("select release_version from system.local"); + Row row = rs.one(); + //Print the results of the CQL query to the console: + if (row != null) { + System.out.println(row.getString("release_version")); + } else { + 
System.out.println("An error occurred."); + } + } + } + } + ``` + + c. Save and close the ConnectDatabase.java file. + +[Download Maven]: https://maven.apache.org/download.cgi +[Install Maven]: https://maven.apache.org/install.html +[Create an Apollo database - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudGettingStarted.html#dscloudCreateCluster +[Create an Apollo database - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudGettingStarted.html#dscloudCreateCluster +[Access an Apollo database - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudShareClusterDetails.html +[Access an Apollo database - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudShareClusterDetails.html +[Download the secure connect bundle - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudObtainingCredentials.html +[Download the secure connect bundle - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudObtainingCredentials.html +[Example pom.xml file]: ../core/integration/#minimal-project-structure From 97a161d567b0001b8a8cd636d40f5cb5c9eaa3bb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 15:17:46 +0300 Subject: [PATCH 135/979] Update changelog after Cloud API merge --- changelog/README.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 4d27b1f9ace..59b155f7ed1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,15 +2,12 @@ -### DataStax Cloud (in progress) +### 4.3.0 (in progress) - [documentation] JAVA-2503: Migrate Cloud "getting started" page to driver manual +- [improvement] JAVA-2484: Add errors for cloud misconfiguration - [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream - [new feature] JAVA-2478: Allow to provide the secure bundle via URL -- [improvement] JAVA-2484: Add errors for cloud misconfiguration - -### 4.3.0 (in progress) - - [improvement] JAVA-2407: Improve handling of 
logback configuration files in IDEs - [improvement] JAVA-2434: Add support for custom cipher suites and host name validation to ProgrammaticSslEngineFactory - [improvement] JAVA-2480: Upgrade Jackson to 2.10.0 From 507fedb1cd5985509561a9698206136cc996a386 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 15:47:06 +0300 Subject: [PATCH 136/979] Add missing entry to changelog --- changelog/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/changelog/README.md b/changelog/README.md index 59b155f7ed1..75ad80af45a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ - [improvement] JAVA-2484: Add errors for cloud misconfiguration - [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream - [new feature] JAVA-2478: Allow to provide the secure bundle via URL +- [new feature] JAVA-2356: Support for DataStax Cloud API - [improvement] JAVA-2407: Improve handling of logback configuration files in IDEs - [improvement] JAVA-2434: Add support for custom cipher suites and host name validation to ProgrammaticSslEngineFactory - [improvement] JAVA-2480: Upgrade Jackson to 2.10.0 From bc74cd2036552ccaba1af6830bb378150d83891a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 21 Oct 2019 15:58:04 +0300 Subject: [PATCH 137/979] Fix failing Json tests (JAVA-2480 follow-up) --- core/pom.xml | 4 ++++ integration-tests/pom.xml | 10 ++++++++++ pom.xml | 5 +++++ 3 files changed, 19 insertions(+) diff --git a/core/pom.xml b/core/pom.xml index 65642fde00f..9858f504f88 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -91,6 +91,10 @@ com.github.spotbugs spotbugs-annotations + + com.fasterxml.jackson.core + jackson-core + com.fasterxml.jackson.core jackson-databind diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a2393914d0f..0d2dcd5c5d7 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -67,6 +67,16 @@ spotbugs-annotations test + + com.fasterxml.jackson.core + jackson-core 
+ test + + + com.fasterxml.jackson.core + jackson-databind + test + com.tngtech.java junit-dataprovider diff --git a/pom.xml b/pom.xml index 1dc9e80ab83..7deb0527a11 100644 --- a/pom.xml +++ b/pom.xml @@ -256,6 +256,11 @@ javax.annotation-api 1.2 + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + com.fasterxml.jackson.core jackson-databind From e7be2e9cdc060c9a41c2afe8ea76ab7fbf948de1 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Tue, 22 Oct 2019 00:00:12 -0700 Subject: [PATCH 138/979] JAVA-2428: Add developer docs (#1339) --- changelog/README.md | 1 + .../internal/core/metadata/NodeInfo.java | 19 +- .../core/metadata/TopologyMonitor.java | 5 +- manual/.nav | 3 +- manual/README.md | 2 + manual/core/ssl/README.md | 26 +- manual/developer/.nav | 5 + manual/developer/README.md | 19 ++ manual/developer/admin/README.md | 323 ++++++++++++++++++ manual/developer/common/.nav | 3 + manual/developer/common/README.md | 11 + manual/developer/common/concurrency/README.md | 117 +++++++ manual/developer/common/context/README.md | 122 +++++++ manual/developer/common/event_bus/README.md | 43 +++ manual/developer/native_protocol/README.md | 180 ++++++++++ manual/developer/netty_pipeline/README.md | 161 +++++++++ manual/developer/request_execution/README.md | 314 +++++++++++++++++ 17 files changed, 1330 insertions(+), 24 deletions(-) create mode 100644 manual/developer/.nav create mode 100644 manual/developer/README.md create mode 100644 manual/developer/admin/README.md create mode 100644 manual/developer/common/.nav create mode 100644 manual/developer/common/README.md create mode 100644 manual/developer/common/concurrency/README.md create mode 100644 manual/developer/common/context/README.md create mode 100644 manual/developer/common/event_bus/README.md create mode 100644 manual/developer/native_protocol/README.md create mode 100644 manual/developer/netty_pipeline/README.md create mode 100644 manual/developer/request_execution/README.md diff --git 
a/changelog/README.md b/changelog/README.md index 75ad80af45a..a897d887550 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2428: Add developer docs - [documentation] JAVA-2503: Migrate Cloud "getting started" page to driver manual - [improvement] JAVA-2484: Add errors for cloud misconfiguration - [improvement] JAVA-2490: Allow to read the secure bundle from an InputStream diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeInfo.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeInfo.java index 20aac59941a..8bafcfe52f9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeInfo.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeInfo.java @@ -35,7 +35,12 @@ */ public interface NodeInfo { - /** The endpoint that the driver will use to connect to the node. */ + /** + * The endpoint that the driver will use to connect to the node. + * + *

      This information is required; the driver will not function properly if this method returns + * {@code null}. + */ @NonNull EndPoint getEndPoint(); @@ -124,6 +129,9 @@ public interface NodeInfo { /** * An additional map of free-form properties, that can be used by custom implementations. They * will be copied as-is into {@link Node#getExtras()}. + * + *

      This is not required; if you don't have anything specific to report here, it can be null or + * empty. */ @Nullable Map getExtras(); @@ -138,7 +146,14 @@ public interface NodeInfo { @NonNull UUID getHostId(); - /** The current version that is associated with the nodes schema. */ + /** + * The current version that is associated with the node's schema. + * + *

      This is not required; the driver reports it in {@link Node#getSchemaVersion()}, but for + * informational purposes only. It is not used anywhere internally (schema agreement is checked + * with {@link TopologyMonitor#checkSchemaAgreement()}, which by default queries system tables + * directly, not this field). + */ @Nullable UUID getSchemaVersion(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/TopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/TopologyMonitor.java index d01ae3d954f..f74bcd943a4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/TopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/TopologyMonitor.java @@ -44,7 +44,10 @@ public interface TopologyMonitor extends AsyncAutoCloseable { * *

      The completion of the future returned by this method marks the point when the driver * considers itself "connected" to the cluster, and proceeds with the rest of the initialization: - * refreshing the list of nodes and the metadata, opening connection pools, etc. + * refreshing the list of nodes and the metadata, opening connection pools, etc. By then, the + * topology monitor should be ready to accept calls to its other methods; in particular, {@link + * #refreshNodeList()} will be called shortly after the completion of the future, to load the + * initial list of nodes to connect to. * *

      If {@code advanced.reconnect-on-init = true} in the configuration, this method is * responsible for handling reconnection. That is, if the initial attempt to "connect" to the diff --git a/manual/.nav b/manual/.nav index 89d20c57055..35e0225438b 100644 --- a/manual/.nav +++ b/manual/.nav @@ -4,4 +4,5 @@ mapper api_conventions case_sensitivity osgi -cloud \ No newline at end of file +cloud +developer diff --git a/manual/README.md b/manual/README.md index 4938dbcf541..c3111debe2f 100644 --- a/manual/README.md +++ b/manual/README.md @@ -6,6 +6,8 @@ Driver modules: * [Query builder](query_builder/): a fluent API to create CQL queries programmatically. * [Mapper](mapper/): generates the boilerplate to execute queries and convert the results into application-level objects. +* [Developer docs](developer/): explains the codebase and internal extension points for advanced + customization. Common topics: diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 1525f1975d0..105f74de563 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -4,8 +4,8 @@ Secure the traffic between the driver and Cassandra. -* `advanced.ssl-engine-factory` in the configuration; defaults to none, also available: JSSE, or - write your own. +* `advanced.ssl-engine-factory` in the configuration; defaults to none, also available: + config-based, or write your own. * or programmatically: [CqlSession.builder().withSslEngineFactory()][SessionBuilder.withSslEngineFactory] or [CqlSession.builder().withSslContext()][SessionBuilder.withSslContext]. @@ -178,26 +178,12 @@ CqlSession session = CqlSession.builder() .build(); ``` -#### Netty +#### Netty-tcnative -Netty provides a more efficient SSL implementation based on native OpenSSL support. It's possible to -customize the driver to use it instead of JSSE. +Netty supports native integration with OpenSSL / boringssl. 
The driver does not provide this out of +the box, but with a bit of custom development it is fairly easy to add. See +[SslHandlerFactory](../../developer/netty_pipeline/#ssl-handler-factory) in the developer docs. -This is an advanced topic and beyond the scope of this document, but here is an overview: - -1. add a dependency to Netty-tcnative: follow - [these instructions](http://netty.io/wiki/forked-tomcat-native.html); -2. write your own implementation of the driver's `SslHandlerFactory`. This is a higher-level - abstraction than `SslEngineFactory`, that returns a Netty `SslHandler`. You'll build this handler - with Netty's own `SslContext`; -3. write a subclass of `DefaultDriverContext` that overrides `buildSslHandlerFactory()` to return - the custom `SslHandlerFactory` you wrote in step 2. This will cause the driver to completely - ignore the `ssl-engine-factory` options in the configuration; -4. write a subclass of `SessionBuilder` that overrides `buildContext` to return the custom context - that you wrote in step 3. -5. build your session with your custom builder. - -Note that this approach relies on the driver's [internal API](../../api_conventions). [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html diff --git a/manual/developer/.nav b/manual/developer/.nav new file mode 100644 index 00000000000..0bb954b1293 --- /dev/null +++ b/manual/developer/.nav @@ -0,0 +1,5 @@ +common +native_protocol +netty_pipeline +request_execution +admin diff --git a/manual/developer/README.md b/manual/developer/README.md new file mode 100644 index 00000000000..975ab16c176 --- /dev/null +++ b/manual/developer/README.md @@ -0,0 +1,19 @@ +## Developer docs + +This section explains how driver internals work. 
The intended audience is: + +* driver developers and contributors; +* framework authors, or architects who want to write advanced customizations and integrations. + +Most of this material will involve "internal" packages; see [API conventions](../api_conventions/) +for more explanations. + +We recommend reading about the [common infrastructure](common/) first. Then the documentation goes +from lowest to highest level: + +* [Native protocol layer](native_protocol/): binary encoding of the TCP payloads; +* [Netty pipeline](netty_pipeline/): networking and low-level stream management; +* [Request execution](request_execution/): higher-level handling of user requests and responses; +* [Administrative tasks](admin/): everything else (cluster state and metadata). + +If you're reading this on GitHub, the `.nav` file in each directory contains a suggested order. \ No newline at end of file diff --git a/manual/developer/admin/README.md b/manual/developer/admin/README.md new file mode 100644 index 00000000000..def3b6a2927 --- /dev/null +++ b/manual/developer/admin/README.md @@ -0,0 +1,323 @@ +## Administrative tasks + +Aside from the main task of [executing user requests](../request_execution), the driver also needs +to track cluster state and metadata. 
This is done with a number of administrative components: + +```ditaa + +---------------+ + | DriverChannel | + +-------+-------+ + |1 + | topology ++-----------------+ query +---------+---------+ events +| TopologyMonitor +------+---->| ControlConnection +-----------------+ ++-----------------+ | +---------+---------+ | + ^ | | | + | | | topology+channel V + get | +---------+ refresh| events +----------+ +node info| | schema | +------------+ EventBus | + | | | | +-+--------+ ++--------+-----+--+ | | ^ ^ +| MetadataManager |<-------+-------------+ | node| | ++--------+-------++ | | state| | + | | | add/remove v events| | + |1 | | node +------------------+ | | + +-----+----+ | +------------+ NodeStateManager +------+ | + | Metadata | | +------------------+ | + +----------+ | | + +-------------------------------------------------------+ + metadata changed events +``` + +Note: the event bus is covered in the [common infrastructure](../common/event_bus) section. + +### Control connection + +The goal of the control connection is to maintain a dedicated `DriverChannel` instance, used to: + +* listen for server-side protocol events: + * topology events (`NEW_NODE`, `REMOVED_NODE`) and status events (`UP`, `DOWN`) are published on + the event bus, to be processed by other components; + * schema events are propagated directly to the metadata manager, to trigger a refresh; +* provide a way to query system tables. In practice, this is used by: + * the topology monitor, to read node information from `system.local` and `system.peers`; + * the metadata manager, to read schema metadata from `system_schema.*`. + +It has its own reconnection mechanism (if the channel goes down, a new one will be opened to another +node in the cluster) and some logic for initialization and shutdown. 
+ +Note that the control connection is really just an implementation detail of the metadata manager and +topology monitor: if those components are overridden with custom versions that use other means to +get their data, the driver will detect it and not initialize the control connection (at the time of +writing, the session also references the control connection directly, but that's a bug: +[JAVA-2473](https://datastax-oss.atlassian.net/browse/JAVA-2473)). + +### Metadata manager + +This component is responsible for maintaining the contents of +[session.getMetadata()](../../core/metadata/). + +One big improvement in driver 4 is that the `Metadata` object is immutable and updated atomically; +this guarantees a consistent view of the cluster at a given point in time. For example, if a +keyspace name is referenced in the token map, there will always be a corresponding +`KeyspaceMetadata` in the schema metadata. + +`MetadataManager` keeps the current `Metadata` instance in a volatile field. Each transition is +managed by a `MetadataRefresh` object that computes the new metadata, along with an optional list of +events to publish on the bus (e.g. table created, keyspace removed, etc.) The new metadata is then +written back to the volatile field. `MetadataManager` follows the [confined inner +class](../common/concurrency/#cold-path) pattern to ensure that all refreshes are applied serially, +from a single admin thread. This guarantees that two refreshes can't start from the same initial +state and overwrite each other. + +There are various types of refreshes targeting nodes, the schema or the token map. + +Note that, unlike driver 3, we only do full schema refreshes. This simplifies the code considerably, +and thanks to debouncing this should not affect performance. 
The schema refresh process uses a few +auxiliary components that may have different implementations depending on the Cassandra version: + +* `SchemaQueries`: launches the schema queries asynchronously, and assemble the result in a + `SchemaRows`; +* `SchemaParser`: turns the `SchemaRows` into the `SchemaRefresh`. + +When the metadata manager needs node-related data, it queries the topology monitor. When it needs +schema-related data, it uses the control connection directly to issue its queries. + +### Topology monitor + +`TopologyMonitor` abstracts how we get information about nodes in the cluster: + +* refresh the list of nodes; +* refresh an individual node, or load the information of a newly added node; +* check schema agreement; +* emit `TopologyEvent` instances on the bus when we get external signals suggesting topology changes + (node added or removed), or status changes (node down or up). + +The built-in implementation uses the control connection to query `system.local` and `system.peers`, +and listen to gossip events. + +### Node state manager + +`NodeStateManager` tracks the state of the nodes in the cluster. + +We can't simply trust gossip events because they are not always reliable (the coordinator can become +isolated and think other nodes are down). Instead, the driver uses more elaborate rules that combine +external signals with observed internal state: + +* as long as we have an active connection to a node, it is considered up, whatever gossip events + say; +* if all connections to a node are lost, and its pool has started reconnecting, it gets marked down + (we check the reconnection because the pool could have shut down for legitimate reasons, like the + node distance changing to IGNORED); +* a node is marked back up when the driver has successfully reopened at least one connection; +* if the driver is not actively trying to connect to a node (for example if it is at distance + IGNORED), then gossip events are applied directly. 
+ +See the javadocs of `NodeState` and `TopologyEvent`, as well as the `NodeStateManager` +implementation itself, for more details. + +#### Topology events vs. node state events + +These two event types are related, but they're used at different stages: + +* `TopologyEvent` is an external signal about the state of a node (by default, a `TOPOLOGY_CHANGE` + or `STATUS_CHANGE` gossip event received on the control connection). This is considered as a mere + suggestion, that the driver may or may not decide to follow; +* `NodeStateEvent` is an actual decision made by the driver to change a node to a given state. + +`NodeStateManager` essentially transforms topology events, as well as other internal signals, into +node state events. + +In general, other driver components only react to node state events, but there are a few exceptions: +for example, if a connection pool is reconnecting and the next attempt is scheduled in 5 minutes, +but a SUGGEST_UP topology event is emitted, the pool tries to reconnect immediately. + +The best way to find where each event is used is to do a usage search of the event type. + +### How admin components work together + +Most changes to the cluster state will involve the coordinated effort of multiple admin components. 
+Here are a few examples: + +#### A new node gets added + +```ditaa ++-----------------+ +--------+ +----------------+ +---------------+ +---------------+ +|ControlConnection| |EventBus| |NodeStateManager| |MetadataManager| |TopologyMonitor| ++--------+--------+ +---+----+ +--------+-------+ +-------+-------+ +-------+-------+ + | | | | | ++--------+-------+ | | | | +|Receive NEW_NODE| | | | | +|gossip event | | | | | +| {d}| | | | | ++--------+-------+ | | | | + | | | | | + |TopologyEvent( | | | | + | SUGGEST_ADDED)| | | | + +--------------->| | | | + | |onTopologyEvent| | | + | +-------------->| | | + | | +------+-------+ | | + | | |check node not| | | + | | |known already | | | + | | | {d}| | | + | | +------+-------+ | | + | | | | | + | | | addNode | | + | | +---------------->| | + | | | | getNewNodeInfo | + | | | +---------------->| + | | | | | + | query(SELECT FROM system.peers) | + |<-------------------------------------------------------------------+ + +------------------------------------------------------------------->| + | | | |<----------------+ + | | | +-------+--------+ | + | | | |create and apply| | + | | | |AddNodeRefresh | | + | | | | {d}| | + | | | +-------+--------+ | + | | | | | + | | NodeChangeEvent(ADDED) | | + | |<--------------------------------+ | + | | | | | +``` + +At this point, other driver components listening on the event bus will get notified of the addition. +For example, `DefaultSession` will initialize a connection pool to the new node. 
+ +#### A new table gets created + +```ditaa + +-----------------+ +---------------+ +---------------+ +--------+ + |ControlConnection| |MetadataManager| |TopologyMonitor| |EventBus| + +--------+--------+ +-------+-------+ +-------+-------+ +---+----+ + | | | | ++----------+----------+ | | | +|Receive SCHEMA_CHANGE| | | | +|gossip event | | | | +| {d} | | | | ++----------+----------+ | | | + | | | | + | refreshSchema | | | + +------------------------------->| | | + | |checkSchemaAgreement | | + | +-------------------->| | + | | | | + | query(SELECT FROM system.local/peers) | | + |<-----------------------------------------------------+ | + +----------------------------------------------------->| | + | | | | + | |<--------------------+ | + |query(SELECT FROM system_schema)| | | + |<-------------------------------+ | | + +------------------------------->| | | + | +-------+--------+ | | + | |Parse results | | | + | |Create and apply| | | + | |SchemaRefresh | | | + | | {d}| | | + | +-------+--------+ | | + | | | | + | | TableChangeEvent(CREATED) | + | +---------------------------------->| + | | | | +``` + +#### The last connection to an active node drops + +```ditaa + +-----------+ +--------+ +----------------+ +----+ +---------------+ + |ChannelPool| |EventBus| |NodeStateManager| |Node| |MetadataManager| + +-----+-----+ +---+----+ +-------+--------+ +-+--+ +-------+-------+ + | | | | | + |ChannelEvent(CLOSED) | | | | + +----------------------->| | | | + | |onChannelEvent | | | + +------+-----+ +--------------->| | | + | start | | |decrement | | + |reconnecting| | |openConnections | | + | {d}| | +--------------->| | + +------+-----+ | | | | + |ChannelEvent( | | | | + | RECONNECTION_STARTED) | | | | + +----------------------->| | | | + | |onChannelEvent | | | + | +--------------->| | | + | | |increment | | + | | |reconnections | | + | | +--------------->| | + | | | | | + | | +--------+--------+ | | + | | |detect node has | | | + | | |0 connections and| | | + | | |is 
reconnecting | | | + | | | {d} | | | + | | +--------+--------+ | | + | | |set state DOWN | | + | | +--------------->| | + | |NodeStateEvent( | | | + | | DOWN) | | | + +------+-----+ |<---------------+ | | + |reconnection| | | | | + | succeeds | | | | | + | {d}| | | | | + +------+-----+ | | | | + |ChannelEvent(OPENED) | | | | + +----------------------->| | | | + | |onChannelEvent | | | + | +--------------->| | | + | | |increment | | + | | |openConnections | | + | | +--------------->| | + | | | | | + | | +--------+--------+ | | + | | |detect node has | | | + | | |1 connection | | | + | | | {d} | | | + | | +--------+--------+ | | + | | | refreshNode | | + | | +---------------------------->| + | | | | | + | | |set state UP | | + | | +--------------->| | + | |NodeStateEvent( | | | + | | UP) | | | + | |<---------------+ | | + |ChannelEvent( | | | | + | RECONNECTION_STOPPED) | | | | + +----------------------->| | | | + | |onChannelEvent | | | + | +--------------->| | | + | | |decrement | | + | | |reconnections | | + | | +--------------->| | + | | | | | +``` + +### Extension points + +#### TopologyMonitor + +This is a standalone component because some users have asked for a way to use their own discovery +service instead of relying on system tables and gossip (see +[JAVA-1082](https://datastax-oss.atlassian.net/browse/JAVA-1082)). + +A custom implementation can be plugged by [extending the +context](../common/context/#overriding-a-context-component) and overriding `buildTopologyMonitor`. +It should: + +* implement the methods of `TopologyMonitor` by querying the discovery service; +* use some notification mechanism (or poll the service periodically) to detect when nodes go up or + down, or get added or removed, and emit the corresponding `TopologyEvent` instances on the bus. + +Read the javadocs for more details; in particular, `NodeInfo` explains how the driver uses the +information returned by the topology monitor. 
+ +#### MetadataManager + +It's less likely that this will be overridden directly. But the schema querying and parsing logic is +abstracted behind two factories that handle the differences between Cassandra versions: +`SchemaQueriesFactory` and `SchemaParserFactory`. These are pluggable by [extending the +context](../common/context/#overriding-a-context-component) and overriding the corresponding +`buildXxx` methods. \ No newline at end of file diff --git a/manual/developer/common/.nav b/manual/developer/common/.nav new file mode 100644 index 00000000000..16c3d54dcb9 --- /dev/null +++ b/manual/developer/common/.nav @@ -0,0 +1,3 @@ +context +concurrency +event_bus \ No newline at end of file diff --git a/manual/developer/common/README.md b/manual/developer/common/README.md new file mode 100644 index 00000000000..53db9ff6f31 --- /dev/null +++ b/manual/developer/common/README.md @@ -0,0 +1,11 @@ +## Common infrastructure + +This covers utilities or concept that are shared throughout the codebase. + +The [context](context/) is what glues everything together, and your primary entry point to extend +the driver. + +We then explain the two major approaches to deal with [concurrency](concurrency/) in the driver. + +Lastly, we briefly touch on the [event bus](event_bus/), which is used to decouple some of the +internal components through asynchronous messaging. \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md new file mode 100644 index 00000000000..3c6078f2eb4 --- /dev/null +++ b/manual/developer/common/concurrency/README.md @@ -0,0 +1,117 @@ +## Concurrency + +The driver is a highly concurrent environment. We try to use thread confinement to simplify the +code, when that does not impact performance. + +### Hot path + +The hot path is everything that happens for a `session.execute` call. 
In a typical client +application, this is where the driver will likely spend the majority of its time, so it must be +fast. + +Write path: + +1. convert the statement into a protocol-level `Message` (`CqlRequestHandler` constructor); +2. find a node and a connection, and write the message to it (`CqlRequestHandler.sendRequest`); +3. assign a stream id and wrap the message into a frame (`InflightHandler.write`); +4. encode the frame into a binary payload (`FrameEncoder`). + +Read path: + +1. decode the binary payload into a frame (`FrameDecoder`); +2. find the handler that corresponds to the stream id (`InFlightHandler.channelRead`); +3. complete the client's future (`CqlRequestHandler.NodeResponseCallback.onResponse`). + +Various policies are also invoked along the way (load balancing, retry, speculative execution, +timestamp generator...), they are considered on the hot path too. + +Steps 1 and 2 of the write path happen on the client thread, and 3 and 4 on the Netty I/O thread +(which is one of the threads in `NettyOptions.ioEventLoopGroup()`). +On the read path, everything happens on the Netty I/O thread. Beyond that, we want to avoid context +switches for performance reasons: in early prototypes, we tried confining `CqlRequestHandler` to a +particular thread, but that did not work well; so you will find that the code is fairly similar to +driver 3 in terms of concurrency control (reliance on atomic structures, volatile fields, etc). + +Note: code on the hot path should prefer the `TRACE` log level. + +### Cold path + +The cold path is everything else: initialization and shutdown, metadata refreshes, tracking node +states, etc. They will typically be way less frequent than user requests, so we can tolerate a small +performance hit in order to make concurrency easier to handle. 
+ +One pattern we use a lot is a confined inner class: + +```java +public class ControlConnection { + // some content omitted for brevity + + private final EventExecutor adminExecutor; + private final SingleThreaded singleThreaded; + + // Called from other components, from any thread + public void reconnectNow() { + RunOrSchedule.on(adminExecutor, singleThreaded::reconnectNow); + } + + private class SingleThreaded { + private void reconnectNow() { + assert adminExecutor.inEventLoop(); + // this method is only ever called from one thread, much easier to handle concurrency + } + } +} +``` + +Public outer methods such as `reconnectNow()` are called concurrently. But they delegate to a method +of the internal class, that always runs on the same `adminExecutor` thread. `RunOrSchedule.on` calls +the method directly if we're already on the target thread, otherwise it schedules a task. If we need +to propagate a result, the outer method injects a future that the inner method completes. + +`adminExecutor` is picked randomly from `NettyOptions.adminEventExecutorGroup()` at construction +time. + +Confining `SingleThreaded` simplifies the code tremendously: we can use regular, non-volatile +fields, and methods are guaranteed to always run in isolation, eliminating subtle race conditions +(this idea was borrowed from actor systems). + +### Non-blocking + +Whether on the hot or cold path, internal code **never blocks**. If an internal component needs to +execute a query, it does so asynchronously, and registers callbacks to process the results. +Examples of this can be found in `ReprepareOnUp` and `DefaultTopologyMonitor` (among others). 
+The only places where the driver blocks are the synchronous wrapper methods in the public API, for
+example:
+ +### The dependency graph + +Most components initialize lazily (see `LazyReference`). They also reference each other, typically +by taking the context as a constructor argument, and extracting the dependencies they need: + +```java +public DefaultTopologyMonitor(InternalDriverContext context) { + ... + this.controlConnection = context.getControlConnection(); +} +``` + +This avoids having to handle the initialization order ourselves. It is also convenient for unit +tests: you can run a component in isolation by mocking all of its dependencies. + +Obviously, things won't go well if there are cyclic dependencies; if you make changes to the +context, you can set a system property to check the dependency graph, it will throw if a cycle is +detected (see `CycleDetector`): + +``` +-Dcom.datastax.oss.driver.DETECT_CYCLES=true +``` + +This is disabled by default, because we don't expect it to be very useful outside of testing cycles. + +### Why not use a DI framework? + +As should be clear by now, the context is a poor man's Dependency Injection framework. We +deliberately avoided third-party solutions: + +* to keep things as simple as possible, +* to avoid an additional library dependency, +* to allow end users to access components and add their own (which wouldn't work well with + compile-time approaches like Dagger). + +### Overriding a context component + +The basic approach to plug in a custom internal component is to subclass the context. + +For example, let's say you wrote a custom `NettyOptions` implementation (maybe you have multiple +sessions, and want to reuse the event loop groups instead of recreating them every time): + +```java +public class CustomNettyOptions implements NettyOptions { + ... 
+} +``` + +In the default context, here's how the component is managed: + +```java +public class DefaultDriverContext { + + // some content omitted for brevity + + private final LazyReference nettyOptionsRef = + new LazyReference<>("nettyOptions", this::buildNettyOptions, cycleDetector); + + protected NettyOptions buildNettyOptions() { + return new DefaultNettyOptions(this); + } + + @NonNull + @Override + public NettyOptions getNettyOptions() { + return nettyOptionsRef.get(); + } +} +``` + +To switch in your implementation, you only need to override the build method: + +```java +public class CustomContext extends DefaultDriverContext { + + public CustomContext(DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + super(configLoader, programmaticArguments); + } + + @Override + protected NettyOptions buildNettyOptions() { + return new CustomNettyOptions(this); + } +} +``` + +Then you need a way to create a session that uses your custom context. The session builder is +extensible as well: + +```java +public class CustomBuilder extends SessionBuilder { + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new CustomContext(configLoader, programmaticArguments); + } + + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + // Nothing to do here, nothing changes on the session type + return defaultSession; + } +} +``` + +Finally, you can use your custom builder like the regular `CqlSession.builder()`, it inherits all +the methods: + +```java +CqlSession session = new CustomBuilder() + .addContactPoint(new InetSocketAddress("1.2.3.4", 9042)) + .withLocalDatacenter("datacenter1") + .build(); +``` diff --git a/manual/developer/common/event_bus/README.md b/manual/developer/common/event_bus/README.md new file mode 100644 index 00000000000..837f8c69082 --- /dev/null +++ b/manual/developer/common/event_bus/README.md @@ -0,0 +1,43 @@ +## 
Event bus + +`EventBus` is a bare-bones messaging mechanism, to decouple components from each other, and +broadcast messages to more than one component at a time. + +Producers fire events on the bus; consumers register to be notified for a particular event class. +For example, `DefaultDriverConfigLoader` reloads the config periodically, and fires an event if it +detects a change: + +```java +boolean changed = driverConfig.reload(configSupplier.get()); +if (changed) { + LOG.info("[{}] Detected a configuration change", logPrefix); + eventBus.fire(ConfigChangeEvent.INSTANCE); +} +``` + +This allows other components, such as `ChannelPool`, to react to config changes dynamically: + +```java +eventBus.register( + ConfigChangeEvent.class, RunOrSchedule.on(adminExecutor, this::onConfigChanged)); + +private void onConfigChanged(ConfigChangeEvent event) { + assert adminExecutor.inEventLoop(); + // resize re-reads the pool size from the configuration and does nothing if it hasn't changed, + // which is exactly what we want. + resize(distance); +} +``` + +For simplicity, the implementation makes the following assumptions: + +* events are propagated synchronously: if their processing needs to be delayed or rescheduled to + another thread, it's the consumer's responsibility (see how the pool uses `RunOrSchedule` in the + example above); +* callbacks are not polymorphic: you must register for the exact event class. For example, if you + have `eventBus.register(B.class, callback)` and fire an `A extends B`, the callback won't catch + it (internally, this allows direct lookups instead of traversing all registered callbacks with an + `instanceof` check). + +Those choices have been good enough for the needs of the driver. That's why we use a custom +implementation rather than something more sophisticated like Guava's event bus. 
diff --git a/manual/developer/native_protocol/README.md b/manual/developer/native_protocol/README.md new file mode 100644 index 00000000000..11c936d272b --- /dev/null +++ b/manual/developer/native_protocol/README.md @@ -0,0 +1,180 @@ +## Native protocol layer + +The native protocol layer encodes protocol messages into binary, before they are sent over the +network. + +This part of the code lives in its own project: +[native-protocol](https://github.com/datastax/native-protocol). We extracted it to make it reusable +([Simulacron](https://github.com/datastax/simulacron) also uses it). + +The protocol specifications are available in +[native-protocol/src/main/resources](https://github.com/datastax/native-protocol/tree/1.x/src/main/resources). +These files originally come from Cassandra, we copy them over for easy access. Note that, if the +latest version is a beta (this is the case for v5 at the time of writing -- September 2019), the +specification might not be up to date. Always compare with the latest revision in +[cassandra/doc](https://github.com/apache/cassandra/tree/trunk/doc). + + +For a broad overview of how protocol types are used in the driver, let's step through an example: + +* the user calls `session.execute()` with a `SimpleStatement`. The protocol message for a + non-prepared request is `QUERY`; +* `CqlRequestHandler` uses `Conversions.toMessage` to convert the statement into a + `c.d.o.protocol.internal.request.Query`; +* `InflightHandler.write` assigns a stream id to that message, and wraps it into a + `c.d.o.protocol.internal.Frame`; +* `FrameEncoder` uses `c.d.o.protocol.internal.FrameCodec` to convert the frame to binary. + +(All types prefixed with `c.d.o.protocol.internal` belong to the native-protocol project.) + +A similar process happens on the response path: decode the incoming binary payload into a protocol +message, then convert the message into higher-level driver objects: `ResultSet`, `ExecutionInfo`, +etc. 
+ +### Native protocol types + +#### Messages + +Every protocol message is identified by an opcode, and has a corresponding `Message` subclass. + +A `Frame` wraps a message to add metadata, such as the protocol version and stream id. + +```ditaa ++-------+ contains +------------+ +| Frame +--------->+ Message + ++-------+ +------------+ + | int opcode | + +--+---------+ + | + | +---------+ + +----+ Query | + | +---------+ + | + | +---------+ + +----+ Execute | + | +---------+ + | + | +---------+ + +----+ Rows | + +---------+ + + etc. +``` + +All value classes are immutable, but for efficiency they don't make defensive copies of their +fields. If these fields are mutable (for example collections), they shouldn't be modified after +creating a message instance. + +The code makes very few assumptions about how the messages will be used. Data is often represented +in the most simple way. For example, `ProtocolConstants` uses simple integer constants to represent +protocol codes (enums wouldn't work at that level, because we need to add new codes in the DSE +driver); the driver generally rewraps them in more type-safe structures before exposing them to +higher-level layers. + +#### Encoding/decoding + +For every message, there is a corresponding `Message.Codec` for encoding and decoding. A +`FrameCodec` relies on a set of message codecs, for one or more protocol versions. Given an incoming +frame, it looks up the right message codec to use, based on the protocol version and opcode. +Optionally, it compresses frame bodies with a `Compressor`. 
+ + +```ditaa ++-----------------+ +-------------------+ +| FrameCodec[B] +----------------+ PrimitiveCodec[B] | ++-----------------+ +-------------------+ +| B encode(Frame) | +| Frame decode(B) +-------+ +---------------+ ++------+----------+ +--------+ Compressor[B] | + | +---------------+ + | + | +-------------------+ + +---------------------------+ Message.Codec | + 1 codec per opcode +-------------------+ + and protocol version | B encode(Message) | + | Message decode(B) | + +-------------------+ +``` + +Most of the time, you'll want to use the full set of message codecs for a given protocol version. +`CodecGroup` provides a convenient way to register multiple codecs at once. The project provides +default implementations for all supported protocol version, both for clients like the driver (e.g. +encode `QUERY`, decode `RESULT`), or servers like Simulacron (decode `QUERY` encode `RESULT`). + + +```ditaa ++-------------+ +| CodecGroup | ++------+------+ + | + | +------------------------+ + +----+ ProtocolV3ClientCodecs | + | +------------------------+ + | + | +------------------------+ + +----+ ProtocolV3ServerCodecs | + | +------------------------+ + | + | +------------------------+ + +----+ ProtocolV4ClientCodecs | + | +------------------------+ + | + | +------------------------+ + +----+ ProtocolV4ClientCodecs | + | +------------------------+ + | + | +------------------------+ + +----+ ProtocolV5ClientCodecs | + | +------------------------+ + | + | +------------------------+ + +----+ ProtocolV5ClientCodecs | + +------------------------+ +``` + +The native protocol layer is agnostic to the actual binary representation. In the driver, this +happens to be a Netty `ByteBuf`, but the encoding logic doesn't need to be aware of that. This is +expressed by the type parameter `B` in `FrameCodec`. `PrimitiveCodec` abstracts the basic +primitives to work with a `B`: how to create an instance, read and write data to it, etc. 
+ +```java +public interface PrimitiveCodec { + B allocate(int size); + int readInt(B source); + void writeInt(int i, B dest); + ... +} +``` + +Everything else builds upon those primitives. By just switching the `PrimitiveCodec` implementation, +the whole protocol layer could be reused with a different type, such as `byte[]`. + +In summary, to initialize a `FrameCodec`, you need: + +* a `PrimitiveCodec`; +* a `Compressor` (optional); +* one or more `CodecGroup`s. + +### Integration in the driver + +The driver initializes its `FrameCodec` in `DefaultDriverContext.buildFrameCodec()`. + +* the primitive codec is `ByteBufPrimitiveCodec`, which implements the basic primitives for Netty's + `ByteBuf`; +* the compressor comes from `DefaultDriverContext.buildCompressor()`, which determines the + implementation from the configuration; +* it is built with `FrameCodec.defaultClient`, which is a shortcut to use the default client groups: + `ProtocolV3ClientCodecs`, `ProtocolV4ClientCodecs` and `ProtocolV5ClientCodecs`. + +### Extension points + +The default frame codec can be replaced by [extending the +context](../common/context/#overriding-a-context-component) to override `buildFrameCodec`. This +can be used to add or remove a protocol version, or replace a particular codec. + +If protocol versions change, `ProtocolVersionRegistry` will likely be affected as well. + +Also, depending on the nature of the protocol changes, the driver's [request +processors](../request_execution/#request-processors) might require some adjustments: either replace +them, or introduce separate ones (possibly with new `executeXxx()` methods on a custom session +interface). 
diff --git a/manual/developer/netty_pipeline/README.md b/manual/developer/netty_pipeline/README.md new file mode 100644 index 00000000000..58a32a67a59 --- /dev/null +++ b/manual/developer/netty_pipeline/README.md @@ -0,0 +1,161 @@ +## Netty pipeline + +With the [protocol layer](../native_protocol) in place, the next step is to build the logic for a +single server connection. + +We use [Netty](https://netty.io/) for network I/O (to learn more about Netty, [this +book](https://www.manning.com/books/netty-in-action) is an excellent resource). + +```ditaa + +----------------+ + | ChannelFactory | + +----------------+ + | connect() | + +-------+--------+ + | Application + |creates +----------------------------------------------+ + V | Outgoing | + +-------+--------+ | | +---------------------+ ^ | + | DriverChannel | | | | ProtocolInitHandler | | | + +-------+--------+ | | +---------------------+ | | + | | | | | + +-------+--------+ | | +---------------------+ | | + | Channel | | | | InFlightHandler | | | + | (Netty) | | | +---------------------+ | | + +-------+--------+ | | | | + | | | +---------------------+ | | + +-------+--------+ | | | Heartbeathandler | | | + |ChannelPipeline +---+ | +---------------------+ | | + | (Netty) | | | | | + +----------------+ | | +--------------+ +--------------+ | | + | | | FrameEncoder | | FrameDecoder | | | + | | +--------------+ +--------------+ | | + | | | | + | | +---------------------+ | | + | | | SslHandler | | | + | | | (Netty) | | | + | V +---------------------+ | | + | Incoming | + +----------------------------------------------+ + Network +``` + +Each Cassandra connection is based on a Netty `Channel`. We wrap it into our own `DriverChannel`, +that exposes higher-level operations. `ChannelFactory` is the entry point for other driver +components; it handles protocol negotiation for the first channel. + +A Netty channel has a *pipeline*, that contains a sequence of *handlers*. 
As a request is sent, it +goes through the pipeline top to bottom; each successive handler processes the input, and passes the +result to the next handler. Incoming responses go the other way. + +Our pipeline is configured with the following handlers: + +### SslHandler + +The implementation is provided by Netty (all the others handlers are custom implementations). + +Internally, handler instances are provided by `SslHandlerFactory`. At the user-facing level, this is +abstracted behind `SslEngineFactory`, based on Java's default SSL implementation. + +See also the [Extension points](#extension-points) section below. + +### FrameEncoder and FrameDecoder + +This is where we integrate the protocol layer, as explained +[here](../native_protocol/#integration-in-the-driver). + +Unlike the other pipeline stages, we use separate handlers for incoming and outgoing messages. + +### HeartbeatHandler + +The heartbeat is a background request sent on inactive connections (no reads since x seconds), to +make sure that they are still alive, and prevent them from being dropped by a firewall. This is +similar to TCP_KeepAlive, but we provide an application-side alternative because users don't always +have full control over their network configuration. + +`HeartbeatHandler` is based on Netty's built-in `IdleStateHandler`, so there's not much in there +apart from the details of the control request. + +### InFlightHandler + +This handler is where most of the connection logic resides. It is responsible for: + +* writing regular requests: + * find an available stream id; + * store the `ResponseCallback` provided by the client under that id; + * when the response comes in, retrieve the callback and complete it; +* cancelling a request; +* switching the connection to a new keyspace (if a USE statement was executed through the session); +* handling shutdown: gracefully (allow all request to complete), or forcefully (error out all + requests). 
+ +The two most important methods are: + +* `write(ChannelHandlerContext, Object, ChannelPromise)`: processes outgoing messages. We accept + different types of messages, because cancellation and shutdown also use that path. See + `DriverChannel`, which abstracts those details. +* `channelRead`: processes incoming responses. + +Netty handlers are confined to the channel's event loop (a.k.a I/O thread). Therefore the code +doesn't have to be concurrent, fields can be non-volatile and methods are guaranteed not to race +with each other. + +In particular, a big difference from driver 3 is that stream ids are assigned within the event loop, +instead of from client code before writing to the channel (see also [connection +pooling](../request_execution/#connection_pooling)). `StreamIdGenerator` is not thread-safe. + +All communication between the handler and the outside world must be done through messages or channel +events. There are 3 exceptions to this rule: `getAvailableIds`, `getInflight` and `getOrphanIds`, +which are based on volatile fields. They are all used for metrics, and `getAvailableIds` is also +used to balance the load over connections to the same node (see `ChannelSet`). + +### ProtocolInitHandler + +This handler manages the protocol initialization sequence on a newly established connection (see the +`STARTUP` message in the protocol specification). + +Most of the logic resides in `InitRequest.onResponse`, which acts as a simple state machine based on +the last request sent. + +There is also a bit of custom code to ensure that the channel is not made available to clients +before the protocol is ready. This is abstracted in the parent class `ConnectInitHandler`. + +Once the initialization is complete, `ProtocolInitHandler` removes itself from the pipeline. + +### Extension points + +#### NettyOptions + +The `advanced.netty` section in the [configuration](../../core/configuration/reference/) exposes a +few high-level options. 
+ +For more elaborate customizations, you can [extend the +context](../common/context/#overriding-a-context-component) to plug in a custom `NettyOptions` +implementation. This allows you to do things such as: + +* reusing existing event loops; +* using Netty's [native Epoll transport](https://netty.io/wiki/native-transports.html); +* adding custom handlers to the pipeline. + +#### SslHandlerFactory + +The [user-facing API](../../core/ssl/) (`advanced.ssl-engine-factory` in the configuration, or +`SessionBuilder.withSslContext` / `SessionBuilder.withSslEngineFactory`) only supports Java's +default SSL implementation. + +The driver can also work with Netty's [native +integration](https://netty.io/wiki/requirements-for-4.x.html#tls-with-openssl) with OpenSSL or +boringssl. This requires a bit of custom development against the internal API: + +* add a dependency to one of the `netty-tcnative` artifacts, following [these + instructions](http://netty.io/wiki/forked-tomcat-native.html); +* implement `SslHandlerFactory`. Typically: + * the constructor will create a Netty [SslContext] with [SslContextBuilder.forClient], and store + it in a field; + * `newSslHandler` will delegate to one of the [SslContext.newHandler] methods; +* [extend the context](../common/context/#overriding-a-context-component) and override + `buildSslHandlerFactory` to plug your custom implementation. 
+ +[SslContext]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContext.html +[SslContext.newHandler]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContext.html#newHandler-io.netty.buffer.ByteBufAllocator- +[SslContextBuilder.forClient]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContextBuilder.html#forClient-- \ No newline at end of file diff --git a/manual/developer/request_execution/README.md b/manual/developer/request_execution/README.md new file mode 100644 index 00000000000..a53ee5efe28 --- /dev/null +++ b/manual/developer/request_execution/README.md @@ -0,0 +1,314 @@ +## Request execution + +The [Netty pipeline](../netty_pipeline/) gives us the ability to send low-level protocol messages on +a single connection. + +The request execution layer builds upon that to: + +* manage multiple connections (many nodes, possibly many connections per node); +* abstract the protocol layer behind higher-level, user-facing types. + +The session is the main entry point. `CqlSession` is the type that users will most likely reference +in their applications. It extends a more generic `Session` type, for the sake of extensibility; this +will be explained in [Request processors](#request-processors). 
+ + +```ditaa ++----------------------------------+ +| Session | ++----------------------------------+ +| ResultT execute( | +| RequestT, GenericType[ResultT])| ++----------------------------------+ + ^ + | ++----------------+-----------------+ +| CqlSession | ++----------------------------------+ +| ResultSet execute(Statement) | ++----------------+-----------------+ + ^ + | ++----------------+-----------------+ +| DefaultSession | ++----------------+-----------------+ + | + | + | 1 per node +-------------+ + +------------+ ChannelPool | + | +----+--------+ + | | + | | n +---------------+ + | +----+ DriverChannel | + | +---------------+ + | + | 1 +--------------------------+ + +------------+ RequestProcessorRegistry | + +----+---------------------+ + | + | n +---------------------------+ + +----+ RequestProcessor | + +---------------------------+ + | ResultT process(RequestT) | + +---------------------------+ +``` + +`DefaultSession` contains the session implementation. It follows the [confined inner +class](../common/concurrency/#cold-path) pattern to simplify concurrency. + +### Connection pooling + +```ditaa ++----------------------+ 1 +------------+ +| ChannelPool +---------+ ChannelSet | ++----------------------+ +-----+------+ +| DriverChannel next() | | ++----------+-----------+ n| + | +------+--------+ + 1| | DriverChannel | + +------+-------+ +---------------+ + | Reconnection | + +--------------+ +``` + +`ChannelPool` handles the connections to a given node, for a given session. It follows the [confined +inner class](../common/concurrency/#cold-path) pattern to simplify concurrency. There are a few +differences compared to the 3.x implementation: + +#### Fixed size + +The pool has a fixed number of connections, it doesn't grow or shrink dynamically based on current +usage. In other words, there is no more "max" size, only a "core" size. + +However, this size is specified in the configuration. 
If the value is changed at runtime, the driver
+will detect it, and trigger a resize of all active pools.
+
+The rationale for removing the dynamic behavior is that it introduced a ton of complexity in the
+implementation and configuration, for unclear benefits: if the load fluctuates very rapidly, then
+you need to provision for the max size anyway, so you might as well run with all the connections all
+the time. If on the other hand the fluctuations are rare and predictable (e.g. peak for holiday
+sales), then a manual configuration change is good enough.
+
+#### Wait-free
+
+To get a connection to a node, client code calls `ChannelPool.next()`. This returns the least busy
+connection, based on the `getAvailableIds()` counter exposed by
+[InFlightHandler](netty_pipeline/#in-flight-handler).
+
+If all connections are busy, there is no queuing; the driver moves to the next node immediately. The
+rationale is that it's better to try another node that might be ready to reply, instead of
+introducing an additional wait for each node. If the user wants queuing when all nodes are busy,
+it's better to do it at the session level with a [throttler](../../core/throttling/), which provides
+more intuitive configuration.
+
+Also, note that there is no preemptive acquisition of the stream id outside of the event loop: we
+select a channel based on a volatile counter, so a race condition is possible; if the channel gets
+full by the time we arrive in `InFlightHandler`, the client will simply get a
+`BusyConnectionException` and move on to the next node. We only acquire stream ids from the event
+loop, which makes it much easier to track the current load (in driver 3, "inflight count getting out
+of sync" bugs were very frequent).
+
+The pool manages its channels with `ChannelSet`, a simple copy-on-write data structure.
+
+#### Built-in reconnection
+
+The pool has its own independent reconnection mechanism (based on the `Reconnection` utility class).
+The goal is to keep the pool at its expected capacity: whenever a connection is lost, the task +starts and will try to reopen the missing connections at regular intervals. + +### Request processors + +```ditaa ++----------------------------------+ +| Session | ++----------------------------------+ +| ResultT execute( | +| RequestT, GenericType[ResultT])| ++----------------------------------+ + ^ + | ++----------------+-----------------+ +| CqlSession | ++----------------------------------+ +| ResultSet execute(Statement) | ++----------------+-----------------+ +``` + +The driver can execute different types of requests, in different ways. This is abstracted by the +top-level `Session` interface, with a very generic execution method: + +```java + ResultT execute( + RequestT request, GenericType resultType); +``` + +It takes a request, and a type token that serves as a hint at the expected result. Each `(RequestT, +ResultT)` combination defines an execution model, for example: + +| `RequestT` | `ResultT` | Execution | +| --- | --- | ---| +| `Statement` | `ResultSet` | CQL, synchronous | +| `Statement` | `CompletionStage` | CQL, asynchronous | +| `Statement` | `ReactiveResultSet` | CQL, reactive | +| `GraphStatement` | `GraphResultSet` | DSE Graph, synchronous | +| `GraphStatement` | `CompletionStage` | DSE Graph, asynchronous | + +In general, regular client code doesn't use `Session.execute` directly. Instead, child interfaces +expose more user-friendly shortcuts for a given result type: + +```java +public interface CqlSession extends Session { + default ResultSet execute(Statement statement) { + return execute(statement, Statement.SYNC); + } +} +``` + +The logic for each execution model is encapsulated in a `RequestProcessor`. +Processors are stored in a `RequestProcessorRegistry`. For each request, the session invokes the +registry to find the processor that matches the request and result types. 
+ +```ditaa ++----------------+ 1+-----------------------------------+ +| DefaultSession +---+ RequestProcessorRegistry | ++----------------+ +-----------------------------------+ + | processorFor( | + | RequestT, GenericType[ResultT]) | + +-----------------+-----------------+ + | + |n + +----------------------+----------------------+ + | RequestProcessor[RequestT, ResultT] | + +---------------------------------------------+ + | boolean canProcess(Request, GenericType[?]) | + | ResultT process(RequestT) | + +---------------------------------------------+ + ^ + | +--------------------------+ + +---------+ CqlRequestSyncProcessor | + | +--------------------------+ + | + | +--------------------------+ + +---------+ CqlRequestAsyncProcessor | + | +--------------------------+ + | + | +--------------------------+ + +---------+ CqlPrepareSyncProcessor | + | +--------------------------+ + | + | +--------------------------+ + +---------+ CqlPrepareAsyncProcessor | + +--------------------------+ +``` + +A processor is responsible for: + +* converting the user request into [protocol-level messages](../native_protocol/); +* selecting a coordinator node, and obtaining a channel from its connection pool; +* writing the request to the channel; +* handling timeouts, retries and speculative executions; +* translating the response into user-level types. + +The `RequestProcessor` interface makes very few assumptions about the actual processing; but in +general, implementations create a handler for the lifecycle of every request. For example, +`CqlRequestHandler` is the central component for basic CQL execution. + +Processors can be implemented in terms of other processors. In particular, this is the case for +synchronous execution models, which are just a blocking wrapper around their asynchronous +counterpart. You can observe this in `CqlRequestSyncProcessor`. + +Note that preparing a statement is treated as just another execution model. 
It has its own +processors, that operate on a special `PrepareRequest` type: + +```java +public interface CqlSession extends Session { + default PreparedStatement prepare(SimpleStatement statement) { + return execute(new DefaultPrepareRequest(statement), PrepareRequest.SYNC); + } +} +``` + +### Extension points + +#### RequestProcessorRegistry + +You can customize the set of request processors by [extending the +context](../common/context/#overriding-a-context-component) and overriding +`buildRequestProcessorRegistry`. + +This can be used to either: + +* add your own execution models (new request types and/or return types); +* remove existing ones; +* or a combination of both. + +The driver codebase contains an integration test that provides a complete example: +[RequestProcessorIT]. It shows how you can build a session that returns Guava's `ListenableFuture` +instead of Java's `CompletionStage` (existing request type, different return type). + +[GuavaDriverContext] is the custom context subclass. It plugs a custom registry that wraps the +default async processors with [GuavaRequestAsyncProcessor], to transform the returned futures. + +Note that the default async processors are not present in the registry anymore; if you try to call +a method that returns a `CompletionStage`, it fails. See the next section for how to hide those +methods. + +#### Exposing a custom session interface + +If you add or remove execution models, you probably want to expose a session interface that matches +the underlying capabilities of the implementation. + +For example, in the [RequestProcessorIT] example mentioned in the previous section, we remove the +ability to return `CompletionStage`, but add the ability to return `ListenableFuture`. Therefore we +expose a custom [GuavaSession] with a different return type for async methods: + +```java +public interface GuavaSession extends Session { + default ListenableFuture executeAsync(Statement statement) { ... 
}
+  default ListenableFuture prepareAsync(SimpleStatement statement) { ... }
+}
+```
+
+We need an implementation of this interface. Our new methods all have default implementations in
+terms of the abstract `Session.execute()`, so the only thing we need is to delegate to an existing
+`Session`. The driver provides `SessionWrapper` to that effect. See [DefaultGuavaSession]:
+
+```java
+public class DefaultGuavaSession extends SessionWrapper implements GuavaSession {
+  public DefaultGuavaSession(Session delegate) {
+    super(delegate);
+  }
+}
+```
+
+Finally, we want to create an instance of this wrapper. Since we extended the context (see previous
+section), we already wrote a custom builder subclass; there is another protected method we can
+override to plug our wrapper. See [GuavaSessionBuilder]:
+
+```java
+public class GuavaSessionBuilder extends SessionBuilder {
+
+  @Override
+  protected DriverContext buildContext( ... ) { ... }
+
+  @Override
+  protected GuavaSession wrap(CqlSession defaultSession) {
+    return new DefaultGuavaSession(defaultSession);
+  }
+```
+
+Client code can now use the familiar pattern to create a session:
+
+```java
+GuavaSession session = new GuavaSessionBuilder()
+    .addContactEndPoints(...)
+ .withKeyspace("test") + .build(); +``` + +[RequestProcessorIT]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java +[GuavaDriverContext]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java +[GuavaRequestAsyncProcessor]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaRequestAsyncProcessor.java +[GuavaSession]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSession.java +[DefaultGuavaSession]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/DefaultGuavaSession.java +[GuavaSessionBuilder]: https://github.com/datastax/java-driver/blob/4.x/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSessionBuilder.java From b6c2a98f1ad13a4028f4c2283b4e680fa195dde0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 10:09:24 +0300 Subject: [PATCH 139/979] JAVA-2459: Improve extensibility of existing load balancing policies (#1346) --- changelog/README.md | 2 + .../BasicLoadBalancingPolicy.java | 308 ++++++++++++++++++ .../DcInferringLoadBalancingPolicy.java | 81 +++++ .../DefaultLoadBalancingPolicy.java | 296 ++--------------- .../helper/DefaultNodeFilterHelper.java | 103 ++++++ .../helper/InferringLocalDcHelper.java | 86 +++++ .../loadbalancing/helper/LocalDcHelper.java | 48 +++ .../helper/MandatoryLocalDcHelper.java | 94 ++++++ .../helper/NodeFilterHelper.java | 45 +++ .../helper/OptionalLocalDcHelper.java | 136 ++++++++ core/src/main/resources/reference.conf | 14 +- .../BasicLoadBalancingPolicyEventsTest.java | 167 ++++++++++ .../BasicLoadBalancingPolicyInitTest.java | 215 ++++++++++++ 
...BasicLoadBalancingPolicyQueryPlanTest.java | 261 +++++++++++++++ ...nferringLoadBalancingPolicyEventsTest.java | 40 +++ ...cInferringLoadBalancingPolicyInitTest.java | 220 +++++++++++++ ...rringLoadBalancingPolicyQueryPlanTest.java | 57 ++++ .../DefaultLoadBalancingPolicyEventsTest.java | 139 +------- .../DefaultLoadBalancingPolicyInitTest.java | 58 ++-- ...faultLoadBalancingPolicyQueryPlanTest.java | 158 +-------- .../DefaultLoadBalancingPolicyTestBase.java | 8 +- 21 files changed, 1946 insertions(+), 590 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/InferringLocalDcHelper.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/LocalDcHelper.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/MandatoryLocalDcHelper.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java create mode 100644 
core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java diff --git a/changelog/README.md b/changelog/README.md index a897d887550..6875f42b4b1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2447: Mention programmatic local DC method in Default LBP error message +- [improvement] JAVA-2459: Improve extensibility of existing load balancing policies - [documentation] JAVA-2428: Add developer docs - [documentation] JAVA-2503: Migrate Cloud "getting started" page to driver manual - [improvement] JAVA-2484: Add errors for cloud misconfiguration diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java new file mode 100644 index 00000000000..3b5b03815bc --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -0,0 +1,308 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.DefaultNodeFilterHelper; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.OptionalLocalDcHelper; +import com.datastax.oss.driver.internal.core.util.ArrayUtils; +import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.Map; +import java.util.Optional; +import java.util.Queue; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.IntUnaryOperator; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A basic implementation of {@link LoadBalancingPolicy} that can serve as a building block for more + * advanced use cases. + * + *

      To activate this policy, modify the {@code basic.load-balancing-policy} section in the driver + * configuration, for example: + * + *

      + * datastax-java-driver {
      + *   basic.load-balancing-policy {
      + *     class = BasicLoadBalancingPolicy
      + *     local-datacenter = datacenter1 # optional
      + *   }
      + * }
      + * 
      + * + * See {@code reference.conf} (in the manual or core driver JAR) for more details. + * + *

      Local datacenter: This implementation will only define a local datacenter if it is + * explicitly set either through configuration or programmatically; if the local datacenter is + * unspecified, this implementation will effectively act as a datacenter-agnostic load balancing + * policy and will consider all nodes in the cluster when creating query plans, regardless of their + * datacenter. + * + *

      Query plan: This implementation prioritizes replica nodes over non-replica ones; if + * more than one replica is available, the replicas will be shuffled. Non-replica nodes will be + * included in a round-robin fashion. If the local datacenter is defined (see above), query plans + * will only include local nodes, never remote ones; if it is unspecified however, query plans may + * contain nodes from different datacenters. + * + *

      This class is not recommended for normal users who should always prefer {@link + * DefaultLoadBalancingPolicy}. + */ +@ThreadSafe +public class BasicLoadBalancingPolicy implements LoadBalancingPolicy { + + private static final Logger LOG = LoggerFactory.getLogger(BasicLoadBalancingPolicy.class); + + protected static final IntUnaryOperator INCREMENT = i -> (i == Integer.MAX_VALUE) ? 0 : i + 1; + + @NonNull protected final InternalDriverContext context; + @NonNull protected final DriverExecutionProfile profile; + @NonNull protected final String logPrefix; + + protected final AtomicInteger roundRobinAmount = new AtomicInteger(); + protected final CopyOnWriteArraySet liveNodes = new CopyOnWriteArraySet<>(); + + // private because they should be set in init() and never be modified after + private volatile DistanceReporter distanceReporter; + private volatile Predicate filter; + private volatile String localDc; + + public BasicLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { + this.context = (InternalDriverContext) context; + profile = context.getConfig().getProfile(profileName); + logPrefix = context.getSessionName() + "|" + profileName; + } + + /** @return The local datacenter, if known; empty otherwise. */ + public Optional getLocalDatacenter() { + return Optional.ofNullable(localDc); + } + + /** + * @return An immutable copy of the nodes currently considered as live; if the local datacenter is + * known, this set will contain only nodes belonging to that datacenter. 
+ */ + public Set getLiveNodes() { + return ImmutableSet.copyOf(liveNodes); + } + + @Override + public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { + this.distanceReporter = distanceReporter; + localDc = discoverLocalDc(nodes).orElse(null); + filter = createNodeFilter(localDc, nodes); + for (Node node : nodes.values()) { + if (filter.test(node)) { + distanceReporter.setDistance(node, NodeDistance.LOCAL); + if (node.getState() != NodeState.DOWN) { + // This includes state == UNKNOWN. If the node turns out to be unreachable, this will be + // detected when we try to open a pool to it, it will get marked down and this will be + // signaled back to this policy + liveNodes.add(node); + } + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + } + + /** + * Returns the local datacenter, if it can be discovered, or returns {@link Optional#empty empty} + * otherwise. + * + *

      This method is called only once, during {@linkplain LoadBalancingPolicy#init(Map, + * LoadBalancingPolicy.DistanceReporter) initialization}. + * + *

      Implementors may choose to throw {@link IllegalStateException} instead of returning {@link + * Optional#empty empty}, if they require a local datacenter to be defined in order to operate + * properly. + * + * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) + * when the load balancing policy was initialized. This argument is provided in case + * implementors need to inspect the cluster topology to discover the local datacenter. + * @return The local datacenter, or {@link Optional#empty empty} if none found. + * @throws IllegalStateException if the local datacenter could not be discovered, and this policy + * cannot operate without it. + */ + @NonNull + protected Optional discoverLocalDc(@NonNull Map nodes) { + return new OptionalLocalDcHelper(context, profile, logPrefix).discoverLocalDc(nodes); + } + + /** + * Creates a new node filter to use with this policy. + * + *

      This method is called only once, during {@linkplain LoadBalancingPolicy#init(Map, + * LoadBalancingPolicy.DistanceReporter) initialization}, and only after local datacenter + * discovery has been attempted. + * + * @param localDc The local datacenter that was just discovered, or null if none found. + * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) + * when the load balancing policy was initialized. This argument is provided in case + * implementors need to inspect the cluster topology to create the node filter. + * @return the node filter to use. + */ + @NonNull + protected Predicate createNodeFilter( + @Nullable String localDc, @NonNull Map nodes) { + return new DefaultNodeFilterHelper(context, profile, logPrefix) + .createNodeFilter(localDc, nodes); + } + + @NonNull + @Override + public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { + // Take a snapshot since the set is concurrent: + Object[] currentNodes = liveNodes.toArray(); + + Set allReplicas = getReplicas(request, session); + int replicaCount = 0; // in currentNodes + + if (!allReplicas.isEmpty()) { + // Move replicas to the beginning + for (int i = 0; i < currentNodes.length; i++) { + Node node = (Node) currentNodes[i]; + if (allReplicas.contains(node)) { + ArrayUtils.bubbleUp(currentNodes, i, replicaCount); + replicaCount += 1; + } + } + + if (replicaCount > 1) { + shuffleHead(currentNodes, replicaCount); + } + } + + LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount); + + // Round-robin the remaining nodes + ArrayUtils.rotate( + currentNodes, + replicaCount, + currentNodes.length - replicaCount, + roundRobinAmount.getAndUpdate(INCREMENT)); + + return new QueryPlan(currentNodes); + } + + @NonNull + protected Set getReplicas(@Nullable Request request, @Nullable Session session) { + if (request == null || session == null) { + return Collections.emptySet(); + } + + // Note: we're on the hot path and the 
getXxx methods are potentially more than simple getters, + // so we only call each method when strictly necessary (which is why the code below looks a bit + // weird). + CqlIdentifier keyspace = request.getKeyspace(); + if (keyspace == null) { + keyspace = request.getRoutingKeyspace(); + } + if (keyspace == null && session.getKeyspace().isPresent()) { + keyspace = session.getKeyspace().get(); + } + if (keyspace == null) { + return Collections.emptySet(); + } + + Token token = request.getRoutingToken(); + ByteBuffer key = (token == null) ? request.getRoutingKey() : null; + if (token == null && key == null) { + return Collections.emptySet(); + } + + Optional maybeTokenMap = context.getMetadataManager().getMetadata().getTokenMap(); + if (maybeTokenMap.isPresent()) { + TokenMap tokenMap = maybeTokenMap.get(); + return (token != null) + ? tokenMap.getReplicas(keyspace, token) + : tokenMap.getReplicas(keyspace, key); + } else { + return Collections.emptySet(); + } + } + + /** Exposed as a protected method so that it can be accessed by tests */ + protected void shuffleHead(Object[] currentNodes, int replicaCount) { + ArrayUtils.shuffleHead(currentNodes, replicaCount); + } + + @Override + public void onAdd(@NonNull Node node) { + if (filter.test(node)) { + LOG.debug("[{}] {} was added, setting distance to LOCAL", logPrefix, node); + // Setting to a non-ignored distance triggers the session to open a pool, which will in turn + // set the node UP when the first channel gets opened. + distanceReporter.setDistance(node, NodeDistance.LOCAL); + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + + @Override + public void onUp(@NonNull Node node) { + if (filter.test(node)) { + // Normally this is already the case, but the filter could be dynamic and have ignored the + // node previously. 
+ distanceReporter.setDistance(node, NodeDistance.LOCAL); + if (liveNodes.add(node)) { + LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); + } + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + + @Override + public void onDown(@NonNull Node node) { + if (liveNodes.remove(node)) { + LOG.debug("[{}] {} went DOWN, removed from live set", logPrefix, node); + } + } + + @Override + public void onRemove(@NonNull Node node) { + if (liveNodes.remove(node)) { + LOG.debug("[{}] {} was removed, removed from live set", logPrefix, node); + } + } + + @Override + public void close() { + // nothing to do + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java new file mode 100644 index 00000000000..279f8b73e5c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.InferringLocalDcHelper; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import net.jcip.annotations.ThreadSafe; + +/** + * An implementation of {@link LoadBalancingPolicy} that infers the local datacenter from the + * contact points, if no datacenter was provided neither through configuration nor programmatically. + * + *

      To activate this policy, modify the {@code basic.load-balancing-policy} section in the driver + * configuration, for example: + * + *

      + * datastax-java-driver {
      + *   basic.load-balancing-policy {
      + *     class = DcInferringLoadBalancingPolicy
      + *     local-datacenter = datacenter1 # optional
      + *   }
      + * }
      + * 
      + * + * See {@code reference.conf} (in the manual or core driver JAR) for more details. + * + *

      Local datacenter: This implementation requires a local datacenter to be defined, + * otherwise it will throw an {@link IllegalStateException}. A local datacenter can be supplied + * either: + * + *

        + *
      1. Programmatically with {@link + * com.datastax.oss.driver.api.core.session.SessionBuilder#withLocalDatacenter(String) + * SessionBuilder#withLocalDatacenter(String)}; + *
      2. Through configuration, by defining the option {@link + * DefaultDriverOption#LOAD_BALANCING_LOCAL_DATACENTER + * basic.load-balancing-policy.local-datacenter}; + *
      3. Or implicitly: in this case this implementation will infer the local datacenter from the + * provided contact points, if and only if they are all located in the same datacenter. + *
      + * + *

      Query plan: see {@link BasicLoadBalancingPolicy} for details on the computation of + * query plans. + * + *

      This class is not recommended for normal users who should always prefer {@link + * DefaultLoadBalancingPolicy}. + */ +@ThreadSafe +public class DcInferringLoadBalancingPolicy extends BasicLoadBalancingPolicy { + + public DcInferringLoadBalancingPolicy( + @NonNull DriverContext context, @NonNull String profileName) { + super(context, profileName); + } + + @NonNull + @Override + protected Optional discoverLocalDc(@NonNull Map nodes) { + return new InferringLocalDcHelper(context, profile, logPrefix).discoverLocalDc(nodes); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 31fafe8e228..c7cd25215e0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -15,43 +15,15 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; -import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeState; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import 
com.datastax.oss.driver.internal.core.metadata.DefaultNode; -import com.datastax.oss.driver.internal.core.metadata.MetadataManager; -import com.datastax.oss.driver.internal.core.util.ArrayUtils; -import com.datastax.oss.driver.internal.core.util.Reflection; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.MandatoryLocalDcHelper; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.nio.ByteBuffer; -import java.util.Collections; import java.util.Map; -import java.util.Objects; import java.util.Optional; -import java.util.Queue; -import java.util.Set; import java.util.UUID; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.IntUnaryOperator; -import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * The default load balancing policy implementation. @@ -69,257 +41,35 @@ * * * See {@code reference.conf} (in the manual or core driver JAR) for more details. + * + *

      Local datacenter: This implementation requires a local datacenter to be defined, + * otherwise it will throw an {@link IllegalStateException}. A local datacenter can be supplied + * either: + * + *

        + *
      1. Programmatically with {@link + * com.datastax.oss.driver.api.core.session.SessionBuilder#withLocalDatacenter(String) + * SessionBuilder#withLocalDatacenter(String)}; + *
      2. Through configuration, by defining the option {@link + * DefaultDriverOption#LOAD_BALANCING_LOCAL_DATACENTER + * basic.load-balancing-policy.local-datacenter}; + *
      3. Or implicitly, if and only if no explicit contact points were provided: in this case this + * implementation will infer the local datacenter from the implicit contact point (localhost). + *
      + * + *

      Query plan: see {@link BasicLoadBalancingPolicy} for details on the computation of + * query plans. */ @ThreadSafe -public class DefaultLoadBalancingPolicy implements LoadBalancingPolicy { - - private static final Logger LOG = LoggerFactory.getLogger(DefaultLoadBalancingPolicy.class); - private static final Predicate INCLUDE_ALL_NODES = n -> true; - private static final IntUnaryOperator INCREMENT = i -> (i == Integer.MAX_VALUE) ? 0 : i + 1; - - private final String logPrefix; - private final MetadataManager metadataManager; - private final Predicate filter; - private final AtomicInteger roundRobinAmount = new AtomicInteger(); - private final boolean isDefaultPolicy; - @VisibleForTesting final CopyOnWriteArraySet localDcLiveNodes = new CopyOnWriteArraySet<>(); - - private volatile DistanceReporter distanceReporter; - @VisibleForTesting volatile String localDc; +public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy { public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { - InternalDriverContext internalContext = (InternalDriverContext) context; - - this.logPrefix = context.getSessionName() + "|" + profileName; - DriverExecutionProfile config = context.getConfig().getProfile(profileName); - this.localDc = getLocalDcFromConfig(internalContext, profileName, config); - this.isDefaultPolicy = profileName.equals(DriverExecutionProfile.DEFAULT_NAME); - - this.metadataManager = internalContext.getMetadataManager(); - - Predicate filterFromConfig = getFilterFromConfig(internalContext, profileName); - this.filter = - node -> { - String localDc1 = this.localDc; - if (localDc1 != null && !localDc1.equals(node.getDatacenter())) { - LOG.debug( - "[{}] Ignoring {} because it doesn't belong to the local DC {}", - logPrefix, - node, - localDc1); - return false; - } else if (!filterFromConfig.test(node)) { - LOG.debug( - "[{}] Ignoring {} because it doesn't match the user-provided predicate", - logPrefix, - node); - 
return false; - } else { - return true; - } - }; - } - - @Override - public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { - this.distanceReporter = distanceReporter; - - Set contactPoints = metadataManager.getContactPoints(); - if (localDc == null) { - if (metadataManager.wasImplicitContactPoint()) { - // We allow automatic inference of the local DC in this case - assert contactPoints.size() == 1; - Node contactPoint = contactPoints.iterator().next(); - localDc = contactPoint.getDatacenter(); - LOG.debug("[{}] Local DC set from contact point {}: {}", logPrefix, contactPoint, localDc); - } else { - throw new IllegalStateException( - "You provided explicit contact points, the local DC must be specified (see " - + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() - + " in the config)"); - } - } else { - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (Node node : contactPoints) { - String datacenter = node.getDatacenter(); - if (!Objects.equals(localDc, datacenter)) { - builder.put(node, (datacenter == null) ? "" : datacenter); - } - } - ImmutableMap badContactPoints = builder.build(); - if (isDefaultPolicy && !badContactPoints.isEmpty()) { - LOG.warn( - "[{}] You specified {} as the local DC, but some contact points are from a different DC ({})", - logPrefix, - localDc, - badContactPoints); - } - } - - for (Node node : nodes.values()) { - if (filter.test(node)) { - distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (node.getState() != NodeState.DOWN) { - // This includes state == UNKNOWN. 
If the node turns out to be unreachable, this will be - // detected when we try to open a pool to it, it will get marked down and this will be - // signaled back to this policy - localDcLiveNodes.add(node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } + super(context, profileName); } @NonNull @Override - public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { - // Take a snapshot since the set is concurrent: - Object[] currentNodes = localDcLiveNodes.toArray(); - - Set allReplicas = getReplicas(request, session); - int replicaCount = 0; // in currentNodes - - if (!allReplicas.isEmpty()) { - // Move replicas to the beginning - for (int i = 0; i < currentNodes.length; i++) { - Node node = (Node) currentNodes[i]; - if (allReplicas.contains(node)) { - ArrayUtils.bubbleUp(currentNodes, i, replicaCount); - replicaCount += 1; - } - } - - if (replicaCount > 1) { - shuffleHead(currentNodes, replicaCount); - } - } - - LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount); - - // Round-robin the remaining nodes - ArrayUtils.rotate( - currentNodes, - replicaCount, - currentNodes.length - replicaCount, - roundRobinAmount.getAndUpdate(INCREMENT)); - - return new QueryPlan(currentNodes); - } - - private Set getReplicas(Request request, Session session) { - if (request == null || session == null) { - return Collections.emptySet(); - } - - // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, - // so we only call each method when strictly necessary (which is why the code below looks a bit - // weird). 
- CqlIdentifier keyspace = request.getKeyspace(); - if (keyspace == null) { - keyspace = request.getRoutingKeyspace(); - } - if (keyspace == null && session.getKeyspace().isPresent()) { - keyspace = session.getKeyspace().get(); - } - if (keyspace == null) { - return Collections.emptySet(); - } - - Token token = request.getRoutingToken(); - ByteBuffer key = (token == null) ? request.getRoutingKey() : null; - if (token == null && key == null) { - return Collections.emptySet(); - } - - Optional maybeTokenMap = metadataManager.getMetadata().getTokenMap(); - if (maybeTokenMap.isPresent()) { - TokenMap tokenMap = maybeTokenMap.get(); - return (token != null) - ? tokenMap.getReplicas(keyspace, token) - : tokenMap.getReplicas(keyspace, key); - } else { - return Collections.emptySet(); - } - } - - @VisibleForTesting - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - ArrayUtils.shuffleHead(currentNodes, replicaCount); - } - - @Override - public void onAdd(@NonNull Node node) { - if (filter.test(node)) { - LOG.debug("[{}] {} was added, setting distance to LOCAL", logPrefix, node); - // Setting to a non-ignored distance triggers the session to open a pool, which will in turn - // set the node UP when the first channel gets opened. - distanceReporter.setDistance(node, NodeDistance.LOCAL); - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } - - @Override - public void onUp(@NonNull Node node) { - if (filter.test(node)) { - // Normally this is already the case, but the filter could be dynamic and have ignored the - // node previously. 
- distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (localDcLiveNodes.add(node)) { - LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } - - @Override - public void onDown(@NonNull Node node) { - if (localDcLiveNodes.remove(node)) { - LOG.debug("[{}] {} went DOWN, removed from live set", logPrefix, node); - } - } - - @Override - public void onRemove(@NonNull Node node) { - if (localDcLiveNodes.remove(node)) { - LOG.debug("[{}] {} was removed, removed from live set", logPrefix, node); - } - } - - @Override - public void close() { - // nothing to do - } - - private String getLocalDcFromConfig( - InternalDriverContext internalContext, - @NonNull String profileName, - DriverExecutionProfile config) { - String localDc = internalContext.getLocalDatacenter(profileName); - if (localDc != null) { - LOG.debug("[{}] Local DC set from builder: {}", logPrefix, localDc); - } else { - localDc = config.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); - if (localDc != null) { - LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDc); - } - } - return localDc; - } - - @SuppressWarnings("unchecked") - private Predicate getFilterFromConfig(InternalDriverContext context, String profileName) { - Predicate filterFromBuilder = context.getNodeFilter(profileName); - return (filterFromBuilder != null) - ? 
filterFromBuilder - : (Predicate) - Reflection.buildFromConfig( - context, - profileName, - DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, - Predicate.class) - .orElse(INCLUDE_ALL_NODES); + protected Optional discoverLocalDc(@NonNull Map nodes) { + return new MandatoryLocalDcHelper(context, profile, logPrefix).discoverLocalDc(nodes); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java new file mode 100644 index 00000000000..10630e58fad --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java @@ -0,0 +1,103 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.util.Reflection; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import java.util.UUID; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A {@link NodeFilterHelper} implementation that fetches the user-supplied filter, if any, from the + * programmatic configuration API, or else, from the driver configuration. If no user-supplied + * filter can be retrieved, a dummy filter will be used which accepts all nodes unconditionally. + * + *

      Note that, regardless of the filter supplied by the end user, if a local datacenter is defined + * the filter returned by this implementation will always reject nodes that report a datacenter + * different from the local one. + */ +@ThreadSafe +public class DefaultNodeFilterHelper implements NodeFilterHelper { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultNodeFilterHelper.class); + + @NonNull protected final InternalDriverContext context; + @NonNull protected final DriverExecutionProfile profile; + @NonNull protected final String logPrefix; + + public DefaultNodeFilterHelper( + @NonNull InternalDriverContext context, + @NonNull DriverExecutionProfile profile, + @NonNull String logPrefix) { + this.context = context; + this.profile = profile; + this.logPrefix = logPrefix; + } + + @NonNull + @Override + public Predicate createNodeFilter( + @Nullable String localDc, @NonNull Map nodes) { + Predicate filterFromConfig = nodeFilterFromConfig(); + return node -> { + if (localDc != null && !localDc.equals(node.getDatacenter())) { + LOG.debug( + "[{}] Ignoring {} because it doesn't belong to the local DC {}", + logPrefix, + node, + localDc); + return false; + } else if (!filterFromConfig.test(node)) { + LOG.debug( + "[{}] Ignoring {} because it doesn't match the user-provided predicate", + logPrefix, + node); + return false; + } else { + return true; + } + }; + } + + @NonNull + protected Predicate nodeFilterFromConfig() { + Predicate filter = context.getNodeFilter(profile.getName()); + if (filter != null) { + LOG.debug("[{}] Node filter set programmatically", logPrefix); + } else { + @SuppressWarnings("unchecked") + Predicate filterFromConfig = + Reflection.buildFromConfig( + context, + profile.getName(), + DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, + Predicate.class) + .orElse(INCLUDE_ALL_NODES); + filter = filterFromConfig; + LOG.debug("[{}] Node filter set from configuration", logPrefix); + } + return filter; + } +} diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/InferringLocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/InferringLocalDcHelper.java new file mode 100644 index 00000000000..7261402ef3a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/InferringLocalDcHelper.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import static com.datastax.oss.driver.internal.core.time.Clock.LOG; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.HashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import net.jcip.annotations.ThreadSafe; + +/** + * An implementation of {@link LocalDcHelper} that fetches the user-supplied datacenter, if any, + * from the programmatic configuration API, or else, from the driver configuration. 
If no local + * datacenter is explicitly defined, this implementation infers the local datacenter from the + * contact points: if all contact points share the same datacenter, that datacenter is returned. If + * the contact points are from different datacenters, or if no contact points reported any + * datacenter, an {@link IllegalStateException} is thrown. + */ +@ThreadSafe +public class InferringLocalDcHelper extends OptionalLocalDcHelper { + + public InferringLocalDcHelper( + @NonNull InternalDriverContext context, + @NonNull DriverExecutionProfile profile, + @NonNull String logPrefix) { + super(context, profile, logPrefix); + } + + /** @return The local datacenter; always present. */ + @NonNull + @Override + public Optional discoverLocalDc(@NonNull Map nodes) { + Optional optionalLocalDc = super.discoverLocalDc(nodes); + if (optionalLocalDc.isPresent()) { + return optionalLocalDc; + } + Set datacenters = new HashSet<>(); + Set contactPoints = context.getMetadataManager().getContactPoints(); + for (Node node : contactPoints) { + String datacenter = node.getDatacenter(); + if (datacenter != null) { + datacenters.add(datacenter); + } + } + if (datacenters.size() == 1) { + String localDc = datacenters.iterator().next(); + LOG.info("[{}] Inferred local DC from contact points: {}", logPrefix, localDc); + return Optional.of(localDc); + } + if (datacenters.isEmpty()) { + throw new IllegalStateException( + "The local DC could not be inferred from contact points, please set it explicitly (see " + + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() + + " in the config, or set it programmatically with SessionBuilder.withLocalDatacenter)"); + } + throw new IllegalStateException( + String.format( + "No local DC was provided, but the contact points are from different DCs: %s; " + + "please set the local DC explicitly (see " + + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() + + " in the config, or set it programmatically with 
SessionBuilder.withLocalDatacenter)", + formatNodesAndDcs(contactPoints))); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/LocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/LocalDcHelper.java new file mode 100644 index 00000000000..96b223a983c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/LocalDcHelper.java @@ -0,0 +1,48 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import net.jcip.annotations.ThreadSafe; + +@FunctionalInterface +@ThreadSafe +public interface LocalDcHelper { + + /** + * Returns the local datacenter, if it can be discovered, or returns {@link Optional#empty empty} + * otherwise. + * + *

      Implementors may choose to throw {@link IllegalStateException} instead of returning {@link + * Optional#empty empty}, if they require a local datacenter to be defined in order to operate + * properly. + * + * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) + * when the load balancing policy was {@linkplain LoadBalancingPolicy#init(Map, + * LoadBalancingPolicy.DistanceReporter) initialized}. This argument is provided in case + * implementors need to inspect the cluster topology to discover the local datacenter. + * @return The local datacenter, or {@link Optional#empty empty} if none found. + * @throws IllegalStateException if the local datacenter could not be discovered, and this policy + * cannot operate without it. + */ + @NonNull + Optional discoverLocalDc(@NonNull Map nodes); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/MandatoryLocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/MandatoryLocalDcHelper.java new file mode 100644 index 00000000000..b772ed7c4de --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/MandatoryLocalDcHelper.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * An implementation of {@link LocalDcHelper} that fetches the user-supplied datacenter, if any, + * from the programmatic configuration API, or else, from the driver configuration. If no local + * datacenter is explicitly defined, this implementation will consider two distinct situations: + * + *

        + *
      1. If no explicit contact points were provided, this implementation will infer the local + * datacenter from the implicit contact point (localhost). + *
      2. If explicit contact points were provided however, this implementation will throw {@link + * IllegalStateException}. + *
      + */ +@ThreadSafe +public class MandatoryLocalDcHelper extends OptionalLocalDcHelper { + + private static final Logger LOG = LoggerFactory.getLogger(MandatoryLocalDcHelper.class); + + public MandatoryLocalDcHelper( + @NonNull InternalDriverContext context, + @NonNull DriverExecutionProfile profile, + @NonNull String logPrefix) { + super(context, profile, logPrefix); + } + + /** @return The local datacenter; always present. */ + @NonNull + @Override + public Optional discoverLocalDc(@NonNull Map nodes) { + Optional optionalLocalDc = super.discoverLocalDc(nodes); + if (optionalLocalDc.isPresent()) { + return optionalLocalDc; + } + Set contactPoints = context.getMetadataManager().getContactPoints(); + if (context.getMetadataManager().wasImplicitContactPoint()) { + // We only allow automatic inference of the local DC in this specific case + assert contactPoints.size() == 1; + Node contactPoint = contactPoints.iterator().next(); + String localDc = contactPoint.getDatacenter(); + if (localDc != null) { + LOG.debug( + "[{}] Local DC set from implicit contact point {}: {}", + logPrefix, + contactPoint, + localDc); + return Optional.of(localDc); + } else { + throw new IllegalStateException( + "The local DC could not be inferred from implicit contact point, please set it explicitly (see " + + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() + + " in the config, or set it programmatically with SessionBuilder.withLocalDatacenter)"); + } + } else { + throw new IllegalStateException( + "Since you provided explicit contact points, the local DC must be explicitly set (see " + + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() + + " in the config, or set it programmatically with SessionBuilder.withLocalDatacenter). " + + "Current contact points are: " + + formatNodesAndDcs(contactPoints) + + ". 
Current DCs in this cluster are: " + + formatDcs(nodes.values())); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java new file mode 100644 index 00000000000..2b082bfe324 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import java.util.UUID; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; + +@FunctionalInterface +@ThreadSafe +public interface NodeFilterHelper { + + Predicate INCLUDE_ALL_NODES = n -> true; + + /** + * Creates a new node filter. + * + * @param localDc The local datacenter, or null if none defined. + * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) + * when the load balancing policy was {@linkplain LoadBalancingPolicy#init(Map, + * LoadBalancingPolicy.DistanceReporter) initialized}. 
This argument is provided in case + * implementors need to inspect the cluster topology to create the node filter. + * @return the node filter to use. + */ + @NonNull + Predicate createNodeFilter(@Nullable String localDc, @NonNull Map nodes); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java new file mode 100644 index 00000000000..e513b2b8b83 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java @@ -0,0 +1,136 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.TreeSet; +import java.util.UUID; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * An implementation of {@link LocalDcHelper} that fetches the local datacenter from the + * programmatic configuration API, or else, from the driver configuration. If no user-supplied + * datacenter can be retrieved, it returns {@link Optional#empty empty}. + */ +@ThreadSafe +public class OptionalLocalDcHelper implements LocalDcHelper { + + private static final Logger LOG = LoggerFactory.getLogger(OptionalLocalDcHelper.class); + + @NonNull protected final InternalDriverContext context; + @NonNull protected final DriverExecutionProfile profile; + @NonNull protected final String logPrefix; + + public OptionalLocalDcHelper( + @NonNull InternalDriverContext context, + @NonNull DriverExecutionProfile profile, + @NonNull String logPrefix) { + this.context = context; + this.profile = profile; + this.logPrefix = logPrefix; + } + + /** + * @return The local datacenter from the programmatic configuration API, or from the driver + * configuration; {@link Optional#empty empty} if none found. 
+ */ + @Override + @NonNull + public Optional discoverLocalDc(@NonNull Map nodes) { + String localDc = context.getLocalDatacenter(profile.getName()); + if (localDc != null) { + LOG.debug("[{}] Local DC set programmatically: {}", logPrefix, localDc); + checkLocalDatacenterCompatibility(localDc, context.getMetadataManager().getContactPoints()); + return Optional.of(localDc); + } else if (profile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) { + localDc = profile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER); + LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDc); + checkLocalDatacenterCompatibility(localDc, context.getMetadataManager().getContactPoints()); + return Optional.of(localDc); + } else { + return Optional.empty(); + } + } + + /** + * Checks if the contact points are compatible with the local datacenter specified either through + * configuration, or programmatically. + * + *

      The default implementation logs a warning when a contact point reports a datacenter + * different from the local one. + * + * @param localDc The local datacenter, as specified in the config, or programmatically. + * @param contactPoints The contact points provided when creating the session. + */ + protected void checkLocalDatacenterCompatibility( + @NonNull String localDc, Set contactPoints) { + Set badContactPoints = new LinkedHashSet<>(); + for (Node node : contactPoints) { + if (!Objects.equals(localDc, node.getDatacenter())) { + badContactPoints.add(node); + } + } + if (!badContactPoints.isEmpty()) { + LOG.warn( + "[{}] You specified {} as the local DC, but some contact points are from a different DC: {}; " + + "please provide the correct local DC, or check your contact points", + logPrefix, + localDc, + formatNodesAndDcs(badContactPoints)); + } + } + + /** + * Formats the given nodes as a string detailing each contact point and its datacenter, for + * informational purposes. + */ + @NonNull + protected String formatNodesAndDcs(Iterable nodes) { + List l = new ArrayList<>(); + for (Node node : nodes) { + l.add(node + "=" + node.getDatacenter()); + } + return String.join(", ", l); + } + + /** + * Formats the given nodes as a string detailing each distinct datacenter, for informational + * purposes. + */ + @NonNull + protected String formatDcs(Iterable nodes) { + List l = new ArrayList<>(); + for (Node node : nodes) { + if (node.getDatacenter() != null) { + l.add(node.getDatacenter()); + } + } + return String.join(", ", new TreeSet<>(l)); + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index b50a5193dc6..b433ff375b6 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -162,13 +162,13 @@ datastax-java-driver { # The datacenter that is considered "local": the default policy will only include nodes from # this datacenter in its query plans. 
# - # This option can only be absent if you specified no contact points: in that case, the driver - # defaults to 127.0.0.1:9042, and that node's datacenter is used as the local datacenter. - # - # As soon as you provide contact points (either through the configuration or through the cluster - # builder), you must define the local datacenter explicitly, and initialization will fail if - # this property is absent. In addition, all contact points should be from this datacenter; - # warnings will be logged for nodes that are from a different one. + # When using the default policy, this option can only be absent if you specified no contact + # points: in that case, the driver defaults to 127.0.0.1:9042, and that node's datacenter is + # used as the local datacenter. As soon as you provide contact points (either through the + # configuration or through the session builder), you must define the local datacenter + # explicitly, and initialization will fail if this property is absent. In addition, all contact + # points should be from this datacenter; warnings will be logged for nodes that are from a + # different one. # # This can also be specified programmatically with SessionBuilder.withLocalDatacenter. If both # are specified, the programmatic value takes precedence. diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java new file mode 100644 index 00000000000..47b73c126a6 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.UUID; +import java.util.function.Predicate; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class BasicLoadBalancingPolicyEventsTest extends DefaultLoadBalancingPolicyTestBase { + + @Mock private Predicate filter; + + private BasicLoadBalancingPolicy policy; + + @Before + @Override + public void setup() { + super.setup(); + when(filter.test(any(Node.class))).thenReturn(true); + when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(filter); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + policy = createAndInitPolicy(); + reset(distanceReporter); + } + + 
@Test + public void should_remove_down_node_from_live_set() { + // When + policy.onDown(node2); + + // Then + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); + // should have been called only once, during initialization, but not during onDown + verify(filter).test(node2); + } + + @Test + public void should_remove_removed_node_from_live_set() { + // When + policy.onRemove(node2); + + // Then + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); + // should have been called only once, during initialization, but not during onRemove + verify(filter).test(node2); + } + + @Test + public void should_set_added_node_to_local() { + // When + policy.onAdd(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + verify(filter).test(node3); + // Not added to the live set yet, we're waiting for the pool to open + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + } + + @Test + public void should_ignore_added_node_when_filtered() { + // Given + when(filter.test(node3)).thenReturn(false); + + // When + policy.onAdd(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + } + + @Test + public void should_ignore_added_node_when_remote_dc() { + // Given + when(node3.getDatacenter()).thenReturn("dc2"); + + // When + policy.onAdd(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + } + + @Test + public void should_add_up_node_to_live_set() { + // When + policy.onUp(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + verify(filter).test(node3); + 
assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2, node3); + } + + @Test + public void should_ignore_up_node_when_filtered() { + // Given + when(filter.test(node3)).thenReturn(false); + + // When + policy.onUp(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + verify(filter).test(node3); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + } + + @Test + public void should_ignore_up_node_when_remote_dc() { + // Given + when(node3.getDatacenter()).thenReturn("dc2"); + + // When + policy.onUp(node3); + + // Then + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + } + + @NonNull + protected BasicLoadBalancingPolicy createAndInitPolicy() { + BasicLoadBalancingPolicy policy = + new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + return policy; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java new file mode 100644 index 00000000000..217c01d7baa --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java @@ -0,0 +1,215 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.filter; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.UUID; +import org.junit.Test; + +public class BasicLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { + + @Override + public void setup() { + super.setup(); + reset(defaultProfile); + when(defaultProfile.getName()).thenReturn(DriverExecutionProfile.DEFAULT_NAME); + } + + @Test + public void should_use_local_dc_if_provided_via_config() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(true); + when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn("dc1"); + 
when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + // the parent class sets the config option to "dc1" + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).contains("dc1"); + } + + @Test + public void should_use_local_dc_if_provided_via_context() { + // Given + when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + // note: programmatic takes priority, the config won't even be inspected so no need to stub the + // option to null + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).contains("dc1"); + verify(defaultProfile, never()) + .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); + } + + @Test + public void should_not_infer_local_dc_if_not_provided() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + BasicLoadBalancingPolicy policy = + new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME) {}; + + // When + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).isEmpty(); + } + + @Test + public void should_warn_if_contact_points_not_in_local_dc() { + // Given + when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2, node3)); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + 
UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(appender, atLeast(1)).doAppend(loggingEventCaptor.capture()); + Iterable warnLogs = + filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); + assertThat(warnLogs).hasSize(1); + assertThat(warnLogs.iterator().next().getFormattedMessage()) + .contains( + "You specified dc1 as the local DC, but some contact points are from a different DC") + .contains("node2=dc2") + .contains("node3=dc3"); + } + + @Test + public void should_include_nodes_from_local_dc_if_local_dc_set() { + // Given + when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(node1.getState()).thenReturn(NodeState.UP); + when(node2.getState()).thenReturn(NodeState.DOWN); + when(node3.getState()).thenReturn(NodeState.UNKNOWN); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + // Set distance for all nodes in the local DC + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + // But only include UP or UNKNOWN nodes in the live set + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); + } + + @Test + public void should_ignore_nodes_from_remote_dcs_if_local_dc_set() { + // Given + when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, 
UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + } + + @Test + public void should_not_ignore_nodes_from_remote_dcs_if_local_dc_not_set() { + // Given + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2, node3); + } + + @Test + public void should_ignore_nodes_excluded_by_filter() { + // Given + when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn(node -> node.equals(node1)); + + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + } + + @NonNull + protected BasicLoadBalancingPolicy createPolicy() { + return new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java new file mode 100644 index 00000000000..a1825820046 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java @@ -0,0 +1,261 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static java.util.Collections.emptySet; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.then; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import 
com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.Optional; +import java.util.UUID; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +public class BasicLoadBalancingPolicyQueryPlanTest extends DefaultLoadBalancingPolicyTestBase { + + protected static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); + protected static final ByteBuffer ROUTING_KEY = Bytes.fromHexString("0xdeadbeef"); + + @Mock protected Request request; + @Mock protected DefaultSession session; + @Mock protected Metadata metadata; + @Mock protected TokenMap tokenMap; + @Mock protected Token routingToken; + + protected BasicLoadBalancingPolicy policy; + + @Before + @Override + public void setup() { + super.setup(); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + when(metadataManager.getMetadata()).thenReturn(metadata); + when(metadata.getTokenMap()).thenAnswer(invocation -> Optional.of(this.tokenMap)); + + policy = createAndInitPolicy(); + + // Note: this test relies on the fact that the policy uses a CopyOnWriteArraySet which preserves + // insertion order. 
+ assertThat(policy.liveNodes).containsExactly(node1, node2, node3, node4, node5); + } + + @Test + public void should_use_round_robin_when_no_request() { + // Given + request = null; + + // When + assertRoundRobinQueryPlans(); + + // Then + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_no_session() { + // Given + session = null; + + // When + assertRoundRobinQueryPlans(); + + // Then + then(request).should(never()).getRoutingKey(); + then(request).should(never()).getRoutingToken(); + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_request_has_no_routing_keyspace() { + // By default from Mockito: + assertThat(request.getKeyspace()).isNull(); + assertThat(request.getRoutingKeyspace()).isNull(); + + assertRoundRobinQueryPlans(); + + verify(request, never()).getRoutingKey(); + verify(request, never()).getRoutingToken(); + verify(metadataManager, never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_request_has_no_routing_key_or_token() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + assertThat(request.getRoutingKey()).isNull(); + assertThat(request.getRoutingToken()).isNull(); + + assertRoundRobinQueryPlans(); + + verify(metadataManager, never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_token_map_absent() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + + when(metadata.getTokenMap()).thenReturn(Optional.empty()); + + assertRoundRobinQueryPlans(); + + verify(metadata, atLeast(1)).getTokenMap(); + } + + @Test + public void + should_use_round_robin_when_token_map_returns_no_replicas_using_request_keyspace_and_routing_key() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, 
ROUTING_KEY)).thenReturn(Collections.emptySet()); + + assertRoundRobinQueryPlans(); + + verify(tokenMap, atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); + } + + @Test + public void + should_use_round_robin_when_token_map_returns_no_replicas_using_session_keyspace_and_routing_key() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(null); + given(session.getKeyspace()).willReturn(Optional.of(KEYSPACE)); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(emptySet()); + // When + assertRoundRobinQueryPlans(); + // Then + then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); + } + + @Test + public void + should_use_round_robin_when_token_map_returns_no_replicas_using_request_keyspace_and_routing_token() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingToken()).willReturn(routingToken); + given(tokenMap.getReplicas(KEYSPACE, routingToken)).willReturn(emptySet()); + // When + assertRoundRobinQueryPlans(); + // Then + then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, routingToken); + } + + private void assertRoundRobinQueryPlans() { + for (int i = 0; i < 3; i++) { + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node1, node2, node3, node4, node5); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node2, node3, node4, node5, node1); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node4, node5, node1, node2); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node4, node5, node1, node2, node3); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node5, node1, node2, node3, node4); + } + } + + @Test + public void should_prioritize_single_replica() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + 
when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3)); + + // node3 always first, round-robin on the rest + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node1, node2, node4, node5); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node2, node4, node5, node1); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node4, node5, node1, node2); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node5, node1, node2, node4); + + // Should not shuffle replicas since there is only one + verify(policy, never()).shuffleHead(any(), anyInt()); + } + + @Test + public void should_prioritize_and_shuffle_replicas() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3, node5)); + + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node5, node1, node2, node4); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node5, node2, node4, node1); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node5, node4, node1, node2); + + verify(policy, times(3)).shuffleHead(any(), eq(2)); + // No power of two choices with only two replicas + verify(session, never()).getPools(); + } + + protected BasicLoadBalancingPolicy createAndInitPolicy() { + // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + NonShufflingBasicLoadBalancingPolicy policy = + spy(new NonShufflingBasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME)); + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, + UUID.randomUUID(), node2, + UUID.randomUUID(), node3, + UUID.randomUUID(), node4, + UUID.randomUUID(), 
node5), + distanceReporter); + return policy; + } + + static class NonShufflingBasicLoadBalancingPolicy extends BasicLoadBalancingPolicy { + NonShufflingBasicLoadBalancingPolicy(DriverContext context, String profileName) { + super(context, profileName); + } + + @Override + protected void shuffleHead(Object[] currentNodes, int replicaCount) { + // nothing (keep in same order) + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java new file mode 100644 index 00000000000..f46c6e8a64d --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.UUID; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class DcInferringLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { + + @NonNull + @Override + protected BasicLoadBalancingPolicy createAndInitPolicy() { + DcInferringLoadBalancingPolicy policy = + new DcInferringLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + return policy; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java new file mode 100644 index 00000000000..d13be12d08c --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -0,0 +1,220 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.filter; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.UUID; +import org.junit.Test; + +public class DcInferringLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { + + @Test + public void should_use_local_dc_if_provided_via_config() { + // Given + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + // the parent class sets the config option to "dc1" + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).contains("dc1"); + } + + @Test + public void should_use_local_dc_if_provided_via_context() { + // Given + when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + // note: programmatic takes priority, the config won't even be inspected so no need to stub the + // option to null + BasicLoadBalancingPolicy 
policy = createPolicy(); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).contains("dc1"); + verify(defaultProfile, never()) + .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); + } + + @Test + public void should_infer_local_dc_from_contact_points() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.getLocalDatacenter()).contains("dc1"); + } + + @Test + public void should_require_local_dc_if_contact_points_from_different_dcs() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + when(node2.getDatacenter()).thenReturn("dc2"); + BasicLoadBalancingPolicy policy = createPolicy(); + + thrown.expect(IllegalStateException.class); + thrown.expectMessage( + "No local DC was provided, but the contact points are from different DCs: node1=dc1, node2=dc2"); + + // When + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + } + + @Test + public void should_require_local_dc_if_contact_points_have_null_dcs() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + when(node1.getDatacenter()).thenReturn(null); + when(node2.getDatacenter()).thenReturn(null); + BasicLoadBalancingPolicy policy = createPolicy(); + + thrown.expect(IllegalStateException.class); + thrown.expectMessage( + "The local DC could 
not be inferred from contact points, please set it explicitly"); + + // When + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + } + + @Test + public void should_warn_if_contact_points_not_in_local_dc() { + // Given + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2, node3)); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(appender, atLeast(1)).doAppend(loggingEventCaptor.capture()); + Iterable warnLogs = + filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); + assertThat(warnLogs).hasSize(1); + assertThat(warnLogs.iterator().next().getFormattedMessage()) + .contains( + "You specified dc1 as the local DC, but some contact points are from a different DC") + .contains("node2=dc2") + .contains("node3=dc3"); + } + + @Test + public void should_include_nodes_from_local_dc() { + // Given + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + when(node1.getState()).thenReturn(NodeState.UP); + when(node2.getState()).thenReturn(NodeState.DOWN); + when(node3.getState()).thenReturn(NodeState.UNKNOWN); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + // Set distance for all nodes in the local DC + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + // But only include UP or UNKNOWN nodes in the live set + 
assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); + } + + @Test + public void should_ignore_nodes_from_remote_dcs() { + // Given + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + } + + @Test + public void should_ignore_nodes_excluded_by_filter() { + // Given + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn(node -> node.equals(node1)); + + BasicLoadBalancingPolicy policy = createPolicy(); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + } + + @NonNull + protected BasicLoadBalancingPolicy createPolicy() { + return new DcInferringLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java new file mode 100644 index 00000000000..6d351df0265 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java @@ -0,0 +1,57 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.mockito.Mockito.spy; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.UUID; + +public class DcInferringLoadBalancingPolicyQueryPlanTest + extends BasicLoadBalancingPolicyQueryPlanTest { + + @Override + protected DcInferringLoadBalancingPolicy createAndInitPolicy() { + // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + NonShufflingDcInferringLoadBalancingPolicy policy = + spy( + new NonShufflingDcInferringLoadBalancingPolicy( + context, DriverExecutionProfile.DEFAULT_NAME)); + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, + UUID.randomUUID(), node2, + UUID.randomUUID(), node3, + UUID.randomUUID(), node4, + UUID.randomUUID(), node5), + distanceReporter); + return policy; + } + + static class 
NonShufflingDcInferringLoadBalancingPolicy extends DcInferringLoadBalancingPolicy { + NonShufflingDcInferringLoadBalancingPolicy(DriverContext context, String profileName) { + super(context, profileName); + } + + @Override + protected void shuffleHead(Object[] currentNodes, int replicaCount) { + // nothing (keep in same order) + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java index a1bec905103..efec5dbcf19 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java @@ -16,148 +16,25 @@ package com.datastax.oss.driver.internal.core.loadbalancing; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; -import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import java.util.function.Predicate; -import org.junit.Before; -import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) -public class DefaultLoadBalancingPolicyEventsTest extends DefaultLoadBalancingPolicyTestBase { 
+public class DefaultLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { - @Mock private Predicate filter; - - private DefaultLoadBalancingPolicy policy; - - @Before + @NonNull @Override - public void setup() { - super.setup(); - - when(filter.test(any(Node.class))).thenReturn(true); - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(filter); - - when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); - - policy = new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + protected DefaultLoadBalancingPolicy createAndInitPolicy() { + DefaultLoadBalancingPolicy policy = + new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); - - reset(distanceReporter); - } - - @Test - public void should_remove_down_node_from_live_set() { - // When - policy.onDown(node2); - - // Then - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1); - verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); - // should have been called only once, during initialization, but not during onDown - verify(filter).test(node2); - } - - @Test - public void should_remove_removed_node_from_live_set() { - // When - policy.onRemove(node2); - - // Then - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1); - verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); - // should have been called only once, during initialization, but not during onRemove - verify(filter).test(node2); - } - - @Test - public void should_set_added_node_to_local() { - // When - policy.onAdd(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); - verify(filter).test(node3); - // Not added to the live set yet, we're waiting for 
the pool to open - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); - } - - @Test - public void should_ignore_added_node_when_filtered() { - // Given - when(filter.test(node3)).thenReturn(false); - - // When - policy.onAdd(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); - } - - @Test - public void should_ignore_added_node_when_remote_dc() { - // Given - when(node3.getDatacenter()).thenReturn("dc2"); - - // When - policy.onAdd(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); - } - - @Test - public void should_add_up_node_to_live_set() { - // When - policy.onUp(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); - verify(filter).test(node3); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2, node3); - } - - @Test - public void should_ignore_up_node_when_filtered() { - // Given - when(filter.test(node3)).thenReturn(false); - - // When - policy.onUp(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - verify(filter).test(node3); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); - } - - @Test - public void should_ignore_up_node_when_remote_dc() { - // Given - when(node3.getDatacenter()).thenReturn("dc2"); - - // When - policy.onUp(node3); - - // Then - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + return policy; } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index f1d2c68fa43..a6bfd6590c8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; import org.junit.Test; @@ -38,29 +39,31 @@ public class DefaultLoadBalancingPolicyInitTest extends DefaultLoadBalancingPoli @Test public void should_use_local_dc_if_provided_via_config() { // Given + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); // the parent class sets the config option to "dc1" + DefaultLoadBalancingPolicy policy = createPolicy(); // When - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.localDc).isEqualTo("dc1"); + assertThat(policy.getLocalDatacenter()).contains("dc1"); } @Test public void should_use_local_dc_if_provided_via_context() { // Given when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); // note: programmatic takes priority, the config won't even be inspected so no need to stub the // option to null + DefaultLoadBalancingPolicy policy = createPolicy(); // When - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // 
Then - assertThat(policy.localDc).isEqualTo("dc1"); + assertThat(policy.getLocalDatacenter()).contains("dc1"); verify(defaultProfile, never()) .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); } @@ -68,32 +71,30 @@ public void should_use_local_dc_if_provided_via_context() { @Test public void should_infer_local_dc_if_no_explicit_contact_points() { // Given - when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null)) - .thenReturn(null); + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); when(metadataManager.wasImplicitContactPoint()).thenReturn(true); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); // When policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.localDc).isEqualTo("dc1"); + assertThat(policy.getLocalDatacenter()).contains("dc1"); } @Test public void should_require_local_dc_if_explicit_contact_points() { // Given - when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null)) - .thenReturn(null); - when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node2)); + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); when(metadataManager.wasImplicitContactPoint()).thenReturn(false); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); thrown.expect(IllegalStateException.class); - thrown.expectMessage("You provided explicit contact points, the local DC must be specified"); + thrown.expectMessage( + "Since you provided explicit contact points, the local DC must be explicitly set"); // When 
policy.init(ImmutableMap.of(UUID.randomUUID(), node2), distanceReporter); @@ -105,8 +106,7 @@ public void should_warn_if_contact_points_not_in_local_dc() { when(node2.getDatacenter()).thenReturn("dc2"); when(node3.getDatacenter()).thenReturn("dc3"); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2, node3)); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); // When policy.init( @@ -133,8 +133,7 @@ public void should_include_nodes_from_local_dc() { when(node1.getState()).thenReturn(NodeState.UP); when(node2.getState()).thenReturn(NodeState.DOWN); when(node3.getState()).thenReturn(NodeState.UNKNOWN); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); // When policy.init( @@ -148,7 +147,7 @@ public void should_include_nodes_from_local_dc() { verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); // But only include UP or UNKNOWN nodes in the live set - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1, node3); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); } @Test @@ -157,8 +156,7 @@ public void should_ignore_nodes_from_remote_dcs() { when(node2.getDatacenter()).thenReturn("dc2"); when(node3.getDatacenter()).thenReturn("dc3"); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); // When policy.init( @@ -170,7 +168,7 @@ public void should_ignore_nodes_from_remote_dcs() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, 
NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); } @Test @@ -180,8 +178,7 @@ public void should_ignore_nodes_excluded_by_filter() { when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) .thenReturn(node -> node.equals(node1)); - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = createPolicy(); // When policy.init( @@ -193,6 +190,11 @@ public void should_ignore_nodes_excluded_by_filter() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.localDcLiveNodes).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + } + + @NonNull + protected DefaultLoadBalancingPolicy createPolicy() { + return new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java index 60d67923935..e8ed2f9aaad 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java @@ -15,60 +15,23 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static 
org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import com.datastax.oss.protocol.internal.util.Bytes; -import java.nio.ByteBuffer; -import java.util.Collections; -import java.util.Optional; import java.util.UUID; -import org.junit.Before; -import org.junit.Test; -import org.mockito.Mock; -public class DefaultLoadBalancingPolicyQueryPlanTest extends DefaultLoadBalancingPolicyTestBase { +public class DefaultLoadBalancingPolicyQueryPlanTest extends BasicLoadBalancingPolicyQueryPlanTest { - private static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); - private static final ByteBuffer ROUTING_KEY = Bytes.fromHexString("0xdeadbeef"); - - @Mock private Request request; - @Mock private DefaultSession session; - @Mock private Metadata metadata; - @Mock private TokenMap tokenMap; - - private DefaultLoadBalancingPolicy policy; - - @Before @Override - public void setup() { - super.setup(); - - when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); - - when(metadataManager.getMetadata()).thenReturn(metadata); - when(metadata.getTokenMap()).thenAnswer(invocation -> Optional.of(this.tokenMap)); - + protected 
DefaultLoadBalancingPolicy createAndInitPolicy() { // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was // called (makes tests easier) - policy = spy(new NonShufflingPolicy(context, DriverExecutionProfile.DEFAULT_NAME)); + NonShufflingDefaultLoadBalancingPolicy policy = + spy( + new NonShufflingDefaultLoadBalancingPolicy( + context, DriverExecutionProfile.DEFAULT_NAME)); policy.init( ImmutableMap.of( UUID.randomUUID(), node1, @@ -77,114 +40,11 @@ public void setup() { UUID.randomUUID(), node4, UUID.randomUUID(), node5), distanceReporter); - - // Note: this test relies on the fact that the policy uses a CopyOnWriteArraySet which preserves - // insertion order. - assertThat(policy.localDcLiveNodes).containsExactly(node1, node2, node3, node4, node5); - } - - @Test - public void should_use_round_robin_when_request_has_no_routing_keyspace() { - // By default from Mockito: - assertThat(request.getKeyspace()).isNull(); - assertThat(request.getRoutingKeyspace()).isNull(); - - assertRoundRobinQueryPlans(); - - verify(request, never()).getRoutingKey(); - verify(request, never()).getRoutingToken(); - verify(metadataManager, never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_request_has_no_routing_key_or_token() { - when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - assertThat(request.getRoutingKey()).isNull(); - assertThat(request.getRoutingToken()).isNull(); - - assertRoundRobinQueryPlans(); - - verify(metadataManager, never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_token_map_absent() { - when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - when(request.getRoutingKey()).thenReturn(ROUTING_KEY); - - when(metadata.getTokenMap()).thenReturn(Optional.empty()); - - assertRoundRobinQueryPlans(); - - verify(metadata, atLeast(1)).getTokenMap(); - } - - @Test - public void should_use_round_robin_when_token_map_returns_no_replicas() { - 
when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - when(request.getRoutingKey()).thenReturn(ROUTING_KEY); - when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(Collections.emptySet()); - - assertRoundRobinQueryPlans(); - - verify(tokenMap, atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); - } - - private void assertRoundRobinQueryPlans() { - for (int i = 0; i < 3; i++) { - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node1, node2, node3, node4, node5); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node2, node3, node4, node5, node1); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node4, node5, node1, node2); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node4, node5, node1, node2, node3); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node5, node1, node2, node3, node4); - } - } - - @Test - public void should_prioritize_single_replica() { - when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - when(request.getRoutingKey()).thenReturn(ROUTING_KEY); - when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3)); - - // node3 always first, round-robin on the rest - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node1, node2, node4, node5); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node2, node4, node5, node1); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node4, node5, node1, node2); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node5, node1, node2, node4); - - // Should not shuffle replicas since there is only one - verify(policy, never()).shuffleHead(any(), anyInt()); - } - - @Test - public void should_prioritize_and_shuffle_replicas() { - when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - when(request.getRoutingKey()).thenReturn(ROUTING_KEY); - 
when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3, node5)); - - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node5, node1, node2, node4); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node5, node2, node4, node1); - assertThat(policy.newQueryPlan(request, session)) - .containsExactly(node3, node5, node4, node1, node2); - - verify(policy, times(3)).shuffleHead(any(), eq(2)); - // No power of two choices with only two replicas - verify(session, never()).getPools(); + return policy; } - static class NonShufflingPolicy extends DefaultLoadBalancingPolicy { - NonShufflingPolicy(DriverContext context, String profileName) { + static class NonShufflingDefaultLoadBalancingPolicy extends DefaultLoadBalancingPolicy { + NonShufflingDefaultLoadBalancingPolicy(DriverContext context, String profileName) { super(context, profileName); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java index e4f648eb3af..762f7bec889 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java @@ -68,12 +68,16 @@ public void setup() { when(context.getConfig()).thenReturn(config); when(config.getProfile(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(defaultProfile); - when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null)) + when(defaultProfile.getName()).thenReturn(DriverExecutionProfile.DEFAULT_NAME); + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(true); + when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) .thenReturn("dc1"); 
when(context.getMetadataManager()).thenReturn(metadataManager); - logger = (Logger) LoggerFactory.getLogger(DefaultLoadBalancingPolicy.class); + logger = + (Logger) LoggerFactory.getLogger("com.datastax.oss.driver.internal.core.loadbalancing"); logger.addAppender(appender); for (Node node : ImmutableList.of(node1, node2, node3, node4, node5)) { From 3d7687c8ed2021ecb4bc7a3a1cbe0f1021731fc3 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 22 Oct 2019 02:12:48 -0500 Subject: [PATCH 140/979] Minor tweaks to the current DefaultNode.toString() impl (#1345) --- .../internal/core/metadata/DefaultNode.java | 3 +-- .../core/metadata/DefaultNodeTest.java | 23 +++++++++++++++---- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index dc7036a8777..566107febf0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -182,8 +182,7 @@ public NodeMetricUpdater getMetricUpdater() { public String toString() { // Include the hash code because this class uses reference equality return String.format( - "Node(endPoint=%s, hostId=%s, hashCode=%s)", - getEndPoint(), getHostId(), Integer.toHexString(hashCode())); + "Node(endPoint=%s, hostId=%s, hashCode=%x)", getEndPoint(), getHostId(), hashCode()); } /** Note: deliberately not exposed by the public interface. 
*/ diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java index 21a68f0df9d..bc9a1b86e2f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultNodeTest.java @@ -25,19 +25,32 @@ public class DefaultNodeTest { + private final String uuidStr = "1e4687e6-f94e-432e-a792-216f89ef265f"; + private final UUID hostId = UUID.fromString(uuidStr); + private final EndPoint endPoint = new DefaultEndPoint(new InetSocketAddress("localhost", 9042)); + @Test public void should_have_expected_string_representation() { - String uuidStr = "1e4687e6-f94e-432e-a792-216f89ef265f"; - UUID hostId = UUID.fromString(uuidStr); - EndPoint endPoint = new DefaultEndPoint(new InetSocketAddress("localhost", 9042)); DefaultNode node = new DefaultNode(endPoint, MockedDriverContextFactory.defaultDriverContext()); node.hostId = hostId; String expected = String.format( - "Node(endPoint=localhost/127.0.0.1:9042, hostId=%s, hashCode=%s)", - uuidStr, Integer.toHexString(node.hashCode())); + "Node(endPoint=localhost/127.0.0.1:9042, hostId=1e4687e6-f94e-432e-a792-216f89ef265f, hashCode=%x)", + node.hashCode()); + assertThat(node.toString()).isEqualTo(expected); + } + + @Test + public void should_have_expected_string_representation_if_hostid_is_null() { + + DefaultNode node = new DefaultNode(endPoint, MockedDriverContextFactory.defaultDriverContext()); + node.hostId = null; + + String expected = + String.format( + "Node(endPoint=localhost/127.0.0.1:9042, hostId=null, hashCode=%x)", node.hashCode()); assertThat(node.toString()).isEqualTo(expected); } } From 6a879e6714e476833796fb7a28215e13b2070e48 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 22 Oct 2019 02:55:25 -0500 Subject: [PATCH 141/979] Add new methods to wrap collections using CQL internal 
form in CqlIdentifiers (#1344) --- .../driver/internal/core/CqlIdentifiers.java | 48 +++++++++++++++---- 1 file changed, 40 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java b/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java index 43b2b2fe249..02be974f46a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java @@ -18,24 +18,56 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.function.Function; public class CqlIdentifiers { - public static List wrap(Iterable in) { - ImmutableList.Builder out = ImmutableList.builder(); + @NonNull + private static List wrap( + @NonNull Iterable in, @NonNull Function fn) { + + Objects.requireNonNull(in, "Input Iterable must not be null"); + Objects.requireNonNull(fn, "CqlIdentifier conversion function must not be null"); + ImmutableList.Builder builder = ImmutableList.builder(); for (String name : in) { - out.add(CqlIdentifier.fromCql(name)); + builder.add(fn.apply(name)); } - return out.build(); + return builder.build(); + } + + @NonNull + public static List wrap(@NonNull Iterable in) { + return wrap(in, CqlIdentifier::fromCql); + } + + @NonNull + public static List wrapInternal(@NonNull Iterable in) { + return wrap(in, CqlIdentifier::fromInternal); } - public static Map wrapKeys(Map in) { - ImmutableMap.Builder out = ImmutableMap.builder(); + @NonNull + private static Map wrapKeys( + @NonNull Map in, @NonNull Function fn) { + Objects.requireNonNull(in, "Input Map must not be null"); + Objects.requireNonNull(fn, "CqlIdentifier 
conversion function must not be null"); + ImmutableMap.Builder builder = ImmutableMap.builder(); for (Map.Entry entry : in.entrySet()) { - out.put(CqlIdentifier.fromCql(entry.getKey()), entry.getValue()); + builder.put(fn.apply(entry.getKey()), entry.getValue()); } - return out.build(); + return builder.build(); + } + + @NonNull + public static Map wrapKeys(@NonNull Map in) { + return wrapKeys(in, CqlIdentifier::fromCql); + } + + @NonNull + public static Map wrapKeysInternal(@NonNull Map in) { + return wrapKeys(in, CqlIdentifier::fromInternal); } } From f1c98372c7b5aad905f548762d5bbb60156ba3ea Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 17:53:04 +0300 Subject: [PATCH 142/979] Fix failing test (JAVA-2459 follow-up) --- .../src/test/java/com/datastax/oss/driver/core/ConnectIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 2a639ac70e5..887680153cb 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -107,7 +107,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce SIMULACRON_RULE.cluster().acceptConnections(); // Then this doesn't throw - try (Session session = sessionFuture.get(30, TimeUnit.SECONDS)) {} + try (Session ignored = sessionFuture.get(30, TimeUnit.SECONDS)) {} } /** @@ -128,7 +128,7 @@ public void should_cleanup_on_lbp_init_failure() { .build()) .isInstanceOf(IllegalStateException.class) .hasMessageContaining( - "You provided explicit contact points, the local DC must be specified"); + "Since you provided explicit contact points, the local DC must be explicitly set"); // One second should be plenty of time for connections to close server side checkThat(() -> 
SIMULACRON_RULE.cluster().getConnections().getConnections().isEmpty()) .before(1, SECONDS) From 1eb7d279c1e8fe697bcf8d8971ba50a7b487961b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 18:00:03 +0300 Subject: [PATCH 143/979] Extract logic to compare nodes by IP addresses --- .../loadbalancing/NodeComparator.java | 42 +++++++++++++++++++ .../SortingLoadBalancingPolicy.java | 42 ++----------------- 2 files changed, 46 insertions(+), 38 deletions(-) create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java new file mode 100644 index 00000000000..25bbd565f6d --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java @@ -0,0 +1,42 @@ +package com.datastax.oss.driver.api.testinfra.loadbalancing; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.primitives.UnsignedBytes; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.util.Comparator; + +public final class NodeComparator implements Comparator { + + public static final NodeComparator INSTANCE = new NodeComparator(); + + private static final byte[] EMPTY = {}; + + private NodeComparator() {} + + @Override + public int compare(Node node1, Node node2) { + // compare address bytes, byte by byte. 
+ byte[] address1 = + node1 + .getBroadcastAddress() + .map(InetSocketAddress::getAddress) + .map(InetAddress::getAddress) + .orElse(EMPTY); + byte[] address2 = + node2 + .getBroadcastAddress() + .map(InetSocketAddress::getAddress) + .map(InetAddress::getAddress) + .orElse(EMPTY); + + int result = UnsignedBytes.lexicographicalComparator().compare(address1, address2); + if (result != 0) { + return result; + } + + int port1 = node1.getBroadcastAddress().map(InetSocketAddress::getPort).orElse(0); + int port2 = node2.getBroadcastAddress().map(InetSocketAddress::getPort).orElse(0); + return port1 - port2; + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/SortingLoadBalancingPolicy.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/SortingLoadBalancingPolicy.java index 678b1477dee..8be63b84203 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/SortingLoadBalancingPolicy.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/SortingLoadBalancingPolicy.java @@ -22,8 +22,7 @@ import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import edu.umd.cs.findbugs.annotations.NonNull; -import java.net.InetAddress; -import java.net.InetSocketAddress; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayDeque; import java.util.Map; import java.util.Queue; @@ -33,46 +32,13 @@ public class SortingLoadBalancingPolicy implements LoadBalancingPolicy { + private final Set nodes = new TreeSet<>(NodeComparator.INSTANCE); + @SuppressWarnings("unused") public SortingLoadBalancingPolicy(DriverContext context, String profileName) { // constructor needed for loading via config. } - private byte[] empty = {}; - private final Set nodes = - new TreeSet<>( - (node1, node2) -> { - // compare address bytes, byte by byte. 
- byte[] address1 = - node1 - .getBroadcastAddress() - .map(InetSocketAddress::getAddress) - .map(InetAddress::getAddress) - .orElse(empty); - byte[] address2 = - node2 - .getBroadcastAddress() - .map(InetSocketAddress::getAddress) - .map(InetAddress::getAddress) - .orElse(empty); - - // ipv6 vs ipv4, favor ipv6. - if (address1.length != address2.length) { - return address1.length - address2.length; - } - - for (int i = 0; i < address1.length; i++) { - int b1 = address1[i] & 0xFF; - int b2 = address2[i] & 0xFF; - if (b1 != b2) { - return b1 - b2; - } - } - int port1 = node1.getBroadcastAddress().map(InetSocketAddress::getPort).orElse(0); - int port2 = node2.getBroadcastAddress().map(InetSocketAddress::getPort).orElse(0); - return port1 - port2; - }); - public SortingLoadBalancingPolicy() {} @Override @@ -83,7 +49,7 @@ public void init(@NonNull Map nodes, @NonNull DistanceReporter dista @NonNull @Override - public Queue newQueryPlan(@NonNull Request request, @NonNull Session session) { + public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { return new ArrayDeque<>(nodes); } From 723dfe9893bb6386d60d99d51e1bd73358298d7e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 18:19:39 +0300 Subject: [PATCH 144/979] Add missing header --- .../testinfra/loadbalancing/NodeComparator.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java index 25bbd565f6d..8d61895af42 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/loadbalancing/NodeComparator.java @@ -1,3 +1,18 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package com.datastax.oss.driver.api.testinfra.loadbalancing; import com.datastax.oss.driver.api.core.metadata.Node; From deef1886c45cc7bdaa715c74f7372fd03ecf0f23 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 19:09:02 +0300 Subject: [PATCH 145/979] Add revapi exceptions for SortingLoadBalancingPolicy changes --- test-infra/revapi.json | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/test-infra/revapi.json b/test-infra/revapi.json index ebcb74d4683..e1a98fb2b05 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -42,6 +42,34 @@ "old": "method com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoaderBuilder com.datastax.oss.driver.api.testinfra.session.SessionUtils::configLoaderBuilder()", "new": "method com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder com.datastax.oss.driver.api.testinfra.session.SessionUtils::configLoaderBuilder()", "justification": "JAVA-2201: Expose a public API for programmatic config" + }, + { + "code": "java.annotation.removed", + "old": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(===com.datastax.oss.driver.api.core.session.Request===, com.datastax.oss.driver.api.core.session.Session)", + "new": "parameter java.util.Queue 
com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(===com.datastax.oss.driver.api.core.session.Request===, com.datastax.oss.driver.api.core.session.Session)", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "Method arguments were mistakenly annotated with @NonNull" + }, + { + "code": "java.annotation.added", + "old": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(===com.datastax.oss.driver.api.core.session.Request===, com.datastax.oss.driver.api.core.session.Session)", + "new": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(===com.datastax.oss.driver.api.core.session.Request===, com.datastax.oss.driver.api.core.session.Session)", + "annotation": "@edu.umd.cs.findbugs.annotations.Nullable", + "justification": "Method arguments were mistakenly annotated with @NonNull" + }, + { + "code": "java.annotation.removed", + "old": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(com.datastax.oss.driver.api.core.session.Request, ===com.datastax.oss.driver.api.core.session.Session===)", + "new": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(com.datastax.oss.driver.api.core.session.Request, ===com.datastax.oss.driver.api.core.session.Session===)", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "Method arguments were mistakenly annotated with @NonNull" + }, + { + "code": "java.annotation.added", + "old": "parameter java.util.Queue com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(com.datastax.oss.driver.api.core.session.Request, ===com.datastax.oss.driver.api.core.session.Session===)", + "new": "parameter java.util.Queue 
com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy::newQueryPlan(com.datastax.oss.driver.api.core.session.Request, ===com.datastax.oss.driver.api.core.session.Session===)", + "annotation": "@edu.umd.cs.findbugs.annotations.Nullable", + "justification": "Method arguments were mistakenly annotated with @NonNull" } ] } From 73a709ef43687a3588e5511823939e013d888bab Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 23 Oct 2019 13:41:15 +0300 Subject: [PATCH 146/979] JAVA-2509: Mention file-based approach for Cloud configuration in the manual (#1352) --- changelog/README.md | 1 + manual/cloud/README.md | 75 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 67 insertions(+), 9 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 6875f42b4b1..700781a9265 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation] JAVA-2509: Mention file-based approach for Cloud configuration in the manual - [improvement] JAVA-2447: Mention programmatic local DC method in Default LBP error message - [improvement] JAVA-2459: Improve extensibility of existing load balancing policies - [documentation] JAVA-2428: Add developer docs diff --git a/manual/cloud/README.md b/manual/cloud/README.md index cd016363116..8324de5c6eb 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -38,15 +38,15 @@ database. $ touch ConnectDatabase.java ``` - b. Copy the following code for your DataStax Driver into the `ConnectDatabase.java` file. - The following example implements a `ConnectDatabase` class to connect to your Apollo database, - runs a CQL query, and prints the output to the console. + b. **Programmatic configuration**. Copy the following code for your DataStax Driver into the + `ConnectDatabase.java` file. The following example implements a `ConnectDatabase` class to + connect to your Apollo database, runs a CQL query, and prints the output to the console. 
**Note:** With the `CqlSession.builder()` object, make sure to set the path to the secure - connect bundle for your Apollo database (**"/path/to/secure-connect-database_name.zip"**) in - the `withCloudSecureConnectBundle()` method as shown in the following example. - If converting from using the open source Cassandra Java Driver to the DSE Java Driver, ensure - that you change `CqlSession` to `DseSession`. + connect bundle for your Apollo database in the `withCloudSecureConnectBundle()` method as + shown in the following example. If converting from using the open source Cassandra Java Driver + to the DSE Java Driver, ensure that you change `CqlSession` to `DseSession`. + * DataStax Java Driver for Apache Cassandra 4.x (recommended) ```java @@ -78,7 +78,7 @@ database. } } ``` - * DSE Java 2.x + * DataStax Java Driver for DataStax Enterprise (DSE) 2.x ```java import com.datastax.dse.driver.api.core.DseSession; @@ -110,7 +110,63 @@ database. } ``` - c. Save and close the ConnectDatabase.java file. + c. **File-based configuration**. An alternative to the programmatic configuration method + detailed above is to include the information required to connect in the driver's configuration + file (`application.conf`). Merge the following options with any other options that you might + want to include in the configuration file: + + ```hocon + basic { + # change this to match the target keyspace + session-keyspace = keyspace_name + cloud { + # change this to match bundle's location; can be either a path on the local filesystem + # or a valid URL, e.g. http://acme.com/path/to/secure-connect-database_name.zip + secure-connect-bundle = /path/to/secure-connect-database_name.zip + } + } + advanced { + auth-provider { + class = PlainTextAuthProvider + # change below to match the appropriate credentials + username = user_name + password = password + } + } + ``` + + For more information about the driver configuration mechanism, refer to the + [driver documentation]. 
+ + With the above configuration, your ConnectDatabase.java file should be simplified as shown + below: + + ```java + import com.datastax.oss.driver.api.core.CqlSession; + import com.datastax.oss.driver.api.core.cql.ResultSet; + import com.datastax.oss.driver.api.core.cql.Row; + + public class ConnectDatabase { + + public static void main(String[] args) { + // Create the CqlSession object; it will read the configuration file and pick the right + // values to connect to the Apollo database. + try (CqlSession session = CqlSession.builder().build()) { + // Select the release_version from the system.local table: + ResultSet rs = session.execute("select release_version from system.local"); + Row row = rs.one(); + //Print the results of the CQL query to the console: + if (row != null) { + System.out.println(row.getString("release_version")); + } else { + System.out.println("An error occurred."); + } + } + } + } + ``` + + d. Save and close the ConnectDatabase.java file. [Download Maven]: https://maven.apache.org/download.cgi [Install Maven]: https://maven.apache.org/install.html @@ -121,3 +177,4 @@ database. 
[Download the secure connect bundle - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudObtainingCredentials.html [Download the secure connect bundle - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudObtainingCredentials.html [Example pom.xml file]: ../core/integration/#minimal-project-structure +[driver documentation]: ../core/configuration/ \ No newline at end of file From 365d6958f1ca9e2bdab59eddfb972f137fa8b8a8 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Wed, 23 Oct 2019 03:42:55 -0700 Subject: [PATCH 147/979] JAVA-2476: Improve error message when codec registry inspects a collection with a null element (#1351) --- changelog/README.md | 2 + .../codec/registry/CachingCodecRegistry.java | 31 +++++++++--- .../registry/CachingCodecRegistryTest.java | 49 +++++++++++++++++++ 3 files changed, 75 insertions(+), 7 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 700781a9265..70b6d7966ee 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2476: Improve error message when codec registry inspects a collection with a + null element - [documentation] JAVA-2509: Mention file-based approach for Cloud configuration in the manual - [improvement] JAVA-2447: Mention programmatic local DC method in Default LBP error message - [improvement] JAVA-2459: Improve extensibility of existing load balancing policies diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index 049e79869e5..eec29520eb1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -329,9 +329,15 @@ protected GenericType inspectType(@NonNull Object value, 
@Nullable DataType c // type. return cqlType == null ? JAVA_TYPE_FOR_EMPTY_LISTS : inferJavaTypeFromCqlType(cqlType); } else { + Object firstElement = list.get(0); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer list codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } GenericType elementType = inspectType( - list.get(0), cqlType == null ? null : ((ListType) cqlType).getElementType()); + firstElement, cqlType == null ? null : ((ListType) cqlType).getElementType()); return GenericType.listOf(elementType); } } else if (value instanceof Set) { @@ -339,10 +345,15 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c if (set.isEmpty()) { return cqlType == null ? JAVA_TYPE_FOR_EMPTY_SETS : inferJavaTypeFromCqlType(cqlType); } else { + Object firstElement = set.iterator().next(); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer set codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } GenericType elementType = inspectType( - set.iterator().next(), - cqlType == null ? null : ((SetType) cqlType).getElementType()); + firstElement, cqlType == null ? null : ((SetType) cqlType).getElementType()); return GenericType.setOf(elementType); } } else if (value instanceof Map) { @@ -350,12 +361,18 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c if (map.isEmpty()) { return cqlType == null ? 
JAVA_TYPE_FOR_EMPTY_MAPS : inferJavaTypeFromCqlType(cqlType); } else { - Map.Entry entry = map.entrySet().iterator().next(); + Map.Entry firstEntry = map.entrySet().iterator().next(); + Object firstKey = firstEntry.getKey(); + Object firstValue = firstEntry.getValue(); + if (firstKey == null || firstValue == null) { + throw new IllegalArgumentException( + "Can't infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)"); + } GenericType keyType = - inspectType(entry.getKey(), cqlType == null ? null : ((MapType) cqlType).getKeyType()); + inspectType(firstKey, cqlType == null ? null : ((MapType) cqlType).getKeyType()); GenericType valueType = - inspectType( - entry.getValue(), cqlType == null ? null : ((MapType) cqlType).getValueType()); + inspectType(firstValue, cqlType == null ? null : ((MapType) cqlType).getValueType()); return GenericType.mapOf(keyType, valueType); } } else { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java index e5ef922ea19..6e6071a9ae2 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java @@ -60,7 +60,10 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.Period; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -354,6 +357,19 @@ public void should_create_list_codec_for_java_value_when_first_element_is_a_subt inOrder.verify(mockCache).lookup(null, GenericType.listOf(Inet4Address.class), true); } + @Test + public void should_throw_for_list_codec_containing_null_element() { 
+ List value = new ArrayList<>(); + value.add(null); + + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + assertThatThrownBy(() -> registry.codecFor(value)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Can't infer list codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } + @Test public void should_create_set_codec_for_cql_and_java_types() { SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); @@ -494,6 +510,19 @@ public void should_create_set_codec_for_java_value_when_first_element_is_a_subty inOrder.verify(mockCache).lookup(null, GenericType.setOf(Inet4Address.class), true); } + @Test + public void should_throw_for_set_codec_containing_null_element() { + Set value = new HashSet<>(); + value.add(null); + + TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + assertThatThrownBy(() -> registry.codecFor(value)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Can't infer set codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } + @Test public void should_create_map_codec_for_cql_and_java_types() { MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); @@ -672,6 +701,26 @@ public void should_create_map_codec_for_java_value_when_first_element_is_a_subty .lookup(null, GenericType.mapOf(Inet4Address.class, Inet4Address.class), true); } + @Test + public void should_throw_for_map_codec_containing_null_element() { + should_throw_for_map_codec_containing_null_element("foo", null); + should_throw_for_map_codec_containing_null_element(null, "foo"); + should_throw_for_map_codec_containing_null_element(null, null); + } + + private void should_throw_for_map_codec_containing_null_element( + String firstKey, String firstValue) { + Map value = new HashMap<>(); + value.put(firstKey, firstValue); + + 
TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); + assertThatThrownBy(() -> registry.codecFor(value)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Can't infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)"); + } + @Test public void should_create_tuple_codec_for_cql_and_java_types() { TupleType cqlType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); From c1b815c407cbe31ccb7aa8c6f7ea719bd5f3da76 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 23 Oct 2019 17:52:18 +0300 Subject: [PATCH 148/979] Return immediately from getReplicas() if token map not present --- .../BasicLoadBalancingPolicy.java | 18 +++++++------- ...BasicLoadBalancingPolicyQueryPlanTest.java | 24 ++++++++++--------- 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index 3b5b03815bc..ec2cb768f8a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -225,6 +225,11 @@ protected Set getReplicas(@Nullable Request request, @Nullable Session ses return Collections.emptySet(); } + Optional maybeTokenMap = context.getMetadataManager().getMetadata().getTokenMap(); + if (!maybeTokenMap.isPresent()) { + return Collections.emptySet(); + } + // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, // so we only call each method when strictly necessary (which is why the code below looks a bit // weird). 
@@ -245,15 +250,10 @@ protected Set getReplicas(@Nullable Request request, @Nullable Session ses return Collections.emptySet(); } - Optional maybeTokenMap = context.getMetadataManager().getMetadata().getTokenMap(); - if (maybeTokenMap.isPresent()) { - TokenMap tokenMap = maybeTokenMap.get(); - return (token != null) - ? tokenMap.getReplicas(keyspace, token) - : tokenMap.getReplicas(keyspace, key); - } else { - return Collections.emptySet(); - } + TokenMap tokenMap = maybeTokenMap.get(); + return token != null + ? tokenMap.getReplicas(keyspace, token) + : tokenMap.getReplicas(keyspace, key); } /** Exposed as a protected method so that it can be accessed by tests */ diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java index a1825820046..942b2138a33 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java @@ -85,7 +85,8 @@ public void should_use_round_robin_when_no_request() { assertRoundRobinQueryPlans(); // Then - then(metadataManager).should(never()).getMetadata(); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(Token.class)); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(ByteBuffer.class)); } @Test @@ -99,7 +100,8 @@ public void should_use_round_robin_when_no_session() { // Then then(request).should(never()).getRoutingKey(); then(request).should(never()).getRoutingToken(); - then(metadataManager).should(never()).getMetadata(); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(Token.class)); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(ByteBuffer.class)); } @Test @@ -110,9 +112,10 @@ 
public void should_use_round_robin_when_request_has_no_routing_keyspace() { assertRoundRobinQueryPlans(); - verify(request, never()).getRoutingKey(); - verify(request, never()).getRoutingToken(); - verify(metadataManager, never()).getMetadata(); + then(request).should(never()).getRoutingKey(); + then(request).should(never()).getRoutingToken(); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(Token.class)); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(ByteBuffer.class)); } @Test @@ -123,19 +126,18 @@ public void should_use_round_robin_when_request_has_no_routing_key_or_token() { assertRoundRobinQueryPlans(); - verify(metadataManager, never()).getMetadata(); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(Token.class)); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(ByteBuffer.class)); } @Test public void should_use_round_robin_when_token_map_absent() { - when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); - when(request.getRoutingKey()).thenReturn(ROUTING_KEY); - when(metadata.getTokenMap()).thenReturn(Optional.empty()); assertRoundRobinQueryPlans(); - verify(metadata, atLeast(1)).getTokenMap(); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(Token.class)); + then(tokenMap).should(never()).getReplicas(any(CqlIdentifier.class), any(ByteBuffer.class)); } @Test @@ -147,7 +149,7 @@ public void should_use_round_robin_when_token_map_absent() { assertRoundRobinQueryPlans(); - verify(tokenMap, atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); + then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); } @Test From 2f5ab8f859c7083489a715272014c8292f84f7f7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 23 Oct 2019 20:23:04 +0300 Subject: [PATCH 149/979] Protect against poorly-implemented Request instances in BasicLoadBalancingPolicy This is mainly intended for Simulacron tests since Simulacron does not always 
return accurate query metadata, but can also serve as a general defensive-style programming. --- .../BasicLoadBalancingPolicy.java | 35 ++++++++++++------- ...BasicLoadBalancingPolicyQueryPlanTest.java | 12 +++++++ 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index ec2cb768f8a..23b921a6eb9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -233,20 +233,29 @@ protected Set getReplicas(@Nullable Request request, @Nullable Session ses // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, // so we only call each method when strictly necessary (which is why the code below looks a bit // weird). - CqlIdentifier keyspace = request.getKeyspace(); - if (keyspace == null) { - keyspace = request.getRoutingKeyspace(); - } - if (keyspace == null && session.getKeyspace().isPresent()) { - keyspace = session.getKeyspace().get(); - } - if (keyspace == null) { - return Collections.emptySet(); - } + CqlIdentifier keyspace = null; + Token token = null; + ByteBuffer key = null; + try { + keyspace = request.getKeyspace(); + if (keyspace == null) { + keyspace = request.getRoutingKeyspace(); + } + if (keyspace == null && session.getKeyspace().isPresent()) { + keyspace = session.getKeyspace().get(); + } + if (keyspace == null) { + return Collections.emptySet(); + } - Token token = request.getRoutingToken(); - ByteBuffer key = (token == null) ? request.getRoutingKey() : null; - if (token == null && key == null) { + token = request.getRoutingToken(); + key = (token == null) ? 
request.getRoutingKey() : null; + if (token == null && key == null) { + return Collections.emptySet(); + } + } catch (Exception e) { + // Protect against poorly-implemented Request instances + LOG.error("Unexpected error while trying to compute query plan", e); return Collections.emptySet(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java index 942b2138a33..c4f1689319b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java @@ -181,6 +181,18 @@ public void should_use_round_robin_when_token_map_absent() { then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, routingToken); } + @Test + public void should_round_robin_and_log_error_when_request_throws() { + // Given + given(request.getKeyspace()).willThrow(new NullPointerException()); + // When + policy.newQueryPlan(request, session); + // Then + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .contains("Unexpected error while trying to compute query plan"); + } + private void assertRoundRobinQueryPlans() { for (int i = 0; i < 3; i++) { assertThat(policy.newQueryPlan(request, session)) From dec2af12f3c7e80d055316e51258e2583505c7bf Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 25 Oct 2019 11:13:20 +0300 Subject: [PATCH 150/979] JAVA-2460: Document how to determine the local DC (#1353) --- changelog/README.md | 1 + manual/core/load_balancing/README.md | 48 +++++++++++++++++++++++++--- 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 70b6d7966ee..c1e3080ee76 100644 --- a/changelog/README.md 
+++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [documentation]: JAVA-2460: Document how to determine the local DC - [improvement] JAVA-2476: Improve error message when codec registry inspects a collection with a null element - [documentation] JAVA-2509: Mention file-based approach for Cloud configuration in the manual diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index de4511fb8bc..ec3e48d93e7 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -5,8 +5,7 @@ Which nodes the driver talks to, and in which order they are tried. * `basic.load-balancing-policy` in the configuration. -* defaults to `DefaultLoadBalancingPolicy` (opinionated best practices). No out-of-the-box - alternatives, but you can write your own. +* defaults to `DefaultLoadBalancingPolicy` (opinionated best practices). * can have per-profile policies. ----- @@ -59,7 +58,7 @@ experience, this has proven to be too complicated: it's not obvious which policy a given use case, and nested policies can sometimes affect each other's effects in subtle and hard to predict ways. -In driver 4+, we are taking a more opinionated approach: we provide a single load balancing policy, +In driver 4+, we are taking a more opinionated approach: we provide a default load balancing policy, that we consider the best choice for most cases. You can still write a [custom implementation](#custom-implementation) if you have special requirements. @@ -127,6 +126,45 @@ that case, the driver will connect to 127.0.0.1:9042, and use that node's datace for a better out-of-the-box experience for users who have just downloaded the driver; beyond that initial development phase, you should provide explicit contact points and a local datacenter. +#### Finding the local datacenter + +To check which datacenters are defined in a given cluster, you can run [`nodetool status`]. 
It will +print information about each node in the cluster, grouped by datacenters. Here is an example: + +``` +$ nodetool status +Datacenter: DC1 +=============== +Status=Up/Down +|/ State=Normal/Leaving/Joining/Moving +-- Address Load Tokens Owns Host ID Rack +UN 1.5 TB 256 ? rack1 +UN 1.5 TB 256 ? rack2 +UN 1.5 TB 256 ? rack3 + +Datacenter: DC2 +=============== +Status=Up/Down +|/ State=Normal/Leaving/Joining/Moving +-- Address Load Tokens Owns Host ID Rack +UN 1.5 TB 256 ? rack1 +UN 1.5 TB 256 ? rack2 +UN 1.5 TB 256 ? rack3 +``` + +To find out which datacenter should be considered local, you need to first determine which nodes the +driver is going to be co-located with, then choose their datacenter as local. In case of doubt, you +can also use [cqlsh]; if cqlsh is co-located too in the same datacenter, simply run the command +below: + +``` +cqlsh> select data_center from system.local; + +data_center +------------- +DC1 +``` + #### Token-aware The default policy is **token-aware** by default: requests will be routed in priority to the @@ -289,4 +327,6 @@ Then it uses the "closest" distance for any given node. 
For example: [LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- [getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- \ No newline at end of file +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html +[cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html From 206f02b4d8fad47fa2b46f7965d6b6e1bc85e293 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Tue, 29 Oct 2019 12:29:38 -0500 Subject: [PATCH 151/979] Restore Jackson <2.10 API compatibility (partial revert of JAVA-2480) --- .../json/codecs/JacksonJsonCodec.java | 22 +++++++++---------- .../driver/querybuilder/JacksonJsonCodec.java | 18 +++++++-------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java index a9e4d2c8078..ea9c8808b38 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java @@ -24,7 +24,7 @@ import com.datastax.oss.protocol.internal.util.Bytes; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; -import 
com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.TypeFactory; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -52,28 +52,28 @@ */ public class JacksonJsonCodec implements TypeCodec { - private final JsonMapper jsonMapper; + private final ObjectMapper objectMapper; private final GenericType javaType; /** * Creates a new instance for the provided {@code javaClass}, using a default, newly-allocated - * {@link JsonMapper}. + * {@link ObjectMapper}. * * @param javaClass the Java class this codec maps to. */ public JacksonJsonCodec(Class javaClass) { - this(javaClass, JsonMapper.builder().build()); + this(javaClass, new ObjectMapper()); } /** * Creates a new instance for the provided {@code javaClass}, and using the provided {@link - * JsonMapper}. + * ObjectMapper}. * * @param javaClass the Java class this codec maps to. */ - public JacksonJsonCodec(Class javaClass, JsonMapper jsonMapper) { + public JacksonJsonCodec(Class javaClass, ObjectMapper jsonMapper) { this.javaType = GenericType.of(javaClass); - this.jsonMapper = jsonMapper; + this.objectMapper = jsonMapper; } @NonNull @@ -95,7 +95,7 @@ public ByteBuffer encode(@Nullable T value, @NonNull ProtocolVersion protocolVer return null; } try { - return ByteBuffer.wrap(jsonMapper.writeValueAsBytes(value)); + return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value)); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -108,7 +108,7 @@ public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVer return null; } try { - return jsonMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); + return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -122,7 +122,7 @@ public String format(T value) { } String 
json; try { - json = jsonMapper.writeValueAsString(value); + json = objectMapper.writeValueAsString(value); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -141,7 +141,7 @@ public T parse(String value) { } String json = Strings.unquote(value); try { - return (T) jsonMapper.readValue(json, toJacksonJavaType()); + return (T) objectMapper.readValue(json, toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java index 64f8b6ddcb3..f6ab774dfd3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java @@ -24,7 +24,7 @@ import com.datastax.oss.protocol.internal.util.Bytes; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.TypeFactory; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -33,16 +33,16 @@ public class JacksonJsonCodec implements TypeCodec { - private final JsonMapper jsonMapper; + private final ObjectMapper objectMapper; private final GenericType javaType; JacksonJsonCodec(Class javaClass) { - this(javaClass, JsonMapper.builder().build()); + this(javaClass, new ObjectMapper()); } - private JacksonJsonCodec(Class javaClass, JsonMapper objectMapper) { + private JacksonJsonCodec(Class javaClass, ObjectMapper objectMapper) { this.javaType = GenericType.of(javaClass); - this.jsonMapper = objectMapper; + this.objectMapper = objectMapper; } @NonNull @@ -64,7 +64,7 @@ public ByteBuffer 
encode(@Nullable T value, @NonNull ProtocolVersion protocolVer return null; } try { - return ByteBuffer.wrap(jsonMapper.writeValueAsBytes(value)); + return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value)); } catch (JsonProcessingException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -77,7 +77,7 @@ public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVer return null; } try { - return jsonMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); + return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -91,7 +91,7 @@ public String format(T value) { } String json; try { - json = jsonMapper.writeValueAsString(value); + json = objectMapper.writeValueAsString(value); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -110,7 +110,7 @@ public T parse(String value) { } String json = Strings.unquote(value); try { - return (T) jsonMapper.readValue(json, toJacksonJavaType()); + return (T) objectMapper.readValue(json, toJacksonJavaType()); } catch (IOException e) { throw new IllegalArgumentException(e.getMessage(), e); } From 139aa1737beb762990d8eb5ab960bc2ede9b5261 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 24 Oct 2019 18:08:04 -0700 Subject: [PATCH 152/979] JAVA-2516: Enable hostname validation with Cloud --- changelog/README.md | 1 + .../core/ssl/SniSslEngineFactory.java | 36 +++++++++++++++++-- 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index c1e3080ee76..67b9bdb4229 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2516: Enable hostname validation with Cloud - [documentation]: JAVA-2460: Document how to determine the local DC - [improvement] JAVA-2476: Improve error message when codec registry inspects a collection with a null element diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java index ed459d9aa14..e1e3b02f592 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java @@ -21,6 +21,8 @@ import com.datastax.oss.driver.internal.core.metadata.SniEndPoint; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.InetSocketAddress; +import java.util.concurrent.CopyOnWriteArrayList; import javax.net.ssl.SNIHostName; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; @@ -28,7 +30,12 @@ public class SniSslEngineFactory implements SslEngineFactory { + // An offset that gets added to our "fake" ports (see below). We pick this value because it is the + // start of the ephemeral port range. + private static final int FAKE_PORT_OFFSET = 49152; + private final SSLContext sslContext; + private final CopyOnWriteArrayList fakePorts = new CopyOnWriteArrayList<>(); /** Builds a new instance from the driver configuration. */ public SniSslEngineFactory(SSLContext sslContext) { @@ -44,17 +51,40 @@ public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { "Configuration error: can only use %s with SNI end points", this.getClass().getSimpleName())); } - SSLEngine engine; SniEndPoint sniEndPoint = (SniEndPoint) remoteEndpoint; + InetSocketAddress address = sniEndPoint.resolve(); + String sniServerName = sniEndPoint.getServerName(); - engine = sslContext.createSSLEngine(); + // When hostname verification is enabled (with setEndpointIdentificationAlgorithm), the SSL + // engine will try to match the server's certificate against the SNI host name; if that doesn't + // work, it will fall back to the "advisory peer host" passed to createSSLEngine. 
+ // + // In our case, the first check will never succeed because our SNI host name is not the DNS name + // (we use the Cassandra host_id instead). So we *must* set the advisory peer information. + // + // However if we use the address as-is, this leads to another issue: the advisory peer + // information is also used to cache SSL sessions internally. All of our nodes share the same + // proxy address, so the JDK tries to reuse SSL sessions across nodes. But it doesn't update the + // SNI host name every time, so it ends up opening connections to the wrong node. + // + // To avoid that, we create a unique "fake" port for every node. We still get session reuse for + // a given node, but not across nodes. This is safe because the advisory port is only used for + // session caching. + SSLEngine engine = + sslContext.createSSLEngine(address.getHostName(), getFakePort(sniServerName)); engine.setUseClientMode(true); SSLParameters parameters = engine.getSSLParameters(); - parameters.setServerNames(ImmutableList.of(new SNIHostName(sniEndPoint.getServerName()))); + parameters.setServerNames(ImmutableList.of(new SNIHostName(sniServerName))); + parameters.setEndpointIdentificationAlgorithm("HTTPS"); engine.setSSLParameters(parameters); return engine; } + private int getFakePort(String sniServerName) { + fakePorts.addIfAbsent(sniServerName); + return FAKE_PORT_OFFSET + fakePorts.indexOf(sniServerName); + } + @Override public void close() { // nothing to do From 38d94c3b3dec68ba6ce2abb05416d2976499a989 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 24 Oct 2019 18:10:57 -0700 Subject: [PATCH 153/979] Remove obsolete comment --- .../oss/driver/internal/core/ssl/SniSslEngineFactory.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java index e1e3b02f592..7e4c82aa705 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java @@ -37,7 +37,6 @@ public class SniSslEngineFactory implements SslEngineFactory { private final SSLContext sslContext; private final CopyOnWriteArrayList fakePorts = new CopyOnWriteArrayList<>(); - /** Builds a new instance from the driver configuration. */ public SniSslEngineFactory(SSLContext sslContext) { this.sslContext = sslContext; } From 9eafb65aca719af60f709dc1afcd9d56897057f2 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 25 Sep 2019 17:25:45 -0700 Subject: [PATCH 154/979] JAVA-2464: Fix initial schema refresh when reconnect-on-init is enabled Motivation: When reconnect-on-init is enabled and the driver does not connect immediately, the schema metadata is empty. This is because the initial schema refresh is chained on ControlConnection.firstConnectionAttemptFuture. The goal was to allow the session to initialize without waiting for the control connection (if there is a custom topology monitor that uses another mechanism). But in the scenario above, firstConnectionAttemptFuture is failed. Modifications: Chain the schema refresh to ControlConnection.initFuture (complete, successful connection). Remove firstConnectionAttemptFuture. Result: The schema is present at startup. Note that if a custom topology monitor is in place, and it is the metadata manager that initializes the control connection, the first schema refresh (and therefore the whole session initialization) will have to wait for the control connection to successfully connect. All things considered, this is pretty logical. 
--- changelog/README.md | 1 + .../core/control/ControlConnection.java | 89 +++++++++---------- .../core/metadata/MetadataManager.java | 18 ++-- .../internal/core/session/DefaultSession.java | 7 +- .../datastax/oss/driver/core/ConnectIT.java | 14 ++- 5 files changed, 71 insertions(+), 58 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 67b9bdb4229..f1204d59d31 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [bug] JAVA-2464: Fix initial schema refresh when reconnect-on-init is enabled - [improvement] JAVA-2516: Enable hostname validation with Cloud - [documentation]: JAVA-2460: Document how to determine the local DC - [improvement] JAVA-2476: Improve error message when codec registry inspects a collection with a diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index 193121fc08b..5c310c9c72f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -102,9 +102,8 @@ public ControlConnection(InternalDriverContext context) { * @param listenToClusterEvents whether to register for TOPOLOGY_CHANGE and STATUS_CHANGE events. * If the control connection has already initialized with another value, this is ignored. * SCHEMA_CHANGE events are always registered. - * @param reconnectOnFailure whether to schedule a reconnection if the initial attempt fails (this - * does not affect the returned future, which always represent the outcome of the initial - * attempt only). + * @param reconnectOnFailure whether to schedule a reconnection if the initial attempt fails (if + * true, the returned future will only complete once the reconnection has succeeded). 
* @param useInitialReconnectionSchedule if no node can be reached, the type of reconnection * schedule to use. In other words, the value that will be passed to {@link * ReconnectionPolicy#newControlConnectionSchedule(boolean)}. Note that this parameter is only @@ -130,10 +129,6 @@ public boolean isInit() { return singleThreaded.initFuture.isDone(); } - public CompletionStage firstConnectionAttemptFuture() { - return singleThreaded.firstConnectionAttemptFuture; - } - /** * The channel currently used by this control connection. This is modified concurrently in the * event of a reconnection, so it may occasionally return a closed channel (clients should be @@ -246,7 +241,6 @@ private class SingleThreaded { private final InternalDriverContext context; private final DriverConfig config; private final CompletableFuture initFuture = new CompletableFuture<>(); - private final CompletableFuture firstConnectionAttemptFuture = new CompletableFuture<>(); private boolean initWasCalled; private final CompletableFuture closeFuture = new CompletableFuture<>(); private boolean closeWasCalled; @@ -308,7 +302,6 @@ private void init( null, () -> { initFuture.complete(null); - firstConnectionAttemptFuture.complete(null); }, error -> { if (isAuthFailure(error)) { @@ -332,7 +325,6 @@ private void init( } initFuture.completeExceptionally(error); } - firstConnectionAttemptFuture.completeExceptionally(error); }); } catch (Throwable t) { initFuture.completeExceptionally(t); @@ -462,44 +454,49 @@ private void connect( } private void onSuccessfulReconnect() { - // If reconnectOnFailure was true and we've never connected before, complete the future now, - // otherwise it's already complete and this is a no-op. - initFuture.complete(null); + // If reconnectOnFailure was true and we've never connected before, complete the future now to + // signal that the initialization is complete. 
+ boolean isFirstConnection = initFuture.complete(null); - // Always perform a full refresh (we don't know how long we were disconnected) - context - .getMetadataManager() - .refreshNodes() - .whenComplete( - (result, error) -> { - if (error != null) { - LOG.debug("[{}] Error while refreshing node list", logPrefix, error); - } else { - try { - // A failed node list refresh at startup is not fatal, so this might be the - // first successful refresh; make sure the LBP gets initialized (this is a no-op - // if it was initialized already). - context.getLoadBalancingPolicyWrapper().init(); - context - .getMetadataManager() - .refreshSchema(null, false, true) - .whenComplete( - (metadata, schemaError) -> { - if (schemaError != null) { - Loggers.warnWithException( - LOG, - "[{}] Unexpected error while refreshing schema after a " - + "successful reconnection, keeping previous version", - logPrefix, - schemaError); - } - }); - } catch (Throwable t) { - Loggers.warnWithException( - LOG, "[{}] Unexpected error on control connection reconnect", logPrefix, t); + // Otherwise, perform a full refresh (we don't know how long we were disconnected) + if (!isFirstConnection) { + context + .getMetadataManager() + .refreshNodes() + .whenComplete( + (result, error) -> { + if (error != null) { + LOG.debug("[{}] Error while refreshing node list", logPrefix, error); + } else { + try { + // A failed node list refresh at startup is not fatal, so this might be the + // first successful refresh; make sure the LBP gets initialized (this is a + // no-op if it was initialized already). 
+ context.getLoadBalancingPolicyWrapper().init(); + context + .getMetadataManager() + .refreshSchema(null, false, true) + .whenComplete( + (metadata, schemaError) -> { + if (schemaError != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema after a " + + "successful reconnection, keeping previous version", + logPrefix, + schemaError); + } + }); + } catch (Throwable t) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error on control connection reconnect", + logPrefix, + t); + } } - } - }); + }); + } } private void onChannelClosed(DriverChannel channel, Node node) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index a066518ebab..0dc219fd058 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -405,7 +405,7 @@ private void startSchemaRequest(CompletableFuture future) { if (currentSchemaRefresh == null) { currentSchemaRefresh = future; LOG.debug("[{}] Starting schema refresh", logPrefix); - maybeInitControlConnection() + initControlConnectionForSchema() .thenCompose(v -> context.getTopologyMonitor().checkSchemaAgreement()) // 1. Query system tables .thenCompose(b -> schemaQueriesFactory.newInstance(future).execute()) @@ -426,17 +426,17 @@ private void startSchemaRequest(CompletableFuture future) { } } - // The control connection may or may not have been initialized already by TopologyMonitor. - private CompletionStage maybeInitControlConnection() { + // To query schema tables, we need the control connection. + // Normally that the topology monitor has already initialized it to query node tables. But if a + // custom topology monitor is in place, it might not use the control connection at all. 
+ private CompletionStage initControlConnectionForSchema() { if (firstSchemaRefreshFuture.isDone()) { - // Not the first schema refresh, so we know init was attempted already + // We tried to refresh the schema before, so we know we called init already. Don't call it + // again since that is cheaper. return firstSchemaRefreshFuture; } else { - controlConnection.init(false, true, false); - // The control connection might fail to connect and reattempt, but for the metadata refresh - // that led us here we only care about the first attempt (metadata is not vital, so if we - // can't get it right now it's OK to move on) - return controlConnection.firstConnectionAttemptFuture(); + // Trigger init (a no-op if the topology monitor already done so) + return controlConnection.init(false, true, false); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 3c7bde85e5b..5b636089c16 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -377,8 +377,11 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { currentVersion, bestVersion); context.getChannelFactory().setProtocolVersion(bestVersion); + + // If the control connection has already initialized, force a reconnect to use the new + // version. 
+ // (note: it might not have initialized yet if there is a custom TopologyMonitor) ControlConnection controlConnection = context.getControlConnection(); - // Might not have initialized yet if there is a custom TopologyMonitor if (controlConnection.isInit()) { controlConnection.reconnectNow(); // Reconnection already triggers a full schema refresh @@ -394,7 +397,7 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { if (error != null) { Loggers.warnWithException( LOG, - "[{}] Unexpected error while refreshing schema during intialization, " + "[{}] Unexpected error while refreshing schema during initialization, " + "keeping previous version", logPrefix, error); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 887680153cb..380e1b25492 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -16,11 +16,14 @@ package com.datastax.oss.driver.core; import static com.datastax.oss.driver.api.testinfra.utils.ConditionChecker.checkThat; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; @@ -63,6 +66,13 @@ public class ConnectIT { @Before public void setup() { SIMULACRON_RULE.cluster().acceptConnections(); + SIMULACRON_RULE + .cluster() + .prime( + // 
Absolute minimum for a working schema metadata (we just want to check that it gets + // loaded at startup). + when("SELECT * FROM system_schema.keyspaces") + .then(rows().row("keyspace_name", "system"))); } @Test @@ -107,7 +117,9 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce SIMULACRON_RULE.cluster().acceptConnections(); // Then this doesn't throw - try (Session ignored = sessionFuture.get(30, TimeUnit.SECONDS)) {} + try (Session session = sessionFuture.get(30, TimeUnit.SECONDS)) { + assertThat(session.getMetadata().getKeyspaces()).containsKey(CqlIdentifier.fromCql("system")); + } } /** From 814add6b1370e9f01bc211cc795c771f462d3bb0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 18 Oct 2019 10:53:25 -0700 Subject: [PATCH 155/979] JAVA-2497: Ensure nodes and exceptions are serializable --- changelog/README.md | 1 + .../oss/driver/api/core/DriverException.java | 4 +- .../datastax/oss/driver/api/core/Version.java | 5 +- .../core/metadata/DefaultEndPoint.java | 5 +- .../internal/core/metadata/DefaultNode.java | 8 +- integration-tests/pom.xml | 11 +- .../oss/driver/core/SerializationIT.java | 107 ++++++++++++++++++ 7 files changed, 135 insertions(+), 6 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java diff --git a/changelog/README.md b/changelog/README.md index f1204d59d31..f326c6d2e08 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.3.0 (in progress) +- [improvement] JAVA-2497: Ensure nodes and exceptions are serializable - [bug] JAVA-2464: Fix initial schema refresh when reconnect-on-init is enabled - [improvement] JAVA-2516: Enable hostname validation with Cloud - [documentation]: JAVA-2460: Document how to determine the local DC diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java index 07f79d6e341..bf0f65f1649 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java @@ -39,7 +39,7 @@ */ public abstract class DriverException extends RuntimeException { - private volatile ExecutionInfo executionInfo; + private transient volatile ExecutionInfo executionInfo; protected DriverException( @Nullable String message, @@ -76,6 +76,8 @@ protected DriverException( * session call. For example, individual node errors stored in {@link * AllNodesFailedException#getErrors()} or {@link ExecutionInfo#getErrors()} do not contain their * own execution info, and therefore return null from this method. + * + *

      It will also be null if you serialize and deserialize an exception. */ public ExecutionInfo getExecutionInfo() { return executionInfo; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index f8585fb9123..8e69bcb6a1d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -17,6 +17,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -36,7 +37,9 @@ * are ignored for sorting versions. */ @Immutable -public class Version implements Comparable { +public class Version implements Comparable, Serializable { + + private static final long serialVersionUID = 1; private static final String VERSION_REGEXP = "(\\d+)\\.(\\d+)(\\.\\d+)?(\\.\\d+)?([~\\-]\\w[.\\w]*(?:-\\w[.\\w]*)*)?(\\+[.\\w]+)?"; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java index d19470afe2f..d2c3d3f44d0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java @@ -17,10 +17,13 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.net.InetSocketAddress; import java.util.Objects; -public class DefaultEndPoint implements EndPoint { +public class DefaultEndPoint implements EndPoint, Serializable { + + private static final long serialVersionUID = 1; private final InetSocketAddress address; private final String metricPrefix; diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java index 566107febf0..839a4a61231 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultNode.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.net.InetSocketAddress; import java.util.Collections; import java.util.Map; @@ -38,10 +39,13 @@ * from {@link MetadataManager}'s admin thread. */ @ThreadSafe -public class DefaultNode implements Node { +public class DefaultNode implements Node, Serializable { + + private static final long serialVersionUID = 1; private volatile EndPoint endPoint; - private volatile NodeMetricUpdater metricUpdater; + // A deserialized node is not attached to a session anymore, so we don't need to retain this + private transient volatile NodeMetricUpdater metricUpdater; volatile InetSocketAddress broadcastRpcAddress; volatile InetSocketAddress broadcastAddress; diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 0d2dcd5c5d7..b06462f7827 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -15,7 +15,9 @@ limitations under the License. 
--> - + 4.0.0 @@ -62,6 +64,13 @@ ${project.parent.version} test + + com.datastax.oss + java-driver-core + ${project.parent.version} + test-jar + test + com.github.spotbugs spotbugs-annotations diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java new file mode 100644 index 00000000000..afaffc44a0b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java @@ -0,0 +1,107 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.serverError; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ColumnDefinition; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.ServerError; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.internal.SerializationHelper; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +public class SerializationIT { + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); + + @Before + public void clear() { + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + @Test + public void should_serialize_node() { + // Given + Node node = SESSION_RULE.session().getMetadata().getNodes().values().iterator().next(); + + // When + Node deserializedNode = SerializationHelper.serializeAndDeserialize(node); + + // Then + // verify a few fields, no need to be exhaustive + assertThat(deserializedNode.getHostId()).isEqualTo(node.getHostId()); + 
assertThat(deserializedNode.getEndPoint()).isEqualTo(node.getEndPoint()); + assertThat(deserializedNode.getCassandraVersion()).isEqualTo(node.getCassandraVersion()); + } + + @Test + public void should_serialize_driver_exception() { + // Given + SIMULACRON_RULE.cluster().prime(when("mock query").then(serverError("mock server error"))); + try { + SESSION_RULE.session().execute("mock query"); + fail("Expected a ServerError"); + } catch (ServerError error) { + assertThat(error.getExecutionInfo()).isNotNull(); + + // When + ServerError deserializedError = SerializationHelper.serializeAndDeserialize(error); + + // Then + assertThat(deserializedError.getMessage()).isEqualTo("mock server error"); + assertThat(deserializedError.getCoordinator().getEndPoint()) + .isEqualTo(error.getCoordinator().getEndPoint()); + assertThat(deserializedError.getExecutionInfo()).isNull(); // transient + } + } + + @Test + public void should_serialize_row() { + // Given + SIMULACRON_RULE + .cluster() + .prime(when("mock query").then(rows().row("t", "mock data").columnTypes("t", "varchar"))); + Row row = SESSION_RULE.session().execute("mock query").one(); + + // When + row = SerializationHelper.serializeAndDeserialize(row); + + // Then + ColumnDefinition columnDefinition = row.getColumnDefinitions().get("t"); + assertThat(columnDefinition.getType()).isEqualTo(DataTypes.TEXT); + assertThat(row.getString("t")).isEqualTo("mock data"); + } +} From c42b415279eefcda6d03a42a49f9240bc55e8228 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 30 Oct 2019 10:50:34 -0700 Subject: [PATCH 156/979] Update version in docs --- README.md | 4 +-- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 10 +++--- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- 
manual/core/custom_codecs/README.md | 8 ++--- manual/core/detachable_types/README.md | 14 ++++---- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 24 ++++++------- manual/core/metadata/schema/README.md | 14 ++++---- manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 8 ++--- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 4 +-- manual/core/statements/README.md | 8 ++--- manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/delete/README.md | 14 ++++---- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 10 +++--- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 16 ++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 24 ++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 8 ++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 2 +- 
manual/query_builder/README.md | 6 ++-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- 69 files changed, 274 insertions(+), 274 deletions(-) diff --git a/README.md b/README.md index b649c0b54cb..b223bc8451b 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.2.0](https://github.com/datastax/java-driver/tree/4.2.0).* +[4.3.0](https://github.com/datastax/java-driver/tree/4.3.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), using exclusively Cassandra's binary protocol and Cassandra Query @@ -80,7 +80,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.2 +[API docs]: https://docs.datastax.com/en/drivers/java/4.3 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index f326c6d2e08..9cd58abae62 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.3.0 (in progress) +### 4.3.0 - [improvement] JAVA-2497: Ensure nodes and exceptions are serializable - [bug] JAVA-2464: Fix initial schema refresh when reconnect-on-init is enabled diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index d1d5244b51e..3c7bf6d561f 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 25111876103..3915294e50a 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 00c31bec714..af498372136 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index a2a86b42a57..22cb2d63434 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 3b162b901aa..ecc57a2612e 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -76,8 +76,8 @@ acceptable for you, consider writing your own [AuthProvider] implementation [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html 
-[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- \ No newline at end of file +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- \ No newline at end of file diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 983c6c4f2ac..2e3ac592de9 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 2549ed66327..492beff9f94 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. 
-[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 447750d5b2d..827d6d1a134 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -255,7 +255,7 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 241846f50ab..f67ce8c2bb8 100644 
--- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index a0fe424558b..f748bc07493 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. -[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 4713b19cb31..e6dfe56713c 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -447,6 +447,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index ec3e48d93e7..bc904d4d143 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 45ceb71229c..0270a3b5df2 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. 
See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 9b43bd2963b..50ae9c41ebd 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -103,15 +103,15 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- \ No newline at end of file +[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 7697ab17be4..753c21ba538 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -228,12 +228,12 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. 
-[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- 
+[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 2481472e7bd..6641ec6025f 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 01bcf7e261d..714d7f4b986 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -120,6 +120,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index c8cf5c53d03..bda8c311442 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -186,9 +186,9 @@ think you can get away with the performance hit. We recommend that you: The [driver examples] include two complete web service implementations demonstrating forward-only and random (offset-based) paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 08e8980f4b5..97bd9711c45 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index 4db5d9afe87..664897fd6f2 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -158,5 +158,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. 
-[CqlSession]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 12439dba4e8..6a5ef438d66 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-ffi [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 1ac677a6ea8..811051f7806 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. 
The pool is back to its expected size, node2's reconnection stops. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 23b29c154a5..023fe842557 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index b8d3d034df0..025bfd584ff 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index c9382f96e7d..3605c0768fd 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 105f74de563..2c0fab297b5 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -188,5 +188,5 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 9f55eddd59e..9108806d658 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 60208588a38..68ab112bbcc 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index c1ffa2582ef..9bc0810274c 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index dd9e337bca7..f780051c6d7 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 7ca92408891..6250085117d 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index af8e95bb7c0..529d081f484 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index f776cb46bdd..8268c00c4dd 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 6466e177276..ab3af6146e3 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index e5cc5564fdc..9600d9a05e7 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 26a2b38b661..d7245262778 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 213dfdce0eb..f40e494a9f7 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 614b7a519c0..f92abbd3434 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -134,13 +134,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html 
[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 06b87db4138..5629df431fd 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html 
+[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index a6d909a4a03..034086688fc 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -93,11 +93,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index 008545d278e..1133490bf32 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index ec1d4e4369b..9ce2f5e5178 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -103,14 +103,14 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 12c1c7561f3..5eda5c18d48 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. 
-[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index c41dcf41bf5..7c69ab4e7f0 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -133,18 +133,18 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- 
-[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index f714e8c7c3e..1a5aaecb1c0 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index ab5a6160147..23d0f801c23 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 1af7fb2d4c3..77df48de95f 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -127,11 +127,11 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 15be2c1f346..f0186a8b310 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. 
-[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index f230dff9b88..919073a94c2 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -159,8 +159,8 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 98a8716d75f..bfe9841de2c 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -77,5 +77,5 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-ffi -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 \ No newline at end of file diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index b4977fd6332..429acce5e28 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -184,6 +184,6 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/CqlIdentifier.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 
ce88cd1bdd7..0520992bc52 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index a952ef00a25..5cf061ecaed 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 2db2ef18462..f4d2ab8121f 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 06581110e20..99fe7516fc3 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index a5bd0234528..b33eba70d23 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 2ec960ea136..a7b4a8f1885 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index a55de4a2366..2e72b321b73 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index fabd578de88..0321bee22a6 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 1a96cc3643b..59964b88f62 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index c88acc11822..18702c55ce5 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 6113aefedf4..68c97822c36 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 4ee1bf1a8f2..52d98566b26 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index d0c77fa048b..e98da7d7560 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 017f0d8eea8..6d30f0344e6 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index f4ed51babb5..e89bf37e888 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 8761b0dc239..f846e5d00ff 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From 4af0061baabe1bcc03a9a6eea0028c12a6bd2e88 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 30 Oct 2019 10:55:22 -0700 Subject: [PATCH 157/979] [maven-release-plugin] prepare release 4.3.0 --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 6 ++---- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 12 insertions(+), 14 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index fd7291d4be2..818353d6ebc 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 9858f504f88..0ce47ac8713 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 14426961fcf..3de2caee6ad 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index ce01e7e506e..ec0a806770e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.3.0-SNAPSHOT + 4.3.0 java-driver-examples diff --git a/integration-tests/pom.xml 
b/integration-tests/pom.xml index b06462f7827..aa62022f25f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -15,15 +15,13 @@ limitations under the License. --> - + 4.0.0 com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index eede75e8eee..b97cc53631e 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 39e670ee09c..4b2faf05799 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 7deb0527a11..e8abe634f75 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 pom DataStax Java driver for Apache Cassandra(R) @@ -718,7 +718,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.3.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c3f78d2c6d7..d173cbd25c2 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 08ca57cde35..e2628e89275 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0-SNAPSHOT + 4.3.0 java-driver-test-infra From a8a1691bcff84efefa94192a644a5b005cf4ae92 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 30 Oct 2019 10:57:10 -0700 Subject: [PATCH 158/979] [maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- 
core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 818353d6ebc..23d7cd6659a 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 0ce47ac8713..9d90999f93c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 3de2caee6ad..4c686663e70 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index ec0a806770e..9cf01c77dcd 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.3.0 + 4.3.1-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index aa62022f25f..506e84daf67 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index b97cc53631e..564b27e0b0f 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 4b2faf05799..298cfd52f61 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent 
- 4.3.0 + 4.3.1-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index e8abe634f75..cec332f0090 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) @@ -718,7 +718,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.3.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d173cbd25c2..941ac702c40 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e2628e89275..13f7b910a04 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.0 + 4.3.1-SNAPSHOT java-driver-test-infra From f1a7b0c02bcde8d205e9c364f0991149b459f42a Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 30 Oct 2019 17:32:40 -0700 Subject: [PATCH 159/979] Revisit cloud manual page Simplify structure, fix formatting. Remove DSE content (should be moved to the DSE manual). --- manual/cloud/README.md | 243 +++++++++++++++++------------------------ 1 file changed, 99 insertions(+), 144 deletions(-) diff --git a/manual/cloud/README.md b/manual/cloud/README.md index 8324de5c6eb..e006e13deb3 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -2,171 +2,126 @@ Using the DataStax Java Driver to connect to a DataStax Apollo database is almost identical to using the driver to connect to any normal Apache Cassandra® database. The only differences are in how the -driver is configured in an application and that you will need to obtain a `secure connect bundle`. - -The following is a Quick Start guide to writing a simple application that can connect to an Apollo -database. 
- - **Tip**: DataStax recommends using the DataStax Java Driver for Apache Cassandra. You can also - use the DataStax Enterprise (DSE) Java Driver, which exposes the same API for connecting to - Cassandra databases. +driver is configured in an application and that you will need to obtain a *secure connect bundle*. ### Prerequisites 1. [Download][Download Maven] and [install][Install Maven] Maven. -1. Create an Apollo database on [GCP][Create an Apollo database - GCP] or +2. Create an Apollo database on [GCP][Create an Apollo database - GCP] or [AWS][Create an Apollo database - AWS]; alternatively, have a team member provide access to their Apollo database (instructions for [GCP][Access an Apollo database - GCP] and [AWS][Access an Apollo database - AWS]) to obtain database connection details. -1. Download the secure connect bundle (instructions for +3. Download the secure connect bundle (instructions for [GCP][Download the secure connect bundle - GCP] and - [AWS][Download the secure connect bundle - AWS]) to obtain connection credentials for your - database. + [AWS][Download the secure connect bundle - AWS]), that contains connection information such as + contact points and certificates. ### Procedure -1. Edit the `pom.xml` file at the root of your and according to this [Example pom.xml file]. +Create a minimal project structure as explained [here][minimal project structure]. 
Then modify +`Main.java` using one of the following approaches: + +#### Programmatic configuration + +You can pass the connection information directly to `CqlSession.builder()`: + +```java +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import java.nio.file.Paths; + +public class Main { + + public static void main(String[] args) { + try (CqlSession session = CqlSession.builder() + // make sure you change the path to the secure connect bundle below + .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) + .withAuthCredentials("user_name","password") + .withKeyspace("keyspace_name") + .build()) { + + // For the sake of example, run a simple query and print the results + ResultSet rs = session.execute("select release_version from system.local"); + Row row = rs.one(); + if (row != null) { + System.out.println(row.getString("release_version")); + } else { + System.out.println("An error occurred."); + } + } + } + } +``` -1. Initialize the DataStax Java Driver. +The path to the secure connect bundle for your Apollo database is specified with +`withCloudSecureConnectBundle()`. The authentication credentials must be specified separately with +`withAuthCredentials()`, and match the username and password that were configured when creating the +Apollo database. - a. Create a `ConnectDatabase.java` file in the `/src/main/java` directory for your Java project. +Note the following: - ```sh - $ cd javaProject/src/main/java - ``` - ```sh - $ touch ConnectDatabase.java - ``` +* an SSL connection will be established automatically. Manual SSL configuration is not allowed, any + settings in the driver configuration (`advanced.ssl-engine-factory`) will be ignored; +* the secure connect bundle contains all of the necessary contact information. 
Specifying contact + points manually is not allowed, and will result in an error; +* if the driver configuration does not specify an explicit consistency level, it will default to + `LOCAL_QUORUM` (instead of `LOCAL_ONE` when connecting to a normal Cassandra database). - b. **Programmatic configuration**. Copy the following code for your DataStax Driver into the - `ConnectDatabase.java` file. The following example implements a `ConnectDatabase` class to - connect to your Apollo database, runs a CQL query, and prints the output to the console. +#### File-based configuration - **Note:** With the `CqlSession.builder()` object, make sure to set the path to the secure - connect bundle for your Apollo database in the `withCloudSecureConnectBundle()` method as - shown in the following example. If converting from using the open source Cassandra Java Driver - to the DSE Java Driver, ensure that you change `CqlSession` to `DseSession`. +Alternatively, the connection information can be specified in the driver's configuration file +(`application.conf`). 
Merge the following options with any content already present: - * DataStax Java Driver for Apache Cassandra 4.x (recommended) - - ```java - import com.datastax.oss.driver.api.core.CqlSession; - import com.datastax.oss.driver.api.core.cql.ResultSet; - import com.datastax.oss.driver.api.core.cql.Row; - import java.nio.file.Paths; - - public class ConnectDatabase { - - public static void main(String[] args) { - // Create the CqlSession object: - try (CqlSession session = CqlSession.builder() - // make sure you change the path to the secure connect bundle below - .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) - .withAuthCredentials("user_name","password") - .withKeyspace("keyspace_name") - .build()) { - // Select the release_version from the system.local table: - ResultSet rs = session.execute("select release_version from system.local"); - Row row = rs.one(); - //Print the results of the CQL query to the console: - if (row != null) { - System.out.println(row.getString("release_version")); - } else { - System.out.println("An error occurred."); - } - } - } - } - ``` - * DataStax Java Driver for DataStax Enterprise (DSE) 2.x - - ```java - import com.datastax.dse.driver.api.core.DseSession; - import com.datastax.oss.driver.api.core.cql.ResultSet; - import com.datastax.oss.driver.api.core.cql.Row; - import java.nio.file.Paths; - - public class ConnectDatabase { - - public static void main(String[] args) { - // Create the DseSession object: - try (DseSession session = DseSession.builder() - // make sure you change the path to the secure connect bundle below - .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) - .withAuthCredentials("user_name","password") - .withKeyspace("keyspace_name") - .build()) { - // Select the release_version from the system.local table: - ResultSet rs = session.execute("select release_version from system.local"); - Row row = rs.one(); - //Print the results of the CQL query to 
the console: - if (row != null) { - System.out.println(row.getString("release_version")); - } else { - System.out.println("An error occurred."); - } - } - } - } - ``` - - c. **File-based configuration**. An alternative to the programmatic configuration method - detailed above is to include the information required to connect in the driver's configuration - file (`application.conf`). Merge the following options with any other options that you might - want to include in the configuration file: - - ```hocon - basic { - # change this to match the target keyspace - session-keyspace = keyspace_name - cloud { - # change this to match bundle's location; can be either a path on the local filesystem - # or a valid URL, e.g. http://acme.com/path/to/secure-connect-database_name.zip - secure-connect-bundle = /path/to/secure-connect-database_name.zip - } - } - advanced { - auth-provider { - class = PlainTextAuthProvider - # change below to match the appropriate credentials - username = user_name - password = password - } - } - ``` - - For more information about the driver configuration mechanism, refer to the - [driver documentation]. +```properties +datastax-java-driver { + basic { + # change this to match the target keyspace + session-keyspace = keyspace_name + cloud { + # change this to match bundle's location; can be either a path on the local filesystem + # or a valid URL, e.g. http://acme.com/path/to/secure-connect-database_name.zip + secure-connect-bundle = /path/to/secure-connect-database_name.zip + } + } + advanced { + auth-provider { + class = PlainTextAuthProvider + # change below to match the appropriate credentials + username = user_name + password = password + } + } +} +``` + +For more information about the driver configuration mechanism, refer to the [driver documentation]. 
- With the above configuration, your ConnectDatabase.java file should be simplified as shown - below: +With the above configuration, your main Java class can be simplified as shown below: - ```java - import com.datastax.oss.driver.api.core.CqlSession; - import com.datastax.oss.driver.api.core.cql.ResultSet; - import com.datastax.oss.driver.api.core.cql.Row; +```java +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; - public class ConnectDatabase { +public class Main { - public static void main(String[] args) { - // Create the CqlSession object; it will read the configuration file and pick the right - // values to connect to the Apollo database. - try (CqlSession session = CqlSession.builder().build()) { - // Select the release_version from the system.local table: - ResultSet rs = session.execute("select release_version from system.local"); - Row row = rs.one(); - //Print the results of the CQL query to the console: - if (row != null) { - System.out.println(row.getString("release_version")); - } else { - System.out.println("An error occurred."); - } - } + public static void main(String[] args) { + // Create the CqlSession object; it will read the configuration file and pick the right + // values to connect to the Apollo database. + try (CqlSession session = CqlSession.builder().build()) { + + ResultSet rs = session.execute("select release_version from system.local"); + Row row = rs.one(); + if (row != null) { + System.out.println(row.getString("release_version")); + } else { + System.out.println("An error occurred."); } } - ``` - - d. Save and close the ConnectDatabase.java file. + } +} +``` [Download Maven]: https://maven.apache.org/download.cgi [Install Maven]: https://maven.apache.org/install.html @@ -176,5 +131,5 @@ database. 
[Access an Apollo database - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudShareClusterDetails.html [Download the secure connect bundle - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudObtainingCredentials.html [Download the secure connect bundle - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudObtainingCredentials.html -[Example pom.xml file]: ../core/integration/#minimal-project-structure -[driver documentation]: ../core/configuration/ \ No newline at end of file +[minimal project structure]: ../core/integration/#minimal-project-structure +[driver documentation]: ../core/configuration/ From fe16714ee932606058a6ae674ac19c9a6eec41d1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 31 Oct 2019 10:15:02 -0700 Subject: [PATCH 160/979] Prepare changelog for next iteration --- changelog/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 9cd58abae62..805ce207b68 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.4.0 (in progress) + + ### 4.3.0 - [improvement] JAVA-2497: Ensure nodes and exceptions are serializable From 226664055d85c004ae013bb4fe86857f6eccb7d8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 31 Oct 2019 10:38:30 -0700 Subject: [PATCH 161/979] Change version to next minor --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 23d7cd6659a..46ad7345713 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 9d90999f93c..5197d177ee2 100644 --- 
a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 4c686663e70..1c42435eda1 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 9cf01c77dcd..2aa9dca1a77 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 506e84daf67..3f313e6328b 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 564b27e0b0f..a12ab7b5ad0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 298cfd52f61..e33d0349076 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index cec332f0090..7232c959ce9 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 941ac702c40..0345f3d4bd0 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-query-builder diff --git 
a/test-infra/pom.xml b/test-infra/pom.xml index 13f7b910a04..0e1aabfc5cd 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1-SNAPSHOT + 4.4.0-SNAPSHOT java-driver-test-infra From 2443d2f951fdbac5b158e7b9ce81958d0be5406c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 21 Nov 2019 18:21:49 +0100 Subject: [PATCH 162/979] Fix minor syntax issue in comment --- core-shaded/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 46ad7345713..8177538e29d 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -143,7 +143,7 @@ + this one must be done here because its pattern is too wide --> com.fasterxml.jackson.core:* From 7e8bc2b9ce44d4643e8054861480c9643a8533f0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 23 Nov 2019 15:07:43 +0100 Subject: [PATCH 163/979] Remove duplicate method --- .../internal/core/cql/CqlPrepareHandlerTest.java | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java index ced7d095ee1..3e0503bb1e0 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static com.datastax.oss.driver.internal.core.cql.CqlRequestHandlerTestBase.defaultFrameOf; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString; @@ -24,7 +25,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import 
com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; @@ -37,7 +37,6 @@ import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.request.Prepare; @@ -48,7 +47,6 @@ import com.datastax.oss.protocol.internal.response.result.RowsMetadata; import com.datastax.oss.protocol.internal.util.Bytes; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.Map; import java.util.concurrent.CompletionStage; import org.junit.Before; @@ -320,16 +318,6 @@ public void should_propagate_custom_payload_on_all_nodes() { } } - private static Frame defaultFrameOf(Message responseMessage) { - return Frame.forResponse( - DefaultProtocolVersion.V4.getCode(), - 0, - null, - Frame.NO_PAYLOAD, - Collections.emptyList(), - responseMessage); - } - private static Message simplePrepared() { RowsMetadata variablesMetadata = new RowsMetadata( From ef6bed7bcee3ee8057b5f2b7291bebb48cb34abb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 23 Nov 2019 15:08:50 +0100 Subject: [PATCH 164/979] Remove unused fields --- .../driver/internal/core/cql/CqlRequestHandlerTestBase.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java index da7a0f607d4..3f81d458d8e 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTestBase.java @@ -38,7 +38,6 @@ import com.datastax.oss.protocol.internal.util.Bytes; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.Collections; @@ -65,10 +64,6 @@ public abstract class CqlRequestHandlerTestBase { protected static final BatchStatement NON_IDEMPOTENT_BATCH_STATEMENT = BatchStatement.newInstance(BatchType.LOGGED, NON_IDEMPOTENT_STATEMENT).setIdempotent(false); - protected static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); - protected static final InetSocketAddress ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); - protected static final InetSocketAddress ADDRESS3 = new InetSocketAddress("127.0.0.3", 9042); - @Mock protected DefaultNode node1; @Mock protected DefaultNode node2; @Mock protected DefaultNode node3; From 715cadb8d02a77580fd180d8033426395d4e21bb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 28 Nov 2019 11:23:04 +0100 Subject: [PATCH 165/979] Do not call toString() when logging at TRACE level --- .../oss/driver/internal/core/cql/CqlRequestHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index a658a571655..9bda9b8ce11 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -851,7 +851,7 @@ public void onFailure(Throwable error) { if (result.isDone()) { return; } - LOG.trace("[{}] Request failure, processing: 
{}", logPrefix, error.toString()); + LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); RetryDecision decision; if (!isIdempotent || error instanceof FrameTooLongException) { decision = RetryDecision.RETHROW; From c94537aed3758a79a9c9bf888ee0a6f2a4648630 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Mon, 2 Dec 2019 01:22:20 -0800 Subject: [PATCH 166/979] JAVA-2546: Abort schema refresh if a query fails (#1365) * Always warn on schema query errors The `warnIfMissing` flag was introduced to handle inconsistencies in some DSE versions, but since JAVA-2471 we check that beforehand and select the appropriate `SchemaQueries` implementation. * JAVA-2546: Abort schema refresh if a query fails Motivation: The current strategy is "best effort": ignore query errors and keep parsing as much as we can. However this has undesirable consequences: we observed a case where the query to `system_schema.keyspaces` times out. Parsing proceeds with an empty keyspace list, `SchemaRefresh` thinks all keyspaces have been removed and notifies schema listeners. Modifications: If a query fails, propagate the error to the schema rows future. Ignore any remaining queries. Result: The refresh is aborted and the previous version of the schema metadata stays in place. This is either logged, or surfaced directly if the refresh was triggered manually (by mechanisms that were already in place before this commit). 
--- changelog/README.md | 1 + .../queries/CassandraSchemaQueries.java | 75 +++++++------------ .../queries/Cassandra3SchemaQueriesTest.java | 17 +++++ 3 files changed, 46 insertions(+), 47 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 805ce207b68..115e16105ae 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2546: Abort schema refresh if a query fails ### 4.3.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java index 8aa4ebe8f83..f0ec6211e15 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java @@ -22,7 +22,6 @@ import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.channel.DriverChannel; -import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.internal.core.util.NanoTime; import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; @@ -132,40 +131,35 @@ private void executeOnAdminExecutor() { schemaRowsBuilder = new CassandraSchemaRows.Builder(isCassandraV3, refreshFuture, logPrefix); - query(selectKeyspacesQuery() + whereClause, schemaRowsBuilder::withKeyspaces, true); - query(selectTypesQuery() + whereClause, schemaRowsBuilder::withTypes, true); - query(selectTablesQuery() + whereClause, schemaRowsBuilder::withTables, true); - query(selectColumnsQuery() + whereClause, schemaRowsBuilder::withColumns, true); + query(selectKeyspacesQuery() + whereClause, 
schemaRowsBuilder::withKeyspaces); + query(selectTypesQuery() + whereClause, schemaRowsBuilder::withTypes); + query(selectTablesQuery() + whereClause, schemaRowsBuilder::withTables); + query(selectColumnsQuery() + whereClause, schemaRowsBuilder::withColumns); selectIndexesQuery() - .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withIndexes, true)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withIndexes)); selectViewsQuery() - .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withViews, true)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withViews)); selectFunctionsQuery() - .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withFunctions, true)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withFunctions)); selectAggregatesQuery() - .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withAggregates, true)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withAggregates)); selectVirtualKeyspacesQuery() - .ifPresent( - select -> query(select + whereClause, schemaRowsBuilder::withVirtualKeyspaces, false)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withVirtualKeyspaces)); selectVirtualTablesQuery() - .ifPresent( - select -> query(select + whereClause, schemaRowsBuilder::withVirtualTables, false)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withVirtualTables)); selectVirtualColumnsQuery() - .ifPresent( - select -> query(select + whereClause, schemaRowsBuilder::withVirtualColumns, false)); + .ifPresent(select -> query(select + whereClause, schemaRowsBuilder::withVirtualColumns)); } private void query( String queryString, - Function, CassandraSchemaRows.Builder> builderUpdater, - boolean warnIfMissing) { + Function, CassandraSchemaRows.Builder> builderUpdater) { assert adminExecutor.inEventLoop(); pendingQueries += 1; query(queryString) 
.whenCompleteAsync( - (result, error) -> handleResult(result, error, builderUpdater, warnIfMissing), - adminExecutor); + (result, error) -> handleResult(result, error, builderUpdater), adminExecutor); } @VisibleForTesting @@ -173,27 +167,18 @@ protected CompletionStage query(String query) { return AdminRequestHandler.query(channel, query, timeout, pageSize, logPrefix).start(); } - /** - * @param warnIfMissing whether to log a warning if the queried table does not exist: some DDAC - * versions report release_version > 4, but don't have a system_virtual_schema keyspace, so we - * want to ignore those errors silently. - */ private void handleResult( AdminResult result, Throwable error, - Function, CassandraSchemaRows.Builder> builderUpdater, - boolean warnIfMissing) { + Function, CassandraSchemaRows.Builder> builderUpdater) { + + // If another query already failed, we've already propagated the failure so just ignore this one + if (schemaRowsFuture.isCompletedExceptionally()) { + return; + } + if (error != null) { - if (warnIfMissing || !error.getMessage().contains("does not exist")) { - Loggers.warnWithException( - LOG, - "[{}] Error during schema refresh, new metadata might be incomplete", - logPrefix, - error); - } - // Proceed without the results of this query, the rest of the schema refresh will run on a - // "best effort" basis - markQueryComplete(); + schemaRowsFuture.completeExceptionally(error); } else { // Store the rows of the current page in the builder schemaRowsBuilder = builderUpdater.apply(result); @@ -201,20 +186,16 @@ private void handleResult( result .nextPage() .whenCompleteAsync( - (nextResult, nextError) -> - handleResult(nextResult, nextError, builderUpdater, warnIfMissing), + (nextResult, nextError) -> handleResult(nextResult, nextError, builderUpdater), adminExecutor); } else { - markQueryComplete(); + pendingQueries -= 1; + if (pendingQueries == 0) { + LOG.debug( + "[{}] Schema queries took {}", logPrefix, 
NanoTime.formatTimeSince(startTimeNs)); + schemaRowsFuture.complete(schemaRowsBuilder.build()); + } } } } - - private void markQueryComplete() { - pendingQueries -= 1; - if (pendingQueries == 0) { - LOG.debug("[{}] Schema queries took {}", logPrefix, NanoTime.formatTimeSince(startTimeNs)); - schemaRowsFuture.complete(schemaRowsBuilder.build()); - } - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index e2792935378..0e708238647 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -323,6 +323,23 @@ public void should_ignore_malformed_rows() { }); } + @Test + public void should_abort_if_query_fails() { + SchemaQueriesWithMockedChannel queries = + new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + CompletionStage result = queries.execute(); + + Exception mockQueryError = new Exception("mock query error"); + + Call call = queries.calls.poll(); + assertThat(call.query).isEqualTo("SELECT * FROM system_schema.keyspaces"); + call.result.completeExceptionally(mockQueryError); + + channel.runPendingTasks(); + + assertThatStage(result).isFailed(throwable -> assertThat(throwable).isEqualTo(mockQueryError)); + } + /** Extends the class under test to mock the query execution logic. 
*/ static class SchemaQueriesWithMockedChannel extends Cassandra3SchemaQueries { From 411a76af1948b7fd6319af12750e28809e07bf0a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 2 Dec 2019 10:25:48 +0100 Subject: [PATCH 167/979] JAVA-2527: Allow AllNodesFailedException to accept more than one error per node (#1362) --- changelog/README.md | 1 + .../api/core/AllNodesFailedException.java | 119 ++++++++++++++---- .../oss/driver/api/core/DriverException.java | 4 +- .../api/core/NoNodeAvailableException.java | 2 +- .../core/control/ControlConnection.java | 28 +++-- .../api/core/AllNodesFailedExceptionTest.java | 93 ++++++++++++++ ...equestHandlerSpeculativeExecutionTest.java | 15 ++- .../examples/retry/DowngradingRetry.java | 9 +- .../oss/driver/core/AllNodesFailedIT.java | 104 +++++++++++++++ .../ProtocolVersionInitialNegotiationIT.java | 2 +- .../core/loadbalancing/NodeTargetingIT.java | 10 +- .../core/retry/DefaultRetryPolicyIT.java | 11 +- .../oss/driver/core/session/ShutdownIT.java | 6 +- manual/core/native_protocol/README.md | 2 +- 14 files changed, 344 insertions(+), 62 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java diff --git a/changelog/README.md b/changelog/README.md index 115e16105ae..f87e4573101 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [bug] JAVA-2527: Allow AllNodesFailedException to accept more than one error per node - [improvement] JAVA-2546: Abort schema refresh if a query fails ### 4.3.0 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java index a897c4d9e27..762da81092f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java @@ -17,13 +17,17 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; /** * Thrown when a query failed on all the coordinators it was tried on. This exception may wrap @@ -31,73 +35,140 @@ */ public class AllNodesFailedException extends DriverException { + /** @deprecated Use {@link #fromErrors(List)} instead. */ @NonNull + @Deprecated public static AllNodesFailedException fromErrors(@Nullable Map errors) { if (errors == null || errors.isEmpty()) { return new NoNodeAvailableException(); } else { - return new AllNodesFailedException(ImmutableMap.copyOf(errors)); + return new AllNodesFailedException(groupByNode(errors)); } } @NonNull - public static AllNodesFailedException fromErrors( - @Nullable List> errors) { - Map map; + public static AllNodesFailedException fromErrors(@Nullable List> errors) { if (errors == null || errors.isEmpty()) { - map = null; + return new NoNodeAvailableException(); } else { - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (Map.Entry entry : errors) { - builder.put(entry); - } - map = builder.build(); + return new AllNodesFailedException(groupByNode(errors)); } - return fromErrors(map); } - private final Map errors; + private final Map> errors; + /** @deprecated Use {@link #AllNodesFailedException(String, ExecutionInfo, Iterable)} instead. 
*/ + @Deprecated protected AllNodesFailedException( @NonNull String message, @Nullable ExecutionInfo executionInfo, @NonNull Map errors) { super(message, executionInfo, null, true); - this.errors = errors; + this.errors = toDeepImmutableMap(groupByNode(errors)); } - private AllNodesFailedException(Map errors) { + protected AllNodesFailedException( + @NonNull String message, + @Nullable ExecutionInfo executionInfo, + @NonNull Iterable>> errors) { + super(message, executionInfo, null, true); + this.errors = toDeepImmutableMap(errors); + } + + private AllNodesFailedException(Map> errors) { this( buildMessage( String.format("All %d node(s) tried for the query failed", errors.size()), errors), null, - errors); + errors.entrySet()); } - private static String buildMessage(String baseMessage, Map errors) { + private static String buildMessage(String baseMessage, Map> errors) { int limit = Math.min(errors.size(), 3); - String details = - Joiner.on(", ").withKeyValueSeparator(": ").join(Iterables.limit(errors.entrySet(), limit)); - + Iterator>> iterator = + Iterables.limit(errors.entrySet(), limit).iterator(); + StringBuilder details = new StringBuilder(); + while (iterator.hasNext()) { + Entry> entry = iterator.next(); + details.append(entry.getKey()).append(": ").append(entry.getValue()); + if (iterator.hasNext()) { + details.append(", "); + } + } return String.format( - baseMessage + " (showing first %d, use getErrors() for more: %s)", limit, details); + "%s (showing first %d nodes, use getAllErrors() for more): %s", + baseMessage, limit, details); } - /** The details of the individual error on each node. */ + /** + * An immutable map containing the first error on each tried node. + * + * @deprecated Use {@link #getAllErrors()} instead. 
+ */ @NonNull + @Deprecated public Map getErrors() { + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (Node node : errors.keySet()) { + List nodeErrors = errors.get(node); + if (!nodeErrors.isEmpty()) { + builder.put(node, nodeErrors.get(0)); + } + } + return builder.build(); + } + + /** An immutable map containing all errors on each tried node. */ + @NonNull + public Map> getAllErrors() { return errors; } @NonNull @Override public DriverException copy() { - return new AllNodesFailedException(getMessage(), getExecutionInfo(), errors); + return new AllNodesFailedException(getMessage(), getExecutionInfo(), errors.entrySet()); } @NonNull public AllNodesFailedException reword(String newMessage) { return new AllNodesFailedException( - buildMessage(newMessage, errors), getExecutionInfo(), errors); + buildMessage(newMessage, errors), getExecutionInfo(), errors.entrySet()); + } + + private static Map> groupByNode(Map errors) { + return groupByNode(errors.entrySet()); + } + + private static Map> groupByNode(Iterable> errors) { + // no need for immutable collections here + Map> map = new LinkedHashMap<>(); + for (Entry entry : errors) { + Node node = entry.getKey(); + Throwable error = entry.getValue(); + map.compute( + node, + (k, v) -> { + if (v == null) { + v = new ArrayList<>(); + } + v.add(error); + return v; + }); + } + return map; + } + + private static Map> toDeepImmutableMap(Map> errors) { + return toDeepImmutableMap(errors.entrySet()); + } + + private static Map> toDeepImmutableMap( + Iterable>> errors) { + ImmutableMap.Builder> builder = ImmutableMap.builder(); + for (Entry> entry : errors) { + builder.put(entry.getKey(), ImmutableList.copyOf(entry.getValue())); + } + return builder.build(); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java index bf0f65f1649..9497ef15cf5 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java @@ -74,8 +74,8 @@ protected DriverException( * *

      Note that this is only set for exceptions that are rethrown directly to the client from a * session call. For example, individual node errors stored in {@link - * AllNodesFailedException#getErrors()} or {@link ExecutionInfo#getErrors()} do not contain their - * own execution info, and therefore return null from this method. + * AllNodesFailedException#getAllErrors()} or {@link ExecutionInfo#getErrors()} do not contain + * their own execution info, and therefore return null from this method. * *

      It will also be null if you serialize and deserialize an exception. */ diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/NoNodeAvailableException.java b/core/src/main/java/com/datastax/oss/driver/api/core/NoNodeAvailableException.java index db231adf219..7b2e7c9ffae 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/NoNodeAvailableException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/NoNodeAvailableException.java @@ -31,7 +31,7 @@ public NoNodeAvailableException() { } private NoNodeAvailableException(ExecutionInfo executionInfo) { - super("No node was available to execute the query", executionInfo, Collections.emptyMap()); + super("No node was available to execute the query", executionInfo, Collections.emptySet()); } @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index 5c310c9c72f..a3bd11d8452 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -48,9 +48,12 @@ import com.datastax.oss.protocol.internal.response.event.TopologyChangeEvent; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.util.concurrent.EventExecutor; +import java.util.AbstractMap.SimpleEntry; +import java.util.ArrayList; import java.util.Collection; -import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Queue; import java.util.WeakHashMap; import java.util.concurrent.CompletableFuture; @@ -348,7 +351,7 @@ private CompletionStage reconnect() { private void connect( Queue nodes, - Map errors, + List> errors, Runnable onSuccess, Consumer onFailure) { assert adminExecutor.inEventLoop(); @@ -390,9 +393,9 @@ private void connect( error); } } - Map newErrors = - (errors == null) 
? new LinkedHashMap<>() : errors; - newErrors.put(node, error); + List> newErrors = + (errors == null) ? new ArrayList<>() : errors; + newErrors.add(new SimpleEntry<>(node, error)); context.getEventBus().fire(ChannelEvent.controlConnectionFailed(node)); connect(nodes, newErrors, onSuccess, onFailure); } @@ -573,20 +576,21 @@ private void forceClose() { } private boolean isAuthFailure(Throwable error) { - boolean authFailure = true; if (error instanceof AllNodesFailedException) { - Collection errors = ((AllNodesFailedException) error).getErrors().values(); + Collection> errors = + ((AllNodesFailedException) error).getAllErrors().values(); if (errors.size() == 0) { return false; } - for (Throwable nodeError : errors) { - if (!(nodeError instanceof AuthenticationException)) { - authFailure = false; - break; + for (List nodeErrors : errors) { + for (Throwable nodeError : nodeErrors) { + if (!(nodeError instanceof AuthenticationException)) { + return false; + } } } } - return authFailure; + return true; } private static ImmutableList buildEventTypes(boolean listenClusterEvents) { diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java new file mode 100644 index 00000000000..020cb47a9b2 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java @@ -0,0 +1,93 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core; + +import static com.datastax.oss.driver.api.core.ConsistencyLevel.QUORUM; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.data.MapEntry.entry; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class AllNodesFailedExceptionTest { + + @Mock(name = "node1") + private Node node1; + + @Mock(name = "node2") + private Node node2; + + @SuppressWarnings("deprecation") + @Test + public void should_create_instance_from_map_of_first_errors() { + // given + UnavailableException e1 = new UnavailableException(node1, QUORUM, 2, 1); + ReadTimeoutException e2 = new ReadTimeoutException(node2, QUORUM, 2, 1, false); + Map errors = ImmutableMap.of(node1, e1, node2, e2); + // when + AllNodesFailedException e = AllNodesFailedException.fromErrors(errors); + // then + assertThat(e) + .hasMessage( + "All 2 node(s) tried for the query failed " + + "(showing first 2 nodes, use getAllErrors() for more): " + + "node1: [%s], node2: [%s]", + e1, e2); + assertThat(e.getAllErrors()) + .hasEntrySatisfying(node1, list -> assertThat(list).containsExactly(e1)); + assertThat(e.getAllErrors()) + .hasEntrySatisfying(node2, list -> assertThat(list).containsExactly(e2)); + assertThat(e.getErrors()).containsEntry(node1, e1); + 
assertThat(e.getErrors()).containsEntry(node2, e2); + } + + @SuppressWarnings("deprecation") + @Test + public void should_create_instance_from_list_of_all_errors() { + // given + UnavailableException e1a = new UnavailableException(node1, QUORUM, 2, 1); + ReadTimeoutException e1b = new ReadTimeoutException(node1, QUORUM, 2, 1, false); + ReadTimeoutException e2a = new ReadTimeoutException(node2, QUORUM, 2, 1, false); + List> errors = + ImmutableList.of(entry(node1, e1a), entry(node1, e1b), entry(node2, e2a)); + // when + AllNodesFailedException e = AllNodesFailedException.fromErrors(errors); + // then + assertThat(e) + .hasMessage( + "All 2 node(s) tried for the query failed " + + "(showing first 2 nodes, use getAllErrors() for more): " + + "node1: [%s, %s], node2: [%s]", + e1a, e1b, e2a); + assertThat(e.getAllErrors()) + .hasEntrySatisfying(node1, list -> assertThat(list).containsExactly(e1a, e1b)); + assertThat(e.getAllErrors()) + .hasEntrySatisfying(node2, list -> assertThat(list).containsExactly(e2a)); + assertThat(e.getErrors()).containsEntry(node1, e1a); + assertThat(e.getErrors()).containsEntry(node2, e2a); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java index 6be5a39d7c1..eb70e79b0d5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerSpeculativeExecutionTest.java @@ -37,6 +37,7 @@ import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.Error; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; @@ -261,10 +262,11 @@ public void 
should_fail_if_no_more_nodes_and_initial_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } @@ -315,10 +317,11 @@ public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java b/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java index c2cd119a1c8..c83e2ef2296 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; import com.datastax.oss.driver.api.core.servererrors.UnavailableException; import 
com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import java.util.List; /** * This example illustrates how to implement a downgrading retry strategy from application code. @@ -418,9 +419,11 @@ private static ConsistencyLevel downgrade( private static DriverException unwrapAllNodesFailedException(DriverException e) { if (e instanceof AllNodesFailedException) { AllNodesFailedException noHostAvailable = (AllNodesFailedException) e; - for (Throwable error : noHostAvailable.getErrors().values()) { - if (error instanceof QueryConsistencyException || error instanceof UnavailableException) { - return (DriverException) error; + for (List errors : noHostAvailable.getAllErrors().values()) { + for (Throwable error : errors) { + if (error instanceof QueryConsistencyException || error instanceof UnavailableException) { + return (DriverException) error; + } } } // Couldn't find an exploitable error to unwrap: abort. diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java new file mode 100644 index 00000000000..dfab751cb1a --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java @@ -0,0 +1,104 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core; + +import static com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.readTimeout; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.retry.DefaultRetryPolicy; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Iterator; +import java.util.List; +import java.util.Map.Entry; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class AllNodesFailedIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(2)); + + @Test + public void should_report_multiple_errors_per_node() { + SIMULACRON_RULE.cluster().prime(when("SELECT foo").then(readTimeout(ONE, 0, 0, false))); + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + 
.withClass(DefaultDriverOption.RETRY_POLICY_CLASS, MultipleRetryPolicy.class) + .build(); + + try (CqlSession session = + (CqlSession) + SessionUtils.baseBuilder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withConfigLoader(loader) + .build()) { + // when executing a query. + session.execute("SELECT foo"); + fail("AllNodesFailedException expected"); + } catch (AllNodesFailedException ex) { + assertThat(ex.getAllErrors()).hasSize(2); + Iterator>> iterator = ex.getAllErrors().entrySet().iterator(); + // first node should have been tried twice + Entry> node1Errors = iterator.next(); + assertThat(node1Errors.getValue()).hasSize(2); + // second node should have been tried twice + Entry> node2Errors = iterator.next(); + assertThat(node2Errors.getValue()).hasSize(2); + } + } + + public static class MultipleRetryPolicy extends DefaultRetryPolicy { + + public MultipleRetryPolicy(DriverContext context, String profileName) { + super(context, profileName); + } + + @Override + public RetryDecision onReadTimeout( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int blockFor, + int received, + boolean dataPresent, + int retryCount) { + // retry each node twice + if (retryCount % 2 == 0) { + return RetryDecision.RETRY_SAME; + } else { + return RetryDecision.RETRY_NEXT; + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index 4f21a76fd6a..705bfc4292d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -65,7 +65,7 @@ public void should_fail_if_provided_version_isnt_supported() { session.execute("select * from system.local"); fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException 
anfe) { - Throwable cause = anfe.getErrors().values().iterator().next(); + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); UnsupportedProtocolVersionException unsupportedException = (UnsupportedProtocolVersionException) cause; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java index 0c9c61bb22a..e27abe1f554 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/NodeTargetingIT.java @@ -100,8 +100,8 @@ public void should_fail_if_node_fails_query() { SESSION_RULE.session().execute(statement); fail("Should have thrown AllNodesFailedException"); } catch (AllNodesFailedException e) { - assertThat(e.getErrors().size()).isEqualTo(1); - assertThat(e.getErrors().get(node3)).isInstanceOf(UnavailableException.class); + assertThat(e.getAllErrors().size()).isEqualTo(1); + assertThat(e.getAllErrors().get(node3).get(0)).isInstanceOf(UnavailableException.class); } } @@ -116,13 +116,13 @@ public void should_fail_if_node_is_not_connected() { SESSION_RULE.session().execute(statement); fail("Query should have failed"); } catch (NoNodeAvailableException e) { - assertThat(e.getErrors()).isEmpty(); + assertThat(e.getAllErrors()).isEmpty(); } catch (AllNodesFailedException e) { // its also possible that the query is tried. This can happen if the node was marked // down, but not all connections have been closed yet. In this case, just verify that // the expected host failed. 
- assertThat(e.getErrors().size()).isEqualTo(1); - assertThat(e.getErrors()).containsOnlyKeys(node4); + assertThat(e.getAllErrors().size()).isEqualTo(1); + assertThat(e.getAllErrors()).containsOnlyKeys(node4); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java index 66531b19d50..8e496db350f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java @@ -61,6 +61,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.util.Arrays; +import java.util.List; import java.util.Map; import org.junit.After; import org.junit.Before; @@ -274,7 +275,7 @@ public void should_keep_retrying_on_next_host_on_connection_error() { } catch (AllNodesFailedException ex) { // then an AllNodesFailedException should be raised indicating that all nodes failed the // request. - assertThat(ex.getErrors()).hasSize(3); + assertThat(ex.getAllErrors()).hasSize(3); } // should have been tried on all nodes. @@ -496,9 +497,11 @@ public void should_keep_retrying_on_next_host_on_error_response() { fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException e) { // then we should get an all nodes failed exception, indicating the query was tried each node. 
- assertThat(e.getErrors()).hasSize(3); - for (Throwable t : e.getErrors().values()) { - assertThat(t).isInstanceOf(ServerError.class); + assertThat(e.getAllErrors()).hasSize(3); + for (List nodeErrors : e.getAllErrors().values()) { + for (Throwable nodeError : nodeErrors) { + assertThat(nodeError).isInstanceOf(ServerError.class); + } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java index 96d8ca1b68e..9f84c9144de 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java @@ -99,9 +99,9 @@ public void should_fail_requests_when_session_is_closed() throws Exception { AllNodesFailedException anfe = (AllNodesFailedException) error; // if there were 0 errors, its a NoNodeAvailableException which is // acceptable. - if (anfe.getErrors().size() > 0) { - assertThat(anfe.getErrors()).hasSize(1); - error = anfe.getErrors().values().iterator().next(); + if (anfe.getAllErrors().size() > 0) { + assertThat(anfe.getAllErrors()).hasSize(1); + error = anfe.getAllErrors().values().iterator().next().get(0); if (!(error instanceof IllegalStateException) && !error.getMessage().endsWith("is closing")) { unexpectedErrors.add(error.toString()); diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 714d7f4b986..60133dfb9cb 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -59,7 +59,7 @@ If you force a version that is too high for the server, you'll get an error: ``` Exception in thread "main" com.datastax.oss.driver.api.core.AllNodesFailedException: - All 1 node tried for the query failed (showing first 1, use getErrors() for more: + All 1 node tried for the query failed (showing first 1 nodes, use getAllErrors() for more: 
/127.0.0.1:9042: com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException: [/127.0.0.1:9042] Host does not support protocol version V5) ``` From 61c05755ab889c92b0aa3a3025b59314a3cadeda Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Mon, 2 Dec 2019 01:27:40 -0800 Subject: [PATCH 168/979] JAVA-2107: Add XML formatting plugin (#1363) --- CONTRIBUTING.md | 19 +++++++++++++++---- changelog/README.md | 1 + pom.xml | 23 +++++++++++++++++++++++ 3 files changed, 39 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2ee0f377742..068498032cd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,6 +2,8 @@ ## Code formatting +### Java + We follow the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html). See https://github.com/google/google-java-format for IDE plugins. The rules are not configurable. @@ -11,11 +13,20 @@ The build will fail if the code is not formatted. To format all files from the c mvn fmt:format ``` -Some aspects are not covered by the formatter: +Some aspects are not covered by the formatter: braces must be used with `if`, `else`, `for`, `do` +and `while` statements, even when the body is empty or contains only a single statement. + +### XML + +The build will fail if XML files are not formatted correctly. Run the following command before you +commit: + +```java +mvn xml-format:xml-format +``` -* braces must be used with `if`, `else`, `for`, `do` and `while` statements, even when the body is - empty or contains only a single statement. -* XML files: indent with two spaces and wrap to respect the column limit of 100 characters. +The formatter does not enforce a maximum line length, but please try to keep it below 100 characters +to keep files readable across all mediums (IDE, terminal, Github...). 
## Coding style -- production code diff --git a/changelog/README.md b/changelog/README.md index f87e4573101..711dc56176c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2107: Add XML formatting plugin - [bug] JAVA-2527: Allow AllNodesFailedException to accept more than one error per node - [improvement] JAVA-2546: Abort schema refresh if a query fails diff --git a/pom.xml b/pom.xml index 7232c959ce9..b5b7fe335cc 100644 --- a/pom.xml +++ b/pom.xml @@ -291,6 +291,11 @@ fmt-maven-plugin 2.9 + + au.com.acegi + xml-format-maven-plugin + 3.1.1 + com.mycila license-maven-plugin @@ -439,6 +444,24 @@ + + au.com.acegi + xml-format-maven-plugin + + + + xml-check + + + + + + .idea/** + **/target/** + **/dependency-reduced-pom.xml + + + com.mycila license-maven-plugin From 76b0cdd5d882d54c305d7cdf645ab768ea9a4fa6 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Mon, 2 Dec 2019 01:28:29 -0800 Subject: [PATCH 169/979] JAVA-2521: Use dependencyManagement for internal modules (#1360) --- core-shaded/pom.xml | 3 --- examples/pom.xml | 3 --- integration-tests/pom.xml | 5 ----- mapper-processor/pom.xml | 1 - mapper-runtime/pom.xml | 2 -- pom.xml | 36 ++++++++++++++++++++++++++++++++++++ query-builder/pom.xml | 1 - 7 files changed, 36 insertions(+), 15 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 8177538e29d..1285dc0c46c 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -37,7 +37,6 @@ com.datastax.oss java-driver-core - ${project.version} diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 3f313e6328b..6582ffa26b9 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -40,32 +40,27 @@ com.datastax.oss java-driver-test-infra - ${project.parent.version} test com.datastax.oss java-driver-query-builder - ${project.parent.version} test com.datastax.oss java-driver-mapper-processor - ${project.parent.version} test true 
com.datastax.oss java-driver-mapper-runtime - ${project.parent.version} test com.datastax.oss java-driver-core - ${project.parent.version} test-jar test diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a12ab7b5ad0..f7643a26b9c 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -32,7 +32,6 @@ com.datastax.oss java-driver-mapper-runtime - ${project.version} com.datastax.oss diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index e33d0349076..e3812d10584 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -33,12 +33,10 @@ ${project.groupId} java-driver-core - ${project.version} ${project.groupId} java-driver-query-builder - ${project.version} com.github.stephenc.jcip diff --git a/pom.xml b/pom.xml index b5b7fe335cc..09372d300f7 100644 --- a/pom.xml +++ b/pom.xml @@ -75,6 +75,42 @@ + + com.datastax.oss + java-driver-core + ${project.version} + + + com.datastax.oss + java-driver-core + ${project.version} + test-jar + + + com.datastax.oss + java-driver-core-shaded + ${project.version} + + + com.datastax.oss + java-driver-mapper-processor + ${project.version} + + + com.datastax.oss + java-driver-mapper-runtime + ${project.version} + + + com.datastax.oss + java-driver-query-builder + ${project.version} + + + com.datastax.oss + java-driver-test-infra + ${project.version} + com.datastax.oss native-protocol diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 0345f3d4bd0..50dd18e7f42 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -33,7 +33,6 @@ com.datastax.oss java-driver-core - ${project.version} com.datastax.oss From 0f0da297ccb510a07a89371d84670e086d3eafcd Mon Sep 17 00:00:00 2001 From: AndrewFrom Date: Wed, 6 Nov 2019 14:37:29 -0600 Subject: [PATCH 170/979] Add BoundStatement ReturnType as a return type for Create, Update, and Delete operations Motivation: In the 3.x driver the object-mapper had Mapper.saveQuery(entity) and Mapper.deleteQuery(id) which returned a 
BoundStatement that you could later execute. This was useful for creating a list of BoundStatement that would be added to a LOGGED Batch statement for atomically saving data that is saved across several denormalized tables (to support different query patterns on the same data). While the current code sort of enables that with the @SetEntity, it however lacks a lot of the features that the other DAO methods have, for instance setting a TTL, custom IF and WHERE clauses that are are supported by the @Insert, @Update, and @Delete methods. And not all of these features may be supportable on this more generic implemenation. Modifcations: Added a new DaoReturnType.BOUND_STATEMENT that is practically identical to to the existing DaoReturnType. Since it just returns the BoundStatment instead of executing it. Result: For @Insert, @Update, @Delete, and @Query DAO methods BoundStatement is now a new supported return type. This is an additive change to the API of the Mapper so it is non-breaking. --- changelog/README.md | 1 + .../datastax/oss/driver/mapper/DeleteIT.java | 144 ++++++++++++++++++ .../datastax/oss/driver/mapper/InsertIT.java | 13 ++ .../driver/mapper/UpdateCustomIfClauseIT.java | 36 +++++ .../datastax/oss/driver/mapper/UpdateIT.java | 27 ++++ manual/mapper/daos/delete/README.md | 9 ++ manual/mapper/daos/insert/README.md | 8 +- manual/mapper/daos/query/README.md | 4 + manual/mapper/daos/update/README.md | 16 +- .../dao/DaoDeleteMethodGenerator.java | 9 +- .../dao/DaoInsertMethodGenerator.java | 2 + .../mapper/processor/dao/DaoReturnType.java | 2 + .../dao/DaoUpdateMethodGenerator.java | 9 +- .../dao/DefaultDaoReturnTypeKind.java | 6 + .../dao/DefaultDaoReturnTypeParser.java | 2 + .../dao/DaoDeleteMethodGeneratorTest.java | 2 +- .../dao/DaoInsertMethodGeneratorTest.java | 3 +- .../dao/DaoQueryMethodGeneratorTest.java | 2 +- .../dao/DaoUpdateMethodGeneratorTest.java | 2 +- .../driver/api/mapper/annotations/Delete.java | 7 + 
.../driver/api/mapper/annotations/Insert.java | 7 + .../driver/api/mapper/annotations/Query.java | 3 + .../driver/api/mapper/annotations/Update.java | 15 +- 23 files changed, 314 insertions(+), 15 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 711dc56176c..1715e3d8e41 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [new feature] JAVA-2532: Add BoundStatement ReturnType for insert, update, and delete DAO methods - [improvement] JAVA-2107: Add XML formatting plugin - [bug] JAVA-2527: Allow AllNodesFailedException to accept more than one error per node - [improvement] JAVA-2546: Abort schema refresh if a query fails diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 5fb3d49ba60..18ff14cee43 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; @@ -174,6 +175,21 @@ public void should_delete_with_condition() { assertThat(dao.findById(id)).isNull(); } + @Test + public void should_delete_with_condition_statement() { + UUID id = FLAMETHROWER.getId(); + assertThat(dao.findById(id)).isNotNull(); + + BoundStatement bs = dao.deleteIfDescriptionMatchesStatement(id, "foo"); + ResultSet rs = SESSION_RULE.session().execute(bs); + assertThat(rs.wasApplied()).isFalse(); + assertThat(rs.one().getString("description")).isEqualTo(FLAMETHROWER.getDescription()); + 
+ rs = dao.deleteIfDescriptionMatches(id, FLAMETHROWER.getDescription()); + assertThat(rs.wasApplied()).isTrue(); + assertThat(dao.findById(id)).isNull(); + } + @Test public void should_delete_with_condition_asynchronously() { UUID id = FLAMETHROWER.getId(); @@ -198,6 +214,13 @@ public void should_delete_by_partition_key() { assertThat(saleDao.all().all()).containsOnly(FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); } + @Test + public void should_delete_by_partition_key_statement() { + // should delete FLAMETHROWER_SALE_[1-4] + SESSION_RULE.session().execute(saleDao.deleteByIdForDayStatement(FLAMETHROWER.getId(), DATE_1)); + assertThat(saleDao.all().all()).containsOnly(FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); + } + @Test public void should_delete_by_partition_key_and_partial_clustering() { // should delete FLAMETHROWER_SALE_{1,3,4] @@ -206,6 +229,16 @@ public void should_delete_by_partition_key_and_partial_clustering() { .containsOnly(FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); } + @Test + public void should_delete_by_partition_key_and_partial_clustering_statement() { + // should delete FLAMETHROWER_SALE_{1,3,4] + SESSION_RULE + .session() + .execute(saleDao.deleteByIdForCustomerStatement(FLAMETHROWER.getId(), DATE_1, 1)); + assertThat(saleDao.all().all()) + .containsOnly(FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); + } + @Test public void should_delete_by_primary_key_sales() { // should delete FLAMETHROWER_SALE_2 @@ -220,6 +253,23 @@ public void should_delete_by_primary_key_sales() { MP3_DOWNLOAD_SALE_1); } + @Test + public void should_delete_by_primary_key_sales_statement() { + // should delete FLAMETHROWER_SALE_2 + SESSION_RULE + .session() + .execute( + saleDao.deleteByIdForCustomerAtTimeStatement( + FLAMETHROWER.getId(), DATE_1, 2, FLAMETHROWER_SALE_2.getTs())); + assertThat(saleDao.all().all()) + .containsOnly( + FLAMETHROWER_SALE_1, + FLAMETHROWER_SALE_3, + FLAMETHROWER_SALE_4, + FLAMETHROWER_SALE_5, + 
MP3_DOWNLOAD_SALE_1); + } + @Test public void should_delete_if_price_matches() { ResultSet result = @@ -238,6 +288,26 @@ public void should_delete_if_price_matches() { assertThat(result.wasApplied()).isTrue(); } + @Test + public void should_delete_if_price_matchesStatement() { + BoundStatement bs = + saleDao.deleteIfPriceMatchesStatement( + FLAMETHROWER.getId(), DATE_1, 2, FLAMETHROWER_SALE_2.getTs(), 250.0); + ResultSet result = SESSION_RULE.session().execute(bs); + + assertThat(result.wasApplied()).isFalse(); + Row row = result.one(); + assertThat(row).isNotNull(); + assertThat(row.getDouble("price")).isEqualTo(500.0); + + bs = + saleDao.deleteIfPriceMatchesStatement( + FLAMETHROWER.getId(), DATE_1, 2, FLAMETHROWER_SALE_2.getTs(), 500.0); + result = SESSION_RULE.session().execute(bs); + + assertThat(result.wasApplied()).isTrue(); + } + @Test public void should_delete_if_exists_sales() { assertThat(saleDao.deleteIfExists(FLAMETHROWER.getId(), DATE_1, 2, FLAMETHROWER_SALE_2.getTs())) @@ -262,6 +332,24 @@ public void should_delete_within_time_range() { FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_4, FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); } + @Test + public void should_delete_within_time_range_statement() { + // should delete FLAMETHROWER_SALE_{1,3}, but not 4 because range ends before + SESSION_RULE + .session() + .execute( + saleDao.deleteInTimeRangeStatement( + FLAMETHROWER.getId(), + DATE_1, + 1, + FLAMETHROWER_SALE_1.getTs(), + Uuids.startOf(Uuids.unixTimestamp(FLAMETHROWER_SALE_4.getTs()) - 1000))); + + assertThat(saleDao.all().all()) + .containsOnly( + FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_4, FLAMETHROWER_SALE_5, MP3_DOWNLOAD_SALE_1); + } + @Test public void should_delete_if_price_matches_custom_where() { ResultSet result = @@ -280,6 +368,26 @@ public void should_delete_if_price_matches_custom_where() { assertThat(result.wasApplied()).isTrue(); } + @Test + public void should_delete_if_price_matches_custom_where_statement() { + BoundStatement bs = + 
saleDao.deleteCustomWhereCustomIfStatement( + 2, FLAMETHROWER.getId(), DATE_1, FLAMETHROWER_SALE_2.getTs(), 250.0); + ResultSet result = SESSION_RULE.session().execute(bs); + + assertThat(result.wasApplied()).isFalse(); + Row row = result.one(); + assertThat(row).isNotNull(); + assertThat(row.getDouble("price")).isEqualTo(500.0); + + bs = + saleDao.deleteCustomWhereCustomIfStatement( + 2, FLAMETHROWER.getId(), DATE_1, FLAMETHROWER_SALE_2.getTs(), 500.0); + result = SESSION_RULE.session().execute(bs); + + assertThat(result.wasApplied()).isTrue(); + } + @Mapper public interface InventoryMapper { @DaoFactory @@ -305,6 +413,9 @@ public interface ProductDao { @Delete(entityClass = Product.class, customIfClause = "description = :expectedDescription") ResultSet deleteIfDescriptionMatches(UUID productId, String expectedDescription); + @Delete(entityClass = Product.class, customIfClause = "description = :expectedDescription") + BoundStatement deleteIfDescriptionMatchesStatement(UUID productId, String expectedDescription); + @Delete CompletionStage deleteAsync(Product product); @@ -335,18 +446,35 @@ public interface ProductSaleDao { @Delete(entityClass = ProductSale.class) ResultSet deleteByIdForDay(UUID id, String day); + // delete all rows in partition + @Delete(entityClass = ProductSale.class) + BoundStatement deleteByIdForDayStatement(UUID id, String day); + // delete by partition key and partial clustering key @Delete(entityClass = ProductSale.class) ResultSet deleteByIdForCustomer(UUID id, String day, int customerId); + // delete by partition key and partial clustering key + @Delete(entityClass = ProductSale.class) + BoundStatement deleteByIdForCustomerStatement(UUID id, String day, int customerId); + // delete row (full primary key) @Delete(entityClass = ProductSale.class) ResultSet deleteByIdForCustomerAtTime(UUID id, String day, int customerId, UUID ts); + // delete row (full primary key) + @Delete(entityClass = ProductSale.class) + BoundStatement 
deleteByIdForCustomerAtTimeStatement( + UUID id, String day, int customerId, UUID ts); + @Delete(entityClass = ProductSale.class, customIfClause = "price = :expectedPrice") ResultSet deleteIfPriceMatches( UUID id, String day, int customerId, UUID ts, double expectedPrice); + @Delete(entityClass = ProductSale.class, customIfClause = "price = :expectedPrice") + BoundStatement deleteIfPriceMatchesStatement( + UUID id, String day, int customerId, UUID ts, double expectedPrice); + @Delete( entityClass = ProductSale.class, customWhereClause = @@ -354,6 +482,14 @@ ResultSet deleteIfPriceMatches( + ":endTs") ResultSet deleteInTimeRange(UUID id, String day, int customerId, UUID startTs, UUID endTs); + @Delete( + entityClass = ProductSale.class, + customWhereClause = + "id = :id and day = :day and customer_id = :customerId and ts >= :startTs and ts < " + + ":endTs") + BoundStatement deleteInTimeRangeStatement( + UUID id, String day, int customerId, UUID startTs, UUID endTs); + // transpose order of parameters so doesn't match primary key to ensure that works. @Delete( entityClass = ProductSale.class, @@ -362,6 +498,14 @@ ResultSet deleteIfPriceMatches( ResultSet deleteCustomWhereCustomIf( int customerId, UUID id, String day, UUID ts, double expectedPrice); + // transpose order of parameters so doesn't match primary key to ensure that works. 
+ @Delete( + entityClass = ProductSale.class, + customWhereClause = "id = :id and day = :day and customer_id = :customerId and ts = :ts", + customIfClause = "price = :expectedPrice") + BoundStatement deleteCustomWhereCustomIfStatement( + int customerId, UUID id, String day, UUID ts, double expectedPrice); + @Delete(entityClass = ProductSale.class, ifExists = true) boolean deleteIfExists(UUID id, String day, int customerId, UUID ts); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java index da7cade385b..a89db6e8bfc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertIT.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; @@ -98,6 +99,15 @@ public void should_insert_entity_returning_result_set() { assertThat(dao.findById(FLAMETHROWER.getId())).isEqualTo(FLAMETHROWER); } + @Test + public void should_return_bound_statement_to_execute() { + assertThat(dao.findById(FLAMETHROWER.getId())).isNull(); + BoundStatement bs = dao.saveReturningBoundStatement(FLAMETHROWER); + ResultSet rs = SESSION_RULE.session().execute(bs); + assertThat(rs.getAvailableWithoutFetching()).isZero(); + assertThat(dao.findById(FLAMETHROWER.getId())).isEqualTo(FLAMETHROWER); + } + @Test public void should_insert_entity_asynchronously() { assertThat(dao.findById(FLAMETHROWER.getId())).isNull(); @@ -289,6 +299,9 @@ public interface ProductDao { @Insert ResultSet saveReturningResultSet(Product product); + @Insert + 
BoundStatement saveReturningBoundStatement(Product product); + @Insert(timestamp = ":timestamp") void saveWithBoundTimestamp(Product product, long timestamp); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java index 11636cd6887..07fd3aba869 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.annotations.CqlName; @@ -90,6 +91,22 @@ public void should_update_entity_if_condition_is_met() { assertThat(dao.updateIfLength(otherProduct, 10).wasApplied()).isEqualTo(true); } + @Test + public void should_update_entity_if_condition_is_met_statement() { + dao.update( + new Product(FLAMETHROWER.getId(), "Description for length 10", new Dimensions(10, 1, 1))); + assertThat(dao.findById(FLAMETHROWER.getId())).isNotNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + assertThat( + SESSION_RULE + .session() + .execute(dao.updateIfLengthStatement(otherProduct, 10)) + .wasApplied()) + .isEqualTo(true); + } + @Test public void should_not_update_entity_if_condition_is_not_met() { dao.update( @@ -101,6 +118,22 @@ public void should_not_update_entity_if_condition_is_not_met() { assertThat(dao.updateIfLength(otherProduct, 20).wasApplied()).isEqualTo(false); } + @Test + public void 
should_not_update_entity_if_condition_is_not_met_statement() { + dao.update( + new Product(FLAMETHROWER.getId(), "Description for length 10", new Dimensions(10, 1, 1))); + assertThat(dao.findById(FLAMETHROWER.getId())).isNotNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + assertThat( + SESSION_RULE + .session() + .execute(dao.updateIfLengthStatement(otherProduct, 20)) + .wasApplied()) + .isEqualTo(false); + } + @Test public void should_async_update_entity_if_condition_is_met() { dao.update( @@ -144,6 +177,9 @@ public interface ProductDao { @Update(customIfClause = "dimensions.length = :length") ResultSet updateIfLength(Product product, int length); + @Update(customIfClause = "dimensions.length = :length") + BoundStatement updateIfLengthStatement(Product product, int length); + @Update(customIfClause = "dimensions.length = :\"Length\"") CompletableFuture updateIfLengthAsync( Product product, @CqlName("\"Length\"") int length); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index 878d1fa1db7..e7ab56f663c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; @@ -243,6 +244,18 @@ public void should_update_entity_if_exists() { assertThat(dao.updateIfExists(otherProduct).wasApplied()).isEqualTo(true); } + @Test + public void 
should_update_entity_if_exists_statement() { + dao.update(FLAMETHROWER); + assertThat(dao.findById(FLAMETHROWER.getId())).isNotNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + assertThat( + SESSION_RULE.session().execute(dao.updateIfExistsStatement(otherProduct)).wasApplied()) + .isEqualTo(true); + } + @Test public void should_not_update_entity_if_not_exists() { assertThat(dao.findById(FLAMETHROWER.getId())).isNull(); @@ -252,6 +265,17 @@ public void should_not_update_entity_if_not_exists() { assertThat(dao.updateIfExists(otherProduct).wasApplied()).isEqualTo(false); } + @Test + public void should_not_update_entity_if_not_exists_statement() { + assertThat(dao.findById(FLAMETHROWER.getId())).isNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + assertThat( + SESSION_RULE.session().execute(dao.updateIfExistsStatement(otherProduct)).wasApplied()) + .isEqualTo(false); + } + @Test public void should_update_entity_if_exists_asynchronously() { dao.update(FLAMETHROWER); @@ -426,6 +450,9 @@ public interface ProductDao { @Update(ifExists = true) ResultSet updateIfExists(Product product); + @Update(ifExists = true) + BoundStatement updateIfExistsStatement(Product product); + @Update CompletableFuture updateAsync(Product product); diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index f92abbd3434..6d7dba65ab4 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -105,6 +105,14 @@ The method can return: ResultSet deleteIfDescriptionMatches(UUID productId, String expectedDescription); // if the condition fails, the result set will contain columns '[applied]' and 'description' ``` + +* a [BoundStatement]. This is intended for queries where you will execute this statement later + or in a batch. 
+ + ```java + @Delete + BoundStatement delete(Product product); + ``` * a [CompletionStage] or [CompletableFuture] of any of the above. The method will execute the query asynchronously. Note that for result sets, you need to switch to [AsyncResultSet]. @@ -141,6 +149,7 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). [@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html [ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html [ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 034086688fc..0cca26143b2 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -68,6 +68,12 @@ The method can return: @Insert ResultSet save(Product product); ``` +* a [BoundStatement]. This is intended for cases where you intend to execute this statement later or in a batch: + + ```java + @Insert + BoundStatement save(Product product); + ``` * a [CompletionStage] or [CompletableFuture] of any of the above. The mapper will execute the query asynchronously. @@ -98,7 +104,7 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). 
[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html [ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- - +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 9ce2f5e5178..9f7f9801be1 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -54,6 +54,9 @@ The method can return: * a [ResultSet]. The method will return the raw query result, without any conversion. +* a [BoundStatement]. This is intended for queries where you will execute this statement later + or in a batch. + * a [PagingIterable]. The method will convert each row into an entity instance. * a [CompletionStage] or [CompletableFuture] of any of the above. 
The method will execute the query @@ -111,6 +114,7 @@ Then: [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html [Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 77df48de95f..48b91a22793 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -101,6 +101,13 @@ The method can return: ResultSet updateIfExists(Product product); // if the condition fails, the result set will contain columns '[applied]' and 'description' ``` + +* a [BoundStatement]. This is intended for queries where you will execute this statement later or in a batch: + + ```java + @Update + BoundStatement update(Product product); + ``` * a [CompletionStage] or [CompletableFuture] of any of the above. The mapper will execute the query asynchronously. @@ -130,8 +137,9 @@ entity class and the naming convention). 
[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- [@Update]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html -[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html +[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html +[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html [ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 53e68c514e5..1fe33b4d1aa 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.mapper.processor.dao; import 
static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; @@ -62,7 +63,13 @@ public DaoDeleteMethodGenerator( protected Set getSupportedReturnTypes() { return ImmutableSet.of( - VOID, FUTURE_OF_VOID, BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, FUTURE_OF_ASYNC_RESULT_SET); + VOID, + FUTURE_OF_VOID, + BOOLEAN, + FUTURE_OF_BOOLEAN, + RESULT_SET, + BOUND_STATEMENT, + FUTURE_OF_ASYNC_RESULT_SET); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 687753da23c..3c6d93c077d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.mapper.processor.dao; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static 
com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; @@ -71,6 +72,7 @@ protected Set getSupportedReturnTypes() { BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, + BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java index 647e3d0225a..116c4812353 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java @@ -26,6 +26,8 @@ public class DaoReturnType { public static final DaoReturnType ROW = new DaoReturnType(DefaultDaoReturnTypeKind.ROW); public static final DaoReturnType RESULT_SET = new DaoReturnType(DefaultDaoReturnTypeKind.RESULT_SET); + public static final DaoReturnType BOUND_STATEMENT = + new DaoReturnType(DefaultDaoReturnTypeKind.BOUND_STATEMENT); public static final DaoReturnType FUTURE_OF_VOID = new DaoReturnType(DefaultDaoReturnTypeKind.FUTURE_OF_VOID); public static final DaoReturnType FUTURE_OF_BOOLEAN = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index c5be215b9ee..53f0e5098d1 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.mapper.processor.dao; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; +import static 
com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; @@ -61,7 +62,13 @@ public DaoUpdateMethodGenerator( protected Set getSupportedReturnTypes() { return ImmutableSet.of( - VOID, FUTURE_OF_VOID, RESULT_SET, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, FUTURE_OF_BOOLEAN); + VOID, + FUTURE_OF_VOID, + RESULT_SET, + BOUND_STATEMENT, + FUTURE_OF_ASYNC_RESULT_SET, + BOOLEAN, + FUTURE_OF_BOOLEAN); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index 0923fe8fcae..522edf0ac0a 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -64,6 +64,12 @@ public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFi methodBuilder.addStatement("return execute(boundStatement)"); } }, + BOUND_STATEMENT(false) { + @Override + public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + methodBuilder.addStatement("return boundStatement"); + } + }, PAGING_ITERABLE(false) { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index f27f4851a04..fb865253aff 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; @@ -55,6 +56,7 @@ public class DefaultDaoReturnTypeParser implements DaoReturnTypeParser { .put(Long.class, DaoReturnType.LONG) .put(Row.class, DaoReturnType.ROW) .put(ResultSet.class, DaoReturnType.RESULT_SET) + .put(BoundStatement.class, DaoReturnType.BOUND_STATEMENT) .build(); /** diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java index 52688dca0d6..c915c8a5666 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java @@ -105,7 +105,7 @@ public static Object[][] invalidSignatures() { }, { "Delete methods must return one of [VOID, FUTURE_OF_VOID, BOOLEAN, FUTURE_OF_BOOLEAN, " - + "RESULT_SET, FUTURE_OF_ASYNC_RESULT_SET]", + + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET]", MethodSpec.methodBuilder("delete") 
.addAnnotation(Delete.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java index 5f986c335fa..7f1bf078e46 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java @@ -57,7 +57,8 @@ public static Object[][] invalidSignatures() { }, { "Insert methods must return one of [VOID, FUTURE_OF_VOID, ENTITY, FUTURE_OF_ENTITY, " - + "OPTIONAL_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, FUTURE_OF_ASYNC_RESULT_SET]", + + "OPTIONAL_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, BOUND_STATEMENT, " + + "FUTURE_OF_ASYNC_RESULT_SET]", MethodSpec.methodBuilder("insert") .addAnnotation(Insert.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java index 52e8c5fdca8..ab2c0ce2458 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java @@ -42,7 +42,7 @@ public static Object[][] invalidSignatures() { return new Object[][] { { "Invalid return type: Query methods must return one of [VOID, BOOLEAN, LONG, ROW, " - + "ENTITY, OPTIONAL_ENTITY, RESULT_SET, PAGING_ITERABLE, FUTURE_OF_VOID, " + + "ENTITY, OPTIONAL_ENTITY, RESULT_SET, BOUND_STATEMENT, 
PAGING_ITERABLE, FUTURE_OF_VOID, " + "FUTURE_OF_BOOLEAN, FUTURE_OF_LONG, FUTURE_OF_ROW, FUTURE_OF_ENTITY, " + "FUTURE_OF_OPTIONAL_ENTITY, FUTURE_OF_ASYNC_RESULT_SET, " + "FUTURE_OF_ASYNC_PAGING_ITERABLE]", diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java index 69a03d1b418..013d17a0403 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java @@ -67,7 +67,7 @@ public static Object[][] invalidSignatures() { }, { "Invalid return type: Update methods must return one of [VOID, FUTURE_OF_VOID, " - + "RESULT_SET, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, FUTURE_OF_BOOLEAN]", + + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, FUTURE_OF_BOOLEAN]", MethodSpec.methodBuilder("update") .addAnnotation(UPDATE_ANNOTATION) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java index ecdd06a6fd8..b16cd06b4db 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; @@ -91,6 +92,12 @@ * 
ResultSet deleteIfDescriptionMatches(UUID productId, String expectedDescription); * // if the condition fails, the result set will contain columns '[applied]' and 'description' * + *

    • a {@link BoundStatement}. This is intended for queries where you will execute this + * statement later or in a batch. + *
      + * @Delete
      + * BoundStatement delete(Product product);
      + *       
      *
    • a {@link CompletionStage} or {@link CompletableFuture} of any of the above. The method will * execute the query asynchronously. Note that for result sets, you need to switch to {@link * AsyncResultSet}. diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java index 43caf972c8c..62b9a46dfcb 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.mapper.annotations; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; @@ -91,6 +92,12 @@ * @Insert * ResultSet save(Product product); * + *
    • a {@link BoundStatement} This is intended for cases where you intend to execute this + * statement later or in a batch: + *
      + * @Insert
      + * BoundStatement save(Product product);
      + *      
      *
    • a {@link CompletionStage} or {@link CompletableFuture} of any of the above. The mapper will * execute the query asynchronously. *
      diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java
      index 7f4d246aa17..3c89453d4a4 100644
      --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java
      +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java
      @@ -19,6 +19,7 @@
       import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable;
       import com.datastax.oss.driver.api.core.PagingIterable;
       import com.datastax.oss.driver.api.core.cql.AsyncResultSet;
      +import com.datastax.oss.driver.api.core.cql.BoundStatement;
       import com.datastax.oss.driver.api.core.cql.ResultSet;
       import com.datastax.oss.driver.api.core.cql.Row;
       import com.datastax.oss.driver.api.core.session.Session;
      @@ -83,6 +84,8 @@
        *   
    • an {@link Optional} of an entity class. The method will extract the first row and convert * it, or return {@code Optional.empty()} if the result set is empty. *
    • a {@link ResultSet}. The method will return the raw query result, without any conversion. + *
    • a {@link BoundStatement}. This is intended for cases where you intend to execute this + * statement later or in a batch: *
    • a {@link PagingIterable}. The method will convert each row into an entity instance. *
    • a {@link CompletionStage} or {@link CompletableFuture} of any of the above. The method will * execute the query asynchronously. Note that for result sets and iterables, you need to diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java index f16b87a97a7..759723346a8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; @@ -46,8 +47,8 @@ * *

      Parameters

      * - * The first parameter must be an entity instance. All of its non-PK properties will be interpreted - * as values to update. + *

      The first parameter must be an entity instance. All of its non-PK properties will be + * interpreted as values to update. * *

        *
      • If {@link #customWhereClause()} is empty, the mapper defaults to an update by primary key @@ -85,7 +86,7 @@ * *

        Return type

        * - * The method can return: + *

        The method can return: * *

          *
        • {@code void}. @@ -103,6 +104,12 @@ * ResultSet updateIfDescriptionMatches(Product product, String expectedDescription); * // if the condition fails, the result set will contain columns '[applied]' and 'description' *
    • + *
    • a {@link BoundStatement}. This is intended for queries where you will execute this + * statement later or in a batch: + *
      + * @Update
      + * BoundStatement update(Product product);
      + *      
      *
    • a {@link CompletionStage} or {@link CompletableFuture} of any of the above. The mapper will * execute the query asynchronously. Note that for result sets, you need to switch to the * asynchronous equivalent {@link AsyncResultSet}. @@ -120,7 +127,7 @@ * *

      Target keyspace and table

      * - * If a keyspace was specified when creating the DAO (see {@link DaoFactory}), then the generated + *

      If a keyspace was specified when creating the DAO (see {@link DaoFactory}), then the generated * query targets that keyspace. Otherwise, it doesn't specify a keyspace, and will only work if the * mapper was built from a {@link Session} that has a {@linkplain * SessionBuilder#withKeyspace(CqlIdentifier) default keyspace} set. From 46817dfa60a70b0c22f926600efe39cbe7891435 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 2 Dec 2019 11:40:14 +0100 Subject: [PATCH 171/979] Format XML files --- core-shaded/pom.xml | 49 +++---------------- core-shaded/src/assembly/shaded-jar.xml | 7 ++- core/pom.xml | 18 ++----- core/src/test/resources/logback-test.xml | 23 ++++----- distribution/pom.xml | 15 ++---- distribution/src/assembly/binary-tarball.xml | 22 ++------- examples/pom.xml | 36 ++------------ examples/src/main/resources/logback.xml | 29 +++++------ integration-tests/pom.xml | 12 +---- .../src/test/resources/logback-test.xml | 3 +- mapper-processor/pom.xml | 6 +-- .../src/test/resources/logback-test.xml | 21 ++++---- mapper-runtime/pom.xml | 16 ++---- pom.xml | 19 ++----- query-builder/pom.xml | 16 ++---- test-infra/pom.xml | 12 +---- 16 files changed, 77 insertions(+), 227 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 1285dc0c46c..7b86e3f1032 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -18,17 +18,13 @@ --> 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-core-shaded - DataStax Java driver for Apache Cassandra(R) - core with shaded deps - - @@ -176,10 +170,7 @@ Exclude leftovers from the shading phase (this could also be done with a resource transformer by the shade plugin itself, but this way is more flexible). 
--> - - META-INF/maven/com.datastax.oss/java-driver-core/**, - META-INF/maven/io.netty/**, - + META-INF/maven/com.datastax.oss/java-driver-core/**, META-INF/maven/io.netty/**, @@ -213,9 +204,7 @@ ${project.build.directory}/shaded-sources - - com.datastax.oss.driver.internal:com.datastax.oss.driver.shaded - + com.datastax.oss.driver.internal:com.datastax.oss.driver.shaded - !com.datastax.oss.driver.shaded.netty.*, + -->!com.datastax.oss.driver.shaded.netty.*, - !net.jcip.annotations.*, - !edu.umd.cs.findbugs.annotations.*, + -->!net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, - jnr.*;resolution:=optional, + -->jnr.*;resolution:=optional, - !com.google.protobuf.*, - !com.jcraft.jzlib.*, - !com.ning.compress.*, - !lzma.sdk.*, - !net.jpountz.xxhash.*, - !org.bouncycastle.*, - !org.conscrypt.*, - !org.apache.commons.logging.*, - !org.apache.log4j.*, - !org.apache.logging.log4j.*, - !org.eclipse.jetty.*, - !org.jboss.marshalling.*, - !sun.misc.*, - !sun.security.*, - !com.oracle.svm.core.annotate.*, - * + -->!com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*, * - - com.datastax.oss.driver.api.core.*, - com.datastax.oss.driver.internal.core.*, - com.datastax.oss.driver.shaded.netty.*, - + com.datastax.oss.driver.api.core.*, com.datastax.oss.driver.internal.core.*, com.datastax.oss.driver.shaded.netty.*, true diff --git a/core-shaded/src/assembly/shaded-jar.xml b/core-shaded/src/assembly/shaded-jar.xml index 3a735f36d2a..a7fffda6b65 100644 --- a/core-shaded/src/assembly/shaded-jar.xml +++ b/core-shaded/src/assembly/shaded-jar.xml @@ -1,3 +1,4 @@ + - + shaded-jar jar @@ -41,4 +40,4 @@ pom.xml - \ No newline at end of file + diff --git a/core/pom.xml b/core/pom.xml index 
5197d177ee2..4beb95b9b9d 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-core bundle - DataStax Java driver for Apache Cassandra(R) - core - com.datastax.oss @@ -130,7 +127,6 @@ test - @@ -197,9 +193,7 @@ - !net.jcip.annotations.*, - !edu.umd.cs.findbugs.annotations.*, + -->!net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, - jnr.*;resolution:=optional, - * + -->jnr.*;resolution:=optional, * - - com.datastax.oss.driver.*.core.* - + com.datastax.oss.driver.*.core.* diff --git a/core/src/test/resources/logback-test.xml b/core/src/test/resources/logback-test.xml index 620eccb1c0c..1aa52f9527d 100644 --- a/core/src/test/resources/logback-test.xml +++ b/core/src/test/resources/logback-test.xml @@ -1,3 +1,4 @@ + - - - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - - \ No newline at end of file + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + diff --git a/distribution/pom.xml b/distribution/pom.xml index 1c42435eda1..983b1e049aa 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-distribution jar - DataStax Java driver for Apache Cassandra(R) - binary distribution - - + binary-tarball tar.gz true - - true @@ -50,7 +47,6 @@ - true @@ -74,7 +70,6 @@ - true @@ -85,24 +80,19 @@ false sources - ${module.artifactId}-${module.version}-src.zip - + ${module.artifactId}-${module.version}-src.zip src * - - - target/apidocs apidocs - .. . @@ -111,23 +101,17 @@ LICENSE* - ../changelog - ../faq - ../manual - ../upgrade_guide - - diff --git a/examples/pom.xml b/examples/pom.xml index e34ad950790..3ac20b800ff 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - java-driver-parent com.datastax.oss 4.4.0-SNAPSHOT - java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
- A collection of examples to demonstrate DataStax Java Driver for Apache - Cassandra(R). - - - + A collection of examples to demonstrate DataStax Java Driver for Apache Cassandra(R). - - ${project.groupId} java-driver-core @@ -47,113 +40,91 @@ ${project.groupId} java-driver-mapper-runtime - com.fasterxml.jackson.core jackson-databind - com.fasterxml.jackson.jaxrs jackson-jaxrs-base ${jackson.version} true - com.fasterxml.jackson.jaxrs jackson-jaxrs-json-provider ${jackson.version} true - - javax.json javax.json-api true - org.glassfish javax.json true runtime - - javax.ws.rs javax.ws.rs-api true - - org.glassfish.jersey.core jersey-server true - org.glassfish.jersey.media jersey-media-json-jackson true - org.glassfish.jersey.containers jersey-container-jdk-http true - - org.glassfish.hk2 hk2-api true - org.glassfish.jersey.inject jersey-hk2 true - - javax.inject javax.inject true - javax.annotation javax.annotation-api true - ch.qos.logback logback-classic runtime - at.favre.lib bcrypt 0.8.0 - - @@ -210,5 +181,4 @@ - - \ No newline at end of file + diff --git a/examples/src/main/resources/logback.xml b/examples/src/main/resources/logback.xml index db2d9e5bcb4..d8084383a99 100644 --- a/examples/src/main/resources/logback.xml +++ b/examples/src/main/resources/logback.xml @@ -1,3 +1,4 @@ + - - - - - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - - - - - \ No newline at end of file + + diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 6582ffa26b9..1dd5a102272 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-integration-tests jar - DataStax Java driver for Apache Cassandra(R) - integration tests - false ${skipITs} ${skipITs} ${skipITs} - com.datastax.oss @@ -135,7 +131,6 @@ test - @@ -227,10 +222,7 @@ verify - - com.datastax.oss.driver.categories.ParallelizableTests, - 
com.datastax.oss.driver.categories.IsolatedTests - + com.datastax.oss.driver.categories.ParallelizableTests, com.datastax.oss.driver.categories.IsolatedTests true serial ${skipSerialITs} diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index 7a4c0da88a1..6dfad81de3e 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -1,3 +1,4 @@ + - \ No newline at end of file + diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f7643a26b9c..be6d1e016e8 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-mapper-processor - DataStax Java driver for Apache Cassandra(R) - object mapper processor - com.datastax.oss @@ -80,7 +77,6 @@ test - diff --git a/mapper-processor/src/test/resources/logback-test.xml b/mapper-processor/src/test/resources/logback-test.xml index e1309c63da5..4b5aea007a9 100644 --- a/mapper-processor/src/test/resources/logback-test.xml +++ b/mapper-processor/src/test/resources/logback-test.xml @@ -1,3 +1,4 @@ + - - - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - \ No newline at end of file + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index e3812d10584..79d9b5e99e8 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-mapper-runtime bundle - DataStax Java driver for Apache Cassandra(R) - object mapper runtime - ${project.groupId} @@ -47,7 +44,6 @@ spotbugs-annotations - @@ -71,14 +67,8 @@ Don't include the packages below because they aren't OSGi bundles, and the driver can live without them. 
--> - - !net.jcip.annotations.*, - !edu.umd.cs.findbugs.annotations.*, - * - - - com.datastax.oss.driver.*.mapper.* - + !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, * + com.datastax.oss.driver.*.mapper.* diff --git a/pom.xml b/pom.xml index 09372d300f7..09f64bf97a8 100644 --- a/pom.xml +++ b/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT pom - DataStax Java driver for Apache Cassandra(R) - - A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language - version 3 (CQL3) and Cassandra's native protocol versions 3 and above. - + A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. https://github.com/datastax/java-driver 2017 - core core-shaded @@ -42,7 +37,6 @@ distribution examples - true UTF-8 @@ -72,7 +66,6 @@ 2.10.0 2.10.0 - @@ -314,7 +307,6 @@ - @@ -447,9 +439,7 @@ -Xep:FutureReturnValueIgnored:OFF -Xep:MockitoInternalUsage:OFF - - -XepExcludedPaths:.*/target/(?:generated-sources|generated-test-sources)/.* - + -XepExcludedPaths:.*/target/(?:generated-sources|generated-test-sources)/.* true true @@ -515,8 +505,7 @@ Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and -limitations under the License.]]> - +limitations under the License.]]> src/**/*.java src/**/*.xml diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 50dd18e7f42..82802503f5e 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-query-builder bundle - DataStax Java driver for Apache Cassandra(R) - query builder - com.datastax.oss @@ -62,7 +59,6 @@ test - @@ -71,14 +67,8 @@ com.datastax.oss.driver.querybuilder - - !net.jcip.annotations.*, - !edu.umd.cs.findbugs.annotations.*, - * - - - com.datastax.oss.driver.*.querybuilder.* - + !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, * + com.datastax.oss.driver.*.querybuilder.* diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0e1aabfc5cd..0a84a8add4a 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -1,3 +1,4 @@ + 4.0.0 - com.datastax.oss java-driver-parent 4.4.0-SNAPSHOT - java-driver-test-infra bundle - DataStax Java driver for Apache Cassandra(R) - test infrastructure tools - com.datastax.oss @@ -57,7 +54,6 @@ commons-exec - @@ -68,11 +64,7 @@ com.datastax.oss.driver.testinfra * - - com.datastax.oss.driver.*.testinfra.*, - com.datastax.oss.driver.assertions, - com.datastax.oss.driver.categories - + com.datastax.oss.driver.*.testinfra.*, com.datastax.oss.driver.assertions, com.datastax.oss.driver.categories From 7d88097a880089c8c740566518f1815dd78d9ae0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 11:05:26 +0100 Subject: [PATCH 172/979] JAVA-2557: Accept any negative length when decoding elements of tuples and UDTs (#1368) --- changelog/README.md | 3 +- .../internal/core/type/codec/TupleCodec.java | 2 +- .../internal/core/type/codec/UdtCodec.java | 2 +- .../core/type/codec/TupleCodecTest.java | 20 +++++++++++ .../core/type/codec/UdtCodecTest.java | 33 +++++++++++++++++++ 5 files changed, 
57 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 805ce207b68..e4d26b3d404 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,9 @@ -### 4.4.0 (in progress) +### 4.3.1 (in progress) +- [bug] JAVA-2557: Accept any negative length when decoding elements of tuples and UDTs ### 4.3.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodec.java index 2a900ce7a10..ad008cda7ff 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodec.java @@ -109,7 +109,7 @@ public TupleValue decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion pr } int elementSize = input.getInt(); ByteBuffer element; - if (elementSize == -1) { + if (elementSize < 0) { element = null; } else { element = input.slice(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java index 2e2df95ad33..f515c6b117f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java @@ -112,7 +112,7 @@ public UdtValue decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion prot } int elementSize = input.getInt(); ByteBuffer element; - if (elementSize == -1) { + if (elementSize < 0) { element = null; } else { element = input.slice(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodecTest.java index f7d609ea967..ab95dd49b32 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodecTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TupleCodecTest.java @@ -122,6 +122,26 @@ public void should_decode_tuple() { verify(textCodec).decode(Bytes.fromHexString("0x61"), ProtocolVersion.DEFAULT); } + /** Test for JAVA-2557. Ensures that the codec can decode null fields with any negative length. */ + @Test + public void should_decode_negative_element_length_as_null_field() { + TupleValue tuple = + decode( + "0x" + + "ffffffff" // field1 has length -1 + + "fffffffe" // field2 has length -2 + + "80000000" // field3 has length Integer.MIN_VALUE (-2147483648) + ); + + assertThat(tuple.isNull(0)).isTrue(); + assertThat(tuple.isNull(1)).isTrue(); + assertThat(tuple.isNull(2)).isTrue(); + + verifyZeroInteractions(intCodec); + verifyZeroInteractions(doubleCodec); + verifyZeroInteractions(textCodec); + } + @Test public void should_format_null_tuple() { assertThat(format(null)).isEqualTo("NULL"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java index 5947cfffef3..a3bd4fe7801 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java @@ -131,6 +131,39 @@ public void should_decode_udt() { verify(textCodec).decode(Bytes.fromHexString("0x61"), ProtocolVersion.DEFAULT); } + /** Test for JAVA-2557. Ensures that the codec can decode null fields with any negative length. 
*/ + @Test + public void should_decode_negative_element_length_as_null_field() { + UdtValue udt = + decode( + "0x" + + "ffffffff" // field1 has length -1 + + "fffffffe" // field2 has length -2 + + "80000000" // field3 has length Integer.MIN_VALUE (-2147483648) + ); + + assertThat(udt.isNull(0)).isTrue(); + assertThat(udt.isNull(1)).isTrue(); + assertThat(udt.isNull(2)).isTrue(); + + verifyZeroInteractions(intCodec); + verifyZeroInteractions(doubleCodec); + verifyZeroInteractions(textCodec); + } + + @Test + public void should_decode_absent_element_as_null_field() { + UdtValue udt = decode("0x"); + + assertThat(udt.isNull(0)).isTrue(); + assertThat(udt.isNull(1)).isTrue(); + assertThat(udt.isNull(2)).isTrue(); + + verifyZeroInteractions(intCodec); + verifyZeroInteractions(doubleCodec); + verifyZeroInteractions(textCodec); + } + @Test public void should_format_null_udt() { assertThat(format(null)).isEqualTo("NULL"); From 5a58acae4aa83336b25daf6f7e06bbc96a94f3df Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 11:14:25 +0100 Subject: [PATCH 173/979] Update version in docs --- README.md | 2 +- changelog/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b223bc8451b..c5dbd912bd2 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.3.0](https://github.com/datastax/java-driver/tree/4.3.0).* +[4.3.1](https://github.com/datastax/java-driver/tree/4.3.1).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), using exclusively Cassandra's binary protocol and Cassandra Query diff --git a/changelog/README.md b/changelog/README.md index e4d26b3d404..bab90535e64 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.3.1 (in progress) +### 4.3.1 - [bug] JAVA-2557: Accept any negative length when decoding elements of tuples and UDTs From d646982a347970b97504672954889257766e6875 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 11:19:22 +0100 Subject: [PATCH 174/979] [maven-release-plugin] prepare release 4.3.1 --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 46ad7345713..e98c1812452 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index 5197d177ee2..bfd41893b7e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index 1c42435eda1..fa61361bf35 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 2aa9dca1a77..9fcea94c9fa 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ 
java-driver-parent com.datastax.oss - 4.4.0-SNAPSHOT + 4.3.1 java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 3f313e6328b..da5a32cf553 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a12ab7b5ad0..aeb6c9185b0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index e33d0349076..65f8b6f05e9 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 7232c959ce9..4e4b6e65fb7 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 pom DataStax Java driver for Apache Cassandra(R) @@ -718,7 +718,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.3.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 0345f3d4bd0..71cb22ff700 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0e1aabfc5cd..53ea65b35e3 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.3.1 java-driver-test-infra From 3ede67d127142035f2b4f4a68f51a6e012e0807f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 11:19:34 +0100 Subject: [PATCH 175/979] 
[maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 11 insertions(+), 11 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index e98c1812452..b9ad0f1fefa 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -22,7 +22,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-core-shaded diff --git a/core/pom.xml b/core/pom.xml index bfd41893b7e..b6093aed38b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-core diff --git a/distribution/pom.xml b/distribution/pom.xml index fa61361bf35..b0c7e978860 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 9fcea94c9fa..044f914bd3d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.3.1 + 4.3.2-SNAPSHOT java-driver-examples diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index da5a32cf553..6f2c03a5881 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-integration-tests diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index aeb6c9185b0..f727f02fae8 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-mapper-processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 65f8b6f05e9..2665fa9ae3f 100644 --- 
a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-mapper-runtime diff --git a/pom.xml b/pom.xml index 4e4b6e65fb7..13b0646bf94 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) @@ -718,7 +718,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.3.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 71cb22ff700..de097f9bf3a 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-query-builder diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 53ea65b35e3..cb4bb1d0651 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.3.1 + 4.3.2-SNAPSHOT java-driver-test-infra From a69a43bdf1c8a835aeb83710307116effdf89801 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 16:01:58 +0100 Subject: [PATCH 176/979] JAVA-2556: Make ExecutionInfo compatible with any Request type (#1367) --- changelog/README.md | 1 + .../oss/driver/api/core/cql/ExecutionInfo.java | 15 ++++++++++++++- .../driver/internal/core/cql/Conversions.java | 2 +- .../core/cql/DefaultAsyncResultSet.java | 2 +- .../internal/core/cql/DefaultExecutionInfo.java | 17 +++++++++++++---- .../core/AsyncPagingIterableWrapperTest.java | 2 +- .../core/cql/DefaultAsyncResultSetTest.java | 2 +- .../oss/driver/core/session/ExceptionIT.java | 3 +-- 8 files changed, 33 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 2589aae0a67..2412425e63a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [bug] JAVA-2556: 
Make ExecutionInfo compatible with any Request type - [new feature] JAVA-2532: Add BoundStatement ReturnType for insert, update, and delete DAO methods - [improvement] JAVA-2107: Add XML formatting plugin - [bug] JAVA-2527: Allow AllNodesFailedException to accept more than one error per node diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java index 5187966b720..5ba12decbdf 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; @@ -45,8 +46,20 @@ */ public interface ExecutionInfo { - /** The statement that was executed. */ + /** @return The {@link Request} that was executed. */ @NonNull + default Request getRequest() { + return getStatement(); + } + + /** + * @return The {@link Request} that was executed, if it can be cast to {@link Statement}. + * @deprecated Use {@link #getRequest()} instead. + * @throws ClassCastException If the request that was executed cannot be cast to {@link + * Statement}. 
+ */ + @NonNull + @Deprecated Statement getStatement(); /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 4826e31e9d6..e0f470e9e54 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -323,7 +323,7 @@ public static AsyncResultSet toResultSet( InternalDriverContext context) { if (result instanceof Rows) { Rows rows = (Rows) result; - Statement statement = executionInfo.getStatement(); + Statement statement = (Statement) executionInfo.getRequest(); ColumnDefinitions columnDefinitions = getResultDefinitions(rows, statement, context); return new DefaultAsyncResultSet( columnDefinitions, executionInfo, rows.getData(), session, context); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSet.java index b2630006b9a..f0b8d5ee4d9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSet.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSet.java @@ -101,7 +101,7 @@ public CompletionStage fetchNextPage() throws IllegalStateExcept throw new IllegalStateException( "No next page. 
Use #hasMorePages before calling this method to avoid this error."); } - Statement statement = executionInfo.getStatement(); + Statement statement = (Statement) executionInfo.getRequest(); LOG.trace("Fetching next page for {}", statement); Statement nextStatement = statement.copy(nextState); return session.executeAsync(nextStatement); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java index bf542923405..0d3bd973be3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.cql.QueryTrace; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -37,7 +38,7 @@ @Immutable public class DefaultExecutionInfo implements ExecutionInfo { - private final Statement statement; + private final Request request; private final Node coordinator; private final int speculativeExecutionCount; private final int successfulExecutionIndex; @@ -54,7 +55,7 @@ public class DefaultExecutionInfo implements ExecutionInfo { private final DriverExecutionProfile executionProfile; public DefaultExecutionInfo( - Statement statement, + Request request, Node coordinator, int speculativeExecutionCount, int successfulExecutionIndex, @@ -65,7 +66,8 @@ public DefaultExecutionInfo( DefaultSession session, InternalDriverContext context, DriverExecutionProfile executionProfile) { - this.statement = statement; + + this.request = request; 
this.coordinator = coordinator; this.speculativeExecutionCount = speculativeExecutionCount; this.successfulExecutionIndex = successfulExecutionIndex; @@ -86,8 +88,15 @@ public DefaultExecutionInfo( @NonNull @Override + @Deprecated public Statement getStatement() { - return statement; + return (Statement) request; + } + + @NonNull + @Override + public Request getRequest() { + return request; } @Nullable diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapperTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapperTest.java index 554e23c7720..7513a18d1f7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapperTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/AsyncPagingIterableWrapperTest.java @@ -127,7 +127,7 @@ public void should_share_iteration_progress_with_wrapped_result_set() { private ExecutionInfo mockExecutionInfo() { ExecutionInfo executionInfo = mock(ExecutionInfo.class); - when(executionInfo.getStatement()).thenAnswer(invocation -> statement); + when(executionInfo.getRequest()).thenAnswer(invocation -> statement); return executionInfo; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSetTest.java index ebd9a6d0f0d..31a13eeb3d8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSetTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/DefaultAsyncResultSetTest.java @@ -58,7 +58,7 @@ public class DefaultAsyncResultSetTest { public void setup() { MockitoAnnotations.initMocks(this); - when(executionInfo.getStatement()).thenAnswer(invocation -> statement); + when(executionInfo.getRequest()).thenAnswer(invocation -> statement); when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); 
when(context.getProtocolVersion()).thenReturn(DefaultProtocolVersion.DEFAULT); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java index 52802ca98cc..abb939e0607 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ExceptionIT.java @@ -96,8 +96,7 @@ public void should_expose_execution_info_on_exceptions() { assertThat(info).isNotNull(); assertThat(info.getCoordinator().getEndPoint().resolve()) .isEqualTo(SIMULACRON_RULE.cluster().node(1).inetSocketAddress()); - assertThat(((SimpleStatement) info.getStatement()).getQuery()) - .isEqualTo(QUERY_STRING); + assertThat(((SimpleStatement) info.getRequest()).getQuery()).isEqualTo(QUERY_STRING); // specex disabled => the initial execution completed the response assertThat(info.getSpeculativeExecutionCount()).isEqualTo(0); From 15d264e59ac98d34680bbed58ccc60690bafd779 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Wed, 18 Dec 2019 15:36:13 -0600 Subject: [PATCH 177/979] JAVA-2523: Disallow programmatic SSL methods with cloud (#1373) --- .../api/core/session/SessionBuilder.java | 8 ++++ .../oss/driver/api/core/cloud/CloudIT.java | 46 +++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 3f3a2251474..a5776bdf1b5 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -85,6 +85,7 @@ public abstract class SessionBuilder { protected ProgrammaticArguments.Builder programmaticArgumentsBuilder = ProgrammaticArguments.builder(); + private boolean sslConfigured = false; /** * 
Sets the configuration loader to use. @@ -275,6 +276,7 @@ public SelfT withAuthCredentials(@NonNull String username, @NonNull String passw */ @NonNull public SelfT withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { + this.sslConfigured = true; this.programmaticArgumentsBuilder.withSslEngineFactory(sslEngineFactory); return self; } @@ -523,6 +525,12 @@ protected final CompletionStage buildDefaultSessionAsync() { throw new IllegalStateException( "Can't use withCloudSecureConnectBundle and addContactPoint(s). They are mutually exclusive."); } + String configuredSSLFactory = + defaultConfig.getString(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, null); + if (sslConfigured || configuredSSLFactory != null) { + throw new IllegalStateException( + "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. They are mutually exclusive."); + } CloudConfig cloudConfig = new CloudConfigFactory().createCloudConfig(cloudConfigInputStream.call()); addContactEndPoints(cloudConfig.getEndPoints()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java index 37bd772360b..ce5429a68c9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -28,7 +28,9 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.internal.core.ssl.DefaultSslEngineFactory; import com.github.tomakehurst.wiremock.junit.WireMockRule; import java.io.IOException; import java.io.InputStream; @@ -36,6 +38,8 @@ import java.net.URL; 
import java.nio.file.Files; import java.nio.file.Path; +import java.security.NoSuchAlgorithmException; +import javax.net.ssl.SSLContext; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; @@ -219,4 +223,46 @@ public void should_error_when_contact_points_and_secure_bundle_used() { .hasMessage( "Can't use withCloudSecureConnectBundle and addContactPoint(s). They are mutually exclusive."); } + + @Test + public void should_error_when_ssl_context_and_secure_bundle_used_programatic() + throws NoSuchAlgorithmException { + // given + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + CqlSessionBuilder builder = + CqlSession.builder() + .withCloudSecureConnectBundle(bundle) + .withAuthCredentials("cassandra", "cassandra") + .withSslContext(SSLContext.getInstance("SSL")); + // then + assertThatThrownBy(() -> builder.build()) + .isInstanceOf(IllegalStateException.class) + .hasMessage( + "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. They are mutually exclusive."); + } + + @Test + public void should_error_when_ssl_context_and_secure_bundle_used_config() + throws NoSuchAlgorithmException { + // given + + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withBoolean(DefaultDriverOption.RECONNECT_ON_INIT, true) + .withClass(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, DefaultSslEngineFactory.class) + .build(); + + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + CqlSessionBuilder builder = + CqlSession.builder() + .withConfigLoader(loader) + .withCloudSecureConnectBundle(bundle) + .withAuthCredentials("cassandra", "cassandra"); + + // then + assertThatThrownBy(() -> builder.build()) + .isInstanceOf(IllegalStateException.class) + .hasMessage( + "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. 
They are mutually exclusive."); + } } From f4eed33caab2d692f9d2e2c119a762af2724c0f8 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Thu, 19 Dec 2019 14:15:03 -0600 Subject: [PATCH 178/979] JAVA-2533: Refine connection errors for cloud (#1374) --- .../core/config/cloud/CloudConfigFactory.java | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java index 9caffd95926..d0c3368f500 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java @@ -33,9 +33,11 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.URL; +import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.security.KeyStore; @@ -228,12 +230,22 @@ protected TrustManagerFactory createTrustManagerFactory( @NonNull protected BufferedReader fetchProxyMetadata( @NonNull URL metadataServiceUrl, @NonNull SSLContext sslContext) throws IOException { - HttpsURLConnection connection = (HttpsURLConnection) metadataServiceUrl.openConnection(); - connection.setSSLSocketFactory(sslContext.getSocketFactory()); - connection.setRequestMethod("GET"); - connection.setRequestProperty("host", "localhost"); - return new BufferedReader( - new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8)); + try { + HttpsURLConnection connection = (HttpsURLConnection) metadataServiceUrl.openConnection(); + connection.setSSLSocketFactory(sslContext.getSocketFactory()); + connection.setRequestMethod("GET"); + 
connection.setRequestProperty("host", "localhost"); + return new BufferedReader( + new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8)); + } catch (ConnectException e) { + throw new IllegalStateException( + "Unable to connect to cloud metadata service. Please make sure your cluster is not parked or terminated", + e); + } catch (UnknownHostException e) { + throw new IllegalStateException( + "Unable to resolve host for cloud metadata service. Please make sure your cluster is not terminated", + e); + } } @NonNull From e57116369f27231c71e7b6ddfda222929591f7fc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Dec 2019 12:36:08 -0300 Subject: [PATCH 179/979] Mention #getAllErrors() instead of #getErrors() in javadocs of AllNodesFailedException --- .../datastax/oss/driver/api/core/AllNodesFailedException.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java index 762da81092f..324e7756fb6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java @@ -31,7 +31,7 @@ /** * Thrown when a query failed on all the coordinators it was tried on. This exception may wrap - * multiple errors, use {@link #getErrors()} to inspect the individual problem on each node. + * multiple errors, use {@link #getAllErrors()} to inspect individual problems on each node. 
*/ public class AllNodesFailedException extends DriverException { From f803aa0210b14e923e83481915e3cd27de4a9b09 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Wed, 8 Jan 2020 10:01:14 -0800 Subject: [PATCH 180/979] JAVA-2473: Don't reconnect control connection if protocol is downgraded (#1369) Motivation: In theory, the control connection is an implementation detail of the topology monitor and metadata manager. If those components are overridden with custom versions that use another way to get the data, the rest of the driver should be able to function without initializing the control connection at all. This is currently not the case: DefaultSession references the control connection, in order to reconnect if the protocol version was downgraded after the initial connection (JAVA-1295). Modifications: Do not reconnect the control connection in that case; it's not really needed because none of the control queries use any protocol-dependent feature. This also allows us to remove MetadataManager.firstSchemaRefreshFuture() and slightly simplify session initialization. Result: DefaultSession does not reference ControlConnection. If the protocol version is downgraded, the control connection keeps using the initial version. It might switch to the "correct" version later if it reconnects to another node. 
Co-authored-by: Alexandre Dutra --- changelog/README.md | 1 + .../core/metadata/MetadataManager.java | 8 ---- .../internal/core/session/DefaultSession.java | 47 +++++++------------ .../core/session/DefaultSessionPoolsTest.java | 2 - 4 files changed, 19 insertions(+), 39 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 2412425e63a..4199c6f8f0b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2473: Don't reconnect control connection if protocol is downgraded - [bug] JAVA-2556: Make ExecutionInfo compatible with any Request type - [new feature] JAVA-2532: Add BoundStatement ReturnType for insert, update, and delete DAO methods - [improvement] JAVA-2107: Add XML formatting plugin diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 0dc219fd058..2c47ab12749 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -246,14 +246,6 @@ public CompletionStage setSchemaEnabled(Boolean newValue) { } } - /** - * Returns a future that completes after the first schema refresh attempt, whether that attempt - * succeeded or not (we wait for that refresh at init, but if it fails it's not fatal). 
- */ - public CompletionStage firstSchemaRefreshFuture() { - return singleThreaded.firstSchemaRefreshFuture; - } - @NonNull @Override public CompletionStage closeFuture() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 5b636089c16..4c35e7a79cd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -32,7 +32,6 @@ import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.LifecycleListener; -import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; import com.datastax.oss.driver.internal.core.metadata.NodeStateManager; @@ -362,7 +361,6 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { try { boolean protocolWasForced = context.getConfig().getDefaultProfile().isDefined(DefaultDriverOption.PROTOCOL_VERSION); - boolean needSchemaRefresh = true; if (!protocolWasForced) { ProtocolVersion currentVersion = context.getProtocolVersion(); ProtocolVersion bestVersion = @@ -378,36 +376,27 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { bestVersion); context.getChannelFactory().setProtocolVersion(bestVersion); - // If the control connection has already initialized, force a reconnect to use the new - // version. 
- // (note: it might not have initialized yet if there is a custom TopologyMonitor) - ControlConnection controlConnection = context.getControlConnection(); - if (controlConnection.isInit()) { - controlConnection.reconnectNow(); - // Reconnection already triggers a full schema refresh - needSchemaRefresh = false; - } + // Note that, with the default topology monitor, the control connection is already + // connected with currentVersion at this point. This doesn't really matter because none + // of the control queries use any protocol-dependent feature. + // Keep going as-is, the control connection might switch to the "correct" version later + // if it reconnects to another node. } } - if (needSchemaRefresh) { - metadataManager - .refreshSchema(null, false, true) - .whenComplete( - (metadata, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, - "[{}] Unexpected error while refreshing schema during initialization, " - + "keeping previous version", - logPrefix, - error); - } - }); - } metadataManager - .firstSchemaRefreshFuture() - .thenAccept(v -> afterInitialSchemaRefresh(keyspace)); - + .refreshSchema(null, false, true) + .whenComplete( + (metadata, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema during initialization, " + + "keeping previous version", + logPrefix, + error); + } + afterInitialSchemaRefresh(keyspace); + }); } catch (Throwable throwable) { initFuture.completeExceptionally(throwable); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java index f80813efe72..7d2a66fab02 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java @@ -130,8 +130,6 @@ public void setup() { // 
Init sequence: when(metadataManager.refreshNodes()).thenReturn(CompletableFuture.completedFuture(null)); - when(metadataManager.firstSchemaRefreshFuture()) - .thenReturn(CompletableFuture.completedFuture(null)); when(metadataManager.refreshSchema(null, false, true)) .thenReturn(CompletableFuture.completedFuture(null)); when(context.getMetadataManager()).thenReturn(metadataManager); From c8e008dc03537d83d9d784373fb48db1740fb167 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 11:29:39 +0100 Subject: [PATCH 181/979] JAVA-2552: Invoke listeners on the calling thread in RequestHandlerTestHarness --- .../datastax/oss/driver/internal/core/cql/PoolBehavior.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java index 55594f46aed..8b1c719fc0c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java @@ -32,7 +32,7 @@ import io.netty.channel.ChannelFuture; import io.netty.channel.EventLoop; import io.netty.channel.socket.DefaultSocketChannelConfig; -import io.netty.util.concurrent.GlobalEventExecutor; +import io.netty.util.concurrent.ImmediateEventExecutor; import io.netty.util.concurrent.Promise; import java.util.concurrent.CompletableFuture; @@ -59,7 +59,7 @@ public PoolBehavior(Node node, boolean createChannel) { this.channel = mock(DriverChannel.class); EventLoop eventLoop = mock(EventLoop.class); ChannelConfig config = mock(DefaultSocketChannelConfig.class); - this.writePromise = GlobalEventExecutor.INSTANCE.newPromise(); + this.writePromise = ImmediateEventExecutor.INSTANCE.newPromise(); when(channel.write(any(Message.class), anyBoolean(), anyMap(), any(ResponseCallback.class))) .thenAnswer( invocation -> { @@ -110,7 +110,7 @@ public DriverChannel 
getChannel() { /** Mocks a follow-up request on the same channel. */ public void mockFollowupRequest(Class expectedMessage, Frame responseFrame) { - Promise writePromise2 = GlobalEventExecutor.INSTANCE.newPromise(); + Promise writePromise2 = ImmediateEventExecutor.INSTANCE.newPromise(); CompletableFuture callbackFuture2 = new CompletableFuture<>(); when(channel.write(any(expectedMessage), anyBoolean(), anyMap(), any(ResponseCallback.class))) .thenAnswer( From 1d04d8a6072c20559f17f9f4007ac3625815b0e2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 11:31:44 +0100 Subject: [PATCH 182/979] JAVA-2552: Revisit CapturingTimer feature in RequestHandlerTestHarness --- .../internal/core/util/concurrent/CapturingTimer.java | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CapturingTimer.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CapturingTimer.java index 3b2cda52b35..4e2ed6426ce 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CapturingTimer.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CapturingTimer.java @@ -16,8 +16,6 @@ package com.datastax.oss.driver.internal.core.util.concurrent; -import static org.assertj.core.api.Assertions.fail; - import io.netty.util.Timeout; import io.netty.util.Timer; import io.netty.util.TimerTask; @@ -51,12 +49,7 @@ public Timeout newTimeout(TimerTask task, long delay, TimeUnit unit) { * tell from the returned Timeout itself. 
*/ public CapturedTimeout getNextTimeout() { - try { - return timeoutQueue.poll(100, TimeUnit.MILLISECONDS); - } catch (InterruptedException ie) { - fail("Unexpected interruption", ie); - throw new AssertionError(); - } + return timeoutQueue.poll(); } @Override From fcb7ffdde6081a96b5ee6bd52d850a736a0e0ca8 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:04:10 -0300 Subject: [PATCH 183/979] JAVA-2561: Mark DefaultUdtValue and DefaultTupleValue as not thread safe (#1370) --- .../oss/driver/internal/core/data/DefaultTupleValue.java | 8 ++++++-- .../oss/driver/internal/core/data/DefaultUdtValue.java | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java index e24f5c54ac2..129f5e4a429 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultTupleValue.java @@ -29,9 +29,13 @@ import java.io.Serializable; import java.nio.ByteBuffer; import java.util.Objects; -import net.jcip.annotations.Immutable; +import net.jcip.annotations.NotThreadSafe; -@Immutable +/** + * Implementation note: contrary to most GettableBy* and SettableBy* implementations, this class is + * mutable. 
+ */ +@NotThreadSafe public class DefaultTupleValue implements TupleValue, Serializable { private static final long serialVersionUID = 1; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java index 24be5dd58ab..9d8ed828241 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java @@ -30,9 +30,13 @@ import java.io.Serializable; import java.nio.ByteBuffer; import java.util.Objects; -import net.jcip.annotations.Immutable; +import net.jcip.annotations.NotThreadSafe; -@Immutable +/** + * Implementation note: contrary to most GettableBy* and SettableBy* implementations, this class is + * mutable. + */ +@NotThreadSafe public class DefaultUdtValue implements UdtValue, Serializable { private static final long serialVersionUID = 1; From f593f467413bbdd5ccd4d999def31365627f5e4e Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Wed, 8 Jan 2020 10:06:17 -0800 Subject: [PATCH 184/979] JAVA-2442: Don't check for schema agreement twice when completing a DDL query (#1372) Motivation: When we process a SCHEMA_CHANGE response to a DDL query, we check for schema agreement, and then trigger a schema refresh. But the refresh itself also checks for agreement, so we end up doing it twice. Modifications: Only check for agreement during the refresh. Introduce a new RefreshSchemaResult type, so that we can surface the outcome of the agreement in addition to the new metadata. Remove the agreement check in CqlRequestHandler, use the value surfaced by the refresh instead. Result: Schema agreement runs only once. 
--- changelog/README.md | 1 + .../internal/core/cql/CqlRequestHandler.java | 36 +++-- .../core/metadata/MetadataManager.java | 132 +++++++++++------- .../internal/core/session/DefaultSession.java | 5 +- 4 files changed, 106 insertions(+), 68 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 4199c6f8f0b..5b40d960013 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2442: Don't check for schema agreement twice when completing a DDL query - [improvement] JAVA-2473: Don't reconnect control connection if protocol is downgraded - [bug] JAVA-2556: Make ExecutionInfo compatible with any Request type - [new feature] JAVA-2532: Add BoundStatement ReturnType for insert, update, and delete DAO methods diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 9bda9b8ce11..19354dd0473 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -598,26 +598,24 @@ public void onResponse(Frame responseFrame) { if (responseMessage instanceof SchemaChange) { SchemaChange schemaChange = (SchemaChange) responseMessage; context - .getTopologyMonitor() - .checkSchemaAgreement() - .thenCombine( - context - .getMetadataManager() - .refreshSchema(schemaChange.keyspace, false, false) - .exceptionally( - error -> { - Loggers.warnWithException( - LOG, - "[{}] Unexpected error while refreshing schema after DDL query, " - + "keeping previous version", - logPrefix, - error); - return null; - }), - (schemaInAgreement, metadata) -> schemaInAgreement) + .getMetadataManager() + .refreshSchema(schemaChange.keyspace, false, false) .whenComplete( - (schemaInAgreement, error) -> - setFinalResult(schemaChange, responseFrame, schemaInAgreement, this)); + 
(result, error) -> { + boolean schemaInAgreement; + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema after DDL query, " + + "keeping previous version", + logPrefix, + error); + schemaInAgreement = false; + } else { + schemaInAgreement = result.isSchemaInAgreement(); + } + setFinalResult(schemaChange, responseFrame, schemaInAgreement, this); + }); } else if (responseMessage instanceof SetKeyspace) { SetKeyspace setKeyspace = (SetKeyspace) responseMessage; session diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 2c47ab12749..7b36ae6fe4e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -221,15 +221,40 @@ public void removeNode(InetSocketAddress broadcastRpcAddress) { * @param flushNow bypass the debouncer and force an immediate refresh (used to avoid a delay at * startup) */ - public CompletionStage refreshSchema( + public CompletionStage refreshSchema( String keyspace, boolean evenIfDisabled, boolean flushNow) { - CompletableFuture future = new CompletableFuture<>(); + CompletableFuture future = new CompletableFuture<>(); RunOrSchedule.on( adminExecutor, () -> singleThreaded.refreshSchema(keyspace, evenIfDisabled, flushNow, future)); return future; } + public static class RefreshSchemaResult { + private final Metadata metadata; + private final boolean isSchemaInAgreement; + + public RefreshSchemaResult(Metadata metadata, boolean isSchemaInAgreement) { + this.metadata = metadata; + this.isSchemaInAgreement = isSchemaInAgreement; + } + + public RefreshSchemaResult(Metadata metadata) { + this( + metadata, + // This constructor is used in corner cases where agreement doesn't matter + true); + } + + public Metadata getMetadata() 
{ + return metadata; + } + + public boolean isSchemaInAgreement() { + return isSchemaInAgreement; + } + } + public boolean isSchemaEnabled() { return (schemaEnabledProgrammatically != null) ? schemaEnabledProgrammatically @@ -240,7 +265,7 @@ public CompletionStage setSchemaEnabled(Boolean newValue) { boolean wasEnabledBefore = isSchemaEnabled(); schemaEnabledProgrammatically = newValue; if (!wasEnabledBefore && isSchemaEnabled()) { - return refreshSchema(null, false, true); + return refreshSchema(null, false, true).thenApply(RefreshSchemaResult::getMetadata); } else { return CompletableFuture.completedFuture(metadata); } @@ -269,15 +294,16 @@ private class SingleThreaded { private final CompletableFuture closeFuture = new CompletableFuture<>(); private boolean closeWasCalled; private final CompletableFuture firstSchemaRefreshFuture = new CompletableFuture<>(); - private final Debouncer, CompletableFuture> + private final Debouncer< + CompletableFuture, CompletableFuture> schemaRefreshDebouncer; private final SchemaQueriesFactory schemaQueriesFactory; private final SchemaParserFactory schemaParserFactory; // We don't allow concurrent schema refreshes. If one is already running, the next one is queued // (and the ones after that are merged with the queued one). - private CompletableFuture currentSchemaRefresh; - private CompletableFuture queuedSchemaRefresh; + private CompletableFuture currentSchemaRefresh; + private CompletableFuture queuedSchemaRefresh; private boolean didFirstNodeListRefresh; @@ -336,12 +362,12 @@ private void refreshSchema( String keyspace, boolean evenIfDisabled, boolean flushNow, - CompletableFuture future) { + CompletableFuture future) { if (!didFirstNodeListRefresh) { // This happen if the control connection receives a schema event during init. We can't // refresh yet because we don't know the nodes' versions, simply ignore. 
- future.complete(metadata); + future.complete(new RefreshSchemaResult(metadata)); return; } @@ -352,16 +378,17 @@ private void refreshSchema( if (isRefreshedKeyspace && (evenIfDisabled || isSchemaEnabled())) { acceptSchemaRequest(future, flushNow); } else { - future.complete(metadata); + future.complete(new RefreshSchemaResult(metadata)); singleThreaded.firstSchemaRefreshFuture.complete(null); } } // An external component has requested a schema refresh, feed it to the debouncer. - private void acceptSchemaRequest(CompletableFuture future, boolean flushNow) { + private void acceptSchemaRequest( + CompletableFuture future, boolean flushNow) { assert adminExecutor.inEventLoop(); if (closeWasCalled) { - future.complete(metadata); + future.complete(new RefreshSchemaResult(metadata)); } else { schemaRefreshDebouncer.receive(future); if (flushNow) { @@ -371,13 +398,13 @@ private void acceptSchemaRequest(CompletableFuture future, boolean flu } // Multiple requests have arrived within the debouncer window, coalesce them. - private CompletableFuture coalesceSchemaRequests( - List> futures) { + private CompletableFuture coalesceSchemaRequests( + List> futures) { assert adminExecutor.inEventLoop(); assert !futures.isEmpty(); // Keep only one, but ensure that the discarded ones will still be completed when we're done - CompletableFuture result = null; - for (CompletableFuture future : futures) { + CompletableFuture result = null; + for (CompletableFuture future : futures) { if (result == null) { result = future; } else { @@ -388,33 +415,55 @@ private CompletableFuture coalesceSchemaRequests( } // The debouncer has flushed, start the actual work. 
- private void startSchemaRequest(CompletableFuture future) { + private void startSchemaRequest(CompletableFuture refreshFuture) { assert adminExecutor.inEventLoop(); if (closeWasCalled) { - future.complete(metadata); + refreshFuture.complete(new RefreshSchemaResult(metadata)); return; } if (currentSchemaRefresh == null) { - currentSchemaRefresh = future; + currentSchemaRefresh = refreshFuture; LOG.debug("[{}] Starting schema refresh", logPrefix); initControlConnectionForSchema() .thenCompose(v -> context.getTopologyMonitor().checkSchemaAgreement()) - // 1. Query system tables - .thenCompose(b -> schemaQueriesFactory.newInstance(future).execute()) - // 2. Parse the rows into metadata objects, put them in a MetadataRefresh - // 3. Apply the MetadataRefresh - .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) .whenComplete( - (v, error) -> { - if (error != null) { - currentSchemaRefresh.completeExceptionally(error); + (schemaInAgreement, agreementError) -> { + if (agreementError != null) { + refreshFuture.completeExceptionally(agreementError); + } else { + schemaQueriesFactory + .newInstance( + // TODO remove this unused parameter (see JAVA-2582) + null) + .execute() + .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) + .whenComplete( + (newMetadata, metadataError) -> { + if (metadataError != null) { + refreshFuture.completeExceptionally(metadataError); + } else { + refreshFuture.complete( + new RefreshSchemaResult(newMetadata, schemaInAgreement)); + } + + firstSchemaRefreshFuture.complete(null); + + currentSchemaRefresh = null; + // If another refresh was enqueued during this one, run it now + if (queuedSchemaRefresh != null) { + CompletableFuture tmp = + this.queuedSchemaRefresh; + this.queuedSchemaRefresh = null; + startSchemaRequest(tmp); + } + }); } - singleThreaded.firstSchemaRefreshFuture.complete(null); }); } else if (queuedSchemaRefresh == null) { - queuedSchemaRefresh = future; // wait for our turn + queuedSchemaRefresh = 
refreshFuture; // wait for our turn } else { - CompletableFutures.completeFrom(queuedSchemaRefresh, future); // join the queued request + CompletableFutures.completeFrom( + queuedSchemaRefresh, refreshFuture); // join the queued request } } @@ -432,26 +481,13 @@ private CompletionStage initControlConnectionForSchema() { } } - private Void parseAndApplySchemaRows(SchemaRows schemaRows) { + private Metadata parseAndApplySchemaRows(SchemaRows schemaRows) { assert adminExecutor.inEventLoop(); - assert schemaRows.refreshFuture() == currentSchemaRefresh; - try { - SchemaRefresh schemaRefresh = schemaParserFactory.newInstance(schemaRows).parse(); - long start = System.nanoTime(); - apply(schemaRefresh); - currentSchemaRefresh.complete(metadata); - LOG.debug( - "[{}] Applying schema refresh took {}", logPrefix, NanoTime.formatTimeSince(start)); - } catch (Throwable t) { - currentSchemaRefresh.completeExceptionally(t); - } - currentSchemaRefresh = null; - if (queuedSchemaRefresh != null) { - CompletableFuture tmp = this.queuedSchemaRefresh; - this.queuedSchemaRefresh = null; - startSchemaRequest(tmp); - } - return null; + SchemaRefresh schemaRefresh = schemaParserFactory.newInstance(schemaRows).parse(); + long start = System.nanoTime(); + apply(schemaRefresh); + LOG.debug("[{}] Applying schema refresh took {}", logPrefix, NanoTime.formatTimeSince(start)); + return metadata; } private void close() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 4c35e7a79cd..0746bca24d2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.LifecycleListener; import 
com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager.RefreshSchemaResult; import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; import com.datastax.oss.driver.internal.core.metadata.NodeStateManager; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; @@ -152,7 +153,9 @@ public CompletionStage setSchemaMetadataEnabled(@Nullable Boolean newV @NonNull @Override public CompletionStage refreshSchemaAsync() { - return metadataManager.refreshSchema(null, true, true); + return metadataManager + .refreshSchema(null, true, true) + .thenApply(RefreshSchemaResult::getMetadata); } @NonNull From 3ef94b9dc57f5b87449276f96ad63b9dc4e44bb7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:07:26 -0300 Subject: [PATCH 185/979] JAVA-2583: IS NOT NULL clause should be idempotent (#1376) --- changelog/README.md | 1 + .../internal/querybuilder/relation/DefaultRelation.java | 2 +- .../api/querybuilder/update/UpdateIdempotenceTest.java | 7 +++++++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 5b40d960013..906ac2363fc 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [bug] JAVA-2583: IS NOT NULL clause should be idempotent - [improvement] JAVA-2442: Don't check for schema agreement twice when completing a DDL query - [improvement] JAVA-2473: Don't reconnect control connection if protocol is downgraded - [bug] JAVA-2556: Make ExecutionInfo compatible with any Request type diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/relation/DefaultRelation.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/relation/DefaultRelation.java index 3807986e611..a4c12a77e09 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/relation/DefaultRelation.java 
+++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/relation/DefaultRelation.java @@ -50,7 +50,7 @@ public void appendTo(@NonNull StringBuilder builder) { @Override public boolean isIdempotent() { - return rightOperand.isIdempotent(); + return rightOperand == null || rightOperand.isIdempotent(); } @NonNull diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java index 1f424484ee6..09f778d041a 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java @@ -148,4 +148,11 @@ public void should_not_be_idempotent_if_concatenating_to_collection() { .hasCql("UPDATE foo SET l=[1,2,3]+l WHERE k=?") .isNotIdempotent(); } + + @Test + public void should_be_idempotent_if_relation_does_not_have_right_operand() { + assertThat(update("foo").setColumn("col1", literal(42)).whereColumn("col2").isNotNull()) + .hasCql("UPDATE foo SET col1=42 WHERE col2 IS NOT NULL") + .isIdempotent(); + } } From 31809f49d968c53c0436f2b5a4b4aa9a5885607c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:08:37 -0300 Subject: [PATCH 186/979] JAVA-2584: Ensure codec registry is able to create codecs for collections of UDTs and tuples (#1377) --- changelog/README.md | 1 + .../codec/registry/CachingCodecRegistry.java | 148 +++- .../registry/CachingCodecRegistryTest.java | 723 +++--------------- ...CachingCodecRegistryTestDataProviders.java | 567 ++++++++++++++ 4 files changed, 810 insertions(+), 629 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java diff --git a/changelog/README.md b/changelog/README.md index 906ac2363fc..1b5e2c66257 
100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [bug] JAVA-2584: Ensure codec registry is able to create codecs for collections of UDTs and tuples - [bug] JAVA-2583: IS NOT NULL clause should be idempotent - [improvement] JAVA-2442: Don't check for schema agreement twice when completing a DDL query - [improvement] JAVA-2473: Don't reconnect control connection if protocol is downgraded diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index eec29520eb1..35ea21f38c7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -15,10 +15,12 @@ */ package com.datastax.oss.driver.internal.core.type.codec.registry; +import com.datastax.oss.driver.api.core.data.CqlDuration; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.CustomType; import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.ListType; import com.datastax.oss.driver.api.core.type.MapType; import com.datastax.oss.driver.api.core.type.SetType; @@ -37,10 +39,18 @@ import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetAddress; import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; +import java.util.UUID; import 
java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; @@ -72,7 +82,7 @@ public abstract class CachingCodecRegistry implements MutableCodecRegistry { protected final String logPrefix; private final TypeCodec[] primitiveCodecs; private final CopyOnWriteArrayList> userCodecs = new CopyOnWriteArrayList<>(); - private final IntMap primitiveCodecsByCode; + private final IntMap> primitiveCodecsByCode; private final Lock registerLock = new ReentrantLock(); protected CachingCodecRegistry( @@ -243,12 +253,6 @@ public TypeCodec codecFor( } } - if (value instanceof TupleValue) { - return uncheckedCast(codecFor(cqlType, TupleValue.class)); - } else if (value instanceof UdtValue) { - return uncheckedCast(codecFor(cqlType, UdtValue.class)); - } - GenericType javaType = inspectType(value, cqlType); LOG.trace("[{}] Continuing based on inferred type {}", logPrefix, javaType); return uncheckedCast(getCachedCodec(cqlType, javaType, true)); @@ -273,15 +277,14 @@ public TypeCodec codecFor(@NonNull JavaTypeT value) { } } - if (value instanceof TupleValue) { - return uncheckedCast(codecFor(((TupleValue) value).getType(), TupleValue.class)); - } else if (value instanceof UdtValue) { - return uncheckedCast(codecFor(((UdtValue) value).getType(), UdtValue.class)); - } - - GenericType javaType = inspectType(value, null); - LOG.trace("[{}] Continuing based on inferred type {}", logPrefix, javaType); - return uncheckedCast(getCachedCodec(null, javaType, true)); + DataType cqlType = inferCqlTypeFromValue(value); + GenericType javaType = inspectType(value, cqlType); + LOG.trace( + "[{}] Continuing based on inferred CQL type {} and Java type {}", + logPrefix, + cqlType, + javaType); + return uncheckedCast(getCachedCodec(cqlType, javaType, true)); } @NonNull @@ -322,7 +325,7 @@ protected boolean matches( @NonNull protected GenericType inspectType(@NonNull Object value, @Nullable DataType cqlType) { if (value instanceof 
List) { - List list = (List) value; + List list = (List) value; if (list.isEmpty()) { // Empty collections are always encoded the same way, so any element type will do // in the absence of a CQL type. When the CQL type is known, we try to infer the best Java @@ -341,7 +344,7 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c return GenericType.listOf(elementType); } } else if (value instanceof Set) { - Set set = (Set) value; + Set set = (Set) value; if (set.isEmpty()) { return cqlType == null ? JAVA_TYPE_FOR_EMPTY_SETS : inferJavaTypeFromCqlType(cqlType); } else { @@ -357,7 +360,7 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c return GenericType.setOf(elementType); } } else if (value instanceof Map) { - Map map = (Map) value; + Map map = (Map) value; if (map.isEmpty()) { return cqlType == null ? JAVA_TYPE_FOR_EMPTY_MAPS : inferJavaTypeFromCqlType(cqlType); } else { @@ -443,6 +446,105 @@ protected GenericType inferJavaTypeFromCqlType(@NonNull DataType cqlType) { } } + @Nullable + protected DataType inferCqlTypeFromValue(@NonNull Object value) { + if (value instanceof List) { + List list = (List) value; + if (list.isEmpty()) { + return CQL_TYPE_FOR_EMPTY_LISTS; + } + Object firstElement = list.get(0); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer list codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } + DataType elementType = inferCqlTypeFromValue(firstElement); + if (elementType == null) { + return null; + } + return DataTypes.listOf(elementType); + } else if (value instanceof Set) { + Set set = (Set) value; + if (set.isEmpty()) { + return CQL_TYPE_FOR_EMPTY_SETS; + } + Object firstElement = set.iterator().next(); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer set codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + 
} + DataType elementType = inferCqlTypeFromValue(firstElement); + if (elementType == null) { + return null; + } + return DataTypes.setOf(elementType); + } else if (value instanceof Map) { + Map map = (Map) value; + if (map.isEmpty()) { + return CQL_TYPE_FOR_EMPTY_MAPS; + } + Entry firstEntry = map.entrySet().iterator().next(); + Object firstKey = firstEntry.getKey(); + Object firstValue = firstEntry.getValue(); + if (firstKey == null || firstValue == null) { + throw new IllegalArgumentException( + "Can't infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)"); + } + DataType keyType = inferCqlTypeFromValue(firstKey); + DataType valueType = inferCqlTypeFromValue(firstValue); + if (keyType == null || valueType == null) { + return null; + } + return DataTypes.mapOf(keyType, valueType); + } + Class javaClass = value.getClass(); + if (ByteBuffer.class.isAssignableFrom(javaClass)) { + return DataTypes.BLOB; + } else if (String.class.equals(javaClass)) { + return DataTypes.TEXT; + } else if (Long.class.equals(javaClass)) { + return DataTypes.BIGINT; + } else if (Boolean.class.equals(javaClass)) { + return DataTypes.BOOLEAN; + } else if (BigDecimal.class.equals(javaClass)) { + return DataTypes.DECIMAL; + } else if (Double.class.equals(javaClass)) { + return DataTypes.DOUBLE; + } else if (Float.class.equals(javaClass)) { + return DataTypes.FLOAT; + } else if (Integer.class.equals(javaClass)) { + return DataTypes.INT; + } else if (Instant.class.equals(javaClass)) { + return DataTypes.TIMESTAMP; + } else if (UUID.class.equals(javaClass)) { + return DataTypes.UUID; + } else if (BigInteger.class.equals(javaClass)) { + return DataTypes.VARINT; + } else if (InetAddress.class.isAssignableFrom(javaClass)) { + return DataTypes.INET; + } else if (LocalDate.class.equals(javaClass)) { + return DataTypes.DATE; + } else if (LocalTime.class.equals(javaClass)) { + return DataTypes.TIME; + } else if 
(Short.class.equals(javaClass)) { + return DataTypes.SMALLINT; + } else if (Byte.class.equals(javaClass)) { + return DataTypes.TINYINT; + } else if (CqlDuration.class.equals(javaClass)) { + return DataTypes.DURATION; + } else if (UdtValue.class.isAssignableFrom(javaClass)) { + return ((UdtValue) value).getType(); + } else if (TupleValue.class.isAssignableFrom(javaClass)) { + return ((TupleValue) value).getType(); + } + // This might mean that the java type is a custom type with a custom codec, + // so don't throw CodecNotFoundException just yet. + return null; + } + // Try to create a codec when we haven't found it in the cache @NonNull protected TypeCodec createCodec( @@ -565,8 +667,8 @@ protected TypeCodec createCodec(@NonNull DataType cqlType) { throw new CodecNotFoundException(cqlType, null); } - private static IntMap sortByProtocolCode(TypeCodec[] codecs) { - IntMap.Builder builder = IntMap.builder(); + private static IntMap> sortByProtocolCode(TypeCodec[] codecs) { + IntMap.Builder> builder = IntMap.builder(); for (TypeCodec codec : codecs) { builder.put(codec.getCqlType().getProtocolCode(), codec); } @@ -590,4 +692,8 @@ private static TypeCodec uncheckedCast( GenericType.setOf(Boolean.class); private static final GenericType> JAVA_TYPE_FOR_EMPTY_MAPS = GenericType.mapOf(Boolean.class, Boolean.class); + private static final DataType CQL_TYPE_FOR_EMPTY_LISTS = DataTypes.listOf(DataTypes.BOOLEAN); + private static final DataType CQL_TYPE_FOR_EMPTY_SETS = DataTypes.setOf(DataTypes.BOOLEAN); + private static final DataType CQL_TYPE_FOR_EMPTY_MAPS = + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java index 6e6071a9ae2..eba88b75cd5 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java @@ -22,58 +22,39 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.data.CqlDuration; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.ListType; -import com.datastax.oss.driver.api.core.type.MapType; -import com.datastax.oss.driver.api.core.type.SetType; -import com.datastax.oss.driver.api.core.type.TupleType; -import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.oss.driver.internal.core.type.UserDefinedTypeBuilder; +import com.datastax.oss.driver.internal.core.data.DefaultTupleValue; +import com.datastax.oss.driver.internal.core.data.DefaultUdtValue; import com.datastax.oss.driver.internal.core.type.codec.CqlIntToStringCodec; import com.datastax.oss.driver.internal.core.type.codec.IntCodec; import com.datastax.oss.driver.internal.core.type.codec.ListCodec; import com.datastax.oss.driver.internal.core.type.codec.registry.CachingCodecRegistryTest.TestCachingCodecRegistry.MockCache; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.net.Inet4Address; -import java.net.InetAddress; -import java.net.UnknownHostException; import java.nio.ByteBuffer; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalTime; import java.time.Period; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.InOrder; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +@RunWith(DataProviderRunner.class) public class CachingCodecRegistryTest { @Mock private MockCache mockCache; @@ -84,91 +65,40 @@ public void setup() { } @Test - public void should_find_primitive_codecs_for_types() { + @UseDataProvider( + value = "primitiveCodecs", + location = CachingCodecRegistryTestDataProviders.class) + public void should_find_primitive_codecs_for_types(TypeCodec codec) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - checkPrimitiveMappings(registry, TypeCodecs.BOOLEAN); - checkPrimitiveMappings(registry, TypeCodecs.TINYINT); - checkPrimitiveMappings(registry, TypeCodecs.DOUBLE); - checkPrimitiveMappings(registry, TypeCodecs.COUNTER); - checkPrimitiveMappings(registry, TypeCodecs.FLOAT); - checkPrimitiveMappings(registry, TypeCodecs.INT); - checkPrimitiveMappings(registry, TypeCodecs.BIGINT); - checkPrimitiveMappings(registry, 
TypeCodecs.SMALLINT); - checkPrimitiveMappings(registry, TypeCodecs.TIMESTAMP); - checkPrimitiveMappings(registry, TypeCodecs.DATE); - checkPrimitiveMappings(registry, TypeCodecs.TIME); - checkPrimitiveMappings(registry, TypeCodecs.BLOB); - checkPrimitiveMappings(registry, TypeCodecs.TEXT); - checkPrimitiveMappings(registry, TypeCodecs.ASCII); - checkPrimitiveMappings(registry, TypeCodecs.VARINT); - checkPrimitiveMappings(registry, TypeCodecs.DECIMAL); - checkPrimitiveMappings(registry, TypeCodecs.UUID); - checkPrimitiveMappings(registry, TypeCodecs.TIMEUUID); - checkPrimitiveMappings(registry, TypeCodecs.INET); - checkPrimitiveMappings(registry, TypeCodecs.DURATION); - // Primitive mappings never hit the cache - verifyZeroInteractions(mockCache); - } - - private void checkPrimitiveMappings(TestCachingCodecRegistry registry, TypeCodec codec) { DataType cqlType = codec.getCqlType(); GenericType javaType = codec.getJavaType(); - assertThat(registry.codecFor(cqlType, javaType)).isSameAs(codec); assertThat(registry.codecFor(cqlType)).isSameAs(codec); - assertThat(javaType.__getToken().getType()).isInstanceOf(Class.class); Class javaClass = (Class) javaType.__getToken().getType(); assertThat(registry.codecFor(cqlType, javaClass)).isSameAs(codec); + // Primitive mappings never hit the cache + verifyZeroInteractions(mockCache); } @Test - public void should_find_primitive_codecs_for_value() throws Exception { + @UseDataProvider( + value = "primitiveCodecsWithValues", + location = CachingCodecRegistryTestDataProviders.class) + public void should_find_primitive_codecs_for_value(Object value, TypeCodec codec) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - assertThat(registry.codecFor(true)).isEqualTo(TypeCodecs.BOOLEAN); - assertThat(registry.codecFor((byte) 0)).isEqualTo(TypeCodecs.TINYINT); - assertThat(registry.codecFor(0.0)).isEqualTo(TypeCodecs.DOUBLE); - assertThat(registry.codecFor(0.0f)).isEqualTo(TypeCodecs.FLOAT); - 
assertThat(registry.codecFor(0)).isEqualTo(TypeCodecs.INT); - assertThat(registry.codecFor(0L)).isEqualTo(TypeCodecs.BIGINT); - assertThat(registry.codecFor((short) 0)).isEqualTo(TypeCodecs.SMALLINT); - assertThat(registry.codecFor(Instant.EPOCH)).isEqualTo(TypeCodecs.TIMESTAMP); - assertThat(registry.codecFor(LocalDate.MIN)).isEqualTo(TypeCodecs.DATE); - assertThat(registry.codecFor(LocalTime.MIDNIGHT)).isEqualTo(TypeCodecs.TIME); - assertThat(registry.codecFor(ByteBuffer.allocate(0))).isEqualTo(TypeCodecs.BLOB); - assertThat(registry.codecFor("")).isEqualTo(TypeCodecs.TEXT); - assertThat(registry.codecFor(BigInteger.ONE)).isEqualTo(TypeCodecs.VARINT); - assertThat(registry.codecFor(BigDecimal.ONE)).isEqualTo(TypeCodecs.DECIMAL); - assertThat(registry.codecFor(new UUID(2L, 1L))).isEqualTo(TypeCodecs.UUID); - assertThat(registry.codecFor(InetAddress.getByName("127.0.0.1"))).isEqualTo(TypeCodecs.INET); - assertThat(registry.codecFor(CqlDuration.newInstance(1, 2, 3))).isEqualTo(TypeCodecs.DURATION); + assertThat(registry.codecFor(value)).isEqualTo(codec); verifyZeroInteractions(mockCache); } @Test - public void should_find_primitive_codecs_for_cql_type_and_value() throws Exception { + @UseDataProvider( + value = "primitiveCodecsWithCqlTypesAndValues", + location = CachingCodecRegistryTestDataProviders.class) + public void should_find_primitive_codecs_for_cql_type_and_value( + DataType cqlType, Object value, TypeCodec codec) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - assertThat(registry.codecFor(DataTypes.BOOLEAN, true)).isEqualTo(TypeCodecs.BOOLEAN); - assertThat(registry.codecFor(DataTypes.TINYINT, (byte) 0)).isEqualTo(TypeCodecs.TINYINT); - assertThat(registry.codecFor(DataTypes.DOUBLE, 0.0)).isEqualTo(TypeCodecs.DOUBLE); - assertThat(registry.codecFor(DataTypes.FLOAT, 0.0f)).isEqualTo(TypeCodecs.FLOAT); - assertThat(registry.codecFor(DataTypes.INT, 0)).isEqualTo(TypeCodecs.INT); - assertThat(registry.codecFor(DataTypes.BIGINT, 
0L)).isEqualTo(TypeCodecs.BIGINT); - assertThat(registry.codecFor(DataTypes.SMALLINT, (short) 0)).isEqualTo(TypeCodecs.SMALLINT); - assertThat(registry.codecFor(DataTypes.TIMESTAMP, Instant.EPOCH)) - .isEqualTo(TypeCodecs.TIMESTAMP); - assertThat(registry.codecFor(DataTypes.DATE, LocalDate.MIN)).isEqualTo(TypeCodecs.DATE); - assertThat(registry.codecFor(DataTypes.TIME, LocalTime.MIDNIGHT)).isEqualTo(TypeCodecs.TIME); - assertThat(registry.codecFor(DataTypes.BLOB, ByteBuffer.allocate(0))) - .isEqualTo(TypeCodecs.BLOB); - assertThat(registry.codecFor(DataTypes.TEXT, "")).isEqualTo(TypeCodecs.TEXT); - assertThat(registry.codecFor(DataTypes.VARINT, BigInteger.ONE)).isEqualTo(TypeCodecs.VARINT); - assertThat(registry.codecFor(DataTypes.DECIMAL, BigDecimal.ONE)).isEqualTo(TypeCodecs.DECIMAL); - assertThat(registry.codecFor(DataTypes.UUID, new UUID(2L, 1L))).isEqualTo(TypeCodecs.UUID); - assertThat(registry.codecFor(DataTypes.INET, InetAddress.getByName("127.0.0.1"))) - .isEqualTo(TypeCodecs.INET); - assertThat(registry.codecFor(DataTypes.DURATION, CqlDuration.newInstance(1, 2, 3))) - .isEqualTo(TypeCodecs.DURATION); + assertThat(registry.codecFor(cqlType, value)).isEqualTo(codec); verifyZeroInteractions(mockCache); } @@ -216,93 +146,85 @@ public void should_find_user_codec_for_custom_java_type() { } @Test - public void should_create_list_codec_for_cql_and_java_types() { - ListType cqlType = DataTypes.listOf(DataTypes.listOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - List> value = ImmutableList.of(ImmutableList.of(1)); - + @UseDataProvider( + value = "collectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_cql_and_java_types( + DataType cqlType, GenericType javaType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, javaType); + 
TypeCodec codec = registry.codecFor(cqlType, javaType); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(javaType)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - // Cache lookup for the codec, and recursively for its subcodec inOrder.verify(mockCache).lookup(cqlType, javaType, false); - inOrder - .verify(mockCache) - .lookup(DataTypes.listOf(DataTypes.INT), GenericType.listOf(GenericType.INTEGER), false); } @Test - public void should_create_list_codec_for_cql_type() { - ListType cqlType = DataTypes.listOf(DataTypes.listOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - List> value = ImmutableList.of(ImmutableList.of(1)); - + @UseDataProvider( + value = "collectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_cql_type( + DataType cqlType, GenericType javaType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType); + TypeCodec codec = registry.codecFor(cqlType); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(javaType)).isTrue(); assertThat(codec.accepts(value)).isTrue(); inOrder.verify(mockCache).lookup(cqlType, null, false); - inOrder.verify(mockCache).lookup(DataTypes.listOf(DataTypes.INT), null, false); } @Test - public void should_create_list_codec_for_cql_type_and_java_value() { - ListType cqlType = DataTypes.listOf(DataTypes.listOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - List> value = ImmutableList.of(ImmutableList.of(1)); - + @UseDataProvider( + value = "collectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_cql_type_and_java_value( + DataType cqlType, GenericType javaType, GenericType javaTypeLookup, 
Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); + TypeCodec codec = registry.codecFor(cqlType, value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(javaType)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, javaType, true); - inOrder - .verify(mockCache) - .lookup(DataTypes.listOf(DataTypes.INT), GenericType.listOf(GenericType.INTEGER), true); + inOrder.verify(mockCache).lookup(cqlType, javaTypeLookup, true); } @Test - public void should_create_list_codec_for_java_value() { - ListType cqlType = DataTypes.listOf(DataTypes.listOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - List> value = ImmutableList.of(ImmutableList.of(1)); - + @UseDataProvider( + value = "collectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_java_value( + DataType cqlType, GenericType javaType, GenericType javaTypeLookup, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); + TypeCodec codec = registry.codecFor(value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(javaType)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(null, javaType, true); - inOrder.verify(mockCache).lookup(null, GenericType.listOf(GenericType.INTEGER), true); + inOrder.verify(mockCache).lookup(cqlType, javaTypeLookup, true); } @Test - public void should_create_list_codec_for_empty_java_value() { - GenericType>> javaType = - GenericType.listOf(GenericType.listOf(Boolean.class)); - List> value = Collections.singletonList(Collections.emptyList()); - + 
@UseDataProvider( + value = "emptyCollectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_empty_java_value( + DataType cqlType, + GenericType javaType, + DataType cqlTypeLookup, + GenericType javaTypeLookup, + Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); + TypeCodec codec = registry.codecFor(value); assertThat(codec).isNotNull(); + assertThat(codec.accepts(cqlType)).isFalse(); + assertThat(codec.accepts(javaType)).isFalse(); assertThat(codec.accepts(value)).isTrue(); // Note that empty collections without CQL type are a corner case, in that the registry returns @@ -310,425 +232,45 @@ public void should_create_list_codec_for_empty_java_value() { // The only requirement is that it can encode the value, which holds true: codec.encode(value, ProtocolVersion.DEFAULT); - inOrder.verify(mockCache).lookup(null, javaType, true); + inOrder.verify(mockCache).lookup(cqlTypeLookup, javaTypeLookup, true); } @Test - public void should_create_list_codec_for_cql_type_and_empty_java_value() { - ListType cqlType = DataTypes.listOf(DataTypes.listOf(DataTypes.INT)); - GenericType>> javaType = - GenericType.listOf(GenericType.listOf(GenericType.INTEGER)); - List> value = Collections.singletonList(Collections.emptyList()); - + @UseDataProvider( + value = "emptyCollectionsWithCqlAndJavaTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_collection_codec_for_cql_type_and_empty_java_value( + DataType cqlType, GenericType javaType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); + TypeCodec codec = registry.codecFor(cqlType, value); assertThat(codec).isNotNull(); 
assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(javaType)).isTrue(); assertThat(codec.accepts(value)).isTrue(); // Verify that the codec can encode the value codec.encode(value, ProtocolVersion.DEFAULT); - inOrder.verify(mockCache).lookup(cqlType, javaType, true); } @Test - public void should_create_list_codec_for_java_value_when_first_element_is_a_subtype() - throws UnknownHostException { - ListType cqlType = DataTypes.listOf(DataTypes.INET); - GenericType> javaType = new GenericType>() {}; - InetAddress address = InetAddress.getByAddress(new byte[] {127, 0, 0, 1}); - // Because the actual implementation is a subclass, there is no exact match with the codec's - // declared type - assertThat(address).isInstanceOf(Inet4Address.class); - List value = ImmutableList.of(address); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - - inOrder.verify(mockCache).lookup(null, GenericType.listOf(Inet4Address.class), true); - } - - @Test - public void should_throw_for_list_codec_containing_null_element() { - List value = new ArrayList<>(); - value.add(null); - + @UseDataProvider( + value = "collectionsWithNullElements", + location = CachingCodecRegistryTestDataProviders.class) + public void should_throw_for_collection_containing_null_element(Object value, String expected) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); assertThatThrownBy(() -> registry.codecFor(value)) .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Can't infer list codec because the first element is null " - + "(note that CQL does not allow null values in collections)"); + .hasMessage(expected); } @Test - public void 
should_create_set_codec_for_cql_and_java_types() { - SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - Set> value = ImmutableSet.of(ImmutableSet.of(1)); - + @UseDataProvider( + value = "tuplesWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_tuple_codec_for_cql_and_java_types(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, javaType); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - // Cache lookup for the codec, and recursively for its subcodec - inOrder.verify(mockCache).lookup(cqlType, javaType, false); - inOrder - .verify(mockCache) - .lookup(DataTypes.setOf(DataTypes.INT), GenericType.setOf(GenericType.INTEGER), false); - } - - @Test - public void should_create_set_codec_for_cql_type() { - SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - Set> value = ImmutableSet.of(ImmutableSet.of(1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, null, false); - inOrder.verify(mockCache).lookup(DataTypes.setOf(DataTypes.INT), null, false); - } - - @Test - public void should_create_set_codec_for_cql_type_and_java_value() { - SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - Set> value = 
ImmutableSet.of(ImmutableSet.of(1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, javaType, true); - inOrder - .verify(mockCache) - .lookup(DataTypes.setOf(DataTypes.INT), GenericType.setOf(GenericType.INTEGER), true); - } - - @Test - public void should_create_set_codec_for_java_value() { - SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); - GenericType>> javaType = new GenericType>>() {}; - Set> value = ImmutableSet.of(ImmutableSet.of(1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(null, javaType, true); - inOrder.verify(mockCache).lookup(null, GenericType.setOf(GenericType.INTEGER), true); - } - - @Test - public void should_create_set_codec_for_empty_java_value() { - GenericType>> javaType = GenericType.setOf(GenericType.setOf(Boolean.class)); - Set> value = Collections.singleton(Collections.emptySet()); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(value)).isTrue(); - - // Note that empty collections without CQL type are a corner case, in that the registry returns - // a codec that does not accept cqlType, nor the value's declared Java type. 
- // The only requirement is that it can encode the value, which holds true: - codec.encode(value, ProtocolVersion.DEFAULT); - - inOrder.verify(mockCache).lookup(null, javaType, true); - } - - @Test - public void should_create_set_codec_for_cql_type_and_empty_java_value() { - SetType cqlType = DataTypes.setOf(DataTypes.setOf(DataTypes.INT)); - GenericType>> javaType = GenericType.setOf(GenericType.setOf(Integer.class)); - Set> value = Collections.emptySet(); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(value)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - // Verify that the codec can encode the value - codec.encode(value, ProtocolVersion.DEFAULT); - - inOrder.verify(mockCache).lookup(cqlType, javaType, true); - } - - @Test - public void should_create_set_codec_for_java_value_when_first_element_is_a_subtype() - throws UnknownHostException { - SetType cqlType = DataTypes.setOf(DataTypes.INET); - GenericType> javaType = new GenericType>() {}; - InetAddress address = InetAddress.getByAddress(new byte[] {127, 0, 0, 1}); - // Because the actual implementation is a subclass, there is no exact match with the codec's - // declared type - assertThat(address).isInstanceOf(Inet4Address.class); - Set value = ImmutableSet.of(address); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - - inOrder.verify(mockCache).lookup(null, GenericType.setOf(Inet4Address.class), true); - } - - @Test - public void 
should_throw_for_set_codec_containing_null_element() { - Set value = new HashSet<>(); - value.add(null); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - assertThatThrownBy(() -> registry.codecFor(value)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Can't infer set codec because the first element is null " - + "(note that CQL does not allow null values in collections)"); - } - - @Test - public void should_create_map_codec_for_cql_and_java_types() { - MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); - GenericType>> javaType = - new GenericType>>() {}; - Map> value = ImmutableMap.of(1, ImmutableMap.of(1, 1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, javaType); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - // Cache lookup for the codec, and recursively for its subcodec - inOrder.verify(mockCache).lookup(cqlType, javaType, false); - inOrder - .verify(mockCache) - .lookup( - DataTypes.mapOf(DataTypes.INT, DataTypes.INT), - GenericType.mapOf(GenericType.INTEGER, GenericType.INTEGER), - false); - } - - @Test - public void should_create_map_codec_for_cql_type() { - MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); - GenericType>> javaType = - new GenericType>>() {}; - Map> value = ImmutableMap.of(1, ImmutableMap.of(1, 1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - 
inOrder.verify(mockCache).lookup(cqlType, null, false); - inOrder.verify(mockCache).lookup(DataTypes.mapOf(DataTypes.INT, DataTypes.INT), null, false); - } - - @Test - public void should_create_map_codec_for_java_type() { - MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); - GenericType>> javaType = - new GenericType>>() {}; - Map> value = ImmutableMap.of(1, ImmutableMap.of(1, 1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(javaType); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(null, javaType, false); - inOrder.verify(mockCache).lookup(null, new GenericType>() {}, false); - } - - @Test - public void should_create_map_codec_for_cql_type_and_java_value() { - MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); - GenericType>> javaType = - new GenericType>>() {}; - Map> value = ImmutableMap.of(1, ImmutableMap.of(1, 1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, javaType, true); - inOrder - .verify(mockCache) - .lookup( - DataTypes.mapOf(DataTypes.INT, DataTypes.INT), - GenericType.mapOf(GenericType.INTEGER, GenericType.INTEGER), - true); - } - - @Test - public void should_create_map_codec_for_java_value() { - MapType cqlType = DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.INT, DataTypes.INT)); - GenericType>> javaType = - new GenericType>>() 
{}; - Map> value = ImmutableMap.of(1, ImmutableMap.of(1, 1)); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(null, javaType, true); - inOrder - .verify(mockCache) - .lookup(null, GenericType.mapOf(GenericType.INTEGER, GenericType.INTEGER), true); - } - - @Test - public void should_create_map_codec_for_empty_java_value() { - GenericType> javaType = - GenericType.mapOf(GenericType.BOOLEAN, GenericType.BOOLEAN); - Map> value = ImmutableMap.of(1, Collections.emptyMap()); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(value)).isTrue(); - - // Note that empty collections without CQL type are a corner case, in that the registry returns - // a codec that does not accept cqlType, nor the value's declared Java type. 
- // The only requirement is that it can encode the value, which holds true: - codec.encode(value, ProtocolVersion.DEFAULT); - - inOrder.verify(mockCache).lookup(null, javaType, true); - } - - @Test - public void should_create_map_codec_for_cql_type_and_empty_java_value() { - MapType cqlType = - DataTypes.mapOf(DataTypes.INT, DataTypes.mapOf(DataTypes.DOUBLE, DataTypes.TEXT)); - GenericType>> javaType = - GenericType.mapOf( - GenericType.INTEGER, GenericType.mapOf(GenericType.DOUBLE, GenericType.STRING)); - Map> value = ImmutableMap.of(1, Collections.emptyMap()); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec>> codec = registry.codecFor(cqlType, value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - // Verify that the codec can encode the value - codec.encode(value, ProtocolVersion.DEFAULT); - - inOrder.verify(mockCache).lookup(cqlType, javaType, true); - } - - @Test - public void should_create_map_codec_for_java_value_when_first_element_is_a_subtype() - throws UnknownHostException { - MapType cqlType = DataTypes.mapOf(DataTypes.INET, DataTypes.INET); - GenericType> javaType = - new GenericType>() {}; - InetAddress address = InetAddress.getByAddress(new byte[] {127, 0, 0, 1}); - // Because the actual implementation is a subclass, there is no exact match with the codec's - // declared type - assertThat(address).isInstanceOf(Inet4Address.class); - Map value = ImmutableMap.of(address, address); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - - TypeCodec> codec = registry.codecFor(value); - assertThat(codec).isNotNull(); - assertThat(codec.accepts(cqlType)).isTrue(); - assertThat(codec.accepts(javaType)).isTrue(); - assertThat(codec.accepts(value)).isTrue(); - - inOrder - 
.verify(mockCache) - .lookup(null, GenericType.mapOf(Inet4Address.class, Inet4Address.class), true); - } - - @Test - public void should_throw_for_map_codec_containing_null_element() { - should_throw_for_map_codec_containing_null_element("foo", null); - should_throw_for_map_codec_containing_null_element(null, "foo"); - should_throw_for_map_codec_containing_null_element(null, null); - } - - private void should_throw_for_map_codec_containing_null_element( - String firstKey, String firstValue) { - Map value = new HashMap<>(); - value.put(firstKey, firstValue); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - assertThatThrownBy(() -> registry.codecFor(value)) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Can't infer map codec because the first key and/or value is null " - + "(note that CQL does not allow null values in collections)"); - } - - @Test - public void should_create_tuple_codec_for_cql_and_java_types() { - TupleType cqlType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); - TupleValue value = cqlType.newValue(); - - TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); - InOrder inOrder = inOrder(mockCache); - TypeCodec codec = registry.codecFor(cqlType, GenericType.TUPLE_VALUE); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); @@ -737,17 +279,15 @@ public void should_create_tuple_codec_for_cql_and_java_types() { assertThat(codec.accepts(value)).isTrue(); inOrder.verify(mockCache).lookup(cqlType, GenericType.TUPLE_VALUE, false); // field codecs are only looked up when fields are accessed, so no cache hit for list now - } @Test - public void should_create_tuple_codec_for_cql_type() { - TupleType cqlType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); - TupleValue value = cqlType.newValue(); - + @UseDataProvider( + value = "tuplesWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void 
should_create_tuple_codec_for_cql_type(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - TypeCodec codec = registry.codecFor(cqlType); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); @@ -758,58 +298,47 @@ public void should_create_tuple_codec_for_cql_type() { } @Test - public void should_create_tuple_codec_for_cql_type_and_java_value() { - TupleType cqlType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); - TupleValue value = cqlType.newValue(); - + @UseDataProvider( + value = "tuplesWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_tuple_codec_for_cql_type_and_java_value( + DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec codec = registry.codecFor(cqlType, value); + TypeCodec codec = registry.codecFor(cqlType, value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(GenericType.TUPLE_VALUE)).isTrue(); assertThat(codec.accepts(TupleValue.class)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, GenericType.TUPLE_VALUE, false); - + inOrder.verify(mockCache).lookup(cqlType, GenericType.of(DefaultTupleValue.class), true); inOrder.verifyNoMoreInteractions(); } @Test - public void should_create_tuple_codec_for_java_value() { - TupleType cqlType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); - TupleValue value = cqlType.newValue(); - + @UseDataProvider( + value = "tuplesWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_tuple_codec_for_java_value(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); 
- - TypeCodec codec = registry.codecFor(value); + TypeCodec codec = registry.codecFor(value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(GenericType.TUPLE_VALUE)).isTrue(); assertThat(codec.accepts(TupleValue.class)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - // UDTs know their CQL type, so the actual lookup is by CQL + Java type, and therefore not - // covariant. - inOrder.verify(mockCache).lookup(cqlType, GenericType.TUPLE_VALUE, false); - + inOrder.verify(mockCache).lookup(cqlType, GenericType.of(DefaultTupleValue.class), true); inOrder.verifyNoMoreInteractions(); } @Test - public void should_create_udt_codec_for_cql_and_java_types() { - UserDefinedType cqlType = - new UserDefinedTypeBuilder( - CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) - .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) - .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) - .build(); - UdtValue value = cqlType.newValue(); - + @UseDataProvider( + value = "udtsWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_udt_codec_for_cql_and_java_types(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - TypeCodec codec = registry.codecFor(cqlType, GenericType.UDT_VALUE); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); @@ -822,18 +351,12 @@ public void should_create_udt_codec_for_cql_and_java_types() { } @Test - public void should_create_udt_codec_for_cql_type() { - UserDefinedType cqlType = - new UserDefinedTypeBuilder( - CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) - .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) - .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) - .build(); - UdtValue value = cqlType.newValue(); - + 
@UseDataProvider( + value = "udtsWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_udt_codec_for_cql_type(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - TypeCodec codec = registry.codecFor(cqlType); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); @@ -844,52 +367,36 @@ public void should_create_udt_codec_for_cql_type() { } @Test - public void should_create_udt_codec_for_cql_type_and_java_value() { - UserDefinedType cqlType = - new UserDefinedTypeBuilder( - CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) - .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) - .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) - .build(); - UdtValue value = cqlType.newValue(); - + @UseDataProvider( + value = "udtsWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_udt_codec_for_cql_type_and_java_value(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec codec = registry.codecFor(cqlType, value); + TypeCodec codec = registry.codecFor(cqlType, value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(GenericType.UDT_VALUE)).isTrue(); assertThat(codec.accepts(UdtValue.class)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - inOrder.verify(mockCache).lookup(cqlType, GenericType.UDT_VALUE, false); - + inOrder.verify(mockCache).lookup(cqlType, GenericType.of(DefaultUdtValue.class), true); inOrder.verifyNoMoreInteractions(); } @Test - public void should_create_udt_codec_for_java_value() { - UserDefinedType cqlType = - new UserDefinedTypeBuilder( - CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) - 
.withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) - .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) - .build(); - UdtValue value = cqlType.newValue(); - + @UseDataProvider( + value = "udtsWithCqlTypes", + location = CachingCodecRegistryTestDataProviders.class) + public void should_create_udt_codec_for_java_value(DataType cqlType, Object value) { TestCachingCodecRegistry registry = new TestCachingCodecRegistry(mockCache); InOrder inOrder = inOrder(mockCache); - - TypeCodec codec = registry.codecFor(value); + TypeCodec codec = registry.codecFor(value); assertThat(codec).isNotNull(); assertThat(codec.accepts(cqlType)).isTrue(); assertThat(codec.accepts(GenericType.UDT_VALUE)).isTrue(); assertThat(codec.accepts(UdtValue.class)).isTrue(); assertThat(codec.accepts(value)).isTrue(); - // UDTs know their CQL type, so the actual lookup is by CQL + Java type, and therefore not - // covariant. - inOrder.verify(mockCache).lookup(cqlType, GenericType.UDT_VALUE, false); - + inOrder.verify(mockCache).lookup(cqlType, GenericType.of(DefaultUdtValue.class), true); inOrder.verifyNoMoreInteractions(); } @@ -1053,7 +560,7 @@ public void should_ignore_user_codec_if_collides_with_generated_codec() { public static class TestCachingCodecRegistry extends CachingCodecRegistry { private final MockCache cache; - public TestCachingCodecRegistry(MockCache cache) { + TestCachingCodecRegistry(MockCache cache) { super("test", CodecRegistryConstants.PRIMITIVE_CODECS); this.cache = cache; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java new file mode 100644 index 00000000000..64bbd800c92 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java @@ -0,0 +1,567 @@ +/* + 
* Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.registry; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.data.DefaultTupleValue; +import com.datastax.oss.driver.internal.core.data.DefaultUdtValue; +import com.datastax.oss.driver.internal.core.type.UserDefinedTypeBuilder; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.tngtech.java.junit.dataprovider.DataProvider; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.Inet4Address; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import 
java.util.Collections; +import java.util.UUID; + +@SuppressWarnings("unused") +public class CachingCodecRegistryTestDataProviders { + + @DataProvider + public static Object[][] primitiveCodecs() { + return new Object[][] { + {TypeCodecs.BOOLEAN}, + {TypeCodecs.TINYINT}, + {TypeCodecs.DOUBLE}, + {TypeCodecs.COUNTER}, + {TypeCodecs.FLOAT}, + {TypeCodecs.INT}, + {TypeCodecs.BIGINT}, + {TypeCodecs.SMALLINT}, + {TypeCodecs.TIMESTAMP}, + {TypeCodecs.DATE}, + {TypeCodecs.TIME}, + {TypeCodecs.BLOB}, + {TypeCodecs.TEXT}, + {TypeCodecs.ASCII}, + {TypeCodecs.VARINT}, + {TypeCodecs.DECIMAL}, + {TypeCodecs.UUID}, + {TypeCodecs.TIMEUUID}, + {TypeCodecs.INET}, + {TypeCodecs.DURATION}, + }; + } + + @DataProvider + public static Object[][] primitiveCodecsWithValues() throws UnknownHostException { + return new Object[][] { + {true, TypeCodecs.BOOLEAN}, + {(byte) 0, TypeCodecs.TINYINT}, + {0.0, TypeCodecs.DOUBLE}, + {0.0f, TypeCodecs.FLOAT}, + {0, TypeCodecs.INT}, + {0L, TypeCodecs.BIGINT}, + {(short) 0, TypeCodecs.SMALLINT}, + {Instant.EPOCH, TypeCodecs.TIMESTAMP}, + {LocalDate.MIN, TypeCodecs.DATE}, + {LocalTime.MIDNIGHT, TypeCodecs.TIME}, + {ByteBuffer.allocate(0), TypeCodecs.BLOB}, + {"", TypeCodecs.TEXT}, + {BigInteger.ONE, TypeCodecs.VARINT}, + {BigDecimal.ONE, TypeCodecs.DECIMAL}, + {new UUID(2L, 1L), TypeCodecs.UUID}, + {InetAddress.getByName("127.0.0.1"), TypeCodecs.INET}, + {CqlDuration.newInstance(1, 2, 3), TypeCodecs.DURATION}, + }; + } + + @DataProvider + public static Object[][] primitiveCodecsWithCqlTypesAndValues() throws UnknownHostException { + return new Object[][] { + {DataTypes.BOOLEAN, true, TypeCodecs.BOOLEAN}, + {DataTypes.TINYINT, (byte) 0, TypeCodecs.TINYINT}, + {DataTypes.DOUBLE, 0.0, TypeCodecs.DOUBLE}, + {DataTypes.FLOAT, 0.0f, TypeCodecs.FLOAT}, + {DataTypes.INT, 0, TypeCodecs.INT}, + {DataTypes.BIGINT, 0L, TypeCodecs.BIGINT}, + {DataTypes.SMALLINT, (short) 0, TypeCodecs.SMALLINT}, + {DataTypes.TIMESTAMP, Instant.EPOCH, TypeCodecs.TIMESTAMP}, + 
{DataTypes.DATE, LocalDate.MIN, TypeCodecs.DATE}, + {DataTypes.TIME, LocalTime.MIDNIGHT, TypeCodecs.TIME}, + {DataTypes.BLOB, ByteBuffer.allocate(0), TypeCodecs.BLOB}, + {DataTypes.TEXT, "", TypeCodecs.TEXT}, + {DataTypes.VARINT, BigInteger.ONE, TypeCodecs.VARINT}, + {DataTypes.DECIMAL, BigDecimal.ONE, TypeCodecs.DECIMAL}, + {DataTypes.UUID, new UUID(2L, 1L), TypeCodecs.UUID}, + {DataTypes.INET, InetAddress.getByName("127.0.0.1"), TypeCodecs.INET}, + {DataTypes.DURATION, CqlDuration.newInstance(1, 2, 3), TypeCodecs.DURATION}, + }; + } + + @DataProvider + public static Object[][] collectionsWithCqlAndJavaTypes() + throws UnknownHostException, ClassNotFoundException { + TupleType tupleType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); + TupleValue tupleValue = tupleType.newValue(); + UserDefinedType userType = + new UserDefinedTypeBuilder( + CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) + .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) + .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) + .build(); + UdtValue udtValue = userType.newValue(); + return new Object[][] { + // lists + { + DataTypes.listOf(DataTypes.INT), + GenericType.listOf(Integer.class), + GenericType.listOf(Integer.class), + ImmutableList.of(1) + }, + { + DataTypes.listOf(DataTypes.TEXT), + GenericType.listOf(String.class), + GenericType.listOf(String.class), + ImmutableList.of("foo") + }, + { + DataTypes.listOf(DataTypes.BLOB), + GenericType.listOf(ByteBuffer.class), + GenericType.listOf(Class.forName("java.nio.HeapByteBuffer")), + ImmutableList.of(ByteBuffer.wrap(new byte[] {127, 0, 0, 1})) + }, + { + DataTypes.listOf(DataTypes.INET), + GenericType.listOf(InetAddress.class), + GenericType.listOf(Inet4Address.class), + ImmutableList.of(InetAddress.getByAddress(new byte[] {127, 0, 0, 1})) + }, + { + DataTypes.listOf(tupleType), + GenericType.listOf(TupleValue.class), + 
GenericType.listOf(DefaultTupleValue.class), + ImmutableList.of(tupleValue) + }, + { + DataTypes.listOf(userType), + GenericType.listOf(UdtValue.class), + GenericType.listOf(DefaultUdtValue.class), + ImmutableList.of(udtValue) + }, + { + DataTypes.listOf(DataTypes.listOf(DataTypes.INT)), + GenericType.listOf(GenericType.listOf(Integer.class)), + GenericType.listOf(GenericType.listOf(Integer.class)), + ImmutableList.of(ImmutableList.of(1)) + }, + { + DataTypes.listOf(DataTypes.listOf(tupleType)), + GenericType.listOf(GenericType.listOf(TupleValue.class)), + GenericType.listOf(GenericType.listOf(DefaultTupleValue.class)), + ImmutableList.of(ImmutableList.of(tupleValue)) + }, + { + DataTypes.listOf(DataTypes.listOf(userType)), + GenericType.listOf(GenericType.listOf(UdtValue.class)), + GenericType.listOf(GenericType.listOf(DefaultUdtValue.class)), + ImmutableList.of(ImmutableList.of(udtValue)) + }, + // sets + { + DataTypes.setOf(DataTypes.INT), + GenericType.setOf(Integer.class), + GenericType.setOf(Integer.class), + ImmutableSet.of(1) + }, + { + DataTypes.setOf(DataTypes.TEXT), + GenericType.setOf(String.class), + GenericType.setOf(String.class), + ImmutableSet.of("foo") + }, + { + DataTypes.setOf(DataTypes.BLOB), + GenericType.setOf(ByteBuffer.class), + GenericType.setOf(Class.forName("java.nio.HeapByteBuffer")), + ImmutableSet.of(ByteBuffer.wrap(new byte[] {127, 0, 0, 1})) + }, + { + DataTypes.setOf(DataTypes.INET), + GenericType.setOf(InetAddress.class), + GenericType.setOf(Inet4Address.class), + ImmutableSet.of(InetAddress.getByAddress(new byte[] {127, 0, 0, 1})) + }, + { + DataTypes.setOf(tupleType), + GenericType.setOf(TupleValue.class), + GenericType.setOf(DefaultTupleValue.class), + ImmutableSet.of(tupleValue) + }, + { + DataTypes.setOf(userType), + GenericType.setOf(UdtValue.class), + GenericType.setOf(DefaultUdtValue.class), + ImmutableSet.of(udtValue) + }, + { + DataTypes.setOf(DataTypes.setOf(DataTypes.INT)), + 
GenericType.setOf(GenericType.setOf(Integer.class)), + GenericType.setOf(GenericType.setOf(Integer.class)), + ImmutableSet.of(ImmutableSet.of(1)) + }, + { + DataTypes.setOf(DataTypes.setOf(tupleType)), + GenericType.setOf(GenericType.setOf(TupleValue.class)), + GenericType.setOf(GenericType.setOf(DefaultTupleValue.class)), + ImmutableSet.of(ImmutableSet.of(tupleValue)) + }, + { + DataTypes.setOf(DataTypes.setOf(userType)), + GenericType.setOf(GenericType.setOf(UdtValue.class)), + GenericType.setOf(GenericType.setOf(DefaultUdtValue.class)), + ImmutableSet.of(ImmutableSet.of(udtValue)) + }, + // maps + { + DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT), + GenericType.mapOf(Integer.class, String.class), + GenericType.mapOf(Integer.class, String.class), + ImmutableMap.of(1, "foo") + }, + { + DataTypes.mapOf(DataTypes.BLOB, DataTypes.INET), + GenericType.mapOf(ByteBuffer.class, InetAddress.class), + GenericType.mapOf(Class.forName("java.nio.HeapByteBuffer"), Inet4Address.class), + ImmutableMap.of( + ByteBuffer.wrap(new byte[] {127, 0, 0, 1}), + InetAddress.getByAddress(new byte[] {127, 0, 0, 1})) + }, + { + DataTypes.mapOf(tupleType, tupleType), + GenericType.mapOf(TupleValue.class, TupleValue.class), + GenericType.mapOf(DefaultTupleValue.class, DefaultTupleValue.class), + ImmutableMap.of(tupleValue, tupleValue) + }, + { + DataTypes.mapOf(userType, userType), + GenericType.mapOf(UdtValue.class, UdtValue.class), + GenericType.mapOf(DefaultUdtValue.class, DefaultUdtValue.class), + ImmutableMap.of(udtValue, udtValue) + }, + { + DataTypes.mapOf(DataTypes.UUID, DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT)), + GenericType.mapOf(GenericType.UUID, GenericType.mapOf(Integer.class, String.class)), + GenericType.mapOf(GenericType.UUID, GenericType.mapOf(Integer.class, String.class)), + ImmutableMap.of(UUID.randomUUID(), ImmutableMap.of(1, "foo")) + }, + { + DataTypes.mapOf(DataTypes.mapOf(userType, userType), DataTypes.mapOf(tupleType, tupleType)), + GenericType.mapOf( + 
GenericType.mapOf(UdtValue.class, UdtValue.class), + GenericType.mapOf(TupleValue.class, TupleValue.class)), + GenericType.mapOf( + GenericType.mapOf(DefaultUdtValue.class, DefaultUdtValue.class), + GenericType.mapOf(DefaultTupleValue.class, DefaultTupleValue.class)), + ImmutableMap.of( + ImmutableMap.of(udtValue, udtValue), ImmutableMap.of(tupleValue, tupleValue)) + }, + }; + } + + @DataProvider + public static Object[][] emptyCollectionsWithCqlAndJavaTypes() { + TupleType tupleType = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); + UserDefinedType userType = + new UserDefinedTypeBuilder( + CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) + .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) + .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) + .build(); + return new Object[][] { + // lists + { + DataTypes.listOf(DataTypes.INT), + GenericType.listOf(Integer.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + DataTypes.listOf(DataTypes.TEXT), + GenericType.listOf(String.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + DataTypes.listOf(DataTypes.BLOB), + GenericType.listOf(ByteBuffer.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + DataTypes.listOf(DataTypes.INET), + GenericType.listOf(InetAddress.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + DataTypes.listOf(tupleType), + GenericType.listOf(TupleValue.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + DataTypes.listOf(userType), + GenericType.listOf(UdtValue.class), + DataTypes.listOf(DataTypes.BOOLEAN), + GenericType.listOf(Boolean.class), + Collections.emptyList() + }, + { + 
DataTypes.listOf(DataTypes.listOf(DataTypes.INT)), + GenericType.listOf(GenericType.listOf(Integer.class)), + DataTypes.listOf(DataTypes.listOf(DataTypes.BOOLEAN)), + GenericType.listOf(GenericType.listOf(Boolean.class)), + ImmutableList.of(Collections.emptyList()) + }, + { + DataTypes.listOf(DataTypes.listOf(tupleType)), + GenericType.listOf(GenericType.listOf(TupleValue.class)), + DataTypes.listOf(DataTypes.listOf(DataTypes.BOOLEAN)), + GenericType.listOf(GenericType.listOf(Boolean.class)), + ImmutableList.of(Collections.emptyList()) + }, + { + DataTypes.listOf(DataTypes.listOf(userType)), + GenericType.listOf(GenericType.listOf(UdtValue.class)), + DataTypes.listOf(DataTypes.listOf(DataTypes.BOOLEAN)), + GenericType.listOf(GenericType.listOf(Boolean.class)), + ImmutableList.of(Collections.emptyList()) + }, + // sets + { + DataTypes.setOf(DataTypes.INT), + GenericType.setOf(Integer.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(DataTypes.TEXT), + GenericType.setOf(String.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(DataTypes.BLOB), + GenericType.setOf(ByteBuffer.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(DataTypes.INET), + GenericType.setOf(InetAddress.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(tupleType), + GenericType.setOf(TupleValue.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(userType), + GenericType.setOf(UdtValue.class), + DataTypes.setOf(DataTypes.BOOLEAN), + GenericType.setOf(Boolean.class), + Collections.emptySet() + }, + { + DataTypes.setOf(DataTypes.setOf(DataTypes.INT)), + 
GenericType.setOf(GenericType.setOf(Integer.class)), + DataTypes.setOf(DataTypes.setOf(DataTypes.BOOLEAN)), + GenericType.setOf(GenericType.setOf(Boolean.class)), + ImmutableSet.of(Collections.emptySet()) + }, + { + DataTypes.setOf(DataTypes.setOf(tupleType)), + GenericType.setOf(GenericType.setOf(TupleValue.class)), + DataTypes.setOf(DataTypes.setOf(DataTypes.BOOLEAN)), + GenericType.setOf(GenericType.setOf(Boolean.class)), + ImmutableSet.of(Collections.emptySet()) + }, + { + DataTypes.setOf(DataTypes.setOf(userType)), + GenericType.setOf(GenericType.setOf(UdtValue.class)), + DataTypes.setOf(DataTypes.setOf(DataTypes.BOOLEAN)), + GenericType.setOf(GenericType.setOf(Boolean.class)), + ImmutableSet.of(Collections.emptySet()) + }, + // maps + { + DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT), + GenericType.mapOf(Integer.class, String.class), + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), + GenericType.mapOf(Boolean.class, Boolean.class), + Collections.emptyMap() + }, + { + DataTypes.mapOf(DataTypes.BLOB, DataTypes.INET), + GenericType.mapOf(ByteBuffer.class, InetAddress.class), + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), + GenericType.mapOf(Boolean.class, Boolean.class), + Collections.emptyMap() + }, + { + DataTypes.mapOf(tupleType, tupleType), + GenericType.mapOf(TupleValue.class, TupleValue.class), + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), + GenericType.mapOf(Boolean.class, Boolean.class), + Collections.emptyMap() + }, + { + DataTypes.mapOf(userType, userType), + GenericType.mapOf(UdtValue.class, UdtValue.class), + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), + GenericType.mapOf(Boolean.class, Boolean.class), + Collections.emptyMap() + }, + { + DataTypes.mapOf(DataTypes.UUID, DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT)), + GenericType.mapOf(GenericType.UUID, GenericType.mapOf(Integer.class, String.class)), + DataTypes.mapOf(DataTypes.UUID, DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN)), + 
GenericType.mapOf(GenericType.UUID, GenericType.mapOf(Boolean.class, Boolean.class)), + ImmutableMap.of(UUID.randomUUID(), Collections.emptyMap()) + }, + { + DataTypes.mapOf(DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT), DataTypes.UUID), + GenericType.mapOf(GenericType.mapOf(Integer.class, String.class), GenericType.UUID), + DataTypes.mapOf(DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), DataTypes.UUID), + GenericType.mapOf(GenericType.mapOf(Boolean.class, Boolean.class), GenericType.UUID), + ImmutableMap.of(Collections.emptyMap(), UUID.randomUUID()) + }, + { + DataTypes.mapOf(DataTypes.mapOf(userType, userType), DataTypes.mapOf(tupleType, tupleType)), + GenericType.mapOf( + GenericType.mapOf(UdtValue.class, UdtValue.class), + GenericType.mapOf(TupleValue.class, TupleValue.class)), + DataTypes.mapOf( + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN), + DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN)), + GenericType.mapOf( + GenericType.mapOf(Boolean.class, Boolean.class), + GenericType.mapOf(Boolean.class, Boolean.class)), + ImmutableMap.of(Collections.emptyMap(), Collections.emptyMap()) + }, + }; + } + + @DataProvider + public static Object[][] collectionsWithNullElements() { + return new Object[][] { + { + Collections.singletonList(null), + "Can't infer list codec because the first element is null " + + "(note that CQL does not allow null values in collections)" + }, + { + Collections.singleton(null), + "Can't infer set codec because the first element is null " + + "(note that CQL does not allow null values in collections)" + }, + { + Collections.singletonMap("foo", null), + "Can't infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)" + }, + { + Collections.singletonMap(null, "foo"), + "Can't infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)" + }, + { + Collections.singletonMap(null, null), + "Can't 
infer map codec because the first key and/or value is null " + + "(note that CQL does not allow null values in collections)" + }, + }; + } + + @DataProvider + public static Object[][] tuplesWithCqlTypes() { + TupleType tupleType1 = DataTypes.tupleOf(DataTypes.INT, DataTypes.TEXT); + TupleType tupleType2 = DataTypes.tupleOf(DataTypes.INT, DataTypes.listOf(DataTypes.TEXT)); + TupleType tupleType3 = DataTypes.tupleOf(DataTypes.mapOf(tupleType1, tupleType2)); + TupleValue tupleValue1 = tupleType1.newValue(42, "foo"); + TupleValue tupleValue2 = tupleType2.newValue(42, ImmutableList.of("foo", "bar")); + return new Object[][] { + {tupleType1, tupleType1.newValue()}, + {tupleType1, tupleValue1}, + {tupleType2, tupleType2.newValue()}, + {tupleType2, tupleValue2}, + {tupleType3, tupleType3.newValue()}, + {tupleType3, tupleType3.newValue(ImmutableMap.of(tupleValue1, tupleValue2))}, + }; + } + + @DataProvider + public static Object[][] udtsWithCqlTypes() { + UserDefinedType userType1 = + new UserDefinedTypeBuilder( + CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) + .withField(CqlIdentifier.fromInternal("field1"), DataTypes.INT) + .withField(CqlIdentifier.fromInternal("field2"), DataTypes.TEXT) + .build(); + UserDefinedType userType2 = + new UserDefinedTypeBuilder( + CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) + .withField(CqlIdentifier.fromInternal("field1"), DataTypes.setOf(DataTypes.BIGINT)) + .withField(CqlIdentifier.fromInternal("field2"), DataTypes.listOf(DataTypes.TEXT)) + .build(); + UserDefinedType userType3 = + new UserDefinedTypeBuilder( + CqlIdentifier.fromInternal("ks"), CqlIdentifier.fromInternal("type")) + .withField(CqlIdentifier.fromInternal("field1"), DataTypes.mapOf(userType1, userType2)) + .build(); + UdtValue userValue1 = userType1.newValue(42, "foo"); + UdtValue userValue2 = + userType2.newValue(ImmutableSet.of(24L, 43L), ImmutableList.of("foo", "bar")); + return new Object[][] { + {userType1, 
userType1.newValue()}, + {userType1, userValue1}, + {userType2, userType2.newValue()}, + {userType2, userValue2}, + {userType3, userType3.newValue()}, + {userType3, userType3.newValue(ImmutableMap.of(userValue1, userValue2))}, + }; + } +} From d7dab59057eb4277c975f447408461eb6594b8a6 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:09:51 -0300 Subject: [PATCH 187/979] JAVA-2555: Generate append/prepend constructs compatible with legacy C* versions (#1379) --- changelog/README.md | 2 + manual/query_builder/idempotence/README.md | 72 ++++++++++++++--- .../api/querybuilder/update/Assignment.java | 21 ++--- .../update/OngoingAssignment.java | 16 ++-- .../querybuilder/update/AppendAssignment.java | 14 +--- .../update/CollectionAssignment.java | 81 +++++++++++++++++++ .../update/CollectionElementAssignment.java | 4 +- .../update/PrependAssignment.java | 33 +------- .../querybuilder/update/RemoveAssignment.java | 29 +++++++ .../update/RemoveListElementAssignment.java | 5 -- .../update/UpdateFluentAssignmentTest.java | 36 ++++----- .../update/UpdateIdempotenceTest.java | 39 ++++++++- 12 files changed, 255 insertions(+), 97 deletions(-) create mode 100644 query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionAssignment.java create mode 100644 query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveAssignment.java diff --git a/changelog/README.md b/changelog/README.md index 1b5e2c66257..008d8f245d2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2596: Consider collection removals as idempotent in query builder +- [bug] JAVA-2555: Generate append/prepend constructs compatible with legacy C* versions - [bug] JAVA-2584: Ensure codec registry is able to create codecs for collections of UDTs and tuples - [bug] JAVA-2583: IS NOT NULL clause should be idempotent - [improvement] JAVA-2442: Don't check for schema agreement 
twice when completing a DDL query diff --git a/manual/query_builder/idempotence/README.md b/manual/query_builder/idempotence/README.md index b064c81cdd6..9fd6d39114d 100644 --- a/manual/query_builder/idempotence/README.md +++ b/manual/query_builder/idempotence/README.md @@ -39,7 +39,7 @@ If you use the result of a user-defined function in an INSERT or UPDATE statemen of knowing if that function is idempotent: ```java -Statement statement = insertInto("foo").value("k", function("generate_id")).build(); +SimpleStatement statement = insertInto("foo").value("k", function("generate_id")).build(); // INSERT INTO foo (k) VALUES (generate_id()) assert !statement.isIdempotent(); ``` @@ -47,7 +47,7 @@ assert !statement.isIdempotent(); This extends to arithmetic operations using such terms: ```java -Statement statement = +SimpleStatement statement = insertInto("foo").value("k", add(function("generate_id"), literal(1))).build(); // INSERT INTO foo (k) VALUES (generate_id()+1) assert !statement.isIdempotent(); @@ -56,7 +56,7 @@ assert !statement.isIdempotent(); Raw terms could be anything, so they are also considered unsafe by default: ```java -Statement statement = +SimpleStatement statement = insertInto("foo").value("k", raw("generate_id()+1")).build(); // INSERT INTO foo (k) VALUES (generate_id()+1) assert !statement.isIdempotent(); @@ -68,7 +68,7 @@ If a WHERE clause in an UPDATE or DELETE statement uses a comparison with an uns potentially apply to different rows for each execution: ```java -Statement statement = +SimpleStatement statement = update("foo") .setColumn("v", bindMarker()) .whereColumn("k").isEqualTo(function("non_idempotent_func")) @@ -82,7 +82,7 @@ assert !statement.isIdempotent(); Counter updates are never idempotent: ```java -Statement statement = +SimpleStatement statement = update("foo") .increment("c") .whereColumn("k").isEqualTo(bindMarker()) @@ -94,12 +94,12 @@ assert !statement.isIdempotent(); Nor is appending or prepending an element to a list: 
```java -Statement statement = +SimpleStatement statement = update("foo") .appendListElement("l", literal(1)) .whereColumn("k").isEqualTo(bindMarker()) .build(); -// UPDATE foo SET l+=[1] WHERE k=? +// UPDATE foo SET l=l+[1] WHERE k=? assert !statement.isIdempotent(); ``` @@ -107,7 +107,7 @@ The generic `append` and `prepend` methods apply to any kind of collection, so w them unsafe by default too: ```java -Statement statement = +SimpleStatement statement = update("foo") .prepend("l", literal(Arrays.asList(1, 2, 3))) .whereColumn("k").isEqualTo(bindMarker()) @@ -116,12 +116,66 @@ Statement statement = assert !statement.isIdempotent(); ``` +The generic `remove` method is however safe since collection removals are idempotent: + +```java +SimpleStatement statement = + update("foo") + .remove("l", literal(Arrays.asList(1, 2, 3))) + .whereColumn("k").isEqualTo(bindMarker()) + .build(); +// UPDATE foo SET l=l-[1,2,3] WHERE k=? +assert statement.isIdempotent(); +``` + +When appending, prepending or removing a single element to/from a collection, it is possible to use +the dedicated methods listed below; their idempotence depends on the collection type (list, set or +map), the operation (append, prepend or removal) and the idempotence of the element being +added/removed: + +1. `appendListElement` : not idempotent +2. `prependListElement` : not idempotent +3. `removeListElement` : idempotent if element is idempotent +4. `appendSetElement` : idempotent if element is idempotent +5. `prependSetElement` : idempotent if element is idempotent +6. `removeSetElement` : idempotent if element is idempotent +7. `appendMapElement` : idempotent if both key and value are idempotent +8. `prependMapElement` : idempotent if both key and value are idempotent +9. `removeMapElement` : idempotent if both key and value are idempotent + +In practice, most invocations of the above methods will be idempotent because most collection +elements are. 
For example, the following statement is idempotent since `literal(1)` is also +idempotent: + +```java +SimpleStatement statement = + update("foo") + .removeListElement("l", literal(1)) + .whereColumn("k").isEqualTo(bindMarker()) + .build(); +// UPDATE foo SET l=l-[1] WHERE k=? +assert statement.isIdempotent(); +``` + +However, in rare cases the resulting statement won't be marked idempotent, e.g. if you use a +function to select a collection element: + +```java +SimpleStatement statement = + update("foo") + .removeListElement("l", function("myfunc")) + .whereColumn("k").isEqualTo(bindMarker()) + .build(); +// UPDATE foo SET l=l-[myfunc()] WHERE k=? +assert !statement.isIdempotent(); +``` + ### Unsafe deletions Deleting from a list is not idempotent: ```java -Statement statement = +SimpleStatement statement = deleteFrom("foo") .element("l", literal(0)) .whereColumn("k").isEqualTo(bindMarker()) diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java index d918c8aba42..e1e81e74c18 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.internal.querybuilder.update.PrependListElementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.PrependMapEntryAssignment; import com.datastax.oss.driver.internal.querybuilder.update.PrependSetElementAssignment; +import com.datastax.oss.driver.internal.querybuilder.update.RemoveAssignment; import com.datastax.oss.driver.internal.querybuilder.update.RemoveListElementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.RemoveMapEntryAssignment; import com.datastax.oss.driver.internal.querybuilder.update.RemoveSetElementAssignment; @@ -146,13 +147,13 @@ static 
Assignment decrement(@NonNull String columnName) { } /** - * Appends to a collection column, as in {@code SET l+=?}. + * Appends to a collection column, as in {@code SET l=l+?}. * *

      The term must be a collection of the same type as the column. */ @NonNull static Assignment append(@NonNull CqlIdentifier columnId, @NonNull Term suffix) { - return new AppendAssignment(new ColumnLeftOperand(columnId), suffix); + return new AppendAssignment(columnId, suffix); } /** @@ -165,7 +166,7 @@ static Assignment append(@NonNull String columnName, @NonNull Term suffix) { } /** - * Appends a single element to a list column, as in {@code SET l+=[?]}. + * Appends a single element to a list column, as in {@code SET l=l+[?]}. * *

      The term must be of the same type as the column's elements. */ @@ -184,7 +185,7 @@ static Assignment appendListElement(@NonNull String columnName, @NonNull Term su } /** - * Appends a single element to a set column, as in {@code SET s+={?}}. + * Appends a single element to a set column, as in {@code SET s=s+{?}}. * *

      The term must be of the same type as the column's elements. */ @@ -203,7 +204,7 @@ static Assignment appendSetElement(@NonNull String columnName, @NonNull Term suf } /** - * Appends a single entry to a map column, as in {@code SET m+={?:?}}. + * Appends a single entry to a map column, as in {@code SET m=m+{?:?}}. * *

      The terms must be of the same type as the column's keys and values respectively. */ @@ -302,7 +303,7 @@ static Assignment prependMapEntry( } /** - * Removes elements from a collection, as in {@code SET l-=[1,2,3]}. + * Removes elements from a collection, as in {@code SET l=l-[1,2,3]}. * *

      The term must be a collection of the same type as the column. * @@ -313,7 +314,7 @@ static Assignment prependMapEntry( */ @NonNull static Assignment remove(@NonNull CqlIdentifier columnId, @NonNull Term collectionToRemove) { - return new DefaultAssignment(new ColumnLeftOperand(columnId), "-=", collectionToRemove); + return new RemoveAssignment(columnId, collectionToRemove); } /** @@ -326,7 +327,7 @@ static Assignment remove(@NonNull String columnName, @NonNull Term collectionToR } /** - * Removes a single element to a list column, as in {@code SET l-=[?]}. + * Removes a single element from a list column, as in {@code SET l=l-[?]}. * *

      The term must be of the same type as the column's elements. */ @@ -345,7 +346,7 @@ static Assignment removeListElement(@NonNull String columnName, @NonNull Term su } /** - * Removes a single element to a set column, as in {@code SET s-={?}}. + * Removes a single element from a set column, as in {@code SET s=s-{?}}. * *

      The term must be of the same type as the column's elements. */ @@ -364,7 +365,7 @@ static Assignment removeSetElement(@NonNull String columnName, @NonNull Term suf } /** - * Removes a single entry to a map column, as in {@code SET m-={?:?}}. + * Removes a single entry from a map column, as in {@code SET m=m-{?:?}}. * *

      The terms must be of the same type as the column's keys and values respectively. */ diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java index 67af1f09e34..093e2047274 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java @@ -221,7 +221,7 @@ default UpdateWithAssignments decrement(@NonNull String columnName) { } /** - * Appends to a collection column, as in {@code SET l+=?}. + * Appends to a collection column, as in {@code SET l=l+?}. * *

      The term must be a collection of the same type as the column. * @@ -246,7 +246,7 @@ default UpdateWithAssignments append(@NonNull String columnName, @NonNull Term s } /** - * Appends a single element to a list column, as in {@code SET l+=[?]}. + * Appends a single element to a list column, as in {@code SET l=l+[?]}. * *

      The term must be of the same type as the column's elements. * @@ -274,7 +274,7 @@ default UpdateWithAssignments appendListElement( } /** - * Appends a single element to a set column, as in {@code SET s+={?}}. + * Appends a single element to a set column, as in {@code SET s=s+{?}}. * *

      The term must be of the same type as the column's elements. * @@ -299,7 +299,7 @@ default UpdateWithAssignments appendSetElement(@NonNull String columnName, @NonN } /** - * Appends a single entry to a map column, as in {@code SET m+={?:?}}. + * Appends a single entry to a map column, as in {@code SET m=m+{?:?}}. * *

      The terms must be of the same type as the column's keys and values respectively. * @@ -436,7 +436,7 @@ default UpdateWithAssignments prependMapEntry( } /** - * Removes elements from a collection, as in {@code SET l-=[1,2,3]}. + * Removes elements from a collection, as in {@code SET l=l-[1,2,3]}. * *

      The term must be a collection of the same type as the column. * @@ -469,7 +469,7 @@ default UpdateWithAssignments remove( } /** - * Removes a single element to a list column, as in {@code SET l-=[?]}. + * Removes a single element to a list column, as in {@code SET l=l-[?]}. * *

      The term must be of the same type as the column's elements. * @@ -497,7 +497,7 @@ default UpdateWithAssignments removeListElement( } /** - * Removes a single element to a set column, as in {@code SET s-={?}}. + * Removes a single element to a set column, as in {@code SET s=s-{?}}. * *

      The term must be of the same type as the column's elements. * @@ -522,7 +522,7 @@ default UpdateWithAssignments removeSetElement(@NonNull String columnName, @NonN } /** - * Removes a single entry to a map column, as in {@code SET m-={?:?}}. + * Removes a single entry to a map column, as in {@code SET m=m-{?:?}}. * *

      The terms must be of the same type as the column's keys and values respectively. * diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/AppendAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/AppendAssignment.java index 271c0bcca16..3213a097928 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/AppendAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/AppendAssignment.java @@ -15,21 +15,15 @@ */ package com.datastax.oss.driver.internal.querybuilder.update; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.datastax.oss.driver.internal.querybuilder.lhs.LeftOperand; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.Immutable; @Immutable -public class AppendAssignment extends DefaultAssignment { +public class AppendAssignment extends CollectionAssignment { - public AppendAssignment(@NonNull LeftOperand leftOperand, @NonNull Term rightOperand) { - super(leftOperand, "+=", rightOperand); - } - - @Override - public boolean isIdempotent() { - // Not idempotent for lists, be pessimistic - return false; + public AppendAssignment(@NonNull CqlIdentifier columnId, @NonNull Term value) { + super(columnId, Operator.APPEND, value); } } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionAssignment.java new file mode 100644 index 00000000000..e89a5028055 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionAssignment.java @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.querybuilder.update; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import com.datastax.oss.driver.api.querybuilder.update.Assignment; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public abstract class CollectionAssignment implements Assignment { + + public enum Operator { + APPEND("%1$s=%1$s+%2$s"), + PREPEND("%1$s=%2$s+%1$s"), + REMOVE("%1$s=%1$s-%2$s"), + ; + + public final String pattern; + + Operator(String pattern) { + this.pattern = pattern; + } + } + + private final CqlIdentifier columnId; + private final Operator operator; + private final Term value; + + protected CollectionAssignment( + @NonNull CqlIdentifier columnId, @NonNull Operator operator, @NonNull Term value) { + Preconditions.checkNotNull(columnId); + Preconditions.checkNotNull(value); + this.columnId = columnId; + this.operator = operator; + this.value = value; + } + + @Override + public void appendTo(@NonNull StringBuilder builder) { + builder.append(String.format(operator.pattern, columnId.asCql(true), buildRightOperand())); + } + + private String buildRightOperand() { + StringBuilder builder = new StringBuilder(); + value.appendTo(builder); + return builder.toString(); + } + + @Override + 
public boolean isIdempotent() { + // REMOVE is idempotent if the collection being removed is idempotent; APPEND and PREPEND are + // not idempotent for lists, so be pessimistic + return operator == Operator.REMOVE && value.isIdempotent(); + } + + @NonNull + public CqlIdentifier getColumnId() { + return columnId; + } + + @NonNull + public Term getValue() { + return value; + } +} diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionElementAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionElementAssignment.java index 35da1ca0f6e..4d63e49c0f0 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionElementAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CollectionElementAssignment.java @@ -27,9 +27,9 @@ public abstract class CollectionElementAssignment implements Assignment { public enum Operator { - APPEND("%s+=%s"), + APPEND("%1$s=%1$s+%2$s"), PREPEND("%1$s=%2$s+%1$s"), - REMOVE("%s-=%s"), + REMOVE("%1$s=%1$s-%2$s"), ; public final String pattern; diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/PrependAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/PrependAssignment.java index d58a6ffa18e..1b461bbb2c6 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/PrependAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/PrependAssignment.java @@ -17,42 +17,13 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.datastax.oss.driver.api.querybuilder.update.Assignment; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.Immutable; @Immutable -public class PrependAssignment implements 
Assignment { - - private final CqlIdentifier columnId; - private final Term prefix; +public class PrependAssignment extends CollectionAssignment { public PrependAssignment(@NonNull CqlIdentifier columnId, @NonNull Term prefix) { - this.columnId = columnId; - this.prefix = prefix; - } - - @Override - public void appendTo(@NonNull StringBuilder builder) { - String column = columnId.asCql(true); - builder.append(column).append('='); - prefix.appendTo(builder); - builder.append('+').append(column); - } - - @Override - public boolean isIdempotent() { - // Not idempotent for lists, be pessimistic - return false; - } - - @NonNull - public CqlIdentifier getColumnId() { - return columnId; - } - - @NonNull - public Term getPrefix() { - return prefix; + super(columnId, Operator.PREPEND, prefix); } } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveAssignment.java new file mode 100644 index 00000000000..453b8a318f6 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveAssignment.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.querybuilder.update; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public class RemoveAssignment extends CollectionAssignment { + + public RemoveAssignment(@NonNull CqlIdentifier columnId, @NonNull Term value) { + super(columnId, Operator.REMOVE, value); + } +} diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveListElementAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveListElementAssignment.java index 985a871fe5e..080104965c6 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveListElementAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/RemoveListElementAssignment.java @@ -26,9 +26,4 @@ public class RemoveListElementAssignment extends CollectionElementAssignment { public RemoveListElementAssignment(@NonNull CqlIdentifier columnId, @NonNull Term element) { super(columnId, Operator.REMOVE, null, element, '[', ']'); } - - @Override - public boolean isIdempotent() { - return false; - } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java index 184ad2e2dbf..bf9e13ef2d5 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java @@ -82,15 +82,15 @@ public void should_generate_list_operations() { Literal listLiteral = literal(ImmutableList.of(1, 2, 3)); assertThat(update("foo").append("l", 
bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l+=? WHERE k=?"); + .hasCql("UPDATE foo SET l=l+? WHERE k=?"); assertThat(update("foo").append("l", listLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l+=[1,2,3] WHERE k=?"); + .hasCql("UPDATE foo SET l=l+[1,2,3] WHERE k=?"); assertThat( update("foo") .appendListElement("l", bindMarker()) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l+=[?] WHERE k=?"); + .hasCql("UPDATE foo SET l=l+[?] WHERE k=?"); assertThat(update("foo").prepend("l", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) .hasCql("UPDATE foo SET l=?+l WHERE k=?"); @@ -104,15 +104,15 @@ public void should_generate_list_operations() { .hasCql("UPDATE foo SET l=[?]+l WHERE k=?"); assertThat(update("foo").remove("l", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l-=? WHERE k=?"); + .hasCql("UPDATE foo SET l=l-? WHERE k=?"); assertThat(update("foo").remove("l", listLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l-=[1,2,3] WHERE k=?"); + .hasCql("UPDATE foo SET l=l-[1,2,3] WHERE k=?"); assertThat( update("foo") .removeListElement("l", bindMarker()) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l-=[?] WHERE k=?"); + .hasCql("UPDATE foo SET l=l-[?] WHERE k=?"); } @Test @@ -120,15 +120,15 @@ public void should_generate_set_operations() { Literal setLiteral = literal(ImmutableSet.of(1, 2, 3)); assertThat(update("foo").append("s", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s+=? WHERE k=?"); + .hasCql("UPDATE foo SET s=s+? 
WHERE k=?"); assertThat(update("foo").append("s", setLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s+={1,2,3} WHERE k=?"); + .hasCql("UPDATE foo SET s=s+{1,2,3} WHERE k=?"); assertThat( update("foo") .appendSetElement("s", bindMarker()) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s+={?} WHERE k=?"); + .hasCql("UPDATE foo SET s=s+{?} WHERE k=?"); assertThat(update("foo").prepend("s", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) .hasCql("UPDATE foo SET s=?+s WHERE k=?"); @@ -142,15 +142,15 @@ public void should_generate_set_operations() { .hasCql("UPDATE foo SET s={?}+s WHERE k=?"); assertThat(update("foo").remove("s", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s-=? WHERE k=?"); + .hasCql("UPDATE foo SET s=s-? WHERE k=?"); assertThat(update("foo").remove("s", setLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s-={1,2,3} WHERE k=?"); + .hasCql("UPDATE foo SET s=s-{1,2,3} WHERE k=?"); assertThat( update("foo") .removeSetElement("s", bindMarker()) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s-={?} WHERE k=?"); + .hasCql("UPDATE foo SET s=s-{?} WHERE k=?"); } @Test @@ -158,15 +158,15 @@ public void should_generate_map_operations() { Literal mapLiteral = literal(ImmutableMap.of(1, "foo", 2, "bar")); assertThat(update("foo").append("m", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m+=? WHERE k=?"); + .hasCql("UPDATE foo SET m=m+? 
WHERE k=?"); assertThat(update("foo").append("m", mapLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m+={1:'foo',2:'bar'} WHERE k=?"); + .hasCql("UPDATE foo SET m=m+{1:'foo',2:'bar'} WHERE k=?"); assertThat( update("foo") .appendMapEntry("m", literal(1), literal("foo")) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m+={1:'foo'} WHERE k=?"); + .hasCql("UPDATE foo SET m=m+{1:'foo'} WHERE k=?"); assertThat(update("foo").prepend("m", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) .hasCql("UPDATE foo SET m=?+m WHERE k=?"); @@ -180,14 +180,14 @@ public void should_generate_map_operations() { .hasCql("UPDATE foo SET m={1:'foo'}+m WHERE k=?"); assertThat(update("foo").remove("m", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m-=? WHERE k=?"); + .hasCql("UPDATE foo SET m=m-? WHERE k=?"); assertThat(update("foo").remove("m", mapLiteral).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m-={1:'foo',2:'bar'} WHERE k=?"); + .hasCql("UPDATE foo SET m=m-{1:'foo',2:'bar'} WHERE k=?"); assertThat( update("foo") .removeMapEntry("m", literal(1), literal("foo")) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m-={1:'foo'} WHERE k=?"); + .hasCql("UPDATE foo SET m=m-{1:'foo'} WHERE k=?"); } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java index 09f778d041a..1a3b05614ea 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java @@ -104,7 +104,7 @@ public void should_not_be_idempotent_if_adding_element_to_list() { .appendListElement("l", literal(1)) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET 
l+=[1] WHERE k=?") + .hasCql("UPDATE foo SET l=l+[1] WHERE k=?") .isNotIdempotent(); assertThat( update("foo") @@ -120,14 +120,37 @@ public void should_not_be_idempotent_if_adding_element_to_list() { .appendSetElement("s", literal(1)) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET s+={1} WHERE k=?") + .hasCql("UPDATE foo SET s=s+{1} WHERE k=?") .isIdempotent(); assertThat( update("foo") .appendMapEntry("m", literal(1), literal("bar")) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET m+={1:'bar'} WHERE k=?") + .hasCql("UPDATE foo SET m=m+{1:'bar'} WHERE k=?") + .isIdempotent(); + + // Also, removals are always safe: + assertThat( + update("foo") + .removeListElement("l", literal(1)) + .whereColumn("k") + .isEqualTo(bindMarker())) + .hasCql("UPDATE foo SET l=l-[1] WHERE k=?") + .isIdempotent(); + assertThat( + update("foo") + .removeSetElement("s", literal(1)) + .whereColumn("k") + .isEqualTo(bindMarker())) + .hasCql("UPDATE foo SET s=s-{1} WHERE k=?") + .isIdempotent(); + assertThat( + update("foo") + .removeMapEntry("m", literal(1), literal("bar")) + .whereColumn("k") + .isEqualTo(bindMarker())) + .hasCql("UPDATE foo SET m=m-{1:'bar'} WHERE k=?") .isIdempotent(); } @@ -138,7 +161,7 @@ public void should_not_be_idempotent_if_concatenating_to_collection() { .append("l", literal(Arrays.asList(1, 2, 3))) .whereColumn("k") .isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET l+=[1,2,3] WHERE k=?") + .hasCql("UPDATE foo SET l=l+[1,2,3] WHERE k=?") .isNotIdempotent(); assertThat( update("foo") @@ -147,6 +170,14 @@ public void should_not_be_idempotent_if_concatenating_to_collection() { .isEqualTo(bindMarker())) .hasCql("UPDATE foo SET l=[1,2,3]+l WHERE k=?") .isNotIdempotent(); + // However, removals are always safe: + assertThat( + update("foo") + .remove("l", literal(Arrays.asList(1, 2, 3))) + .whereColumn("k") + .isEqualTo(bindMarker())) + .hasCql("UPDATE foo SET l=l-[1,2,3] WHERE k=?") + .isIdempotent(); } @Test From 
7ec4bd9783d84380d8ad64c62cf6d8fb5d8386d5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:10:55 -0300 Subject: [PATCH 188/979] JAVA-2581: Add query builder support for indexed list assignments (#1380) --- changelog/README.md | 1 + .../api/querybuilder/update/Assignment.java | 23 ++++++++++-- .../update/OngoingAssignment.java | 36 ++++++++++++++++--- .../update/UpdateFluentAssignmentTest.java | 10 ++++++ 4 files changed, 62 insertions(+), 8 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 008d8f245d2..0175faa03c1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [new feature] JAVA-2581: Add query builder support for indexed list assignments - [improvement] JAVA-2596: Consider collection removals as idempotent in query builder - [bug] JAVA-2555: Generate append/prepend constructs compatible with legacy C* versions - [bug] JAVA-2584: Ensure codec registry is able to create codecs for collections of UDTs and tuples diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java index e1e81e74c18..f2590766767 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java @@ -78,8 +78,8 @@ static Assignment setField( /** Assigns a value to an entry in a map column, as in {@code SET map[?]=?}. 
*/ @NonNull static Assignment setMapValue( - @NonNull CqlIdentifier columnId, @NonNull Term index, @NonNull Term value) { - return new DefaultAssignment(new ColumnComponentLeftOperand(columnId, index), "=", value); + @NonNull CqlIdentifier columnId, @NonNull Term key, @NonNull Term value) { + return new DefaultAssignment(new ColumnComponentLeftOperand(columnId, key), "=", value); } /** @@ -88,8 +88,25 @@ static Assignment setMapValue( */ @NonNull static Assignment setMapValue( + @NonNull String columnName, @NonNull Term key, @NonNull Term value) { + return setMapValue(CqlIdentifier.fromCql(columnName), key, value); + } + + /** Assigns a value to an index in a list column, as in {@code SET list[?]=?}. */ + @NonNull + static Assignment setListValue( + @NonNull CqlIdentifier columnId, @NonNull Term index, @NonNull Term value) { + return new DefaultAssignment(new ColumnComponentLeftOperand(columnId, index), "=", value); + } + + /** + * Shortcut for {@link #setListValue(CqlIdentifier, Term, Term) + * setMapValue(CqlIdentifier.fromCql(columnName), index, value)}. + */ + @NonNull + static Assignment setListValue( @NonNull String columnName, @NonNull Term index, @NonNull Term value) { - return setMapValue(CqlIdentifier.fromCql(columnName), index, value); + return setListValue(CqlIdentifier.fromCql(columnName), index, value); } /** Increments a counter, as in {@code SET c+=?}. */ diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java index 093e2047274..9c507be5db9 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/OngoingAssignment.java @@ -106,27 +106,53 @@ default UpdateWithAssignments setField( /** * Assigns a value to an entry in a map column, as in {@code SET map[?]=?}. 
* - *

      This is a shortcut for {@link #set(Assignment) set(Assignment.setMapValue(columnId, index, + *

      This is a shortcut for {@link #set(Assignment) set(Assignment.setMapValue(columnId, key, * value))}. * * @see Assignment#setMapValue(CqlIdentifier, Term, Term) */ @NonNull default UpdateWithAssignments setMapValue( - @NonNull CqlIdentifier columnId, @NonNull Term index, @NonNull Term value) { - return set(Assignment.setMapValue(columnId, index, value)); + @NonNull CqlIdentifier columnId, @NonNull Term key, @NonNull Term value) { + return set(Assignment.setMapValue(columnId, key, value)); } /** * Shortcut for {@link #setMapValue(CqlIdentifier, Term, Term) - * setMapValue(CqlIdentifier.fromCql(columnName), index, value)}. + * setMapValue(CqlIdentifier.fromCql(columnName), key, value)}. * * @see Assignment#setMapValue(String, Term, Term) */ @NonNull default UpdateWithAssignments setMapValue( + @NonNull String columnName, @NonNull Term key, @NonNull Term value) { + return setMapValue(CqlIdentifier.fromCql(columnName), key, value); + } + + /** + * Assigns a value to an index in a list column, as in {@code SET list[?]=?}. + * + *

      This is a shortcut for {@link #set(Assignment) set(Assignment.setListValue(columnId, index, + * value))}. + * + * @see Assignment#setListValue(CqlIdentifier, Term, Term) + */ + @NonNull + default UpdateWithAssignments setListValue( + @NonNull CqlIdentifier columnId, @NonNull Term index, @NonNull Term value) { + return set(Assignment.setListValue(columnId, index, value)); + } + + /** + * Shortcut for {@link #setListValue(CqlIdentifier, Term, Term) + * setListValue(CqlIdentifier.fromCql(columnName), index, value)}. + * + * @see Assignment#setListValue(String, Term, Term) + */ + @NonNull + default UpdateWithAssignments setListValue( @NonNull String columnName, @NonNull Term index, @NonNull Term value) { - return setMapValue(CqlIdentifier.fromCql(columnName), index, value); + return setListValue(CqlIdentifier.fromCql(columnName), index, value); } /** diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java index bf9e13ef2d5..77cf9ed00ca 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java @@ -60,6 +60,16 @@ public void should_generate_map_value_assignment() { .hasCql("UPDATE foo SET features['color']=? WHERE k=?"); } + @Test + public void should_generate_list_value_assignment() { + assertThat( + update("foo") + .setListValue("features", literal(1), bindMarker()) + .whereColumn("k") + .isEqualTo(bindMarker())) + .hasCql("UPDATE foo SET features[1]=? 
WHERE k=?"); + } + @Test public void should_generate_counter_operations() { assertThat(update("foo").increment("c").whereColumn("k").isEqualTo(bindMarker())) From 153ca4ca1f81a86b174fa9238c0ea9d7e2748af7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:12:35 -0300 Subject: [PATCH 189/979] JAVA-2528: Store suppressed exceptions in AllNodesFailedException (#1381) --- changelog/README.md | 1 + .../driver/api/core/AllNodesFailedException.java | 13 ++++++++++++- .../api/core/AllNodesFailedExceptionTest.java | 5 +++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 0175faa03c1..568caa0a78f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2528: Store suppressed exceptions in AllNodesFailedException - [new feature] JAVA-2581: Add query builder support for indexed list assignments - [improvement] JAVA-2596: Consider collection removals as idempotent in query builder - [bug] JAVA-2555: Generate append/prepend constructs compatible with legacy C* versions diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java index 324e7756fb6..3f00e0b474c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/AllNodesFailedException.java @@ -31,7 +31,8 @@ /** * Thrown when a query failed on all the coordinators it was tried on. This exception may wrap - * multiple errors, use {@link #getAllErrors()} to inspect individual problems on each node. + * multiple errors, that are available either as {@linkplain #getSuppressed() suppressed + * exceptions}, or via {@link #getAllErrors()} where they are grouped by node. 
*/ public class AllNodesFailedException extends DriverException { @@ -65,6 +66,7 @@ protected AllNodesFailedException( @NonNull Map errors) { super(message, executionInfo, null, true); this.errors = toDeepImmutableMap(groupByNode(errors)); + addSuppressedErrors(); } protected AllNodesFailedException( @@ -73,6 +75,15 @@ protected AllNodesFailedException( @NonNull Iterable>> errors) { super(message, executionInfo, null, true); this.errors = toDeepImmutableMap(errors); + addSuppressedErrors(); + } + + private void addSuppressedErrors() { + for (List errors : this.errors.values()) { + for (Throwable error : errors) { + addSuppressed(error); + } + } } private AllNodesFailedException(Map> errors) { diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java index 020cb47a9b2..5c5ad335ef6 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/AllNodesFailedExceptionTest.java @@ -63,6 +63,7 @@ public void should_create_instance_from_map_of_first_errors() { .hasEntrySatisfying(node2, list -> assertThat(list).containsExactly(e2)); assertThat(e.getErrors()).containsEntry(node1, e1); assertThat(e.getErrors()).containsEntry(node2, e2); + assertThat(e).hasSuppressedException(e1).hasSuppressedException(e2); } @SuppressWarnings("deprecation") @@ -89,5 +90,9 @@ public void should_create_instance_from_list_of_all_errors() { .hasEntrySatisfying(node2, list -> assertThat(list).containsExactly(e2a)); assertThat(e.getErrors()).containsEntry(node1, e1a); assertThat(e.getErrors()).containsEntry(node2, e2a); + assertThat(e) + .hasSuppressedException(e1a) + .hasSuppressedException(e1b) + .hasSuppressedException(e2a); } } From 3cb3de726ca99024b2396107cc5cf224d65dc318 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:14:27 -0300 Subject: [PATCH 
190/979] JAVA-2547: Add method DriverConfigLoader.fromPath (#1382) --- changelog/README.md | 1 + .../api/core/config/DriverConfigLoader.java | 26 +++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 568caa0a78f..4d048494432 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2547: Add method DriverConfigLoader.fromPath - [improvement] JAVA-2528: Store suppressed exceptions in AllNodesFailedException - [new feature] JAVA-2581: Add query builder support for indexed list assignments - [improvement] JAVA-2596: Consider collection removals as idempotent in query builder diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index 04c2b156f1b..05f615a3e9b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -24,6 +24,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.net.URL; +import java.nio.file.Path; import java.util.concurrent.CompletionStage; /** @@ -69,6 +70,31 @@ static DriverConfigLoader fromClasspath(@NonNull String resourceBaseName) { }); } + /** + * Builds an instance using the driver's default implementation (based on Typesafe config), except + * that application-specific options are loaded from the given path. + * + *

      More precisely, configuration properties are loaded and merged from the following + * (first-listed are higher priority): + * + *

        + *
      • system properties + *
      • the contents of {@code file} + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core driver JAR, that defines + * default options for all mandatory options. + *
      + * + * The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      The returned loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + */ + @NonNull + static DriverConfigLoader fromPath(@NonNull Path file) { + return fromFile(file.toFile()); + } + /** * Builds an instance using the driver's default implementation (based on Typesafe config), except * that application-specific options are loaded from the given file. From 8c5ab3491f5f8dac492144ce44b3ecb92e23106e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:24:23 -0300 Subject: [PATCH 191/979] JAVA-2530: Expose shortcuts for name-based UUIDs (#1383) --- changelog/README.md | 1 + .../oss/driver/api/core/uuid/Uuids.java | 210 ++++++++++++++++- .../oss/driver/api/core/uuid/UuidsTest.java | 211 +++++++++++++++++- 3 files changed, 420 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 4d048494432..838267fa882 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2530: Expose shortcuts for name-based UUIDs - [improvement] JAVA-2547: Add method DriverConfigLoader.fromPath - [improvement] JAVA-2528: Store suppressed exceptions in AllNodesFailedException - [new feature] JAVA-2581: Add query builder support for indexed list assignments diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 0356f7e52f1..9e2b344c2a7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -25,12 +25,14 @@ import java.net.NetworkInterface; import java.net.SocketException; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import 
java.util.HashSet; +import java.util.Objects; import java.util.Properties; import java.util.Random; import java.util.Set; @@ -75,6 +77,37 @@ public final class Uuids { /** The system property to use to force the value of the process ID ({@value}). */ public static final String PID_SYSTEM_PROPERTY = "com.datastax.oss.driver.PID"; + /** + * The namespace UUID for URLs, as defined in Appendix C of RFC-4122. When using + * this namespace to create a name-based UUID, it is expected that the name part be a valid {@link + * java.net.URL URL}. + */ + public static final UUID NAMESPACE_URL = UUID.fromString("6ba7b811-9dad-11d1-80b4-00c04fd430c8"); + + /** + * The namespace UUID for fully-qualified domain names, as defined in Appendix C of RFC-4122. When using + * this namespace to create a name-based UUID, it is expected that the name part be a valid domain + * name. + */ + public static final UUID NAMESPACE_DNS = UUID.fromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8"); + + /** + * The namespace UUID for OIDs, as defined in Appendix C of RFC-4122. When using + * this namespace to create a name-based UUID, it is expected that the name part be an ISO OID. + */ + public static final UUID NAMESPACE_OID = UUID.fromString("6ba7b812-9dad-11d1-80b4-00c04fd430c8"); + + /** + * The namespace UUID for X.500 domain names, as defined in Appendix C of RFC-4122. When using + * this namespace to create a name-based UUID, it is expected that the name part be a valid X.500 + * domain name, in DER or a text output format. + */ + public static final UUID NAMESPACE_X500 = UUID.fromString("6ba7b814-9dad-11d1-80b4-00c04fd430c8"); + private static final Logger LOG = LoggerFactory.getLogger(Uuids.class); private Uuids() {} @@ -202,13 +235,188 @@ private static long makeClockSeqAndNode() { /** * Creates a new random (version 4) UUID. * - *

      This method is just a convenience for {@code UUID.randomUUID()}. + *

      This method is just a convenience for {@link UUID#randomUUID()}. */ @NonNull public static UUID random() { return UUID.randomUUID(); } + /** + * Creates a new name-based (version 3) {@link UUID} from the given namespace UUID and the given + * string representing the name part. + * + *

      Note that the given string will be converted to bytes using {@link StandardCharsets#UTF_8}. + * + * @param namespace The namespace UUID to use; cannot be null. + * @param name The name part; cannot be null. + * @throws NullPointerException if namespace or name is null. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for version 3 (MD5) is not + * available on this platform. + */ + @NonNull + public static UUID nameBased(@NonNull UUID namespace, @NonNull String name) { + Objects.requireNonNull(name, "name cannot be null"); + return nameBased(namespace, name.getBytes(StandardCharsets.UTF_8)); + } + + /** + * Creates a new name-based (version 3) {@link UUID} from the given namespace UUID and the given + * byte array representing the name part. + * + * @param namespace The namespace UUID to use; cannot be null. + * @param name The name part; cannot be null. + * @throws NullPointerException if namespace or name is null. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for version 3 (MD5) is not + * available on this platform. + */ + @NonNull + public static UUID nameBased(@NonNull UUID namespace, @NonNull byte[] name) { + return nameBased(namespace, name, 3); + } + + /** + * Creates a new name-based (version 3 or version 5) {@link UUID} from the given namespace UUID + * and the given string representing the name part. + * + *

      Note that the given string will be converted to bytes using {@link StandardCharsets#UTF_8}. + * + * @param namespace The namespace UUID to use; cannot be null. + * @param name The name part; cannot be null. + * @param version The version to use, must be either 3 or 5; version 3 uses MD5 as its {@link + * MessageDigest} algorithm, while version 5 uses SHA-1. + * @throws NullPointerException if namespace or name is null. + * @throws IllegalArgumentException if version is not 3 nor 5. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for the desired version is + * not available on this platform. + */ + @NonNull + public static UUID nameBased(@NonNull UUID namespace, @NonNull String name, int version) { + Objects.requireNonNull(name, "name cannot be null"); + return nameBased(namespace, name.getBytes(StandardCharsets.UTF_8), version); + } + + /** + * Creates a new name-based (version 3 or version 5) {@link UUID} from the given namespace UUID + * and the given byte array representing the name part. + * + * @param namespace The namespace UUID to use; cannot be null. + * @param name The name to use; cannot be null. + * @param version The version to use, must be either 3 or 5; version 3 uses MD5 as its {@link + * MessageDigest} algorithm, while version 5 uses SHA-1. + * @throws NullPointerException if namespace or name is null. + * @throws IllegalArgumentException if version is not 3 nor 5. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for the desired version is + * not available on this platform. 
+ */ + @NonNull + public static UUID nameBased(@NonNull UUID namespace, @NonNull byte[] name, int version) { + Objects.requireNonNull(namespace, "namespace cannot be null"); + Objects.requireNonNull(name, "name cannot be null"); + MessageDigest md = newMessageDigest(version); + md.update(toBytes(namespace)); + md.update(name); + return buildNamedUuid(md.digest(), version); + } + + /** + * Creates a new name-based (version 3) {@link UUID} from the given byte array containing the + * namespace UUID and the name parts concatenated together. + * + *

      The byte array is expected to be at least 16 bytes long. + * + * @param namespaceAndName A byte array containing the concatenated namespace UUID and name; + * cannot be null. + * @throws NullPointerException if namespaceAndName is null. + * @throws IllegalArgumentException if namespaceAndName is not at least 16 bytes + * long. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for version 3 (MD5) is not + * available on this platform. + */ + @NonNull + public static UUID nameBased(@NonNull byte[] namespaceAndName) { + return nameBased(namespaceAndName, 3); + } + + /** + * Creates a new name-based (version 3 or version 5) {@link UUID} from the given byte array + * containing the namespace UUID and the name parts concatenated together. + * + *

      The byte array is expected to be at least 16 bytes long. + * + * @param namespaceAndName A byte array containing the concatenated namespace UUID and name; + * cannot be null. + * @param version The version to use, must be either 3 or 5. + * @throws NullPointerException if namespaceAndName is null. + * @throws IllegalArgumentException if namespaceAndName is not at least 16 bytes + * long. + * @throws IllegalArgumentException if version is not 3 nor 5. + * @throws IllegalStateException if the {@link MessageDigest} algorithm for the desired version is + * not available on this platform. + */ + @NonNull + public static UUID nameBased(@NonNull byte[] namespaceAndName, int version) { + Objects.requireNonNull(namespaceAndName, "namespaceAndName cannot be null"); + if (namespaceAndName.length < 16) { + throw new IllegalArgumentException("namespaceAndName must be at least 16 bytes long"); + } + MessageDigest md = newMessageDigest(version); + md.update(namespaceAndName); + return buildNamedUuid(md.digest(), version); + } + + @NonNull + private static MessageDigest newMessageDigest(int version) { + if (version != 3 && version != 5) { + throw new IllegalArgumentException( + "Invalid name-based UUID version, expecting 3 or 5, got: " + version); + } + String algorithm = version == 3 ? 
"MD5" : "SHA-1"; + try { + return MessageDigest.getInstance(algorithm); + } catch (NoSuchAlgorithmException e) { + throw new IllegalStateException(algorithm + " algorithm not available", e); + } + } + + @NonNull + private static UUID buildNamedUuid(@NonNull byte[] data, int version) { + // clear and set version + data[6] &= (byte) 0x0f; + data[6] |= (byte) (version << 4); + // clear and set variant to IETF + data[8] &= (byte) 0x3f; + data[8] |= (byte) 0x80; + return fromBytes(data); + } + + private static UUID fromBytes(byte[] data) { + // data longer than 16 bytes will be truncated as mandated by the specs + assert data.length >= 16; + long msb = 0; + for (int i = 0; i < 8; i++) { + msb = (msb << 8) | (data[i] & 0xff); + } + long lsb = 0; + for (int i = 8; i < 16; i++) { + lsb = (lsb << 8) | (data[i] & 0xff); + } + return new UUID(msb, lsb); + } + + private static byte[] toBytes(UUID uuid) { + byte[] out = new byte[16]; + long msb = uuid.getMostSignificantBits(); + for (int i = 0; i < 8; i++) { + out[i] = (byte) (msb >> (7 - i) * 8); + } + long lsb = uuid.getLeastSignificantBits(); + for (int i = 8; i < 16; i++) { + out[i] = (byte) (lsb >> (15 - i) * 8); + } + return out; + } + /** * Creates a new time-based (version 1) UUID. 
* diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java index da51e00f366..848aebc7f7a 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java @@ -16,19 +16,171 @@ package com.datastax.oss.driver.api.core.uuid; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; import java.util.HashSet; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentSkipListSet; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class UuidsTest { + @Test + @UseDataProvider("byteArrayNames") + public void should_generate_name_based_uuid_from_namespace_and_byte_array( + UUID namespace, byte[] name) throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespace, name); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(3); + assertUuid(namespace, name, 3, actual); + } + + @DataProvider + public static Object[][] byteArrayNames() { + return new Object[][] { + {Uuids.NAMESPACE_DNS, new byte[] {}}, {Uuids.NAMESPACE_URL, new byte[] {1, 2, 3, 4}}, + }; + } + + @Test + @UseDataProvider("byteArrayNamesWithVersions") + public void 
should_generate_name_based_uuid_from_namespace_byte_array_and_version( + UUID namespace, byte[] name, int version) throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespace, name, version); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(version); + assertUuid(namespace, name, version, actual); + } + + @DataProvider + public static Object[][] byteArrayNamesWithVersions() { + return new Object[][] { + {Uuids.NAMESPACE_DNS, new byte[] {}, 3}, + {Uuids.NAMESPACE_URL, new byte[] {1, 2, 3, 4}, 3}, + {Uuids.NAMESPACE_OID, new byte[] {}, 5}, + {Uuids.NAMESPACE_X500, new byte[] {1, 2, 3, 4}, 5}, + }; + } + + @Test + @UseDataProvider("stringNames") + public void should_generate_name_based_uuid_from_namespace_and_string(UUID namespace, String name) + throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespace, name); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(3); + assertUuid(namespace, name, 3, actual); + } + + @DataProvider + public static Object[][] stringNames() { + return new Object[][] { + {Uuids.NAMESPACE_DNS, ""}, {Uuids.NAMESPACE_URL, "Hello world!"}, {Uuids.NAMESPACE_OID, "你好"}, + }; + } + + @Test + @UseDataProvider("stringNamesWithVersions") + public void should_generate_name_based_uuid_from_namespace_string_and_version( + UUID namespace, String name, int version) throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespace, name, version); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(version); + assertUuid(namespace, name, version, actual); + } + + @DataProvider + public static Object[][] stringNamesWithVersions() { + return new Object[][] { + {Uuids.NAMESPACE_DNS, "", 3}, + {Uuids.NAMESPACE_URL, "Hello world!", 3}, + {Uuids.NAMESPACE_OID, "你好", 3}, + {Uuids.NAMESPACE_DNS, "", 5}, + {Uuids.NAMESPACE_URL, "Hello world!", 5}, + {Uuids.NAMESPACE_OID, "你好", 5}, + }; + } + 
+ @Test + @UseDataProvider("concatenatedData") + public void should_generate_name_based_uuid_from_concatenated_data(byte[] namespaceAndName) + throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespaceAndName); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(3); + assertUuid(namespaceAndName, 3, actual); + } + + @DataProvider + public static Object[][] concatenatedData() { + return new Object[][] { + {concat(Uuids.NAMESPACE_DNS, new byte[] {})}, + {concat(Uuids.NAMESPACE_URL, new byte[] {1, 2, 3, 4})}, + }; + } + + @Test + @UseDataProvider("concatenatedDataWithVersions") + public void should_generate_name_based_uuid_from_concatenated_data_and_version( + byte[] namespaceAndName, int version) throws NoSuchAlgorithmException { + // when + UUID actual = Uuids.nameBased(namespaceAndName, version); + // then + assertThat(actual).isNotNull(); + assertThat(actual.version()).isEqualTo(version); + assertUuid(namespaceAndName, version, actual); + } + + @DataProvider + public static Object[][] concatenatedDataWithVersions() { + return new Object[][] { + {concat(Uuids.NAMESPACE_DNS, new byte[] {}), 3}, + {concat(Uuids.NAMESPACE_URL, new byte[] {1, 2, 3, 4}), 3}, + {concat(Uuids.NAMESPACE_DNS, new byte[] {}), 5}, + {concat(Uuids.NAMESPACE_URL, new byte[] {1, 2, 3, 4}), 5}, + }; + } + + @Test + public void should_throw_when_invalid_version() { + Throwable error = catchThrowable(() -> Uuids.nameBased(Uuids.NAMESPACE_URL, "irrelevant", 1)); + assertThat(error) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Invalid name-based UUID version, expecting 3 or 5, got: 1"); + } + + @Test + public void should_throw_when_invalid_data() { + Throwable error = catchThrowable(() -> Uuids.nameBased(new byte[] {1}, 3)); + assertThat(error) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("namespaceAndName must be at least 16 bytes long"); + } + @Test public void should_generate_timestamp_within_10_ms() { 
@@ -101,7 +253,7 @@ public void should_generate_within_bounds_for_given_timestamp() { int uuidsPerTimestamp = 10; for (int i = 0; i < timestampsCount; i++) { - long timestamp = (long) random.nextInt(); + long timestamp = random.nextInt(); for (int j = 0; j < uuidsPerTimestamp; j++) { UUID uuid = new UUID(Uuids.makeMsb(Uuids.fromUnixTimestamp(timestamp)), random.nextLong()); assertBetween(uuid, Uuids.startOf(timestamp), Uuids.endOf(timestamp)); @@ -114,6 +266,9 @@ private static void assertBetween(UUID uuid, UUID lowerBound, UUID upperBound) { ByteBuffer uuidBytes = TypeCodecs.UUID.encode(uuid, DefaultProtocolVersion.V3); ByteBuffer lb = TypeCodecs.UUID.encode(lowerBound, DefaultProtocolVersion.V3); ByteBuffer ub = TypeCodecs.UUID.encode(upperBound, DefaultProtocolVersion.V3); + assertThat(uuidBytes).isNotNull(); + assertThat(lb).isNotNull(); + assertThat(ub).isNotNull(); assertThat(compareTimestampBytes(lb, uuidBytes)).isLessThanOrEqualTo(0); assertThat(compareTimestampBytes(ub, uuidBytes)).isGreaterThanOrEqualTo(0); } @@ -153,6 +308,60 @@ private static int compareTimestampBytes(ByteBuffer o1, ByteBuffer o2) { return (o1.get(o1Pos + 3) & 0xFF) - (o2.get(o2Pos + 3) & 0xFF); } + private static void assertUuid(UUID namespace, String name, int version, UUID actual) + throws NoSuchAlgorithmException { + assertUuid(namespace, name.getBytes(StandardCharsets.UTF_8), version, actual); + } + + private static void assertUuid(UUID namespace, byte[] name, int version, UUID actual) + throws NoSuchAlgorithmException { + byte[] data = digest(namespace, name, version); + assertThat(longToBytes(actual.getMostSignificantBits())) + .isEqualTo(Arrays.copyOfRange(data, 0, 8)); + assertThat(longToBytes(actual.getLeastSignificantBits())) + .isEqualTo(Arrays.copyOfRange(data, 8, 16)); + } + + private static void assertUuid(byte[] namespaceAndName, int version, UUID actual) + throws NoSuchAlgorithmException { + byte[] data = digest(namespaceAndName, version); + 
assertThat(longToBytes(actual.getMostSignificantBits())) + .isEqualTo(Arrays.copyOfRange(data, 0, 8)); + assertThat(longToBytes(actual.getLeastSignificantBits())) + .isEqualTo(Arrays.copyOfRange(data, 8, 16)); + } + + private static byte[] digest(UUID namespace, byte[] name, int version) + throws NoSuchAlgorithmException { + byte[] namespaceAndName = concat(namespace, name); + return digest(namespaceAndName, version); + } + + private static byte[] digest(byte[] namespaceAndName, int version) + throws NoSuchAlgorithmException { + MessageDigest result; + String algorithm = version == 3 ? "MD5" : "SHA-1"; + result = MessageDigest.getInstance(algorithm); + byte[] digest = result.digest(namespaceAndName); + digest[6] &= (byte) 0x0f; + digest[6] |= (byte) (version << 4); + digest[8] &= (byte) 0x3f; + digest[8] |= (byte) 0x80; + return digest; + } + + private static byte[] concat(UUID namespace, byte[] name) { + return ByteBuffer.allocate(16 + name.length) + .putLong(namespace.getMostSignificantBits()) + .putLong(namespace.getLeastSignificantBits()) + .put(name) + .array(); + } + + private static byte[] longToBytes(long x) { + return ByteBuffer.allocate(Long.BYTES).putLong(x).array(); + } + private static class UUIDGenerator extends Thread { private final int toGenerate; From cce68e79a1249b856789abeb0e71067df6fb180d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:25:22 -0300 Subject: [PATCH 192/979] JAVA-2541: Improve error messages during connection initialization (#1385) --- changelog/README.md | 1 + .../core/channel/ChannelHandlerRequest.java | 11 ++-- .../core/channel/HeartbeatHandler.java | 2 +- .../core/channel/InFlightHandler.java | 2 +- .../core/channel/ProtocolInitHandler.java | 51 ++++++++++++++----- .../core/channel/ProtocolInitHandlerTest.java | 2 +- 6 files changed, 49 insertions(+), 20 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 838267fa882..c322951418e 100644 --- a/changelog/README.md +++ 
b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2541: Improve error messages during connection initialization - [improvement] JAVA-2530: Expose shortcuts for name-based UUIDs - [improvement] JAVA-2547: Add method DriverConfigLoader.fromPath - [improvement] JAVA-2528: Store suppressed exceptions in AllNodesFailedException diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java index 0a977b97573..db30a476f0b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java @@ -71,7 +71,9 @@ private void writeListener(Future writeFuture) { timeoutFuture = channel.eventLoop().schedule(this::onTimeout, timeoutMillis, TimeUnit.MILLISECONDS); } else { - fail(describe() + ": error writing ", writeFuture.cause()); + String message = + String.format("%s: failed to send request (%s)", describe(), writeFuture.cause()); + fail(message, writeFuture.cause()); } } @@ -87,7 +89,8 @@ public final void onFailure(Throwable error) { if (timeoutFuture != null) { timeoutFuture.cancel(true); } - fail(describe() + ": unexpected failure", error); + String message = String.format("%s: unexpected failure (%s)", describe(), error); + fail(message, error); } private void onTimeout() { @@ -104,13 +107,13 @@ void failOnUnexpected(Message response) { fail( new IllegalArgumentException( String.format( - "%s: unexpected server error [%s] %s", + "%s: server replied with unexpected error code [%s]: %s", describe(), ProtocolUtils.errorCodeString(error.code), error.message))); } else { fail( new IllegalArgumentException( String.format( - "%s: unexpected server response opcode=%s", + "%s: server replied with unexpected response type (opcode=%s)", describe(), 
ProtocolUtils.opcodeString(response.opcode)))); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/HeartbeatHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/HeartbeatHandler.java index 0c4dba9ffc4..605de2ad5f9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/HeartbeatHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/HeartbeatHandler.java @@ -71,7 +71,7 @@ private class HeartbeatRequest extends ChannelHandlerRequest { @Override String describe() { - return "heartbeat"; + return "Heartbeat request"; } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index df34cdef58f..86a2f2090ab 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -395,7 +395,7 @@ private class SetKeyspaceRequest extends ChannelHandlerRequest { @Override String describe() { - return "[" + logPrefix + "] set keyspace " + keyspaceName; + return "[" + logPrefix + "] Set keyspace request (USE " + keyspaceName.asCql(true) + ")"; } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index b3662ee2419..f958e0b6477 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -48,6 +48,7 @@ import io.netty.channel.ChannelHandlerContext; import java.nio.ByteBuffer; import java.util.List; +import java.util.Objects; import net.jcip.annotations.NotThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ 
-140,6 +141,8 @@ private class InitRequest extends ChannelHandlerRequest { // This class is a finite-state automaton, that sends a different query depending on the step // in the initialization sequence. private Step step; + private int stepNumber = 0; + private Message request; private Authenticator authenticator; private ByteBuffer authResponseToken; @@ -150,29 +153,36 @@ private class InitRequest extends ChannelHandlerRequest { @Override String describe() { - return "[" + logPrefix + "] init query " + step; + return String.format( + "[%s] Protocol initialization request, step %d (%s)", logPrefix, stepNumber, request); } @Override Message getRequest() { switch (step) { case OPTIONS: - return Options.INSTANCE; + return request = Options.INSTANCE; case STARTUP: - return new Startup(context.getStartupOptions()); + return request = new Startup(context.getStartupOptions()); case GET_CLUSTER_NAME: - return CLUSTER_NAME_QUERY; + return request = CLUSTER_NAME_QUERY; case SET_KEYSPACE: - return new Query("USE " + options.keyspace.asCql(false)); + return request = new Query("USE " + options.keyspace.asCql(false)); case AUTH_RESPONSE: - return new AuthResponse(authResponseToken); + return request = new AuthResponse(authResponseToken); case REGISTER: - return new Register(options.eventTypes); + return request = new Register(options.eventTypes); default: throw new AssertionError("unhandled step: " + step); } } + @Override + void send() { + stepNumber++; + super.send(); + } + @Override void onResponse(Message response) { LOG.debug( @@ -199,7 +209,11 @@ void onResponse(Message response) { if (error != null) { fail( new AuthenticationException( - endPoint, "authenticator threw an exception", error)); + endPoint, + String.format( + "Authenticator.initialResponse(): stage completed exceptionally (%s)", + error), + error)); } else { step = Step.AUTH_RESPONSE; authResponseToken = token; @@ -217,7 +231,11 @@ void onResponse(Message response) { if (error != null) { fail( new 
AuthenticationException( - endPoint, "authenticator threw an exception", error)); + endPoint, + String.format( + "Authenticator.evaluateChallenge(): stage completed exceptionally (%s)", + error), + error)); } else { step = Step.AUTH_RESPONSE; authResponseToken = token; @@ -235,7 +253,11 @@ void onResponse(Message response) { if (error != null) { fail( new AuthenticationException( - endPoint, "authenticator threw an exception", error)); + endPoint, + String.format( + "Authenticator.onAuthenticationSuccess(): stage completed exceptionally (%s)", + error), + error)); } else { step = Step.GET_CLUSTER_NAME; send(); @@ -248,10 +270,13 @@ void onResponse(Message response) { && ((Error) response).code == ProtocolConstants.ErrorCode.AUTH_ERROR) { fail( new AuthenticationException( - endPoint, String.format("server replied '%s'", ((Error) response).message))); + endPoint, + String.format( + "server replied with '%s' to AuthResponse request", + ((Error) response).message))); } else if (step == Step.GET_CLUSTER_NAME && response instanceof Rows) { Rows rows = (Rows) response; - List row = rows.getData().poll(); + List row = Objects.requireNonNull(rows.getData().poll()); String actualClusterName = getString(row, 0); if (expectedClusterName != null && !expectedClusterName.equals(actualClusterName)) { fail( @@ -307,7 +332,7 @@ void onResponse(Message response) { } catch (AuthenticationException e) { fail(e); } catch (Throwable t) { - fail("Unexpected exception at step " + step, t); + fail(String.format("%s: unexpected exception (%s)", describe(), t), t); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java index 9a2ff781b23..5df99bb4200 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java @@ -394,7 +394,7 @@ public void should_fail_to_initialize_if_server_sends_auth_error() throws Throwa .isInstanceOf(AuthenticationException.class) .hasMessage( String.format( - "Authentication error on node %s: server replied 'mock error'", + "Authentication error on node %s: server replied with 'mock error' to AuthResponse request", END_POINT))); } From e60d92aa81b0c7177a0a128462b2434e4ef3c832 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:42:09 -0300 Subject: [PATCH 193/979] JAVA-2569: Make driver compatible with Netty < 4.1.34 again (#1371) Motivation: Netty's PromiseCombiner class is not thread-safe prior to Netty 4.1.34. Using this class is problematic as it can only work properly if all added futures are using the same event executor. Modifications: Introduce our own promise combiner component that uses proper synchronization to ensure that the aggregate promise is properly completed when the parent futures complete. Replace all current usages of Netty's PromiseCombiner by this new component. Result: Promises can now be combined in a thread-safe manner. 
--- changelog/README.md | 1 + .../core/channel/ConnectInitHandler.java | 8 +- .../core/context/DefaultNettyOptions.java | 19 ++--- .../core/util/concurrent/PromiseCombiner.java | 83 +++++++++++++++++++ .../util/concurrent/PromiseCombinerTest.java | 62 ++++++++++++++ manual/core/shaded_jar/README.md | 4 + 6 files changed, 160 insertions(+), 17 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombiner.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombinerTest.java diff --git a/changelog/README.md b/changelog/README.md index c322951418e..44d614bbed6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.4.0 (in progress) +- [improvement] JAVA-2569: Make driver compatible with Netty < 4.1.34 again - [improvement] JAVA-2541: Improve error messages during connection initialization - [improvement] JAVA-2530: Expose shortcuts for name-based UUIDs - [improvement] JAVA-2547: Add method DriverConfigLoader.fromPath diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java index 3194a95557c..ae88d86b4f2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ConnectInitHandler.java @@ -15,12 +15,10 @@ */ package com.datastax.oss.driver.internal.core.channel; +import com.datastax.oss.driver.internal.core.util.concurrent.PromiseCombiner; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; -import io.netty.util.concurrent.Future; -import io.netty.util.concurrent.ImmediateEventExecutor; -import io.netty.util.concurrent.PromiseCombiner; import java.net.SocketAddress; import net.jcip.annotations.NotThreadSafe; @@ 
-59,9 +57,7 @@ public void connect( realConnectPromise.addListener(future -> onRealConnect(ctx)); // Make the caller's promise wait on the other two: - PromiseCombiner combiner = new PromiseCombiner(ImmediateEventExecutor.INSTANCE); - combiner.addAll(new Future[] {realConnectPromise, initPromise}); - combiner.finish(callerPromise); + PromiseCombiner.combine(callerPromise, realConnectPromise, initPromise); } protected abstract void onRealConnect(ChannelHandlerContext ctx); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java index 10da06c8308..f0a222a3d00 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; +import com.datastax.oss.driver.internal.core.util.concurrent.PromiseCombiner; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.ThreadFactoryBuilder; import io.netty.bootstrap.Bootstrap; import io.netty.buffer.ByteBufAllocator; @@ -34,7 +35,6 @@ import io.netty.util.concurrent.EventExecutorGroup; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GlobalEventExecutor; -import io.netty.util.concurrent.PromiseCombiner; import io.netty.util.internal.PlatformDependent; import java.time.Duration; import java.util.concurrent.ThreadFactory; @@ -173,16 +173,13 @@ public void afterChannelInitialized(Channel channel) { public Future onClose() { DefaultPromise closeFuture = new DefaultPromise<>(GlobalEventExecutor.INSTANCE); GlobalEventExecutor.INSTANCE.execute( - () -> { - PromiseCombiner combiner = new 
PromiseCombiner(GlobalEventExecutor.INSTANCE); - combiner.add( - adminEventLoopGroup.shutdownGracefully( - adminShutdownQuietPeriod, adminShutdownTimeout, adminShutdownUnit)); - combiner.add( - ioEventLoopGroup.shutdownGracefully( - ioShutdownQuietPeriod, ioShutdownTimeout, ioShutdownUnit)); - combiner.finish(closeFuture); - }); + () -> + PromiseCombiner.combine( + closeFuture, + adminEventLoopGroup.shutdownGracefully( + adminShutdownQuietPeriod, adminShutdownTimeout, adminShutdownUnit), + ioEventLoopGroup.shutdownGracefully( + ioShutdownQuietPeriod, ioShutdownTimeout, ioShutdownUnit))); closeFuture.addListener(f -> timer.stop()); return closeFuture; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombiner.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombiner.java new file mode 100644 index 00000000000..dd166551631 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombiner.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import io.netty.util.concurrent.Promise; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import net.jcip.annotations.ThreadSafe; + +/** + * A thread-safe version of Netty's {@link io.netty.util.concurrent.PromiseCombiner} that uses + * proper synchronization to trigger the completion of the aggregate promise. + */ +@ThreadSafe +public class PromiseCombiner { + + /** + * Combines the given futures into the given promise, that is, ties the completion of the latter + * to that of the formers. + * + * @param aggregatePromise The promise that will complete when all parents complete. + * @param parents The parent futures. + */ + public static void combine( + @NonNull Promise aggregatePromise, @NonNull Future... parents) { + PromiseCombinerListener listener = + new PromiseCombinerListener(aggregatePromise, parents.length); + for (Future parent : parents) { + parent.addListener(listener); + } + } + + private static class PromiseCombinerListener implements GenericFutureListener> { + + private final Promise aggregatePromise; + private final AtomicInteger remainingCount; + private final AtomicReference aggregateFailureRef = new AtomicReference<>(); + + private PromiseCombinerListener(Promise aggregatePromise, int numberOfParents) { + this.aggregatePromise = aggregatePromise; + remainingCount = new AtomicInteger(numberOfParents); + } + + @Override + public void operationComplete(Future future) { + if (!future.isSuccess()) { + aggregateFailureRef.updateAndGet( + aggregateFailure -> { + if (aggregateFailure == null) { + aggregateFailure = future.cause(); + } else { + aggregateFailure.addSuppressed(future.cause()); + } + return aggregateFailure; + }); + } + if (remainingCount.decrementAndGet() == 0) { + 
Throwable aggregateFailure = aggregateFailureRef.get(); + if (aggregateFailure != null) { + aggregatePromise.tryFailure(aggregateFailure); + } else { + aggregatePromise.trySuccess(null); + } + } + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombinerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombinerTest.java new file mode 100644 index 00000000000..0b0ce45644d --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/PromiseCombinerTest.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.netty.util.concurrent.EventExecutor; +import io.netty.util.concurrent.ImmediateEventExecutor; +import io.netty.util.concurrent.Promise; +import java.io.IOException; +import org.junit.Test; + +public class PromiseCombinerTest { + + private final EventExecutor executor = ImmediateEventExecutor.INSTANCE; + + @Test + public void should_complete_normally_if_all_parents_complete_normally() { + // given + Promise promise = executor.newPromise(); + Promise parent1 = executor.newPromise(); + Promise parent2 = executor.newPromise(); + // when + PromiseCombiner.combine(promise, parent1, parent2); + parent1.setSuccess(null); + parent2.setSuccess(null); + // then + assertThat(promise.isSuccess()).isTrue(); + } + + @Test + public void should_complete_exceptionally_if_any_parent_completes_exceptionally() { + // given + Promise promise = executor.newPromise(); + Promise parent1 = executor.newPromise(); + Promise parent2 = executor.newPromise(); + Promise parent3 = executor.newPromise(); + NullPointerException npe = new NullPointerException(); + IOException ioe = new IOException(); + // when + PromiseCombiner.combine(promise, parent1, parent2, parent3); + parent1.setSuccess(null); + parent2.setFailure(npe); + parent3.setFailure(ioe); + // then + assertThat(promise.isSuccess()).isFalse(); + assertThat(promise.cause()).isSameAs(npe).hasSuppressedException(ioe); + } +} diff --git a/manual/core/shaded_jar/README.md b/manual/core/shaded_jar/README.md index 8e988418d34..45ed7fd63af 100644 --- a/manual/core/shaded_jar/README.md +++ b/manual/core/shaded_jar/README.md @@ -3,6 +3,10 @@ The default driver JAR depends on [Netty](http://netty.io/), which is used internally for networking. 
+The driver is compatible with all Netty versions in the range [4.1.7, 4.2.0), +that is, it can work with any version equal to or higher than 4.1.7, and +lesser than 4.2.0. + This explicit dependency can be a problem if your application already uses another Netty version. To avoid conflicts, we provide a "shaded" version of the JAR, which bundles the Netty classes under a different From b7ee28e065d67586cf661db5ea1231b384633181 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 15:55:11 -0300 Subject: [PATCH 194/979] Add clarifying parentheses to fix error-prone compilation failure --- .../java/com/datastax/oss/driver/api/core/uuid/Uuids.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 9e2b344c2a7..3cd122d7f97 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -408,11 +408,11 @@ private static byte[] toBytes(UUID uuid) { byte[] out = new byte[16]; long msb = uuid.getMostSignificantBits(); for (int i = 0; i < 8; i++) { - out[i] = (byte) (msb >> (7 - i) * 8); + out[i] = (byte) (msb >> ((7 - i) * 8)); } long lsb = uuid.getLeastSignificantBits(); for (int i = 8; i < 16; i++) { - out[i] = (byte) (lsb >> (15 - i) * 8); + out[i] = (byte) (lsb >> ((15 - i) * 8)); } return out; } From 29067ed6209a66475ec404f5f7f686a38feb4143 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 11:40:09 -0700 Subject: [PATCH 195/979] Bump native-protocol to 1.4.8 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 09f64bf97a8..e7eba6b772c 100644 --- a/pom.xml +++ b/pom.xml @@ -45,7 +45,7 @@ 25.1-jre 2.1.11 4.0.5 - 1.4.7 + 1.4.8 4.1.39.Final 1.7.26 From e5606668107d8dad2aa2ba43cdfb3b3db63912a8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 
2019 15:14:24 -0700 Subject: [PATCH 196/979] Copy sources from riptano/java-dse-driver@0a187cec6 (except mapper) cp -R ../java-dse-driver-2.x/core/src/* core/src cp -R ../java-dse-driver-2.x/integration-tests/src/* integration-tests/src cp -R ../java-dse-driver-2.x/query-builder/src/* query-builder/src --- .../datastax/dse/driver/DseSessionMetric.java | 49 + .../driver/api/core/DseProtocolVersion.java | 48 + .../dse/driver/api/core/DseSession.java | 47 + .../driver/api/core/DseSessionBuilder.java | 27 + .../api/core/DseSessionBuilderBase.java | 242 +++ .../api/core/auth/BaseDseAuthenticator.java | 81 ++ .../core/auth/DseGssApiAuthProviderBase.java | 324 +++++ .../auth/DsePlainTextAuthProviderBase.java | 195 +++ .../api/core/auth/ProxyAuthentication.java | 71 + .../core/config/DseDriverConfigLoader.java | 200 +++ .../api/core/config/DseDriverOption.java | 180 +++ .../continuous/ContinuousAsyncResultSet.java | 68 + .../cql/continuous/ContinuousResultSet.java | 69 + .../cql/continuous/ContinuousSession.java | 99 ++ .../reactive/ContinuousReactiveResultSet.java | 18 + .../reactive/ContinuousReactiveSession.java | 62 + .../cql/reactive/ReactiveQueryMetadata.java | 77 + .../core/cql/reactive/ReactiveResultSet.java | 38 + .../api/core/cql/reactive/ReactiveRow.java | 82 ++ .../core/cql/reactive/ReactiveSession.java | 53 + .../api/core/data/geometry/Geometry.java | 54 + .../api/core/data/geometry/LineString.java | 80 + .../driver/api/core/data/geometry/Point.java | 92 ++ .../api/core/data/geometry/Polygon.java | 116 ++ .../driver/api/core/data/time/DateRange.java | 246 ++++ .../api/core/data/time/DateRangeBound.java | 201 +++ .../core/data/time/DateRangePrecision.java | 186 +++ .../api/core/graph/AsyncGraphResultSet.java | 77 + .../api/core/graph/BatchGraphStatement.java | 138 ++ .../graph/BatchGraphStatementBuilder.java | 101 ++ .../dse/driver/api/core/graph/DseGraph.java | 51 + .../DseGraphRemoteConnectionBuilder.java | 50 + .../api/core/graph/FluentGraphStatement.java | 
81 ++ .../graph/FluentGraphStatementBuilder.java | 53 + .../api/core/graph/GraphExecutionInfo.java | 76 + .../dse/driver/api/core/graph/GraphNode.java | 219 +++ .../driver/api/core/graph/GraphResultSet.java | 73 + .../driver/api/core/graph/GraphSession.java | 66 + .../driver/api/core/graph/GraphStatement.java | 367 +++++ .../core/graph/GraphStatementBuilderBase.java | 178 +++ .../api/core/graph/ScriptGraphStatement.java | 159 ++ .../graph/ScriptGraphStatementBuilder.java | 113 ++ .../driver/api/core/graph/predicates/Geo.java | 149 ++ .../api/core/graph/predicates/Search.java | 125 ++ .../api/core/metadata/DseNodeProperties.java | 126 ++ .../metadata/schema/DseAggregateMetadata.java | 78 + .../metadata/schema/DseColumnMetadata.java | 17 + .../metadata/schema/DseFunctionMetadata.java | 123 ++ .../metadata/schema/DseIndexMetadata.java | 17 + .../metadata/schema/DseKeyspaceMetadata.java | 21 + .../metadata/schema/DseRelationMetadata.java | 20 + .../metadata/schema/DseTableMetadata.java | 22 + .../core/metadata/schema/DseViewMetadata.java | 20 + .../servererrors/UnfitClientException.java | 44 + .../session/DseProgrammaticArguments.java | 85 ++ .../driver/api/core/type/DseDataTypes.java | 26 + .../api/core/type/codec/DseTypeCodecs.java | 30 + .../internal/core/DseProtocolFeature.java | 27 + .../core/DseProtocolVersionRegistry.java | 186 +++ .../core/InsightsClientLifecycleListener.java | 55 + .../driver/internal/core/auth/AuthUtils.java | 47 + .../core/auth/DseGssApiAuthProvider.java | 212 +++ .../core/auth/DsePlainTextAuthProvider.java | 71 + .../DseProgrammaticPlainTextAuthProvider.java | 33 + .../DefaultDseDriverConfigLoader.java | 94 ++ .../core/context/DseDriverContext.java | 297 ++++ .../context/DseStartupOptionsBuilder.java | 108 ++ .../internal/core/cql/DseConversions.java | 144 ++ .../ContinuousCqlRequestAsyncProcessor.java | 45 + .../ContinuousCqlRequestHandler.java | 1294 +++++++++++++++++ .../ContinuousCqlRequestSyncProcessor.java | 56 + 
.../DefaultContinuousAsyncResultSet.java | 158 ++ .../DefaultContinuousResultSet.java | 121 ++ ...ContinuousCqlRequestReactiveProcessor.java | 52 + .../DefaultContinuousReactiveResultSet.java | 24 + .../reactive/CqlRequestReactiveProcessor.java | 51 + .../reactive/DefaultReactiveResultSet.java | 20 + .../core/cql/reactive/DefaultReactiveRow.java | 557 +++++++ .../core/cql/reactive/EmptySubscription.java | 22 + .../core/cql/reactive/FailedPublisher.java | 38 + .../cql/reactive/FailedReactiveResultSet.java | 49 + .../core/cql/reactive/ReactiveOperators.java | 50 + .../cql/reactive/ReactiveResultSetBase.java | 94 ++ .../ReactiveResultSetSubscription.java | 485 ++++++ .../cql/reactive/SimpleUnicastProcessor.java | 254 ++++ .../core/data/geometry/DefaultGeometry.java | 191 +++ .../core/data/geometry/DefaultLineString.java | 57 + .../core/data/geometry/DefaultPoint.java | 53 + .../core/data/geometry/DefaultPolygon.java | 120 ++ .../internal/core/data/geometry/Distance.java | 226 +++ .../geometry/DistanceSerializationProxy.java | 32 + .../data/geometry/WkbSerializationProxy.java | 49 + .../internal/core/data/geometry/WkbUtil.java | 109 ++ .../core/graph/BytecodeGraphStatement.java | 118 ++ .../graph/DefaultAsyncGraphResultSet.java | 105 ++ .../graph/DefaultBatchGraphStatement.java | 143 ++ .../DefaultDseRemoteConnectionBuilder.java | 43 + .../graph/DefaultFluentGraphStatement.java | 95 ++ .../core/graph/DefaultGraphExecutionInfo.java | 84 ++ .../graph/DefaultScriptGraphStatement.java | 191 +++ .../core/graph/DseGraphRemoteConnection.java | 66 + .../core/graph/DseGraphTraversal.java | 59 + .../internal/core/graph/DsePredicate.java | 24 + .../internal/core/graph/EditDistance.java | 63 + .../internal/core/graph/GeoPredicate.java | 100 ++ .../driver/internal/core/graph/GeoUtils.java | 16 + .../internal/core/graph/GraphConversions.java | 231 +++ .../graph/GraphRequestAsyncProcessor.java | 42 + .../core/graph/GraphRequestHandler.java | 661 +++++++++ 
.../core/graph/GraphRequestSyncProcessor.java | 53 + .../internal/core/graph/GraphResultSets.java | 21 + .../internal/core/graph/GraphSON1SerdeTP.java | 335 +++++ .../internal/core/graph/GraphSON2SerdeTP.java | 419 ++++++ .../internal/core/graph/GraphSON3SerdeTP.java | 428 ++++++ .../internal/core/graph/GraphSONUtils.java | 140 ++ .../core/graph/GraphStatementBase.java | 402 +++++ .../internal/core/graph/LegacyGraphNode.java | 312 ++++ .../internal/core/graph/ObjectGraphNode.java | 233 +++ .../internal/core/graph/SearchPredicate.java | 290 ++++ .../internal/core/graph/SearchUtils.java | 132 ++ .../core/graph/SinglePageGraphResultSet.java | 43 + .../core/insights/AddressFormatter.java | 43 + .../insights/ConfigAntiPatternsFinder.java | 35 + .../core/insights/DataCentersFinder.java | 44 + .../insights/ExecutionProfilesInfoFinder.java | 167 +++ .../core/insights/InsightsClient.java | 475 ++++++ .../insights/InsightsSupportVerifier.java | 39 + .../internal/core/insights/PackageUtil.java | 84 ++ .../core/insights/PlatformInfoFinder.java | 271 ++++ .../ReconnectionPolicyInfoFinder.java | 38 + .../configuration/InsightsConfiguration.java | 34 + .../InsightEventFormatException.java | 14 + .../insights/schema/AuthProviderType.java | 56 + .../core/insights/schema/Insight.java | 41 + .../core/insights/schema/InsightMetadata.java | 107 ++ .../core/insights/schema/InsightType.java | 17 + .../insights/schema/InsightsPlatformInfo.java | 225 +++ .../insights/schema/InsightsStartupData.java | 414 ++++++ .../insights/schema/InsightsStatusData.java | 125 ++ .../insights/schema/LoadBalancingInfo.java | 78 + .../schema/PoolSizeByHostDistance.java | 74 + .../schema/ReconnectionPolicyInfo.java | 79 + .../internal/core/insights/schema/SSL.java | 52 + .../insights/schema/SessionStateForNode.java | 63 + .../schema/SpecificExecutionProfile.java | 122 ++ .../schema/SpeculativeExecutionInfo.java | 78 + .../loadbalancing/DseLoadBalancingPolicy.java | 497 +++++++ 
.../core/metadata/DseTopologyMonitor.java | 74 + .../schema/DefaultDseAggregateMetadata.java | 99 ++ .../schema/DefaultDseColumnMetadata.java | 27 + .../schema/DefaultDseFunctionMetadata.java | 110 ++ .../schema/DefaultDseIndexMetadata.java | 29 + .../schema/DefaultDseKeyspaceMetadata.java | 133 ++ .../schema/DefaultDseTableMetadata.java | 152 ++ .../schema/DefaultDseViewMetadata.java | 155 ++ .../schema/parsing/DseAggregateParser.java | 52 + .../schema/parsing/DseFunctionParser.java | 62 + .../schema/parsing/DseSchemaParser.java | 218 +++ .../parsing/DseSchemaParserFactory.java | 28 + .../schema/parsing/DseTableParser.java | 329 +++++ .../schema/parsing/DseViewParser.java | 146 ++ .../queries/DseSchemaQueriesFactory.java | 63 + .../token/DseReplicationStrategyFactory.java | 34 + .../metadata/token/EverywhereStrategy.java | 32 + .../metrics/DseDropwizardMetricsFactory.java | 73 + .../DseDropwizardSessionMetricUpdater.java | 32 + .../internal/core/search/DateRangeUtil.java | 219 +++ .../core/session/DefaultDseSession.java | 24 + .../tracker/MultiplexingRequestTracker.java | 94 ++ .../type/codec/geometry/GeometryCodec.java | 102 ++ .../type/codec/geometry/LineStringCodec.java | 76 + .../core/type/codec/geometry/PointCodec.java | 76 + .../type/codec/geometry/PolygonCodec.java | 76 + .../core/type/codec/time/DateRangeCodec.java | 177 +++ .../concurrent/BoundedConcurrentQueue.java | 123 ++ .../com/datastax/dse/driver/Driver.properties | 13 + core/src/main/resources/dse-reference.conf | 430 ++++++ .../dse/driver/DriverRunListener.java | 30 + .../dse/driver/DseTestDataProviders.java | 40 + .../datastax/dse/driver/DseTestFixtures.java | 67 + .../config/DseDriverConfigLoaderTest.java | 117 ++ .../data/time/DateRangePrecisionTest.java | 43 + .../api/core/data/time/DateRangeTest.java | 106 ++ .../api/core/graph/predicates/GeoTest.java | 97 ++ .../api/core/graph/predicates/SearchTest.java | 105 ++ .../core/DseProtocolVersionRegistryTest.java | 124 ++ 
.../context/DseStartupOptionsBuilderTest.java | 188 +++ ...ousCqlRequestHandlerNodeTargetingTest.java | 162 +++ ...tinuousCqlRequestHandlerReprepareTest.java | 186 +++ .../ContinuousCqlRequestHandlerRetryTest.java | 608 ++++++++ .../ContinuousCqlRequestHandlerTest.java | 519 +++++++ .../ContinuousCqlRequestHandlerTestBase.java | 39 + .../DefaultContinuousAsyncResultSetTest.java | 106 ++ .../DefaultContinuousResultSetTest.java | 143 ++ ...inuousCqlRequestReactiveProcessorTest.java | 162 +++ .../CqlRequestReactiveProcessorTest.java | 177 +++ .../DefaultReactiveResultSetTckTest.java | 61 + .../core/cql/reactive/MockAsyncResultSet.java | 88 ++ .../internal/core/cql/reactive/MockRow.java | 115 ++ .../ReactiveResultSetSubscriptionTest.java | 139 ++ .../SimpleUnicastProcessorTckTest.java | 41 + .../reactive/SimpleUnicastProcessorTest.java | 45 + .../core/cql/reactive/TestSubscriber.java | 66 + .../data/geometry/DefaultLineStringTest.java | 201 +++ .../core/data/geometry/DefaultPointTest.java | 125 ++ .../data/geometry/DefaultPolygonTest.java | 331 +++++ .../core/data/geometry/DistanceTest.java | 120 ++ .../data/geometry/SerializationUtils.java | 36 + .../internal/core/graph/GraphNodeTest.java | 377 +++++ .../core/graph/GraphRequestHandlerTest.java | 414 ++++++ .../graph/GraphRequestHandlerTestHarness.java | 112 ++ .../core/insights/AddressFormatterTest.java | 42 + .../ConfigAntiPatternsFinderTest.java | 74 + .../core/insights/DataCentersFinderTest.java | 77 + .../insights/ExecutionProfileMockUtil.java | 101 ++ .../ExecutionProfilesInfoFinderTest.java | 210 +++ .../core/insights/InsightsClientTest.java | 484 ++++++ .../insights/InsightsSupportVerifierTest.java | 73 + .../core/insights/PackageUtilTest.java | 76 + .../core/insights/PlatformInfoFinderTest.java | 222 +++ .../ReconnectionPolicyInfoFinderTest.java | 63 + .../DseLoadBalancingPolicyEventsTest.java | 146 ++ .../DseLoadBalancingPolicyInitTest.java | 246 ++++ .../DseLoadBalancingPolicyQueryPlanTest.java | 502 +++++++ 
...LoadBalancingPolicyRequestTrackerTest.java | 180 +++ .../DseLoadBalancingPolicyTestBase.java | 76 + .../codec/geometry/GeometryCodecTest.java | 28 + .../codec/geometry/LineStringCodecTest.java | 65 + .../type/codec/geometry/PointCodecTest.java | 69 + .../type/codec/geometry/PolygonCodecTest.java | 69 + .../type/codec/time/DateRangeCodecTest.java | 102 ++ .../BoundedConcurrentQueueTest.java | 68 + .../resources/config/customApplication.conf | 2 + .../resources/config/customApplication.json | 5 + .../config/customApplication.properties | 14 +- .../insights/duplicate-dependencies.txt | 2 + .../insights/malformed-pom.properties | 11 + .../insights/netty-dependency-optional.txt | 1 + .../resources/insights/netty-dependency.txt | 1 + .../insights/ordered-dependencies.txt | 3 + .../test/resources/insights/pom.properties | 12 + .../resources/insights/test-dependencies.txt | 31 + core/src/test/resources/logback-test.xml | 33 +- .../DseGssApiAuthProviderAlternateIT.java | 84 ++ .../core/auth/DseGssApiAuthProviderIT.java | 99 ++ .../core/auth/DsePlainTextAuthProviderIT.java | 115 ++ .../core/auth/DseProxyAuthenticationIT.java | 253 ++++ .../dse/driver/api/core/auth/EmbeddedAds.java | 596 ++++++++ .../driver/api/core/auth/EmbeddedAdsRule.java | 319 ++++ .../driver/api/core/auth/KerberosUtils.java | 49 + .../cql/continuous/ContinuousPagingIT.java | 687 +++++++++ .../continuous/ContinuousPagingITBase.java | 162 +++ .../reactive/ContinuousPagingReactiveIT.java | 106 ++ .../reactive/DefaultReactiveResultSetIT.java | 300 ++++ .../api/core/data/geometry/GeometryIT.java | 394 +++++ .../api/core/data/geometry/LineStringIT.java | 83 ++ .../api/core/data/geometry/PointIT.java | 47 + .../api/core/data/geometry/PolygonIT.java | 111 ++ .../api/core/data/time/DateRangeIT.java | 345 +++++ .../api/core/graph/GraphAuthenticationIT.java | 62 + .../api/core/graph/GraphDataTypeITBase.java | 165 +++ .../api/core/graph/GraphGeoSearchIndexIT.java | 257 ++++ 
.../core/graph/GraphTextSearchIndexIT.java | 320 ++++ .../api/core/graph/GraphTimeoutsIT.java | 163 +++ .../api/core/graph/SampleGraphScripts.java | 44 + .../api/core/graph/SocialTraversalDsl.java | 18 + .../core/graph/SocialTraversalSourceDsl.java | 44 + .../api/core/graph/TinkerEdgeAssert.java | 38 + .../api/core/graph/TinkerElementAssert.java | 41 + .../api/core/graph/TinkerGraphAssertions.java | 36 + .../api/core/graph/TinkerPathAssert.java | 99 ++ .../api/core/graph/TinkerTreeAssert.java | 34 + .../api/core/graph/TinkerVertexAssert.java | 35 + .../graph/TinkerVertexPropertyAssert.java | 35 + .../graph/remote/GraphDataTypeRemoteIT.java | 58 + .../GraphTraversalMetaPropertiesRemoteIT.java | 76 + ...GraphTraversalMultiPropertiesRemoteIT.java | 76 + .../graph/remote/GraphTraversalRemoteIT.java | 494 +++++++ .../statement/GraphDataTypeFluentIT.java | 61 + .../statement/GraphDataTypeScriptIT.java | 62 + .../statement/GraphTraversalBatchIT.java | 120 ++ .../graph/statement/GraphTraversalIT.java | 512 +++++++ .../GraphTraversalMetaPropertiesIT.java | 79 + .../GraphTraversalMultiPropertiesIT.java | 79 + .../api/core/insights/InsightsClientIT.java | 73 + .../driver/api/core/metadata/MetadataIT.java | 50 + .../metadata/schema/AbstractMetadataIT.java | 61 + .../schema/DseAggregateMetadataIT.java | 84 ++ .../schema/DseFunctionMetadataIT.java | 195 +++ .../DseSessionBuilderInstantiator.java | 22 + .../api/testinfra/session/DseSessionRule.java | 66 + .../session/DseSessionRuleBuilder.java | 87 ++ .../DseOsgiCustomLoadBalancingPolicyIT.java | 69 + .../dse/driver/osgi/DseOsgiGeoTypesIT.java | 62 + .../dse/driver/osgi/DseOsgiGraphIT.java | 63 + .../dse/driver/osgi/DseOsgiLz4IT.java | 64 + .../dse/driver/osgi/DseOsgiReactiveIT.java | 60 + .../dse/driver/osgi/DseOsgiShadedIT.java | 78 + .../dse/driver/osgi/DseOsgiSnappyIT.java | 64 + .../dse/driver/osgi/DseOsgiVanillaIT.java | 128 ++ .../driver/osgi/support/DseBundleOptions.java | 187 +++ 
.../osgi/support/DseOsgiGeoTypesTests.java | 61 + .../osgi/support/DseOsgiGraphTests.java | 70 + .../osgi/support/DseOsgiReactiveTests.java | 66 + .../osgi/support/DseOsgiSimpleTests.java | 80 + .../src/test/resources/application.conf | 8 +- .../src/test/resources/logback-test.xml | 17 +- .../api/querybuilder/DseQueryBuilder.java | 23 + .../api/querybuilder/DseSchemaBuilder.java | 205 +++ .../driver/api/querybuilder/package-info.java | 13 + .../schema/CreateDseAggregateEnd.java | 46 + .../schema/CreateDseAggregateStart.java | 50 + .../schema/CreateDseAggregateStateFunc.java | 26 + .../schema/CreateDseFunctionEnd.java | 11 + .../schema/CreateDseFunctionStart.java | 66 + .../schema/CreateDseFunctionWithLanguage.java | 39 + .../CreateDseFunctionWithNullOption.java | 25 + .../schema/CreateDseFunctionWithType.java | 68 + .../api/querybuilder/schema/package-info.java | 13 + .../schema/DefaultCreateDseAggregate.java | 304 ++++ .../schema/DefaultCreateDseFunction.java | 433 ++++++ .../querybuilder/schema/package-info.java | 13 + .../schema/CreateDseAggregateTest.java | 173 +++ .../schema/CreateDseFunctionTest.java | 443 ++++++ 324 files changed, 41327 insertions(+), 45 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java 
create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java 
create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifier.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/AuthProviderType.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java create 
mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseIndexMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseViewParser.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/search/DateRangeUtil.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java create mode 100644 core/src/main/resources/com/datastax/dse/driver/Driver.properties create mode 100644 core/src/main/resources/dse-reference.conf create mode 100644 core/src/test/java/com/datastax/dse/driver/DriverRunListener.java create mode 100644 core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java create mode 100644 core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java create mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java create mode 100644 
core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java create mode 100644 
core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java create mode 100644 
core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java create mode 100644 core/src/test/resources/insights/duplicate-dependencies.txt create mode 100644 core/src/test/resources/insights/malformed-pom.properties create mode 100644 core/src/test/resources/insights/netty-dependency-optional.txt create mode 100644 core/src/test/resources/insights/netty-dependency.txt 
create mode 100644 core/src/test/resources/insights/ordered-dependencies.txt create mode 100644 core/src/test/resources/insights/pom.properties create mode 100644 core/src/test/resources/insights/test-dependencies.txt create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java 
create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java create mode 100644 
integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java create mode 100644 
integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java create mode 100644 
query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java create mode 100644 query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java create mode 100644 query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java create mode 100644 query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java diff --git a/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java b/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java new file mode 100644 index 00000000000..3559f9c6690 --- /dev/null +++ 
b/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java @@ -0,0 +1,49 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; + +/** See {@code dse-reference.conf} for a description of each metric. */ +public enum DseSessionMetric implements SessionMetric { + CONTINUOUS_CQL_REQUESTS("continuous-cql-requests"), + ; + + private static final Map BY_PATH = sortByPath(); + + private final String path; + + DseSessionMetric(String path) { + this.path = path; + } + + @NonNull + @Override + public String getPath() { + return path; + } + + @NonNull + public static DseSessionMetric fromPath(@NonNull String path) { + DseSessionMetric metric = BY_PATH.get(path); + if (metric == null) { + throw new IllegalArgumentException("Unknown DSE session metric path " + path); + } + return metric; + } + + private static Map sortByPath() { + ImmutableMap.Builder result = ImmutableMap.builder(); + for (DseSessionMetric value : values()) { + result.put(value.getPath(), value); + } + return result.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java new file mode 100644 index 00000000000..6313f87917d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java @@ -0,0 +1,48 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core; + +import com.datastax.dse.protocol.internal.DseProtocolConstants; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.ProtocolVersion; + +/** + * A DSE-specific protocol version. + * + *

      Legacy DSE versions did not have a specific version, but instead reused a Cassandra protocol + * version: DSE 5.0 is supported via {@link DefaultProtocolVersion#V4}, and DSE 4.7 and 4.8 via + * {@link DefaultProtocolVersion#V3}. + * + *

      DSE 4.6 and earlier are not supported by this version of the driver, use the 1.x series. + */ +public enum DseProtocolVersion implements ProtocolVersion { + + /** Version 1, supported by DSE 5.1.0 and above. */ + DSE_V1(DseProtocolConstants.Version.DSE_V1, false), + + /** Version 2, supported by DSE 6 and above. */ + DSE_V2(DseProtocolConstants.Version.DSE_V2, false), + ; + + private final int code; + private final boolean beta; + + DseProtocolVersion(int code, boolean beta) { + this.code = code; + this.beta = beta; + } + + @Override + public int getCode() { + return code; + } + + @Override + public boolean isBeta() { + return beta; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java new file mode 100644 index 00000000000..2226c97253e --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousSession; +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveSession; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveSession; +import com.datastax.dse.driver.api.core.graph.GraphSession; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.MavenCoordinates; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.DefaultMavenCoordinates; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** A custom session with DSE-specific capabilities. 
*/ +public interface DseSession + extends CqlSession, + ContinuousSession, + GraphSession, + ReactiveSession, + ContinuousReactiveSession { + + /** + * The Maven coordinates of the core DSE driver artifact. + * + *

      Note that this DSE driver depends on the DataStax Java driver for Apache Cassandra®. You + * can find the coordinates of the Cassandra driver at {@link Session#OSS_DRIVER_COORDINATES}. + */ + @NonNull + MavenCoordinates DSE_DRIVER_COORDINATES = + DefaultMavenCoordinates.buildFromResourceAndPrint( + DseSession.class.getResource("/com/datastax/dse/driver/Driver.properties")); + + /** + * Returns a builder to create a new instance. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static DseSessionBuilder builder() { + return new DseSessionBuilder(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java new file mode 100644 index 00000000000..088b440c362 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java @@ -0,0 +1,27 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core; + +import com.datastax.dse.driver.internal.core.session.DefaultDseSession; +import com.datastax.oss.driver.api.core.CqlSession; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; + +/** + * Helper class to build a {@link DseSession} instance. + * + *

      This class is mutable and not thread-safe. + */ +@NotThreadSafe +public class DseSessionBuilder extends DseSessionBuilderBase { + + @NonNull + @Override + protected DseSession wrap(@NonNull CqlSession defaultSession) { + return new DefaultDseSession(defaultSession); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java new file mode 100644 index 00000000000..61c079a1a57 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -0,0 +1,242 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core; + +import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; +import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; +import com.datastax.dse.driver.internal.core.auth.DseProgrammaticPlainTextAuthProvider; +import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; 
+import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.util.Loggers; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.function.Predicate; +import net.jcip.annotations.NotThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@NotThreadSafe +public abstract class DseSessionBuilderBase< + SelfT extends DseSessionBuilderBase, SessionT> + extends SessionBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(DseSessionBuilderBase.class); + + protected DseProgrammaticArguments.Builder dseProgrammaticArgumentsBuilder = + DseProgrammaticArguments.builder(); + + protected DseSessionBuilderBase() { + try { + Class.forName("com.esri.core.geometry.ogc.OGCGeometry"); + programmaticArgumentsBuilder.addTypeCodecs( + DseTypeCodecs.LINE_STRING, + DseTypeCodecs.POINT, + DseTypeCodecs.POLYGON, + DseTypeCodecs.DATE_RANGE); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, "Could not register Geo codecs; ESRI API might be missing from classpath", error); + } + } + + /** + * A unique identifier for the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message for each new connection established + * by the driver, and may be used by future DSE versions for monitoring purposes. + * + *

      If you don't call this method, the driver will generate an identifier with {@link + * Uuids#random()}. + */ + @NonNull + public SelfT withClientId(@Nullable UUID clientId) { + this.dseProgrammaticArgumentsBuilder.withStartupClientId(clientId); + return self; + } + + /** + * The name of the application using the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message for each new connection established + * by the driver, and may be used by future DSE versions for monitoring purposes. + * + *

      This can also be defined in the driver configuration with the option {@code + * basic.application.name}; if you specify both, this method takes precedence and the + * configuration option will be ignored. + */ + @NonNull + public SelfT withApplicationName(@Nullable String applicationName) { + this.dseProgrammaticArgumentsBuilder.withStartupApplicationName(applicationName); + return self; + } + + /** + * The version of the application using the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message for each new connection established + * by the driver, and may be used by future DSE versions for monitoring purposes. + * + *

      This can also be defined in the driver configuration with the option {@code + * basic.application.version}; if you specify both, this method takes precedence and the + * configuration option will be ignored. + */ + @NonNull + public SelfT withApplicationVersion(@Nullable String applicationVersion) { + this.dseProgrammaticArgumentsBuilder.withStartupApplicationVersion(applicationVersion); + return self; + } + + /** + * Sets the configuration loader to use. + * + *

      Note that this loader must produce a configuration that includes the DSE-specific options: + * if you're using one of the built-in implementations provided by the driver, use the static + * factory methods from {@link DseDriverConfigLoader} (not the ones from {@link + * DriverConfigLoader}). + * + *

      If you don't call this method, the builder will use the default implementation, based on the + * Typesafe config library. More precisely, configuration properties are loaded and merged from + * the following (first-listed are higher priority): + * + *

        + *
      • system properties + *
      • {@code application.conf} (all resources on classpath with this name) + *
      • {@code application.json} (all resources on classpath with this name) + *
      • {@code application.properties} (all resources on classpath with this name) + *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, + * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that + * defines default options for all DSE-specific mandatory options. + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core driver JAR, that defines + * default options for all mandatory options. + *
      + * + * The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      This default loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + * + * @see Typesafe config's + * standard loading behavior + */ + @NonNull + @Override + public SelfT withConfigLoader(@Nullable DriverConfigLoader configLoader) { + // overridden only to customize the javadocs + return super.withConfigLoader(configLoader); + } + + /** + * Configures the session to use DSE plaintext authentication with the given username and + * password. + * + *

      This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider + * implementation. Therefore calling it overrides the configuration (that is, the {@code + * advanced.auth-provider.class} option will be ignored). + * + *

      Note that this approach holds the credentials in clear text in memory, which makes them + * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for + * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code + * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link + * #withAuthProvider(AuthProvider)} or via the configuration ({@code + * advanced.auth-provider.class}). + */ + @NonNull + @Override + public SelfT withAuthCredentials(@NonNull String username, @NonNull String password) { + return withAuthCredentials(username, password, ""); + } + + /** + * Configures the session to use DSE plaintext authentication with the given username and + * password, and perform proxy authentication with the given authorization id. + * + *

      This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider + * implementation. Therefore calling it overrides the configuration (that is, the {@code + * advanced.auth-provider.class} option will be ignored). + * + *

      Note that this approach holds the credentials in clear text in memory, which makes them + * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for + * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code + * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link + * #withAuthProvider(AuthProvider)} or via the configuration ({@code + * advanced.auth-provider.class}). + */ + @NonNull + public SelfT withAuthCredentials( + @NonNull String username, @NonNull String password, @NonNull String authorizationId) { + return withAuthProvider( + new DseProgrammaticPlainTextAuthProvider(username, password, authorizationId)); + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + + // Preserve backward compatibility with the deprecated method: + @SuppressWarnings("deprecation") + DriverContext legacyApiContext = + buildContext( + configLoader, + programmaticArguments.getTypeCodecs(), + programmaticArguments.getNodeStateListener(), + programmaticArguments.getSchemaChangeListener(), + programmaticArguments.getRequestTracker(), + programmaticArguments.getLocalDatacenters(), + programmaticArguments.getNodeFilters(), + programmaticArguments.getClassLoader()); + if (legacyApiContext != null) { + return legacyApiContext; + } + + return new DseDriverContext( + configLoader, programmaticArguments, dseProgrammaticArgumentsBuilder.build()); + } + + @Deprecated + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, + List> typeCodecs, + NodeStateListener nodeStateListener, + SchemaChangeListener schemaChangeListener, + RequestTracker requestTracker, + Map localDatacenters, + Map> nodeFilters, + ClassLoader classLoader) { + return super.buildContext( + configLoader, + typeCodecs, + nodeStateListener, + schemaChangeListener, + requestTracker, + 
localDatacenters, + nodeFilters, + classLoader); + } + + @NonNull + @Override + protected DriverConfigLoader defaultConfigLoader() { + return new DefaultDseDriverConfigLoader(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java new file mode 100644 index 00000000000..482579895b0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.oss.driver.api.core.auth.SyncAuthenticator; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import net.jcip.annotations.ThreadSafe; + +/** + * Base class for {@link SyncAuthenticator} implementations that want to make use of the + * authentication scheme negotiation in DseAuthenticator. + */ +@ThreadSafe +public abstract class BaseDseAuthenticator implements SyncAuthenticator { + + private static final String DSE_AUTHENTICATOR = + "com.datastax.bdp.cassandra.auth.DseAuthenticator"; + + private final String serverAuthenticator; + + protected BaseDseAuthenticator(@NonNull String serverAuthenticator) { + this.serverAuthenticator = serverAuthenticator; + } + + /** + * Return a byte buffer containing the required SASL mechanism. + * + *

      This should be one of: + * + *

        + *
      • PLAIN + *
      • GSSAPI + *
      + * + * This must be either a {@linkplain ByteBuffer#asReadOnlyBuffer() read-only} buffer, or a new + * instance every time. + */ + @NonNull + protected abstract ByteBuffer getMechanism(); + + /** + * Return a byte buffer containing the expected successful server challenge. + * + *

      This should be one of: + * + *

        + *
      • PLAIN-START + *
      • GSSAPI-START + *
      + * + * This must be either a {@linkplain ByteBuffer#asReadOnlyBuffer() read-only} buffer, or a new + * instance every time. + */ + @NonNull + protected abstract ByteBuffer getInitialServerChallenge(); + + @Nullable + @Override + public ByteBuffer initialResponseSync() { + // DseAuthenticator communicates back the mechanism in response to server authenticate message. + // older authenticators simply expect the auth response with credentials. + if (isDseAuthenticator()) { + return getMechanism(); + } else { + return evaluateChallengeSync(getInitialServerChallenge()); + } + } + + @Override + public void onAuthenticationSuccessSync(@Nullable ByteBuffer token) {} + + private boolean isDseAuthenticator() { + return serverAuthenticator.equals(DSE_AUTHENTICATOR); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java new file mode 100644 index 00000000000..71c5f187727 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java @@ -0,0 +1,324 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.auth.Authenticator; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.util.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; +import java.nio.ByteBuffer; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import javax.security.auth.Subject; +import javax.security.auth.login.Configuration; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; +import javax.security.sasl.Sasl; +import javax.security.sasl.SaslClient; +import javax.security.sasl.SaslException; +import net.jcip.annotations.Immutable; +import net.jcip.annotations.NotThreadSafe; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public abstract class DseGssApiAuthProviderBase implements AuthProvider { + + /** The default SASL service name used by this auth provider. */ + public static final String DEFAULT_SASL_SERVICE_NAME = "dse"; + + /** The name of the system property to use to specify the SASL service name. 
*/ + public static final String SASL_SERVICE_NAME_PROPERTY = "dse.sasl.service"; + + private static final Logger LOG = LoggerFactory.getLogger(DseGssApiAuthProviderBase.class); + + private final String logPrefix; + + /** + * @param logPrefix a string that will get prepended to the logs (this is used for discrimination + * when you have multiple driver instances executing in the same JVM). Config-based + * implementations fill this with {@link Session#getName()}. + */ + protected DseGssApiAuthProviderBase(@NonNull String logPrefix) { + this.logPrefix = Objects.requireNonNull(logPrefix); + } + + @NonNull + protected abstract GssApiOptions getOptions( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator); + + @NonNull + @Override + public Authenticator newAuthenticator( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) + throws AuthenticationException { + return new GssApiAuthenticator( + getOptions(endPoint, serverAuthenticator), endPoint, serverAuthenticator); + } + + @Override + public void onMissingChallenge(@NonNull EndPoint endPoint) { + LOG.warn( + "[{}] {} did not send an authentication challenge; " + + "This is suspicious because the driver expects authentication", + logPrefix, + endPoint); + } + + @Override + public void close() { + // nothing to do + } + + /** + * The options to initialize a new authenticator. + * + *

      Use {@link #builder()} to create an instance. + */ + @Immutable + public static class GssApiOptions { + + @NonNull + public static Builder builder() { + return new Builder(); + } + + private final Configuration loginConfiguration; + private final Subject subject; + private final String saslProtocol; + private final String authorizationId; + private final Map saslProperties; + + private GssApiOptions( + @Nullable Configuration loginConfiguration, + @Nullable Subject subject, + @Nullable String saslProtocol, + @Nullable String authorizationId, + @NonNull Map saslProperties) { + this.loginConfiguration = loginConfiguration; + this.subject = subject; + this.saslProtocol = saslProtocol; + this.authorizationId = authorizationId; + this.saslProperties = saslProperties; + } + + @Nullable + public Configuration getLoginConfiguration() { + return loginConfiguration; + } + + @Nullable + public Subject getSubject() { + return subject; + } + + @Nullable + public String getSaslProtocol() { + return saslProtocol; + } + + @Nullable + public String getAuthorizationId() { + return authorizationId; + } + + @NonNull + public Map getSaslProperties() { + return saslProperties; + } + + @NotThreadSafe + public static class Builder { + + private Configuration loginConfiguration; + private Subject subject; + private String saslProtocol; + private String authorizationId; + private final Map saslProperties = new HashMap<>(); + + public Builder() { + saslProperties.put(Sasl.SERVER_AUTH, "true"); + saslProperties.put(Sasl.QOP, "auth"); + } + + /** + * Sets a login configuration that will be used to create a {@link LoginContext}. + * + *

      You MUST call either this method or {@link #withSubject(Subject)}; if both are called, + * the subject takes precedence, and the login configuration will be ignored. + */ + @NonNull + public Builder withLoginConfiguration(@Nullable Configuration loginConfiguration) { + this.loginConfiguration = loginConfiguration; + return this; + } + + /** + * Sets a previously authenticated subject to reuse. + * + *

      You MUST call either this method or {@link #withLoginConfiguration(Configuration)}; if + * both are called, the subject takes precedence, and the login configuration will be ignored. + */ + @NonNull + public Builder withSubject(@Nullable Subject subject) { + this.subject = subject; + return this; + } + + /** + * Sets the SASL protocol name to use; should match the username of the Kerberos service + * principal used by the DSE server. + */ + @NonNull + public Builder withSaslProtocol(@Nullable String saslProtocol) { + this.saslProtocol = saslProtocol; + return this; + } + + /** Sets the authorization ID (allows proxy authentication). */ + @NonNull + public Builder withAuthorizationId(@Nullable String authorizationId) { + this.authorizationId = authorizationId; + return this; + } + + /** + * Add a SASL property to use when creating the SASL client. + * + *

      Note that this builder pre-initializes these two default properties: + * + *

      +       * javax.security.sasl.server.authentication = true
      +       * javax.security.sasl.qop = auth
      +       * 
      + */ + @NonNull + public Builder addSaslProperty(@NonNull String name, @NonNull String value) { + this.saslProperties.put(Objects.requireNonNull(name), Objects.requireNonNull(value)); + return this; + } + + @NonNull + public GssApiOptions build() { + return new GssApiOptions( + loginConfiguration, + subject, + saslProtocol, + authorizationId, + ImmutableMap.copyOf(saslProperties)); + } + } + } + + protected static class GssApiAuthenticator extends BaseDseAuthenticator { + + private static final ByteBuffer MECHANISM = + ByteBuffer.wrap("GSSAPI".getBytes(Charsets.UTF_8)).asReadOnlyBuffer(); + private static final ByteBuffer SERVER_INITIAL_CHALLENGE = + ByteBuffer.wrap("GSSAPI-START".getBytes(Charsets.UTF_8)).asReadOnlyBuffer(); + private static final ByteBuffer EMPTY_BYTE_ARRAY = + ByteBuffer.wrap(new byte[0]).asReadOnlyBuffer(); + private static final String JAAS_CONFIG_ENTRY = "DseClient"; + private static final String[] SUPPORTED_MECHANISMS = new String[] {"GSSAPI"}; + + private Subject subject; + private SaslClient saslClient; + private EndPoint endPoint; + + protected GssApiAuthenticator( + GssApiOptions options, EndPoint endPoint, String serverAuthenticator) { + super(serverAuthenticator); + + try { + if (options.getSubject() != null) { + this.subject = options.getSubject(); + } else { + Configuration loginConfiguration = options.getLoginConfiguration(); + if (loginConfiguration == null) { + throw new IllegalArgumentException("Must provide one of subject or loginConfiguration"); + } + LoginContext login = new LoginContext(JAAS_CONFIG_ENTRY, null, null, loginConfiguration); + login.login(); + this.subject = login.getSubject(); + } + String protocol = options.getSaslProtocol(); + if (protocol == null) { + protocol = System.getProperty(SASL_SERVICE_NAME_PROPERTY, DEFAULT_SASL_SERVICE_NAME); + } + this.saslClient = + Sasl.createSaslClient( + SUPPORTED_MECHANISMS, + options.getAuthorizationId(), + protocol, + ((InetSocketAddress) 
endPoint.resolve()).getAddress().getCanonicalHostName(), + options.getSaslProperties(), + null); + } catch (LoginException | SaslException e) { + throw new AuthenticationException(endPoint, e.getMessage()); + } + this.endPoint = endPoint; + } + + @NonNull + @Override + protected ByteBuffer getMechanism() { + return MECHANISM; + } + + @NonNull + @Override + protected ByteBuffer getInitialServerChallenge() { + return SERVER_INITIAL_CHALLENGE; + } + + @Nullable + @Override + public ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer challenge) { + + byte[] challengeBytes; + if (SERVER_INITIAL_CHALLENGE.equals(challenge)) { + if (!saslClient.hasInitialResponse()) { + return EMPTY_BYTE_ARRAY; + } + challengeBytes = new byte[0]; + } else { + // The native protocol spec says the incoming challenge can be null depending on the + // implementation. But saslClient.evaluateChallenge clearly documents that the byte array + // can't be null, which probably means that a SASL authenticator never sends back null. + if (challenge == null) { + throw new AuthenticationException(this.endPoint, "Unexpected null challenge from server"); + } + challengeBytes = Bytes.getArray(challenge); + } + try { + + return ByteBuffer.wrap( + Subject.doAs( + subject, + new PrivilegedExceptionAction() { + @Override + public byte[] run() throws SaslException { + return saslClient.evaluateChallenge(challengeBytes); + } + })); + } catch (PrivilegedActionException e) { + throw new AuthenticationException(this.endPoint, e.getMessage(), e.getException()); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java new file mode 100644 index 00000000000..4c9ff5343f5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java @@ -0,0 +1,195 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.auth.Authenticator; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.Objects; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Common infrastructure for DSE plain text auth providers. + * + *

      This can be reused to write an implementation that retrieves the credentials from another + * source than the configuration. + */ +@ThreadSafe +public abstract class DsePlainTextAuthProviderBase implements AuthProvider { + + private static final Logger LOG = LoggerFactory.getLogger(DsePlainTextAuthProviderBase.class); + + private final String logPrefix; + + /** + * @param logPrefix a string that will get prepended to the logs (this is used for discrimination + * when you have multiple driver instances executing in the same JVM). Config-based + * implementations fill this with {@link Session#getName()}. + */ + protected DsePlainTextAuthProviderBase(@NonNull String logPrefix) { + this.logPrefix = Objects.requireNonNull(logPrefix); + } + + /** + * Retrieves the credentials from the underlying source. + * + *

      This is invoked every time the driver opens a new connection. + */ + @NonNull + protected abstract Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator); + + @NonNull + @Override + public Authenticator newAuthenticator( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) + throws AuthenticationException { + return new PlainTextAuthenticator( + getCredentials(endPoint, serverAuthenticator), endPoint, serverAuthenticator); + } + + @Override + public void onMissingChallenge(@NonNull EndPoint endPoint) { + LOG.warn( + "[{}] {} did not send an authentication challenge; " + + "This is suspicious because the driver expects authentication", + logPrefix, + endPoint); + } + + @Override + public void close() { + // nothing to do + } + + public static class Credentials { + + private final char[] authenticationId; + private final char[] password; + private final char[] authorizationId; + + public Credentials( + @NonNull char[] authenticationId, + @NonNull char[] password, + @NonNull char[] authorizationId) { + this.authenticationId = Objects.requireNonNull(authenticationId); + this.password = Objects.requireNonNull(password); + this.authorizationId = Objects.requireNonNull(authorizationId); + } + + @NonNull + public char[] getAuthenticationId() { + return authenticationId; + } + + @NonNull + public char[] getPassword() { + return password; + } + + @NonNull + public char[] getAuthorizationId() { + return authorizationId; + } + + /** Clears the credentials from memory when they're no longer needed. */ + protected void clear() { + // Note: this is a bit irrelevant with the built-in provider, because the config already + // caches the credentials in memory. But it might be useful for a custom implementation that + // retrieves the credentials from a different source. 
+ Arrays.fill(getAuthenticationId(), (char) 0); + Arrays.fill(getPassword(), (char) 0); + Arrays.fill(getAuthorizationId(), (char) 0); + } + } + + protected static class PlainTextAuthenticator extends BaseDseAuthenticator { + + private static final ByteBuffer MECHANISM = + ByteBuffer.wrap("PLAIN".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); + + private static final ByteBuffer SERVER_INITIAL_CHALLENGE = + ByteBuffer.wrap("PLAIN-START".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); + + private final ByteBuffer encodedCredentials; + private final EndPoint endPoint; + + protected PlainTextAuthenticator( + Credentials credentials, EndPoint endPoint, String serverAuthenticator) { + super(serverAuthenticator); + + Objects.requireNonNull(credentials); + + ByteBuffer authenticationId = toUtf8Bytes(credentials.getAuthenticationId()); + ByteBuffer password = toUtf8Bytes(credentials.getPassword()); + ByteBuffer authorizationId = toUtf8Bytes(credentials.getAuthorizationId()); + + this.encodedCredentials = + ByteBuffer.allocate( + authorizationId.remaining() + + authenticationId.remaining() + + password.remaining() + + 2); + encodedCredentials.put(authorizationId); + encodedCredentials.put((byte) 0); + encodedCredentials.put(authenticationId); + encodedCredentials.put((byte) 0); + encodedCredentials.put(password); + encodedCredentials.flip(); + + clear(authorizationId); + clear(authenticationId); + clear(password); + + this.endPoint = endPoint; + } + + private static ByteBuffer toUtf8Bytes(char[] charArray) { + CharBuffer charBuffer = CharBuffer.wrap(charArray); + return Charsets.UTF_8.encode(charBuffer); + } + + private static void clear(ByteBuffer buffer) { + buffer.rewind(); + while (buffer.remaining() > 0) { + buffer.put((byte) 0); + } + } + + @NonNull + @Override + public ByteBuffer getMechanism() { + return MECHANISM; + } + + @NonNull + @Override + public ByteBuffer getInitialServerChallenge() { + return SERVER_INITIAL_CHALLENGE; + } + + @Nullable + 
@Override + public ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer challenge) { + if (SERVER_INITIAL_CHALLENGE.equals(challenge)) { + return encodedCredentials; + } + throw new AuthenticationException(endPoint, "Incorrect challenge from server"); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java new file mode 100644 index 00000000000..f1d41016e35 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Map; + +public class ProxyAuthentication { + private static final String PROXY_EXECUTE = "ProxyExecute"; + + /** + * Adds proxy authentication information to a CQL statement. + * + *

      This allows executing a statement as another role than the one the session is currently + * authenticated as. + * + * @param userOrRole the role to use for execution. If the statement was already configured with + * another role, it will get replaced by this one. + * @param statement the statement to modify. + * @return a statement that will run the same CQL query as {@code statement}, but acting as the + * provided role. Note: with the driver's default implementations, this will always be a copy; + * but if you use a custom implementation, it might return the same instance (depending on the + * behavior of {@link Statement#setCustomPayload(Map) statement.setCustomPayload()}). + * @see Setting + * up roles for applications (DSE 6.0 admin guide) + */ + @NonNull + public static > StatementT executeAs( + @NonNull String userOrRole, @NonNull StatementT statement) { + return statement.setCustomPayload( + addProxyExecuteEntry(statement.getCustomPayload(), userOrRole)); + } + + /** + * Adds proxy authentication information to a graph statement. 
+ * + * @see #executeAs(String, Statement) + */ + @NonNull + public static > StatementT executeAs( + @NonNull String userOrRole, @NonNull StatementT statement) { + return statement.setCustomPayload( + addProxyExecuteEntry(statement.getCustomPayload(), userOrRole)); + } + + private static Map addProxyExecuteEntry( + Map currentPayload, @NonNull String userOrRole) { + NullAllowingImmutableMap.Builder builder = + NullAllowingImmutableMap.builder(); + builder.put(PROXY_EXECUTE, ByteBuffer.wrap(userOrRole.getBytes(Charsets.UTF_8))); + if (!currentPayload.isEmpty()) { + for (Map.Entry entry : currentPayload.entrySet()) { + String key = entry.getKey(); + if (!key.equals(PROXY_EXECUTE)) { + builder.put(key, entry.getValue()); + } + } + } + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java new file mode 100644 index 00000000000..4b5e791b584 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java @@ -0,0 +1,200 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.config; + +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.File; +import java.net.URL; + +/** + * Exposes factory methods to create config loaders from the DSE driver. 
+ * + *

      Note that this class only exists to expose those methods in a way that is symmetric to its OSS + * counterpart {@link DriverConfigLoader}. It does not extend it, DSE-specific loaders are regular + * instances of the OSS type. + */ +public class DseDriverConfigLoader { + + /** + * Builds an instance using the driver's default implementation (based on Typesafe config), except + * that application-specific options are loaded from a classpath resource with a custom name. + * + *

      More precisely, configuration properties are loaded and merged from the following + * (first-listed are higher priority): + * + *

        + *
      • system properties + *
      • {@code .conf} (all resources on classpath with this name) + *
      • {@code .json} (all resources on classpath with this name) + *
      • {@code .properties} (all resources on classpath with this name) + *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, + * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that + * defines default options for all DSE-specific mandatory options. + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core OSS driver JAR, that defines + * default options for all mandatory options common to OSS and DSE. + *
      + * + * The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      The returned loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + */ + @NonNull + public static DriverConfigLoader fromClasspath(@NonNull String resourceBaseName) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseResourcesAnySyntax(resourceBaseName)) + .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) + .withFallback(ConfigFactory.defaultReference()) + .resolve(); + return config.getConfig("datastax-java-driver"); + }); + } + + /** + * Builds an instance using the driver's default implementation (based on Typesafe config), except + * that application-specific options are loaded from the given file. + * + *

      More precisely, configuration properties are loaded and merged from the following + * (first-listed are higher priority): + * + *

        + *
      • system properties + *
      • the contents of {@code file} + *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, + * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that + * defines default options for all DSE-specific mandatory options. + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core OSS driver JAR, that defines + * default options for all mandatory options common to OSS and DSE. + *
      + * + * The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      The returned loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + */ + @NonNull + public static DriverConfigLoader fromFile(@NonNull File file) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseFileAnySyntax(file)) + .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) + .withFallback(ConfigFactory.defaultReference()) + .resolve(); + return config.getConfig("datastax-java-driver"); + }); + } + + /** + * Builds an instance using the driver's default implementation (based on Typesafe config), except + * that application-specific options are loaded from the given URL. + * + *

      More precisely, configuration properties are loaded and merged from the following + * (first-listed are higher priority): + * + *

        + *
      • system properties + *
      • the contents of {@code url} + *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, + * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that + * defines default options for all DSE-specific mandatory options. + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core OSS driver JAR, that defines + * default options for all mandatory options common to OSS and DSE. + *
      + * + * The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      The returned loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + */ + @NonNull + public static DriverConfigLoader fromUrl(@NonNull URL url) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseURL(url)) + .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) + .withFallback(ConfigFactory.defaultReference()) + .resolve(); + return config.getConfig("datastax-java-driver"); + }); + } + + /** + * Starts a builder that allows configuration options to be overridden programmatically. + * + *

      Sample usage: + * + *

      {@code
      +   * DriverConfigLoader loader =
      +   *     DriverConfigLoader.programmaticBuilder()
      +   *         .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(5))
      +   *         .startProfile("slow")
      +   *         .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30))
      +   *         .endProfile()
      +   *         .build();
      +   * }
      + * + * The resulting loader still uses the driver's default implementation (based on Typesafe config), + * except that the programmatic configuration takes precedence. More precisely, configuration + * properties are loaded and merged from the following (first-listed are higher priority): + * + *
        + *
      • system properties + *
      • properties that were provided programmatically + *
      • {@code application.conf} (all resources on classpath with this name) + *
      • {@code application.json} (all resources on classpath with this name) + *
      • {@code application.properties} (all resources on classpath with this name) + *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, + * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that + * defines default options for all DSE-specific mandatory options. + *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this + * will load the {@code reference.conf} included in the core OSS driver JAR, that defines + * default options for all mandatory options common to OSS and DSE. + *
      + * + * Note that {@code application.*} is entirely optional, you may choose to only rely on the + * driver's built-in {@code reference.conf} and programmatic overrides. + * + *

      The resulting configuration is expected to contain a {@code datastax-java-driver} section. + * + *

      The loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + * + *

      Note that the returned builder is not thread-safe. + */ + @NonNull + public static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder() { + return new DefaultProgrammaticDriverConfigLoaderBuilder( + () -> + ConfigFactory.defaultApplication() + .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) + .withFallback(ConfigFactory.defaultReference()), + DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); + } + + private DseDriverConfigLoader() { + throw new AssertionError("Not meant to be instantiated"); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java new file mode 100644 index 00000000000..a404ec3b2f2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -0,0 +1,180 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.config; + +import com.datastax.oss.driver.api.core.config.DriverOption; +import edu.umd.cs.findbugs.annotations.NonNull; + +public enum DseDriverOption implements DriverOption { + /** + * The name of the application using the session. + * + *

      Value type: {@link String} + */ + APPLICATION_NAME("basic.application.name"), + /** + * The version of the application using the session. + * + *

      Value type: {@link String} + */ + APPLICATION_VERSION("basic.application.version"), + + /** + * Proxy authentication for GSSAPI authentication: allows to login as another user or role. + * + *

      Value type: {@link String} + */ + AUTH_PROVIDER_AUTHORIZATION_ID("advanced.auth-provider.authorization-id"), + /** + * Service name for GSSAPI authentication. + * + *

      Value type: {@link String} + */ + AUTH_PROVIDER_SERVICE("advanced.auth-provider.service"), + /** + * Login configuration for GSSAPI authentication. + * + *

      Value type: {@link java.util.Map Map}<{@link String},{@link String}> + */ + AUTH_PROVIDER_LOGIN_CONFIGURATION("advanced.auth-provider.login-configuration"), + /** + * Internal SASL properties, if any, such as QOP, for GSSAPI authentication. + * + *

      Value type: {@link java.util.Map Map}<{@link String},{@link String}> + */ + AUTH_PROVIDER_SASL_PROPERTIES("advanced.auth-provider.sasl-properties"), + + /** + * The page size for continuous paging. + * + *

      Value type: int + */ + CONTINUOUS_PAGING_PAGE_SIZE("advanced.continuous-paging.page-size"), + /** + * Whether {@link #CONTINUOUS_PAGING_PAGE_SIZE} should be interpreted in number of rows or bytes. + * + *

      Value type: boolean + */ + CONTINUOUS_PAGING_PAGE_SIZE_BYTES("advanced.continuous-paging.page-size-in-bytes"), + /** + * The maximum number of continuous pages to return. + * + *

      Value type: int + */ + CONTINUOUS_PAGING_MAX_PAGES("advanced.continuous-paging.max-pages"), + /** + * The maximum number of continuous pages per second. + * + *

      Value type: int + */ + CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND("advanced.continuous-paging.max-pages-per-second"), + /** + * The maximum number of continuous pages that can be stored in the local queue. + * + *

      Value type: int + */ + CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES("advanced.continuous-paging.max-enqueued-pages"), + /** + * How long to wait for the coordinator to send the first continuous page. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE("advanced.continuous-paging.timeout.first-page"), + /** + * How long to wait for the coordinator to send subsequent continuous pages. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES("advanced.continuous-paging.timeout.other-pages"), + + /** + * The largest latency that we expect to record for continuous requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST( + "advanced.metrics.session.continuous-cql-requests.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for + * continuous requests. + * + *

      Value-type: int + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS( + "advanced.metrics.session.continuous-cql-requests.significant-digits"), + /** + * The interval at which percentile data is refreshed for continuous requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL( + "advanced.metrics.session.continuous-cql-requests.refresh-interval"), + + /** + * The read consistency level to use for graph statements. + * + *

      Value type: {@link String} + */ + GRAPH_READ_CONSISTENCY_LEVEL("basic.graph.read-consistency-level"), + /** + * The write consistency level to use for graph statements. + * + *

      Value type: {@link String} + */ + GRAPH_WRITE_CONSISTENCY_LEVEL("basic.graph.write-consistency-level"), + /** + * The traversal source to use for graph statements. + * + *

      Value type: {@link String} + */ + GRAPH_TRAVERSAL_SOURCE("basic.graph.traversal-source"), + /** + * The sub-protocol the driver will use to communicate with DSE Graph, on top of the Cassandra + * native protocol. + * + *

      Value type: {@link String} + */ + GRAPH_SUB_PROTOCOL("advanced.graph.sub-protocol"), + /** + * Whether a script statement represents a system query. + * + *

      Value type: boolean + */ + GRAPH_IS_SYSTEM_QUERY("basic.graph.is-system-query"), + /** + * The name of the graph targeted by graph statements. + * + *

      Value type: {@link String} + */ + GRAPH_NAME("basic.graph.name"), + /** + * How long the driver waits for a graph request to complete. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + GRAPH_TIMEOUT("basic.graph.timeout"), + + /** + * Whether to send events for Insights monitoring. + * + *

      Value type: boolean + */ + MONITOR_REPORTING_ENABLED("advanced.monitor-reporting.enabled"), + ; + + private final String path; + + DseDriverOption(String path) { + this.path = path; + } + + @NonNull + @Override + public String getPath() { + return path; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java new file mode 100644 index 00000000000..0de14867950 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous; + +import com.datastax.oss.driver.api.core.AsyncPagingIterable; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.Statement; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.concurrent.CancellationException; + +/** + * The result of an {@linkplain ContinuousSession#executeContinuouslyAsync(Statement) asynchronous + * continuous paging query}. + * + *

      DSE replies to a continuous query with a stream of response frames. There is one instance of + * this class for each frame. + */ +public interface ContinuousAsyncResultSet + extends AsyncPagingIterable { + + /** Returns the current page's number. Pages are numbered starting from 1. */ + int pageNumber(); + + /** + * Cancels the continuous query. + * + *

      There might still be rows available in the {@linkplain #currentPage() current page} after + * the cancellation; these rows can be retrieved normally. + * + *

      Also, there might be more pages available in the driver's local page cache after the + * cancellation; these extra pages will be discarded. + * + *

      Therefore, if you plan to resume the iteration later, the correct procedure is as follows: + * + *

        + *
      1. Cancel the operation by invoking this method, or by cancelling the {@linkplain + * #fetchNextPage() next page's future}; + *
      2. Keep iterating on the current page until it doesn't return any more rows; + *
      3. Retrieve the paging state with {@link #getExecutionInfo() + * getExecutionInfo().getPagingState()}; + *
      4. {@linkplain Statement#setPagingState(ByteBuffer) Re-inject the paging state} in the + * statement; + *
      5. Resume the operation by invoking {@link + * ContinuousSession#executeContinuouslyAsync(Statement) executeContinuouslyAsync} again. + *
      + * + * After a cancellation, futures returned by {@link #fetchNextPage()} that are not yet complete + * will always complete exceptionally by throwing a {@link CancellationException}, even if + * they were obtained before the cancellation. + */ + void cancel(); + + /** + * {@inheritDoc} + * + *

      Note: because the driver does not support query traces for continuous queries, {@link + * ExecutionInfo#getTracingId()} will always be {@code null}. + */ + @NonNull + @Override + ExecutionInfo getExecutionInfo(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java new file mode 100644 index 00000000000..f6b8c768a4b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous; + +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * The result of a {@linkplain ContinuousSession#executeContinuously(Statement) synchronous + * continuous paging query}. + * + *

      It uses {@linkplain ContinuousAsyncResultSet asynchronous calls} internally, but blocks on the + * results in order to provide a synchronous API to its clients. If the query is paged, only the + * first page will be fetched initially, and iteration will trigger background fetches of the next + * pages when necessary. + * + *

      Note that this object can only be iterated once: rows are "consumed" as they are read, + * subsequent calls to {@code iterator()} will return the same iterator instance. + * + *

      Implementations of this type are not thread-safe. They can only be iterated by the + * thread that invoked {@code session.executeContinuously}. + */ +public interface ContinuousResultSet extends ResultSet { + + /** + * Cancels the continuous query. + * + *

      There might still be rows available in the current page after the cancellation; the + * iteration will only stop when such rows are fully iterated upon. + * + *

      Also, there might be more pages available in the driver's local page cache after the + * cancellation; these extra pages will be discarded. + * + *

      Therefore, if you plan to resume the iteration later, the correct procedure is as follows: + * + *

        + *
      1. Cancel the operation by invoking this method; + *
      2. Keep iterating on this object until it doesn't return any more rows; + *
      3. Retrieve the paging state with {@link #getExecutionInfo() + * getExecutionInfo().getPagingState()}; + *
      4. {@linkplain Statement#setPagingState(ByteBuffer) Re-inject the paging state} in the + * statement; + *
      5. Resume the operation by invoking {@link ContinuousSession#executeContinuously(Statement) + * executeContinuously} again. + *
      + */ + void cancel(); + + /** + * {@inheritDoc} + * + *

      Note: because the driver does not support query traces for continuous queries, {@link + * ExecutionInfo#getTracingId()} will always be {@code null}. + */ + @NonNull + @Override + default ExecutionInfo getExecutionInfo() { + List infos = getExecutionInfos(); + return infos.get(infos.size() - 1); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java new file mode 100644 index 00000000000..32863915819 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous; + +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.session.Session; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Objects; +import java.util.concurrent.CompletionStage; + +/** + * A session that has the ability to execute continuous paging queries. + * + *

      Continuous paging is a new method of streaming bulk amounts of records from Datastax + * Enterprise (DSE) to the Datastax Java Driver, available since DSE 5.1. It is mainly intended to + * be leveraged by DSE + * Analytics and Apache Spark™, or by any similar analytics tool that needs to read large + * portions of a table in one single operation, as quick and reliably as possible. + * + *

      Continuous paging requires the following three conditions to be met on the client side: + * + *

        + *
      1. The statement must target a single partition or a token range owned by one single replica; + * in practice, this means that the statement must have either a {@linkplain + * Statement#setRoutingKey(ByteBuffer) routing key} or a {@linkplain + * Statement#setRoutingToken(Token) routing token} set; + *
      2. The coordinator must be a replica; in practice, this is usually achieved by using + * token-aware routing (if you are using the driver's default {@link LoadBalancingPolicy}, + * then this condition is met); + *
      3. The consistency level must be {@link DefaultConsistencyLevel#ONE ONE} (or {@link + * DefaultConsistencyLevel#LOCAL_ONE LOCAL_ONE}). + *
      + * + *

      It's the caller's responsibility to make sure that the above conditions are met. If this is + * not the case, continuous paging will silently degrade into a normal read operation, and the + * coordinator will retrieve pages one by one from replicas. + * + *

      Note that when the continuous paging optimization kicks in (range read at {@code ONE} + * performed directly on a replica), the snitch is bypassed and the coordinator will always chose + * itself as a replica. Therefore, other functionality such as probabilistic read repair and + * speculative retry is also not available when contacting a replica at {@code ONE}. + * + *

      Continuous paging is disabled by default and needs to be activated server-side. See Enabling + * continuous paging in the DSE docs to learn how to enable it. + * + * @see DSE + * Continuous Paging Tuning and Support Guide + */ +public interface ContinuousSession extends Session { + + /** + * Executes the provided query with continuous paging synchronously. + * + *

      This method takes care of chaining the successive results into a convenient iterable, + * provided that you always access the result from the same thread. For more flexibility, consider + * using the {@linkplain #executeContinuouslyAsync(Statement) asynchronous variant} of this method + * instead. + * + * @param statement the query to execute. + * @return a synchronous iterable on the results. + */ + @NonNull + default ContinuousResultSet executeContinuously(@NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, ContinuousCqlRequestSyncProcessor.CONTINUOUS_RESULT_SYNC)); + } + + /** + * Executes the provided query with continuous paging asynchronously. + * + *

      The server will push all requested pages asynchronously, according to the options defined in + * the current execution profile. The client should consume all pages as quickly as possible, to + * avoid blocking the server for too long. The server will adjust the rate according to the client + * speed, but it will give up if the client does not consume any pages in a period of time equal + * to the read request timeout. + * + * @param statement the query to execute. + * @return a future to the first asynchronous result. + */ + @NonNull + default CompletionStage executeContinuouslyAsync( + @NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, ContinuousCqlRequestAsyncProcessor.CONTINUOUS_RESULT_ASYNC)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java new file mode 100644 index 00000000000..233a02fc455 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java @@ -0,0 +1,18 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; + +/** + * A marker interface for publishers returned by {@link ContinuousReactiveSession}. 
+ * + * @see ContinuousReactiveSession#executeContinuouslyReactive(String) + * @see ContinuousReactiveSession#executeContinuouslyReactive(Statement) + */ +public interface ContinuousReactiveResultSet extends ReactiveResultSet {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java new file mode 100644 index 00000000000..ff12e69a45e --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Session; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import org.reactivestreams.Publisher; + +/** + * A {@link Session} that offers utility methods to issue queries using reactive-style programming + * and continuous paging, combined together. + * + *

      Methods in this interface all return {@link ContinuousReactiveResultSet} instances. All + * publishers support multiple subscriptions in a unicast fashion: each subscriber triggers an + * independent request execution and gets its own copy of the results. + * + *

      Also, note that the publishers may emit items to their subscribers on an internal driver IO + * thread. Subscriber implementors are encouraged to abide by Reactive Streams + * Specification rule 2.2 and avoid performing heavy computations or blocking calls inside + * {@link org.reactivestreams.Subscriber#onNext(Object) onNext} calls, as doing so could slow down + * the driver and impact performance. Instead, they should asynchronously dispatch received signals + * to their processing logic. + * + * @see ReactiveRow + */ +public interface ContinuousReactiveSession extends Session { + + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query continuously and + * emits all the results. + * + * @param query the query to execute. + * @return The {@link Publisher} that will publish the returned results. + */ + @NonNull + default ContinuousReactiveResultSet executeContinuouslyReactive(@NonNull String query) { + return executeContinuouslyReactive(SimpleStatement.newInstance(query)); + } + + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query continuously and + * emits all the results. + * + * @param statement the statement to execute. + * @return The {@link Publisher} that will publish the returned results. + */ + @NonNull + default ContinuousReactiveResultSet executeContinuouslyReactive(@NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, ContinuousCqlRequestReactiveProcessor.CONTINUOUS_REACTIVE_RESULT_SET)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java new file mode 100644 index 00000000000..40147cd1ab3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.reactivestreams.Publisher; + +/** + * Interface implemented by all the reactive result set publishers provided by the driver, and + * notably by {@link ReactiveResultSet}. + */ +public interface ReactiveQueryMetadata { + + /** + * Returns metadata about the {@linkplain ColumnDefinitions columns} contained in this result set. + * + *

      This publisher emits exactly one item as soon as the first response arrives, then completes. + * If the query execution fails within the first request-response cycle, then this + * publisher will fail with the same error; however if the error happens after the first + * response, then this publisher will be already completed and will not acknowledge that + * error in any way. + * + *

      By default, publishers returned by this method do not support multiple subscriptions. + * + * @see ReactiveRow#getColumnDefinitions() + */ + @NonNull + Publisher getColumnDefinitions(); + + /** + * Returns {@linkplain ExecutionInfo information about the execution} of all requests that have + * been performed so far to assemble this result set. + * + *

      If the query is not paged, this publisher will emit exactly one item as soon as the response + * arrives, then complete. If the query is paged, it will emit multiple items, one per page; then + * it will complete when the last page arrives. If the query execution fails, then this publisher + * will fail with the same error. + * + *

      By default, publishers returned by this method do not support multiple subscriptions. + * + * @see ReactiveRow#getExecutionInfo() + */ + @NonNull + Publisher getExecutionInfos(); + + /** + * If the query that produced this result was a conditional update, indicates whether it was + * successfully applied. + * + *

      This publisher emits exactly one item as soon as the first response arrives, then completes. + * If the query execution fails within the first request-response cycle, then this + * publisher will fail with the same error; however if the error happens after the first + * response, then this publisher will be already completed and will not acknowledge that + * error in any way. + * + *

      By default, publishers returned by this method do not support multiple subscriptions. + * + *

      For consistency, this method always returns {@code true} for non-conditional queries + * (although there is no reason to call the method in that case). This is also the case for + * conditional DDL statements ({@code CREATE KEYSPACE... IF NOT EXISTS}, {@code CREATE TABLE... IF + * NOT EXISTS}), for which Cassandra doesn't return an {@code [applied]} column. + * + *

      Note that, for versions of Cassandra strictly lower than 2.1.0-rc2, a server-side bug (CASSANDRA-7337) causes this + * method to always return {@code true} for batches containing conditional queries. + * + * @see ReactiveRow#wasApplied() + */ + @NonNull + Publisher wasApplied(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java new file mode 100644 index 00000000000..4fde5ba0293 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import com.datastax.oss.driver.api.core.cql.Statement; +import org.reactivestreams.Publisher; + +/** + * A {@link Publisher} of {@link ReactiveRow}s returned by a {@link ReactiveSession}. + * + *

      By default, all implementations returned by the driver are cold, unicast, single-subscriber + * only publishers. In other words, they do not support multiple subscriptions; consider + * caching the results produced by such publishers if you need to consume them by more than one + * downstream subscriber. + * + *

      Also, note that reactive result sets may emit items to their subscribers on an internal driver + * IO thread. Subscriber implementors are encouraged to abide by Reactive Streams + * Specification rule 2.2 and avoid performing heavy computations or blocking calls inside + * {@link org.reactivestreams.Subscriber#onNext(Object) onNext} calls, as doing so could slow down + * the driver and impact performance. Instead, they should asynchronously dispatch received signals + * to their processing logic. + * + *

      This interface exists mainly to expose useful information about {@linkplain + * #getExecutionInfos() request execution} and {@linkplain #getColumnDefinitions() query metadata}. + * This is particularly convenient for queries that do not return rows; for queries that do return + * rows, it is also possible, and oftentimes easier, to access that same information {@linkplain + * ReactiveRow at row level}. + * + * @see ReactiveSession#executeReactive(String) + * @see ReactiveSession#executeReactive(Statement) + * @see ReactiveRow + */ +public interface ReactiveResultSet extends Publisher, ReactiveQueryMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java new file mode 100644 index 00000000000..24692911c38 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java @@ -0,0 +1,82 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.type.DataTypes; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * A row produced by a {@linkplain ReactiveResultSet reactive result set}. + * + *

      This is essentially an extension of the driver's {@link Row} object that also exposes useful + * information about {@linkplain #getExecutionInfo() request execution} and {@linkplain + * #getColumnDefinitions() query metadata} (note however that this information is also exposed at + * result set level for convenience). + * + * @see ReactiveSession + * @see ReactiveResultSet + */ +public interface ReactiveRow extends Row { + + /** + * Returns the column definitions contained in this row. + * + *

      This object is the same for all rows pertaining to the same result set. + * + * @return the column definitions contained in this row. + * @see ReactiveResultSet#getColumnDefinitions() + */ + @NonNull + @Override + ColumnDefinitions getColumnDefinitions(); + + /** + * The execution information for the paged request that produced this result. + * + *

      This object is the same for two rows pertaining to the same page, but differs for rows + * pertaining to different pages. + * + * @return the execution information for the paged request that produced this result. + * @see ReactiveResultSet#getExecutionInfos() + */ + @NonNull + ExecutionInfo getExecutionInfo(); + + /** + * If the query that produced this result was a conditional update, indicates whether it was + * successfully applied. + * + *

      This is equivalent to calling: + * + *

      {@code
      +   * ReactiveRow row = ...
      +   * boolean wasApplied = row.getBoolean("[applied]");
      +   * }
      + * + *

      For consistency, this method always returns {@code true} for non-conditional queries + * (although there is no reason to call the method in that case). This is also the case for + * conditional DDL statements ({@code CREATE KEYSPACE... IF NOT EXISTS}, {@code CREATE TABLE... IF + * NOT EXISTS}), for which Cassandra doesn't return an {@code [applied]} column. + * + *

      Note that, for versions of Cassandra strictly lower than 2.1.0-rc2, a server-side bug (CASSANDRA-7337) causes this + * method to always return {@code true} for batches containing conditional queries. + * + *

      This method always return the same value for all results in the result set. + * + * @return {@code true} for non-conditional queries and for conditional queries that were + * successfully applied, {@code false} otherwise. + */ + default boolean wasApplied() { + return !getColumnDefinitions().contains("[applied]") + || !getColumnDefinitions().get("[applied]").getType().equals(DataTypes.BOOLEAN) + || getBoolean("[applied]"); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java new file mode 100644 index 00000000000..ddad7c89582 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Session; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import org.reactivestreams.Publisher; + +/** + * A {@link Session} that offers utility methods to issue queries using reactive-style programming. + * + *

      Methods in this interface all return {@link ReactiveResultSet} instances. See the javadocs of + * this interface for important remarks anc caveats regarding the subscription to and consumption of + * reactive result sets. + * + * @see ReactiveResultSet + * @see ReactiveRow + */ +public interface ReactiveSession extends Session { + + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all + * the results. + * + * @param query the query to execute. + * @return The {@link Publisher} that will publish the returned results. + */ + @NonNull + default ReactiveResultSet executeReactive(@NonNull String query) { + return executeReactive(SimpleStatement.newInstance(query)); + } + + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all + * the results. + * + * @param statement the statement to execute. + * @return The {@link Publisher} that will publish the returned results. + */ + @NonNull + default ReactiveResultSet executeReactive(@NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, CqlRequestReactiveProcessor.REACTIVE_RESULT_SET)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java new file mode 100644 index 00000000000..536ee3faf44 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java @@ -0,0 +1,54 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; + +/** + * The driver-side representation for a DSE geospatial type. + * + *

      + *     Row row = dseSession.execute("SELECT coords FROM points_of_interest WHERE name = 'Eiffel Tower'").one();
      + *     Point coords = row.get("coords", Point.class);
      + * 
      + * + * The default implementations returned by the driver are immutable and serializable. If you write + * your own implementations, they should at least be thread-safe; serializability is not mandatory, + * but recommended for use with some 3rd-party tools like Apache Spark ™. + */ +public interface Geometry { + + /** + * Returns a Well-known Text (WKT) + * representation of this geospatial type. + */ + @NonNull + String asWellKnownText(); + + /** + * Returns a Well-known + * Binary (WKB) representation of this geospatial type. + * + *

      Note that, due to DSE implementation details, the resulting byte buffer always uses + * little-endian order, regardless of the platform's native order. + */ + @NonNull + ByteBuffer asWellKnownBinary(); + + /** Returns a JSON representation of this geospatial type. */ + @NonNull + String asGeoJson(); + + /** + * Tests whether this geospatial type instance contains another instance. + * + * @param other the other instance. + * @return whether {@code this} contains {@code other}. + */ + boolean contains(@NonNull Geometry other); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java new file mode 100644 index 00000000000..2a766d08b81 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java @@ -0,0 +1,80 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; +import com.esri.core.geometry.ogc.OGCLineString; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * The driver-side representation for DSE's {@code LineString}. + * + *

      This is a curve in a two-dimensional XY-plane, represented by a set of points (with linear + * interpolation between them). + * + *

      The default implementation returned by the driver is immutable. + */ +public interface LineString extends Geometry { + /** + * Creates a line string from its Well-known Text (WKT) representation. + * + * @param source the Well-known Text representation to parse. + * @return the line string represented by the WKT. + * @throws IllegalArgumentException if the string does not contain a valid Well-known Text + * representation. + */ + @NonNull + static LineString fromWellKnownText(@NonNull String source) { + return new DefaultLineString(DefaultGeometry.fromOgcWellKnownText(source, OGCLineString.class)); + } + + /** + * Creates a line string from its Well-known Binary + * (WKB) representation. + * + * @param source the Well-known Binary representation to parse. + * @return the line string represented by the WKB. + * @throws IllegalArgumentException if the provided {@link ByteBuffer} does not contain a valid + * Well-known Binary representation. + */ + @NonNull + static LineString fromWellKnownBinary(@NonNull ByteBuffer source) { + return new DefaultLineString( + DefaultGeometry.fromOgcWellKnownBinary(source, OGCLineString.class)); + } + + /** + * Creates a line string from a GeoJSON + * LineString representation. + * + * @param source the GeoJSON + * LineString representation to parse. + * @return the line string represented by the GeoJSON LineString. + * @throws IllegalArgumentException if the string does not contain a valid GeoJSON LineString + * representation. + */ + @NonNull + static LineString fromGeoJson(@NonNull String source) { + return new DefaultLineString(DefaultGeometry.fromOgcGeoJson(source, OGCLineString.class)); + } + + /** Creates a line string from two or more points. */ + @NonNull + static LineString fromPoints(@NonNull Point p1, @NonNull Point p2, @NonNull Point... 
pn) { + return new DefaultLineString(p1, p2, pn); + } + + @NonNull + List getPoints(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java new file mode 100644 index 00000000000..4e02b1aaf5a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java @@ -0,0 +1,92 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.esri.core.geometry.ogc.OGCPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; + +/** + * The driver-side representation of DSE's {@code Point}. + * + *

      This is a zero-dimensional object that represents a specific (X,Y) location in a + * two-dimensional XY-plane. In case of Geographic Coordinate Systems, the X coordinate is the + * longitude and the Y is the latitude. + * + *

      The default implementation returned by the driver is immutable. + */ +public interface Point extends Geometry { + + /** + * Creates a point from its Well-known + * Text (WKT) representation. + * + * @param source the Well-known Text representation to parse. + * @return the point represented by the WKT. + * @throws IllegalArgumentException if the string does not contain a valid Well-known Text + * representation. + */ + @NonNull + static Point fromWellKnownText(@NonNull String source) { + return new DefaultPoint(DefaultGeometry.fromOgcWellKnownText(source, OGCPoint.class)); + } + + /** + * Creates a point from its Well-known Binary + * (WKB) representation. + * + * @param source the Well-known Binary representation to parse. + * @return the point represented by the WKB. + * @throws IllegalArgumentException if the provided {@link ByteBuffer} does not contain a valid + * Well-known Binary representation. + */ + @NonNull + static Point fromWellKnownBinary(@NonNull ByteBuffer source) { + return new DefaultPoint(DefaultGeometry.fromOgcWellKnownBinary(source, OGCPoint.class)); + } + + /** + * Creates a point from a GeoJSON + * Point representation. + * + * @param source the GeoJSON Point + * representation to parse. + * @return the point represented by the GeoJSON Point. + * @throws IllegalArgumentException if the string does not contain a valid GeoJSON Point representation. + */ + @NonNull + static Point fromGeoJson(@NonNull String source) { + return new DefaultPoint(DefaultGeometry.fromOgcGeoJson(source, OGCPoint.class)); + } + + /** + * Creates a new point. + * + * @param x The X coordinate of this point (or its longitude in Geographic Coordinate Systems). + * @param y The Y coordinate of this point (or its latitude in Geographic Coordinate Systems). + * @return the point represented by coordinates. 
+ */ + @NonNull + static Point fromCoordinates(double x, double y) { + return new DefaultPoint(x, y); + } + + /** + * Returns the X coordinate of this 2D point (or its longitude in Geographic Coordinate Systems). + */ + double X(); + + /** + * Returns the Y coordinate of this 2D point (or its latitude in Geographic Coordinate Systems). + */ + double Y(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java new file mode 100644 index 00000000000..de8e52bf04b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java @@ -0,0 +1,116 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; +import com.esri.core.geometry.ogc.OGCPolygon; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.List; + +/** + * The driver-side representation of DSE's {@code Polygon}. + * + *

      This is a planar surface in a two-dimensional XY-plane, represented by one exterior boundary + * and 0 or more interior boundaries. + * + *

      The default implementation returned by the driver is immutable. + */ +public interface Polygon extends Geometry { + /** + * Creates a polygon from its Well-known + * Text (WKT) representation. + * + * @param source the Well-known Text representation to parse. + * @return the polygon represented by the WKT. + * @throws IllegalArgumentException if the string does not contain a valid Well-known Text + * representation. + */ + @NonNull + static Polygon fromWellKnownText(@NonNull String source) { + return new DefaultPolygon(DefaultGeometry.fromOgcWellKnownText(source, OGCPolygon.class)); + } + + /** + * Creates a polygon from its Well-known Binary + * (WKB) representation. + * + * @param source the Well-known Binary representation to parse. + * @return the polygon represented by the WKB. + * @throws IllegalArgumentException if the provided {@link ByteBuffer} does not contain a valid + * Well-known Binary representation. + */ + @NonNull + static Polygon fromWellKnownBinary(@NonNull ByteBuffer source) { + return new DefaultPolygon(DefaultGeometry.fromOgcWellKnownBinary(source, OGCPolygon.class)); + } + + /** + * Creates a polygon from a GeoJSON + * Polygon representation. + * + * @param source the GeoJSON Polygon + * representation to parse. + * @return the polygon represented by the GeoJSON Polygon. + * @throws IllegalArgumentException if the string does not contain a valid GeoJSON Polygon representation. + */ + @NonNull + static Polygon fromGeoJson(@NonNull String source) { + return new DefaultPolygon(DefaultGeometry.fromOgcGeoJson(source, OGCPolygon.class)); + } + + /** Creates a polygon from a series of 3 or more points. */ + @NonNull + static Polygon fromPoints( + @NonNull Point p1, @NonNull Point p2, @NonNull Point p3, @NonNull Point... pn) { + return new DefaultPolygon(p1, p2, p3, pn); + } + + /** + * Returns a polygon builder. + * + *

      This is intended for complex polygons with multiple rings (i.e. holes inside the polygon). + * For simple cases, consider {@link #fromPoints(Point, Point, Point, Point...)} instead. + */ + @NonNull + static Builder builder() { + return new DefaultPolygon.Builder(); + } + + /** Returns the external ring of the polygon. */ + @NonNull + List getExteriorRing(); + + /** + * Returns the internal rings of the polygon, i.e. any holes inside of it (or islands inside of + * the holes). + */ + @NonNull + List> getInteriorRings(); + + /** Provides a simple DSL to build a polygon. */ + interface Builder { + /** + * Adds a new ring for this polygon. + * + *

      There can be one or more outer rings and zero or more inner rings. If a polygon has an + * inner ring, the inner ring looks like a hole. If the hole contains another outer ring, that + * outer ring looks like an island. + * + *

      There must be one "main" outer ring that contains all the others. + */ + @NonNull + Builder addRing(@NonNull Point p1, @NonNull Point p2, @NonNull Point p3, @NonNull Point... pn); + + @NonNull + Polygon build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java new file mode 100644 index 00000000000..0f4dc42a6ba --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java @@ -0,0 +1,246 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.base.Strings; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; +import java.text.ParseException; +import java.time.ZonedDateTime; +import java.util.Objects; +import java.util.Optional; + +/** + * A date range, as defined by the server type {@code + * org.apache.cassandra.db.marshal.DateRangeType}, corresponding to the Apache Solr type {@code + * DateRangeField}. + * + *

      A date range can be either {@linkplain DateRange#DateRange(DateRangeBound) single-bounded}, in + * which case it represents a unique instant (e.g. "{@code 2001-01-01}"), or {@linkplain + * #DateRange(DateRangeBound, DateRangeBound) double-bounded}, in which case it represents an + * interval of time (e.g. "{@code [2001-01-01 TO 2002]}"). + * + *

      Date range {@linkplain DateRangeBound bounds} are always inclusive; they must be either valid + * dates, or the special value {@link DateRangeBound#UNBOUNDED UNBOUNDED}, represented by a "{@code + * *}", e.g. "{@code [2001 TO *]}". + * + *

      Instances can be more easily created with the {@link #parse(String)} method. + * + *

      This class is immutable and thread-safe. + * + * @since DSE 5.1 + */ +public class DateRange implements Serializable { + + /** + * Parses the given string as a date range. + * + *

      The given input must be compliant with Apache Solr type {@code + * DateRangeField} syntax; it can either be a {@linkplain #DateRange(DateRangeBound) + * single-bounded range}, or a {@linkplain #DateRange(DateRangeBound, DateRangeBound) + * double-bounded range}. + * + * @throws ParseException if the given string could not be parsed into a valid range. + * @see DateRangeBound#parseLowerBound(String) + * @see DateRangeBound#parseUpperBound(String) + */ + @NonNull + public static DateRange parse(@NonNull String source) throws ParseException { + if (Strings.isNullOrEmpty(source)) { + throw new ParseException("Date range is null or empty", 0); + } + + if (source.charAt(0) == '[') { + if (source.charAt(source.length() - 1) != ']') { + throw new ParseException( + "If date range starts with '[' it must end with ']'; got " + source, + source.length() - 1); + } + int middle = source.indexOf(" TO "); + if (middle < 0) { + throw new ParseException( + "If date range starts with '[' it must contain ' TO '; got " + source, 0); + } + String lowerBoundString = source.substring(1, middle); + int upperBoundStart = middle + 4; + String upperBoundString = source.substring(upperBoundStart, source.length() - 1); + DateRangeBound lowerBound; + try { + lowerBound = DateRangeBound.parseLowerBound(lowerBoundString); + } catch (Exception e) { + throw newParseException("Cannot parse date range lower bound: " + source, 1, e); + } + DateRangeBound upperBound; + try { + upperBound = DateRangeBound.parseUpperBound(upperBoundString); + } catch (Exception e) { + throw newParseException( + "Cannot parse date range upper bound: " + source, upperBoundStart, e); + } + return new DateRange(lowerBound, upperBound); + } else { + try { + return new DateRange(DateRangeBound.parseLowerBound(source)); + } catch (Exception e) { + throw newParseException("Cannot parse single date range bound: " + source, 0, e); + } + } + } + + @NonNull private final DateRangeBound lowerBound; + @Nullable private final 
DateRangeBound upperBound; + + /** + * Creates a "single bounded" instance, i.e., a date range whose upper and lower bounds are + * identical. + * + * @throws NullPointerException if {@code singleBound} is null. + */ + public DateRange(@NonNull DateRangeBound singleBound) { + this.lowerBound = Preconditions.checkNotNull(singleBound, "singleBound cannot be null"); + this.upperBound = null; + } + + /** + * Creates an instance composed of two distinct bounds. + * + * @throws NullPointerException if {@code lowerBound} or {@code upperBound} is null. + * @throws IllegalArgumentException if both {@code lowerBound} and {@code upperBound} are not + * unbounded and {@code lowerBound} is greater than {@code upperBound}. + */ + public DateRange(@NonNull DateRangeBound lowerBound, @NonNull DateRangeBound upperBound) { + Preconditions.checkNotNull(lowerBound, "lowerBound cannot be null"); + Preconditions.checkNotNull(upperBound, "upperBound cannot be null"); + if (!lowerBound.isUnbounded() + && !upperBound.isUnbounded() + && lowerBound.getTimestamp().compareTo(upperBound.getTimestamp()) >= 0) { + throw new IllegalArgumentException( + String.format( + "Lower bound of a date range should be before upper bound, got: [%s TO %s]", + lowerBound, upperBound)); + } + this.lowerBound = lowerBound; + this.upperBound = upperBound; + } + + /** Returns the lower bound of this range (inclusive). */ + @NonNull + public DateRangeBound getLowerBound() { + return lowerBound; + } + + /** + * Returns the upper bound of this range (inclusive), or empty if the range is {@linkplain + * #isSingleBounded() single-bounded}. + */ + @NonNull + public Optional getUpperBound() { + return Optional.ofNullable(upperBound); + } + + /** + * Returns whether this range is single-bounded, i.e. if the upper and lower bounds are identical. 
+ */ + public boolean isSingleBounded() { + return upperBound == null; + } + + /** + * Returns the string representation of this range, in a format compatible with Apache Solr + * DateRageField syntax + * + * @see DateRangeBound#toString() + */ + @NonNull + @Override + public String toString() { + if (isSingleBounded()) { + return lowerBound.toString(); + } else { + return String.format("[%s TO %s]", lowerBound, upperBound); + } + } + + @Override + public boolean equals(@Nullable Object other) { + if (other == this) { + return true; + } else if (other instanceof DateRange) { + DateRange that = (DateRange) other; + return Objects.equals(this.lowerBound, that.lowerBound) + && Objects.equals(this.upperBound, that.upperBound); + + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(lowerBound, upperBound); + } + + private static ParseException newParseException(String message, int offset, Exception cause) { + ParseException parseException = new ParseException(message, offset); + parseException.initCause(cause); + return parseException; + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData the lower bound timestamp and precision, followed by the upper bound timestamp and + * precision, or two {@code null}s if the range is single-bounded. 
+ */ + private Object writeReplace() { + return new SerializationProxy(this); + } + + private static class SerializationProxy implements Serializable { + + private static final long serialVersionUID = 1L; + + private final ZonedDateTime lowerBoundTimestamp; + private final DateRangePrecision lowerBoundPrecision; + private final ZonedDateTime upperBoundTimestamp; + private final DateRangePrecision upperBoundPrecision; + + SerializationProxy(DateRange input) { + this.lowerBoundTimestamp = input.lowerBound.getTimestamp(); + this.lowerBoundPrecision = input.lowerBound.getPrecision(); + if (input.upperBound != null) { + this.upperBoundTimestamp = input.upperBound.getTimestamp(); + this.upperBoundPrecision = input.upperBound.getPrecision(); + } else { + this.upperBoundTimestamp = null; + this.upperBoundPrecision = null; + } + } + + private Object readResolve() { + if (upperBoundTimestamp == null ^ upperBoundPrecision == null) { + // Should not happen, but protect against corrupted streams + throw new IllegalArgumentException( + "Invalid serialized form, upper bound timestamp and precision " + + "should be either both null or both non-null"); + } + + if (upperBoundTimestamp == null) { + return new DateRange(DateRangeBound.lowerBound(lowerBoundTimestamp, lowerBoundPrecision)); + } else { + return new DateRange( + DateRangeBound.lowerBound(lowerBoundTimestamp, lowerBoundPrecision), + DateRangeBound.upperBound(upperBoundTimestamp, upperBoundPrecision)); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java new file mode 100644 index 00000000000..0ce2f104330 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java @@ -0,0 +1,201 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import com.datastax.dse.driver.internal.core.search.DateRangeUtil; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.text.ParseException; +import java.time.ZonedDateTime; +import java.util.Calendar; +import java.util.Objects; + +/** + * A date range bound. + * + *

      It is composed of a {@link ZonedDateTime} field and a corresponding {@link + * DateRangePrecision}. + * + *

      Date range bounds are inclusive. The special value {@link #UNBOUNDED} denotes an unbounded + * (infinite) bound, represented by a {@code *} sign. + * + *

      This class is immutable and thread-safe. + */ +public class DateRangeBound { + + /** + * The unbounded {@link DateRangeBound} instance. It is syntactically represented by a {@code *} + * (star) sign. + */ + public static final DateRangeBound UNBOUNDED = new DateRangeBound(); + + /** + * Parses the given input as a lower date range bound. + * + *

      The input should be a Lucene-compliant + * string. + * + *

      The returned bound will have its {@linkplain DateRangePrecision precision} inferred from the + * input, and its timestamp will be {@linkplain DateRangePrecision#roundDown(ZonedDateTime) + * rounded down} to that precision. + * + *

      Note that, in order to align with the server's parsing behavior, dates will always be parsed + * in the UTC time zone. + * + * @throws NullPointerException if {@code lowerBound} is {@code null}. + * @throws ParseException if the given input cannot be parsed. + */ + @NonNull + public static DateRangeBound parseLowerBound(@NonNull String source) throws ParseException { + Preconditions.checkNotNull(source); + Calendar calendar = DateRangeUtil.parseCalendar(source); + DateRangePrecision precision = DateRangeUtil.getPrecision(calendar); + return (precision == null) + ? UNBOUNDED + : lowerBound(DateRangeUtil.toZonedDateTime(calendar), precision); + } + + /** + * Parses the given input as an upper date range bound. + * + *

      The input should be a Lucene-compliant + * string. + * + *

      The returned bound will have its {@linkplain DateRangePrecision precision} inferred from the + * input, and its timestamp will be {@linkplain DateRangePrecision#roundUp(ZonedDateTime) rounded + * up} to that precision. + * + *

      Note that, in order to align with the server's behavior (e.g. when using date range literals + * in CQL query strings), dates must always be in the UTC time zone: an optional trailing {@code + * Z}" is allowed, but no other time zone ID (not even {@code UTC}, {@code GMT} or {@code +00:00}) + * is permitted. + * + * @throws NullPointerException if {@code upperBound} is {@code null}. + * @throws ParseException if the given input cannot be parsed. + */ + public static DateRangeBound parseUpperBound(String source) throws ParseException { + Preconditions.checkNotNull(source); + Calendar calendar = DateRangeUtil.parseCalendar(source); + DateRangePrecision precision = DateRangeUtil.getPrecision(calendar); + return (precision == null) + ? UNBOUNDED + : upperBound(DateRangeUtil.toZonedDateTime(calendar), precision); + } + + /** + * Creates a date range lower bound from the given date and precision. Temporal fields smaller + * than the precision will be rounded down. + */ + public static DateRangeBound lowerBound(ZonedDateTime timestamp, DateRangePrecision precision) { + return new DateRangeBound(precision.roundDown(timestamp), precision); + } + + /** + * Creates a date range upper bound from the given date and precision. Temporal fields smaller + * than the precision will be rounded up. 
+ */ + public static DateRangeBound upperBound(ZonedDateTime timestamp, DateRangePrecision precision) { + return new DateRangeBound(precision.roundUp(timestamp), precision); + } + + @Nullable private final ZonedDateTime timestamp; + @Nullable private final DateRangePrecision precision; + + private DateRangeBound(@NonNull ZonedDateTime timestamp, @NonNull DateRangePrecision precision) { + Preconditions.checkNotNull(timestamp); + Preconditions.checkNotNull(precision); + this.timestamp = timestamp; + this.precision = precision; + } + + // constructor used for the special UNBOUNDED value + private DateRangeBound() { + this.timestamp = null; + this.precision = null; + } + + /** Whether this bound is unbounded (i.e. denotes the special {@code *} value). */ + public boolean isUnbounded() { + return this.timestamp == null && this.precision == null; + } + + /** + * Returns the timestamp of this bound. + * + * @throws IllegalStateException if this bound is {@linkplain #isUnbounded() unbounded}. + */ + @NonNull + public ZonedDateTime getTimestamp() { + if (isUnbounded()) { + throw new IllegalStateException( + "Can't call this method on UNBOUNDED, use isUnbounded() to check first"); + } + assert timestamp != null; + return timestamp; + } + + /** + * Returns the precision of this bound. + * + * @throws IllegalStateException if this bound is {@linkplain #isUnbounded() unbounded}. + */ + @NonNull + public DateRangePrecision getPrecision() { + if (isUnbounded()) { + throw new IllegalStateException( + "Can't call this method on UNBOUNDED, use isUnbounded() to check first"); + } + assert precision != null; + return precision; + } + + /** + * Returns this bound as a Lucene-compliant string. + * + *

      Unbounded bounds always return "{@code *}"; all other bounds are formatted in one of the + * common ISO-8601 datetime formats, depending on their precision. + * + *

      Note that Lucene expects timestamps in UTC only. Timezone presence is always optional, and + * if present, it must be expressed with the symbol "Z" exclusively. Therefore this method does + * not include any timezone information in the returned string, except for bounds with {@linkplain + * DateRangePrecision#MILLISECOND millisecond} precision, where the symbol "Z" is always appended + * to the resulting string. + */ + @NonNull + @Override + public String toString() { + if (isUnbounded()) { + return "*"; + } else { + assert timestamp != null && precision != null; + return precision.format(timestamp); + } + } + + @Override + public boolean equals(@Nullable Object other) { + if (other == this) { + return true; + } else if (other instanceof DateRangeBound) { + DateRangeBound that = (DateRangeBound) other; + return Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.precision, that.precision); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(timestamp, precision); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java new file mode 100644 index 00000000000..3ab93a78bba --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java @@ -0,0 +1,186 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import com.datastax.dse.driver.internal.core.search.DateRangeUtil; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.temporal.ChronoField; +import java.time.temporal.ChronoUnit; +import java.util.Locale; +import java.util.Map; + +/** The precision of a {@link DateRangeBound}. */ +public enum DateRangePrecision { + MILLISECOND( + 0x06, + ChronoUnit.MILLIS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM-dd'T'HH:mm:ss.SSS") + .optionalStart() + .appendZoneId() + .optionalEnd() + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + SECOND( + 0x05, + ChronoUnit.SECONDS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM-dd'T'HH:mm:ss") + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + MINUTE( + 0x04, + ChronoUnit.MINUTES, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM-dd'T'HH:mm") + .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + HOUR( + 0x03, + ChronoUnit.HOURS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM-dd'T'HH") + .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0) + .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + 
.toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + DAY( + 0x02, + ChronoUnit.DAYS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM-dd") + .parseDefaulting(ChronoField.HOUR_OF_DAY, 0) + .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0) + .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + MONTH( + 0x01, + ChronoUnit.MONTHS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu-MM") + .parseDefaulting(ChronoField.DAY_OF_MONTH, 1) + .parseDefaulting(ChronoField.HOUR_OF_DAY, 0) + .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0) + .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)), + YEAR( + 0x00, + ChronoUnit.YEARS, + new DateTimeFormatterBuilder() + .parseCaseSensitive() + .parseStrict() + .appendPattern("uuuu") + .parseDefaulting(ChronoField.MONTH_OF_YEAR, 1) + .parseDefaulting(ChronoField.DAY_OF_MONTH, 1) + .parseDefaulting(ChronoField.HOUR_OF_DAY, 0) + .parseDefaulting(ChronoField.MINUTE_OF_HOUR, 0) + .parseDefaulting(ChronoField.SECOND_OF_MINUTE, 0) + .parseDefaulting(ChronoField.MILLI_OF_SECOND, 0) + .toFormatter() + .withZone(ZoneOffset.UTC) + .withLocale(Locale.ROOT)); + + private final byte encoding; + private final ChronoUnit roundingUnit; + // The formatter is only used for formatting (parsing is done with DateRangeUtil.parseCalendar to + // be exactly the same as DSE's). + // If that ever were to change, note that DateTimeFormatters with a time zone have a parsing bug + // in Java 8: the formatter's zone will always be used, even if the input string specifies one + // explicitly. 
+ // See https://stackoverflow.com/questions/41999421 + private final DateTimeFormatter formatter; + + DateRangePrecision(int encoding, ChronoUnit roundingUnit, DateTimeFormatter formatter) { + this.encoding = (byte) encoding; + this.roundingUnit = roundingUnit; + this.formatter = formatter; + } + + private static final Map ENCODINGS; + + static { + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (DateRangePrecision precision : values()) { + builder.put(precision.encoding, precision); + } + ENCODINGS = builder.build(); + } + + public static DateRangePrecision fromEncoding(byte encoding) { + DateRangePrecision precision = ENCODINGS.get(encoding); + if (precision == null) { + throw new IllegalArgumentException("Invalid precision encoding: " + encoding); + } + return precision; + } + + /** The code used to represent the precision when a date range is encoded to binary. */ + public byte getEncoding() { + return encoding; + } + + /** + * Rounds up the given timestamp to this precision. + * + *

      Temporal fields smaller than this precision will be rounded up; other fields will be left + * untouched. + */ + @NonNull + public ZonedDateTime roundUp(@NonNull ZonedDateTime timestamp) { + Preconditions.checkNotNull(timestamp); + return DateRangeUtil.roundUp(timestamp, roundingUnit); + } + + /** + * Rounds down the given timestamp to this precision. + * + *

      Temporal fields smaller than this precision will be rounded down; other fields will be left + * untouched. + */ + @NonNull + public ZonedDateTime roundDown(@NonNull ZonedDateTime timestamp) { + Preconditions.checkNotNull(timestamp); + return DateRangeUtil.roundDown(timestamp, roundingUnit); + } + + /** Formats the given timestamp according to this precision. */ + public String format(ZonedDateTime timestamp) { + return formatter.format(timestamp); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java new file mode 100644 index 00000000000..2aa661de1df --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Iterator; +import java.util.concurrent.CompletionStage; + +/** + * The result of an asynchronous graph query. + * + *

      The default implementation returned by the driver is not thread-safe: the iterable + * returned by {@link #currentPage()} should only be iterated by a single thread. However, if + * subsequent pages are requested via {@link #fetchNextPage()}, it's safe to process those new + * instances in other threads (as long as each individual page of results is not accessed + * concurrently). + * + * @see GraphResultSet + */ +public interface AsyncGraphResultSet { + + /** The execution information for this page of results. */ + @NonNull + GraphExecutionInfo getExecutionInfo(); + + /** How many rows are left before the current page is exhausted. */ + int remaining(); + + /** + * The nodes in the current page. To keep iterating beyond that, use {@link #hasMorePages()} and + * {@link #fetchNextPage()}. + * + *

      Note that this method always returns the same object, and that that object can only be + * iterated once: nodes are "consumed" as they are read. + */ + @NonNull + Iterable currentPage(); + + /** + * Returns the next node, or {@code null} if the result set is exhausted. + * + *

      This is convenient for queries that are known to return exactly one node. + */ + @Nullable + default GraphNode one() { + Iterator iterator = currentPage().iterator(); + return iterator.hasNext() ? iterator.next() : null; + } + + /** + * Whether there are more pages of results. If so, call {@link #fetchNextPage()} to fetch the next + * one asynchronously. + */ + boolean hasMorePages(); + + /** + * Fetch the next page of results asynchronously. + * + * @throws IllegalStateException if there are no more pages. Use {@link #hasMorePages()} to check + * if you can call this method. + */ + @NonNull + CompletionStage fetchNextPage() throws IllegalStateException; + + /** + * Cancels the query and asks the server to stop sending results. + * + *

      At this time, graph queries are not paginated and the server sends all the results at once; + * therefore this method has no effect. + */ + void cancel(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java new file mode 100644 index 00000000000..9125562c1f1 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java @@ -0,0 +1,138 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultBatchGraphStatement; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +/** + * A graph statement that groups multiple mutating traversals together, to be executed in the + * same transaction. + * + *

      It is reserved for graph mutations, and does not return any result. + * + *

      All the mutations grouped in the batch will either all succeed, or they will all be discarded + * and return an error. + * + *

      The default implementation returned by the driver is immutable and thread-safe. Each mutation + * operation returns a copy. If you chain many of those operations, it is recommended to use {@link + * #builder()} instead for better memory usage. + * + *

      Typically used like so: + * + *

      {@code
      + * import static com.datastax.dse.driver.api.core.graph.DseGraph.g;
      + *
      + * BatchGraphStatement statement =
      + *     BatchGraphStatement.builder()
      + *         .addTraversal(
      + *                 g.addV("person").property("name", "batch1").property("age", 1))
      + *         .addTraversal(
      + *                 g.addV("person").property("name", "batch2").property("age", 2))
      + *         .build();
      + *
      + * GraphResultSet graphResultSet = dseSession.execute(statement);
      + * }
      + * + * @see DseGraph#g + */ +public interface BatchGraphStatement + extends GraphStatement, Iterable { + + /** + * Create a new, empty instance. + * + *

      Traversals can be added with {@link #addTraversal(GraphTraversal)}. + */ + @NonNull + static BatchGraphStatement newInstance() { + return new DefaultBatchGraphStatement( + ImmutableList.of(), + null, + null, + null, + Long.MIN_VALUE, + null, + null, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null); + } + + /** Create a new instance from the given list of traversals. */ + @NonNull + static BatchGraphStatement newInstance(@NonNull Iterable traversals) { + return new DefaultBatchGraphStatement( + traversals, + null, + null, + null, + Long.MIN_VALUE, + null, + null, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null); + } + + /** Create a new instance from the given list of traversals. */ + @NonNull + static BatchGraphStatement newInstance(@NonNull GraphTraversal... traversals) { + return newInstance(ImmutableList.copyOf(traversals)); + } + + /** + * Create a builder helper object to start creating a new instance. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static BatchGraphStatementBuilder builder() { + return new BatchGraphStatementBuilder(); + } + + /** + * Create a builder helper object to start creating a new instance with an existing statement as a + * template. The traversals and options set on the template will be copied for the new statement + * at the moment this method is called. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static BatchGraphStatementBuilder builder(@NonNull BatchGraphStatement template) { + return new BatchGraphStatementBuilder(template); + } + + /** + * Add a traversal to this statement. If many traversals need to be added, use a {@link + * #builder()}, or the {@link #addTraversals(Iterable)} method instead to avoid intermediary + * copies. + */ + @NonNull + BatchGraphStatement addTraversal(@NonNull GraphTraversal traversal); + + /** + * Adds several traversals to this statement. If this method is to be called many times, consider + * using a {@link #builder()} instead to avoid intermediary copies. + */ + @NonNull + BatchGraphStatement addTraversals(@NonNull Iterable traversals); + + /** Get the number of traversals already added to this statement. */ + int size(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java new file mode 100644 index 00000000000..246d80db2e7 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java @@ -0,0 +1,101 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultBatchGraphStatement; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +/** + * A builder to create a batch graph statement. + * + *

      This class is mutable and not thread-safe. + */ +@NotThreadSafe +public class BatchGraphStatementBuilder + extends GraphStatementBuilderBase { + + private ImmutableList.Builder traversalsBuilder = ImmutableList.builder(); + private int traversalsCount; + + public BatchGraphStatementBuilder() { + // nothing to do + } + + public BatchGraphStatementBuilder(BatchGraphStatement template) { + super(template); + traversalsBuilder.addAll(template); + traversalsCount = template.size(); + } + + /** Add a traversal to this builder to include in the generated {@link BatchGraphStatement}. */ + @NonNull + public BatchGraphStatementBuilder addTraversal(@NonNull GraphTraversal traversal) { + traversalsBuilder.add(traversal); + traversalsCount += 1; + return this; + } + + /** + * Add several traversals to this builder to include in the generated {@link BatchGraphStatement}. + */ + @NonNull + public BatchGraphStatementBuilder addTraversals(@NonNull Iterable traversals) { + for (GraphTraversal traversal : traversals) { + traversalsBuilder.add(traversal); + traversalsCount += 1; + } + return this; + } + + /** + * Add several traversals to this builder to include in the generated {@link BatchGraphStatement}. + */ + @NonNull + public BatchGraphStatementBuilder addTraversals(@NonNull GraphTraversal... traversals) { + for (GraphTraversal traversal : traversals) { + traversalsBuilder.add(traversal); + traversalsCount += 1; + } + return this; + } + + /** Clears all the traversals previously added to this builder. */ + @NonNull + public BatchGraphStatementBuilder clearTraversals() { + traversalsBuilder = ImmutableList.builder(); + traversalsCount = 0; + return this; + } + + /** Returns the number of traversals added to this statement so far. 
*/ + public int getTraversalsCount() { + return traversalsCount; + } + + @NonNull + @Override + public BatchGraphStatement build() { + return new DefaultBatchGraphStatement( + traversalsBuilder.build(), + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + buildCustomPayload(), + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java new file mode 100644 index 00000000000..206121c85fb --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java @@ -0,0 +1,51 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.internal.core.graph.DefaultDseRemoteConnectionBuilder; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; + +/** + * General purpose utility class for interaction with DSE Graph via the DataStax Enterprise Java + * driver. + */ +public class DseGraph { + + /** + * A general-purpose shortcut for a non-connected TinkerPop {@link GraphTraversalSource} + * based on an immutable empty graph. This is really just a shortcut to {@code + * EmptyGraph.instance().traversal();}. + * + *

      Can be used to create {@link FluentGraphStatement} instances (recommended), or can be + * configured to be remotely connected to DSE Graph using the {@link #remoteConnectionBuilder} + * method. + * + *

      For ease of use you may statically import this variable. + * + *

      Calling {@code g.getGraph()} will return a local immutable empty graph which is in no way + * connected to the DSE Graph server, it will not allow to modify a DSE Graph directly. To act on + * data stored in DSE Graph you must use {@linkplain + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal traversal}s such as + * {@code DseGraph.g.V()}, {@code DseGraph.g.addV/addE()}. + */ + public static final GraphTraversalSource g = EmptyGraph.instance().traversal(); + + /** + * Returns a builder helper class to help create {@link + * org.apache.tinkerpop.gremlin.process.remote.RemoteConnection} implementations that seamlessly + * connect to DSE Graph using the {@link DseSession} in parameter. + */ + public static DseGraphRemoteConnectionBuilder remoteConnectionBuilder(DseSession dseSession) { + return new DefaultDseRemoteConnectionBuilder(dseSession); + } + + private DseGraph() { + // nothing to do + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java new file mode 100644 index 00000000000..112663e1bb8 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; + +/** + * A builder helper to create a {@link RemoteConnection} that will be used to build + * implicitly-executing fluent traversals. + * + *

      To create an instance of this, use the {@link DseGraph#remoteConnectionBuilder(DseSession)} + * method: + * + *

      {@code
      + * DseSession dseSession = DseSession.builder().build();
      + * GraphTraversalSource g = DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(dseSession).build());
      + * List vertices = g.V().hasLabel("person").toList();
      + * }
      + * + * @see DseSession + */ +public interface DseGraphRemoteConnectionBuilder { + + /** Build the remote connection that was configured with this builder. */ + RemoteConnection build(); + + /** + * Set a configuration profile that will be used for every traversal built using the remote + * connection. + * + *

      For the list of options available for Graph requests, see the {@code reference.conf} + * configuration file. + */ + DseGraphRemoteConnectionBuilder withExecutionProfile(DriverExecutionProfile executionProfile); + + /** + * Set the name of an execution profile that will be used for every traversal using from the + * remote connection. Named profiles are pre-defined in the driver configuration. + * + *

      For the list of options available for Graph requests, see the {@code reference.conf} + * configuration file. + */ + DseGraphRemoteConnectionBuilder withExecutionProfileName(String executionProfileName); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java new file mode 100644 index 00000000000..b9f8aaea362 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultFluentGraphStatement; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +/** + * A graph statement that uses a TinkerPop {@link GraphTraversal} as the query. + * + *

      Typically used like so: + * + *

      {@code
      + * import static com.datastax.dse.driver.api.core.graph.DseGraph.g;
      + *
      + * FluentGraphStatement statement = FluentGraphStatement.newInstance(g.V().has("name", "marko"));
      + *
      + * GraphResultSet graphResultSet = dseSession.execute(statement);
      + * }
      + * + * @see DseGraph#g + */ +public interface FluentGraphStatement extends GraphStatement { + + /** + * Create a new instance from the given traversal. + * + *

      Use {@link #builder(GraphTraversal)} if you want to set more options before building the + * final statement instance. + */ + @NonNull + static FluentGraphStatement newInstance(@NonNull GraphTraversal traversal) { + return new DefaultFluentGraphStatement( + traversal, + null, + null, + null, + Long.MIN_VALUE, + null, + null, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null); + } + + /** + * Create a builder object to start creating a new instance from the given traversal. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static FluentGraphStatementBuilder builder(@NonNull GraphTraversal traversal) { + return new FluentGraphStatementBuilder(traversal); + } + + /** + * Create a builder helper object to start creating a new instance with an existing statement as a + * template. The traversal and options set on the template will be copied for the new statement at + * the moment this method is called. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static FluentGraphStatementBuilder builder(@NonNull FluentGraphStatement template) { + return new FluentGraphStatementBuilder(template); + } + + /** The underlying TinkerPop object representing the traversal executed by this statement. */ + @NonNull + GraphTraversal getTraversal(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java new file mode 100644 index 00000000000..a0220607adc --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultFluentGraphStatement; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +/** + * A builder to create a fluent graph statement. + * + *

      This class is mutable and not thread-safe. + */ +@NotThreadSafe +public class FluentGraphStatementBuilder + extends GraphStatementBuilderBase { + + private GraphTraversal traversal; + + public FluentGraphStatementBuilder(@NonNull GraphTraversal traversal) { + this.traversal = traversal; + } + + public FluentGraphStatementBuilder(@NonNull FluentGraphStatement template) { + super(template); + this.traversal = template.getTraversal(); + } + + @NonNull + @Override + public FluentGraphStatement build() { + return new DefaultFluentGraphStatement( + this.traversal, + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + buildCustomPayload(), + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java new file mode 100644 index 00000000000..21483be54f9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; + +/** Information about the execution of a graph statement. */ +public interface GraphExecutionInfo { + + /** The statement that was executed. */ + GraphStatement getStatement(); + + /** The node that was used as a coordinator to successfully complete the query. 
*/ + Node getCoordinator(); + + /** + * The number of speculative executions that were started for this query. + * + *

      This does not include the initial, normal execution of the query. Therefore, if speculative + * executions are disabled, this will always be 0. If they are enabled and one speculative + * execution was triggered in addition to the initial execution, this will be 1, etc. + * + * @see SpeculativeExecutionPolicy + */ + int getSpeculativeExecutionCount(); + + /** + * The index of the execution that completed this query. + * + *

      0 represents the initial, normal execution of the query, 1 the first speculative execution, + * etc. + * + * @see SpeculativeExecutionPolicy + */ + int getSuccessfulExecutionIndex(); + + /** + * The errors encountered on previous coordinators, if any. + * + *

      The list is in chronological order, based on the time that the driver processed the error + * responses. If speculative executions are enabled, they run concurrently so their errors will be + * interleaved. A node can appear multiple times (if the retry policy decided to retry on the same + * node). + */ + List> getErrors(); + + /** + * The server-side warnings for this query, if any (otherwise the list will be empty). + * + *

      This feature is only available with {@link DefaultProtocolVersion#V4} or above; with lower + * versions, this list will always be empty. + */ + List getWarnings(); + + /** + * The custom payload sent back by the server with the response, if any (otherwise the map will be + * empty). + * + *

      This method returns a read-only view of the original map, but its values remain inherently + * mutable. If multiple clients will read these values, care should be taken not to corrupt the + * data (in particular, preserve the indices by calling {@link ByteBuffer#duplicate()}). + * + *

      This feature is only available with {@link DefaultProtocolVersion#V4} or above; with lower + * versions, this map will always be empty. + */ + Map getIncomingPayload(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java new file mode 100644 index 00000000000..ceec6cc31b3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java @@ -0,0 +1,219 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; + +/** + * A node in a tree-like structure representing a Graph or a Graph component. + * + *

      It can be: + * + *

        + *
      • a scalar value of a primitive type (boolean, string, int, long, double); + *
      • a graph element (vertex, edge, path or property); + *
      • a list of nodes; + *
      • a set of nodes; + *
      • a map of nodes. + *
      + * + * This interface provides test methods to find out what a node represents, and conversion methods + * to cast it to a particular Java type. Two generic methods {@link #as(Class)} and {@link + * #as(GenericType)} can produce any arbitrary Java type, provided that the underlying serialization + * runtime has been correctly configured to support the requested conversion. + */ +public interface GraphNode { + + /** Whether this node represents a {@code null} value. */ + boolean isNull(); + + /** + * Returns {@code true} if this node is a {@link Map}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, you can convert this node with {@link #asMap()}, or use + * {@link #keys()} and {@link #getByKey(Object)} to access the individual fields (note that + * entries are not ordered, so {@link #getByIndex(int)} does not work). + */ + boolean isMap(); + + /** The keys of this map node, or an empty iterator if it is not a map. */ + Iterable keys(); + + /** + * Returns the value for the given key as a node. + * + *

      If this node is not a map, or does not contain the specified key, {@code null} is returned. + * + *

      If the property value has been explicitly set to {@code null}, implementors may return a + * special "null node" instead of {@code null}. + */ + GraphNode getByKey(Object key); + + /** Deserializes and returns this node as a {@link Map}. */ + Map asMap(); + + /** + * Returns {@code true} if this node is a {@link List}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, you can convert this node with {@link #asList()}, or + * use {@link #size()} and {@link #getByIndex(int)} to access the individual fields. + */ + boolean isList(); + + /** The size of the current node, if it is a list or map, or {@code 0} otherwise. */ + int size(); + + /** + * Returns the element at the given index as a node. + * + *

If this node is not a list, or {@code index} is out of bounds (i.e. less than zero or {@code + * >= size()}), {@code null} is returned; no exception will be thrown. + * + *

      If the requested element has been explicitly set to {@code null}, implementors may return a + * special "null node" instead of {@code null}. + */ + GraphNode getByIndex(int index); + + /** Deserializes and returns this node as a {@link List}. */ + List asList(); + + /** + * Returns {@code true} if this node is a simple scalar value, (i.e., string, boolean or number), + * and {@code false} otherwise. + * + *

      If this method returns {@code true}, you can convert this node with {@link #asString()}, + * {@link #asBoolean()}, {@link #asInt()}, {@link #asLong()} or {@link #asDouble()}. + */ + boolean isValue(); + + /** + * This node as an integer, or {@code 0} if it can't be converted. + * + *

      This method never throws; even incompatible objects (including {@code null} and structured + * types like objects and arrays) are converted to the default value. + */ + int asInt(); + + /** + * This node as a boolean, or {@code false} if it can't be converted. + * + *

      This method never throws; even incompatible objects (including {@code null} and structured + * types like objects and arrays) are converted to the default value. + */ + boolean asBoolean(); + + /** + * Returns this node as a long integer, or {@code 0L} if it can't be converted. + * + *

This method never throws; even incompatible objects (including {@code null} and structured + * types like objects and arrays) are converted to the default value. + */ + long asLong(); + + /** + * Returns this node as a double, or {@code 0.0D} if it can't be converted. + * + *

      This method never throws; even incompatible objects (including {@code null} and structured + * types like objects and arrays) are converted to the default value. + */ + double asDouble(); + + /** + * A valid string representation of this node, if the node is a simple node (i.e. it is not an + * object nor an array), otherwise an empty string. + * + *

      This method never throws; even incompatible objects (including {@code null} and structured + * types like objects and arrays) are converted to the default value. + */ + String asString(); + + /** + * Deserializes and returns this node as an instance of {@code clazz}. + * + *

      Before attempting such a conversion, there must be an appropriate converter configured on + * the underlying serialization runtime. + */ + ResultT as(Class clazz); + + /** + * Deserializes and returns this node as an instance of the given {@link GenericType type}. + * + *

      Before attempting such a conversion, there must be an appropriate converter configured on + * the underlying serialization runtime. + */ + ResultT as(GenericType type); + + /** + * Returns {@code true} if this node is a {@link Vertex}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, then {@link #asVertex()} can be safely called. + */ + boolean isVertex(); + + /** Returns this node as a Tinkerpop {@link Vertex}. */ + Vertex asVertex(); + + /** + * Returns {@code true} if this node is a {@link Edge}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, then {@link #asEdge()} can be safely called. + */ + boolean isEdge(); + + /** Returns this node as a Tinkerpop {@link Edge}. */ + Edge asEdge(); + + /** + * Returns {@code true} if this node is a {@link Path}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, then {@link #asPath()} can be safely called. + */ + boolean isPath(); + + /** Returns this node as a Tinkerpop {@link Path}. */ + Path asPath(); + + /** + * Returns {@code true} if this node is a {@link Property}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, then {@link #asProperty()} can be safely called. + */ + boolean isProperty(); + + /** Returns this node as a Tinkerpop {@link Property}. */ + Property asProperty(); + + /** + * Returns {@code true} if this node is a {@link VertexProperty}, and {@code false} otherwise. + * + *

If this method returns {@code true}, then {@link #asVertexProperty()} can be safely + * called. + */ + boolean isVertexProperty(); + + /** Returns this node as a Tinkerpop {@link VertexProperty}. */ + VertexProperty asVertexProperty(); + + /** + * Returns {@code true} if this node is a {@link Set}, and {@code false} otherwise. + * + *

      If this method returns {@code true}, you can convert this node with {@link #asSet()}, or use + * {@link #size()}. + */ + boolean isSet(); + + /** Deserializes and returns this node as a {@link Set}. */ + Set asSet(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java new file mode 100644 index 00000000000..1c809725cda --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +/** + * The result of a synchronous Graph query. + * + *

      This object is a container for {@link GraphNode} objects that will contain the data returned + * by Graph queries. + * + *

      Note that this object can only be iterated once: items are "consumed" as they are read, + * subsequent calls to {@code iterator()} will return the same iterator instance. + * + *

      The default implementation returned by the driver is not thread-safe. It can only be + * iterated by the thread that invoked {@code dseSession.execute}. + * + * @see GraphNode + * @see GraphSession#execute(GraphStatement) + */ +public interface GraphResultSet extends Iterable { + + /** + * Returns the next node, or {@code null} if the result set is exhausted. + * + *

      This is convenient for queries that are known to return exactly one row, for example count + * queries. + */ + @Nullable + default GraphNode one() { + Iterator graphNodeIterator = iterator(); + return graphNodeIterator.hasNext() ? graphNodeIterator.next() : null; + } + + /** + * Returns all the remaining nodes as a list; not recommended for paginated queries that return + * a large number of nodes. + * + *

      At this time (DSE 6.0.0), graph queries are not paginated and the server sends all the + * results at once. + */ + @NonNull + default List all() { + if (!iterator().hasNext()) { + return Collections.emptyList(); + } + return ImmutableList.copyOf(this); + } + + /** + * Cancels the query and asks the server to stop sending results. + * + *

      At this time (DSE 6.0.0), graph queries are not paginated and the server sends all the + * results at once; therefore this method has no effect. + */ + void cancel(); + + /** + * The execution information for the query that have been performed to assemble this result set. + */ + @NonNull + GraphExecutionInfo getExecutionInfo(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java new file mode 100644 index 00000000000..201ae095b63 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.session.Session; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.concurrent.CompletionStage; + +/** + * A session that has the ability to execute DSE Graph requests. + * + *

      Generally this interface won't be referenced directly in an application; instead, you should + * use {@link DseSession}, which is a combination of this interface and many others for a more + * integrated usage of DataStax Enterprise's multi-model database via a single entry point. However, + * it is still possible to cast a {@code DseSession} to a {@code GraphSession} to only expose the + * DSE Graph execution methods. + */ +public interface GraphSession extends Session { + + /** + * Executes a graph statement synchronously (the calling thread blocks until the result becomes + * available). + * + *

      The driver provides different kinds of graph statements: + * + *

        + *
      • {@link FluentGraphStatement} (recommended): wraps a fluent TinkerPop {@linkplain + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal traversal}; + *
      • {@link BatchGraphStatement}: groups together multiple mutating traversals ({@code + * g.addV()/g.addE()}) inside a single transaction and avoids multiple client-server + * round-trips. Improves performance in data ingestion scenarios; + *
      • {@link ScriptGraphStatement}: wraps a Gremlin-groovy script provided as a plain Java + * string. Required for administrative queries such as creating/dropping a graph, + * configuration and schema. + *
      + * + * @see GraphResultSet + */ + @NonNull + default GraphResultSet execute(@NonNull GraphStatement graphStatement) { + return Objects.requireNonNull( + execute(graphStatement, GraphStatement.SYNC), + "The graph processor should never return a null result"); + } + + /** + * Executes a graph statement asynchronously (the call returns as soon as the statement was sent, + * generally before the result is available). + * + * @see #execute(GraphStatement) + * @see AsyncGraphResultSet + */ + @NonNull + default CompletionStage executeAsync( + @NonNull GraphStatement graphStatement) { + return Objects.requireNonNull( + execute(graphStatement, GraphStatement.ASYNC), + "The graph processor should never return a null result"); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java new file mode 100644 index 00000000000..87f1d5f4664 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java @@ -0,0 +1,367 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.CheckReturnValue; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; +import java.util.concurrent.CompletionStage; + +/** + * A request to execute a DSE Graph query. + * + * @param the "self type" used for covariant returns in subtypes. + */ +public interface GraphStatement> extends Request { + + /** + * The type returned when a graph statement is executed synchronously. + * + *

      Most users won't use this explicitly. It is needed for the generic execute method ({@link + * Session#execute(Request, GenericType)}), but graph statements will generally be run with one of + * the DSE driver's built-in helper methods (such as {@link DseSession#execute(GraphStatement)}). + */ + GenericType SYNC = GenericType.of(GraphResultSet.class); + + /** + * The type returned when a graph statement is executed asynchronously. + * + *

      Most users won't use this explicitly. It is needed for the generic execute method ({@link + * Session#execute(Request, GenericType)}), but graph statements will generally be run with one of + * the DSE driver's built-in helper methods (such as {@link + * DseSession#executeAsync(GraphStatement)}). + */ + GenericType> ASYNC = + new GenericType>() {}; + + /** + * Set the idempotence to use for execution. + * + *

      Idempotence defines whether it will be possible to speculatively re-execute the statement, + * based on a {@link SpeculativeExecutionPolicy}. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @param idempotent a boolean instance to set a statement-specific value, or {@code null} to use + * the default idempotence defined in the configuration. + */ + @NonNull + @CheckReturnValue + SelfT setIdempotent(@Nullable Boolean idempotent); + + /** + * {@inheritDoc} + * + *

      Note that, if this method returns {@code null}, graph statements fall back to a dedicated + * configuration option: {@code basic.graph.timeout}. See {@code reference.conf} in the DSE driver + * distribution for more details. + */ + @Nullable + @Override + Duration getTimeout(); + + /** + * Sets how long to wait for this request to complete. This is a global limit on the duration of a + * session.execute() call, including any retries the driver might do. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @param newTimeout the timeout to use, or {@code null} to use the default value defined in the + * configuration. + * @see #getTimeout() + */ + @NonNull + @CheckReturnValue + SelfT setTimeout(@Nullable Duration newTimeout); + + /** + * Sets the {@link Node} that should handle this query. + * + *

      In the general case, use of this method is heavily discouraged and should only be + * used in specific cases, such as applying a series of schema changes, which may be advantageous + * to execute in sequence on the same node. + * + *

      Configuring a specific node causes the configured {@link LoadBalancingPolicy} to be + * completely bypassed. However, if the load balancing policy dictates that the node is at + * distance {@link NodeDistance#IGNORED} or there is no active connectivity to the node, the + * request will fail with a {@link NoNodeAvailableException}. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @param newNode The node that should be used to handle executions of this statement or null to + * delegate to the configured load balancing policy. + */ + @NonNull + @CheckReturnValue + SelfT setNode(@Nullable Node newNode); + + /** + * Get the timestamp set on the statement. + * + *

      By default, if left unset, the value returned by this is {@code Long.MIN_VALUE}, which means + * that the timestamp will be set via the Timestamp Generator. + * + * @return the timestamp set on this statement. + */ + long getTimestamp(); + + /** + * Set the timestamp to use for execution. + * + *

      By default the timestamp generator (see reference config file) will be used for timestamps, + * unless set explicitly via this method. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + */ + @CheckReturnValue + SelfT setTimestamp(long timestamp); + + /** + * Sets the configuration profile to use for execution. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + */ + @NonNull + @CheckReturnValue + SelfT setExecutionProfile(@Nullable DriverExecutionProfile executionProfile); + + /** + * Sets the name of the driver configuration profile that will be used for execution. + * + *

      For all the driver's built-in implementations, this method has no effect if {@link + * #setExecutionProfile} has been called with a non-null argument. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + */ + @NonNull + @CheckReturnValue + SelfT setExecutionProfileName(@Nullable String name); + + /** + * Sets the custom payload to use for execution. + * + *

      This is intended for advanced use cases, such as tools with very advanced knowledge of DSE + * Graph, and reserved for internal settings like transaction settings. Note that the driver also + * adds graph-related options to the payload, in addition to the ones provided here; it won't + * override any option that is already present. + * + *

      All the driver's built-in statement implementations are immutable, and return a new instance + * from this method. However custom implementations may choose to be mutable and return the same + * instance. + * + *

      Note that it's your responsibility to provide a thread-safe map. This can be achieved with a + * concurrent or immutable implementation, or by making it effectively immutable (meaning that + * it's never modified after being set on the statement). + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + */ + @NonNull + @CheckReturnValue + SelfT setCustomPayload(@NonNull Map newCustomPayload); + + /** + * The name of the graph to use for this statement. + * + *

      This is the programmatic equivalent of the configuration option {@code basic.graph.name}, + * and takes precedence over it. That is, if this property is non-null, then the configuration + * will be ignored. + */ + @Nullable + String getGraphName(); + + /** + * Sets the graph name. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @see #getGraphName() + */ + @NonNull + @CheckReturnValue + SelfT setGraphName(@Nullable String newGraphName); + + /** + * The name of the traversal source to use for this statement. + * + *

      This is the programmatic equivalent of the configuration option {@code + * basic.graph.traversal-source}, and takes precedence over it. That is, if this property is + * non-null, then the configuration will be ignored. + */ + @Nullable + String getTraversalSource(); + + /** + * Sets the traversal source. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @see #getTraversalSource() + */ + @NonNull + @CheckReturnValue + SelfT setTraversalSource(@Nullable String newTraversalSource); + + /** + * The DSE graph sub-protocol to use for this statement. + * + *

      This is the programmatic equivalent of the configuration option {@code + * advanced.graph.sub-protocol}, and takes precedence over it. That is, if this property is + * non-null, then the configuration will be ignored. + */ + @Nullable + String getSubProtocol(); + + /** + * Sets the sub-protocol. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @see #getSubProtocol() + */ + @NonNull + @CheckReturnValue + SelfT setSubProtocol(@Nullable String newSubProtocol); + + /** + * Returns the consistency level to use for the statement. + * + *

      This is the programmatic equivalent of the configuration option {@code + * basic.request.consistency}, and takes precedence over it. That is, if this property is + * non-null, then the configuration will be ignored. + */ + @Nullable + ConsistencyLevel getConsistencyLevel(); + + /** + * Sets the consistency level to use for this statement. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @param newConsistencyLevel the consistency level to use, or null to use the default value + * defined in the configuration. + * @see #getConsistencyLevel() + */ + @CheckReturnValue + SelfT setConsistencyLevel(@Nullable ConsistencyLevel newConsistencyLevel); + + /** + * The consistency level to use for the internal read queries that will be produced by this + * statement. + * + *

      This is the programmatic equivalent of the configuration option {@code + * basic.graph.read-consistency-level}, and takes precedence over it. That is, if this property is + * non-null, then the configuration will be ignored. + * + *

      If this property isn't set here or in the configuration, the default consistency level will + * be used ({@link #getConsistencyLevel()} or {@code basic.request.consistency}). + */ + @Nullable + ConsistencyLevel getReadConsistencyLevel(); + + /** + * Sets the read consistency level. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @see #getReadConsistencyLevel() + */ + @NonNull + @CheckReturnValue + SelfT setReadConsistencyLevel(@Nullable ConsistencyLevel newReadConsistencyLevel); + + /** + * The consistency level to use for the internal write queries that will be produced by this + * statement. + * + *

      This is the programmatic equivalent of the configuration option {@code + * basic.graph.write-consistency-level}, and takes precedence over it. That is, if this property + * is non-null, then the configuration will be ignored. + * + *

      If this property isn't set here or in the configuration, the default consistency level will + * be used ({@link #getConsistencyLevel()} or {@code basic.request.consistency}). + */ + @Nullable + ConsistencyLevel getWriteConsistencyLevel(); + + /** + * Sets the write consistency level. + * + *

      All the driver's built-in implementations are immutable, and return a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @see #getWriteConsistencyLevel() + */ + @NonNull + @CheckReturnValue + SelfT setWriteConsistencyLevel(@Nullable ConsistencyLevel newWriteConsistencyLevel); + + /** Graph statements do not have a per-query keyspace, this method always returns {@code null}. */ + @Nullable + @Override + default CqlIdentifier getKeyspace() { + return null; + } + + /** Graph statements can't be routed, this method always returns {@code null}. */ + @Nullable + @Override + default CqlIdentifier getRoutingKeyspace() { + return null; + } + + /** Graph statements can't be routed, this method always returns {@code null}. */ + @Nullable + @Override + default ByteBuffer getRoutingKey() { + return null; + } + + /** Graph statements can't be routed, this method always returns {@code null}. */ + @Nullable + @Override + default Token getRoutingToken() { + return null; + } + + /** + * Whether tracing information should be recorded for this statement. + * + *

      This method is only exposed for future extensibility. At the time of writing, graph + * statements do not support tracing, and this always returns {@code false}. + */ + default boolean isTracing() { + return false; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java new file mode 100644 index 00000000000..1bcdcf7710f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java @@ -0,0 +1,178 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe +public abstract class GraphStatementBuilderBase< + SelfT extends GraphStatementBuilderBase, + StatementT extends GraphStatement> { + + @SuppressWarnings({"unchecked"}) + private final SelfT self = (SelfT) this; + + protected Boolean isIdempotent; + protected Duration timeout; + protected Node node; + protected long timestamp; + protected DriverExecutionProfile executionProfile; + protected String executionProfileName; + private NullAllowingImmutableMap.Builder customPayloadBuilder; + protected String graphName; + protected String traversalSource; + protected String subProtocol; + protected ConsistencyLevel consistencyLevel; 
+ protected ConsistencyLevel readConsistencyLevel; + protected ConsistencyLevel writeConsistencyLevel; + + protected GraphStatementBuilderBase() { + // nothing to do + } + + protected GraphStatementBuilderBase(StatementT template) { + this.isIdempotent = template.isIdempotent(); + this.timeout = template.getTimeout(); + this.node = template.getNode(); + this.timestamp = template.getTimestamp(); + this.executionProfile = template.getExecutionProfile(); + this.executionProfileName = template.getExecutionProfileName(); + if (!template.getCustomPayload().isEmpty()) { + this.customPayloadBuilder = + NullAllowingImmutableMap.builder() + .putAll(template.getCustomPayload()); + } + this.graphName = template.getGraphName(); + this.traversalSource = template.getTraversalSource(); + this.subProtocol = template.getSubProtocol(); + this.consistencyLevel = template.getConsistencyLevel(); + this.readConsistencyLevel = template.getReadConsistencyLevel(); + this.writeConsistencyLevel = template.getWriteConsistencyLevel(); + } + + /** @see GraphStatement#setIdempotent(Boolean) */ + @NonNull + public SelfT setIdempotence(@Nullable Boolean idempotent) { + this.isIdempotent = idempotent; + return self; + } + + /** @see GraphStatement#setTimeout(Duration) */ + @NonNull + public SelfT setTimeout(@Nullable Duration timeout) { + this.timeout = timeout; + return self; + } + + /** @see GraphStatement#setNode(Node) */ + @NonNull + public SelfT setNode(@Nullable Node node) { + this.node = node; + return self; + } + + /** @see GraphStatement#setTimestamp(long) */ + @NonNull + public SelfT setTimestamp(long timestamp) { + this.timestamp = timestamp; + return self; + } + + /** @see GraphStatement#setExecutionProfileName(String) */ + @NonNull + public SelfT setExecutionProfileName(@Nullable String executionProfileName) { + this.executionProfileName = executionProfileName; + return self; + } + + /** @see GraphStatement#setExecutionProfile(DriverExecutionProfile) */ + @NonNull + public SelfT 
setExecutionProfile(@Nullable DriverExecutionProfile executionProfile) { + this.executionProfile = executionProfile; + this.executionProfileName = null; + return self; + } + + /** @see GraphStatement#setCustomPayload(Map) */ + @NonNull + public SelfT addCustomPayload(@NonNull String key, @Nullable ByteBuffer value) { + if (customPayloadBuilder == null) { + customPayloadBuilder = NullAllowingImmutableMap.builder(); + } + customPayloadBuilder.put(key, value); + return self; + } + + /** @see GraphStatement#setCustomPayload(Map) */ + @NonNull + public SelfT clearCustomPayload() { + customPayloadBuilder = null; + return self; + } + + /** @see GraphStatement#setGraphName(String) */ + @NonNull + public SelfT setGraphName(@Nullable String graphName) { + this.graphName = graphName; + return self; + } + + /** @see GraphStatement#setTraversalSource(String) */ + @NonNull + public SelfT setTraversalSource(@Nullable String traversalSource) { + this.traversalSource = traversalSource; + return self; + } + + /** @see GraphStatement#setSubProtocol(String) */ + @NonNull + public SelfT setSubProtocol(@Nullable String subProtocol) { + this.subProtocol = subProtocol; + return self; + } + + /** @see GraphStatement#setConsistencyLevel(ConsistencyLevel) */ + @NonNull + public SelfT setConsistencyLevel(@Nullable ConsistencyLevel consistencyLevel) { + this.consistencyLevel = consistencyLevel; + return self; + } + + /** @see GraphStatement#setReadConsistencyLevel(ConsistencyLevel) */ + @NonNull + public SelfT setReadConsistencyLevel(@Nullable ConsistencyLevel readConsistencyLevel) { + this.readConsistencyLevel = readConsistencyLevel; + return self; + } + + /** @see GraphStatement#setWriteConsistencyLevel(ConsistencyLevel) */ + @NonNull + public SelfT setWriteConsistencyLevel(@Nullable ConsistencyLevel writeConsistencyLevel) { + this.writeConsistencyLevel = writeConsistencyLevel; + return self; + } + + @NonNull + protected Map buildCustomPayload() { + return (customPayloadBuilder == null) + ? 
NullAllowingImmutableMap.of() + : customPayloadBuilder.build(); + } + + /** Create the statement with the configuration defined by this builder object. */ + @NonNull + public abstract StatementT build(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java new file mode 100644 index 00000000000..f85ddf1b0cd --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java @@ -0,0 +1,159 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultScriptGraphStatement; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Map; + +/** + * A graph statement that uses a Gremlin-groovy script the query. + * + *

      These statements are generally used for DSE Graph set-up queries, such as creating or dropping + * a graph, or defining a graph schema. For graph traversals, we recommend using {@link + * FluentGraphStatement} instead. To do bulk data ingestion in graph, we recommend using {@link + * BatchGraphStatement} instead. + * + *

      Typical usage: + * + *

      {@code
      + * ScriptGraphStatement statement = ScriptGraphStatement.newInstance("schema.propertyKey('age').Int().create()");
      + *
      + * GraphResultSet graphResultSet = dseSession.execute(statement);
      + * }
      + */ +public interface ScriptGraphStatement extends GraphStatement { + + /** Create a new instance from the given script. */ + @NonNull + static ScriptGraphStatement newInstance(@NonNull String script) { + return new DefaultScriptGraphStatement( + script, + NullAllowingImmutableMap.of(), + null, + null, + null, + null, + Long.MIN_VALUE, + null, + null, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null); + } + + /** + * Create a builder object to start creating a new instance from the given script. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static ScriptGraphStatementBuilder builder(@NonNull String script) { + return new ScriptGraphStatementBuilder(script); + } + + /** + * Create a builder helper object to start creating a new instance with an existing statement as a + * template. The script and options set on the template will be copied for the new statement at + * the moment this method is called. + * + *

      Note that this builder is mutable and not thread-safe. + */ + @NonNull + static ScriptGraphStatementBuilder builder(@NonNull ScriptGraphStatement template) { + return new ScriptGraphStatementBuilder(template); + } + + /** The Gremlin-groovy script representing the graph query. */ + @NonNull + String getScript(); + + /** + * Whether the statement is a system query, or {@code null} if it defaults to the value defined in + * the configuration. + * + * @see #setSystemQuery(Boolean) + */ + @Nullable + Boolean isSystemQuery(); + + /** + * Defines if this statement is a system query. + * + *

      Script statements that access the {@code system} variable must not specify a graph + * name (otherwise {@code system} is not available). However, if your application executes a lot + * of non-system statements, it is convenient to configure the graph name in your configuration to + * avoid repeating it every time. This method allows you to ignore that global graph name for a + * specific statement. + * + *

      This property is the programmatic equivalent of the configuration option {@code + * basic.graph.is-system-query}, and takes precedence over it. That is, if this property is + * non-null, then the configuration will be ignored. + * + *

      The driver's built-in implementation is immutable, and returns a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * + * @param newValue {@code true} to mark this statement as a system query (the driver will ignore + * any graph name set on the statement or the configuration); {@code false} to mark it as a + * non-system query; {@code null} to default to the value defined in the configuration. + * @see #isSystemQuery() + */ + @NonNull + ScriptGraphStatement setSystemQuery(@Nullable Boolean newValue); + + /** + * The query parameters to send along the request. + * + * @see #setQueryParam(String, Object) + */ + @NonNull + Map getQueryParams(); + + /** + * Set a value for a parameter defined in the Groovy script. + * + *

      The script engine in the DSE Graph server allows to define parameters in a Groovy script and + * set the values of these parameters as a binding. Defining parameters allows to re-use scripts + * and only change their parameters values, which improves the performance of the script executed, + * so defining parameters is encouraged; however, for optimal Graph traversal performance, we + * recommend either using {@link BatchGraphStatement}s for data ingestion, or {@link + * FluentGraphStatement} for normal traversals. + * + *

      Parameters in a Groovy script are always named; unlike CQL, they are not prefixed by a + * column ({@code :}). + * + *

      The driver's built-in implementation is immutable, and returns a new instance from this + * method. However custom implementations may choose to be mutable and return the same instance. + * If many parameters are to be set in a query, it is recommended to create the statement with + * {@link #builder(String)} instead. + * + * @param name the name of the parameter defined in the script. If the statement already had a + * binding for this name, it gets replaced. + * @param value the value that will be transmitted with the request. + */ + @NonNull + ScriptGraphStatement setQueryParam(@NonNull String name, @Nullable Object value); + + /** + * Removes a binding for the given name from this statement. + * + *

      If the statement did not have such a binding, this method has no effect and returns the same + * statement instance. Otherwise, the driver's built-in implementation returns a new instance + * (however custom implementations may choose to be mutable and return the same instance). + * + * @see #setQueryParam(String, Object) + */ + @NonNull + ScriptGraphStatement removeQueryParam(@NonNull String name); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java new file mode 100644 index 00000000000..33a5c8efc9e --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java @@ -0,0 +1,113 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.internal.core.graph.DefaultScriptGraphStatement; +import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import net.jcip.annotations.NotThreadSafe; + +/** + * A builder to create a script graph statement. + * + *

      This class is mutable and not thread-safe. + */ +@NotThreadSafe +public class ScriptGraphStatementBuilder + extends GraphStatementBuilderBase { + + private String script; + private Boolean isSystemQuery; + private Map queryParams; + + public ScriptGraphStatementBuilder(String script) { + this.script = script; + this.queryParams = Maps.newHashMap(); + } + + public ScriptGraphStatementBuilder(ScriptGraphStatement template) { + super(template); + this.script = template.getScript(); + this.queryParams = Maps.newHashMap(template.getQueryParams()); + this.isSystemQuery = template.isSystemQuery(); + } + + /** @see ScriptGraphStatement#isSystemQuery() */ + @NonNull + public ScriptGraphStatementBuilder setSystemQuery(@Nullable Boolean isSystemQuery) { + this.isSystemQuery = isSystemQuery; + return this; + } + + /** + * Set a value for a parameter defined in the script query. + * + * @see ScriptGraphStatement#setQueryParam(String, Object) + */ + @NonNull + public ScriptGraphStatementBuilder setQueryParam(@NonNull String name, @Nullable Object value) { + this.queryParams.put(name, value); + return this; + } + + /** + * Set multiple values for named parameters defined in the script query. + * + * @see ScriptGraphStatement#setQueryParam(String, Object) + */ + @NonNull + public ScriptGraphStatementBuilder setQueryParams(@NonNull Map params) { + this.queryParams.putAll(params); + return this; + } + + /** + * Removes a parameter. + * + *

      This is useful if the builder was {@linkplain + * ScriptGraphStatement#builder(ScriptGraphStatement) initialized with a template statement} that + * has more parameters than desired. + * + * @see ScriptGraphStatement#setQueryParam(String, Object) + * @see #clearQueryParams() + */ + @NonNull + public ScriptGraphStatementBuilder removeQueryParam(@NonNull String name) { + this.queryParams.remove(name); + return this; + } + + /** Clears all the parameters previously added to this builder. */ + public ScriptGraphStatementBuilder clearQueryParams() { + this.queryParams.clear(); + return this; + } + + @NonNull + @Override + public ScriptGraphStatement build() { + return new DefaultScriptGraphStatement( + this.script, + this.queryParams, + this.isSystemQuery, + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + buildCustomPayload(), + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java new file mode 100644 index 00000000000..0aa1da6df75 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.dse.driver.internal.core.graph.GeoPredicate; +import com.datastax.dse.driver.internal.core.graph.GeoUtils; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +public interface Geo { + + enum Unit { + MILES(GeoUtils.MILES_TO_KM * GeoUtils.KM_TO_DEG), + KILOMETERS(GeoUtils.KM_TO_DEG), + METERS(GeoUtils.KM_TO_DEG / 1000.0), + DEGREES(1); + + private final double multiplier; + + Unit(double multiplier) { + this.multiplier = multiplier; + } + + /** Convert distance to degrees (used internally only). */ + public double toDegrees(double distance) { + return distance * multiplier; + } + } + + /** + * Finds whether an entity is inside the given circular area using a geo coordinate system. + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P inside(Point center, double radius, Unit units) { + return new P<>(GeoPredicate.inside, new Distance(center, units.toDegrees(radius))); + } + + /** + * Finds whether an entity is inside the given circular area using a cartesian coordinate system. + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P inside(Point center, double radius) { + return new P<>(GeoPredicate.insideCartesian, new Distance(center, radius)); + } + + /** + * Finds whether an entity is inside the given polygon. + * + * @return a predicate to apply in a {@link GraphTraversal}. 
+ */ + static P inside(Polygon polygon) { + return new P<>(GeoPredicate.insideCartesian, polygon); + } + + /** + * Creates a point from the given coordinates. + * + *

      This is just a shortcut to {@link Point#fromCoordinates(double, double)}. It is duplicated + * here so that {@code Geo} can be used as a single entry point in Gremlin-groovy scripts. + */ + @NonNull + static Point point(double x, double y) { + return Point.fromCoordinates(x, y); + } + + /** + * Creates a line string from the given (at least 2) points. + * + *

      This is just a shortcut to {@link LineString#fromPoints(Point, Point, Point...)}. It is + * duplicated here so that {@code Geo} can be used as a single entry point in Gremlin-groovy + * scripts. + */ + @NonNull + static LineString lineString( + @NonNull Point point1, @NonNull Point point2, @NonNull Point... otherPoints) { + return LineString.fromPoints(point1, point2, otherPoints); + } + + /** + * Creates a line string from the coordinates of its points. + * + *

      This is provided for backward compatibility with previous DSE versions. We recommend {@link + * #lineString(Point, Point, Point...)} instead. + */ + @NonNull + static LineString lineString(double... coordinates) { + if (coordinates.length % 2 != 0) { + throw new IllegalArgumentException("lineString() must be passed an even number of arguments"); + } else if (coordinates.length < 4) { + throw new IllegalArgumentException( + "lineString() must be passed at least 4 arguments (2 points)"); + } + Point point1 = Point.fromCoordinates(coordinates[0], coordinates[1]); + Point point2 = Point.fromCoordinates(coordinates[2], coordinates[3]); + Point[] otherPoints = new Point[coordinates.length / 2 - 2]; + for (int i = 4; i < coordinates.length; i += 2) { + otherPoints[i / 2 - 2] = Point.fromCoordinates(coordinates[i], coordinates[i + 1]); + } + return LineString.fromPoints(point1, point2, otherPoints); + } + + /** + * Creates a polygon from the given (at least 3) points. + * + *

      This is just a shortcut to {@link Polygon#fromPoints(Point, Point, Point, Point...)}. It is + * duplicated here so that {@code Geo} can be used as a single entry point in Gremlin-groovy + * scripts. + */ + @NonNull + static Polygon polygon( + @NonNull Point p1, @NonNull Point p2, @NonNull Point p3, @NonNull Point... otherPoints) { + return Polygon.fromPoints(p1, p2, p3, otherPoints); + } + + /** + * Creates a polygon from the coordinates of its points. + * + *

      This is provided for backward compatibility with previous DSE versions. We recommend {@link + * #polygon(Point, Point, Point, Point...)} instead. + */ + @NonNull + static Polygon polygon(double... coordinates) { + if (coordinates.length % 2 != 0) { + throw new IllegalArgumentException("polygon() must be passed an even number of arguments"); + } else if (coordinates.length < 6) { + throw new IllegalArgumentException( + "polygon() must be passed at least 6 arguments (3 points)"); + } + Point point1 = Point.fromCoordinates(coordinates[0], coordinates[1]); + Point point2 = Point.fromCoordinates(coordinates[2], coordinates[3]); + Point point3 = Point.fromCoordinates(coordinates[4], coordinates[5]); + Point[] otherPoints = new Point[coordinates.length / 2 - 3]; + for (int i = 6; i < coordinates.length; i += 2) { + otherPoints[i / 2 - 3] = Point.fromCoordinates(coordinates[i], coordinates[i + 1]); + } + return Polygon.fromPoints(point1, point2, point3, otherPoints); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java new file mode 100644 index 00000000000..5b2872ca505 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import com.datastax.dse.driver.internal.core.graph.EditDistance; +import com.datastax.dse.driver.internal.core.graph.SearchPredicate; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +public interface Search { + + /** + * Search any instance of a certain token within the text property targeted (case insensitive). + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P token(String value) { + return new P<>(SearchPredicate.token, value); + } + + /** + * Search any instance of a certain token prefix within the text property targeted (case + * insensitive). + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P tokenPrefix(String value) { + return new P<>(SearchPredicate.tokenPrefix, value); + } + + /** + * Search any instance of the provided regular expression for the targeted property (case + * insensitive). + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P tokenRegex(String value) { + return new P<>(SearchPredicate.tokenRegex, value); + } + + /** + * Search for a specific prefix at the beginning of the text property targeted (case sensitive). + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P prefix(String value) { + return new P<>(SearchPredicate.prefix, value); + } + + /** + * Search for this regular expression inside the text property targeted (case sensitive). + * + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P regex(String value) { + return new P<>(SearchPredicate.regex, value); + } + + /** + * Supports finding words which are a within a specific distance away (case insensitive). + * + *

      Example: the search expression is {@code phrase("Hello world", 2)} + * + *

        + *
      • the inserted value "Hello world" is found + *
      • the inserted value "Hello wild world" is found + *
      • the inserted value "Hello big wild world" is found + *
      • the inserted value "Hello the big wild world" is not found + *
      • the inserted value "Goodbye world" is not found. + *
      + * + * @param query the string to look for in the value + * @param distance the number of terms allowed between two correct terms to find a value. + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P phrase(String query, int distance) { + return new P<>(SearchPredicate.phrase, new EditDistance(query, distance)); + } + + /** + * Supports fuzzy searches based on the Damerau-Levenshtein Distance, or Edit Distance algorithm + * (case sensitive). + * + *

      Example: the search expression is {@code fuzzy("david", 1)} + * + *

        + *
      • the inserted value "david" is found + *
      • the inserted value "dawid" is found + *
      • the inserted value "davids" is found + *
      • the inserted value "dewid" is not found + *
      + * + * @param query the string to look for in the value + * @param distance the number of "uncertainties" allowed for the Levenshtein algorithm. + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P fuzzy(String query, int distance) { + return new P<>(SearchPredicate.fuzzy, new EditDistance(query, distance)); + } + + /** + * Supports fuzzy searches based on the Damerau-Levenshtein Distance, or Edit Distance algorithm + * after having tokenized the data stored (case insensitive). + * + *

      Example: the search expression is {@code tokenFuzzy("david", 1)} + * + *

        + *
      • the inserted value "david" is found + *
      • the inserted value "dawid" is found + *
      • the inserted value "hello-dawid" is found + *
      • the inserted value "dewid" is not found + *
      + * + * @param query the string to look for in the value + * @param distance the number of "uncertainties" allowed for the Levenshtein algorithm. + * @return a predicate to apply in a {@link GraphTraversal}. + */ + static P tokenFuzzy(String query, int distance) { + return new P<>(SearchPredicate.tokenFuzzy, new EditDistance(query, distance)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java new file mode 100644 index 00000000000..aa44807929a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java @@ -0,0 +1,126 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; + +/** The keys for the additional DSE-specific properties stored in {@link Node#getExtras()}. */ +public class DseNodeProperties { + + /** + * The DSE version that the node is running. + * + *

      The associated value in {@link Node#getExtras()} is a {@link Version}). + */ + public static final String DSE_VERSION = "DSE_VERSION"; + + /** + * The value of the {@code server_id} field in the {@code peers} system table for this node. + * + *

      This is the single identifier of the machine running a DSE instance. If DSE has been + * configured with Multi-Instance, the {@code server_id} helps identifying the single physical + * machine that runs the multiple DSE instances. If DSE is not configured with DSE Multi-Instance, + * the {@code server_id} will be automatically set and be unique for each node. + * + *

      This information is only available if connecting to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is a {@code String}). + * + * @see DSE + * Multi-Instance (DSE Administrator Guide) + * @see + * server_id (DSE Administrator Guide) + */ + public static final String SERVER_ID = "SERVER_ID"; + + /** + * The DSE workloads that the node is running. + * + *

      This is based on the {@code workload} or {@code workloads} columns in {@code system.local} + * and {@code system.peers}. + * + *

      Workload labels may vary depending on the DSE version in use; e.g. DSE 5.1 may report two + * distinct workloads: {@code Search} and {@code Analytics}, while DSE 5.0 would report a single + * {@code SearchAnalytics} workload instead. It is up to users to deal with such discrepancies; + * the driver simply returns the workload labels as reported by DSE, without any form of + * pre-processing (with the exception of Graph in DSE 5.0, which is stored in a separate column, + * but will be reported as {@code Graph} here). + * + *

      The associated value in {@link Node#getExtras()} is an immutable {@code Set}. + */ + public static final String DSE_WORKLOADS = "DSE_WORKLOADS"; + + /** + * The port for the native transport connections on the DSE node. + * + *

      The native transport port is {@code 9042} by default but can be changed on instances + * requiring specific firewall configurations. This can be configured in the {@code + * cassandra.yaml} configuration file under the {@code native_transport_port} property. + * + *

      This information is only available if connecting the driver to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is an {@code Integer}. + */ + public static final String NATIVE_TRANSPORT_PORT = "NATIVE_TRANSPORT_PORT"; + + /** + * The port for the encrypted native transport connections on the DSE node. + * + *

      In most scenarios enabling client communications in DSE will result in using a single port + * that will only accept encrypted connections (by default the port {@code 9042} is reused since + * unencrypted connections are not allowed). + * + *

      However, it is possible to configure DSE to use both encrypted and a non-encrypted + * communication ports with clients. In that case the port accepting encrypted connections will + * differ from the non-encrypted one (see {@link #NATIVE_TRANSPORT_PORT}) and will be exposed via + * this method. + * + *

      This information is only available if connecting the driver to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is an {@code Integer}. + */ + public static final String NATIVE_TRANSPORT_PORT_SSL = "NATIVE_TRANSPORT_PORT_SSL"; + + /** + * The storage port used by the DSE node. + * + *

      The storage port is used for internal communication between the DSE server nodes. This port + * is never used by the driver. + * + *

      This information is only available if connecting the driver to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is an {@code Integer}. + */ + public static final String STORAGE_PORT = "STORAGE_PORT"; + + /** + * The encrypted storage port used by the DSE node. + * + *

      If inter-node encryption is enabled on the DSE cluster, nodes will communicate securely + * between each other via this port. This port is never used by the driver. + * + *

      This information is only available if connecting the driver to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is an {@code Integer}. + */ + public static final String STORAGE_PORT_SSL = "STORAGE_PORT_SSL"; + + /** + * The JMX port used by this node. + * + *

      The JMX port can be configured in the {@code cassandra-env.sh} configuration file separately + * on each node. + * + *

      This information is only available if connecting the driver to a DSE 6.0+ node. + * + *

      The associated value in {@link Node#getExtras()} is an {@code Integer}. + */ + public static final String JMX_PORT = "JMX_PORT"; +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java new file mode 100644 index 00000000000..d9fb7a799ea --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Optional; + +/** + * Specialized aggregate metadata for DSE. + * + *

      It adds support for the DSE-specific {@link #isDeterministic() DETERMINISTIC} keyword. + */ +public interface DseAggregateMetadata extends AggregateMetadata { + + /** + * Indicates if this aggregate is deterministic. A deterministic aggregate means that given a + * particular input, the aggregate will always produce the same output. + * + *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless + * of the actual function characteristics. + * + * @return Whether or not this aggregate is deterministic. + */ + boolean isDeterministic(); + + @NonNull + @Override + default String describe(boolean pretty) { + // Easiest to just copy the OSS describe() method and add in DETERMINISTIC + ScriptBuilder builder = new ScriptBuilder(pretty); + builder + .append("CREATE AGGREGATE ") + .append(getKeyspace()) + .append(".") + .append(getSignature().getName()) + .append("("); + boolean first = true; + for (int i = 0; i < getSignature().getParameterTypes().size(); i++) { + if (first) { + first = false; + } else { + builder.append(","); + } + DataType type = getSignature().getParameterTypes().get(i); + builder.append(type.asCql(false, pretty)); + } + builder + .increaseIndent() + .append(")") + .newLine() + .append("SFUNC ") + .append(getStateFuncSignature().getName()) + .newLine() + .append("STYPE ") + .append(getStateType().asCql(false, pretty)); + + if (getFinalFuncSignature().isPresent()) { + builder.newLine().append("FINALFUNC ").append(getFinalFuncSignature().get().getName()); + } + if (getInitCond().isPresent()) { + Optional formatInitCond = formatInitCond(); + assert formatInitCond.isPresent(); + builder.newLine().append("INITCOND ").append(formatInitCond.get()); + } + // add DETERMINISTIC if present + if (isDeterministic()) { + builder.newLine().append("DETERMINISTIC"); + } + return builder.append(";").build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java new file mode 100644 index 00000000000..33a0dd87c3e --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java @@ -0,0 +1,17 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; + +/** + * Specialized column metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * ColumnMetadata}. + */ +public interface DseColumnMetadata extends ColumnMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java new file mode 100644 index 00000000000..8393e421b1d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java @@ -0,0 +1,123 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; + +/** + * Specialized function metadata for DSE. + * + *

      It adds support for the DSE-specific {@link #isDeterministic() DETERMINISTIC} and {@link + * #isMonotonic() MONOTONIC} keywords. + */ +public interface DseFunctionMetadata extends FunctionMetadata { + + /** + * Indicates if this function is deterministic. A deterministic function means that given a + * particular input, the function will always produce the same output. + * + *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless + * of the actual function characteristics. + * + * @return Whether or not this function is deterministic. + */ + boolean isDeterministic(); + + /** + * Indicates whether or not this function is monotonic on all of its arguments. This means that it + * is either entirely non-increasing or non-decreasing. + * + *

      A function can be either: + * + *

        + *
      • monotonic on all of its arguments. In that case, this method returns {@code true}, and + * {@link #getMonotonicArgumentNames()} returns all the arguments; + *
      • partially monotonic, meaning that partial application over some of the arguments is + * monotonic. Currently (DSE 6.0.0), CQL only allows partial monotonicity on exactly one + * argument. This may change in a future CQL version. In that case, this method returns + * {@code false}, and {@link #getMonotonicArgumentNames()} returns a singleton list; + *
      • not monotonic. In that case, this method return {@code false} and {@link + * #getMonotonicArgumentNames()} returns an empty list. + *
      + * + *

      Monotonicity is required to use the function in a GROUP BY clause. + * + *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless + * of the actual function characteristics. + * + * @return whether or not this function is monotonic on all of its arguments. + */ + boolean isMonotonic(); + + /** + * Returns a list of argument names that are monotonic. + * + *

      See {@link #isMonotonic()} for explanations on monotonicity, and the possible values + * returned by this method. + * + *

      NOTE: For versions of DSE older than 6.0.0, this method will always return an empty list, + * regardless of the actual function characteristics. + * + * @return the argument names that the function is monotonic on. + */ + @NonNull + List getMonotonicArgumentNames(); + + @NonNull + @Override + default String describe(boolean pretty) { + ScriptBuilder builder = new ScriptBuilder(pretty); + builder + .append("CREATE FUNCTION ") + .append(getKeyspace()) + .append(".") + .append(getSignature().getName()) + .append("("); + boolean first = true; + for (int i = 0; i < getSignature().getParameterTypes().size(); i++) { + if (first) { + first = false; + } else { + builder.append(","); + } + DataType type = getSignature().getParameterTypes().get(i); + CqlIdentifier name = getParameterNames().get(i); + builder.append(name).append(" ").append(type.asCql(false, pretty)); + } + builder + .append(")") + .increaseIndent() + .newLine() + .append(isCalledOnNullInput() ? "CALLED ON NULL INPUT" : "RETURNS NULL ON NULL INPUT") + .newLine() + .append("RETURNS ") + .append(getReturnType().asCql(false, true)) + .newLine(); + // handle deterministic and monotonic + if (isDeterministic()) { + builder.append("DETERMINISTIC").newLine(); + } + if (isMonotonic()) { + builder.append("MONOTONIC").newLine(); + } else if (!getMonotonicArgumentNames().isEmpty()) { + builder.append("MONOTONIC ON ").append(getMonotonicArgumentNames().get(0)).newLine(); + } + builder + .append("LANGUAGE ") + .append(getLanguage()) + .newLine() + .append("AS '") + .append(getBody()) + .append("';"); + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java new file mode 100644 index 00000000000..469cf3babe7 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java @@ -0,0 +1,17 @@ +/* + * Copyright 
DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; + +/** + * Specialized index metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * IndexMetadata}. + */ +public interface DseIndexMetadata extends IndexMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java new file mode 100644 index 00000000000..1460de0ba06 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java @@ -0,0 +1,21 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; + +/** + * Specialized keyspace metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * KeyspaceMetadata}. + * + *

      Note that all returned elements can be cast to their DSE counterpart, for example {@link + * TableMetadata} to {@link DseTableMetadata}. + */ +public interface DseKeyspaceMetadata extends KeyspaceMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java new file mode 100644 index 00000000000..4dc8bb1fc50 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java @@ -0,0 +1,20 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; + +/** + * Specialized table or materialized view metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * RelationMetadata}. + * + *

      Note that all returned {@link ColumnMetadata} can be cast to {@link DseColumnMetadata}. + */ +public interface DseRelationMetadata extends RelationMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java new file mode 100644 index 00000000000..4aa8ab9690a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; + +/** + * Specialized table metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * TableMetadata}. + * + *

      Note that all returned {@link ColumnMetadata} can be cast to {@link DseColumnMetadata}, and + * all {@link IndexMetadata} to {@link DseIndexMetadata}. + */ +public interface DseTableMetadata extends DseRelationMetadata, TableMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java new file mode 100644 index 00000000000..ff4bc7b7d98 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java @@ -0,0 +1,20 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; + +/** + * Specialized materialized view metadata for DSE. + * + *

      This type exists only for future extensibility; currently, it is identical to {@link + * ViewMetadata}. + * + *

      Note that all returned {@link ColumnMetadata} can be cast to {@link DseColumnMetadata}. + */ +public interface DseViewMetadata extends DseRelationMetadata, ViewMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java b/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java new file mode 100644 index 00000000000..bdd121eebec --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.servererrors; + +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * A server-side error triggered when DSE can't send asynchronous results back to the client. + * + *

      Currently, this is used when the client is unable to keep up with the rate during a continuous + * paging session. + * + *

      Note that the protocol specification refers to this error as {@code CLIENT_WRITE_FAILURE}; we + * don't follow that terminology because it would be too misleading (this is not a client error, and + * it doesn't occur while writing data to DSE). + */ +public class UnfitClientException extends CoordinatorException { + + public UnfitClientException(@NonNull Node coordinator, @NonNull String message) { + this(coordinator, message, null, false); + } + + private UnfitClientException( + @NonNull Node coordinator, + @NonNull String message, + @Nullable ExecutionInfo executionInfo, + boolean writableStackTrace) { + super(coordinator, message, executionInfo, writableStackTrace); + } + + @Override + @NonNull + public UnfitClientException copy() { + return new UnfitClientException(getCoordinator(), getMessage(), getExecutionInfo(), true); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java new file mode 100644 index 00000000000..bbcb9882a11 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java @@ -0,0 +1,85 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.session; + +import com.datastax.dse.driver.api.core.DseSessionBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.UUID; + +/** + * The DSE-specific arguments that can be set programmatically when building a session. + * + *

      This is mostly for internal use, you only need to deal with this directly if you write custom + * {@link DseSessionBuilder} subclasses. + */ +public class DseProgrammaticArguments { + + @NonNull + public static Builder builder() { + return new Builder(); + } + + private final UUID startupClientId; + private final String startupApplicationName; + private final String startupApplicationVersion; + + private DseProgrammaticArguments( + @Nullable UUID startupClientId, + @Nullable String startupApplicationName, + @Nullable String startupApplicationVersion) { + this.startupClientId = startupClientId; + this.startupApplicationName = startupApplicationName; + this.startupApplicationVersion = startupApplicationVersion; + } + + @Nullable + public UUID getStartupClientId() { + return startupClientId; + } + + @Nullable + public String getStartupApplicationName() { + return startupApplicationName; + } + + @Nullable + public String getStartupApplicationVersion() { + return startupApplicationVersion; + } + + public static class Builder { + + private UUID startupClientId; + private String startupApplicationName; + private String startupApplicationVersion; + + @NonNull + public Builder withStartupClientId(@Nullable UUID startupClientId) { + this.startupClientId = startupClientId; + return this; + } + + @NonNull + public Builder withStartupApplicationName(@Nullable String startupApplicationName) { + this.startupApplicationName = startupApplicationName; + return this; + } + + @NonNull + public Builder withStartupApplicationVersion(@Nullable String startupApplicationVersion) { + this.startupApplicationVersion = startupApplicationVersion; + return this; + } + + @NonNull + public DseProgrammaticArguments build() { + return new DseProgrammaticArguments( + startupClientId, startupApplicationName, startupApplicationVersion); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java 
b/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java new file mode 100644 index 00000000000..b2191023c66 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java @@ -0,0 +1,26 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.type; + +import com.datastax.oss.driver.api.core.type.CustomType; +import com.datastax.oss.driver.api.core.type.DataTypes; + +/** Extends {@link DataTypes} to handle DSE-specific types. */ +public class DseDataTypes extends DataTypes { + + public static final CustomType LINE_STRING = + (CustomType) custom("org.apache.cassandra.db.marshal.LineStringType"); + + public static final CustomType POINT = + (CustomType) custom("org.apache.cassandra.db.marshal.PointType"); + + public static final CustomType POLYGON = + (CustomType) custom("org.apache.cassandra.db.marshal.PolygonType"); + + public static final CustomType DATE_RANGE = + (CustomType) custom("org.apache.cassandra.db.marshal.DateRangeType"); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java b/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java new file mode 100644 index 00000000000..8b642783fac --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java @@ -0,0 +1,30 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.type.codec; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.data.time.DateRange; +import com.datastax.dse.driver.internal.core.type.codec.geometry.LineStringCodec; +import com.datastax.dse.driver.internal.core.type.codec.geometry.PointCodec; +import com.datastax.dse.driver.internal.core.type.codec.geometry.PolygonCodec; +import com.datastax.dse.driver.internal.core.type.codec.time.DateRangeCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; + +/** Extends {@link TypeCodecs} to handle DSE-specific types. */ +public class DseTypeCodecs extends TypeCodecs { + + public static final TypeCodec LINE_STRING = new LineStringCodec(); + + public static final TypeCodec POINT = new PointCodec(); + + public static final TypeCodec POLYGON = new PolygonCodec(); + + public static final TypeCodec DATE_RANGE = new DateRangeCodec(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java new file mode 100644 index 00000000000..911005eaca6 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java @@ -0,0 +1,27 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core; + +import com.datastax.oss.driver.internal.core.ProtocolFeature; + +/** + * Features that are supported by DataStax Enterprise (DSE) protocol versions. 
+ * + * @see com.datastax.dse.driver.api.core.DseProtocolVersion + * @see com.datastax.oss.driver.internal.core.DefaultProtocolFeature + */ +public enum DseProtocolFeature implements ProtocolFeature { + + /** + * The ability to execute continuous paging requests. + * + * @see CASSANDRA-11521 + * @see com.datastax.dse.driver.api.core.cql.continuous.ContinuousSession + */ + CONTINUOUS_PAGING, + ; +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java new file mode 100644 index 00000000000..7ba50ea9098 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java @@ -0,0 +1,186 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; +import com.datastax.oss.driver.internal.core.ProtocolFeature; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import 
net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class DseProtocolVersionRegistry extends CassandraProtocolVersionRegistry { + + private static final Logger LOG = LoggerFactory.getLogger(DseProtocolVersionRegistry.class); + @VisibleForTesting static final Version DSE_4_7_0 = Version.parse("4.7.0"); + @VisibleForTesting static final Version DSE_5_0_0 = Version.parse("5.0.0"); + @VisibleForTesting static final Version DSE_5_1_0 = Version.parse("5.1.0"); + @VisibleForTesting static final Version DSE_6_0_0 = Version.parse("6.0.0"); + + private final String logPrefix; + + public DseProtocolVersionRegistry(String logPrefix) { + super(logPrefix, DefaultProtocolVersion.values(), DseProtocolVersion.values()); + this.logPrefix = logPrefix; + } + + @Override + public ProtocolVersion highestCommon(Collection nodes) { + if (nodes == null || nodes.isEmpty()) { + throw new IllegalArgumentException("Expected at least one node"); + } + + // Sadly we can't trust the Cassandra version reported by DSE to infer the maximum OSS protocol + // supported. For example DSE 6 reports release_version 4.0-SNAPSHOT, but only supports OSS + // protocol v4 (while Cassandra 4 will support v5). 
So there's no way to reuse the OSS algorithm + // from the parent class, simply redo everything: + + Set candidates = new HashSet<>(); + candidates.addAll(allNonBeta(DefaultProtocolVersion.values())); + candidates.addAll(allNonBeta(DseProtocolVersion.values())); + + for (Node node : nodes) { + List toEliminate = Collections.emptyList(); + + Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (dseVersion != null) { + LOG.debug("[{}] Node {} reports DSE version {}", logPrefix, node.getEndPoint(), dseVersion); + dseVersion = dseVersion.nextStable(); + if (dseVersion.compareTo(DSE_4_7_0) < 0) { + throw new UnsupportedProtocolVersionException( + node.getEndPoint(), + String.format( + "Node %s reports DSE version %s, " + + "but the driver only supports 4.7.0 and above", + node.getEndPoint(), dseVersion), + triedVersionsForHighestCommon()); + } else if (dseVersion.compareTo(DSE_5_0_0) < 0) { + // DSE 4.7 or 4.8 (Cassandra 2.1): OSS protocol v3 + toEliminate = + ImmutableList.of( + DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + } else if (dseVersion.compareTo(DSE_5_1_0) < 0) { + // DSE 5.0 (Cassandra 3): OSS protocol v4 + toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + } else if (dseVersion.compareTo(DSE_6_0_0) < 0) { + // DSE 5.1: DSE protocol v1 or OSS protocol v4 + toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V2); + } // else DSE 6: DSE protocol v2 or OSS protocol v4 + } else { + Version cassandraVersion = node.getCassandraVersion(); + if (cassandraVersion == null) { + LOG.warn( + "[{}] Node {} reports neither DSE version nor Cassandra version, " + + "ignoring it from optimal protocol version computation", + logPrefix, + node.getEndPoint()); + continue; + } + cassandraVersion = cassandraVersion.nextStable(); + if (cassandraVersion.compareTo(Version.V2_1_0) < 0) { + throw new UnsupportedProtocolVersionException( + node.getEndPoint(), + String.format( 
+ "Node %s reports Cassandra version %s, " + + "but the driver only supports 2.1.0 and above", + node.getEndPoint(), cassandraVersion), + ImmutableList.of(DefaultProtocolVersion.V3, DefaultProtocolVersion.V4)); + } + + LOG.debug( + "[{}] Node {} reports Cassandra version {}", + logPrefix, + node.getEndPoint(), + cassandraVersion); + + if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { + toEliminate = + ImmutableList.of( + DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + } else { + toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + } + } + + for (ProtocolVersion version : toEliminate) { + if (candidates.remove(version)) { + LOG.debug("[{}] Excluding protocol {}", logPrefix, version); + } + } + } + + ProtocolVersion max = null; + for (ProtocolVersion candidate : candidates) { + if (max == null || max.getCode() < candidate.getCode()) { + max = candidate; + } + } + if (max == null) { // Note: with the current algorithm, this never happens + throw new UnsupportedProtocolVersionException( + null, + String.format( + "Could not determine a common protocol version, " + + "enable DEBUG logs for '%s' for more details", + LOG.getName()), + triedVersionsForHighestCommon()); + } else { + return max; + } + } + + // Simply all non-beta versions, since this is the set we start from before filtering + private static ImmutableList triedVersionsForHighestCommon() { + return ImmutableList.builder() + .addAll(allNonBeta(DefaultProtocolVersion.values())) + .addAll(allNonBeta(DseProtocolVersion.values())) + .build(); + } + + private static & ProtocolVersion> Collection allNonBeta(T[] versions) { + ImmutableList.Builder result = ImmutableList.builder(); + for (T version : versions) { + if (!version.isBeta()) { + result.add(version); + } + } + return result.build(); + } + + @Override + public boolean supports(ProtocolVersion version, ProtocolFeature feature) { + int code = version.getCode(); + if 
(DefaultProtocolFeature.UNSET_BOUND_VALUES.equals(feature)) { + // All DSE versions and all OSS V4+ + return DefaultProtocolVersion.V4.getCode() <= code; + } else if (DefaultProtocolFeature.PER_REQUEST_KEYSPACE.equals(feature)) { + // Only DSE_V2+ and OSS V5+ + return (DefaultProtocolVersion.V5.getCode() <= code + && code < DseProtocolVersion.DSE_V1.getCode()) + || DseProtocolVersion.DSE_V2.getCode() <= code; + } else if (DseProtocolFeature.CONTINUOUS_PAGING.equals(feature)) { + // All DSE versions + return DseProtocolVersion.DSE_V1.getCode() <= code; + } else { + throw new IllegalArgumentException("Unhandled protocol feature: " + feature); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java new file mode 100644 index 00000000000..3a9f2dfb42b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core; + +import static com.datastax.dse.driver.api.core.config.DseDriverOption.MONITOR_REPORTING_ENABLED; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.InsightsClient; +import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; +import com.datastax.oss.driver.internal.core.context.LifecycleListener; + +public class InsightsClientLifecycleListener implements LifecycleListener { + private static final boolean DEFAULT_INSIGHTS_ENABLED = true; + private static final long STATUS_EVENT_DELAY_MILLIS = 300000L; + private final DseDriverContext context; + private final StackTraceElement[] initCallStackTrace; + private volatile InsightsClient insightsClient; + + public InsightsClientLifecycleListener( + DseDriverContext context, StackTraceElement[] initCallStackTrace) { + this.context = context; + this.initCallStackTrace = initCallStackTrace; + } + + @Override + public void onSessionReady() { + boolean monitorReportingEnabled = + context + .getConfig() + .getDefaultProfile() + .getBoolean(MONITOR_REPORTING_ENABLED, DEFAULT_INSIGHTS_ENABLED); + + this.insightsClient = + InsightsClient.createInsightsClient( + new InsightsConfiguration( + monitorReportingEnabled, + STATUS_EVENT_DELAY_MILLIS, + context.getNettyOptions().adminEventExecutorGroup().next()), + context, + initCallStackTrace); + insightsClient.sendStartupMessage(); + insightsClient.scheduleStatusMessageSend(); + } + + @Override + public void close() { + if (insightsClient != null) { + insightsClient.shutdown(); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java new file mode 100644 index 00000000000..f13d3632c91 --- /dev/null +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.auth; + +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import java.util.ArrayList; +import java.util.List; + +public class AuthUtils { + /** + * Utility function that checks for the existence of settings and throws an exception if they + * aren't present + * + * @param config Current working driver configuration + * @param authenticatorName name of authenticator for logging purposes + * @param endPoint the host we are attempting to authenticate to + * @param options a list of DriverOptions to check to see if they are present + */ + public static void validateConfigPresent( + DriverExecutionProfile config, + String authenticatorName, + EndPoint endPoint, + DriverOption... 
options) { + List missingOptions = new ArrayList<>(); + for (DriverOption option : options) { + + if (!config.isDefined(option)) { + missingOptions.add(option); + } + if (missingOptions.size() > 0) { + String message = + "Missing required configuration options for authenticator " + authenticatorName + ":"; + for (DriverOption missingOption : missingOptions) { + message = message + " " + missingOption.getPath(); + } + throw new AuthenticationException(endPoint, message); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java new file mode 100644 index 00000000000..e0267d0ccd0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java @@ -0,0 +1,212 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.auth; + +import com.datastax.dse.driver.api.core.auth.DseGssApiAuthProviderBase; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.Configuration; +import net.jcip.annotations.ThreadSafe; + +/** + * {@link AuthProvider} that provides GSSAPI authenticator instances for clients to connect to DSE + * clusters secured with {@code DseAuthenticator}. + * + *

      To activate this provider an {@code auth-provider} section must be included in the driver + * configuration, for example: + * + *

      + * dse-java-driver {
      + *  auth-provider {
      + *      class = com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider
      + *      login-configuration {
      + *          principal = "user principal here ex cassandra@DATASTAX.COM"
      + *          useKeyTab = "true"
      + *          refreshKrb5Config = "true"
      + *          keyTab = "Path to keytab file here"
      + *      }
      + *   }
      + * }
      + * 
      + * + *

      Kerberos Authentication

      + * + * Keytab and ticket cache settings are specified using a standard JAAS configuration file. The + * location of the file can be set using the java.security.auth.login.config system + * property or by adding a login.config.url.n entry in the java.security + * properties file. Alternatively a login-configuration section can be included in the driver + * configuration. + * + *

      See the following documents for further details: + * + *

        + *
      1. JAAS + * Login Configuration File; + *
      2. Krb5LoginModule + * options; + *
      3. JAAS + * Authentication Tutorial for more on JAAS in general. + *
      + * + *

      Authentication using ticket cache

      + * + * Run kinit to obtain a ticket and populate the cache before connecting. JAAS config: + * + *
      + * DseClient {
      + *   com.sun.security.auth.module.Krb5LoginModule required
      + *     useTicketCache=true
      + *     renewTGT=true;
      + * };
      + * 
      + * + *

      Authentication using a keytab file

      + * + * To enable authentication using a keytab file, specify its location on disk. If your keytab + * contains more than one principal key, you should also specify which one to select. This + * information can also be specified in the driver config, under the login-configuration section. + * + *
      + * DseClient {
      + *     com.sun.security.auth.module.Krb5LoginModule required
      + *       useKeyTab=true
      + *       keyTab="/path/to/file.keytab"
      + *       principal="user@MYDOMAIN.COM";
      + * };
      + * 
      + * + *

      Specifying SASL protocol name

      + * + * The SASL protocol name used by this auth provider defaults to " + * {@value #DEFAULT_SASL_SERVICE_NAME}". + * + *

      Important: the SASL protocol name should match the username of the Kerberos + * service principal used by the DSE server. This information is specified in the dse.yaml file by + * the {@code service_principal} option under the kerberos_options + * section, and may vary from one DSE installation to another – especially if you installed + * DSE with an automated package installer. + * + *

      For example, if your dse.yaml file contains the following: + * + *

      {@code
      + * kerberos_options:
      + *     ...
      + *     service_principal: cassandra/my.host.com@MY.REALM.COM
      + * }
      + * + * The correct SASL protocol name to use when authenticating against this DSE server is "{@code + * cassandra}". + * + *

      Should you need to change the SASL protocol name, use one of the methods below: + * + *

        + *
      1. Specify the service name in the driver config. + *
        + * dse-java-driver {
        + *   auth-provider {
        + *     class = com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider
        + *     service = "alternate"
        + *   }
        + * }
        + * 
        + *
      2. Specify the service name with the {@code dse.sasl.service} system property when starting + * your application, e.g. {@code -Ddse.sasl.service=cassandra}. + *
      + * + * If a non-null SASL service name is provided to the aforementioned config, that name takes + * precedence over the contents of the {@code dse.sasl.service} system property. + * + *

      Should internal sasl properties need to be set such as qop. This can be accomplished by + * including a sasl-properties in the driver config, for example: + * + *

      + * dse-java-driver {
      + *   auth-provider {
      + *     class = com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider
      + *     sasl-properties {
      + *       javax.security.sasl.qop = "auth-conf"
      + *     }
      + *   }
      + * }
      + * 
      + * + * @see Authenticating + * a DSE cluster with Kerberos + */ +@ThreadSafe +public class DseGssApiAuthProvider extends DseGssApiAuthProviderBase { + + private final DriverExecutionProfile config; + + public DseGssApiAuthProvider(DriverContext context) { + super(context.getSessionName()); + + this.config = context.getConfig().getDefaultProfile(); + } + + @NonNull + @Override + protected GssApiOptions getOptions( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + // A login configuration is always necessary, throw an exception if that option is missing. + AuthUtils.validateConfigPresent( + config, + DseGssApiAuthProvider.class.getName(), + endPoint, + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION); + + GssApiOptions.Builder optionsBuilder = GssApiOptions.builder(); + + if (config.isDefined(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID)) { + optionsBuilder.withAuthorizationId( + config.getString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID)); + } + if (config.isDefined(DseDriverOption.AUTH_PROVIDER_SERVICE)) { + optionsBuilder.withSaslProtocol(config.getString(DseDriverOption.AUTH_PROVIDER_SERVICE)); + } + if (config.isDefined(DseDriverOption.AUTH_PROVIDER_SASL_PROPERTIES)) { + for (Map.Entry entry : + config.getStringMap(DseDriverOption.AUTH_PROVIDER_SASL_PROPERTIES).entrySet()) { + optionsBuilder.addSaslProperty(entry.getKey(), entry.getValue()); + } + } + Map loginConfigurationMap = + config.getStringMap(DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION); + optionsBuilder.withLoginConfiguration(fetchLoginConfiguration(loginConfigurationMap)); + return optionsBuilder.build(); + } + + /** + * Creates a configuration that depends on the given keytab file for authenticating the given + * user. 
+ */ + private static Configuration fetchLoginConfiguration(Map options) { + return new Configuration() { + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + return new AppConfigurationEntry[] { + new AppConfigurationEntry( + "com.sun.security.auth.module.Krb5LoginModule", + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + options) + }; + } + }; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java new file mode 100644 index 00000000000..5521a519ce0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.auth; + +import com.datastax.dse.driver.api.core.auth.DsePlainTextAuthProviderBase; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.ThreadSafe; + +/** + * An authentication provider that supports SASL authentication using the PLAIN mechanism to connect + * to DSE clusters secured with DseAuthenticator. + * + *

      To activate this provider, an {@code auth-provider} section must be included in the driver + * configuration, for example: + * + *

      + * dse-java-driver {
      + *   auth-provider {
      + *     class = com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider
      + *     username = user0
      + *     password = mypassword
      + *     authorization-id = user1
      + *   }
      + * }
      + * 
      + * + * See the {@code dse-reference.conf} file included with the driver for more information. + */ +@ThreadSafe +public class DsePlainTextAuthProvider extends DsePlainTextAuthProviderBase { + + private final DriverExecutionProfile config; + + public DsePlainTextAuthProvider(DriverContext context) { + super(context.getSessionName()); + this.config = context.getConfig().getDefaultProfile(); + } + + @NonNull + @Override + protected Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + String authorizationId; + if (config.isDefined(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID)) { + authorizationId = config.getString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID); + } else { + authorizationId = ""; + } + // It's not valid to use the DsePlainTextAuthProvider without a username or password, error out + // early here + AuthUtils.validateConfigPresent( + config, + DsePlainTextAuthProvider.class.getName(), + endPoint, + DefaultDriverOption.AUTH_PROVIDER_USER_NAME, + DefaultDriverOption.AUTH_PROVIDER_PASSWORD); + return new Credentials( + config.getString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME).toCharArray(), + config.getString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD).toCharArray(), + authorizationId.toCharArray()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java new file mode 100644 index 00000000000..9dadcc2311a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.auth; + +import com.datastax.dse.driver.api.core.auth.DsePlainTextAuthProviderBase; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class DseProgrammaticPlainTextAuthProvider extends DsePlainTextAuthProviderBase { + private final String authenticationId; + private final String password; + private final String authorizationId; + + public DseProgrammaticPlainTextAuthProvider( + String authenticationId, String password, String authorizationId) { + super(""); + this.authenticationId = authenticationId; + this.password = password; + this.authorizationId = authorizationId; + } + + @NonNull + @Override + protected Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + return new Credentials( + authenticationId.toCharArray(), password.toCharArray(), authorizationId.toCharArray()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java b/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java new file mode 100644 index 00000000000..ea7e46670c8 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.config.typesafe; + +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.function.Supplier; +import net.jcip.annotations.ThreadSafe; + +/** + * The default loader for DSE; it is based on Typesafe Config and reloads at a configurable + * interval. + */ +@ThreadSafe +public class DefaultDseDriverConfigLoader extends DefaultDriverConfigLoader { + + /** + * This loads configuration files in the following order of descending priority. + * + *
        + *
      1. 1. System properties. e.g. + * -Ddatastax-java-driver.basic.load-balancing-policy.local-datacenter=dc1 + *
      2. 2. The Application config, either specified by the system properties config.file, + * config.url, config.resource, or the default application.conf found in the system path. + *
      3. 3. The configuration values in the dse-reference.conf. + *
      4. 4. The configuration values in the reference.conf. + *
      + */ + private static final Supplier DEFAULT_DSE_CONFIG_SUPPLIER = + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.defaultApplication()) + .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) + .withFallback(ConfigFactory.defaultReference()) + .resolve(); + return config.getConfig("datastax-java-driver"); + }; + + public DefaultDseDriverConfigLoader() { + this(DEFAULT_DSE_CONFIG_SUPPLIER); + } + + /** + * Builds an instance with custom arguments, if you want to load the configuration from somewhere + * else. + */ + public DefaultDseDriverConfigLoader(Supplier configSupplier) { + super(configSupplier); + } + + /** + * Constructs a builder that may be used to provide additional configuration beyond those defined + * in your configuration files programmatically. For example: + * + *
      {@code
      +   * DseSession session = DseSession.builder()
      +   *   .withConfigLoader(DefaultDseDriverConfigLoader.builder()
      +   *     .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(500))
      +   *     .build())
      +   *   .build();
      +   * }
      + * + *

      In the general case, use of this is not recommended, but it may be useful in situations + * where configuration must be defined at runtime or is derived from some other configuration + * source. + * + * @deprecated Use {@link DriverConfigLoader#programmaticBuilder()} instead. + */ + @NonNull + @Deprecated + public static com.datastax.oss.driver.internal.core.config.typesafe + .DefaultDriverConfigLoaderBuilder + builder() { + return new com.datastax.oss.driver.internal.core.config.typesafe + .DefaultDriverConfigLoaderBuilder() { + @Override + @NonNull + public DriverConfigLoader build() { + // fallback on the default config supplier (config file) + return new DefaultDriverConfigLoader( + () -> buildConfig().withFallback(DEFAULT_DSE_CONFIG_SUPPLIER.get())); + } + }; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java new file mode 100644 index 00000000000..ae4a63f8912 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -0,0 +1,297 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.context; + +import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; +import com.datastax.dse.driver.internal.core.DseProtocolVersionRegistry; +import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; +import com.datastax.dse.driver.internal.core.metadata.DseTopologyMonitor; +import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParserFactory; +import com.datastax.dse.driver.internal.core.metadata.schema.queries.DseSchemaQueriesFactory; +import com.datastax.dse.driver.internal.core.metadata.token.DseReplicationStrategyFactory; +import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; +import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; +import com.datastax.dse.protocol.internal.ProtocolV4ClientCodecsForDse; +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import 
com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.context.LifecycleListener; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; +import com.datastax.oss.driver.internal.core.metadata.TopologyMonitor; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; +import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategyFactory; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; +import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.internal.core.util.Reflection; +import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; +import 
com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** Extends the default driver context to plug-in DSE-specific implementations. */ +@ThreadSafe +public class DseDriverContext extends DefaultDriverContext { + + private static final Logger LOG = LoggerFactory.getLogger(DseDriverContext.class); + + private final UUID startupClientId; + private final String startupApplicationName; + private final String startupApplicationVersion; + private final List listeners; + + public DseDriverContext( + DriverConfigLoader configLoader, + ProgrammaticArguments programmaticArguments, + DseProgrammaticArguments dseProgrammaticArguments) { + super(configLoader, programmaticArguments); + this.startupClientId = dseProgrammaticArguments.getStartupClientId(); + this.startupApplicationName = dseProgrammaticArguments.getStartupApplicationName(); + this.startupApplicationVersion = dseProgrammaticArguments.getStartupApplicationVersion(); + StackTraceElement[] stackTrace = {}; + try { + stackTrace = Thread.currentThread().getStackTrace(); + } catch (Exception ex) { + // ignore and use empty + } + this.listeners = + Collections.singletonList(new InsightsClientLifecycleListener(this, stackTrace)); + + if (!getConfig().getDefaultProfile().isDefined(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE)) { + LOG.warn( + "[{}] It looks like your configuration is missing DSE-specific options. " + + "If you use a built-in config loader, make sure you create it with {}.", + getSessionName(), + DseDriverConfigLoader.class.getSimpleName()); + } + } + /** + * @deprecated this constructor only exists for backward compatibility. 
Please use {@link + * #DseDriverContext(DriverConfigLoader, ProgrammaticArguments, DseProgrammaticArguments)} + * instead. + */ + public DseDriverContext( + DriverConfigLoader configLoader, + List> typeCodecs, + NodeStateListener nodeStateListener, + SchemaChangeListener schemaChangeListener, + RequestTracker requestTracker, + Map localDatacenters, + Map> nodeFilters, + ClassLoader classLoader, + UUID clientId, + String applicationName, + String applicationVersion) { + this( + configLoader, + ProgrammaticArguments.builder() + .addTypeCodecs(typeCodecs.toArray(new TypeCodec[0])) + .withNodeStateListener(nodeStateListener) + .withSchemaChangeListener(schemaChangeListener) + .withRequestTracker(requestTracker) + .withLocalDatacenters(localDatacenters) + .withNodeFilters(nodeFilters) + .withClassLoader(classLoader) + .build(), + DseProgrammaticArguments.builder() + .withStartupClientId(clientId) + .withStartupApplicationName(applicationName) + .withStartupApplicationVersion(applicationVersion) + .build()); + } + + @Override + protected ProtocolVersionRegistry buildProtocolVersionRegistry() { + return new DseProtocolVersionRegistry(getSessionName()); + } + + @Override + protected FrameCodec buildFrameCodec() { + return new FrameCodec<>( + new ByteBufPrimitiveCodec(getNettyOptions().allocator()), + getCompressor(), + new ProtocolV3ClientCodecs(), + new ProtocolV4ClientCodecsForDse(), + new ProtocolV5ClientCodecs(), + new DseProtocolV1ClientCodecs(), + new DseProtocolV2ClientCodecs()); + } + + @Override + protected RequestProcessorRegistry buildRequestProcessorRegistry() { + String logPrefix = getSessionName(); + + List> processors = new ArrayList<>(); + + // regular requests (sync and async) + CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); + CqlRequestSyncProcessor cqlRequestSyncProcessor = + new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); + processors.add(cqlRequestAsyncProcessor); + processors.add(cqlRequestSyncProcessor); + 
+ // prepare requests (sync and async) + CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); + CqlPrepareSyncProcessor cqlPrepareSyncProcessor = + new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareSyncProcessor); + + // continuous requests (sync and async) + ContinuousCqlRequestAsyncProcessor continuousCqlRequestAsyncProcessor = + new ContinuousCqlRequestAsyncProcessor(); + ContinuousCqlRequestSyncProcessor continuousCqlRequestSyncProcessor = + new ContinuousCqlRequestSyncProcessor(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestSyncProcessor); + + // graph requests (sync and async) + try { + Class.forName("org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal"); + GraphRequestAsyncProcessor graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(); + GraphRequestSyncProcessor graphRequestSyncProcessor = + new GraphRequestSyncProcessor(graphRequestAsyncProcessor); + processors.add(graphRequestAsyncProcessor); + processors.add(graphRequestSyncProcessor); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, + "Could not register Graph extensions; Tinkerpop API might be missing from classpath", + error); + } + + // reactive requests (regular and continuous) + try { + Class.forName("org.reactivestreams.Publisher"); + CqlRequestReactiveProcessor cqlRequestReactiveProcessor = + new CqlRequestReactiveProcessor(cqlRequestAsyncProcessor); + ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = + new ContinuousCqlRequestReactiveProcessor(continuousCqlRequestAsyncProcessor); + processors.add(cqlRequestReactiveProcessor); + processors.add(continuousCqlRequestReactiveProcessor); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, + "Could not register Reactive 
extensions; Reactive Streams API might be missing from classpath", + error); + } + + return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); + } + + @Override + protected TopologyMonitor buildTopologyMonitor() { + return new DseTopologyMonitor(this); + } + + @Override + protected ReplicationStrategyFactory buildReplicationStrategyFactory() { + return new DseReplicationStrategyFactory(this); + } + + @Override + protected SchemaQueriesFactory buildSchemaQueriesFactory() { + return new DseSchemaQueriesFactory(this); + } + + @Override + protected SchemaParserFactory buildSchemaParserFactory() { + return new DseSchemaParserFactory(this); + } + + @Override + protected MetricsFactory buildMetricsFactory() { + return new DseDropwizardMetricsFactory(this); + } + + @Override + protected Map buildStartupOptions() { + return new DseStartupOptionsBuilder(this) + .withClientId(startupClientId) + .withApplicationName(startupApplicationName) + .withApplicationVersion(startupApplicationVersion) + .build(); + } + + @Override + protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { + RequestTracker requestTrackerFromConfig = super.buildRequestTracker(requestTrackerFromBuilder); + if (requestTrackerFromConfig instanceof MultiplexingRequestTracker) { + return requestTrackerFromConfig; + } else { + MultiplexingRequestTracker multiplexingRequestTracker = new MultiplexingRequestTracker(); + multiplexingRequestTracker.register(requestTrackerFromConfig); + return multiplexingRequestTracker; + } + } + + @NonNull + @Override + public List getLifecycleListeners() { + return listeners; + } + + @Override + protected Map buildLoadBalancingPolicies() { + return Reflection.buildFromConfigProfiles( + this, + DefaultDriverOption.LOAD_BALANCING_POLICY, + LoadBalancingPolicy.class, + "com.datastax.oss.driver.internal.core.loadbalancing", + // Add the DSE default package + "com.datastax.dse.driver.internal.core.loadbalancing"); + } + 
+ @Override + protected Optional buildAuthProvider(AuthProvider authProviderFromBuilder) { + return (authProviderFromBuilder != null) + ? Optional.of(authProviderFromBuilder) + : Reflection.buildFromConfig( + this, + DefaultDriverOption.AUTH_PROVIDER_CLASS, + AuthProvider.class, + "com.datastax.oss.driver.internal.core.auth", + // Add the DSE default package + "com.datastax.dse.driver.internal.core.auth"); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java new file mode 100644 index 00000000000..54a651634e9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java @@ -0,0 +1,108 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.context; + +import static com.datastax.dse.driver.api.core.DseSession.DSE_DRIVER_COORDINATES; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import java.util.UUID; + +public class DseStartupOptionsBuilder extends StartupOptionsBuilder { + + public static final String APPLICATION_NAME_KEY = "APPLICATION_NAME"; + public static final String APPLICATION_VERSION_KEY = "APPLICATION_VERSION"; + public static final String CLIENT_ID_KEY = "CLIENT_ID"; + + private UUID clientId; + private 
String applicationName; + private String applicationVersion; + + public DseStartupOptionsBuilder(InternalDriverContext context) { + super(context); + } + + @Override + protected String getDriverVersion() { + // use the DSE Version instead + return DSE_DRIVER_COORDINATES.getVersion().toString(); + } + + @Override + protected String getDriverName() { + return DSE_DRIVER_COORDINATES.getName(); + } + + /** + * Sets the client ID to be sent in the Startup message options. + * + *

      If this method is not invoked, or the id passed in is null, a random {@link UUID} will be + * generated and used by default. + */ + public DseStartupOptionsBuilder withClientId(@Nullable UUID clientId) { + this.clientId = clientId; + return this; + } + + /** + * Sets the client application name to be sent in the Startup message options. + * + *

      If this method is not invoked, or the name passed in is null, no application name option + * will be sent in the startup message options. + */ + public DseStartupOptionsBuilder withApplicationName(@Nullable String applicationName) { + this.applicationName = applicationName; + return this; + } + + /** + * Sets the client application version to be sent in the Startup message options. + * + *

      If this method is not invoked, or the name passed in is null, no application version option + * will be sent in the startup message options. + */ + public DseStartupOptionsBuilder withApplicationVersion(@Nullable String applicationVersion) { + this.applicationVersion = applicationVersion; + return this; + } + + @Override + public Map build() { + + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + + // Fall back to generation / config if no programmatic values provided: + if (clientId == null) { + clientId = Uuids.random(); + } + if (applicationName == null) { + applicationName = config.getString(DseDriverOption.APPLICATION_NAME, null); + } + if (applicationVersion == null) { + applicationVersion = config.getString(DseDriverOption.APPLICATION_VERSION, null); + } + + NullAllowingImmutableMap.Builder builder = + NullAllowingImmutableMap.builder().putAll(super.build()); + + builder.put(CLIENT_ID_KEY, clientId.toString()); + if (applicationName != null) { + builder.put(APPLICATION_NAME_KEY, applicationName); + } + if (applicationVersion != null) { + builder.put(APPLICATION_VERSION_KEY, applicationVersion); + } + + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java new file mode 100644 index 00000000000..0cc06b3be11 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java @@ -0,0 +1,144 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.servererrors.UnfitClientException; +import com.datastax.dse.protocol.internal.DseProtocolConstants; +import com.datastax.dse.protocol.internal.request.query.ContinuousPagingOptions; +import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; +import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; +import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Execute; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import java.util.Collections; +import 
java.util.List; +import java.util.Map; + +public class DseConversions { + + public static Message toContinuousPagingMessage( + Statement statement, DriverExecutionProfile config, InternalDriverContext context) { + ConsistencyLevelRegistry consistencyLevelRegistry = context.getConsistencyLevelRegistry(); + ConsistencyLevel consistency = statement.getConsistencyLevel(); + int consistencyCode = + (consistency == null) + ? consistencyLevelRegistry.nameToCode( + config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + : consistency.getProtocolCode(); + int pageSize = config.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE); + boolean pageSizeInBytes = config.getBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES); + int maxPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + int maxPagesPerSecond = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND); + int maxEnqueuedPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + ContinuousPagingOptions options = + new ContinuousPagingOptions(maxPages, maxPagesPerSecond, maxEnqueuedPages); + ConsistencyLevel serialConsistency = statement.getSerialConsistencyLevel(); + int serialConsistencyCode = + (serialConsistency == null) + ? 
consistencyLevelRegistry.nameToCode( + config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + : serialConsistency.getProtocolCode(); + long timestamp = statement.getQueryTimestamp(); + if (timestamp == Long.MIN_VALUE) { + timestamp = context.getTimestampGenerator().next(); + } + CodecRegistry codecRegistry = context.getCodecRegistry(); + ProtocolVersion protocolVersion = context.getProtocolVersion(); + ProtocolVersionRegistry protocolVersionRegistry = context.getProtocolVersionRegistry(); + CqlIdentifier keyspace = statement.getKeyspace(); + if (statement instanceof SimpleStatement) { + SimpleStatement simpleStatement = (SimpleStatement) statement; + List positionalValues = simpleStatement.getPositionalValues(); + Map namedValues = simpleStatement.getNamedValues(); + if (!positionalValues.isEmpty() && !namedValues.isEmpty()) { + throw new IllegalArgumentException( + "Can't have both positional and named values in a statement."); + } + if (keyspace != null + && !protocolVersionRegistry.supports( + protocolVersion, DefaultProtocolFeature.PER_REQUEST_KEYSPACE)) { + throw new IllegalArgumentException( + "Can't use per-request keyspace with protocol " + protocolVersion); + } + DseQueryOptions queryOptions = + new DseQueryOptions( + consistencyCode, + Conversions.encode(positionalValues, codecRegistry, protocolVersion), + Conversions.encode(namedValues, codecRegistry, protocolVersion), + false, + pageSize, + statement.getPagingState(), + serialConsistencyCode, + timestamp, + (keyspace == null) ? 
null : keyspace.asInternal(), + pageSizeInBytes, + options); + return new Query(simpleStatement.getQuery(), queryOptions); + } else if (statement instanceof BoundStatement) { + BoundStatement boundStatement = (BoundStatement) statement; + if (!protocolVersionRegistry.supports( + protocolVersion, DefaultProtocolFeature.UNSET_BOUND_VALUES)) { + Conversions.ensureAllSet(boundStatement); + } + boolean skipMetadata = + boundStatement.getPreparedStatement().getResultSetDefinitions().size() > 0; + DseQueryOptions queryOptions = + new DseQueryOptions( + consistencyCode, + boundStatement.getValues(), + Collections.emptyMap(), + skipMetadata, + pageSize, + statement.getPagingState(), + serialConsistencyCode, + timestamp, + null, + pageSizeInBytes, + options); + PreparedStatement preparedStatement = boundStatement.getPreparedStatement(); + ByteBuffer id = preparedStatement.getId(); + ByteBuffer resultMetadataId = preparedStatement.getResultMetadataId(); + return new Execute( + Bytes.getArray(id), + (resultMetadataId == null) ? 
null : Bytes.getArray(resultMetadataId), + queryOptions); + } else { + throw new IllegalArgumentException( + "Unsupported statement type: " + statement.getClass().getName()); + } + } + + public static CoordinatorException toThrowable( + Node node, Error errorMessage, InternalDriverContext context) { + switch (errorMessage.code) { + case DseProtocolConstants.ErrorCode.CLIENT_WRITE_FAILURE: + return new UnfitClientException(node, errorMessage.message); + default: + return Conversions.toThrowable(node, errorMessage, context); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java new file mode 100644 index 00000000000..ed2959c71dd --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class ContinuousCqlRequestAsyncProcessor + implements RequestProcessor, CompletionStage> { + + public static final GenericType> + CONTINUOUS_RESULT_ASYNC = new GenericType>() {}; + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof Statement && resultType.equals(CONTINUOUS_RESULT_ASYNC); + } + + @Override + public CompletionStage process( + Statement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + return new ContinuousCqlRequestHandler(request, session, context, sessionLogPrefix).handle(); + } + + @Override + public CompletionStage newFailure(RuntimeException error) { + return CompletableFutures.failedFuture(error); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java new file mode 100644 index 00000000000..2e5b2ea5c2d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -0,0 
+1,1294 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.dse.driver.internal.core.cql.DseConversions; +import com.datastax.dse.protocol.internal.request.Revise; +import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.RequestThrottlingException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.connection.FrameTooLongException; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import 
com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; +import com.datastax.oss.driver.api.core.servererrors.ProtocolError; +import com.datastax.oss.driver.api.core.servererrors.QueryValidationException; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; +import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; +import com.datastax.oss.driver.internal.core.cql.DefaultRow; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RepreparePayload; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.internal.core.util.Loggers; +import 
com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Prepare; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.error.Unprepared; +import com.datastax.oss.protocol.internal.response.result.Rows; +import com.datastax.oss.protocol.internal.response.result.Void; +import com.datastax.oss.protocol.internal.util.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.netty.handler.codec.EncoderException; +import io.netty.util.Timeout; +import io.netty.util.Timer; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantLock; +import net.jcip.annotations.GuardedBy; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Handles a request that supports multiple response messages (a.k.a. continuous paging request). 
+ */ +@ThreadSafe +public class ContinuousCqlRequestHandler + implements ResponseCallback, GenericFutureListener>, Throttled { + + private static final Logger LOG = LoggerFactory.getLogger(ContinuousCqlRequestHandler.class); + + private final String logPrefix; + private final Statement statement; + private final DefaultSession session; + private final InternalDriverContext context; + private final DriverExecutionProfile executionProfile; + private final Queue queryPlan; + private final Set replicas; + private final RetryPolicy retryPolicy; + private final RequestThrottler throttler; + private final int maxEnqueuedPages; + private final int maxPages; + private final boolean protocolBackpressureAvailable; + private final boolean isIdempotent; + private final Message message; + private final Duration timeoutFirstPage; + private final Duration timeoutOtherPages; + private final Timer timer; + private final SessionMetricUpdater sessionMetricUpdater; + + // The errors on the nodes that were already tried. + // We don't use a map because nodes can appear multiple times. + private final List> errors = new CopyOnWriteArrayList<>(); + + // Coordinates concurrent accesses between the client and I/O threads + private final ReentrantLock lock = new ReentrantLock(); + + // The page queue, storing responses that we have received and have not been consumed by the + // client yet. + @GuardedBy("lock") + private final Queue queue; + + // If the client requests a page and we can't serve it immediately (empty queue), then we create + // this future and have the client wait on it. Otherwise this field is null. + @GuardedBy("lock") + @VisibleForTesting + CompletableFuture pendingResult; + + // How many pages were requested. This is the total number of pages requested from the beginning. 
+ // It will be zero if the protocol does not support numPagesRequested (DSE_V1) + @GuardedBy("lock") + private int numPagesRequested; + + // An integer that represents the state of the continuous paging request: + // - if positive, it is the sequence number of the next expected page; + // - if negative, it is a terminal state, identified by the constants below. + @GuardedBy("lock") + @VisibleForTesting + int state = 1; + + private static final int STATE_FINISHED = -1; + private static final int STATE_FAILED = -2; + + // Set when the execution starts, and is never modified after. + private volatile long startTimeNanos; + + // These are set when the first page arrives, and are never modified after. + private volatile ColumnDefinitions columnDefinitions; + + // These change over time as different nodes are tried; + // they can only be null before the first request is sent. + private volatile Node node; + private volatile DriverChannel channel; + private volatile int streamId; + // Set each time a new request/response cycle starts. + private volatile long messageStartTimeNanos; + private volatile Timeout timeout; + + // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for + // the first attempt, 1 for the first retry, etc.). 
+ private volatile int retryCount; + + public ContinuousCqlRequestHandler( + @NonNull Statement statement, + @NonNull DefaultSession session, + @NonNull InternalDriverContext context, + @NonNull String sessionLogPrefix) { + ProtocolVersion protocolVersion = context.getProtocolVersion(); + if (!context + .getProtocolVersionRegistry() + .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { + throw new IllegalStateException( + "Cannot execute continuous paging requests with protocol version " + protocolVersion); + } + this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); + LOG.trace("[{}] Creating new continuous handler for request {}", logPrefix, statement); + this.statement = statement; + this.session = session; + this.context = context; + if (statement.getExecutionProfile() != null) { + this.executionProfile = statement.getExecutionProfile(); + } else { + DriverConfig config = context.getConfig(); + String profileName = statement.getExecutionProfileName(); + this.executionProfile = + (profileName == null || profileName.isEmpty()) + ? config.getDefaultProfile() + : config.getProfile(profileName); + } + this.queryPlan = + statement.getNode() != null + ? new QueryPlan(statement.getNode()) + : context + .getLoadBalancingPolicyWrapper() + .newQueryPlan(statement, executionProfile.getName(), session); + this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); + Boolean idempotent = statement.isIdempotent(); + this.isIdempotent = + (idempotent == null) + ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) + : idempotent; + this.timeoutFirstPage = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); + this.timeoutOtherPages = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); + this.timer = context.getNettyOptions().getTimer(); + this.maxEnqueuedPages = + executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + this.queue = new ArrayDeque<>(maxEnqueuedPages); + this.maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + this.protocolBackpressureAvailable = + protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); + this.numPagesRequested = protocolBackpressureAvailable ? maxEnqueuedPages : 0; + this.message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); + this.replicas = getReplicas(); + this.throttler = context.getRequestThrottler(); + this.throttler.register(this); + this.sessionMetricUpdater = session.getMetricUpdater(); + this.startTimeNanos = System.nanoTime(); + } + + // MAIN LIFECYCLE + + @Override + public void onStreamIdAssigned(int streamId) { + LOG.trace("[{}] Assigned streamId {} on node {}", logPrefix, streamId, node); + this.streamId = streamId; + } + + @Override + public boolean isLastResponse(@NonNull Frame responseFrame) { + Message message = responseFrame.message; + if (message instanceof Rows) { + Rows rows = (Rows) message; + DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); + return metadata.isLastContinuousPage; + } else { + return message instanceof Error; + } + } + + @Override + public void onThrottleReady(boolean wasDelayed) { + if (wasDelayed) { + session + .getMetricUpdater() + .updateTimer( + DefaultSessionMetric.THROTTLING_DELAY, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); + } + sendRequest(null); + } + + public CompletionStage handle() 
{ + return dequeueOrCreatePending(); + } + + /** + * Sends the initial request to the next available node. + * + * @param node if not null, it will be attempted first before the rest of the query plan. It + * happens only when we retry on the same host. + */ + private void sendRequest(@Nullable Node node) { + channel = null; + if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { + while ((node = queryPlan.poll()) != null) { + channel = session.getChannel(node, logPrefix); + if (channel != null) { + break; + } + } + } + if (channel == null || node == null) { + // We've reached the end of the query plan without finding any node to write to; abort the + // continuous paging session. + lock.lock(); + try { + abort(AllNodesFailedException.fromErrors(errors), false); + } finally { + lock.unlock(); + } + } else { + if (replicas.isEmpty()) { + LOG.warn( + "[{}] Could not determine if the node is a replica, " + + "continuous paging may not be available: {}", + logPrefix, + node); + } else if (!replicas.contains(node)) { + LOG.warn( + "[{}] Contacting a node that is likely not a replica, " + + "continuous paging may not be available: {}", + logPrefix, + node); + } + this.node = node; + streamId = -1; + messageStartTimeNanos = System.nanoTime(); + channel.write(message, false, statement.getCustomPayload(), this).addListener(this); + } + } + + /** + * Invoked when the write from {@link #sendRequest(Node)} completes. + * + * @param future The future representing the outcome of the write operation. 
+ */ + @Override + public void operationComplete(@NonNull Future future) { + if (!future.isSuccess()) { + Throwable error = future.cause(); + if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { + trackNodeError(node, error.getCause()); + lock.lock(); + try { + abort(error.getCause(), false); + } finally { + lock.unlock(); + } + } else { + LOG.trace( + "[{}] Failed to send request on {}, trying next node (cause: {})", + logPrefix, + channel, + error); + ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); + recordError(node, error); + trackNodeError(node, error.getCause()); + sendRequest(null); + } + } else { + LOG.trace("[{}] Request sent on {}", logPrefix, channel); + timeout = scheduleTimeout(1); + } + } + + /** + * Invoked when a continuous paging response is received, either a successful or failed one. + * + *

      Delegates further processing to appropriate methods: {@link #processResultResponse(Result, + * Frame)} if the response was successful, or {@link #processErrorResponse(Error)} if it wasn't. + * + * @param response the received {@link Frame}. + */ + @Override + public void onResponse(@NonNull Frame response) { + stopNodeMessageTimer(); + cancelTimeout(); + lock.lock(); + try { + if (state < 0) { + LOG.trace("[{}] Got result but the request has been cancelled, ignoring", logPrefix); + return; + } + try { + Message responseMessage = response.message; + if (responseMessage instanceof Result) { + LOG.trace("[{}] Got result", logPrefix); + processResultResponse((Result) responseMessage, response); + } else if (responseMessage instanceof Error) { + LOG.trace("[{}] Got error response", logPrefix); + processErrorResponse((Error) responseMessage); + } else { + IllegalStateException error = + new IllegalStateException("Unexpected response " + responseMessage); + trackNodeError(node, error); + abort(error, false); + } + } catch (Throwable t) { + trackNodeError(node, t); + abort(t, false); + } + } finally { + lock.unlock(); + } + } + + /** + * Invoked when a continuous paging request hits an unexpected error. + * + *

      Delegates further processing to to the retry policy ({@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the error encountered, usually a network problem. + */ + @Override + public void onFailure(@NonNull Throwable error) { + cancelTimeout(); + LOG.trace(String.format("[%s] Request failure", logPrefix), error); + RetryDecision decision; + if (!isIdempotent || error instanceof FrameTooLongException) { + decision = RetryDecision.RETHROW; + } else { + decision = retryPolicy.onRequestAborted(statement, error, retryCount); + } + updateErrorMetrics( + ((DefaultNode) node).getMetricUpdater(), + decision, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED); + lock.lock(); + try { + processRetryDecision(decision, error); + } finally { + lock.unlock(); + } + } + + @Override + public void onThrottleFailure(@NonNull RequestThrottlingException error) { + session + .getMetricUpdater() + .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); + lock.lock(); + try { + abort(error, false); + } finally { + lock.unlock(); + } + } + + // PROCESSING METHODS + + /** + * Processes a new result response, creating the corresponding {@link ContinuousAsyncResultSet} + * object and then enqueuing it or serving it directly to the user if he was waiting for it. + * + * @param result the result to process. It is normally a {@link Rows} object, but may be a {@link + * Void} object if the retry policy decided to ignore an error. + * @param frame the {@link Frame} (used to create the {@link ExecutionInfo} the first time). 
+ */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { + assert lock.isHeldByCurrentThread(); + try { + ExecutionInfo executionInfo = createExecutionInfo(result, frame); + if (result instanceof Rows) { + DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); + if (columnDefinitions == null) { + // Contrary to ROWS responses from regular queries, + // the first page always includes metadata so we use this + // regardless of whether or not the query was from a prepared statement. + columnDefinitions = Conversions.toColumnDefinitions(rowsMetadata, context); + } + int pageNumber = rowsMetadata.continuousPageNumber; + int currentPage = state; + if (pageNumber != currentPage) { + abort( + new IllegalStateException( + String.format("Received page %d but was expecting %d", pageNumber, currentPage)), + false); + } else { + DefaultContinuousAsyncResultSet resultSet = createResultSet((Rows) result, executionInfo); + if (rowsMetadata.isLastContinuousPage) { + LOG.trace( + "[{}] Received last page ({} - {} rows)", + logPrefix, + pageNumber, + resultSet.remaining()); + state = STATE_FINISHED; + reenableAutoReadIfNeeded(); + enqueueOrCompletePending(resultSet); + stopGlobalRequestTimer(); + } else { + LOG.trace( + "[{}] Received page {} ({} rows)", logPrefix, pageNumber, resultSet.remaining()); + if (currentPage > 0) { + state = currentPage + 1; + } + enqueueOrCompletePending(resultSet); + } + } + } else { + // Void responses happen only when the retry decision is ignore. 
+ assert result instanceof Void; + ContinuousAsyncResultSet resultSet = DefaultContinuousAsyncResultSet.empty(executionInfo); + LOG.trace( + "[{}] Continuous paging interrupted by retry policy decision to ignore error", + logPrefix); + state = STATE_FINISHED; + reenableAutoReadIfNeeded(); + enqueueOrCompletePending(resultSet); + stopGlobalRequestTimer(); + } + } catch (Throwable error) { + abort(error, false); + } + } + + /** + * Processes an unsuccessful response. + * + *

      Depending on the error, may trigger: + * + *

        + *
      1. a re-prepare cycle, see {@link #processUnprepared(Unprepared)}; + *
      2. an immediate retry on the next host, bypassing the retry policy, if the host was + * bootstrapping; + *
      3. an immediate abortion if the error is unrecoverable; + *
      4. further processing if the error is recoverable, see {@link + * #processRecoverableError(CoordinatorException)} + *
      + * + * @param errorMessage the error message received. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processErrorResponse(@NonNull Error errorMessage) { + assert lock.isHeldByCurrentThread(); + if (errorMessage instanceof Unprepared) { + processUnprepared((Unprepared) errorMessage); + } else { + CoordinatorException error = DseConversions.toThrowable(node, errorMessage, context); + if (error instanceof BootstrappingException) { + LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); + recordError(node, error); + trackNodeError(node, error); + sendRequest(null); + } else if (error instanceof QueryValidationException + || error instanceof FunctionFailureException + || error instanceof ProtocolError + || state > 1) { + // we only process recoverable errors for the first page, + // errors on subsequent pages will always trigger an immediate abortion + LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); + trackNodeError(node, error); + abort(error, true); + } else { + processRecoverableError(error); + } + } + } + + /** + * Processes a recoverable error. + * + *

      In most cases, delegates to the retry policy and its decision, see {@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the recoverable error. + */ + private void processRecoverableError(@NonNull CoordinatorException error) { + assert lock.isHeldByCurrentThread(); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + RetryDecision decision; + if (error instanceof ReadTimeoutException) { + ReadTimeoutException readTimeout = (ReadTimeoutException) error; + decision = + retryPolicy.onReadTimeout( + statement, + readTimeout.getConsistencyLevel(), + readTimeout.getBlockFor(), + readTimeout.getReceived(), + readTimeout.wasDataPresent(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.READ_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); + } else if (error instanceof WriteTimeoutException) { + WriteTimeoutException writeTimeout = (WriteTimeoutException) error; + if (isIdempotent) { + decision = + retryPolicy.onWriteTimeout( + statement, + writeTimeout.getConsistencyLevel(), + writeTimeout.getWriteType(), + writeTimeout.getBlockFor(), + writeTimeout.getReceived(), + retryCount); + } else { + decision = RetryDecision.RETHROW; + } + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.WRITE_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); + } else if (error instanceof UnavailableException) { + UnavailableException unavailable = (UnavailableException) error; + decision = + retryPolicy.onUnavailable( + statement, + unavailable.getConsistencyLevel(), + unavailable.getRequired(), + unavailable.getAlive(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.UNAVAILABLES, + DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, + DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); + } else { + decision = + isIdempotent + ? 
retryPolicy.onErrorResponse(statement, error, retryCount) + : RetryDecision.RETHROW; + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.OTHER_ERRORS, + DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, + DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); + } + processRetryDecision(decision, error); + } + + /** + * Processes an {@link Unprepared} error by re-preparing then retrying on the same host. + * + * @param errorMessage the unprepared error message. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processUnprepared(@NonNull Unprepared errorMessage) { + assert lock.isHeldByCurrentThread(); + ByteBuffer idToReprepare = ByteBuffer.wrap(errorMessage.id); + LOG.trace( + "[{}] Statement {} is not prepared on {}, re-preparing", + logPrefix, + Bytes.toHexString(idToReprepare), + node); + RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); + if (repreparePayload == null) { + throw new IllegalStateException( + String.format( + "Tried to execute unprepared query %s but we don't have the data to re-prepare it", + Bytes.toHexString(idToReprepare))); + } + Prepare prepare = repreparePayload.toMessage(); + Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); + ThrottledAdminRequestHandler.prepare( + channel, + prepare, + repreparePayload.customPayload, + timeout, + throttler, + sessionMetricUpdater, + logPrefix) + .start() + .whenComplete( + (repreparedId, exception) -> { + // If we run into an unrecoverable error, surface it to the client instead of retrying + Throwable fatalError = null; + if (exception == null) { + if (!repreparedId.equals(idToReprepare)) { + IllegalStateException illegalStateException = + new IllegalStateException( + String.format( + "ID mismatch while trying to reprepare (expected %s, got %s). " + + "This prepared statement won't work anymore. " + + "This usually happens when you run a 'USE...' 
query after " + + "the statement was prepared.", + Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); + trackNodeError(node, illegalStateException); + fatalError = illegalStateException; + } else { + LOG.trace( + "[{}] Re-prepare successful, retrying on the same node ({})", + logPrefix, + node); + sendRequest(node); + } + } else { + if (exception instanceof UnexpectedResponseException) { + Message prepareErrorMessage = ((UnexpectedResponseException) exception).message; + if (prepareErrorMessage instanceof Error) { + CoordinatorException prepareError = + DseConversions.toThrowable(node, (Error) prepareErrorMessage, context); + if (prepareError instanceof QueryValidationException + || prepareError instanceof FunctionFailureException + || prepareError instanceof ProtocolError) { + LOG.trace("[{}] Unrecoverable error on re-prepare, rethrowing", logPrefix); + trackNodeError(node, prepareError); + fatalError = prepareError; + } + } + } else if (exception instanceof RequestThrottlingException) { + trackNodeError(node, exception); + fatalError = exception; + } + if (fatalError == null) { + LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); + recordError(node, exception); + trackNodeError(node, exception); + sendRequest(null); + } + } + if (fatalError != null) { + lock.lock(); + try { + abort(fatalError, true); + } finally { + lock.unlock(); + } + } + }); + } + + /** + * Processes the retry decision by triggering a retry, aborting or ignoring; also records the + * failures for further access. + * + * @param decision the decision to process. + * @param error the original error. 
+ */ + @SuppressWarnings({"NonAtomicOperationOnVolatileField", "NonAtomicVolatileUpdate"}) + private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { + assert lock.isHeldByCurrentThread(); + LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); + switch (decision) { + case RETRY_SAME: + recordError(node, error); + trackNodeError(node, error); + retryCount++; + sendRequest(node); + break; + case RETRY_NEXT: + recordError(node, error); + trackNodeError(node, error); + retryCount++; + sendRequest(null); + break; + case RETHROW: + trackNodeError(node, error); + abort(error, true); + break; + case IGNORE: + processResultResponse(Void.INSTANCE, null); + break; + } + } + + // PAGE HANDLING + + /** + * Enqueues a response or, if the client was already waiting for it, completes the pending future. + * + *

      Guarded by {@link #lock}. + * + * @param pageOrError the next page, or an error. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void enqueueOrCompletePending(@NonNull Object pageOrError) { + assert lock.isHeldByCurrentThread(); + if (pendingResult != null) { + if (LOG.isTraceEnabled()) { + LOG.trace( + "[{}] Client was waiting on empty queue, completing with {}", + logPrefix, + asTraceString(pageOrError)); + } + CompletableFuture tmp = pendingResult; + // null out pendingResult before completing it because its completion + // may trigger a call to fetchNextPage -> dequeueOrCreatePending, + // which expects pendingResult to be null. + pendingResult = null; + completeResultSetFuture(tmp, pageOrError); + } else { + if (LOG.isTraceEnabled()) { + LOG.trace("[{}] Enqueuing {}", logPrefix, asTraceString(pageOrError)); + } + queue.add(pageOrError); + // Backpressure without protocol support: if the queue grows too large, + // disable auto-read so that the channel eventually becomes + // non-writable on the server side (causing it to back off for a while) + if (!protocolBackpressureAvailable && queue.size() == maxEnqueuedPages && state > 0) { + LOG.trace( + "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, queue.size()); + channel.config().setAutoRead(false); + } + } + } + + /** + * Dequeue a response or, if the queue is empty, create the future that will get notified of the + * next response, when it arrives. + * + *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#fetchNextPage()}. + * + * @return the next page's future; never null. + */ + @NonNull + protected CompletableFuture dequeueOrCreatePending() { + lock.lock(); + try { + // If the client was already waiting for a page, there's no way it can call this method again + // (this is guaranteed by our public API because in order to ask for the next page, + // you need the reference to the previous page). + assert pendingResult == null; + + Object head = queue.poll(); + if (!protocolBackpressureAvailable && head != null && queue.size() == maxEnqueuedPages - 1) { + LOG.trace( + "[{}] Back to {} queued response pages, re-enabling auto-read", + logPrefix, + queue.size()); + channel.config().setAutoRead(true); + } + maybeRequestMore(); + if (head != null) { + if (state == STATE_FAILED && !(head instanceof Throwable)) { + LOG.trace( + "[{}] Client requested next page on cancelled queue, discarding page and returning cancelled future", + logPrefix); + return cancelledResultSetFuture(); + } else { + if (LOG.isTraceEnabled()) { + LOG.trace( + "[{}] Client requested next page on non-empty queue, returning immediate future of {}", + logPrefix, + asTraceString(head)); + } + return immediateResultSetFuture(head); + } + } else { + if (state == STATE_FAILED) { + LOG.trace( + "[{}] Client requested next page on cancelled empty queue, returning cancelled future", + logPrefix); + return cancelledResultSetFuture(); + } else { + LOG.trace( + "[{}] Client requested next page but queue is empty, installing future", logPrefix); + pendingResult = createResultSetFuture(); + // Only schedule a timeout if we're past the first page (the first page's timeout is + // handled in sendRequest). + if (state > 1) { + timeout = scheduleTimeout(state); + // Note: each new timeout is cancelled when the next response arrives, see + // onResponse(Frame). 
+ } + return pendingResult; + } + } + } finally { + lock.unlock(); + } + } + + /** + * If the total number of results in the queue and in-flight (requested - received) is less than + * half the queue size, then request more pages, unless the {@link #state} is failed, we're still + * waiting for the first page (so maybe still throttled or in the middle of a retry), or we don't + * support backpressure at the protocol level. + */ + @SuppressWarnings("GuardedBy") + private void maybeRequestMore() { + assert lock.isHeldByCurrentThread(); + if (state < 2 || streamId == -1 || !protocolBackpressureAvailable) { + return; + } + // if we have already requested more than the client needs, then no need to request some more + if (maxPages > 0 && numPagesRequested >= maxPages) { + return; + } + // the pages received so far, which is the state minus one + int received = state - 1; + int requested = numPagesRequested; + // the pages that fit in the queue, which is the queue free space minus the requests in flight + int freeSpace = maxEnqueuedPages - queue.size(); + int inFlight = requested - received; + int numPagesFittingInQueue = freeSpace - inFlight; + if (numPagesFittingInQueue >= maxEnqueuedPages / 2) { + LOG.trace("[{}] Requesting more {} pages", logPrefix, numPagesFittingInQueue); + numPagesRequested = requested + numPagesFittingInQueue; + sendMorePagesRequest(numPagesFittingInQueue); + } + } + + /** + * Sends a request for more pages (a.k.a. backpressure request). + * + * @param nextPages the number of extra pages to request. 
+ */ + @SuppressWarnings("GuardedBy") + private void sendMorePagesRequest(int nextPages) { + assert lock.isHeldByCurrentThread(); + assert channel != null : "expected valid connection in order to request more pages"; + assert protocolBackpressureAvailable; + assert streamId != -1; + + LOG.trace("[{}] Sending request for more pages", logPrefix); + ThrottledAdminRequestHandler.query( + channel, + Revise.requestMoreContinuousPages(streamId, nextPages), + statement.getCustomPayload(), + timeoutOtherPages, + throttler, + session.getMetricUpdater(), + logPrefix, + "request " + nextPages + " more pages for id " + streamId) + .start() + .handle( + (result, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, "[{}] Error requesting more pages, aborting.", logPrefix, error); + lock.lock(); + try { + // Set fromServer to false because we want the callback to still cancel the + // session if possible or else the server will wait on a timeout. + abort(error, false); + } finally { + lock.unlock(); + } + } + return null; + }); + } + + // TIMEOUT HANDLING + + private Timeout scheduleTimeout(int expectedPage) { + if (expectedPage < 0) { + return null; + } + Duration timeout = (expectedPage == 1) ? timeoutFirstPage : timeoutOtherPages; + if (timeout.toNanos() <= 0) { + return null; + } + LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); + return timer.newTimeout( + (timeout1) -> { + lock.lock(); + try { + if (state == expectedPage) { + abort( + new DriverTimeoutException( + String.format("Timed out waiting for page %d", expectedPage)), + false); + } else { + // Ignore timeout if the request has moved on in the interim. + LOG.trace( + "[{}] Timeout fired for page {} but query already at state {}, skipping", + logPrefix, + expectedPage, + state); + } + } finally { + lock.unlock(); + } + }, + timeout.toNanos(), + TimeUnit.NANOSECONDS); + } + + /** Cancels the current timeout, if non null. 
*/ + private void cancelTimeout() { + Timeout timeout = this.timeout; + if (timeout != null) { + LOG.trace("[{}] Cancelling timeout", logPrefix); + timeout.cancel(); + } + } + + // CANCELLATION + + /** + * Cancels the continuous paging request. + * + *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#cancel()}, or from a + * driver I/O thread. + */ + void cancel() { + lock.lock(); + try { + if (state < 0) { + return; + } else { + LOG.trace( + "[{}] Cancelling continuous paging session with state {} on node {}", + logPrefix, + state, + node); + state = STATE_FAILED; + if (pendingResult != null) { + pendingResult.cancel(true); + } + // the rest can be done without holding the lock, see below + } + } finally { + lock.unlock(); + } + if (channel != null) { + if (!channel.closeFuture().isDone()) { + this.channel.cancel(this); + } + sendCancelRequest(); + } + reenableAutoReadIfNeeded(); + } + + private void sendCancelRequest() { + LOG.trace("[{}] Sending cancel request", logPrefix); + ThrottledAdminRequestHandler.query( + channel, + Revise.cancelContinuousPaging(streamId), + statement.getCustomPayload(), + timeoutOtherPages, + throttler, + session.getMetricUpdater(), + logPrefix, + "cancel request") + .start() + .handle( + (result, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Error sending cancel request. 
" + + "This is not critical (the request will eventually time out server-side).", + logPrefix, + error); + } else { + LOG.trace("[{}] Continuous paging session cancelled successfully", logPrefix); + } + return null; + }); + } + + // TERMINATION + + private void reenableAutoReadIfNeeded() { + // Make sure we don't leave the channel unreadable + LOG.trace("[{}] Re-enabling auto-read", logPrefix); + if (!protocolBackpressureAvailable) { + channel.config().setAutoRead(true); + } + } + + // ERROR HANDLING + + private void recordError(@NonNull Node node, @NonNull Throwable error) { + errors.add(new AbstractMap.SimpleEntry<>(node, error)); + } + + private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { + long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; + context + .getRequestTracker() + .onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); + } + + /** + * Aborts the continuous paging session due to an error that can be either from the server or the + * client. + * + * @param error the error that causes the abortion. + * @param fromServer whether the error was triggered by the coordinator or by the driver. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void abort(@NonNull Throwable error, boolean fromServer) { + assert lock.isHeldByCurrentThread(); + LOG.trace( + "[{}] Aborting due to {} ({})", + logPrefix, + error.getClass().getSimpleName(), + error.getMessage()); + if (channel == null) { + // This only happens when sending the initial request, if no host was available + // or if the iterator returned by the LBP threw an exception. + // In either case the write was not even attempted, and + // we set the state right now. 
+ enqueueOrCompletePending(error); + state = STATE_FAILED; + } else if (state > 0) { + enqueueOrCompletePending(error); + if (fromServer) { + // We can safely assume the server won't send any more responses, + // so set the state and call release() right now. + state = STATE_FAILED; + reenableAutoReadIfNeeded(); + } else { + // attempt to cancel first, i.e. ask server to stop sending responses, + // and only then release. + cancel(); + } + } + stopGlobalRequestTimer(); + } + + // METRICS + + private void stopNodeMessageTimer() { + ((DefaultNode) node) + .getMetricUpdater() + .updateTimer( + DefaultNodeMetric.CQL_MESSAGES, + executionProfile.getName(), + System.nanoTime() - messageStartTimeNanos, + TimeUnit.NANOSECONDS); + } + + private void stopGlobalRequestTimer() { + session + .getMetricUpdater() + .updateTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); + } + + private void updateErrorMetrics( + @NonNull NodeMetricUpdater metricUpdater, + @NonNull RetryDecision decision, + @NonNull DefaultNodeMetric error, + @NonNull DefaultNodeMetric retriesOnError, + @NonNull DefaultNodeMetric ignoresOnError) { + metricUpdater.incrementCounter(error, executionProfile.getName()); + switch (decision) { + case RETRY_SAME: + case RETRY_NEXT: + metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); + metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); + break; + case IGNORE: + metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); + metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); + break; + case RETHROW: + // nothing do do + } + } + + // UTILITY METHODS + + @NonNull + private Set getReplicas() { + if (session.getMetadata().getTokenMap().isPresent()) { + CqlIdentifier keyspace = statement.getKeyspace(); + if (keyspace == null) { + keyspace = statement.getRoutingKeyspace(); + 
if (keyspace == null) { + keyspace = session.getKeyspace().orElse(null); + } + } + if (keyspace != null) { + TokenMap tokenMap = session.getMetadata().getTokenMap().get(); + Token routingToken = statement.getRoutingToken(); + if (routingToken != null) { + return tokenMap.getReplicas(keyspace, routingToken); + } else { + ByteBuffer routingKey = statement.getRoutingKey(); + if (routingKey != null) { + return tokenMap.getReplicas(keyspace, routingKey); + } + } + } + } + return Collections.emptySet(); + } + + @NonNull + private DefaultExecutionInfo createExecutionInfo( + @NonNull Result result, @Nullable Frame response) { + ByteBuffer pagingState = + result instanceof Rows ? ((Rows) result).getMetadata().pagingState : null; + return new DefaultExecutionInfo( + statement, + node, + 0, + 0, + errors, + pagingState, + response, + true, + session, + context, + executionProfile); + } + + @NonNull + private DefaultContinuousAsyncResultSet createResultSet( + @NonNull Rows rows, @NonNull ExecutionInfo executionInfo) { + Queue> data = rows.getData(); + CountingIterator iterator = + new CountingIterator(data.size()) { + @Override + protected Row computeNext() { + List rowData = data.poll(); + return (rowData == null) + ? 
endOfData() + : new DefaultRow(columnDefinitions, rowData, context); + } + }; + DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); + return new DefaultContinuousAsyncResultSet( + iterator, + columnDefinitions, + metadata.continuousPageNumber, + !metadata.isLastContinuousPage, + executionInfo, + this); + } + + @NonNull + private CompletableFuture createResultSetFuture() { + CompletableFuture future = new CompletableFuture<>(); + future.whenComplete( + (rs, t) -> { + if (t instanceof CancellationException) { + // if the future has been canceled by the user, propagate the cancellation + cancel(); + } + }); + return future; + } + + @NonNull + private CompletableFuture immediateResultSetFuture( + @NonNull Object pageOrError) { + CompletableFuture future = createResultSetFuture(); + completeResultSetFuture(future, pageOrError); + return future; + } + + @NonNull + private CompletableFuture cancelledResultSetFuture() { + return immediateResultSetFuture( + new CancellationException( + "Can't get more results because the continuous query has failed already. 
" + + "Most likely this is because the query was cancelled")); + } + + private void completeResultSetFuture( + @NonNull CompletableFuture future, @NonNull Object pageOrError) { + long now = System.nanoTime(); + long totalLatencyNanos = now - startTimeNanos; + long nodeLatencyNanos = now - messageStartTimeNanos; + if (pageOrError instanceof ContinuousAsyncResultSet) { + if (future.complete((ContinuousAsyncResultSet) pageOrError)) { + throttler.signalSuccess(this); + context + .getRequestTracker() + .onNodeSuccess(statement, nodeLatencyNanos, executionProfile, node, logPrefix); + context + .getRequestTracker() + .onSuccess(statement, totalLatencyNanos, executionProfile, node, logPrefix); + } + } else { + Throwable error = (Throwable) pageOrError; + if (future.completeExceptionally(error)) { + context + .getRequestTracker() + .onError(statement, error, totalLatencyNanos, executionProfile, node, logPrefix); + if (error instanceof DriverTimeoutException) { + throttler.signalTimeout(this); + session + .getMetricUpdater() + .incrementCounter( + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + } else if (!(error instanceof RequestThrottlingException)) { + throttler.signalError(this, error); + } + } + } + } + + @NonNull + private static String asTraceString(@NonNull Object pageOrError) { + return (pageOrError instanceof ContinuousAsyncResultSet) + ? "page " + ((ContinuousAsyncResultSet) pageOrError).pageNumber() + : ((Exception) pageOrError).getClass().getSimpleName(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java new file mode 100644 index 00000000000..592afedae1b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class ContinuousCqlRequestSyncProcessor + implements RequestProcessor, ContinuousResultSet> { + + public static final GenericType CONTINUOUS_RESULT_SYNC = + GenericType.of(ContinuousResultSet.class); + + private final ContinuousCqlRequestAsyncProcessor asyncProcessor; + + public ContinuousCqlRequestSyncProcessor(ContinuousCqlRequestAsyncProcessor asyncProcessor) { + this.asyncProcessor = asyncProcessor; + } + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof Statement && resultType.equals(CONTINUOUS_RESULT_SYNC); + } + + @Override + public ContinuousResultSet process( + Statement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + BlockingOperation.checkNotDriverThread(); + ContinuousAsyncResultSet firstPage = + CompletableFutures.getUninterruptibly( + asyncProcessor.process(request, session, context, sessionLogPrefix)); + return new 
DefaultContinuousResultSet(firstPage); + } + + @Override + public ContinuousResultSet newFailure(RuntimeException error) { + throw error; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java new file mode 100644 index 00000000000..a804ac8dec7 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java @@ -0,0 +1,158 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe // wraps a mutable queue +public class DefaultContinuousAsyncResultSet implements ContinuousAsyncResultSet { + + private final Iterable currentPage; + private final ColumnDefinitions columnDefinitions; + private final int pageNumber; + private final boolean hasMorePages; + private final ExecutionInfo executionInfo; + private final ContinuousCqlRequestHandler handler; + private final CountingIterator iterator; + + public DefaultContinuousAsyncResultSet( + CountingIterator iterator, + ColumnDefinitions columnDefinitions, + int pageNumber, + boolean hasMorePages, 
+ ExecutionInfo executionInfo, + ContinuousCqlRequestHandler handler) { + this.columnDefinitions = columnDefinitions; + this.pageNumber = pageNumber; + this.hasMorePages = hasMorePages; + this.executionInfo = executionInfo; + this.handler = handler; + this.iterator = iterator; + this.currentPage = () -> iterator; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return columnDefinitions; + } + + @Override + public boolean wasApplied() { + // always return true for non-conditional updates + return true; + } + + @NonNull + @Override + public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @Override + public int pageNumber() { + return pageNumber; + } + + @Override + public boolean hasMorePages() { + return hasMorePages; + } + + @NonNull + @Override + public Iterable currentPage() { + return currentPage; + } + + @Override + public int remaining() { + return iterator.remaining(); + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws IllegalStateException { + if (!hasMorePages()) { + throw new IllegalStateException( + "Can't call fetchNextPage() on the last page (use hasMorePages() to check)"); + } + return handler.dequeueOrCreatePending(); + } + + @Override + public void cancel() { + handler.cancel(); + } + + static ContinuousAsyncResultSet empty(ExecutionInfo executionInfo) { + + return new ContinuousAsyncResultSet() { + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return EmptyColumnDefinitions.INSTANCE; + } + + @NonNull + @Override + public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @NonNull + @Override + public Iterable currentPage() { + return Collections.emptyList(); + } + + @Override + public int remaining() { + return 0; + } + + @Override + public boolean hasMorePages() { + return false; + } + + @Override + public int pageNumber() { + return 1; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() + throws 
IllegalStateException { + throw new IllegalStateException( + "Can't call fetchNextPage() on the last page (use hasMorePages() to check)"); + } + + @Override + public void cancel() { + // noop + } + + @Override + public boolean wasApplied() { + // always true + return true; + } + }; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java new file mode 100644 index 00000000000..b5c5d9a7e30 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java @@ -0,0 +1,121 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import net.jcip.annotations.NotThreadSafe; + +/** + * This class is roughly equivalent to {@link + * com.datastax.oss.driver.internal.core.cql.MultiPageResultSet}, except that {@link + * RowIterator#maybeMoveToNextPage()} needs to check for cancellation before fetching the next page. 
+ */ +@NotThreadSafe +public class DefaultContinuousResultSet implements ContinuousResultSet { + + private final RowIterator iterator; + private final List executionInfos = new ArrayList<>(); + private final ColumnDefinitions columnDefinitions; + + public DefaultContinuousResultSet(ContinuousAsyncResultSet firstPage) { + iterator = new RowIterator(firstPage); + columnDefinitions = firstPage.getColumnDefinitions(); + executionInfos.add(firstPage.getExecutionInfo()); + } + + @Override + public void cancel() { + iterator.cancel(); + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return columnDefinitions; + } + + @NonNull + @Override + public List getExecutionInfos() { + return executionInfos; + } + + @NonNull + @Override + public Iterator iterator() { + return iterator; + } + + @Override + public boolean isFullyFetched() { + return iterator.isFullyFetched(); + } + + @Override + public int getAvailableWithoutFetching() { + return iterator.remaining(); + } + + @Override + public boolean wasApplied() { + return iterator.wasApplied(); + } + + private class RowIterator extends CountingIterator { + private ContinuousAsyncResultSet currentPage; + private Iterator currentRows; + private boolean cancelled = false; + + private RowIterator(ContinuousAsyncResultSet firstPage) { + super(firstPage.remaining()); + currentPage = firstPage; + currentRows = firstPage.currentPage().iterator(); + } + + @Override + protected Row computeNext() { + maybeMoveToNextPage(); + return currentRows.hasNext() ? 
currentRows.next() : endOfData(); + } + + private void maybeMoveToNextPage() { + if (!cancelled && !currentRows.hasNext() && currentPage.hasMorePages()) { + BlockingOperation.checkNotDriverThread(); + ContinuousAsyncResultSet nextPage = + CompletableFutures.getUninterruptibly(currentPage.fetchNextPage()); + currentPage = nextPage; + remaining += currentPage.remaining(); + currentRows = nextPage.currentPage().iterator(); + executionInfos.add(nextPage.getExecutionInfo()); + } + } + + private boolean isFullyFetched() { + return !currentPage.hasMorePages(); + } + + private boolean wasApplied() { + return currentPage.wasApplied(); + } + + private void cancel() { + currentPage.cancel(); + cancelled = true; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java new file mode 100644 index 00000000000..eb38df3e6b6 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous.reactive; + +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveResultSet; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.reactive.FailedReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class ContinuousCqlRequestReactiveProcessor + implements RequestProcessor, ContinuousReactiveResultSet> { + + public static final GenericType CONTINUOUS_REACTIVE_RESULT_SET = + GenericType.of(ContinuousReactiveResultSet.class); + + private final ContinuousCqlRequestAsyncProcessor asyncProcessor; + + public ContinuousCqlRequestReactiveProcessor(ContinuousCqlRequestAsyncProcessor asyncProcessor) { + this.asyncProcessor = asyncProcessor; + } + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof Statement && resultType.equals(CONTINUOUS_REACTIVE_RESULT_SET); + } + + @Override + public ContinuousReactiveResultSet process( + Statement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + return new DefaultContinuousReactiveResultSet( + () -> asyncProcessor.process(request, session, context, sessionLogPrefix)); + } + + @Override + public ContinuousReactiveResultSet newFailure(RuntimeException error) { + return new FailedReactiveResultSet(error); + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java new file mode 100644 index 00000000000..673cbe1777c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java @@ -0,0 +1,24 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous.reactive; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveResultSet; +import com.datastax.dse.driver.internal.core.cql.reactive.ReactiveResultSetBase; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class DefaultContinuousReactiveResultSet + extends ReactiveResultSetBase implements ContinuousReactiveResultSet { + + public DefaultContinuousReactiveResultSet( + Callable> firstPage) { + super(firstPage); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java new file mode 100644 index 00000000000..0e2b03cdbfb --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java @@ -0,0 +1,51 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class CqlRequestReactiveProcessor + implements RequestProcessor, ReactiveResultSet> { + + public static final GenericType REACTIVE_RESULT_SET = + GenericType.of(ReactiveResultSet.class); + + private final CqlRequestAsyncProcessor asyncProcessor; + + public CqlRequestReactiveProcessor(CqlRequestAsyncProcessor asyncProcessor) { + this.asyncProcessor = asyncProcessor; + } + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof Statement && resultType.equals(REACTIVE_RESULT_SET); + } + + @Override + public ReactiveResultSet process( + Statement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + return new DefaultReactiveResultSet( + () -> asyncProcessor.process(request, session, context, sessionLogPrefix)); + } + + @Override + public ReactiveResultSet newFailure(RuntimeException error) { + return new FailedReactiveResultSet(error); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java new file mode 100644 index 00000000000..3e50e1ed116 
--- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java @@ -0,0 +1,20 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class DefaultReactiveResultSet extends ReactiveResultSetBase { + + public DefaultReactiveResultSet(Callable> firstPage) { + super(firstPage); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java new file mode 100644 index 00000000000..e6e02a9c244 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java @@ -0,0 +1,557 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetAddress; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe +class DefaultReactiveRow implements ReactiveRow { + + private final Row row; + private final ExecutionInfo executionInfo; + + DefaultReactiveRow(@NonNull Row row, @NonNull ExecutionInfo executionInfo) { + this.row = row; + this.executionInfo = executionInfo; + } + + @NonNull + @Override + public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return row.getColumnDefinitions(); + } + + @Override + public 
ByteBuffer getBytesUnsafe(int i) { + return row.getBytesUnsafe(i); + } + + @Override + public boolean isNull(int i) { + return row.isNull(i); + } + + @Override + public T get(int i, TypeCodec codec) { + return row.get(i, codec); + } + + @Override + public T get(int i, GenericType targetType) { + return row.get(i, targetType); + } + + @Override + public T get(int i, Class targetClass) { + return row.get(i, targetClass); + } + + @Override + public Object getObject(int i) { + return row.getObject(i); + } + + @Override + public boolean getBoolean(int i) { + return row.getBoolean(i); + } + + @Override + public byte getByte(int i) { + return row.getByte(i); + } + + @Override + public double getDouble(int i) { + return row.getDouble(i); + } + + @Override + public float getFloat(int i) { + return row.getFloat(i); + } + + @Override + public int getInt(int i) { + return row.getInt(i); + } + + @Override + public long getLong(int i) { + return row.getLong(i); + } + + @Override + public short getShort(int i) { + return row.getShort(i); + } + + @Override + public Instant getInstant(int i) { + return row.getInstant(i); + } + + @Override + public LocalDate getLocalDate(int i) { + return row.getLocalDate(i); + } + + @Override + public LocalTime getLocalTime(int i) { + return row.getLocalTime(i); + } + + @Override + public ByteBuffer getByteBuffer(int i) { + return row.getByteBuffer(i); + } + + @Override + public String getString(int i) { + return row.getString(i); + } + + @Override + public BigInteger getBigInteger(int i) { + return row.getBigInteger(i); + } + + @Override + public BigDecimal getBigDecimal(int i) { + return row.getBigDecimal(i); + } + + @Override + public UUID getUuid(int i) { + return row.getUuid(i); + } + + @Override + public InetAddress getInetAddress(int i) { + return row.getInetAddress(i); + } + + @Override + public CqlDuration getCqlDuration(int i) { + return row.getCqlDuration(i); + } + + @Override + public Token getToken(int i) { + return row.getToken(i); + 
} + + @Override + public List getList(int i, @NonNull Class elementsClass) { + return row.getList(i, elementsClass); + } + + @Override + public Set getSet(int i, @NonNull Class elementsClass) { + return row.getSet(i, elementsClass); + } + + @Override + public Map getMap(int i, @NonNull Class keyClass, @NonNull Class valueClass) { + return row.getMap(i, keyClass, valueClass); + } + + @Override + public UdtValue getUdtValue(int i) { + return row.getUdtValue(i); + } + + @Override + public TupleValue getTupleValue(int i) { + return row.getTupleValue(i); + } + + @Override + public int size() { + return row.size(); + } + + @NonNull + @Override + public DataType getType(int i) { + return row.getType(i); + } + + @NonNull + @Override + public CodecRegistry codecRegistry() { + return row.codecRegistry(); + } + + @NonNull + @Override + public ProtocolVersion protocolVersion() { + return row.protocolVersion(); + } + + @Override + public ByteBuffer getBytesUnsafe(@NonNull String name) { + return row.getBytesUnsafe(name); + } + + @Override + public boolean isNull(@NonNull String name) { + return row.isNull(name); + } + + @Override + public T get(@NonNull String name, @NonNull TypeCodec codec) { + return row.get(name, codec); + } + + @Override + public T get(@NonNull String name, @NonNull GenericType targetType) { + return row.get(name, targetType); + } + + @Override + public T get(@NonNull String name, @NonNull Class targetClass) { + return row.get(name, targetClass); + } + + @Override + public Object getObject(@NonNull String name) { + return row.getObject(name); + } + + @Override + public boolean getBoolean(@NonNull String name) { + return row.getBoolean(name); + } + + @Override + public byte getByte(@NonNull String name) { + return row.getByte(name); + } + + @Override + public double getDouble(@NonNull String name) { + return row.getDouble(name); + } + + @Override + public float getFloat(@NonNull String name) { + return row.getFloat(name); + } + + @Override + public int 
getInt(@NonNull String name) { + return row.getInt(name); + } + + @Override + public long getLong(@NonNull String name) { + return row.getLong(name); + } + + @Override + public short getShort(@NonNull String name) { + return row.getShort(name); + } + + @Override + public Instant getInstant(@NonNull String name) { + return row.getInstant(name); + } + + @Override + public LocalDate getLocalDate(@NonNull String name) { + return row.getLocalDate(name); + } + + @Override + public LocalTime getLocalTime(@NonNull String name) { + return row.getLocalTime(name); + } + + @Override + public ByteBuffer getByteBuffer(@NonNull String name) { + return row.getByteBuffer(name); + } + + @Override + public String getString(@NonNull String name) { + return row.getString(name); + } + + @Override + public BigInteger getBigInteger(@NonNull String name) { + return row.getBigInteger(name); + } + + @Override + public BigDecimal getBigDecimal(@NonNull String name) { + return row.getBigDecimal(name); + } + + @Override + public UUID getUuid(@NonNull String name) { + return row.getUuid(name); + } + + @Override + public InetAddress getInetAddress(@NonNull String name) { + return row.getInetAddress(name); + } + + @Override + public CqlDuration getCqlDuration(@NonNull String name) { + return row.getCqlDuration(name); + } + + @Override + public Token getToken(@NonNull String name) { + return row.getToken(name); + } + + @Override + public List getList(@NonNull String name, @NonNull Class elementsClass) { + return row.getList(name, elementsClass); + } + + @Override + public Set getSet(@NonNull String name, @NonNull Class elementsClass) { + return row.getSet(name, elementsClass); + } + + @Override + public Map getMap( + @NonNull String name, @NonNull Class keyClass, @NonNull Class valueClass) { + return row.getMap(name, keyClass, valueClass); + } + + @Override + public UdtValue getUdtValue(@NonNull String name) { + return row.getUdtValue(name); + } + + @Override + public TupleValue 
getTupleValue(@NonNull String name) { + return row.getTupleValue(name); + } + + @Override + public int firstIndexOf(@NonNull String name) { + return row.firstIndexOf(name); + } + + @NonNull + @Override + public DataType getType(@NonNull String name) { + return row.getType(name); + } + + @Override + public ByteBuffer getBytesUnsafe(@NonNull CqlIdentifier id) { + return row.getBytesUnsafe(id); + } + + @Override + public boolean isNull(@NonNull CqlIdentifier id) { + return row.isNull(id); + } + + @Override + public T get(@NonNull CqlIdentifier id, @NonNull TypeCodec codec) { + return row.get(id, codec); + } + + @Override + public T get(@NonNull CqlIdentifier id, @NonNull GenericType targetType) { + return row.get(id, targetType); + } + + @Override + public T get(@NonNull CqlIdentifier id, @NonNull Class targetClass) { + return row.get(id, targetClass); + } + + @Override + public Object getObject(@NonNull CqlIdentifier id) { + return row.getObject(id); + } + + @Override + public boolean getBoolean(@NonNull CqlIdentifier id) { + return row.getBoolean(id); + } + + @Override + public byte getByte(@NonNull CqlIdentifier id) { + return row.getByte(id); + } + + @Override + public double getDouble(@NonNull CqlIdentifier id) { + return row.getDouble(id); + } + + @Override + public float getFloat(@NonNull CqlIdentifier id) { + return row.getFloat(id); + } + + @Override + public int getInt(@NonNull CqlIdentifier id) { + return row.getInt(id); + } + + @Override + public long getLong(@NonNull CqlIdentifier id) { + return row.getLong(id); + } + + @Override + public short getShort(@NonNull CqlIdentifier id) { + return row.getShort(id); + } + + @Override + public Instant getInstant(@NonNull CqlIdentifier id) { + return row.getInstant(id); + } + + @Override + public LocalDate getLocalDate(@NonNull CqlIdentifier id) { + return row.getLocalDate(id); + } + + @Override + public LocalTime getLocalTime(@NonNull CqlIdentifier id) { + return row.getLocalTime(id); + } + + @Override + public 
ByteBuffer getByteBuffer(@NonNull CqlIdentifier id) { + return row.getByteBuffer(id); + } + + @Override + public String getString(@NonNull CqlIdentifier id) { + return row.getString(id); + } + + @Override + public BigInteger getBigInteger(@NonNull CqlIdentifier id) { + return row.getBigInteger(id); + } + + @Override + public BigDecimal getBigDecimal(@NonNull CqlIdentifier id) { + return row.getBigDecimal(id); + } + + @Override + public UUID getUuid(@NonNull CqlIdentifier id) { + return row.getUuid(id); + } + + @Override + public InetAddress getInetAddress(@NonNull CqlIdentifier id) { + return row.getInetAddress(id); + } + + @Override + public CqlDuration getCqlDuration(@NonNull CqlIdentifier id) { + return row.getCqlDuration(id); + } + + @Override + public Token getToken(@NonNull CqlIdentifier id) { + return row.getToken(id); + } + + @Override + public List getList(@NonNull CqlIdentifier id, @NonNull Class elementsClass) { + return row.getList(id, elementsClass); + } + + @Override + public Set getSet(@NonNull CqlIdentifier id, @NonNull Class elementsClass) { + return row.getSet(id, elementsClass); + } + + @Override + public Map getMap( + @NonNull CqlIdentifier id, @NonNull Class keyClass, @NonNull Class valueClass) { + return row.getMap(id, keyClass, valueClass); + } + + @Override + public UdtValue getUdtValue(@NonNull CqlIdentifier id) { + return row.getUdtValue(id); + } + + @Override + public TupleValue getTupleValue(@NonNull CqlIdentifier id) { + return row.getTupleValue(id); + } + + @Override + public int firstIndexOf(@NonNull CqlIdentifier id) { + return row.firstIndexOf(id); + } + + @NonNull + @Override + public DataType getType(@NonNull CqlIdentifier id) { + return row.getType(id); + } + + @Override + public boolean isDetached() { + return row.isDetached(); + } + + @Override + public void attach(@NonNull AttachmentPoint attachmentPoint) { + row.attach(attachmentPoint); + } + + @Override + public String toString() { + return "DefaultReactiveRow{row=" + row + 
", executionInfo=" + executionInfo + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java new file mode 100644 index 00000000000..b48a057d36a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import org.reactivestreams.Subscription; + +class EmptySubscription implements Subscription { + + static final EmptySubscription INSTANCE = new EmptySubscription(); + + private EmptySubscription() {} + + @Override + public void request(long n) {} + + @Override + public void cancel() {} +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java new file mode 100644 index 00000000000..d069e41d227 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import java.util.Objects; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; + +/** + * A {@link Publisher} that immediately signals the error passed at instantiation to all its + * subscribers. 
+ */ +public class FailedPublisher implements Publisher { + + protected final Throwable error; + + public FailedPublisher(Throwable error) { + this.error = error; + } + + @Override + public void subscribe(Subscriber subscriber) { + Objects.requireNonNull(subscriber, "Subscriber cannot be null"); + // Per rule 1.9, we need to call onSubscribe before any other signal. Pass a dummy + // subscription since we know it will never be used. + subscriber.onSubscribe(EmptySubscription.INSTANCE); + // Signal the error to the subscriber right away. This is safe to do because per rule 2.10, + // a Subscriber MUST be prepared to receive an onError signal without a preceding + // Subscription.request(long n) call. + // Also, per rule 2.13: onError MUST return normally except when any provided parameter + // is null (which is not the case here); so we don't need to care about catching errors here. + subscriber.onError(error); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java new file mode 100644 index 00000000000..9274b2f1f25 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java @@ -0,0 +1,49 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise.
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.reactivestreams.Publisher; + +/** + * A {@link ReactiveResultSet} that immediately signals the error passed at instantiation to all its + * subscribers. + * + * @see CqlRequestReactiveProcessor#newFailure(java.lang.RuntimeException) + * @see ContinuousCqlRequestReactiveProcessor#newFailure(java.lang.RuntimeException) + */ +public class FailedReactiveResultSet extends FailedPublisher + implements ReactiveResultSet, ContinuousReactiveResultSet { + + public FailedReactiveResultSet(Throwable error) { + super(error); + } + + @NonNull + @Override + public Publisher getColumnDefinitions() { + return new FailedPublisher<>(error); + } + + @NonNull + @Override + public Publisher getExecutionInfos() { + return new FailedPublisher<>(error); + } + + @NonNull + @Override + public Publisher wasApplied() { + return new FailedPublisher<>(error); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java new file mode 100644 index 00000000000..966004a97d3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.concurrent.atomic.AtomicLong; + +final class ReactiveOperators { + + /** + * Atomically adds the given value to the given AtomicLong, bound to Long.MAX_VALUE. + * + * @param current the current value. + * @param toAdd the delta to add. + */ + static void addCap(@NonNull AtomicLong current, long toAdd) { + long r, u; + do { + r = current.get(); + if (r == Long.MAX_VALUE) { + return; + } + u = r + toAdd; + if (u < 0L) { + u = Long.MAX_VALUE; + } + } while (!current.compareAndSet(r, u)); + } + + /** + * Atomically subtracts the given value from the given AtomicLong, bound to 0. + * + * @param current the current value. + * @param toSub the delta to subtract. + */ + static void subCap(@NonNull AtomicLong current, long toSub) { + long r, u; + do { + r = current.get(); + if (r == 0 || r == Long.MAX_VALUE) { + return; + } + u = Math.max(r - toSub, 0); + } while (!current.compareAndSet(r, u)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java new file mode 100644 index 00000000000..514467d1d8c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.AsyncPagingIterable; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.atomic.AtomicBoolean; +import net.jcip.annotations.ThreadSafe; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; + +@ThreadSafe +public abstract class ReactiveResultSetBase> + implements ReactiveResultSet { + + private final Callable> firstPage; + + private final AtomicBoolean once = new AtomicBoolean(false); + + private final SimpleUnicastProcessor columnDefinitionsPublisher = + new SimpleUnicastProcessor<>(); + + private final SimpleUnicastProcessor executionInfosPublisher = + new SimpleUnicastProcessor<>(); + + private final SimpleUnicastProcessor wasAppliedPublisher = + new SimpleUnicastProcessor<>(); + + protected ReactiveResultSetBase(Callable> firstPage) { + this.firstPage = firstPage; + } + + @Override + public void subscribe(@NonNull Subscriber subscriber) { + // As per rule 1.9, we need to throw an NPE if subscriber is null + Objects.requireNonNull(subscriber, "Subscriber cannot be null"); + // As per rule 1.11, this publisher is allowed to support only one subscriber. 
+ if (once.compareAndSet(false, true)) { + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + subscriber, columnDefinitionsPublisher, executionInfosPublisher, wasAppliedPublisher); + try { + subscriber.onSubscribe(subscription); + // must be done after onSubscribe + subscription.start(firstPage); + } catch (Throwable t) { + // As per rule 2.13: In the case that this rule is violated, + // any associated Subscription to the Subscriber MUST be considered as + // cancelled, and the caller MUST raise this error condition in a fashion + // that is adequate for the runtime environment. + subscription.doOnError( + new IllegalStateException( + subscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onSubscribe.", + t)); + } + } else { + subscriber.onSubscribe(EmptySubscription.INSTANCE); + subscriber.onError( + new IllegalStateException("This publisher does not support multiple subscriptions")); + } + // As per 2.13, this method must return normally (i.e. not throw) + } + + @NonNull + @Override + public Publisher getColumnDefinitions() { + return columnDefinitionsPublisher; + } + + @NonNull + @Override + public Publisher getExecutionInfos() { + return executionInfosPublisher; + } + + @NonNull + @Override + public Publisher wasApplied() { + return wasAppliedPublisher; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java new file mode 100644 index 00000000000..fc690661845 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java @@ -0,0 +1,485 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.internal.core.util.concurrent.BoundedConcurrentQueue; +import com.datastax.oss.driver.api.core.AsyncPagingIterable; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Iterator; +import java.util.Objects; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import net.jcip.annotations.ThreadSafe; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A single-subscriber subscription that executes one single query and emits all the returned rows. + * + *

      This class can handle both continuous and non-continuous result sets. + */ +@ThreadSafe +public class ReactiveResultSetSubscription> + implements Subscription { + + private static final Logger LOG = LoggerFactory.getLogger(ReactiveResultSetSubscription.class); + + private static final int MAX_ENQUEUED_PAGES = 4; + + /** Tracks the number of items requested by the subscriber. */ + private final AtomicLong requested = new AtomicLong(0); + + /** The pages received so far, with a maximum of MAX_ENQUEUED_PAGES elements. */ + private final BoundedConcurrentQueue> pages = + new BoundedConcurrentQueue<>(MAX_ENQUEUED_PAGES); + + /** + * Used to signal that a thread is currently draining, i.e., emitting items to the subscriber. + * When it is zero, that means there is no ongoing emission. This mechanism effectively serializes + * access to the drain() method, and also keeps track of missed attempts to enter it, since each + * thread that attempts to drain will increment this counter. + * + * @see #drain() + */ + private final AtomicInteger draining = new AtomicInteger(0); + + /** + * Waited upon by the driver and completed when the subscriber requests its first item. + * + *

      Used to hold off emitting results until the subscriber issues its first request for items. + * Since this future is only completed from {@link #request(long)}, this effectively conditions + * the enqueueing of the first page to the reception of the subscriber's first request. + * + *

      This mechanism avoids sending terminal signals before a request is made when the stream is + * empty. Note that as per 2.9, "a Subscriber MUST be prepared to receive an onComplete signal + * with or without a preceding Subscription.request(long n) call." However, the TCK considers it + * as unfair behavior. + * + * @see #start(Callable) + * @see #request(long) + */ + private final CompletableFuture firstSubscriberRequestArrived = new CompletableFuture<>(); + + /** non-final because it has to be de-referenced, see {@link #clear()}. */ + private volatile Subscriber mainSubscriber; + + private volatile Subscriber columnDefinitionsSubscriber; + + private volatile Subscriber executionInfosSubscriber; + + private volatile Subscriber wasAppliedSubscriber; + + /** + * Set to true when the subscription is cancelled, which happens when an error is encountered, + * when the result set is fully consumed and the subscription terminates, or when the subscriber + * manually calls {@link #cancel()}. + */ + private volatile boolean cancelled = false; + + ReactiveResultSetSubscription( + @NonNull Subscriber mainSubscriber, + @NonNull Subscriber columnDefinitionsSubscriber, + @NonNull Subscriber executionInfosSubscriber, + @NonNull Subscriber wasAppliedSubscriber) { + this.mainSubscriber = mainSubscriber; + this.columnDefinitionsSubscriber = columnDefinitionsSubscriber; + this.executionInfosSubscriber = executionInfosSubscriber; + this.wasAppliedSubscriber = wasAppliedSubscriber; + } + + /** + * Starts the query execution. + * + *

      Must be called immediately after creating the subscription, but after {@link + * Subscriber#onSubscribe(Subscription)}. + * + * @param firstPage The future that, when complete, will produce the first page. + */ + void start(@NonNull Callable> firstPage) { + firstSubscriberRequestArrived.thenAccept( + (aVoid) -> fetchNextPageAndEnqueue(new Page<>(firstPage), true)); + } + + @Override + public void request(long n) { + // As per 3.6: after the Subscription is cancelled, additional + // calls to request() MUST be NOPs. + if (!cancelled) { + if (n < 1) { + // Validate request as per rule 3.9 + doOnError( + new IllegalArgumentException( + mainSubscriber + + " violated the Reactive Streams rule 3.9 by requesting a non-positive number of elements.")); + } else { + // As per rule 3.17, when demand overflows Long.MAX_VALUE + // it can be treated as "effectively unbounded" + ReactiveOperators.addCap(requested, n); + // Set the first future to true if not done yet. + // This will make the first page of results ready for consumption, + // see start(). + // As per 2.7 it is the subscriber's responsibility to provide + // external synchronization when calling request(), + // so the check-then-act idiom below is good enough + // (and besides, complete() is idempotent). + if (!firstSubscriberRequestArrived.isDone()) { + firstSubscriberRequestArrived.complete(null); + } + drain(); + } + } + } + + @Override + public void cancel() { + // As per 3.5: Subscription.cancel() MUST respect the responsiveness of + // its caller by returning in a timely manner, MUST be idempotent and + // MUST be thread-safe. + if (!cancelled) { + cancelled = true; + if (draining.getAndIncrement() == 0) { + // If nobody is draining, clear now; + // otherwise, the draining thread will notice + // that the cancelled flag was set + // and will clear for us. + clear(); + } + } + } + + /** + * Attempts to drain available items, i.e. emit them to the subscriber. + * + *

      Access to this method is serialized by the field {@link #draining}: only one thread at a + * time can drain, but threads that attempt to drain while other thread is already draining + * increment that field; the draining thread, before finishing its work, checks for such failed + * attempts and triggers another round of draining if that was the case. + * + *

      The loop is interrupted when 1) the requested amount has been met or 2) when there are no + * more items readily available or 3) the subscription has been cancelled. + * + *

      The loop also checks for stream exhaustion and emits a terminal {@code onComplete} signal in + * this case. + * + *

      This method may run on a driver IO thread when invoked from {@link + * #fetchNextPageAndEnqueue(Page, boolean)}, or on a subscriber thread, when invoked from {@link + * #request(long)}. + */ + @SuppressWarnings("ConditionalBreakInInfiniteLoop") + private void drain() { + // As per 3.4: this method SHOULD respect the responsiveness + // of its caller by returning in a timely manner. + // We accomplish this by a wait-free implementation. + if (draining.getAndIncrement() != 0) { + // Someone else is already draining, so do nothing, + // the other thread will notice that we attempted to drain. + // This also allows to abide by rule 3.3 and avoid + // cycles such as request() -> onNext() -> request() etc. + return; + } + int missed = 1; + // Note: when termination is detected inside this loop, + // we MUST call clear() manually. + for (; ; ) { + // The requested number of items at this point + long r = requested.get(); + // The number of items emitted thus far + long emitted = 0L; + while (emitted != r) { + if (cancelled) { + clear(); + return; + } + Object result; + try { + result = tryNext(); + } catch (Throwable t) { + doOnError(t); + clear(); + return; + } + if (result == null) { + break; + } + if (result instanceof Throwable) { + doOnError((Throwable) result); + clear(); + return; + } + doOnNext((ReactiveRow) result); + emitted++; + } + if (isExhausted()) { + doOnComplete(); + clear(); + return; + } + if (cancelled) { + clear(); + return; + } + if (emitted != 0) { + // if any item was emitted, adjust the requested field + ReactiveOperators.subCap(requested, emitted); + } + // if another thread tried to call drain() while we were busy, + // then we should do another drain round. + missed = draining.addAndGet(-missed); + if (missed == 0) { + break; + } + } + } + + /** + * Tries to return the next item, if one is readily available, and returns {@code null} otherwise. + * + *

      Cannot run concurrently due to the {@link #draining} field. + */ + @Nullable + private Object tryNext() { + Page current = pages.peek(); + if (current != null) { + if (current.hasMoreRows()) { + return current.nextRow(); + } else if (current.hasMorePages()) { + // Discard current page as it is consumed. + // Don't discard the last page though as we need it + // to test isExhausted(). It will be GC'ed when a terminal signal + // is issued anyway, so that's no big deal. + if (pages.poll() == null) { + throw new AssertionError("Queue is empty, this should not happen"); + } + current = pages.peek(); + // if the next page is readily available, + // serve its first row now, no need to wait + // for the next drain. + if (current != null && current.hasMoreRows()) { + return current.nextRow(); + } + } + } + // No items available right now. + return null; + } + + /** + * Returns {@code true} when the entire stream has been consumed and no more items can be emitted. + * When that is the case, a terminal signal is sent. + * + *

      Cannot run concurrently due to the draining field. + */ + private boolean isExhausted() { + Page current = pages.peek(); + // Note: current can only be null when: + // 1) we are waiting for the first page and it hasn't arrived yet; + // 2) we just discarded the current page, but the next page hasn't arrived yet. + // In any case, a null here means it is not the last page, since the last page + // stays in the queue until the very end of the operation. + return current != null && !current.hasMoreRows() && !current.hasMorePages(); + } + + /** + * Runs on a subscriber thread initially, see {@link #start(Callable)}. Subsequent executions run + * on the thread that completes the pair of futures [current.fetchNextPage, pages.offer] and + * enqueues. This can be a driver IO thread or a subscriber thread; in both cases, cannot run + * concurrently due to the fact that one can only fetch the next page when the current one is + * arrived and enqueued. + */ + private void fetchNextPageAndEnqueue(@NonNull Page current, boolean firstPage) { + current + .fetchNextPage() + // as soon as the response arrives, + // create the new page + .handle( + (rs, t) -> { + Page page; + if (t == null) { + page = toPage(rs); + executionInfosSubscriber.onNext(rs.getExecutionInfo()); + if (!page.hasMorePages()) { + executionInfosSubscriber.onComplete(); + } + if (firstPage) { + columnDefinitionsSubscriber.onNext(rs.getColumnDefinitions()); + columnDefinitionsSubscriber.onComplete(); + // Avoid calling wasApplied on empty pages as some implementations may throw + // IllegalStateException; if the page is empty, this wasn't a CAS query, in which + // case, as per the method's contract, wasApplied should be true. 
+ boolean wasApplied = rs.remaining() == 0 || rs.wasApplied(); + wasAppliedSubscriber.onNext(wasApplied); + wasAppliedSubscriber.onComplete(); + } + } else { + // Unwrap CompletionExceptions created by combined futures + if (t instanceof CompletionException) { + t = t.getCause(); + } + page = toErrorPage(t); + executionInfosSubscriber.onError(t); + if (firstPage) { + columnDefinitionsSubscriber.onError(t); + wasAppliedSubscriber.onError(t); + } + } + return page; + }) + .thenCompose(pages::offer) + .thenAccept( + page -> { + if (page.hasMorePages() && !cancelled) { + // preemptively fetch the next page, if available + fetchNextPageAndEnqueue(page, false); + } + drain(); + }); + } + + private void doOnNext(@NonNull ReactiveRow result) { + try { + mainSubscriber.onNext(result); + } catch (Throwable t) { + LOG.error( + mainSubscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onNext.", + t); + cancel(); + } + } + + private void doOnComplete() { + try { + // Then we signal onComplete as per rules 1.2 and 1.5 + mainSubscriber.onComplete(); + } catch (Throwable t) { + LOG.error( + mainSubscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onComplete.", + t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + + // package-private because it can be invoked by the publisher if the subscription handshake + // process fails. + void doOnError(@NonNull Throwable error) { + try { + // Then we signal the error downstream, as per rules 1.2 and 1.4. + mainSubscriber.onError(error); + } catch (Throwable t) { + t.addSuppressed(error); + LOG.error( + mainSubscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onError.", + t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + + private void clear() { + // We don't need these pages anymore and should not hold references + // to them. 
+ pages.clear(); + // As per 3.13, Subscription.cancel() MUST request the Publisher to + // eventually drop any references to the corresponding subscriber. + // Our own publishers do not keep references to this subscription, + // but downstream processors might do so, which is why we need to + // defensively clear the subscriber reference when we are done. + mainSubscriber = null; + columnDefinitionsSubscriber = null; + executionInfosSubscriber = null; + wasAppliedSubscriber = null; + } + + /** + * Converts the received result object into a {@link Page}. + * + * @param rs the result object to convert. + * @return a new page. + */ + @NonNull + private Page toPage(@NonNull ResultSetT rs) { + ExecutionInfo executionInfo = rs.getExecutionInfo(); + Iterator results = + Iterators.transform( + rs.currentPage().iterator(), + row -> new DefaultReactiveRow(Objects.requireNonNull(row), executionInfo)); + return new Page<>(results, rs.hasMorePages() ? rs::fetchNextPage : null); + } + + /** Converts the given error into a {@link Page}, containing the error as its only element. */ + @NonNull + private Page toErrorPage(@NonNull Throwable t) { + return new Page<>(Iterators.singletonIterator(t), null); + } + + /** + * A page object comprises an iterator over the page's results, and a future pointing to the next + * page (or {@code null}, if it's the last page). + */ + static class Page> { + + @NonNull final Iterator iterator; + + // A pointer to the next page, or null if this is the last page. 
+ @Nullable final Callable> nextPage; + + /** called only from start() */ + Page(@NonNull Callable> nextPage) { + this.iterator = Collections.emptyIterator(); + this.nextPage = nextPage; + } + + Page(@NonNull Iterator iterator, @Nullable Callable> nextPage) { + this.iterator = iterator; + this.nextPage = nextPage; + } + + boolean hasMorePages() { + return nextPage != null; + } + + @NonNull + CompletionStage fetchNextPage() { + try { + return Objects.requireNonNull(nextPage).call(); + } catch (Exception e) { + // This is a synchronous failure in the driver. + // It can happen in rare cases when the driver throws an exception instead of returning a + // failed future; e.g. if someone tries to execute a continuous paging request but the + // protocol version in use does not support it. + // We treat it as a failed future. + return CompletableFutures.failedFuture(e); + } + } + + boolean hasMoreRows() { + return iterator.hasNext(); + } + + @NonNull + Object nextRow() { + return iterator.next(); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java new file mode 100644 index 00000000000..3504c5c5688 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java @@ -0,0 +1,254 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import org.reactivestreams.Processor; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A simple {@link Processor} that receives items form an upstream publisher, stores them in an + * internal queue, then serves them to one single downstream subscriber. It does not support + * multiple subscriptions. + * + *

      Implementation note: this class is intended to serve as the common implementation for all + * secondary publishers exposed by the driver's reactive API, and in particular, for publishers of + * query metadata objects. Since such publishers are not critical, and usually only publish a + * handful of items, this implementation favors simplicity over efficiency (in particular, it uses + * an unbounded linked queue, but in practice there is no risk that this queue could grow + * uncontrollably). + * + * @param The type of elements received and emitted by this processor. + */ +public class SimpleUnicastProcessor + implements Processor, Subscription { + + private static final Logger LOG = LoggerFactory.getLogger(SimpleUnicastProcessor.class); + + private static final Object ON_COMPLETE = new Object(); + + private final Queue queue = new ConcurrentLinkedDeque<>(); + + private final AtomicBoolean once = new AtomicBoolean(false); + + private final AtomicInteger draining = new AtomicInteger(0); + + private final AtomicLong requested = new AtomicLong(0); + + private volatile Subscriber subscriber; + + private volatile boolean cancelled; + + @Override + public void subscribe(Subscriber subscriber) { + // As per rule 1.9, we need to throw an NPE if subscriber is null + Objects.requireNonNull(subscriber, "Subscriber cannot be null"); + // As per rule 1.11, this publisher supports only one subscriber. + if (once.compareAndSet(false, true)) { + this.subscriber = subscriber; + try { + subscriber.onSubscribe(this); + } catch (Throwable t) { + // As per rule 2.13: In the case that this rule is violated, + // any associated Subscription to the Subscriber MUST be considered as + // cancelled, and the caller MUST raise this error condition in a fashion + // that is adequate for the runtime environment. 
+ doOnError( + new IllegalStateException( + subscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onSubscribe.", + t)); + } + } else { + subscriber.onSubscribe(EmptySubscription.INSTANCE); + subscriber.onError( + new IllegalStateException("This publisher does not support multiple subscriptions")); + } + // As per 2.13, this method must return normally (i.e. not throw) + } + + @Override + public void onSubscribe(Subscription s) { + // no-op + } + + @Override + public void onNext(ElementT value) { + if (!cancelled) { + queue.offer(value); + drain(); + } + } + + @Override + public void onError(Throwable error) { + if (!cancelled) { + queue.offer(error); + drain(); + } + } + + @Override + public void onComplete() { + if (!cancelled) { + queue.offer(ON_COMPLETE); + drain(); + } + } + + @Override + public void request(long n) { + // As per 3.6: after the Subscription is cancelled, additional + // calls to request() MUST be NOPs. + if (!cancelled) { + if (n < 1) { + // Validate request as per rule 3.9 + doOnError( + new IllegalArgumentException( + subscriber + + " violated the Reactive Streams rule 3.9 by requesting a non-positive number of elements.")); + } else { + // As per rule 3.17, when demand overflows Long.MAX_VALUE + // it can be treated as "effectively unbounded" + ReactiveOperators.addCap(requested, n); + drain(); + } + } + } + + @Override + public void cancel() { + // As per 3.5: Subscription.cancel() MUST respect the responsiveness of + // its caller by returning in a timely manner, MUST be idempotent and + // MUST be thread-safe. + if (!cancelled) { + cancelled = true; + if (draining.getAndIncrement() == 0) { + // If nobody is draining, clear now; + // otherwise, the draining thread will notice + // that the cancelled flag was set + // and will clear for us. 
+ clear(); + } + } + } + + @SuppressWarnings("ConditionalBreakInInfiniteLoop") + private void drain() { + if (draining.getAndIncrement() != 0) { + return; + } + int missed = 1; + for (; ; ) { + // Note: when termination is detected inside this loop, + // we MUST call clear() manually. + long requested = this.requested.get(); + long emitted = 0L; + while (requested != emitted) { + if (cancelled) { + clear(); + return; + } + Object t = queue.poll(); + if (t == null) { + break; + } + if (t instanceof Throwable) { + Throwable error = (Throwable) t; + doOnError(error); + clear(); + return; + } else if (t == ON_COMPLETE) { + doOnComplete(); + clear(); + return; + } else { + @SuppressWarnings("unchecked") + ElementT item = (ElementT) t; + doOnNext(item); + emitted++; + } + } + if (cancelled) { + clear(); + return; + } + if (emitted != 0) { + // if any item was emitted, adjust the requested field + ReactiveOperators.subCap(this.requested, emitted); + } + // if another thread tried to call drain() while we were busy, + // then we should do another drain round. + missed = draining.addAndGet(-missed); + if (missed == 0) { + break; + } + } + } + + private void doOnNext(@NonNull ElementT result) { + try { + subscriber.onNext(result); + } catch (Throwable t) { + LOG.error( + subscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onNext.", + t); + cancel(); + } + } + + private void doOnComplete() { + try { + // Then we signal onComplete as per rules 1.2 and 1.5 + subscriber.onComplete(); + } catch (Throwable t) { + LOG.error( + subscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onComplete.", + t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + + private void doOnError(@NonNull Throwable error) { + try { + // Then we signal the error downstream, as per rules 1.2 and 1.4. 
+ subscriber.onError(error); + } catch (Throwable t) { + t.addSuppressed(error); + LOG.error( + subscriber + + " violated the Reactive Streams rule 2.13 by throwing an exception from onError.", + t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + + private void clear() { + // We don't need the elements anymore and should not hold references + // to them. + queue.clear(); + // As per 3.13, Subscription.cancel() MUST request the Publisher to + // eventually drop any references to the corresponding subscriber. + // Our own publishers do not keep references to this subscription, + // but downstream processors might do so, which is why we need to + // defensively clear the subscriber reference when we are done. + subscriber = null; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java new file mode 100644 index 00000000000..18e851f23ea --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java @@ -0,0 +1,191 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.esri.core.geometry.GeometryException; +import com.esri.core.geometry.SpatialReference; +import com.esri.core.geometry.ogc.OGCGeometry; +import com.esri.core.geometry.ogc.OGCLineString; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.nio.ByteBuffer; +import net.jcip.annotations.Immutable; + +@Immutable +public abstract class DefaultGeometry implements Geometry, Serializable { + + private static final long serialVersionUID = 1L; + + /** + * Default spatial reference for Well Known Text / Well Known Binary. + * + *

      4326 is the EPSG identifier of the World Geodetic System (WGS) in + * its later revision, WGS 84. + */ + public static final SpatialReference SPATIAL_REFERENCE_4326 = SpatialReference.create(4326); + + @NonNull + public static T fromOgcWellKnownText( + @NonNull String source, @NonNull Class klass) { + OGCGeometry geometry; + try { + geometry = OGCGeometry.fromText(source); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage()); + } + validateType(geometry, klass); + return klass.cast(geometry); + } + + @NonNull + public static T fromOgcWellKnownBinary( + @NonNull ByteBuffer source, @NonNull Class klass) { + OGCGeometry geometry; + try { + geometry = OGCGeometry.fromBinary(source); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException(e.getMessage()); + } + validateType(geometry, klass); + return klass.cast(geometry); + } + + @NonNull + public static T fromOgcGeoJson( + @NonNull String source, @NonNull Class klass) { + OGCGeometry geometry; + try { + geometry = OGCGeometry.fromGeoJson(source); + } catch (Exception e) { + throw new IllegalArgumentException(e.getMessage()); + } + validateType(geometry, klass); + return klass.cast(geometry); + } + + private static void validateType(OGCGeometry geometry, Class klass) { + if (!geometry.getClass().equals(klass)) { + throw new IllegalArgumentException( + String.format( + "%s is not of type %s", geometry.getClass().getSimpleName(), klass.getSimpleName())); + } + } + + private final OGCGeometry ogcGeometry; + + protected DefaultGeometry(@NonNull OGCGeometry ogcGeometry) { + this.ogcGeometry = ogcGeometry; + Preconditions.checkNotNull(ogcGeometry); + validateOgcGeometry(ogcGeometry); + } + + private static void validateOgcGeometry(OGCGeometry geometry) { + try { + if (geometry.is3D()) { + throw new IllegalArgumentException(String.format("'%s' is not 2D", geometry.asText())); + } + if (!geometry.isSimple()) { + throw new IllegalArgumentException( + 
String.format( + "'%s' is not simple. Points and edges cannot self-intersect.", geometry.asText())); + } + } catch (GeometryException e) { + throw new IllegalArgumentException("Invalid geometry" + e.getMessage()); + } + } + + @NonNull + public static ImmutableList getPoints(@NonNull OGCLineString lineString) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < lineString.numPoints(); i++) { + builder.add(new DefaultPoint(lineString.pointN(i))); + } + return builder.build(); + } + + protected static com.esri.core.geometry.Point toEsri(Point p) { + return new com.esri.core.geometry.Point(p.X(), p.Y()); + } + + @NonNull + public OGCGeometry getOgcGeometry() { + return ogcGeometry; + } + + @NonNull + public com.esri.core.geometry.Geometry getEsriGeometry() { + return ogcGeometry.getEsriGeometry(); + } + + @NonNull + @Override + public String asWellKnownText() { + return ogcGeometry.asText(); + } + + @NonNull + @Override + public ByteBuffer asWellKnownBinary() { + return WkbUtil.asLittleEndianBinary(ogcGeometry); + } + + @NonNull + @Override + public String asGeoJson() { + return ogcGeometry.asGeoJson(); + } + + @Override + public boolean contains(@NonNull Geometry other) { + Preconditions.checkNotNull(other); + if (other instanceof DefaultGeometry) { + DefaultGeometry defautlOther = (DefaultGeometry) other; + return getOgcGeometry().contains(defautlOther.getOgcGeometry()); + } + return false; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DefaultGeometry)) { + return false; + } + DefaultGeometry that = (DefaultGeometry) o; + return this.getOgcGeometry().equals(that.getOgcGeometry()); + } + + @Override + public int hashCode() { + // OGCGeometry subclasses do not overwrite Object.hashCode() + // while com.esri.core.geometry.Geometry subclasses usually do, + // so use these instead; this is consistent with equals + // because OGCGeometry.equals() actually compare between + // 
com.esri.core.geometry.Geometry objects + return getEsriGeometry().hashCode(); + } + + // Should never be called since we serialize a proxy (see subclasses) + @SuppressWarnings("UnusedVariable") + private void readObject(ObjectInputStream stream) throws InvalidObjectException { + throw new InvalidObjectException("Proxy required"); + } + + @Override + public String toString() { + return asWellKnownText(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java new file mode 100644 index 00000000000..d50db16eadb --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java @@ -0,0 +1,57 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.esri.core.geometry.Polyline; +import com.esri.core.geometry.ogc.OGCLineString; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultLineString extends DefaultGeometry implements LineString { + + private static final long serialVersionUID = 1280189361978382248L; + + private static OGCLineString fromPoints(Point p1, Point p2, Point... 
pn) { + Polyline polyline = new Polyline(toEsri(p1), toEsri(p2)); + for (Point p : pn) { + polyline.lineTo(toEsri(p)); + } + return new OGCLineString(polyline, 0, DefaultGeometry.SPATIAL_REFERENCE_4326); + } + + private final List points; + + public DefaultLineString(@NonNull Point p1, @NonNull Point p2, @NonNull Point... pn) { + super(fromPoints(p1, p2, pn)); + this.points = ImmutableList.builder().add(p1).add(p2).add(pn).build(); + } + + public DefaultLineString(@NonNull OGCLineString lineString) { + super(lineString); + this.points = getPoints(lineString); + } + + @NonNull + @Override + public List getPoints() { + return points; + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData a single byte array containing the Well-Known Binary representation. + */ + private Object writeReplace() { + return new WkbSerializationProxy(this.asWellKnownBinary()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java new file mode 100644 index 00000000000..a9d853dc5a8 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.esri.core.geometry.ogc.OGCPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultPoint extends DefaultGeometry implements Point { + + private static final long serialVersionUID = -8337622213980781285L; + + public DefaultPoint(double x, double y) { + this( + new OGCPoint( + new com.esri.core.geometry.Point(x, y), DefaultGeometry.SPATIAL_REFERENCE_4326)); + } + + public DefaultPoint(@NonNull OGCPoint point) { + super(point); + } + + @NonNull + @Override + public OGCPoint getOgcGeometry() { + return (OGCPoint) super.getOgcGeometry(); + } + + @Override + public double X() { + return getOgcGeometry().X(); + } + + @Override + public double Y() { + return getOgcGeometry().Y(); + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData a single byte array containing the Well-Known Binary representation. + */ + private Object writeReplace() { + return new WkbSerializationProxy(this.asWellKnownBinary()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java new file mode 100644 index 00000000000..244e7a3675f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.esri.core.geometry.Operator; +import com.esri.core.geometry.OperatorFactoryLocal; +import com.esri.core.geometry.OperatorSimplifyOGC; +import com.esri.core.geometry.ogc.OGCPolygon; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.List; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultPolygon extends DefaultGeometry implements Polygon { + + private static final long serialVersionUID = 3694196802962890314L; + + private final List exteriorRing; + private final List> interiorRings; + + public DefaultPolygon( + @NonNull Point p1, @NonNull Point p2, @NonNull Point p3, @NonNull Point... pn) { + super(fromPoints(p1, p2, p3, pn)); + this.exteriorRing = ImmutableList.builder().add(p1).add(p2).add(p3).add(pn).build(); + this.interiorRings = Collections.emptyList(); + } + + public DefaultPolygon(@NonNull OGCPolygon polygon) { + super(polygon); + if (polygon.isEmpty()) { + this.exteriorRing = ImmutableList.of(); + } else { + this.exteriorRing = getPoints(polygon.exteriorRing()); + } + + ImmutableList.Builder> builder = ImmutableList.builder(); + for (int i = 0; i < polygon.numInteriorRing(); i++) { + builder.add(getPoints(polygon.interiorRingN(i))); + } + this.interiorRings = builder.build(); + } + + @NonNull + @Override + public List getExteriorRing() { + return exteriorRing; + } + + @NonNull + @Override + public List> getInteriorRings() { + return interiorRings; + } + + private static OGCPolygon fromPoints(Point p1, Point p2, Point p3, Point... 
pn) { + com.esri.core.geometry.Polygon polygon = new com.esri.core.geometry.Polygon(); + addPath(polygon, p1, p2, p3, pn); + return new OGCPolygon(simplify(polygon), DefaultGeometry.SPATIAL_REFERENCE_4326); + } + + private static void addPath( + com.esri.core.geometry.Polygon polygon, Point p1, Point p2, Point p3, Point[] pn) { + + polygon.startPath(toEsri(p1)); + polygon.lineTo(toEsri(p2)); + polygon.lineTo(toEsri(p3)); + for (Point p : pn) { + polygon.lineTo(toEsri(p)); + } + } + + private static com.esri.core.geometry.Polygon simplify(com.esri.core.geometry.Polygon polygon) { + OperatorSimplifyOGC op = + (OperatorSimplifyOGC) + OperatorFactoryLocal.getInstance().getOperator(Operator.Type.SimplifyOGC); + return (com.esri.core.geometry.Polygon) + op.execute(polygon, DefaultGeometry.SPATIAL_REFERENCE_4326, true, null); + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData a single byte array containing the Well-Known Binary representation. + */ + private Object writeReplace() { + return new WkbSerializationProxy(this.asWellKnownBinary()); + } + + public static class Builder implements Polygon.Builder { + private final com.esri.core.geometry.Polygon polygon = new com.esri.core.geometry.Polygon(); + + @NonNull + @Override + public Builder addRing( + @NonNull Point p1, @NonNull Point p2, @NonNull Point p3, @NonNull Point... pn) { + addPath(polygon, p1, p2, p3, pn); + return this; + } + + /** + * Builds the polygon. + * + * @return the polygon. 
+ */ + @NonNull + @Override + public Polygon build() { + return new DefaultPolygon( + new OGCPolygon(simplify(polygon), DefaultGeometry.SPATIAL_REFERENCE_4326)); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java new file mode 100644 index 00000000000..99cca96c5b9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java @@ -0,0 +1,226 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static java.util.regex.Pattern.CASE_INSENSITIVE; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.esri.core.geometry.MultiPath; +import com.esri.core.geometry.ogc.OGCGeometry; +import com.esri.core.geometry.ogc.OGCPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import net.jcip.annotations.Immutable; + +/** + * The driver-side representation of DSE's {@code Geo.distance}. + * + *

      This is a circle in a two-dimensional XY plane represented by its center point and radius. It + * is used as a search criteria to determine whether or not another geospatial object lies within a + * circular area. + * + *

      Note that this shape has no equivalent in the OGC and GeoJSON standards: as a consequence, + * {@link #asWellKnownText()} returns a custom format, and {@link #getOgcGeometry()}, {@link + * #asWellKnownBinary()}, and {@link #asGeoJson()} throw {@link UnsupportedOperationException}. + * + *

      Unlike other geo types, this class is never exposed directly to driver clients: it is used + * internally by {@linkplain Geo#inside(Point, double) geo predicates}, but cannot be a column type, + * nor appear in CQL or graph results. Therefore it doesn't have a public-facing interface, nor a + * built-in codec. + */ +@Immutable +public class Distance extends DefaultGeometry { + + private static final Pattern WKT_PATTERN = + Pattern.compile( + "distance *\\( *\\( *([\\d\\.-]+) *([\\d+\\.-]+) *\\) *([\\d+\\.-]+) *\\)", + CASE_INSENSITIVE); + + /** + * Creates a distance from its Well-known + * Text (WKT) representation. + * + * @param source the Well-known Text representation to parse. + * @return the point represented by the WKT. + * @throws IllegalArgumentException if the string does not contain a valid Well-known Text + * representation. + * @see Distance#asWellKnownText() + */ + @NonNull + public static Distance fromWellKnownText(@NonNull String source) { + Matcher matcher = WKT_PATTERN.matcher(source.trim()); + if (matcher.matches() && matcher.groupCount() == 3) { + try { + return new Distance( + new DefaultPoint( + Double.parseDouble(matcher.group(1)), Double.parseDouble(matcher.group(2))), + Double.parseDouble(matcher.group(3))); + } catch (NumberFormatException var3) { + throw new IllegalArgumentException(String.format("Unable to parse %s", source)); + } + } else { + throw new IllegalArgumentException(String.format("Unable to parse %s", source)); + } + } + + private final DefaultPoint center; + + private final double radius; + + /** + * Creates a new distance with the given center and radius. + * + * @param center The center point. + * @param radius The radius of the circle representing distance. 
+ */ + public Distance(@NonNull Point center, double radius) { + super(((DefaultPoint) center).getOgcGeometry()); + Preconditions.checkNotNull(center); + Preconditions.checkArgument(radius >= 0.0D, "Radius must be >= 0 (got %s)", radius); + this.center = ((DefaultPoint) center); + this.radius = radius; + } + + /** @return The center point of the circle representing this distance. */ + @NonNull + public Point getCenter() { + return center; + } + + /** @return The radius of the circle representing this distance. */ + public double getRadius() { + return radius; + } + + /** + * Returns a Well-known Text (WKT) + * representation of this geospatial type. + * + *

      Since there is no Well-known Text specification for Distance, this returns a custom format + * of: DISTANCE((center.x center.y) radius) + * + * @return a Well-known Text representation of this object. + */ + @NonNull + @Override + public String asWellKnownText() { + return String.format("DISTANCE((%s %s) %s)", this.center.X(), this.center.Y(), this.radius); + } + + /** + * The distance type has no equivalent in the OGC standard: this method throws an {@link + * UnsupportedOperationException}. + */ + @NonNull + @Override + public OGCGeometry getOgcGeometry() { + throw new UnsupportedOperationException(); + } + + /** + * The distance type has no equivalent in the OGC standard: this method throws an {@link + * UnsupportedOperationException}. + */ + @NonNull + @Override + public ByteBuffer asWellKnownBinary() { + throw new UnsupportedOperationException(); + } + + /** + * The distance type has no equivalent in the GeoJSON standard: this method throws an {@link + * UnsupportedOperationException}. + */ + @Override + @NonNull + public String asGeoJson() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof Distance) { + Distance that = (Distance) other; + return Objects.equals(this.center, that.center) && this.radius == that.radius; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(center, radius); + } + + @SuppressWarnings("SimplifiableConditionalExpression") + @Override + public boolean contains(@NonNull Geometry geometry) { + return geometry instanceof Distance + ? this.containsDistance((Distance) geometry) + : geometry instanceof Point + ? this.containsPoint((Point) geometry) + : geometry instanceof LineString + ? this.containsLineString((LineString) geometry) + : geometry instanceof Polygon ? 
this.containsPolygon((Polygon) geometry) : false; + } + + private boolean containsDistance(Distance distance) { + return this.center.getOgcGeometry().distance(distance.center.getOgcGeometry()) + distance.radius + <= this.radius; + } + + private boolean containsPoint(Point point) { + return this.containsOGCPoint(((DefaultPoint) point).getOgcGeometry()); + } + + private boolean containsLineString(LineString lineString) { + MultiPath multiPath = + (MultiPath) ((DefaultLineString) lineString).getOgcGeometry().getEsriGeometry(); + return containsMultiPath(multiPath); + } + + private boolean containsPolygon(Polygon polygon) { + MultiPath multiPath = + (com.esri.core.geometry.Polygon) + ((DefaultPolygon) polygon).getOgcGeometry().getEsriGeometry(); + return containsMultiPath(multiPath); + } + + private boolean containsMultiPath(MultiPath multiPath) { + int numPoints = multiPath.getPointCount(); + for (int i = 0; i < numPoints; ++i) { + OGCPoint point = new OGCPoint(multiPath.getPoint(i), DefaultGeometry.SPATIAL_REFERENCE_4326); + if (!this.containsOGCPoint(point)) { + return false; + } + } + return true; + } + + private boolean containsOGCPoint(OGCPoint point) { + return this.center.getOgcGeometry().distance(point) <= this.radius; + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData Point (wkb) for center followed by double for radius + */ + private Object writeReplace() { + return new DistanceSerializationProxy(this); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java new file mode 100644 index 00000000000..43ee6d55ff6 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java @@ -0,0 +1,32 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import java.io.Serializable; + +/** + * A thin wrapper around {@link Distance}, that gets substituted during the serialization / + * deserialization process. This allows {@link Distance} to be immutable and reference centers' OGC + * counterpart. + */ +public class DistanceSerializationProxy implements Serializable { + + private static final long serialVersionUID = 1L; + + private final Point center; + private final double radius; + + public DistanceSerializationProxy(Distance distance) { + this.center = distance.getCenter(); + this.radius = distance.getRadius(); + } + + private Object readResolve() { + return new Distance(center, radius); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java new file mode 100644 index 00000000000..42e2aaf490b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java @@ -0,0 +1,49 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import net.jcip.annotations.Immutable; + +/** + * A thin wrapper around a Well-Known Binary byte sequence, that gets substituted for {@link + * DefaultGeometry} instances during the serialization / deserialization process. This allows + * immutable geometry classes. + */ +@Immutable +class WkbSerializationProxy implements Serializable { + + private static final long serialVersionUID = 1L; + + private final byte[] wkb; + + WkbSerializationProxy(ByteBuffer wkb) { + this.wkb = Bytes.getArray(wkb); + } + + private Object readResolve() { + ByteBuffer buffer = ByteBuffer.wrap(wkb).order(ByteOrder.nativeOrder()); + int type = buffer.getInt(1); + + if (type == 1) { + return Point.fromWellKnownBinary(buffer); + } else if (type == 2) { + return LineString.fromWellKnownBinary(buffer); + } else if (type == 3) { + return Polygon.fromWellKnownBinary(buffer); + } else { + throw new IllegalArgumentException( + "Unknown geospatial type code in serialized form: " + type); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java new file mode 100644 index 00000000000..c932b7eb879 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java @@ -0,0 +1,109 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import com.esri.core.geometry.Geometry; +import com.esri.core.geometry.Operator; +import com.esri.core.geometry.OperatorExportToWkb; +import com.esri.core.geometry.OperatorFactoryLocal; +import com.esri.core.geometry.WkbExportFlags; +import com.esri.core.geometry.ogc.OGCGeometry; +import com.esri.core.geometry.ogc.OGCLineString; +import com.esri.core.geometry.ogc.OGCPoint; +import com.esri.core.geometry.ogc.OGCPolygon; +import java.lang.reflect.Method; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +/** + * Helper class to serialize OGC geometries to Well-Known Binary, forcing the byte order to little + * endian. + * + *

      WKB encodes the byte order, so in theory we could send the buffer in any order, even if it is + * different from the server. However DSE server performs an additional validation step server-side: + * it deserializes to Java, serializes back to WKB, and then compares the original buffer to the + * "re-serialized" one. If they don't match, a MarshalException is thrown. So with a client in + * big-endian and a server in little-endian, we would get: + * + *

      + * incoming buffer (big endian) --> Java --> reserialized buffer (little endian)
      + * 
      + * + * Since the two buffers have a different endian-ness, they don't match. + * + *

      The ESRI library defaults to the native byte order and doesn't let us change it. Therefore: + * + *

        + *
      • if the native order is little endian (vast majority of cases), this class simply delegates + * to the appropriate public API method; + *
      • if the native order is big endian, it re-implements the serialization code, using + * reflection to get access to a private method. If reflection fails for any reason (updated + * ESRI library, security manager...), a runtime exception will be thrown. + *
      + */ +class WkbUtil { + + private static final boolean IS_NATIVE_LITTLE_ENDIAN = + ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN) + && System.getProperty("com.datastax.driver.dse.geometry.FORCE_REFLECTION_WKB") + == null; // only for tests + + static ByteBuffer asLittleEndianBinary(OGCGeometry ogcGeometry) { + if (IS_NATIVE_LITTLE_ENDIAN) { + return ogcGeometry.asBinary(); // the default implementation does what we want + } else { + int exportFlags; + if (ogcGeometry instanceof OGCPoint) { + exportFlags = 0; + } else if (ogcGeometry instanceof OGCLineString) { + exportFlags = WkbExportFlags.wkbExportLineString; + } else if (ogcGeometry instanceof OGCPolygon) { + exportFlags = WkbExportFlags.wkbExportPolygon; + } else { + throw new AssertionError("Unsupported type: " + ogcGeometry.getClass()); + } + + // Copy-pasted from OperatorExportToWkbLocal#execute, except for the flags and order + int size = exportToWKB(exportFlags, ogcGeometry.getEsriGeometry(), null); + ByteBuffer wkbBuffer = ByteBuffer.allocate(size).order(ByteOrder.LITTLE_ENDIAN); + exportToWKB(exportFlags, ogcGeometry.getEsriGeometry(), wkbBuffer); + return wkbBuffer; + } + } + + // Provides reflective access to the private static method OperatorExportToWkbLocal#exportToWKB + private static int exportToWKB(int exportFlags, Geometry geometry, ByteBuffer wkbBuffer) { + assert !IS_NATIVE_LITTLE_ENDIAN; + try { + return (Integer) exportToWKB.invoke(null, exportFlags, geometry, wkbBuffer); + } catch (Exception e) { + throw new RuntimeException( + "Couldn't invoke private method OperatorExportToWkbLocal#exportToWKB", e); + } + } + + private static final Method exportToWKB; + + static { + if (IS_NATIVE_LITTLE_ENDIAN) { + exportToWKB = null; // won't be used + } else { + try { + OperatorExportToWkb op = + (OperatorExportToWkb) + OperatorFactoryLocal.getInstance().getOperator(Operator.Type.ExportToWkb); + exportToWKB = + op.getClass() + .getDeclaredMethod("exportToWKB", int.class, Geometry.class, 
ByteBuffer.class); + exportToWKB.setAccessible(true); + } catch (NoSuchMethodException e) { + throw new RuntimeException( + "Couldn't get access to private method OperatorExportToWkbLocal#exportToWKB", e); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java new file mode 100644 index 00000000000..9a6f3c7bd0c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java @@ -0,0 +1,118 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Collections; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.Bytecode; + +/** + * A dedicated statement implementation for implicit traversal execution via a {@link + * DseGraphRemoteConnection}. + * + *

      This is a simplified version of {@link FluentGraphStatement} that exposes the bytecode + * directly instead of the traversal. + * + *

      This class is for internal use only. + */ +public class BytecodeGraphStatement extends GraphStatementBase { + + private final Bytecode bytecode; + + public BytecodeGraphStatement( + Bytecode bytecode, DriverExecutionProfile executionProfile, String executionProfileName) { + this( + bytecode, + null, + null, + null, + Long.MIN_VALUE, + executionProfile, + executionProfileName, + Collections.emptyMap(), + null, + null, + null, + null, + null, + null); + } + + private BytecodeGraphStatement( + Bytecode bytecode, + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + super( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + this.bytecode = bytecode; + } + + public Bytecode getBytecode() { + return bytecode; + } + + @Override + protected BytecodeGraphStatement newInstance( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + return new BytecodeGraphStatement( + bytecode, + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java new file mode 100644 index 00000000000..34929de642f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java @@ -0,0 +1,105 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Queue; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe // wraps a mutable queue +public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { + + private final GraphExecutionInfo executionInfo; + private final CountingIterator iterator; + private final Iterable currentPage; + + public DefaultAsyncGraphResultSet(GraphExecutionInfo executionInfo, Queue data) { + this.executionInfo = executionInfo; + this.iterator = new GraphResultIterator(data); + this.currentPage = () -> iterator; + } + + @NonNull + @Override + public GraphExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @Override + public int remaining() { + return iterator.remaining(); + } + + @NonNull + @Override + public Iterable currentPage() { + return currentPage; + } + + @Override + public boolean hasMorePages() { + // hard-coded until DSE graph supports paging + return false; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws 
IllegalStateException { + // hard-coded until DSE graph supports paging + throw new IllegalStateException( + "No next page. Use #hasMorePages before calling this method to avoid this error."); + } + + @Override + public void cancel() { + // nothing to do + } + + private static class GraphResultIterator extends CountingIterator { + + private final Queue data; + + // Sometimes a traversal can yield the same result multiple times consecutively. To avoid + // duplicating the data, DSE graph sends it only once with a counter indicating how many times + // it's repeated. + private long repeat = 0; + private GraphNode lastGraphNode = null; + + private GraphResultIterator(Queue data) { + super(data.size()); + this.data = data; + } + + @Override + protected GraphNode computeNext() { + if (repeat > 1) { + repeat -= 1; + // Note that we don't make a defensive copy, we assume the client won't mutate the node + return lastGraphNode; + } + + GraphNode container = data.poll(); + if (container == null) { + return endOfData(); + } + + // The repeat counter is called "bulk" in the JSON payload + GraphNode b = container.getByKey("bulk"); + if (b != null) { + this.repeat = b.asLong(); + } + + lastGraphNode = container.getByKey("result"); + return lastGraphNode; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java new file mode 100644 index 00000000000..36da830cee0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java @@ -0,0 +1,143 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import net.jcip.annotations.Immutable; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +@Immutable +public class DefaultBatchGraphStatement extends GraphStatementBase + implements BatchGraphStatement { + + private final List traversals; + + public DefaultBatchGraphStatement( + Iterable traversals, + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + super( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + this.traversals = ImmutableList.copyOf(traversals); + } + + @NonNull + @Override + public DefaultBatchGraphStatement addTraversal(@NonNull GraphTraversal newTraversal) { + return new DefaultBatchGraphStatement( + ImmutableList.builder().addAll(traversals).add(newTraversal).build(), + isIdempotent(), + getTimeout(), + getNode(), + getTimestamp(), + getExecutionProfile(), + 
getExecutionProfileName(), + getCustomPayload(), + getGraphName(), + getTraversalSource(), + getSubProtocol(), + getConsistencyLevel(), + getReadConsistencyLevel(), + getWriteConsistencyLevel()); + } + + @NonNull + @Override + public DefaultBatchGraphStatement addTraversals(@NonNull Iterable newTraversals) { + return new DefaultBatchGraphStatement( + ImmutableList.builder().addAll(traversals).addAll(newTraversals).build(), + isIdempotent(), + getTimeout(), + getNode(), + getTimestamp(), + getExecutionProfile(), + getExecutionProfileName(), + getCustomPayload(), + getGraphName(), + getTraversalSource(), + getSubProtocol(), + getConsistencyLevel(), + getReadConsistencyLevel(), + getWriteConsistencyLevel()); + } + + @Override + public int size() { + return this.traversals.size(); + } + + @Override + protected BatchGraphStatement newInstance( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + return new DefaultBatchGraphStatement( + traversals, + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @NonNull + @Override + public Iterator iterator() { + return this.traversals.iterator(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java new file mode 100644 index 00000000000..3d31f8e4140 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java @@ -0,0 
+1,43 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraphRemoteConnectionBuilder; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; + +@NotThreadSafe +public class DefaultDseRemoteConnectionBuilder implements DseGraphRemoteConnectionBuilder { + + private final DseSession dseSession; + private DriverExecutionProfile executionProfile; + private String executionProfileName; + + public DefaultDseRemoteConnectionBuilder(DseSession dseSession) { + this.dseSession = dseSession; + } + + @Override + public RemoteConnection build() { + return new DseGraphRemoteConnection(dseSession, executionProfile, executionProfileName); + } + + @Override + public DseGraphRemoteConnectionBuilder withExecutionProfile( + DriverExecutionProfile executionProfile) { + this.executionProfile = executionProfile; + return this; + } + + @Override + public DseGraphRemoteConnectionBuilder withExecutionProfileName(String executionProfileName) { + this.executionProfileName = executionProfileName; + return this; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java new file mode 100644 index 00000000000..d3aba9c9f58 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; +import net.jcip.annotations.Immutable; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; + +@Immutable +public class DefaultFluentGraphStatement extends GraphStatementBase + implements FluentGraphStatement { + + private final GraphTraversal traversal; + + public DefaultFluentGraphStatement( + GraphTraversal traversal, + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + super( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + this.traversal = traversal; + } + + @Override + protected FluentGraphStatement newInstance( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + return new DefaultFluentGraphStatement( + traversal, 
+ isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @NonNull + @Override + public GraphTraversal getTraversal() { + return traversal; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java new file mode 100644 index 00000000000..c5e3dc492c9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.protocol.internal.Frame; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultGraphExecutionInfo implements GraphExecutionInfo { + + private final GraphStatement statement; + private final Node coordinator; + private final int speculativeExecutionCount; + private final int successfulExecutionIndex; + private final List> errors; + private final List warnings; + private final Map customPayload; + + public DefaultGraphExecutionInfo( + GraphStatement statement, + Node coordinator, + int speculativeExecutionCount, + int successfulExecutionIndex, + List> errors, + Frame frame) { + this.statement = statement; + this.coordinator = coordinator; + this.speculativeExecutionCount = 
speculativeExecutionCount; + this.successfulExecutionIndex = successfulExecutionIndex; + this.errors = errors; + + // Note: the collections returned by the protocol layer are already unmodifiable + this.warnings = (frame == null) ? Collections.emptyList() : frame.warnings; + this.customPayload = (frame == null) ? Collections.emptyMap() : frame.customPayload; + } + + @Override + public GraphStatement getStatement() { + return statement; + } + + @Override + public Node getCoordinator() { + return coordinator; + } + + @Override + public int getSpeculativeExecutionCount() { + return speculativeExecutionCount; + } + + @Override + public int getSuccessfulExecutionIndex() { + return successfulExecutionIndex; + } + + @Override + public List> getErrors() { + // Assume this method will be called 0 or 1 time, so we create the unmodifiable wrapper on + // demand. + return (errors == null) ? Collections.emptyList() : Collections.unmodifiableList(errors); + } + + @Override + public List getWarnings() { + return warnings; + } + + @Override + public Map getIncomingPayload() { + return customPayload; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java new file mode 100644 index 00000000000..2ef1b4f45d5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java @@ -0,0 +1,191 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultScriptGraphStatement extends GraphStatementBase + implements ScriptGraphStatement { + + private final String script; + private final Boolean isSystemQuery; + private final NullAllowingImmutableMap queryParams; + + public DefaultScriptGraphStatement( + String script, + Map queryParams, + Boolean isSystemQuery, + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + super( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + this.script = script; + this.isSystemQuery = isSystemQuery; + this.queryParams = NullAllowingImmutableMap.copyOf(queryParams); + } + + //// Script GraphStatement level options + + @NonNull + @Override + public String getScript() { + return script; + } + + @NonNull + @Override + public ScriptGraphStatement setSystemQuery(@Nullable 
Boolean newValue) { + return new DefaultScriptGraphStatement( + script, + queryParams, + newValue, + isIdempotent(), + getTimeout(), + getNode(), + getTimestamp(), + getExecutionProfile(), + getExecutionProfileName(), + getCustomPayload(), + getGraphName(), + getTraversalSource(), + getSubProtocol(), + getConsistencyLevel(), + getReadConsistencyLevel(), + getWriteConsistencyLevel()); + } + + @Nullable + @Override + public Boolean isSystemQuery() { + return isSystemQuery; + } + + @NonNull + @Override + public Map getQueryParams() { + return this.queryParams; + } + + @NonNull + @Override + public ScriptGraphStatement setQueryParam(@NonNull String name, @Nullable Object value) { + NullAllowingImmutableMap.Builder newQueryParamsBuilder = + NullAllowingImmutableMap.builder(); + for (Map.Entry entry : queryParams.entrySet()) { + if (!entry.getKey().equals(name)) { + newQueryParamsBuilder.put(entry.getKey(), entry.getValue()); + } + } + newQueryParamsBuilder.put(name, value); + return setQueryParams(newQueryParamsBuilder.build()); + } + + @NonNull + @Override + public ScriptGraphStatement removeQueryParam(@NonNull String name) { + if (!queryParams.containsKey(name)) { + return this; + } else { + NullAllowingImmutableMap.Builder newQueryParamsBuilder = + NullAllowingImmutableMap.builder(); + for (Map.Entry entry : queryParams.entrySet()) { + if (!entry.getKey().equals(name)) { + newQueryParamsBuilder.put(entry.getKey(), entry.getValue()); + } + } + return setQueryParams(newQueryParamsBuilder.build()); + } + } + + private ScriptGraphStatement setQueryParams(Map newQueryParams) { + return new DefaultScriptGraphStatement( + script, + newQueryParams, + isSystemQuery, + isIdempotent(), + getTimeout(), + getNode(), + getTimestamp(), + getExecutionProfile(), + getExecutionProfileName(), + getCustomPayload(), + getGraphName(), + getTraversalSource(), + getSubProtocol(), + getConsistencyLevel(), + getReadConsistencyLevel(), + getWriteConsistencyLevel()); + } + + @Override + 
protected ScriptGraphStatement newInstance( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + return new DefaultScriptGraphStatement( + script, + queryParams, + isSystemQuery, + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java new file mode 100644 index 00000000000..ad66396daae --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import java.util.Iterator; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import net.jcip.annotations.Immutable; +import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; +import org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException; +import org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.Bytecode; +import org.apache.tinkerpop.gremlin.process.traversal.Traversal; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; + +@Immutable +public class DseGraphRemoteConnection implements RemoteConnection { + + private final DseSession dseSession; + private final DriverExecutionProfile executionProfile; + private final String executionProfileName; + + public DseGraphRemoteConnection( + DseSession dseSession, DriverExecutionProfile executionProfile, String executionProfileName) { + this.dseSession = dseSession; + this.executionProfile = executionProfile; + this.executionProfileName = executionProfileName; + } + + @Override + @SuppressWarnings("deprecation") + public Iterator> submit(Traversal traversal) + throws RemoteConnectionException { + return submit(traversal.asAdmin().getBytecode()); + } + + @Override + @SuppressWarnings({"deprecation", "unchecked"}) + public RemoteTraversal submit(Bytecode bytecode) throws RemoteConnectionException { + try { + return (RemoteTraversal) submitAsync(bytecode).get(); + } catch (InterruptedException | ExecutionException e) { + throw new RemoteConnectionException(e); + } + } + + @Override + public CompletableFuture> submitAsync(Bytecode bytecode) + throws RemoteConnectionException { + return 
dseSession + .executeAsync(new BytecodeGraphStatement(bytecode, executionProfile, executionProfileName)) + .toCompletableFuture() + .thenApply(DseGraphTraversal::new); + } + + @Override + public void close() throws Exception { + // do not close the DseSession here. + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java new file mode 100644 index 00000000000..d4a3643e461 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java @@ -0,0 +1,59 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import java.util.Iterator; +import java.util.NoSuchElementException; +import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.remote.traversal.AbstractRemoteTraversal; +import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; +import org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversalSideEffects; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; + +@NotThreadSafe +class DseGraphTraversal extends AbstractRemoteTraversal { + + private final Iterator graphNodeIterator; + + public DseGraphTraversal(AsyncGraphResultSet firstPage) { + this.graphNodeIterator = GraphResultSets.toSync(firstPage).iterator(); + } + + @Override + public RemoteTraversalSideEffects getSideEffects() { + // return null but do not throw "NotSupportedException" + return null; + } + + @Override + public boolean hasNext() { + return graphNodeIterator.hasNext(); + } + + @Override + public E next() { + return 
nextTraverser().get(); + } + + @Override + @SuppressWarnings("unchecked") + public Traverser.Admin nextTraverser() { + if (hasNext()) { + GraphNode nextGraphNode = graphNodeIterator.next(); + + // get the Raw object from the ObjectGraphNode, create a new remote Traverser + // with bulk = 1 because bulk is not supported yet. Casting should be ok + // because we have been able to deserialize into the right type. + return new DefaultRemoteTraverser<>((E) nextGraphNode.as(Object.class), 1); + } else { + // finished iterating/nothing to iterate. Normal behaviour. + throw new NoSuchElementException(); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java new file mode 100644 index 00000000000..7d2ba1d65e9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java @@ -0,0 +1,24 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.util.function.BiPredicate; + +/** + * An extension of TinkerPop's {@link BiPredicate} adding simple pre-condition checking methods that + * have to be written in the implementations. 
+ */ +public interface DsePredicate extends BiPredicate { + + default void preEvaluate(Object condition) { + Preconditions.checkArgument( + this.isValidCondition(condition), "Invalid condition provided: %s", condition); + } + + boolean isValidCondition(Object condition); +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java new file mode 100644 index 00000000000..33e5f1d5c52 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.oss.driver.shaded.guava.common.base.Objects; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.io.Serializable; +import net.jcip.annotations.Immutable; + +/** + * A container for a term and maximum edit distance. + * + *

      The context in which this is used determines the semantics of the edit distance. For instance, + * it might indicate single-character edits if used with fuzzy search queries or whole word + * movements if used with phrase proximity queries. + */ +@Immutable +public class EditDistance implements Serializable { + + private static final long serialVersionUID = 1L; + + public static final int DEFAULT_EDIT_DISTANCE = 0; + + public final String query; + public final int distance; + + public EditDistance(String query) { + this(query, DEFAULT_EDIT_DISTANCE); + } + + public EditDistance(String query, int distance) { + Preconditions.checkNotNull(query, "Query cannot be null."); + Preconditions.checkArgument(distance >= 0, "Edit distance cannot be negative."); + this.query = query; + this.distance = distance; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof EditDistance)) { + return false; + } + EditDistance that = (EditDistance) o; + return distance == that.distance && Objects.equal(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hashCode(query, distance); + } + + @Override + public String toString() { + return "EditDistance{" + "query='" + query + '\'' + ", distance=" + distance + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java new file mode 100644 index 00000000000..c72b4a28d6e --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java @@ -0,0 +1,100 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; + +/** + * List of predicates for geolocation usage with DseGraph and Search indexes. Should not be accessed + * directly but through the {@link Geo} static methods. + */ +public enum GeoPredicate implements DsePredicate { + + /** Matches values within the distance specified by the condition over a Haversine geometry. */ + inside { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + if (value == null) { + return false; + } + Preconditions.checkArgument(value instanceof Geometry); + Distance distance = (Distance) condition; + if (value instanceof Point) { + return haversineDistanceInDegrees(distance.getCenter(), (Point) value) + <= distance.getRadius(); + } else if (value instanceof Polygon) { + for (Point point : ((Polygon) value).getExteriorRing()) { + if (haversineDistanceInDegrees(distance.getCenter(), point) > distance.getRadius()) { + return false; + } + } + } else if (value instanceof LineString) { + for (Point point : ((LineString) value).getPoints()) { + if (haversineDistanceInDegrees(distance.getCenter(), point) > distance.getRadius()) { + return false; + } + } + } else { + throw new UnsupportedOperationException( + String.format("Value type '%s' unsupported", value.getClass().getName())); + } + + return true; + } + + @Override + public String toString() { + return "inside"; + } + }, + + /** + * Matches 
values contained in the geometric entity specified by the condition on a 2D Euclidean + * plane. + */ + insideCartesian { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + if (value == null) { + return false; + } + Preconditions.checkArgument(value instanceof Geometry); + return ((Geometry) condition).contains((Geometry) value); + } + + @Override + public String toString() { + return "insideCartesian"; + } + }; + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + static double haversineDistanceInDegrees(Point p1, Point p2) { + double dLat = Math.toRadians(p2.Y() - p1.Y()); + double dLon = Math.toRadians(p2.X() - p1.X()); + double lat1 = Math.toRadians(p1.Y()); + double lat2 = Math.toRadians(p2.Y()); + + double a = + Math.pow(Math.sin(dLat / 2), 2) + + Math.pow(Math.sin(dLon / 2), 2) * Math.cos(lat1) * Math.cos(lat2); + double c = 2 * Math.asin(Math.sqrt(a)); + return Math.toDegrees(c); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java new file mode 100644 index 00000000000..04e7b6d0568 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java @@ -0,0 +1,16 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +public class GeoUtils { + private static final double DEGREES_TO_RADIANS = Math.PI / 180; + private static final double EARTH_MEAN_RADIUS_KM = 6371.0087714; + private static final double DEG_TO_KM = DEGREES_TO_RADIANS * EARTH_MEAN_RADIUS_KM; + private static final double KM_TO_MILES = 0.621371192; + public static final double KM_TO_DEG = 1 / DEG_TO_KM; + public static final double MILES_TO_KM = 1 / KM_TO_MILES; +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java new file mode 100644 index 00000000000..13a83732ca0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -0,0 +1,231 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.protocol.internal.request.RawBytesQuery; +import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Collections; +import java.util.Map; + +/** + * Utility class to move boilerplate out of {@link GraphRequestHandler}. + * + *

      We extend {@link Conversions} only for methods that can be directly reused as-is; if something + * needs to be customized, it will be duplicated here instead of making the parent method + * "pluggable". + */ +public class GraphConversions extends Conversions { + + static String GRAPH_LANG_OPTION_KEY = "graph-language"; + static String GRAPH_NAME_OPTION_KEY = "graph-name"; + static String GRAPH_SOURCE_OPTION_KEY = "graph-source"; + static String GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY = "graph-read-consistency"; + static String GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY = "graph-write-consistency"; + static String GRAPH_RESULTS_OPTION_KEY = "graph-results"; + static String GRAPH_TIMEOUT_OPTION_KEY = "request-timeout"; + + static String inferSubProtocol( + GraphStatement statement, DriverExecutionProfile config, DefaultSession session) { + String graphProtocol = statement.getSubProtocol(); + if (graphProtocol == null) { + graphProtocol = + config.getString( + DseDriverOption.GRAPH_SUB_PROTOCOL, + // TODO pick graphson-3.0 if the target graph uses the native engine + "graphson-2.0"); + } + assert graphProtocol != null; + return graphProtocol; + } + + static Message createMessageFromGraphStatement( + GraphStatement statement, + String subProtocol, + DriverExecutionProfile config, + InternalDriverContext context) { + + ByteBuffer encodedQueryParams; + try { + Map queryParams = + (statement instanceof ScriptGraphStatement) + ? 
((ScriptGraphStatement) statement).getQueryParams() + : Collections.emptyMap(); + encodedQueryParams = GraphSONUtils.serializeToByteBuffer(queryParams, subProtocol); + } catch (IOException e) { + throw new UncheckedIOException( + "Couldn't serialize parameters for GraphStatement: " + statement, e); + } + + int consistencyLevel = + DefaultConsistencyLevel.valueOf(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .getProtocolCode(); + + long timestamp = statement.getTimestamp(); + if (timestamp == Long.MIN_VALUE) { + timestamp = context.getTimestampGenerator().next(); + } + + DseQueryOptions queryOptions = + new DseQueryOptions( + consistencyLevel, + Collections.singletonList(encodedQueryParams), + Collections.emptyMap(), // ignored by the DSE Graph server + true, // also ignored + 50, // also ignored + null, // also ignored + ProtocolConstants.ConsistencyLevel.LOCAL_SERIAL, // also ignored + timestamp, + null, // also ignored + false, // also ignored + null // also ignored + ); + + if (statement instanceof ScriptGraphStatement) { + return new Query(((ScriptGraphStatement) statement).getScript(), queryOptions); + } else { + return new RawBytesQuery(getQueryBytes(statement, subProtocol), queryOptions); + } + } + + private static byte[] getQueryBytes(GraphStatement statement, String graphSubProtocol) { + assert statement instanceof FluentGraphStatement + || statement instanceof BatchGraphStatement + || statement instanceof BytecodeGraphStatement; + Object toSerialize; + if (statement instanceof FluentGraphStatement) { + toSerialize = ((FluentGraphStatement) statement).getTraversal().asAdmin().getBytecode(); + } else if (statement instanceof BatchGraphStatement) { + toSerialize = ((BatchGraphStatement) statement).iterator(); + } else { + toSerialize = ((BytecodeGraphStatement) statement).getBytecode(); + } + try { + return GraphSONUtils.serializeToBytes(toSerialize, graphSubProtocol); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + 
+ static Map createCustomPayload( + GraphStatement statement, + String subProtocol, + DriverExecutionProfile config, + InternalDriverContext context) { + + ProtocolVersion protocolVersion = context.getProtocolVersion(); + + NullAllowingImmutableMap.Builder payload = + NullAllowingImmutableMap.builder(); + Map statementOptions = statement.getCustomPayload(); + payload.putAll(statementOptions); + + // Don't override anything that's already provided at the statement level + if (!statementOptions.containsKey(GRAPH_LANG_OPTION_KEY)) { + String graphLanguage = + (statement instanceof ScriptGraphStatement) ? "gremlin-groovy" : "bytecode-json"; + payload.put(GRAPH_LANG_OPTION_KEY, TypeCodecs.TEXT.encode(graphLanguage, protocolVersion)); + } + + if (!isSystemQuery(statement, config)) { + if (!statementOptions.containsKey(GRAPH_NAME_OPTION_KEY)) { + String graphName = statement.getGraphName(); + if (graphName == null) { + graphName = config.getString(DseDriverOption.GRAPH_NAME, null); + } + if (graphName != null) { + payload.put(GRAPH_NAME_OPTION_KEY, TypeCodecs.TEXT.encode(graphName, protocolVersion)); + } + } + if (!statementOptions.containsKey(GRAPH_SOURCE_OPTION_KEY)) { + String traversalSource = statement.getTraversalSource(); + if (traversalSource == null) { + traversalSource = config.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null); + } + if (traversalSource != null) { + payload.put( + GRAPH_SOURCE_OPTION_KEY, TypeCodecs.TEXT.encode(traversalSource, protocolVersion)); + } + } + } + + if (!statementOptions.containsKey(GRAPH_RESULTS_OPTION_KEY)) { + assert subProtocol != null; + payload.put(GRAPH_RESULTS_OPTION_KEY, TypeCodecs.TEXT.encode(subProtocol, protocolVersion)); + } + + if (!statementOptions.containsKey(GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY)) { + ConsistencyLevel readCl = statement.getReadConsistencyLevel(); + String readClString = + (readCl != null) + ? 
readCl.name() + : config.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); + if (readClString != null) { + payload.put( + GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY, + TypeCodecs.TEXT.encode(readClString, protocolVersion)); + } + } + + if (!statementOptions.containsKey(GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY)) { + ConsistencyLevel writeCl = statement.getWriteConsistencyLevel(); + String writeClString = + (writeCl != null) + ? writeCl.name() + : config.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null); + if (writeClString != null) { + payload.put( + GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY, + TypeCodecs.TEXT.encode(writeClString, protocolVersion)); + } + } + + if (!statementOptions.containsKey(GRAPH_TIMEOUT_OPTION_KEY)) { + Duration timeout = statement.getTimeout(); + if (timeout == null) { + timeout = config.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + } + if (timeout != null && !timeout.isZero()) { + payload.put( + GRAPH_TIMEOUT_OPTION_KEY, + TypeCodecs.BIGINT.encode(timeout.toMillis(), protocolVersion)); + } + } + return payload.build(); + } + + private static boolean isSystemQuery(GraphStatement statement, DriverExecutionProfile config) { + if (statement instanceof ScriptGraphStatement) { + Boolean statementValue = ((ScriptGraphStatement) statement).isSystemQuery(); + if (statementValue != null) { + return statementValue; + } + } + return config.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java new file mode 100644 index 00000000000..96992f1abf5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -0,0 +1,42 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class GraphRequestAsyncProcessor + implements RequestProcessor, CompletionStage> { + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof GraphStatement && resultType.equals(GraphStatement.ASYNC); + } + + @Override + public CompletionStage process( + GraphStatement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + return new GraphRequestHandler(request, session, context, sessionLogPrefix).handle(); + } + + @Override + public CompletionStage newFailure(RuntimeException error) { + return CompletableFutures.failedFuture(error); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java new file mode 100644 index 00000000000..14e91877a17 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -0,0 +1,661 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.RequestThrottlingException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.connection.FrameTooLongException; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; +import com.datastax.oss.driver.api.core.servererrors.ProtocolError; +import com.datastax.oss.driver.api.core.servererrors.QueryValidationException; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import 
com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.result.Rows; +import com.datastax.oss.protocol.internal.response.result.Void; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.handler.codec.EncoderException; +import io.netty.util.concurrent.EventExecutor; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import io.netty.util.concurrent.ScheduledFuture; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import 
net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class GraphRequestHandler implements Throttled { + + private static final Logger LOG = LoggerFactory.getLogger(GraphRequestHandler.class); + + private final long startTimeNanos; + private final String logPrefix; + + private final DefaultSession session; + + private final InternalDriverContext context; + private Queue queryPlan; + private final DriverExecutionProfile executionProfile; + + private final GraphStatement graphStatement; + + private final boolean isIdempotent; + protected final CompletableFuture result; + private final Message message; + private final String subProtocol; + private final EventExecutor scheduler; + + /** + * How many speculative executions are currently running (including the initial execution). We + * track this in order to know when to fail the request if all executions have reached the end of + * the query plan. + */ + private final AtomicInteger activeExecutionsCount; + + /** + * How many speculative executions have started (excluding the initial execution), whether they + * have completed or not. We track this in order to fill {@link + * GraphExecutionInfo#getSpeculativeExecutionCount()}. + */ + private final AtomicInteger startedSpeculativeExecutionsCount; + + private final SpeculativeExecutionPolicy speculativeExecutionPolicy; + + private final ScheduledFuture timeoutFuture; + private final List> scheduledExecutions; + private final List inFlightCallbacks; + private final RetryPolicy retryPolicy; + private final RequestThrottler throttler; + + // The errors on the nodes that were already tried (lazily initialized on the first error). + // We don't use a map because nodes can appear multiple times. 
+ private volatile List> errors; + + public GraphRequestHandler( + @NonNull GraphStatement graphStatement, + @NonNull DefaultSession dseSession, + @NonNull InternalDriverContext context, + @NonNull String sessionLogPrefix) { + this.startTimeNanos = System.nanoTime(); + this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); + Preconditions.checkArgument( + graphStatement instanceof ScriptGraphStatement + || graphStatement instanceof FluentGraphStatement + || graphStatement instanceof BatchGraphStatement + || graphStatement instanceof BytecodeGraphStatement, + "Unknown graph statement type: " + graphStatement.getClass()); + + LOG.trace("[{}] Creating new Graph request handler for request {}", logPrefix, graphStatement); + this.graphStatement = graphStatement; + this.session = dseSession; + this.context = context; + + this.executionProfile = + GraphConversions.resolveExecutionProfile(this.graphStatement, this.context); + Boolean statementIsIdempotent = graphStatement.isIdempotent(); + this.isIdempotent = + (statementIsIdempotent == null) + ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) + : statementIsIdempotent; + this.result = new CompletableFuture<>(); + this.result.exceptionally( + t -> { + try { + if (t instanceof CancellationException) { + cancelScheduledTasks(); + } + } catch (Throwable t2) { + Loggers.warnWithException(LOG, "[{}] Uncaught exception", logPrefix, t2); + } + return null; + }); + + this.scheduler = context.getNettyOptions().ioEventLoopGroup().next(); + + Duration timeout = graphStatement.getTimeout(); + if (timeout == null) { + timeout = executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + } + this.timeoutFuture = scheduleTimeout(timeout); + + this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); + this.speculativeExecutionPolicy = + context.getSpeculativeExecutionPolicy(executionProfile.getName()); + this.activeExecutionsCount = new AtomicInteger(1); + this.startedSpeculativeExecutionsCount = new AtomicInteger(0); + this.scheduledExecutions = isIdempotent ? new CopyOnWriteArrayList<>() : null; + + this.inFlightCallbacks = new CopyOnWriteArrayList<>(); + + this.subProtocol = + GraphConversions.inferSubProtocol(this.graphStatement, executionProfile, session); + this.message = + GraphConversions.createMessageFromGraphStatement( + this.graphStatement, subProtocol, executionProfile, context); + + this.throttler = context.getRequestThrottler(); + this.throttler.register(this); + } + + @Override + public void onThrottleReady(boolean wasDelayed) { + if (wasDelayed) { + session + .getMetricUpdater() + .updateTimer( + DefaultSessionMetric.THROTTLING_DELAY, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); + } + // compute query plan only when the throttling is done. + // TODO thread safety? 
+ this.queryPlan = + context + .getLoadBalancingPolicyWrapper() + .newQueryPlan(graphStatement, executionProfile.getName(), session); + sendRequest(null, 0, 0, true); + } + + public CompletionStage handle() { + return result; + } + + @Override + public void onThrottleFailure(@NonNull RequestThrottlingException error) { + session + .getMetricUpdater() + .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); + setFinalError(error, null); + } + + private ScheduledFuture scheduleTimeout(Duration timeout) { + if (timeout != null && timeout.toNanos() > 0) { + return scheduler.schedule( + () -> setFinalError(new DriverTimeoutException("Query timed out after " + timeout), null), + timeout.toNanos(), + TimeUnit.NANOSECONDS); + } else { + return null; + } + } + + /** + * Sends the request to the next available node. + * + * @param node if not null, it will be attempted first before the rest of the query plan. + * @param currentExecutionIndex 0 for the initial execution, 1 for the first speculative one, etc. 
+ * @param retryCount the number of times that the retry policy was invoked for this execution + * already (note that some internal retries don't go through the policy, and + * therefore don't increment this counter) + * @param scheduleNextExecution whether to schedule the next speculative execution + */ + private void sendRequest( + Node node, int currentExecutionIndex, int retryCount, boolean scheduleNextExecution) { + if (result.isDone()) { + return; + } + DriverChannel channel = null; + if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { + while (!result.isDone() && (node = queryPlan.poll()) != null) { + channel = session.getChannel(node, logPrefix); + if (channel != null) { + break; + } + } + } + if (channel == null) { + // We've reached the end of the query plan without finding any node to write to + if (!result.isDone() && activeExecutionsCount.decrementAndGet() == 0) { + // We're the last execution so fail the result + setFinalError(AllNodesFailedException.fromErrors(this.errors), null); + } + } else { + PerRequestCallback perRequestCallback = + new PerRequestCallback( + node, channel, currentExecutionIndex, retryCount, scheduleNextExecution, logPrefix); + + channel + .write( + message, + graphStatement.isTracing(), + GraphConversions.createCustomPayload( + graphStatement, subProtocol, executionProfile, context), + perRequestCallback) + .addListener(perRequestCallback); + } + } + + private void cancelScheduledTasks() { + if (this.timeoutFuture != null) { + this.timeoutFuture.cancel(false); + } + if (scheduledExecutions != null) { + for (ScheduledFuture future : scheduledExecutions) { + future.cancel(false); + } + } + for (PerRequestCallback callback : inFlightCallbacks) { + callback.cancel(); + } + } + + private void setFinalError(Throwable error, Node node) { + if (result.completeExceptionally(error)) { + cancelScheduledTasks(); + long latencyNanos = System.nanoTime() - startTimeNanos; + context + 
.getRequestTracker() + .onError(graphStatement, error, latencyNanos, executionProfile, node, logPrefix); + if (error instanceof DriverTimeoutException) { + throttler.signalTimeout(this); + session + .getMetricUpdater() + .incrementCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + } else if (!(error instanceof RequestThrottlingException)) { + throttler.signalError(this, error); + } + } + } + + private void recordError(Node node, Throwable error) { + // Use a local variable to do only a single single volatile read in the nominal case + List> errorsSnapshot = this.errors; + if (errorsSnapshot == null) { + synchronized (GraphRequestHandler.this) { + errorsSnapshot = this.errors; + if (errorsSnapshot == null) { + this.errors = errorsSnapshot = new CopyOnWriteArrayList<>(); + } + } + } + errorsSnapshot.add(new AbstractMap.SimpleEntry<>(node, error)); + } + + /** + * Handles the interaction with a single node in the query plan. + * + *

      An instance of this class is created each time we (re)try a node. + */ + private class PerRequestCallback + implements ResponseCallback, GenericFutureListener> { + private final long start = System.nanoTime(); + private final Node node; + private final DriverChannel channel; + // The identifier of the current execution (0 for the initial execution, 1 for the first + // speculative execution, etc.) + private final int execution; + // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for + // the first attempt of each execution). + private final int retryCount; + private final boolean scheduleNextExecution; + private final String logPrefix; + + PerRequestCallback( + Node node, + DriverChannel channel, + int execution, + int retryCount, + boolean scheduleNextExecution, + String logPrefix) { + this.node = node; + this.channel = channel; + this.execution = execution; + this.retryCount = retryCount; + this.scheduleNextExecution = scheduleNextExecution; + this.logPrefix = logPrefix + "|" + execution; + } + + @Override + public void onFailure(Throwable error) { + inFlightCallbacks.remove(this); + if (result.isDone()) { + return; + } + LOG.trace("[{}] Request failure, processing: {}", logPrefix, error.toString()); + RetryDecision decision; + if (!isIdempotent || error instanceof FrameTooLongException) { + decision = RetryDecision.RETHROW; + } else { + decision = retryPolicy.onRequestAborted(graphStatement, error, retryCount); + } + processRetryDecision(decision, error); + updateErrorMetrics( + ((DefaultNode) node).getMetricUpdater(), + decision, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED); + } + + // this gets invoked once the write completes. 
+ @Override + public void operationComplete(Future voidFuture) { + if (!voidFuture.isSuccess()) { + Throwable error = voidFuture.cause(); + if (error instanceof EncoderException + && error.getCause() instanceof FrameTooLongException) { + setFinalError(error.getCause(), node); + } else { + LOG.trace( + "[{}] Failed to send request on {}, trying next node (cause: {})", + logPrefix, + channel, + error); + recordError(node, error); + ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); + sendRequest(null, execution, retryCount, scheduleNextExecution); // try next node + } + } else { + LOG.trace("[{}] Request sent on {}", logPrefix, channel); + if (result.isDone()) { + // If the handler completed since the last time we checked, cancel directly because we + // don't know if cancelScheduledTasks() has run yet + cancel(); + } else { + inFlightCallbacks.add(this); + if (scheduleNextExecution && isIdempotent) { + int nextExecution = execution + 1; + // Note that `node` is the first node of the execution, it might not be the "slow" one + // if there were retries, but in practice retries are rare. 
+ long nextDelay = + speculativeExecutionPolicy.nextExecution(node, null, graphStatement, nextExecution); + if (nextDelay >= 0) { + LOG.trace( + "[{}] Scheduling speculative execution {} in {} ms", + logPrefix, + nextExecution, + nextDelay); + scheduledExecutions.add( + scheduler.schedule( + () -> { + if (!result.isDone()) { + LOG.trace( + "[{}] Starting speculative execution {}", + GraphRequestHandler.this.logPrefix, + nextExecution); + activeExecutionsCount.incrementAndGet(); + startedSpeculativeExecutionsCount.incrementAndGet(); + ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, + executionProfile.getName()); + sendRequest(null, nextExecution, 0, true); + } + }, + nextDelay, + TimeUnit.MILLISECONDS)); + } else { + LOG.trace( + "[{}] Speculative execution policy returned {}, no next execution", + logPrefix, + nextDelay); + } + } + } + } + } + + @Override + public void onResponse(Frame responseFrame) { + ((DefaultNode) node) + .getMetricUpdater() + .updateTimer( + DefaultNodeMetric.CQL_MESSAGES, + executionProfile.getName(), + System.nanoTime() - start, + TimeUnit.NANOSECONDS); + inFlightCallbacks.remove(this); + if (result.isDone()) { + return; + } + try { + Message responseMessage = responseFrame.message; + if (responseMessage instanceof Result) { + LOG.trace("[{}] Got result, completing", logPrefix); + setFinalResult((Result) responseMessage, responseFrame, this); + } else if (responseMessage instanceof Error) { + LOG.trace("[{}] Got error response, processing", logPrefix); + processErrorResponse((Error) responseMessage); + } else { + setFinalError(new IllegalStateException("Unexpected response " + responseMessage), node); + } + } catch (Throwable t) { + setFinalError(t, node); + } + } + + private void setFinalResult( + Result resultMessage, Frame responseFrame, PerRequestCallback callback) { + try { + GraphExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); + + Queue graphNodes = 
new ArrayDeque<>(); + for (List row : ((Rows) resultMessage).getData()) { + graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); + } + + DefaultAsyncGraphResultSet resultSet = + new DefaultAsyncGraphResultSet(executionInfo, graphNodes); + if (result.complete(resultSet)) { + cancelScheduledTasks(); + throttler.signalSuccess(GraphRequestHandler.this); + long latencyNanos = System.nanoTime() - startTimeNanos; + context + .getRequestTracker() + .onSuccess(graphStatement, latencyNanos, executionProfile, callback.node, logPrefix); + session + .getMetricUpdater() + .updateTimer( + DefaultSessionMetric.CQL_REQUESTS, + executionProfile.getName(), + latencyNanos, + TimeUnit.NANOSECONDS); + } + } catch (Throwable error) { + setFinalError(error, callback.node); + } + } + + private GraphExecutionInfo buildExecutionInfo( + PerRequestCallback callback, Frame responseFrame) { + return new DefaultGraphExecutionInfo( + graphStatement, + callback.node, + startedSpeculativeExecutionsCount.get(), + callback.execution, + errors, + responseFrame); + } + + private void processErrorResponse(Error errorMessage) { + CoordinatorException error = GraphConversions.toThrowable(node, errorMessage, context); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + if (error instanceof BootstrappingException) { + LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); + recordError(node, error); + sendRequest(null, execution, retryCount, false); + } else if (error instanceof QueryValidationException + || error instanceof FunctionFailureException + || error instanceof ProtocolError) { + LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); + metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); + setFinalError(error, node); + } else { + RetryDecision decision; + if (error instanceof ReadTimeoutException) { + ReadTimeoutException readTimeout = (ReadTimeoutException) error; + decision = + 
retryPolicy.onReadTimeout( + graphStatement, + readTimeout.getConsistencyLevel(), + readTimeout.getBlockFor(), + readTimeout.getReceived(), + readTimeout.wasDataPresent(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.READ_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); + } else if (error instanceof WriteTimeoutException) { + WriteTimeoutException writeTimeout = (WriteTimeoutException) error; + decision = + isIdempotent + ? retryPolicy.onWriteTimeout( + graphStatement, + writeTimeout.getConsistencyLevel(), + writeTimeout.getWriteType(), + writeTimeout.getBlockFor(), + writeTimeout.getReceived(), + retryCount) + : RetryDecision.RETHROW; + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.WRITE_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); + } else if (error instanceof UnavailableException) { + UnavailableException unavailable = (UnavailableException) error; + decision = + retryPolicy.onUnavailable( + graphStatement, + unavailable.getConsistencyLevel(), + unavailable.getRequired(), + unavailable.getAlive(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.UNAVAILABLES, + DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, + DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); + } else { + decision = + isIdempotent + ? 
retryPolicy.onErrorResponse(graphStatement, error, retryCount) + : RetryDecision.RETHROW; + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.OTHER_ERRORS, + DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, + DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); + } + processRetryDecision(decision, error); + } + } + + private void processRetryDecision(RetryDecision decision, Throwable error) { + LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); + switch (decision) { + case RETRY_SAME: + recordError(node, error); + sendRequest(node, execution, retryCount + 1, false); + break; + case RETRY_NEXT: + recordError(node, error); + sendRequest(null, execution, retryCount + 1, false); + break; + case RETHROW: + setFinalError(error, node); + break; + case IGNORE: + setFinalResult(Void.INSTANCE, null, this); + break; + } + } + + private void updateErrorMetrics( + NodeMetricUpdater metricUpdater, + RetryDecision decision, + DefaultNodeMetric error, + DefaultNodeMetric retriesOnError, + DefaultNodeMetric ignoresOnError) { + metricUpdater.incrementCounter(error, executionProfile.getName()); + switch (decision) { + case RETRY_SAME: + case RETRY_NEXT: + metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); + metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); + break; + case IGNORE: + metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); + metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); + break; + case RETHROW: + // nothing to do + } + } + + void cancel() { + try { + if (!channel.closeFuture().isDone()) { + this.channel.cancel(this); + } + } catch (Throwable t) { + Loggers.warnWithException(LOG, "[{}] Error cancelling", logPrefix, t); + } + } + + @Override + public String toString() { + return logPrefix; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java new file mode 100644 index 00000000000..efd211a8edc --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class GraphRequestSyncProcessor + implements RequestProcessor, GraphResultSet> { + + private final GraphRequestAsyncProcessor asyncProcessor; + + public GraphRequestSyncProcessor(GraphRequestAsyncProcessor asyncProcessor) { + this.asyncProcessor = asyncProcessor; + } + + @Override + public boolean canProcess(Request request, GenericType resultType) { + return request instanceof GraphStatement && resultType.equals(GraphStatement.SYNC); + } + + @Override + public GraphResultSet process( + GraphStatement request, + DefaultSession session, + InternalDriverContext context, + String sessionLogPrefix) { + BlockingOperation.checkNotDriverThread(); + AsyncGraphResultSet firstPage = + 
CompletableFutures.getUninterruptibly( + asyncProcessor.process(request, session, context, sessionLogPrefix)); + return GraphResultSets.toSync(firstPage); + } + + @Override + public GraphResultSet newFailure(RuntimeException error) { + throw error; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java new file mode 100644 index 00000000000..04e42d94bec --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java @@ -0,0 +1,21 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; + +public class GraphResultSets { + + public static GraphResultSet toSync(AsyncGraphResultSet firstPage) { + if (firstPage.hasMorePages()) { + throw new UnsupportedOperationException("TODO implement multi-page results"); + } else { + return new SinglePageGraphResultSet(firstPage); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java new file mode 100644 index 00000000000..77cb3ac4640 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java @@ -0,0 +1,335 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.net.InetAddresses; +import java.io.IOException; +import java.net.Inet4Address; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.util.List; +import java.util.Map; +import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; +import org.apache.tinkerpop.shaded.jackson.core.JsonParseException; +import org.apache.tinkerpop.shaded.jackson.core.JsonParser; +import org.apache.tinkerpop.shaded.jackson.core.Version; +import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext; +import org.apache.tinkerpop.shaded.jackson.databind.JsonDeserializer; +import org.apache.tinkerpop.shaded.jackson.databind.JsonSerializer; +import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider; +import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer; +import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; +import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; + +public class GraphSON1SerdeTP { + + //////////////////////// DESERIALIZERS //////////////////////// + + /** + * Default deserializer used by the driver for {@link InetAddress} instances. The actual subclass + * returned by this deserializer depends on the type of address: {@link Inet4Address IPV4} or + * {@link Inet6Address IPV6}. 
+ */ + static class DefaultInetAddressDeserializer extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + private final Class inetClass; + + DefaultInetAddressDeserializer(Class inetClass) { + super(inetClass); + this.inetClass = inetClass; + } + + @Override + public T deserialize(JsonParser parser, DeserializationContext ctx) throws IOException { + String ip = parser.readValueAs(String.class); + try { + InetAddress inet = InetAddresses.forString(ip); + return inetClass.cast(inet); + } catch (ClassCastException e) { + throw new JsonParseException( + parser, + String.format("Inet address cannot be cast to %s: %s", inetClass.getSimpleName(), ip), + e); + } catch (IllegalArgumentException e) { + throw new JsonParseException(parser, String.format("Expected inet address, got %s", ip), e); + } + } + } + + /** + * Default deserializer used by the driver for geospatial types. It deserializes such types into + * {@link Geometry} instances. The actual subclass depends on the type being deserialized. + */ + static class DefaultGeometryDeserializer extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + private final Class geometryClass; + + DefaultGeometryDeserializer(Class geometryClass) { + super(geometryClass); + this.geometryClass = geometryClass; + } + + @Override + public T deserialize(JsonParser parser, DeserializationContext ctx) throws IOException { + String wkt = parser.readValueAs(String.class); + Geometry geometry; + if (wkt.startsWith("POINT")) geometry = Point.fromWellKnownText(wkt); + else if (wkt.startsWith("LINESTRING")) geometry = LineString.fromWellKnownText(wkt); + else if (wkt.startsWith("POLYGON")) geometry = Polygon.fromWellKnownText(wkt); + else throw new JsonParseException(parser, "Unknown geometry type: " + wkt); + return geometryClass.cast(geometry); + } + } + + /** Base class for serializing the {@code java.time.*} types to ISO-8061 formats. 
*/ + abstract static class AbstractJavaTimeSerializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + AbstractJavaTimeSerializer(final Class clazz) { + super(clazz); + } + + @Override + public void serialize( + final T value, final JsonGenerator gen, final SerializerProvider serializerProvider) + throws IOException { + gen.writeString(value.toString()); + } + } + + /** Base class for deserializing the {@code java.time.*} types from ISO-8061 formats. */ + abstract static class AbstractJavaTimeJacksonDeserializer extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + AbstractJavaTimeJacksonDeserializer(final Class clazz) { + super(clazz); + } + + abstract T parse(final String val); + + @Override + public T deserialize( + final JsonParser jsonParser, final DeserializationContext deserializationContext) + throws IOException { + return parse(jsonParser.getText()); + } + } + + static final class DurationJacksonSerializer + extends AbstractJavaTimeSerializer { + + private static final long serialVersionUID = 1L; + + DurationJacksonSerializer() { + super(java.time.Duration.class); + } + } + + static final class DurationJacksonDeserializer + extends AbstractJavaTimeJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + DurationJacksonDeserializer() { + super(java.time.Duration.class); + } + + @Override + public java.time.Duration parse(final String val) { + return java.time.Duration.parse(val); + } + } + + static final class InstantJacksonSerializer + extends AbstractJavaTimeSerializer { + + private static final long serialVersionUID = 1L; + + InstantJacksonSerializer() { + super(java.time.Instant.class); + } + } + + static final class InstantJacksonDeserializer + extends AbstractJavaTimeJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + InstantJacksonDeserializer() { + super(java.time.Instant.class); + } + + @Override + public java.time.Instant parse(final 
String val) { + return java.time.Instant.parse(val); + } + } + + static final class LocalDateJacksonSerializer + extends AbstractJavaTimeSerializer { + + private static final long serialVersionUID = 1L; + + LocalDateJacksonSerializer() { + super(java.time.LocalDate.class); + } + } + + static final class LocalDateJacksonDeserializer + extends AbstractJavaTimeJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + LocalDateJacksonDeserializer() { + super(java.time.LocalDate.class); + } + + @Override + public java.time.LocalDate parse(final String val) { + return java.time.LocalDate.parse(val); + } + } + + static final class LocalTimeJacksonSerializer + extends AbstractJavaTimeSerializer { + + private static final long serialVersionUID = 1L; + + LocalTimeJacksonSerializer() { + super(java.time.LocalTime.class); + } + } + + static final class LocalTimeJacksonDeserializer + extends AbstractJavaTimeJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + LocalTimeJacksonDeserializer() { + super(java.time.LocalTime.class); + } + + @Override + public java.time.LocalTime parse(final String val) { + return java.time.LocalTime.parse(val); + } + } + + //////////////////////// SERIALIZERS //////////////////////// + + /** Default serializer used by the driver for {@link LegacyGraphNode} instances. */ + static class DefaultGraphNodeSerializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + DefaultGraphNodeSerializer() { + super(LegacyGraphNode.class); + } + + @Override + public void serialize( + LegacyGraphNode value, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeTree(value.getDelegate()); + } + } + + /** + * Default serializer used by the driver for geospatial types. It serializes {@link Geometry} + * instances into their Well-Known Text (WKT) equivalent. 
+ */ + static class DefaultGeometrySerializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + DefaultGeometrySerializer() { + super(Geometry.class); + } + + @Override + public void serialize( + Geometry value, JsonGenerator jsonGenerator, SerializerProvider serializers) + throws IOException { + jsonGenerator.writeString(value.asWellKnownText()); + } + } + + /** The default Jackson module used by DSE Graph. */ + static class GraphSON1DefaultModule extends SimpleModule { + + private static final long serialVersionUID = 1L; + + GraphSON1DefaultModule(String name, Version version) { + super(name, version, createDeserializers(), createSerializers()); + } + + private static Map, JsonDeserializer> createDeserializers() { + + return ImmutableMap., JsonDeserializer>builder() + + // Inet (there is no built-in deserializer for InetAddress and subclasses) + .put(InetAddress.class, new DefaultInetAddressDeserializer<>(InetAddress.class)) + .put(Inet4Address.class, new DefaultInetAddressDeserializer<>(Inet4Address.class)) + .put(Inet6Address.class, new DefaultInetAddressDeserializer<>(Inet6Address.class)) + + // Geospatial types + .put(Geometry.class, new DefaultGeometryDeserializer<>(Geometry.class)) + .put(Point.class, new DefaultGeometryDeserializer<>(Point.class)) + .put(LineString.class, new DefaultGeometryDeserializer<>(LineString.class)) + .put(Polygon.class, new DefaultGeometryDeserializer<>(Polygon.class)) + .build(); + } + + private static List> createSerializers() { + return ImmutableList.>builder() + .add(new DefaultGraphNodeSerializer()) + .add(new DefaultGeometrySerializer()) + .build(); + } + } + + /** Serializers and deserializers for JSR 310 {@code java.time.*}. 
*/ + static class GraphSON1JavaTimeModule extends SimpleModule { + + private static final long serialVersionUID = 1L; + + GraphSON1JavaTimeModule(String name, Version version) { + super(name, version, createDeserializers(), createSerializers()); + } + + private static Map, JsonDeserializer> createDeserializers() { + + return ImmutableMap., JsonDeserializer>builder() + .put(java.time.Duration.class, new DurationJacksonDeserializer()) + .put(java.time.Instant.class, new InstantJacksonDeserializer()) + .put(java.time.LocalDate.class, new LocalDateJacksonDeserializer()) + .put(java.time.LocalTime.class, new LocalTimeJacksonDeserializer()) + .build(); + } + + private static List> createSerializers() { + return ImmutableList.>builder() + .add(new DurationJacksonSerializer()) + .add(new InstantJacksonSerializer()) + .add(new LocalDateJacksonSerializer()) + .add(new LocalTimeJacksonSerializer()) + .build(); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java new file mode 100644 index 00000000000..9b385e47622 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java @@ -0,0 +1,419 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.dse.driver.api.core.graph.predicates.Search; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.process.traversal.util.AndP; +import org.apache.tinkerpop.gremlin.process.traversal.util.ConnectiveP; +import org.apache.tinkerpop.gremlin.process.traversal.util.OrP; +import org.apache.tinkerpop.gremlin.structure.io.graphson.AbstractObjectDeserializer; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens; +import org.apache.tinkerpop.gremlin.structure.io.graphson.TinkerPopJacksonModule; +import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; +import org.apache.tinkerpop.shaded.jackson.core.JsonParser; +import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext; +import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider; +import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer; +import org.apache.tinkerpop.shaded.jackson.databind.jsontype.TypeSerializer; +import 
org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; +import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdScalarSerializer; +import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; + +public class GraphSON2SerdeTP { + + /** + * A Jackson Module to use for TinkerPop serialization/deserialization. It extends {@link + * org.apache.tinkerpop.gremlin.structure.io.graphson.TinkerPopJacksonModule} because of the + * specific typing format used in GraphSON. + */ + public static class DseGraphModule extends TinkerPopJacksonModule { + + private static final long serialVersionUID = 1L; + + public DseGraphModule() { + super("dse-driver-2.0"); + addSerializer(DefaultPoint.class, new PointGeometrySerializer()); + addSerializer(DefaultLineString.class, new LineStringGeometrySerializer()); + addSerializer(DefaultPolygon.class, new PolygonGeometrySerializer()); + addSerializer(Distance.class, new DistanceGeometrySerializer()); + // override TinkerPop's P predicates because of DSE's Search and Geo predicates + addSerializer(P.class, new DsePJacksonSerializer()); + addSerializer(EditDistance.class, new EditDistanceSerializer()); + + addDeserializer(DefaultLineString.class, new LineStringGeometryDeserializer()); + addDeserializer(DefaultPoint.class, new PointGeometryDeserializer()); + addDeserializer(DefaultPolygon.class, new PolygonGeometryDeserializer()); + addDeserializer(Distance.class, new DistanceGeometryDeserializer()); + // override TinkerPop's P predicates because of DSE's Search and Geo predicates + addDeserializer(P.class, new DsePJacksonDeserializer()); + } + + @SuppressWarnings("rawtypes") + @Override + public Map getTypeDefinitions() { + Map definitions = new HashMap<>(); + definitions.put(DefaultLineString.class, "LineString"); + definitions.put(DefaultPoint.class, "Point"); + definitions.put(DefaultPolygon.class, "Polygon"); + definitions.put(byte[].class, "Blob"); + definitions.put(Distance.class, "Distance"); + 
definitions.put(P.class, "P"); + return definitions; + } + + @Override + public String getTypeNamespace() { + return "dse"; + } + + abstract static class AbstractGeometryJacksonDeserializer + extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + AbstractGeometryJacksonDeserializer(final Class clazz) { + super(clazz); + } + + public abstract T parse(final String val); + + @Override + public T deserialize( + final JsonParser jsonParser, final DeserializationContext deserializationContext) + throws IOException { + return parse(jsonParser.getText()); + } + } + + abstract static class AbstractGeometryJacksonSerializer + extends StdScalarSerializer { + + private static final long serialVersionUID = 1L; + + AbstractGeometryJacksonSerializer(final Class clazz) { + super(clazz); + } + + @Override + public void serialize( + final T value, final JsonGenerator gen, final SerializerProvider serializerProvider) + throws IOException { + gen.writeString(value.asWellKnownText()); + } + } + + public static class LineStringGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + LineStringGeometrySerializer() { + super(LineString.class); + } + } + + public static class LineStringGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + LineStringGeometryDeserializer() { + super(DefaultLineString.class); + } + + @Override + public DefaultLineString parse(final String val) { + return (DefaultLineString) LineString.fromWellKnownText(val); + } + } + + public static class PolygonGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + PolygonGeometrySerializer() { + super(Polygon.class); + } + } + + public static class PolygonGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + 
PolygonGeometryDeserializer() { + super(DefaultPolygon.class); + } + + @Override + public DefaultPolygon parse(final String val) { + return (DefaultPolygon) Polygon.fromWellKnownText(val); + } + } + + public static class PointGeometrySerializer extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + PointGeometrySerializer() { + super(Point.class); + } + } + + public static class PointGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + PointGeometryDeserializer() { + super(DefaultPoint.class); + } + + @Override + public DefaultPoint parse(final String val) { + return (DefaultPoint) Point.fromWellKnownText(val); + } + } + + public static class DistanceGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + DistanceGeometrySerializer() { + super(Distance.class); + } + } + + public static class DistanceGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + DistanceGeometryDeserializer() { + super(Distance.class); + } + + @Override + public Distance parse(final String val) { + return Distance.fromWellKnownText(val); + } + } + + @SuppressWarnings("rawtypes") + static final class DsePJacksonSerializer extends StdScalarSerializer

      { + + private static final long serialVersionUID = 1L; + + DsePJacksonSerializer() { + super(P.class); + } + + @Override + public void serialize( + final P p, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeStartObject(); + jsonGenerator.writeStringField("predicateType", getPredicateType(p)); + jsonGenerator.writeStringField( + GraphSONTokens.PREDICATE, + p instanceof ConnectiveP + ? p instanceof AndP ? GraphSONTokens.AND : GraphSONTokens.OR + : p.getBiPredicate().toString()); + if (p instanceof ConnectiveP) { + jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); + for (final P predicate : ((ConnectiveP) p).getPredicates()) { + jsonGenerator.writeObject(predicate); + } + jsonGenerator.writeEndArray(); + } else { + if (p.getValue() instanceof Collection) { + jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); + for (final Object object : (Collection) p.getValue()) { + jsonGenerator.writeObject(object); + } + jsonGenerator.writeEndArray(); + } else { + jsonGenerator.writeObjectField(GraphSONTokens.VALUE, p.getValue()); + } + } + jsonGenerator.writeEndObject(); + } + + private String getPredicateType(P p) { + if (p.getBiPredicate() instanceof SearchPredicate) { + return Search.class.getSimpleName(); + } else if (p.getBiPredicate() instanceof GeoPredicate) { + return Geo.class.getSimpleName(); + } else { + return P.class.getSimpleName(); + } + } + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + static final class DsePJacksonDeserializer extends AbstractObjectDeserializer

      { + + private static final long serialVersionUID = 1L; + + DsePJacksonDeserializer() { + super(P.class); + } + + @Override + public P createObject(final Map data) { + final String predicate = (String) data.get(GraphSONTokens.PREDICATE); + final String predicateType = (String) data.get("predicateType"); + final Object value = data.get(GraphSONTokens.VALUE); + if (predicate.equals(GraphSONTokens.AND) || predicate.equals(GraphSONTokens.OR)) { + return predicate.equals(GraphSONTokens.AND) + ? new AndP((List

      ) value) + : new OrP((List

      ) value); + } else { + try { + if (value instanceof Collection) { + if (predicate.equals("between")) { + return P.between(((List) value).get(0), ((List) value).get(1)); + } else if (predicateType.equals(P.class.getSimpleName()) + && predicate.equals("inside")) { + return P.between(((List) value).get(0), ((List) value).get(1)); + } else if (predicate.equals("outside")) { + return P.outside(((List) value).get(0), ((List) value).get(1)); + } else if (predicate.equals("within")) { + return P.within((Collection) value); + } else if (predicate.equals("without")) { + return P.without((Collection) value); + } else { + return (P) + P.class.getMethod(predicate, Collection.class).invoke(null, (Collection) value); + } + } else { + if (predicate.equals(SearchPredicate.prefix.name())) { + return Search.prefix((String) value); + } else if (predicate.equals(SearchPredicate.tokenPrefix.name())) { + return Search.tokenPrefix((String) value); + } else if (predicate.equals(SearchPredicate.regex.name())) { + return Search.regex((String) value); + } else if (predicate.equals(SearchPredicate.tokenRegex.name())) { + return Search.tokenRegex((String) value); + } else if (predicate.equals(SearchPredicate.token.name())) { + return Search.token((String) value); + } else if (predicate.equals(SearchPredicate.fuzzy.name())) { + Map arguments = (Map) value; + return Search.fuzzy( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicate.equals(SearchPredicate.tokenFuzzy.name())) { + Map arguments = (Map) value; + return Search.tokenFuzzy( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicate.equals(SearchPredicate.phrase.name())) { + Map arguments = (Map) value; + return Search.phrase( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicateType.equals(Geo.class.getSimpleName()) + && predicate.equals(GeoPredicate.inside.name())) { + return Geo.inside( + ((Distance) value).getCenter(), 
+ ((Distance) value).getRadius(), + Geo.Unit.DEGREES); + } else if (predicateType.equals(Geo.class.getSimpleName()) + && predicate.equals(GeoPredicate.insideCartesian.name())) { + return Geo.inside(((Distance) value).getCenter(), ((Distance) value).getRadius()); + } else { + return (P) P.class.getMethod(predicate, Object.class).invoke(null, value); + } + } + } catch (final Exception e) { + throw new IllegalStateException(e.getMessage(), e); + } + } + } + } + + public static class EditDistanceSerializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + EditDistanceSerializer() { + super(EditDistance.class); + } + + @Override + public void serialize( + EditDistance editDistance, JsonGenerator generator, SerializerProvider provider) + throws IOException { + generator.writeObject( + ImmutableMap.of("query", editDistance.query, "distance", editDistance.distance)); + } + + @Override + public void serializeWithType( + EditDistance editDistance, + JsonGenerator generator, + SerializerProvider provider, + TypeSerializer serializer) + throws IOException { + serialize(editDistance, generator, provider); + } + } + } + + public static class DriverObjectsModule extends SimpleModule { + + private static final long serialVersionUID = 1L; + + public DriverObjectsModule() { + super("datastax-driver-module"); + addSerializer(ObjectGraphNode.class, new ObjectGraphNodeGraphSON2Serializer()); + } + + static final class ObjectGraphNodeGraphSON2Serializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + protected ObjectGraphNodeGraphSON2Serializer() { + super(ObjectGraphNode.class); + } + + @Override + public void serialize( + ObjectGraphNode objectGraphNode, + JsonGenerator jsonGenerator, + SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeObject(objectGraphNode.as(Object.class)); + } + + @Override + public void serializeWithType( + ObjectGraphNode objectGraphNode, + JsonGenerator 
jsonGenerator, + SerializerProvider serializerProvider, + TypeSerializer typeSerializer) + throws IOException { + serialize(objectGraphNode, jsonGenerator, serializerProvider); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java new file mode 100644 index 00000000000..9c29dd31eeb --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java @@ -0,0 +1,428 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.dse.driver.api.core.graph.predicates.Search; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.process.traversal.util.AndP; +import org.apache.tinkerpop.gremlin.process.traversal.util.ConnectiveP; +import org.apache.tinkerpop.gremlin.process.traversal.util.OrP; +import 
org.apache.tinkerpop.gremlin.structure.io.graphson.AbstractObjectDeserializer; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens; +import org.apache.tinkerpop.gremlin.structure.io.graphson.TinkerPopJacksonModule; +import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; +import org.apache.tinkerpop.shaded.jackson.core.JsonParser; +import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext; +import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider; +import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer; +import org.apache.tinkerpop.shaded.jackson.databind.jsontype.TypeSerializer; +import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; +import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdScalarSerializer; +import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; + +public class GraphSON3SerdeTP { + /** + * A Jackson Module to use for TinkerPop serialization/deserialization. It extends {@link + * TinkerPopJacksonModule} because of the specific typing format used in GraphSON. 
+ */ + public static class DseGraphModule extends TinkerPopJacksonModule { + + private static final long serialVersionUID = 1L; + + public DseGraphModule() { + super("dse-driver-2.0"); + addSerializer( + DefaultPoint.class, new GraphSON3SerdeTP.DseGraphModule.PointGeometrySerializer()); + addSerializer( + DefaultLineString.class, + new GraphSON3SerdeTP.DseGraphModule.LineStringGeometrySerializer()); + addSerializer( + DefaultPolygon.class, new GraphSON3SerdeTP.DseGraphModule.PolygonGeometrySerializer()); + addSerializer( + Distance.class, new GraphSON3SerdeTP.DseGraphModule.DistanceGeometrySerializer()); + // override TinkerPop's P predicates because of DSE's Search and Geo predicates + addSerializer(P.class, new GraphSON3SerdeTP.DseGraphModule.DsePJacksonSerializer()); + addSerializer( + EditDistance.class, new GraphSON3SerdeTP.DseGraphModule.EditDistanceSerializer()); + + addDeserializer( + DefaultLineString.class, + new GraphSON3SerdeTP.DseGraphModule.LineStringGeometryDeserializer()); + addDeserializer( + DefaultPoint.class, new GraphSON3SerdeTP.DseGraphModule.PointGeometryDeserializer()); + addDeserializer( + DefaultPolygon.class, new GraphSON3SerdeTP.DseGraphModule.PolygonGeometryDeserializer()); + addDeserializer( + Distance.class, new GraphSON3SerdeTP.DseGraphModule.DistanceGeometryDeserializer()); + // override TinkerPop's P predicates because of DSE's Search and Geo predicates + addDeserializer(P.class, new GraphSON3SerdeTP.DseGraphModule.DsePJacksonDeserializer()); + } + + @SuppressWarnings("rawtypes") + @Override + public Map getTypeDefinitions() { + Map definitions = new HashMap<>(); + definitions.put(DefaultLineString.class, "LineString"); + definitions.put(DefaultPoint.class, "Point"); + definitions.put(DefaultPolygon.class, "Polygon"); + definitions.put(byte[].class, "Blob"); + definitions.put(Distance.class, "Distance"); + definitions.put(P.class, "P"); + return definitions; + } + + @Override + public String getTypeNamespace() { + return "dse"; + } 
+ + abstract static class AbstractGeometryJacksonDeserializer + extends StdDeserializer { + + private static final long serialVersionUID = 1L; + + AbstractGeometryJacksonDeserializer(final Class clazz) { + super(clazz); + } + + public abstract T parse(final String val); + + @Override + public T deserialize( + final JsonParser jsonParser, final DeserializationContext deserializationContext) + throws IOException { + return parse(jsonParser.getText()); + } + } + + abstract static class AbstractGeometryJacksonSerializer + extends StdScalarSerializer { + + private static final long serialVersionUID = 1L; + + AbstractGeometryJacksonSerializer(final Class clazz) { + super(clazz); + } + + @Override + public void serialize( + final T value, final JsonGenerator gen, final SerializerProvider serializerProvider) + throws IOException { + gen.writeString(value.asWellKnownText()); + } + } + + public static class LineStringGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + LineStringGeometrySerializer() { + super(LineString.class); + } + } + + public static class LineStringGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + LineStringGeometryDeserializer() { + super(DefaultLineString.class); + } + + @Override + public DefaultLineString parse(final String val) { + return (DefaultLineString) LineString.fromWellKnownText(val); + } + } + + public static class PolygonGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + PolygonGeometrySerializer() { + super(Polygon.class); + } + } + + public static class PolygonGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + PolygonGeometryDeserializer() { + super(DefaultPolygon.class); + } + + @Override + public DefaultPolygon parse(final String val) { + return 
(DefaultPolygon) Polygon.fromWellKnownText(val); + } + } + + public static class PointGeometrySerializer extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + PointGeometrySerializer() { + super(Point.class); + } + } + + public static class PointGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + PointGeometryDeserializer() { + super(DefaultPoint.class); + } + + @Override + public DefaultPoint parse(final String val) { + return (DefaultPoint) Point.fromWellKnownText(val); + } + } + + public static class DistanceGeometrySerializer + extends AbstractGeometryJacksonSerializer { + + private static final long serialVersionUID = 1L; + + DistanceGeometrySerializer() { + super(Distance.class); + } + } + + public static class DistanceGeometryDeserializer + extends AbstractGeometryJacksonDeserializer { + + private static final long serialVersionUID = 1L; + + DistanceGeometryDeserializer() { + super(Distance.class); + } + + @Override + public Distance parse(final String val) { + return Distance.fromWellKnownText(val); + } + } + + @SuppressWarnings("rawtypes") + static final class DsePJacksonSerializer extends StdScalarSerializer

      { + + private static final long serialVersionUID = 1L; + + DsePJacksonSerializer() { + super(P.class); + } + + @Override + public void serialize( + final P p, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeStartObject(); + jsonGenerator.writeStringField("predicateType", getPredicateType(p)); + jsonGenerator.writeStringField( + GraphSONTokens.PREDICATE, + p instanceof ConnectiveP + ? p instanceof AndP ? GraphSONTokens.AND : GraphSONTokens.OR + : p.getBiPredicate().toString()); + if (p instanceof ConnectiveP) { + jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); + for (final P predicate : ((ConnectiveP) p).getPredicates()) { + jsonGenerator.writeObject(predicate); + } + jsonGenerator.writeEndArray(); + } else { + if (p.getValue() instanceof Collection) { + jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); + for (final Object object : (Collection) p.getValue()) { + jsonGenerator.writeObject(object); + } + jsonGenerator.writeEndArray(); + } else { + jsonGenerator.writeObjectField(GraphSONTokens.VALUE, p.getValue()); + } + } + jsonGenerator.writeEndObject(); + } + + private String getPredicateType(P p) { + if (p.getBiPredicate() instanceof SearchPredicate) { + return Search.class.getSimpleName(); + } else if (p.getBiPredicate() instanceof GeoPredicate) { + return Geo.class.getSimpleName(); + } else { + return P.class.getSimpleName(); + } + } + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + static final class DsePJacksonDeserializer extends AbstractObjectDeserializer

      { + + private static final long serialVersionUID = 1L; + + DsePJacksonDeserializer() { + super(P.class); + } + + @Override + public P createObject(final Map data) { + final String predicate = (String) data.get(GraphSONTokens.PREDICATE); + final String predicateType = (String) data.get("predicateType"); + final Object value = data.get(GraphSONTokens.VALUE); + if (predicate.equals(GraphSONTokens.AND) || predicate.equals(GraphSONTokens.OR)) { + return predicate.equals(GraphSONTokens.AND) + ? new AndP((List

      ) value) + : new OrP((List

      ) value); + } else { + try { + if (value instanceof Collection) { + if (predicate.equals("between")) { + return P.between(((List) value).get(0), ((List) value).get(1)); + } else if (predicateType.equals(P.class.getSimpleName()) + && predicate.equals("inside")) { + return P.between(((List) value).get(0), ((List) value).get(1)); + } else if (predicate.equals("outside")) { + return P.outside(((List) value).get(0), ((List) value).get(1)); + } else if (predicate.equals("within")) { + return P.within((Collection) value); + } else if (predicate.equals("without")) { + return P.without((Collection) value); + } else { + return (P) + P.class.getMethod(predicate, Collection.class).invoke(null, (Collection) value); + } + } else { + if (predicate.equals(SearchPredicate.prefix.name())) { + return Search.prefix((String) value); + } else if (predicate.equals(SearchPredicate.tokenPrefix.name())) { + return Search.tokenPrefix((String) value); + } else if (predicate.equals(SearchPredicate.regex.name())) { + return Search.regex((String) value); + } else if (predicate.equals(SearchPredicate.tokenRegex.name())) { + return Search.tokenRegex((String) value); + } else if (predicate.equals(SearchPredicate.token.name())) { + return Search.token((String) value); + } else if (predicate.equals(SearchPredicate.fuzzy.name())) { + Map arguments = (Map) value; + return Search.fuzzy( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicate.equals(SearchPredicate.tokenFuzzy.name())) { + Map arguments = (Map) value; + return Search.tokenFuzzy( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicate.equals(SearchPredicate.phrase.name())) { + Map arguments = (Map) value; + return Search.phrase( + (String) arguments.get("query"), (int) arguments.get("distance")); + } else if (predicateType.equals(Geo.class.getSimpleName()) + && predicate.equals(GeoPredicate.inside.name())) { + return Geo.inside( + ((Distance) value).getCenter(), 
+ ((Distance) value).getRadius(), + Geo.Unit.DEGREES); + } else if (predicateType.equals(Geo.class.getSimpleName()) + && predicate.equals(GeoPredicate.insideCartesian.name())) { + return Geo.inside(((Distance) value).getCenter(), ((Distance) value).getRadius()); + } else { + return (P) P.class.getMethod(predicate, Object.class).invoke(null, value); + } + } + } catch (final Exception e) { + throw new IllegalStateException(e.getMessage(), e); + } + } + } + } + + public static class EditDistanceSerializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + EditDistanceSerializer() { + super(EditDistance.class); + } + + @Override + public void serialize( + EditDistance editDistance, JsonGenerator generator, SerializerProvider provider) + throws IOException { + generator.writeObject( + ImmutableMap.of("query", editDistance.query, "distance", editDistance.distance)); + } + + @Override + public void serializeWithType( + EditDistance editDistance, + JsonGenerator generator, + SerializerProvider provider, + TypeSerializer serializer) + throws IOException { + serialize(editDistance, generator, provider); + } + } + } + + public static class DriverObjectsModule extends SimpleModule { + + private static final long serialVersionUID = 1L; + + public DriverObjectsModule() { + super("datastax-driver-module"); + addSerializer(ObjectGraphNode.class, new ObjectGraphNodeGraphSON3Serializer()); + } + + static final class ObjectGraphNodeGraphSON3Serializer extends StdSerializer { + + private static final long serialVersionUID = 1L; + + protected ObjectGraphNodeGraphSON3Serializer() { + super(ObjectGraphNode.class); + } + + @Override + public void serialize( + ObjectGraphNode objectGraphNode, + JsonGenerator jsonGenerator, + SerializerProvider serializerProvider) + throws IOException { + jsonGenerator.writeObject(objectGraphNode.as(Object.class)); + } + + @Override + public void serializeWithType( + ObjectGraphNode objectGraphNode, + JsonGenerator 
jsonGenerator, + SerializerProvider serializerProvider, + TypeSerializer typeSerializer) + throws IOException { + serialize(objectGraphNode, jsonGenerator, serializerProvider); + } + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java new file mode 100644 index 00000000000..12b74016b0d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -0,0 +1,140 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.shaded.guava.common.base.Suppliers; +import com.datastax.oss.driver.shaded.guava.common.base.Throwables; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheLoader; +import com.datastax.oss.driver.shaded.guava.common.cache.LoadingCache; +import com.datastax.oss.protocol.internal.util.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.function.Supplier; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONXModuleV2d0; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONXModuleV3d0; +import 
org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV2d0; +import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0; +import org.apache.tinkerpop.shaded.jackson.core.Version; +import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; + +class GraphSONUtils { + + public static final String GRAPHSON_1_0 = "graphson-1.0"; + public static final String GRAPHSON_2_0 = "graphson-2.0"; + public static final String GRAPHSON_3_0 = "graphson-3.0"; + private static final LoadingCache OBJECT_MAPPERS = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + @Override + public ObjectMapper load(@NonNull String graphSubProtocol) throws Exception { + switch (graphSubProtocol) { + case GRAPHSON_1_0: + com.datastax.oss.driver.api.core.Version driverVersion = + DseSession.DSE_DRIVER_COORDINATES.getVersion(); + Version driverJacksonVersion = + new Version( + driverVersion.getMajor(), + driverVersion.getMinor(), + driverVersion.getPatch(), + driverVersion.getPreReleaseLabels() != null + && driverVersion.getPreReleaseLabels().contains("SNAPSHOT") + ? 
"SNAPSHOT" + : null, + "com.datastax.dse", + "dse-java-driver-core"); + + ObjectMapper mapper = + GraphSONMapper.build() + .version(GraphSONVersion.V1_0) + .create() + .createMapper(); + mapper.registerModule( + new GraphSON1SerdeTP.GraphSON1DefaultModule( + "graph-graphson1default", driverJacksonVersion)); + mapper.registerModule( + new GraphSON1SerdeTP.GraphSON1JavaTimeModule( + "graph-graphson1javatime", driverJacksonVersion)); + + return mapper; + case GRAPHSON_2_0: + return GraphSONMapper.build() + .version(GraphSONVersion.V2_0) + .addCustomModule(GraphSONXModuleV2d0.build().create(false)) + .addRegistry(TinkerIoRegistryV2d0.instance()) + .addCustomModule(new GraphSON2SerdeTP.DseGraphModule()) + .addCustomModule(new GraphSON2SerdeTP.DriverObjectsModule()) + .create() + .createMapper(); + case GRAPHSON_3_0: + return GraphSONMapper.build() + .version(GraphSONVersion.V3_0) + .addCustomModule(GraphSONXModuleV3d0.build().create(false)) + .addRegistry(TinkerIoRegistryV3d0.instance()) + .addCustomModule(new GraphSON3SerdeTP.DseGraphModule()) + .addCustomModule(new GraphSON3SerdeTP.DriverObjectsModule()) + .create() + .createMapper(); + + default: + throw new IllegalStateException( + String.format("Unknown graph sub-protocol: {%s}", graphSubProtocol)); + } + } + }); + + static final Supplier GRAPHSON1_READER = + Suppliers.memoize( + () -> + GraphSONReader.build() + .mapper(GraphSONMapper.build().version(GraphSONVersion.V1_0).create()) + .create()); + + static ByteBuffer serializeToByteBuffer(Object object, String graphSubProtocol) + throws IOException { + return ByteBuffer.wrap(serializeToBytes(object, graphSubProtocol)); + } + + static byte[] serializeToBytes(Object object, String graphSubProtocol) throws IOException { + try { + return OBJECT_MAPPERS.get(graphSubProtocol).writeValueAsBytes(object); + } catch (ExecutionException e) { + Throwables.throwIfUnchecked(e); + throw new RuntimeException(e); + } + } + + static GraphNode createGraphNode(List data, String 
graphSubProtocol) + throws IOException { + try { + ObjectMapper mapper = OBJECT_MAPPERS.get(graphSubProtocol); + switch (graphSubProtocol) { + case GRAPHSON_1_0: + return new LegacyGraphNode(mapper.readTree(Bytes.getArray(data.get(0))), mapper); + case GRAPHSON_2_0: + case GRAPHSON_3_0: + return new ObjectGraphNode(mapper.readValue(Bytes.getArray(data.get(0)), Object.class)); + default: + // Should already be caught when we lookup in the cache + throw new AssertionError( + String.format("Unknown graph sub-protocol: {%s}", graphSubProtocol)); + } + } catch (ExecutionException e) { + Throwables.throwIfUnchecked(e); + throw new RuntimeException(e); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java new file mode 100644 index 00000000000..645fa8f2d20 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java @@ -0,0 +1,402 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Map; +import net.jcip.annotations.Immutable; + +@Immutable +public abstract class GraphStatementBase> + implements GraphStatement { + private final Boolean isIdempotent; + private final Duration timeout; + private final Node node; + private final long timestamp; + private final DriverExecutionProfile executionProfile; + private final String executionProfileName; + private final Map customPayload; + private final String graphName; + private final String traversalSource; + private final String subProtocol; + private final ConsistencyLevel consistencyLevel; + private final ConsistencyLevel readConsistencyLevel; + private final ConsistencyLevel writeConsistencyLevel; + + protected GraphStatementBase( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel) { + this.isIdempotent = isIdempotent; + this.timeout = timeout; + this.node = node; + this.timestamp = timestamp; + this.executionProfile = executionProfile; + this.executionProfileName = executionProfileName; + this.customPayload = customPayload; + this.graphName = graphName; + this.traversalSource = traversalSource; + this.subProtocol = 
subProtocol; + this.consistencyLevel = consistencyLevel; + this.readConsistencyLevel = readConsistencyLevel; + this.writeConsistencyLevel = writeConsistencyLevel; + } + + protected abstract SelfT newInstance( + Boolean isIdempotent, + Duration timeout, + Node node, + long timestamp, + DriverExecutionProfile executionProfile, + String executionProfileName, + Map customPayload, + String graphName, + String traversalSource, + String subProtocol, + ConsistencyLevel consistencyLevel, + ConsistencyLevel readConsistencyLevel, + ConsistencyLevel writeConsistencyLevel); + + @Override + public Boolean isIdempotent() { + return isIdempotent; + } + + @NonNull + @Override + public SelfT setIdempotent(@Nullable Boolean newIdempotence) { + return newInstance( + newIdempotence, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public Duration getTimeout() { + return timeout; + } + + @NonNull + @Override + public SelfT setTimeout(@Nullable Duration newTimeout) { + return newInstance( + isIdempotent, + newTimeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public Node getNode() { + return node; + } + + @NonNull + @Override + public SelfT setNode(@Nullable Node newNode) { + return newInstance( + isIdempotent, + timeout, + newNode, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Override + public long getTimestamp() { + return this.timestamp; + } + + @NonNull + @Override + public SelfT setTimestamp(long newTimestamp) { + return newInstance( + isIdempotent, + 
timeout, + node, + newTimestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public DriverExecutionProfile getExecutionProfile() { + return executionProfile; + } + + @NonNull + @Override + public SelfT setExecutionProfile(@Nullable DriverExecutionProfile newExecutionProfile) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + newExecutionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public String getExecutionProfileName() { + return executionProfileName; + } + + @NonNull + @Override + public SelfT setExecutionProfileName(@Nullable String newExecutionProfileName) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + newExecutionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @NonNull + @Override + public Map getCustomPayload() { + return customPayload; + } + + @NonNull + @Override + public SelfT setCustomPayload(@NonNull Map newCustomPayload) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + newCustomPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public String getGraphName() { + return graphName; + } + + @NonNull + @Override + public SelfT setGraphName(@Nullable String newGraphName) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + newGraphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + 
writeConsistencyLevel); + } + + @Nullable + @Override + public String getTraversalSource() { + return traversalSource; + } + + @NonNull + @Override + public SelfT setTraversalSource(@Nullable String newTraversalSource) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + newTraversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public String getSubProtocol() { + return subProtocol; + } + + @NonNull + @Override + public SelfT setSubProtocol(@Nullable String newSubProtocol) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + newSubProtocol, + consistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public ConsistencyLevel getConsistencyLevel() { + return consistencyLevel; + } + + @Override + public SelfT setConsistencyLevel(@Nullable ConsistencyLevel newConsistencyLevel) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + newConsistencyLevel, + readConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public ConsistencyLevel getReadConsistencyLevel() { + return readConsistencyLevel; + } + + @NonNull + @Override + public SelfT setReadConsistencyLevel(@Nullable ConsistencyLevel newReadConsistencyLevel) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + newReadConsistencyLevel, + writeConsistencyLevel); + } + + @Nullable + @Override + public ConsistencyLevel getWriteConsistencyLevel() { + return writeConsistencyLevel; + } + + @NonNull + @Override 
+ public SelfT setWriteConsistencyLevel(@Nullable ConsistencyLevel newWriteConsistencyLevel) { + return newInstance( + isIdempotent, + timeout, + node, + timestamp, + executionProfile, + executionProfileName, + customPayload, + graphName, + traversalSource, + subProtocol, + consistencyLevel, + readConsistencyLevel, + newWriteConsistencyLevel); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java new file mode 100644 index 00000000000..f5f61d9072f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java @@ -0,0 +1,312 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.shaded.guava.common.base.Objects; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; +import java.util.Set; +import net.jcip.annotations.Immutable; +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens; +import org.apache.tinkerpop.gremlin.structure.util.Attachable; +import org.apache.tinkerpop.shaded.jackson.core.JsonParser; +import 
org.apache.tinkerpop.shaded.jackson.databind.JavaType; +import org.apache.tinkerpop.shaded.jackson.databind.JsonNode; +import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; + +/** + * Legacy implementation for GraphSON 1 results. + * + *

      The server returns plain JSON with no type information. The driver works with the JSON + * representation directly. + */ +@Immutable +public class LegacyGraphNode implements GraphNode { + private static final String TYPE = "type"; + private static final String VERTEX_TYPE = "vertex"; + private static final String EDGE_TYPE = "edge"; + + private static final GenericType> LIST_TYPE = GenericType.listOf(Object.class); + private static final GenericType> MAP_TYPE = + GenericType.mapOf(String.class, Object.class); + + private final JsonNode delegate; + private final ObjectMapper objectMapper; + + public LegacyGraphNode(JsonNode delegate, ObjectMapper objectMapper) { + Preconditions.checkNotNull(delegate); + Preconditions.checkNotNull(objectMapper); + this.delegate = delegate; + this.objectMapper = objectMapper; + } + + /** + * The underlying JSON representation. + * + *

      This is an implementation detail, it's only exposed through the internal API. + */ + public JsonNode getDelegate() { + return delegate; + } + + /** + * The object mapper used to deserialize results in {@link #as(Class)} and {@link + * #as(GenericType)}. + * + *

      This is an implementation detail, it's only exposed through the internal API. + */ + public ObjectMapper getObjectMapper() { + return objectMapper; + } + + @Override + public boolean isNull() { + return delegate.isNull(); + } + + @Override + public boolean isMap() { + return delegate.isObject(); + } + + @Override + public Iterable keys() { + return (Iterable) delegate::fieldNames; + } + + @Override + public LegacyGraphNode getByKey(Object key) { + if (!(key instanceof String)) { + return null; + } + JsonNode node = delegate.get(((String) key)); + if (node == null) { + return null; + } + return new LegacyGraphNode(node, objectMapper); + } + + @Override + @SuppressWarnings("unchecked") + public Map asMap() { + return (Map) as(MAP_TYPE); + } + + @Override + public boolean isList() { + return delegate.isArray(); + } + + @Override + public int size() { + return delegate.size(); + } + + @Override + public LegacyGraphNode getByIndex(int index) { + JsonNode node = delegate.get(index); + if (node == null) { + return null; + } + return new LegacyGraphNode(node, objectMapper); + } + + @Override + @SuppressWarnings("unchecked") + public List asList() { + return (List) as(LIST_TYPE); + } + + @Override + public boolean isValue() { + return delegate.isValueNode(); + } + + @Override + public int asInt() { + return delegate.asInt(); + } + + @Override + public boolean asBoolean() { + return delegate.asBoolean(); + } + + @Override + public long asLong() { + return delegate.asLong(); + } + + @Override + public double asDouble() { + return delegate.asDouble(); + } + + @Override + public String asString() { + return delegate.asText(); + } + + @Override + public boolean isVertex() { + return isType(VERTEX_TYPE); + } + + @Override + public Vertex asVertex() { + try { + return GraphSONUtils.GRAPHSON1_READER + .get() + .readVertex( + new ByteArrayInputStream(delegate.toString().getBytes(StandardCharsets.UTF_8)), + null, + null, + null); + } catch (IOException e) { + throw new 
UncheckedIOException("Could not deserialize node as Vertex.", e); + } + } + + @Override + public boolean isEdge() { + return isType(EDGE_TYPE); + } + + @Override + public Edge asEdge() { + try { + return GraphSONUtils.GRAPHSON1_READER + .get() + .readEdge( + new ByteArrayInputStream(delegate.toString().getBytes(StandardCharsets.UTF_8)), + Attachable::get); + } catch (IOException e) { + throw new UncheckedIOException("Could not deserialize node as Edge.", e); + } + } + + @Override + public boolean isPath() { + return false; + } + + @Override + public Path asPath() { + throw new UnsupportedOperationException( + "GraphSON1 does not support Path, use another Graph sub-protocol such as GraphSON2."); + } + + @Override + public boolean isProperty() { + return delegate.has(GraphSONTokens.KEY) && delegate.has(GraphSONTokens.VALUE); + } + + @Override + @SuppressWarnings("unchecked") + public Property asProperty() { + try { + return GraphSONUtils.GRAPHSON1_READER + .get() + .readProperty( + new ByteArrayInputStream(delegate.toString().getBytes(StandardCharsets.UTF_8)), + Attachable::get); + } catch (IOException e) { + throw new UncheckedIOException("Could not deserialize node as Property.", e); + } + } + + @Override + public boolean isVertexProperty() { + return delegate.has(GraphSONTokens.ID) + && delegate.has(GraphSONTokens.VALUE) + && delegate.has(GraphSONTokens.LABEL); + } + + @Override + @SuppressWarnings("unchecked") + public VertexProperty asVertexProperty() { + try { + return GraphSONUtils.GRAPHSON1_READER + .get() + .readVertexProperty( + new ByteArrayInputStream(delegate.toString().getBytes(StandardCharsets.UTF_8)), + Attachable::get); + } catch (IOException e) { + throw new UncheckedIOException("Could not deserialize node as VertexProperty.", e); + } + } + + @Override + public boolean isSet() { + return false; + } + + @Override + public Set asSet() { + throw new UnsupportedOperationException( + "GraphSON1 does not support Set, use another Graph sub-protocol such as 
GraphSON2."); + } + + @Override + public ResultT as(Class clazz) { + try { + return objectMapper.treeToValue(delegate, clazz); + } catch (IOException e) { + throw new UncheckedIOException("Could not deserialize node as: " + clazz, e); + } + } + + @Override + public ResultT as(GenericType type) { + try { + JsonParser parser = objectMapper.treeAsTokens(delegate); + JavaType javaType = objectMapper.constructType(type.__getToken().getType()); + return objectMapper.readValue(parser, javaType); + } catch (IOException e) { + throw new UncheckedIOException("Could not deserialize node as: " + type, e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof LegacyGraphNode)) { + return false; + } + LegacyGraphNode that = (LegacyGraphNode) o; + return Objects.equal(delegate, that.delegate); + } + + @Override + public int hashCode() { + return Objects.hashCode(delegate); + } + + @Override + public String toString() { + return delegate.toString(); + } + + private boolean isType(String expectedTypeName) { + JsonNode type = delegate.get(TYPE); + return type != null && expectedTypeName.equals(type.asText()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java new file mode 100644 index 00000000000..92e1fc17f0c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java @@ -0,0 +1,233 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.shaded.guava.common.base.Objects; +import java.util.List; +import java.util.Map; +import java.util.Set; +import net.jcip.annotations.Immutable; +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; + +/** + * Modern implementation for GraphSON 2+ results. + * + *

      The server returns results with type information. The driver works with the decoded objects + * directly. + */ +@Immutable +public class ObjectGraphNode implements GraphNode { + + private final Object delegate; + + public ObjectGraphNode(Object delegate) { + this.delegate = delegate; + } + + @Override + public boolean isNull() { + return delegate == null; + } + + @Override + public boolean isMap() { + return delegate instanceof Map; + } + + @Override + public Iterable keys() { + return ((Map) delegate).keySet(); + } + + @Override + public GraphNode getByKey(Object key) { + if (!isMap()) { + return null; + } + Map map = asMap(); + if (map.containsKey(key)) { + return new ObjectGraphNode(map.get(key)); + } + return null; + } + + @Override + @SuppressWarnings("unchecked") + public Map asMap() { + return (Map) delegate; + } + + @Override + public boolean isList() { + return delegate instanceof List; + } + + @Override + public int size() { + if (isList()) { + return asList().size(); + } else if (isMap()) { + return asMap().size(); + } else if (isSet()) { + return asSet().size(); + } else { + return 0; + } + } + + @Override + public GraphNode getByIndex(int index) { + if (!isList() || index < 0 || index >= size()) { + return null; + } + return new ObjectGraphNode(asList().get(index)); + } + + @Override + @SuppressWarnings("unchecked") + public List asList() { + return (List) delegate; + } + + @Override + public boolean isValue() { + return !(isList() + || isMap() + || isSet() + || isVertex() + || isEdge() + || isPath() + || isProperty() + || isVertexProperty()); + } + + @Override + public boolean isVertexProperty() { + return delegate instanceof VertexProperty; + } + + @Override + public boolean isProperty() { + return delegate instanceof Property; + } + + @Override + public boolean isPath() { + return delegate instanceof Path; + } + + @Override + public int asInt() { + return (Integer) delegate; + } + + @Override + public boolean asBoolean() { + return (Boolean) 
delegate; + } + + @Override + public long asLong() { + return (Long) delegate; + } + + @Override + public double asDouble() { + return (Double) delegate; + } + + @Override + public String asString() { + return (String) delegate; + } + + @Override + @SuppressWarnings("unchecked") + public T as(Class clazz) { + return (T) delegate; + } + + @Override + @SuppressWarnings("unchecked") + public T as(GenericType type) { + return (T) delegate; + } + + @Override + public boolean isVertex() { + return delegate instanceof Vertex; + } + + @Override + public Vertex asVertex() { + return (Vertex) delegate; + } + + @Override + public boolean isEdge() { + return delegate instanceof Edge; + } + + @Override + public Edge asEdge() { + return (Edge) delegate; + } + + @Override + public Path asPath() { + return (Path) delegate; + } + + @Override + @SuppressWarnings("unchecked") + public Property asProperty() { + return (Property) delegate; + } + + @Override + @SuppressWarnings("unchecked") + public VertexProperty asVertexProperty() { + return (VertexProperty) delegate; + } + + @Override + public boolean isSet() { + return delegate instanceof Set; + } + + @Override + @SuppressWarnings("unchecked") + public Set asSet() { + return (Set) delegate; + } + + @Override + public String toString() { + return this.delegate.toString(); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + // Compare each others' delegates. 
+ return other instanceof ObjectGraphNode + && Objects.equal(this.delegate, ((ObjectGraphNode) other).delegate); + } + + @Override + public int hashCode() { + return Objects.hashCode(delegate); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java new file mode 100644 index 00000000000..304a1e09a2a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java @@ -0,0 +1,290 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import java.util.List; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * List of predicates for geolocation usage with DseGraph and Search indexes. Should not be accessed + * directly but through the {@link com.datastax.dse.driver.api.core.graph.predicates.Search} static + * methods. + */ +public enum SearchPredicate implements DsePredicate { + /** Whether the text contains a given term as a token in the text (case insensitive). 
*/ + token { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + return value != null && evaluate(value.toString(), (String) condition); + } + + boolean evaluate(String value, String terms) { + Set tokens = Sets.newHashSet(tokenize(value.toLowerCase())); + terms = terms.trim(); + List tokenTerms = tokenize(terms.toLowerCase()); + if (!terms.isEmpty() && tokenTerms.isEmpty()) { + return false; + } + for (String term : tokenTerms) { + if (!tokens.contains(term)) { + return false; + } + } + return true; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null && isNotBlank((String) condition); + } + + @Override + public String toString() { + return "token"; + } + }, + + /** Whether the text contains a token that starts with a given term (case insensitive). */ + tokenPrefix { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + return value != null && evaluate(value.toString(), (String) condition); + } + + boolean evaluate(String value, String prefix) { + for (String token : tokenize(value.toLowerCase())) { + if (token.startsWith(prefix.toLowerCase().trim())) { + return true; + } + } + return false; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + @Override + public String toString() { + return "tokenPrefix"; + } + }, + + /** Whether the text contains a token that matches a regular expression (case insensitive). 
*/ + tokenRegex { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + return value != null && evaluate(value.toString(), (String) condition); + } + + boolean evaluate(String value, String regex) { + Pattern compiled = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + for (String token : tokenize(value.toLowerCase())) { + if (compiled.matcher(token).matches()) { + return true; + } + } + return false; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null && isNotBlank((String) condition); + } + + @Override + public String toString() { + return "tokenRegex"; + } + }, + + /** + * Whether some token in the text is within a given edit distance from the given term (case + * insensitive). + */ + tokenFuzzy { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + if (value == null) { + return false; + } + + EditDistance fuzzyCondition = (EditDistance) condition; + + for (String token : tokenize(value.toString().toLowerCase())) { + if (SearchUtils.getOptimalStringAlignmentDistance(token, fuzzyCondition.query.toLowerCase()) + <= fuzzyCondition.distance) { + return true; + } + } + + return false; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + @Override + public String toString() { + return "tokenFuzzy"; + } + }, + + /** Whether the text starts with a given prefix (case sensitive). */ + prefix { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + return value != null && value.toString().startsWith(((String) condition).trim()); + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + @Override + public String toString() { + return "prefix"; + } + }, + + /** Whether the text matches a regular expression (case sensitive). 
*/ + regex { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + return value != null + && Pattern.compile((String) condition, Pattern.DOTALL) + .matcher(value.toString()) + .matches(); + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null && isNotBlank((String) condition); + } + + @Override + public String toString() { + return "regex"; + } + }, + + /** Whether the text is within a given edit distance from the given term (case sensitive). */ + fuzzy { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + if (value == null) { + return false; + } + EditDistance fuzzyCondition = (EditDistance) condition; + return SearchUtils.getOptimalStringAlignmentDistance(value.toString(), fuzzyCondition.query) + <= fuzzyCondition.distance; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + @Override + public String toString() { + return "fuzzy"; + } + }, + + /** + * Whether tokenized text contains a given phrase, optionally within a given proximity (case + * insensitive). 
+ */ + phrase { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + if (value == null) { + return false; + } + + EditDistance phraseCondition = (EditDistance) condition; + + List valueTokens = tokenize(value.toString().toLowerCase()); + List phraseTokens = tokenize(phraseCondition.query.toLowerCase()); + + int valuePosition = 0; + int phrasePosition = 0; + int distance = 0; + + // Look for matches while phrase/value tokens and distance budget remain + while (phrasePosition < phraseTokens.size() + && valuePosition < valueTokens.size() + && distance <= phraseCondition.distance) { + + if (phraseTokens.get(phrasePosition).equals(valueTokens.get(valuePosition))) { + // Early return-true when we've matched the whole phrase (within the specified distance) + if (phrasePosition == phraseTokens.size() - 1) { + return true; + } + phrasePosition++; + } else if (0 < phrasePosition) { + // We've previously found at least one matching token in the input string, + // but the current token does not match the phrase. Increment distance. 
+ distance++; + } + + valuePosition++; + } + + return false; + } + + @Override + public boolean isValidCondition(Object condition) { + return condition != null; + } + + @Override + public String toString() { + return "phrase"; + } + }; + + private static boolean isNotBlank(String str) { + if (str == null || str.isEmpty()) { + return false; + } + int strLen = str.length(); + for (int i = 0; i < strLen; i++) { + if (!Character.isWhitespace(str.charAt(i))) { + return true; + } + } + return false; + } + + // Match anything that is not either: + // 1) a unicode letter, regardless of subcategory (same as Character.isLetter), or + // 2) a unicode decimal digit number (same as Character.isDigit) + private static final Pattern TOKEN_SPLIT_PATTERN = Pattern.compile("[^\\p{L}\\p{Nd}]"); + + static List tokenize(String str) { + String[] rawTokens = TOKEN_SPLIT_PATTERN.split(str); // could contain empty strings + return Stream.of(rawTokens).filter(t -> 0 < t.length()).collect(Collectors.toList()); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java new file mode 100644 index 00000000000..36db6be0db3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java @@ -0,0 +1,132 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +public class SearchUtils { + + /** + * Finds the Optimal + * string alignment distance – also referred to as the Damerau-Levenshtein distance – between + * two strings. + * + *

      This is the number of changes needed to change one string into another (insertions, + * deletions or substitutions of a single character, or transpositions of two adjacent + * characters). + * + *

      This implementation is based on the Apache Commons Lang implementation of the Levenshtein + * distance, only adding support for transpositions. + * + *

      Note that this is the distance used in Lucene for {@code FuzzyTermsEnum}. Lucene itself has + * an implementation of this algorithm, but it is much less efficient in terms of space (also note + * that Lucene's implementation does not return the distance, but a similarity score based on it). + * + * @param s the first string, must not be {@code null}. + * @param t the second string, must not be {@code null}. + * @return The Optimal string alignment distance between the two strings. + * @throws IllegalArgumentException if either String input is {@code null}. + * @see org.apache.commons.lang.StringUtils#getLevenshteinDistance(String, String) + * @see + * LuceneLevenshteinDistance + */ + public static int getOptimalStringAlignmentDistance(String s, String t) { + + /* + * Code adapted from https://github.com/apache/commons-lang/blob/LANG_2_6/src/main/java/org/apache/commons/lang/StringUtils.java + * which was originally released under the Apache 2.0 license with the following copyright: + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + if (s == null || t == null) { + throw new IllegalArgumentException("Strings must not be null"); + } + + int n = s.length(); // length of s + int m = t.length(); // length of t + + if (n == 0) { + return m; + } else if (m == 0) { + return n; + } + + if (n > m) { + // swap the input strings to consume less memory + String tmp = s; + s = t; + t = tmp; + n = m; + m = t.length(); + } + + // instead of maintaining the full matrix in memory, + // we use a sliding window containing 3 lines: + // the current line being written to, and + // the two previous ones. + + int d[] = new int[n + 1]; // current line in the cost matrix + int p1[] = new int[n + 1]; // first line above the current one in the cost matrix + int p2[] = new int[n + 1]; // second line above the current one in the cost matrix + int _d[]; // placeholder to assist in swapping p1, p2 and d + + // indexes into strings s and t + int i; // iterates through s + int j; // iterates through t + + for (i = 0; i <= n; i++) { + p1[i] = i; + } + + for (j = 1; j <= m; j++) { + + // jth character of t + char t_j = t.charAt(j - 1); + d[0] = j; + + for (i = 1; i <= n; i++) { + + char s_i = s.charAt(i - 1); + int cost = s_i == t_j ? 
0 : 1; + + int deletion = d[i - 1] + 1; // cell to the left + 1 + int insertion = p1[i] + 1; // cell to the top + 1 + int substitution = p1[i - 1] + cost; // cell diagonally left and up + cost + + d[i] = Math.min(Math.min(deletion, insertion), substitution); + + // transposition + if (i > 1 && j > 1 && s_i == t.charAt(j - 2) && s.charAt(i - 2) == t_j) { + d[i] = Math.min(d[i], p2[i - 2] + cost); + } + } + + // swap arrays + _d = p2; + p2 = p1; + p1 = d; + d = _d; + } + + // our last action in the above loop was to switch d and p1, so p1 now + // actually has the most recent cost counts + return p1[n]; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java new file mode 100644 index 00000000000..2d5599e5351 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Iterator; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe +public class SinglePageGraphResultSet implements GraphResultSet { + + private final AsyncGraphResultSet onlyPage; + + public SinglePageGraphResultSet(AsyncGraphResultSet onlyPage) { + this.onlyPage = onlyPage; + assert !onlyPage.hasMorePages(); + } + + @NonNull + @Override + public GraphExecutionInfo getExecutionInfo() { + return onlyPage.getExecutionInfo(); + } + + @NonNull + @Override + public Iterator iterator() { + return onlyPage.currentPage().iterator(); + } + + @Override + public void cancel() { + // nothing to do + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java new file mode 100644 index 00000000000..abeb30ce85f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import java.net.InetAddress; +import java.net.InetSocketAddress; + +class AddressFormatter { + + static String nullSafeToString(Object address) { + if (address instanceof InetAddress) { + return nullSafeToString((InetAddress) address); + } else if (address instanceof InetSocketAddress) { + return nullSafeToString((InetSocketAddress) address); + } else if (address instanceof String) { + return address.toString(); + } else { + return ""; + } + } + + static String nullSafeToString(InetAddress inetAddress) { + return inetAddress != null ? inetAddress.getHostAddress() : null; + } + + static String nullSafeToString(InetSocketAddress inetSocketAddress) { + if (inetSocketAddress != null) { + if (inetSocketAddress.isUnresolved()) { + return String.format( + "%s:%s", + nullSafeToString(inetSocketAddress.getHostName()), inetSocketAddress.getPort()); + } else { + return String.format( + "%s:%s", nullSafeToString(inetSocketAddress.getAddress()), inetSocketAddress.getPort()); + } + } + return null; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java new file mode 100644 index 00000000000..034b87dd1c6 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java @@ -0,0 +1,35 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import java.util.HashMap; +import java.util.Map; + +class ConfigAntiPatternsFinder { + Map findAntiPatterns(DseDriverContext driverContext) { + Map antiPatterns = new HashMap<>(); + findSslAntiPattern(driverContext, antiPatterns); + return antiPatterns; + } + + private void findSslAntiPattern( + DseDriverContext driverContext, Map antiPatterns) { + boolean isSslDefined = + driverContext.getConfig().getDefaultProfile().isDefined(SSL_ENGINE_FACTORY_CLASS); + boolean certValidation = + driverContext.getConfig().getDefaultProfile().getBoolean(SSL_HOSTNAME_VALIDATION, false); + if (isSslDefined && !certValidation) { + antiPatterns.put( + "sslWithoutCertValidation", + "Client-to-node encryption is enabled but server certificate validation is disabled"); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java new file mode 100644 index 00000000000..0ba645817a5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +class DataCentersFinder { + + Set getDataCenters(DseDriverContext driverContext) { + return getDataCenters( + driverContext.getMetadataManager().getMetadata().getNodes().values(), + driverContext.getConfig().getDefaultProfile()); + } + + @VisibleForTesting + Set getDataCenters(Collection nodes, DriverExecutionProfile executionProfile) { + + int remoteConnectionsLength = executionProfile.getInt(CONNECTION_POOL_REMOTE_SIZE); + + Set dataCenters = new HashSet<>(); + for (Node n : nodes) { + NodeDistance distance = n.getDistance(); + + if (distance.equals(NodeDistance.LOCAL) + || (distance.equals(NodeDistance.REMOTE) && remoteConnectionsLength > 0)) { + dataCenters.add(n.getDatacenter()); + } + } + return dataCenters; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java new file mode 100644 index 00000000000..a28fea6cb78 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.api.core.config.DseDriverOption.GRAPH_TRAVERSAL_SOURCE; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.PackageUtil.ClassSettingDetails; +import com.datastax.dse.driver.internal.core.insights.schema.LoadBalancingInfo; +import com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; +import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +class ExecutionProfilesInfoFinder { + Map getExecutionProfilesInfo(DseDriverContext driverContext) { + + SpecificExecutionProfile defaultProfile = + mapToSpecificProfile(driverContext.getConfig().getDefaultProfile()); + + return driverContext.getConfig().getProfiles().entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> { + if (isNotDefaultProfile(e)) { + SpecificExecutionProfile specificExecutionProfile = + mapToSpecificProfile(e.getValue()); + return retainOnlyDifferentFieldsFromSpecificProfile( + defaultProfile, specificExecutionProfile); + } else { + return defaultProfile; + } + })); + } + + private boolean isNotDefaultProfile(Map.Entry e) { + return !e.getKey().equals("default"); + } + + private SpecificExecutionProfile retainOnlyDifferentFieldsFromSpecificProfile( + SpecificExecutionProfile defaultProfile, SpecificExecutionProfile specificExecutionProfile) { + Integer readTimeout = + getIfDifferentOrReturnNull( + defaultProfile, specificExecutionProfile, 
SpecificExecutionProfile::getReadTimeout); + LoadBalancingInfo loadBalancingInfo = + getIfDifferentOrReturnNull( + defaultProfile, specificExecutionProfile, SpecificExecutionProfile::getLoadBalancing); + + SpeculativeExecutionInfo speculativeExecutionInfo = + getIfDifferentOrReturnNull( + defaultProfile, + specificExecutionProfile, + SpecificExecutionProfile::getSpeculativeExecution); + + String consistency = + getIfDifferentOrReturnNull( + defaultProfile, specificExecutionProfile, SpecificExecutionProfile::getConsistency); + + String serialConsistency = + getIfDifferentOrReturnNull( + defaultProfile, + specificExecutionProfile, + SpecificExecutionProfile::getSerialConsistency); + + Map graphOptions = + getIfDifferentOrReturnNull( + defaultProfile, specificExecutionProfile, SpecificExecutionProfile::getGraphOptions); + + return new SpecificExecutionProfile( + readTimeout, + loadBalancingInfo, + speculativeExecutionInfo, + consistency, + serialConsistency, + graphOptions); + } + + private T getIfDifferentOrReturnNull( + SpecificExecutionProfile defaultProfile, + SpecificExecutionProfile profile, + Function valueExtractor) { + T defaultProfileValue = valueExtractor.apply(defaultProfile); + T specificProfileValue = valueExtractor.apply(profile); + if (defaultProfileValue.equals(specificProfileValue)) { + return null; + } else { + return specificProfileValue; + } + } + + private SpecificExecutionProfile mapToSpecificProfile( + DriverExecutionProfile driverExecutionProfile) { + return new SpecificExecutionProfile( + (int) driverExecutionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT).toMillis(), + getLoadBalancingInfo(driverExecutionProfile), + getSpeculativeExecutionInfo(driverExecutionProfile), + driverExecutionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY), + driverExecutionProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY), + getGraphOptions(driverExecutionProfile)); + } + + private SpeculativeExecutionInfo 
getSpeculativeExecutionInfo( + DriverExecutionProfile driverExecutionProfile) { + Map options = new LinkedHashMap<>(); + + putIfExists( + options, + "maxSpeculativeExecutions", + DefaultDriverOption.SPECULATIVE_EXECUTION_MAX, + driverExecutionProfile); + putIfExists( + options, "delay", DefaultDriverOption.SPECULATIVE_EXECUTION_DELAY, driverExecutionProfile); + + ClassSettingDetails speculativeExecutionDetails = + PackageUtil.getSpeculativeExecutionDetails( + driverExecutionProfile.getString( + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS)); + return new SpeculativeExecutionInfo( + speculativeExecutionDetails.getClassName(), + options, + speculativeExecutionDetails.getFullPackage()); + } + + private void putIfExists( + Map options, + String key, + DefaultDriverOption option, + DriverExecutionProfile executionProfile) { + if (executionProfile.isDefined(option)) { + options.put(key, executionProfile.getInt(option)); + } + } + + private LoadBalancingInfo getLoadBalancingInfo(DriverExecutionProfile driverExecutionProfile) { + Map options = new LinkedHashMap<>(); + options.put( + "localDataCenter", + driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)); + options.put( + "filterFunction", + driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS)); + ClassSettingDetails loadBalancingDetails = + PackageUtil.getLoadBalancingDetails( + driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS)); + return new LoadBalancingInfo( + loadBalancingDetails.getClassName(), options, loadBalancingDetails.getFullPackage()); + } + + private Map getGraphOptions(DriverExecutionProfile driverExecutionProfile) { + Map graphOptionsMap = new HashMap<>(); + String graphTraversalSource = driverExecutionProfile.getString(GRAPH_TRAVERSAL_SOURCE, null); + if (graphTraversalSource != null) { + graphOptionsMap.put("source", graphTraversalSource); + } + return graphOptionsMap; + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java new file mode 100644 index 00000000000..96c606eb869 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -0,0 +1,475 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_NAME_KEY; +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_VERSION_KEY; +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.CLIENT_ID_KEY; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.AUTH_PROVIDER_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.HEARTBEAT_INTERVAL; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.PROTOCOL_COMPRESSION; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; +import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_NAME_KEY; +import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_VERSION_KEY; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.DseSessionBuilder; +import 
com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.PackageUtil.ClassSettingDetails; +import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; +import com.datastax.dse.driver.internal.core.insights.exceptions.InsightEventFormatException; +import com.datastax.dse.driver.internal.core.insights.schema.AuthProviderType; +import com.datastax.dse.driver.internal.core.insights.schema.Insight; +import com.datastax.dse.driver.internal.core.insights.schema.InsightMetadata; +import com.datastax.dse.driver.internal.core.insights.schema.InsightType; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsStartupData; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsStatusData; +import com.datastax.dse.driver.internal.core.insights.schema.PoolSizeByHostDistance; +import com.datastax.dse.driver.internal.core.insights.schema.SSL; +import com.datastax.dse.driver.internal.core.insights.schema.SessionStateForNode; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.control.ControlConnection; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.protocol.internal.request.query.QueryOptions; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import 
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Reports driver telemetry to DSE Insights by invoking the {@code InsightsRpc.reportInsight} RPC
 * over the control connection: a one-shot "driver.startup" event plus periodic "driver.status"
 * events.
 *
 * <p>NOTE(review): generic type parameters in this class were reconstructed from context (the
 * extracted patch had lost them) — verify against the original source.
 */
public class InsightsClient {
  private static final Logger LOGGER = LoggerFactory.getLogger(InsightsClient.class);
  private static final String STARTUP_MESSAGE_NAME = "driver.startup";
  private static final String STATUS_MESSAGE_NAME = "driver.status";
  private static final String REPORT_INSIGHT_RPC = "CALL InsightsRpc.reportInsight(?)";
  private static final Map<String, String> TAGS = ImmutableMap.of("language", "java");
  private static final String STARTUP_VERSION_1_ID = "v1";
  private static final String STATUS_VERSION_1_ID = "v1";
  private static final ObjectMapper objectMapper = new ObjectMapper();
  // Status sends repeat forever; cap error logging so a broken endpoint does not spam the log.
  private static final int MAX_NUMBER_OF_STATUS_ERROR_LOGS = 5;
  static final String DEFAULT_JAVA_APPLICATION = "Default Java Application";

  private final ControlConnection controlConnection;
  // Random per-client session id, reported in both startup and status events.
  private final String id = UUID.randomUUID().toString();
  private final InsightsConfiguration insightsConfiguration;
  private final AtomicInteger numberOfStatusEventErrors = new AtomicInteger();

  private final DseDriverContext driverContext;
  // Injected clock (epoch millis) so tests can control event timestamps.
  private final Supplier<Long> timestampSupplier;
  private final PlatformInfoFinder platformInfoFinder;
  private final ReconnectionPolicyInfoFinder reconnectionPolicyInfoInfoFinder;
  private final ExecutionProfilesInfoFinder executionProfilesInfoFinder;
  private final ConfigAntiPatternsFinder configAntiPatternsFinder;
  private final DataCentersFinder dataCentersFinder;
  // Stack trace captured at session-build time; used to guess the application name.
  private StackTraceElement[] initCallStackTrace;
  private ScheduledFuture<?> scheduleInsightsTask;

  /** Factory wiring production collaborators; tests use the package-private constructor. */
  public static InsightsClient createInsightsClient(
      InsightsConfiguration insightsConfiguration,
      DseDriverContext dseDriverContext,
      StackTraceElement[] initCallStackTrace) {
    DataCentersFinder dataCentersFinder = new DataCentersFinder();
    return new InsightsClient(
        dseDriverContext,
        () -> new Date().getTime(),
        insightsConfiguration,
        new PlatformInfoFinder(),
        new ReconnectionPolicyInfoFinder(),
        new ExecutionProfilesInfoFinder(),
        new ConfigAntiPatternsFinder(),
        dataCentersFinder,
        initCallStackTrace);
  }

  InsightsClient(
      DseDriverContext driverContext,
      Supplier<Long> timestampSupplier,
      InsightsConfiguration insightsConfiguration,
      PlatformInfoFinder platformInfoFinder,
      ReconnectionPolicyInfoFinder reconnectionPolicyInfoInfoFinder,
      ExecutionProfilesInfoFinder executionProfilesInfoFinder,
      ConfigAntiPatternsFinder configAntiPatternsFinder,
      DataCentersFinder dataCentersFinder,
      StackTraceElement[] initCallStackTrace) {
    this.driverContext = driverContext;
    this.controlConnection = driverContext.getControlConnection();
    this.timestampSupplier = timestampSupplier;
    this.insightsConfiguration = insightsConfiguration;
    this.platformInfoFinder = platformInfoFinder;
    this.reconnectionPolicyInfoInfoFinder = reconnectionPolicyInfoInfoFinder;
    this.executionProfilesInfoFinder = executionProfilesInfoFinder;
    this.configAntiPatternsFinder = configAntiPatternsFinder;
    this.dataCentersFinder = dataCentersFinder;
    this.initCallStackTrace = initCallStackTrace;
  }

  /**
   * Sends the one-shot startup event. Returns a completed future (no-op) when reporting is
   * disabled or the cluster does not support Insights; failures are logged and not rethrown.
   */
  public CompletionStage<Void> sendStartupMessage() {
    if (!shouldSendEvent()) {
      return CompletableFuture.completedFuture(null);
    }
    final String startupMessage = createStartupMessage();
    CompletionStage<Void> result = sendJsonMessage(startupMessage);

    return result.whenComplete(
        (aVoid, throwable) -> {
          if (throwable != null) {
            LOGGER.debug(
                "Error while sending: "
                    + trimToFirst500characters(startupMessage)
                    + " to insights. Aborting sending all future: "
                    + STARTUP_MESSAGE_NAME
                    + " events",
                throwable);
          }
        });
  }

  // Keeps log lines bounded: the serialized event can be arbitrarily large.
  private static String trimToFirst500characters(String startupMessage) {
    return startupMessage.substring(0, Math.min(startupMessage.length(), 500));
  }

  /** Schedules the periodic status event; does nothing when reporting is disabled. */
  public void scheduleStatusMessageSend() {
    if (!shouldSendEvent()) {
      return;
    }
    scheduleInsightsTask =
        scheduleInsightsTask(
            insightsConfiguration.getStatusEventDelayMillis(),
            insightsConfiguration.getExecutor(),
            this::sendStatusMessage);
  }

  /** Cancels the periodic status task (an in-flight send is allowed to finish). */
  public void shutdown() {
    if (scheduleInsightsTask != null) {
      scheduleInsightsTask.cancel(false);
    }
  }

  @VisibleForTesting
  public CompletionStage<Void> sendStatusMessage() {
    if (!shouldSendEvent()) {
      return CompletableFuture.completedFuture(null);
    }
    final String statusMessage = createStatusMessage();
    CompletionStage<Void> result = sendJsonMessage(statusMessage);

    return result.whenComplete(
        (aVoid, throwable) -> {
          if (throwable != null) {
            // Only log the first few failures; the task keeps running regardless.
            if (numberOfStatusEventErrors.getAndIncrement() < MAX_NUMBER_OF_STATUS_ERROR_LOGS) {
              LOGGER.debug(
                  "Error while sending: "
                      + trimToFirst500characters(statusMessage)
                      + " to insights.",
                  throwable);
            }
          }
        });
  }

  // Executes the RPC on the control-connection channel, reusing the control-connection timeout.
  private CompletionStage<Void> sendJsonMessage(String jsonMessage) {

    QueryOptions queryOptions = createQueryOptionsWithJson(jsonMessage);
    String logPrefix = driverContext.getSessionName();
    Duration timeout =
        driverContext
            .getConfig()
            .getDefaultProfile()
            .getDuration(DefaultDriverOption.CONTROL_CONNECTION_TIMEOUT);
    LOGGER.debug("sending JSON message: {}", jsonMessage);

    Query query = new Query(REPORT_INSIGHT_RPC, queryOptions);
    return AdminRequestHandler.call(controlConnection.channel(), query, timeout, logPrefix).start();
  }

  // Binds the JSON payload as the single positional value of the RPC query.
  private QueryOptions createQueryOptionsWithJson(String json) {
    TypeCodec<String> codec =
        driverContext.getCodecRegistry().codecFor(DataTypes.TEXT, String.class);
    ByteBuffer startupMessageSerialized = codec.encode(json, DseProtocolVersion.DSE_V2);
    return new QueryOptions(
        QueryOptions.DEFAULT.consistency,
        Collections.singletonList(startupMessageSerialized),
        QueryOptions.DEFAULT.namedValues,
        QueryOptions.DEFAULT.skipMetadata,
        QueryOptions.DEFAULT.pageSize,
        QueryOptions.DEFAULT.pagingState,
        QueryOptions.DEFAULT.serialConsistency,
        QueryOptions.DEFAULT.defaultTimestamp,
        QueryOptions.DEFAULT.keyspace);
  }

  // Gate: reporting must be enabled AND every known node must run an Insights-capable DSE.
  private boolean shouldSendEvent() {
    return insightsConfiguration.isMonitorReportingEnabled()
        && InsightsSupportVerifier.supportsInsights(
            driverContext.getMetadataManager().getMetadata().getNodes().values());
  }

  @VisibleForTesting
  String createStartupMessage() {
    InsightMetadata insightMetadata = createMetadata(STARTUP_MESSAGE_NAME, STARTUP_VERSION_1_ID);
    InsightsStartupData data = createStartupData();

    try {
      return objectMapper.writeValueAsString(new Insight<>(insightMetadata, data));
    } catch (JsonProcessingException e) {
      throw new InsightEventFormatException("Problem when creating: " + STARTUP_MESSAGE_NAME, e);
    }
  }

  @VisibleForTesting
  String createStatusMessage() {
    InsightMetadata insightMetadata = createMetadata(STATUS_MESSAGE_NAME, STATUS_VERSION_1_ID);
    InsightsStatusData data = createStatusData();

    try {
      return objectMapper.writeValueAsString(new Insight<>(insightMetadata, data));
    } catch (JsonProcessingException e) {
      throw new InsightEventFormatException("Problem when creating: " + STATUS_MESSAGE_NAME, e);
    }
  }

  private InsightsStatusData createStatusData() {
    Map<String, String> startupOptions = driverContext.getStartupOptions();
    return InsightsStatusData.builder()
        .withClientId(getClientId(startupOptions))
        .withSessionId(id)
        .withControlConnection(getControlConnectionSocketAddress())
        .withConnectedNodes(getConnectedNodes())
        .build();
  }

  // Snapshot of per-node pool state, keyed by the node's resolved socket address.
  private Map<String, SessionStateForNode> getConnectedNodes() {
    Map<Node, ChannelPool> pools = driverContext.getPoolManager().getPools();
    return pools.entrySet().stream()
        .collect(
            Collectors.toMap(
                entry -> AddressFormatter.nullSafeToString(entry.getKey().getEndPoint().resolve()),
                this::constructSessionStateForNode));
  }

  private SessionStateForNode constructSessionStateForNode(Map.Entry<Node, ChannelPool> entry) {
    return new SessionStateForNode(
        entry.getKey().getOpenConnections(), entry.getValue().getInFlight());
  }

  private InsightsStartupData createStartupData() {
    Map<String, String> startupOptions = driverContext.getStartupOptions();
    return InsightsStartupData.builder()
        .withClientId(getClientId(startupOptions))
        .withSessionId(id)
        .withApplicationName(getApplicationName(startupOptions))
        .withApplicationVersion(getApplicationVersion(startupOptions))
        .withDriverName(getDriverName(startupOptions))
        .withDriverVersion(getDriverVersion(startupOptions))
        .withContactPoints(
            getResolvedContactPoints(
                driverContext.getMetadataManager().getContactPoints().stream()
                    .map(n -> n.getEndPoint().resolve())
                    .filter(InetSocketAddress.class::isInstance)
                    .map(InetSocketAddress.class::cast)
                    .collect(Collectors.toSet())))
        .withInitialControlConnection(getControlConnectionSocketAddress())
        .withProtocolVersion(driverContext.getProtocolVersion().getCode())
        .withLocalAddress(getLocalAddress())
        .withExecutionProfiles(executionProfilesInfoFinder.getExecutionProfilesInfo(driverContext))
        .withPoolSizeByHostDistance(getPoolSizeByHostDistance())
        .withHeartbeatInterval(
            driverContext
                .getConfig()
                .getDefaultProfile()
                .getDuration(HEARTBEAT_INTERVAL)
                .toMillis())
        .withCompression(
            driverContext.getConfig().getDefaultProfile().getString(PROTOCOL_COMPRESSION, "none"))
        .withReconnectionPolicy(
            reconnectionPolicyInfoInfoFinder.getReconnectionPolicyInfo(
                driverContext.getReconnectionPolicy(),
                driverContext.getConfig().getDefaultProfile()))
        .withSsl(getSsl())
        .withAuthProvider(getAuthProvider())
        .withOtherOptions(getOtherOptions())
        .withPlatformInfo(platformInfoFinder.getInsightsPlatformInfo())
        .withConfigAntiPatterns(configAntiPatternsFinder.findAntiPatterns(driverContext))
        .withPeriodicStatusInterval(getPeriodicStatusInterval())
        .withHostName(getLocalHostName())
        .withApplicationNameWasGenerated(isApplicationNameGenerated(startupOptions))
        .withDataCenters(dataCentersFinder.getDataCenters(driverContext))
        .build();
  }

  private AuthProviderType getAuthProvider() {
    String authProviderClassName =
        driverContext
            .getConfig()
            .getDefaultProfile()
            .getString(AUTH_PROVIDER_CLASS, "NoAuthProvider");
    ClassSettingDetails authProviderDetails =
        PackageUtil.getAuthProviderDetails(authProviderClassName);
    return new AuthProviderType(
        authProviderDetails.getClassName(), authProviderDetails.getFullPackage());
  }

  // Reported in seconds even though the configuration is in milliseconds.
  private long getPeriodicStatusInterval() {
    return TimeUnit.MILLISECONDS.toSeconds(insightsConfiguration.getStatusEventDelayMillis());
  }

  /** Groups contact-point address strings by host name; empty map for null input. */
  @VisibleForTesting
  static Map<String, List<String>> getResolvedContactPoints(Set<InetSocketAddress> contactPoints) {
    if (contactPoints == null) {
      return Collections.emptyMap();
    }
    return contactPoints.stream()
        .collect(
            Collectors.groupingBy(
                InetSocketAddress::getHostName,
                Collectors.mapping(AddressFormatter::nullSafeToString, Collectors.toList())));
  }

  private String getDriverVersion(Map<String, String> startupOptions) {
    return startupOptions.get(DRIVER_VERSION_KEY);
  }

  private String getDriverName(Map<String, String> startupOptions) {
    return startupOptions.get(DRIVER_NAME_KEY);
  }

  private String getClientId(Map<String, String> startupOptions) {
    return startupOptions.get(CLIENT_ID_KEY);
  }

  private boolean isApplicationNameGenerated(Map<String, String> startupOptions) {
    return startupOptions.get(APPLICATION_NAME_KEY) == null;
  }

  private String getApplicationVersion(Map<String, String> startupOptions) {
    String applicationVersion = startupOptions.get(APPLICATION_VERSION_KEY);
    if (applicationVersion == null) {
      return "";
    }
    return applicationVersion;
  }

  // Falls back to guessing the application from the session-creation stack trace.
  private String getApplicationName(Map<String, String> startupOptions) {
    String applicationName = startupOptions.get(APPLICATION_NAME_KEY);
    if (applicationName == null || applicationName.isEmpty()) {
      return getClusterCreateCaller(initCallStackTrace);
    }
    return applicationName;
  }

  /**
   * Walks the captured stack trace for the first frame *after* the driver's session-builder
   * frames; that frame's class name is taken as the application name.
   */
  @VisibleForTesting
  static String getClusterCreateCaller(StackTraceElement[] stackTrace) {
    for (int i = 0; i < stackTrace.length - 1; i++) {
      if (isClusterStackTrace(stackTrace[i])) {
        int nextElement = i + 1;
        if (!isClusterStackTrace(stackTrace[nextElement])) {
          return stackTrace[nextElement].getClassName();
        }
      }
    }
    return DEFAULT_JAVA_APPLICATION;
  }

  private static boolean isClusterStackTrace(StackTraceElement stackTraceElement) {
    return stackTraceElement.getClassName().equals(DseDriverContext.class.getName())
        || stackTraceElement.getClassName().equals(DseSessionBuilder.class.getName())
        || stackTraceElement.getClassName().equals(SessionBuilder.class.getName());
  }

  private String getLocalHostName() {
    try {
      return InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
      LOGGER.warn("Can not resolve the name of a host, returning null", e);
      return null;
    }
  }

  private Map<String, Object> getOtherOptions() {
    return Collections.emptyMap(); // todo
  }

  private SSL getSsl() {
    boolean isSslDefined =
        driverContext.getConfig().getDefaultProfile().isDefined(SSL_ENGINE_FACTORY_CLASS);
    boolean certValidation =
        driverContext.getConfig().getDefaultProfile().getBoolean(SSL_HOSTNAME_VALIDATION, false);
    return new SSL(isSslDefined, certValidation);
  }

  private PoolSizeByHostDistance getPoolSizeByHostDistance() {

    return new PoolSizeByHostDistance(
        driverContext.getConfig().getDefaultProfile().getInt(CONNECTION_POOL_LOCAL_SIZE),
        driverContext.getConfig().getDefaultProfile().getInt(CONNECTION_POOL_REMOTE_SIZE),
        0);
  }

  private String getControlConnectionSocketAddress() {
    SocketAddress controlConnectionAddress = controlConnection.channel().getEndPoint().resolve();
    return AddressFormatter.nullSafeToString(controlConnectionAddress);
  }

  // Local (client-side) address of the control connection, or null if not an InetSocketAddress.
  private String getLocalAddress() {
    SocketAddress controlConnectionLocalAddress = controlConnection.channel().localAddress();
    if (controlConnectionLocalAddress instanceof InetSocketAddress) {
      return AddressFormatter.nullSafeToString(
          ((InetSocketAddress) controlConnectionLocalAddress).getAddress());
    }
    return null;
  }

  private InsightMetadata createMetadata(String messageName, String messageVersion) {
    return new InsightMetadata(
        messageName, timestampSupplier.get(), TAGS, InsightType.EVENT, messageVersion);
  }

  /**
   * Schedules {@code runnable} at a fixed delay; the first run is jittered 0-10% earlier than the
   * configured delay so a fleet of clients does not report in lockstep.
   */
  @VisibleForTesting
  static ScheduledFuture<?> scheduleInsightsTask(
      long statusEventDelayMillis,
      ScheduledExecutorService scheduledTasksExecutor,
      Runnable runnable) {
    long initialDelay =
        (long) Math.floor(statusEventDelayMillis - zeroToTenPercentRandom(statusEventDelayMillis));
    return scheduledTasksExecutor.scheduleWithFixedDelay(
        runnable, initialDelay, statusEventDelayMillis, TimeUnit.MILLISECONDS);
  }

  private static double zeroToTenPercentRandom(long statusEventDelayMillis) {
    return 0.1 * statusEventDelayMillis * Math.random();
  }
}
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import java.util.Collection; + +class InsightsSupportVerifier { + private static final Version minDse6Version = Version.parse("6.0.5"); + private static final Version minDse51Version = Version.parse("5.1.13"); + private static final Version dse600Version = Version.parse("6.0.0"); + + static boolean supportsInsights(Collection nodes) { + assert minDse6Version != null; + assert dse600Version != null; + assert minDse51Version != null; + if (nodes.isEmpty()) return false; + + for (Node node : nodes) { + Object version = node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (version == null) { + return false; + } + Version dseVersion = (Version) version; + if (!(dseVersion.compareTo(minDse6Version) >= 0 + || (dseVersion.compareTo(dse600Version) < 0 + && dseVersion.compareTo(minDse51Version) >= 0))) { + return false; + } + } + return true; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java new file mode 100644 index 00000000000..b1d3c10505b --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
/**
 * Helpers to split fully-qualified class-name settings (e.g. the configured auth provider or
 * load-balancing policy) into a simple class name and a package, substituting the driver's
 * default package when the setting has no package part.
 */
class PackageUtil {
  static final String DEFAULT_SPECULATIVE_EXECUTION_PACKAGE =
      "com.datastax.oss.driver.internal.core.specex";
  static final String DEFAULT_LOAD_BALANCING_PACKAGE =
      "com.datastax.oss.driver.internal.core.loadbalancing";
  static final String DEFAULT_AUTH_PROVIDER_PACKAGE = "com.datastax.oss.driver.internal.core.auth";
  private static final Pattern PACKAGE_SPLIT_REGEX = Pattern.compile("\\.");

  /** Utility class; not instantiable. */
  private PackageUtil() {}

  /** Returns the package name of {@code tClass}, or "" for classes in the default package. */
  static String getNamespace(Class<?> tClass) {
    String namespace = "";
    Package packageInfo = tClass.getPackage();
    if (packageInfo != null) {
      namespace = packageInfo.getName();
    }
    return namespace;
  }

  static ClassSettingDetails getSpeculativeExecutionDetails(String classSetting) {
    return getClassSettingDetails(classSetting, DEFAULT_SPECULATIVE_EXECUTION_PACKAGE);
  }

  static ClassSettingDetails getLoadBalancingDetails(String classSetting) {
    return getClassSettingDetails(classSetting, DEFAULT_LOAD_BALANCING_PACKAGE);
  }

  static ClassSettingDetails getAuthProviderDetails(String classSetting) {
    return getClassSettingDetails(classSetting, DEFAULT_AUTH_PROVIDER_PACKAGE);
  }

  private static ClassSettingDetails getClassSettingDetails(
      String classSetting, String packageName) {
    String className = getClassName(classSetting);
    String fullPackage = getFullPackageOrDefault(classSetting, packageName);
    return new ClassSettingDetails(className, fullPackage);
  }

  // Visible for testing.
  /** Last dot-separated segment of {@code classSetting}; "" when the split yields nothing. */
  static String getClassName(String classSetting) {
    String[] split = PACKAGE_SPLIT_REGEX.split(classSetting);
    if (split.length == 0) {
      return "";
    }
    return split[split.length - 1];
  }

  // Visible for testing.
  /** All segments but the last, rejoined with dots; {@code defaultValue} if there is no package. */
  static String getFullPackageOrDefault(String classSetting, String defaultValue) {
    String[] split = PACKAGE_SPLIT_REGEX.split(classSetting);
    if (split.length <= 1) {
      return defaultValue;
    }
    // String.join replaces the previous shaded-Guava Joiner with identical output.
    return String.join(".", Arrays.copyOf(split, split.length - 1));
  }

  /** Immutable (className, fullPackage) pair produced by the methods above. */
  static class ClassSettingDetails {
    private final String className;
    private final String fullPackage;

    ClassSettingDetails(String className, String fullPackage) {
      this.className = className;
      this.fullPackage = fullPackage;
    }

    String getClassName() {
      return className;
    }

    String getFullPackage() {
      return fullPackage;
    }
  }
}
/**
 * Collects platform information (OS, CPU, runtime dependency versions) for Insights startup
 * events. Compile-time dependency lists are read from generated {@code deps.txt} resources;
 * runtime versions are cross-checked against each dependency's Maven {@code pom.properties} on
 * the classpath.
 *
 * <p>NOTE(review): generic type parameters in this class were reconstructed from context (the
 * extracted patch had lost them) — verify against the original source.
 */
class PlatformInfoFinder {
  private static final String MAVEN_IGNORE_LINE = "The following files have been resolved:";
  private static final Pattern DEPENDENCY_SPLIT_REGEX = Pattern.compile(":");
  // Reported when a dependency's runtime version cannot be confirmed from pom.properties.
  static final String UNVERIFIED_RUNTIME_VERSION = "UNVERIFIED";
  public static final String UNKNOWN = "UNKNOWN";
  private final Function<DependencyFromFile, URL> propertiesUrlProvider;

  // Default provider: looks up META-INF/maven/<groupId>/<artifactId>/pom.properties on the
  // context class loader (falling back to this class's loader).
  private static final Function<DependencyFromFile, URL> M2_PROPERTIES_PROVIDER =
      d -> {
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        if (contextClassLoader == null) {
          contextClassLoader = PlatformInfoFinder.class.getClassLoader();
        }
        return contextClassLoader.getResource(
            "META-INF/maven/" + d.groupId + "/" + d.artifactId + "/pom.properties");
      };

  PlatformInfoFinder() {
    this(M2_PROPERTIES_PROVIDER);
  }

  @VisibleForTesting
  PlatformInfoFinder(Function<DependencyFromFile, URL> pomPropertiesUrlProvider) {
    this.propertiesUrlProvider = pomPropertiesUrlProvider;
  }

  /** Assembles the full platform report: OS, CPUs, and per-module dependency versions. */
  InsightsPlatformInfo getInsightsPlatformInfo() {
    OS os = getOsInfo();
    CPUS cpus = getCpuInfo();
    Map<String, Map<String, RuntimeAndCompileTimeVersions>> runtimeInfo = getRuntimeInfo();

    return new InsightsPlatformInfo(os, cpus, runtimeInfo);
  }

  // Module name -> (dependency coordinate -> versions); modules with no deps file are omitted.
  private Map<String, Map<String, RuntimeAndCompileTimeVersions>> getRuntimeInfo() {
    Map<String, RuntimeAndCompileTimeVersions> coreDeps =
        fetchDependenciesFromFile(
            this.getClass().getResourceAsStream("/com/datastax/dse/driver/internal/deps.txt"));

    Map<String, RuntimeAndCompileTimeVersions> queryBuilderDeps =
        fetchDependenciesFromFile(
            this.getClass()
                .getResourceAsStream("/com/datastax/dse/driver/internal/querybuilder/deps.txt"));

    Map<String, Map<String, RuntimeAndCompileTimeVersions>> runtimeDependencies =
        new LinkedHashMap<>();
    putIfNonEmpty(coreDeps, runtimeDependencies, "core");
    putIfNonEmpty(queryBuilderDeps, runtimeDependencies, "query-builder");
    addJavaVersion(runtimeDependencies);
    return runtimeDependencies;
  }

  private void putIfNonEmpty(
      Map<String, RuntimeAndCompileTimeVersions> moduleDependencies,
      Map<String, Map<String, RuntimeAndCompileTimeVersions>> runtimeDependencies,
      String moduleName) {
    if (!moduleDependencies.isEmpty()) {
      runtimeDependencies.put(moduleName, moduleDependencies);
    }
  }

  // Adds a "java" pseudo-module with the JRE's version/vendor/title manifest attributes.
  @VisibleForTesting
  void addJavaVersion(Map<String, Map<String, RuntimeAndCompileTimeVersions>> runtimeDependencies) {
    Package javaPackage = Runtime.class.getPackage();
    Map<String, RuntimeAndCompileTimeVersions> javaDependencies = new LinkedHashMap<>();
    javaDependencies.put(
        "version", toSameRuntimeAndCompileVersion(javaPackage.getImplementationVersion()));
    javaDependencies.put(
        "vendor", toSameRuntimeAndCompileVersion(javaPackage.getImplementationVendor()));
    javaDependencies.put(
        "title", toSameRuntimeAndCompileVersion(javaPackage.getImplementationTitle()));
    putIfNonEmpty(javaDependencies, runtimeDependencies, "java");
  }

  private RuntimeAndCompileTimeVersions toSameRuntimeAndCompileVersion(String version) {
    return new RuntimeAndCompileTimeVersions(version, version, false);
  }

  /**
   * Fetches dependencies from a deps file. Each line has the Maven dependency-list format:
   * {@code com.organization:artifactId:jar:1.2.0} or
   * {@code com.organization:artifactId:jar:native:1.2.0}.
   *
   * <p>For such a file the output maps "com.organization:artifactId" to
   * {@code {"runtimeVersion":"1.2.0","compileVersion":"1.2.0","optional":false}}. Duplicates are
   * collapsed; if two lines share the same groupId:artifactId it is not deterministic which
   * version wins. On read errors this method fails silently and returns whatever was parsed so
   * far (possibly empty), since telemetry must never break the driver.
   */
  @VisibleForTesting
  Map<String, RuntimeAndCompileTimeVersions> fetchDependenciesFromFile(InputStream inputStream) {
    Map<String, RuntimeAndCompileTimeVersions> dependencies = new LinkedHashMap<>();
    if (inputStream == null) {
      return dependencies;
    }
    try {
      List<DependencyFromFile> dependenciesFromFile = extractMavenDependenciesFromFile(inputStream);
      for (DependencyFromFile d : dependenciesFromFile) {
        dependencies.put(formatDependencyName(d), getRuntimeAndCompileVersion(d));
      }
    } catch (IOException e) {
      return dependencies;
    }
    return dependencies;
  }

  // Runtime version comes from pom.properties on the classpath; any lookup failure degrades to
  // UNVERIFIED rather than failing the report.
  private RuntimeAndCompileTimeVersions getRuntimeAndCompileVersion(DependencyFromFile d) {
    URL url = propertiesUrlProvider.apply(d);
    if (url == null) {
      return new RuntimeAndCompileTimeVersions(
          UNVERIFIED_RUNTIME_VERSION, d.getVersion(), d.isOptional());
    }
    Properties properties = new Properties();
    try {
      properties.load(url.openStream());
    } catch (IOException e) {
      return new RuntimeAndCompileTimeVersions(
          UNVERIFIED_RUNTIME_VERSION, d.getVersion(), d.isOptional());
    }
    Object version = properties.get("version");
    if (version == null) {
      return new RuntimeAndCompileTimeVersions(
          UNVERIFIED_RUNTIME_VERSION, d.getVersion(), d.isOptional());
    } else {
      return new RuntimeAndCompileTimeVersions(version.toString(), d.getVersion(), d.isOptional());
    }
  }

  private String formatDependencyName(DependencyFromFile d) {
    return String.format("%s:%s", d.getGroupId(), d.getArtifactId());
  }

  private List<DependencyFromFile> extractMavenDependenciesFromFile(InputStream inputStream)
      throws IOException {
    List<DependencyFromFile> dependenciesFromFile = new ArrayList<>();
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
    for (String line; (line = reader.readLine()) != null; ) {
      if (lineWithDependencyInfo(line)) {
        dependenciesFromFile.add(extractDependencyFromLine(line.trim()));
      }
    }
    return dependenciesFromFile;
  }

  // Colon-separated Maven coordinates; 6 fields when a classifier (e.g. "native") is present.
  private DependencyFromFile extractDependencyFromLine(String line) {
    String[] split = DEPENDENCY_SPLIT_REGEX.split(line);
    if (split.length == 6) { // case for i.e.: com.github.jnr:jffi:jar:native:1.2.16:compile
      return new DependencyFromFile(split[0], split[1], split[4], checkIsOptional(split[5]));
    } else { // case for normal: org.ow2.asm:asm:jar:5.0.3:compile
      return new DependencyFromFile(split[0], split[1], split[3], checkIsOptional(split[4]));
    }
  }

  private boolean checkIsOptional(String scope) {
    return scope.contains("(optional)");
  }

  // Skips the Maven header line and blank lines.
  private boolean lineWithDependencyInfo(String line) {
    return (!line.equals(MAVEN_IGNORE_LINE) && !line.isEmpty());
  }

  private CPUS getCpuInfo() {
    int numberOfProcessors = Runtime.getRuntime().availableProcessors();
    // CPU model requires native access; fall back to UNKNOWN when JNR is unavailable.
    String model = Native.isPlatformAvailable() ? Native.getCPU() : UNKNOWN;
    return new CPUS(numberOfProcessors, model);
  }

  private OS getOsInfo() {
    String osName = System.getProperty("os.name");
    String osVersion = System.getProperty("os.version");
    String osArch = System.getProperty("os.arch");
    return new OS(osName, osVersion, osArch);
  }

  /** Value object for one parsed deps.txt line (groupId, artifactId, version, optional flag). */
  static class DependencyFromFile {
    private final String groupId;
    private final String artifactId;
    private final String version;
    private boolean optional;

    DependencyFromFile(String groupId, String artifactId, String version, boolean optional) {
      this.groupId = groupId;
      this.artifactId = artifactId;
      this.version = version;
      this.optional = optional;
    }

    String getGroupId() {
      return groupId;
    }

    String getArtifactId() {
      return artifactId;
    }

    String getVersion() {
      return version;
    }

    boolean isOptional() {
      return optional;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof DependencyFromFile)) {
        return false;
      }
      DependencyFromFile that = (DependencyFromFile) o;
      return optional == that.optional
          && Objects.equals(groupId, that.groupId)
          && Objects.equals(artifactId, that.artifactId)
          && Objects.equals(version, that.version);
    }

    @Override
    public int hashCode() {
      return Objects.hash(groupId, artifactId, version, optional);
    }

    @Override
    public String toString() {
      return "DependencyFromFile{"
          + "groupId='"
          + groupId
          + '\''
          + ", artifactId='"
          + artifactId
          + '\''
          + ", version='"
          + version
          + '\''
          + ", optional="
          + optional
          + '}';
    }
  }
}
b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import com.datastax.dse.driver.internal.core.insights.schema.ReconnectionPolicyInfo; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.connection.ReconnectionPolicy; +import com.datastax.oss.driver.internal.core.connection.ConstantReconnectionPolicy; +import com.datastax.oss.driver.internal.core.connection.ExponentialReconnectionPolicy; +import java.util.HashMap; +import java.util.Map; + +class ReconnectionPolicyInfoFinder { + ReconnectionPolicyInfo getReconnectionPolicyInfo( + ReconnectionPolicy reconnectionPolicy, DriverExecutionProfile executionProfile) { + Class reconnectionPolicyClass = reconnectionPolicy.getClass(); + String type = reconnectionPolicyClass.getSimpleName(); + String namespace = PackageUtil.getNamespace(reconnectionPolicyClass); + Map options = new HashMap<>(); + if (reconnectionPolicy instanceof ConstantReconnectionPolicy) { + options.put( + "delayMs", + executionProfile.getDuration(DefaultDriverOption.RECONNECTION_BASE_DELAY).toMillis()); + } else if (reconnectionPolicy instanceof ExponentialReconnectionPolicy) { + ExponentialReconnectionPolicy exponentialReconnectionPolicy = + (ExponentialReconnectionPolicy) reconnectionPolicy; + options.put("maxDelayMs", exponentialReconnectionPolicy.getMaxDelayMs()); + options.put("baseDelayMs", exponentialReconnectionPolicy.getBaseDelayMs()); + options.put("maxAttempts", exponentialReconnectionPolicy.getMaxAttempts()); + } + return new ReconnectionPolicyInfo(type, options, namespace); + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java new file mode 100644 index 00000000000..d130510e09d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java @@ -0,0 +1,34 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.configuration; + +import io.netty.util.concurrent.EventExecutor; + +public class InsightsConfiguration { + private final boolean monitorReportingEnabled; + private final long statusEventDelayMillis; + private final EventExecutor executor; + + public InsightsConfiguration( + boolean monitorReportingEnabled, long statusEventDelayMillis, EventExecutor executor) { + this.monitorReportingEnabled = monitorReportingEnabled; + this.statusEventDelayMillis = statusEventDelayMillis; + this.executor = executor; + } + + public boolean isMonitorReportingEnabled() { + return monitorReportingEnabled; + } + + public long getStatusEventDelayMillis() { + return statusEventDelayMillis; + } + + public EventExecutor getExecutor() { + return executor; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java new file mode 100644 index 00000000000..bfb6a28b441 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java @@ -0,0 +1,14 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
/** Thrown when an Insights event cannot be serialized to JSON. */
public class InsightEventFormatException extends RuntimeException {

  public InsightEventFormatException(String message, Throwable cause) {
    super(message, cause);
  }
}

/**
 * JSON schema element describing the configured auth provider as a simple class name plus its
 * package ("namespace"), as produced by {@code PackageUtil}.
 */
public class AuthProviderType {
  @JsonProperty("type")
  private final String type;

  @JsonProperty("namespace")
  private final String namespace;

  @JsonCreator
  public AuthProviderType(
      @JsonProperty("type") String type, @JsonProperty("namespace") String namespace) {
    this.type = type;
    this.namespace = namespace;
  }

  public String getType() {
    return type;
  }

  public String getNamespace() {
    return namespace;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof AuthProviderType)) {
      return false;
    }
    AuthProviderType that = (AuthProviderType) o;
    return Objects.equals(type, that.type) && Objects.equals(namespace, that.namespace);
  }

  @Override
  public int hashCode() {
    return Objects.hash(type, namespace);
  }

  @Override
  public String toString() {
    return "AuthProviderType{" + "type='" + type + '\'' + ", namespace='" + namespace + '\'' + '}';
  }
}

/**
 * Top-level Insights event envelope: metadata plus a typed payload (startup or status data).
 * Empty values are omitted from the serialized JSON.
 *
 * @param <T> the payload type, serialized under the "data" key
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public class Insight<T> {
  @JsonProperty("metadata")
  private final InsightMetadata metadata;

  @JsonProperty("data")
  private final T insightData;

  @JsonCreator
  public Insight(@JsonProperty("metadata") InsightMetadata metadata, @JsonProperty("data") T data) {
    this.metadata = metadata;
    this.insightData = data;
  }

  public InsightMetadata getMetadata() {
    return metadata;
  }

  public T getInsightData() {
    return insightData;
  }

  @Override
  public String toString() {
    return "Insight{" + "metadata=" + metadata + ", insightData=" + insightData + '}';
  }
}
b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java @@ -0,0 +1,107 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.base.Strings; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class InsightMetadata { + @JsonProperty("name") + private final String name; + + @JsonProperty("timestamp") + private final long timestamp; + + @JsonProperty("tags") + private final Map tags; + + @JsonProperty("insightType") + private final InsightType insightType; + + @JsonProperty("insightMappingId") + @JsonInclude(JsonInclude.Include.NON_NULL) + private String insightMappingId; + + @JsonCreator + public InsightMetadata( + @JsonProperty("name") String name, + @JsonProperty("timestamp") long timestamp, + @JsonProperty("tags") Map tags, + @JsonProperty("insightType") InsightType insightType, + @JsonProperty("insightMappingId") String insightMappingId) { + Preconditions.checkArgument(!Strings.isNullOrEmpty(name), "name is required"); + + this.name = name; + this.timestamp = timestamp; + this.tags = tags; + this.insightType = insightType; + this.insightMappingId = insightMappingId; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof InsightMetadata)) { + return false; + } + InsightMetadata that = (InsightMetadata) o; + return Objects.equals(name, that.name) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(tags, that.tags) + && insightType == that.insightType + 
&& Objects.equals(insightMappingId, that.insightMappingId); + } + + @Override + public int hashCode() { + return Objects.hash(name, timestamp, tags, insightType, insightMappingId); + } + + @Override + public String toString() { + return "InsightMetadata{" + + "name='" + + name + + '\'' + + ", timestamp=" + + timestamp + + ", tags=" + + tags + + ", insightType=" + + insightType + + ", insightMappingId=" + + insightMappingId + + '}'; + } + + public String getName() { + return name; + } + + public long getTimestamp() { + return timestamp; + } + + public Map getTags() { + return tags; + } + + public InsightType getInsightType() { + return insightType; + } + + public String getInsightMappingId() { + return insightMappingId; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java new file mode 100644 index 00000000000..a6ec490491f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java @@ -0,0 +1,17 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +public enum InsightType { + EVENT, + GAUGE, + COUNTER, + HISTOGRAM, + TIMER, + METER, + LOG; +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java new file mode 100644 index 00000000000..ab217796fed --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java @@ -0,0 +1,225 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class InsightsPlatformInfo { + @JsonProperty("os") + private final OS os; + + @JsonProperty("cpus") + private CPUS cpus; + + /** + * All dependencies in a map format grouped by the module: {"core" : {"com.datastax.driver:core": + * {"runtimeVersion:" : "1.0.0", "compileVersion": "1.0.1"},...}}, "extras"" {...} + */ + @JsonProperty("runtime") + private Map> runtime; + + @JsonCreator + public InsightsPlatformInfo( + @JsonProperty("os") OS os, + @JsonProperty("cpus") CPUS cpus, + @JsonProperty("runtime") Map> runtime) { + this.os = os; + this.cpus = cpus; + this.runtime = runtime; + } + + public OS getOs() { + return os; + } + + public CPUS getCpus() { + return cpus; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof InsightsPlatformInfo)) { + return false; + } + InsightsPlatformInfo that = (InsightsPlatformInfo) o; + return Objects.equals(os, that.os) + && Objects.equals(cpus, that.cpus) + && Objects.equals(runtime, that.runtime); + } + + @Override + public int hashCode() { + return Objects.hash(os, cpus, runtime); + } + + Map> getRuntime() { + return runtime; + } + + public static class OS { + @JsonProperty("name") + private final String name; + + @JsonProperty("version") + private final String version; + + @JsonProperty("arch") + private final String arch; + + @JsonCreator + public OS( + @JsonProperty("name") String name, + @JsonProperty("version") String version, + @JsonProperty("arch") String arch) { + this.name = name; + this.version = version; + this.arch = arch; + } + + public String getName() { + return name; + } + + public String getVersion() { + return version; + } 
+ + public String getArch() { + return arch; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof OS)) { + return false; + } + OS os = (OS) o; + return Objects.equals(name, os.name) + && Objects.equals(version, os.version) + && Objects.equals(arch, os.arch); + } + + @Override + public int hashCode() { + return Objects.hash(name, version, arch); + } + } + + public static class CPUS { + @JsonProperty("length") + private final int length; + + @JsonProperty("model") + private final String model; + + @JsonCreator + public CPUS(@JsonProperty("length") int length, @JsonProperty("model") String model) { + this.length = length; + this.model = model; + } + + public int getLength() { + return length; + } + + public String getModel() { + return model; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof CPUS)) { + return false; + } + CPUS cpus = (CPUS) o; + return length == cpus.length && Objects.equals(model, cpus.model); + } + + @Override + public int hashCode() { + return Objects.hash(length, model); + } + } + + public static class RuntimeAndCompileTimeVersions { + @JsonProperty("runtimeVersion") + private final String runtimeVersion; + + @JsonProperty("compileVersion") + private final String compileVersion; + + @JsonProperty("optional") + private final boolean optional; + + @JsonCreator + public RuntimeAndCompileTimeVersions( + @JsonProperty("runtimeVersion") String runtimeVersion, + @JsonProperty("compileVersion") String compileVersion, + @JsonProperty("optional") boolean optional) { + this.runtimeVersion = runtimeVersion; + this.compileVersion = compileVersion; + this.optional = optional; + } + + public String getRuntimeVersion() { + return runtimeVersion; + } + + public String getCompileVersion() { + return compileVersion; + } + + public boolean isOptional() { + return optional; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + 
return true; + } + if (!(o instanceof RuntimeAndCompileTimeVersions)) { + return false; + } + RuntimeAndCompileTimeVersions that = (RuntimeAndCompileTimeVersions) o; + return optional == that.optional + && Objects.equals(runtimeVersion, that.runtimeVersion) + && Objects.equals(compileVersion, that.compileVersion); + } + + @Override + public int hashCode() { + return Objects.hash(runtimeVersion, compileVersion, optional); + } + + @Override + public String toString() { + return "RuntimeAndCompileTimeVersions{" + + "runtimeVersion='" + + runtimeVersion + + '\'' + + ", compileVersion='" + + compileVersion + + '\'' + + ", optional=" + + optional + + '}'; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java new file mode 100644 index 00000000000..e9b8d72a57d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java @@ -0,0 +1,414 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class InsightsStartupData { + @JsonProperty("clientId") + private final String clientId; + + @JsonProperty("sessionId") + private final String sessionId; + + @JsonProperty("applicationName") + private final String applicationName; + + @JsonProperty("applicationVersion") + private final String applicationVersion; + + @JsonProperty("contactPoints") + private final Map> contactPoints; + + @JsonProperty("initialControlConnection") + private final String initialControlConnection; + + @JsonProperty("protocolVersion") + private final int protocolVersion; + + @JsonProperty("localAddress") + private final String localAddress; + + @JsonProperty("executionProfiles") + private final Map executionProfiles; + + @JsonProperty("poolSizeByHostDistance") + private final PoolSizeByHostDistance poolSizeByHostDistance; + + @JsonProperty("heartbeatInterval") + private final long heartbeatInterval; + + @JsonProperty("compression") + private final String compression; + + @JsonProperty("reconnectionPolicy") + private final ReconnectionPolicyInfo reconnectionPolicy; + + @JsonProperty("ssl") + private final SSL ssl; + + @JsonProperty("authProvider") + private final AuthProviderType authProvider; + + @JsonProperty("otherOptions") + private final Map otherOptions; + + @JsonProperty("configAntiPatterns") + private final Map configAntiPatterns; + + @JsonProperty("periodicStatusInterval") + private final long periodicStatusInterval; + + @JsonProperty("platformInfo") + private final InsightsPlatformInfo platformInfo; + + @JsonProperty("hostName") + private final String hostName; + + @JsonProperty("driverName") + private String driverName; + + 
@JsonProperty("applicationNameWasGenerated") + private boolean applicationNameWasGenerated; + + @JsonProperty("driverVersion") + private String driverVersion; + + @JsonProperty("dataCenters") + private Set dataCenters; + + @JsonCreator + private InsightsStartupData( + @JsonProperty("clientId") String clientId, + @JsonProperty("sessionId") String sessionId, + @JsonProperty("applicationName") String applicationName, + @JsonProperty("applicationVersion") String applicationVersion, + @JsonProperty("contactPoints") Map> contactPoints, + @JsonProperty("initialControlConnection") String initialControlConnection, + @JsonProperty("protocolVersion") int protocolVersion, + @JsonProperty("localAddress") String localAddress, + @JsonProperty("executionProfiles") Map executionProfiles, + @JsonProperty("poolSizeByHostDistance") PoolSizeByHostDistance poolSizeByHostDistance, + @JsonProperty("heartbeatInterval") long heartbeatInterval, + @JsonProperty("compression") String compression, + @JsonProperty("reconnectionPolicy") ReconnectionPolicyInfo reconnectionPolicy, + @JsonProperty("ssl") SSL ssl, + @JsonProperty("authProvider") AuthProviderType authProvider, + @JsonProperty("otherOptions") Map otherOptions, + @JsonProperty("configAntiPatterns") Map configAntiPatterns, + @JsonProperty("periodicStatusInterval") long periodicStatusInterval, + @JsonProperty("platformInfo") InsightsPlatformInfo platformInfo, + @JsonProperty("hostName") String hostName, + @JsonProperty("driverName") String driverName, + @JsonProperty("applicationNameWasGenerated") boolean applicationNameWasGenerated, + @JsonProperty("driverVersion") String driverVersion, + @JsonProperty("dataCenters") Set dataCenters) { + this.clientId = clientId; + this.sessionId = sessionId; + this.applicationName = applicationName; + this.applicationVersion = applicationVersion; + this.contactPoints = contactPoints; + this.initialControlConnection = initialControlConnection; + this.protocolVersion = protocolVersion; + this.localAddress 
= localAddress; + this.executionProfiles = executionProfiles; + this.poolSizeByHostDistance = poolSizeByHostDistance; + this.heartbeatInterval = heartbeatInterval; + this.compression = compression; + this.reconnectionPolicy = reconnectionPolicy; + this.ssl = ssl; + this.authProvider = authProvider; + this.otherOptions = otherOptions; + this.configAntiPatterns = configAntiPatterns; + this.periodicStatusInterval = periodicStatusInterval; + this.platformInfo = platformInfo; + this.hostName = hostName; + this.driverName = driverName; + this.applicationNameWasGenerated = applicationNameWasGenerated; + this.driverVersion = driverVersion; + this.dataCenters = dataCenters; + } + + public String getClientId() { + return clientId; + } + + public String getSessionId() { + return sessionId; + } + + public String getApplicationName() { + return applicationName; + } + + public String getApplicationVersion() { + return applicationVersion; + } + + public Map> getContactPoints() { + return contactPoints; + } + + public String getInitialControlConnection() { + return initialControlConnection; + } + + public int getProtocolVersion() { + return protocolVersion; + } + + public String getLocalAddress() { + return localAddress; + } + + public Map getExecutionProfiles() { + return executionProfiles; + } + + public PoolSizeByHostDistance getPoolSizeByHostDistance() { + return poolSizeByHostDistance; + } + + public long getHeartbeatInterval() { + return heartbeatInterval; + } + + public String getCompression() { + return compression; + } + + public ReconnectionPolicyInfo getReconnectionPolicy() { + return reconnectionPolicy; + } + + public SSL getSsl() { + return ssl; + } + + public AuthProviderType getAuthProvider() { + return authProvider; + } + + public Map getOtherOptions() { + return otherOptions; + } + + public Map getConfigAntiPatterns() { + return configAntiPatterns; + } + + public long getPeriodicStatusInterval() { + return periodicStatusInterval; + } + + public 
InsightsPlatformInfo getPlatformInfo() { + return platformInfo; + } + + public String getHostName() { + return hostName; + } + + public String getDriverName() { + return driverName; + } + + public boolean isApplicationNameWasGenerated() { + return applicationNameWasGenerated; + } + + public String getDriverVersion() { + return driverVersion; + } + + public Set getDataCenters() { + return dataCenters; + } + + public static InsightsStartupData.Builder builder() { + return new InsightsStartupData.Builder(); + } + + public static class Builder { + private String clientId; + private String sessionId; + private String applicationName; + private String applicationVersion; + private Map> contactPoints; + private String initialControlConnection; + private int protocolVersion; + private String localAddress; + private Map executionProfiles; + private PoolSizeByHostDistance poolSizeByHostDistance; + private long heartbeatInterval; + private String compression; + private ReconnectionPolicyInfo reconnectionPolicy; + private SSL ssl; + private AuthProviderType authProvider; + private Map otherOptions; + private Map configAntiPatterns; + private long periodicStatusInterval; + private InsightsPlatformInfo platformInfo; + private String hostName; + private String driverName; + private String driverVersion; + private boolean applicationNameWasGenerated; + private Set dataCenters; + + public InsightsStartupData build() { + return new InsightsStartupData( + clientId, + sessionId, + applicationName, + applicationVersion, + contactPoints, + initialControlConnection, + protocolVersion, + localAddress, + executionProfiles, + poolSizeByHostDistance, + heartbeatInterval, + compression, + reconnectionPolicy, + ssl, + authProvider, + otherOptions, + configAntiPatterns, + periodicStatusInterval, + platformInfo, + hostName, + driverName, + applicationNameWasGenerated, + driverVersion, + dataCenters); + } + + public Builder withClientId(String clientId) { + this.clientId = clientId; + return 
this; + } + + public Builder withSessionId(String id) { + this.sessionId = id; + return this; + } + + public Builder withApplicationName(String applicationName) { + this.applicationName = applicationName; + return this; + } + + public Builder withApplicationVersion(String applicationVersion) { + this.applicationVersion = applicationVersion; + return this; + } + + public Builder withContactPoints(Map> contactPoints) { + this.contactPoints = contactPoints; + return this; + } + + public Builder withInitialControlConnection(String inetSocketAddress) { + this.initialControlConnection = inetSocketAddress; + return this; + } + + public Builder withProtocolVersion(int protocolVersion) { + this.protocolVersion = protocolVersion; + return this; + } + + public Builder withLocalAddress(String localAddress) { + this.localAddress = localAddress; + return this; + } + + public Builder withExecutionProfiles(Map executionProfiles) { + this.executionProfiles = executionProfiles; + return this; + } + + public Builder withPoolSizeByHostDistance(PoolSizeByHostDistance poolSizeByHostDistance) { + this.poolSizeByHostDistance = poolSizeByHostDistance; + return this; + } + + public Builder withHeartbeatInterval(long heartbeatInterval) { + this.heartbeatInterval = heartbeatInterval; + return this; + } + + public Builder withCompression(String compression) { + this.compression = compression; + return this; + } + + public Builder withReconnectionPolicy(ReconnectionPolicyInfo reconnectionPolicy) { + this.reconnectionPolicy = reconnectionPolicy; + return this; + } + + public Builder withSsl(SSL ssl) { + this.ssl = ssl; + return this; + } + + public Builder withAuthProvider(AuthProviderType authProvider) { + this.authProvider = authProvider; + return this; + } + + public Builder withOtherOptions(Map otherOptions) { + this.otherOptions = otherOptions; + return this; + } + + public Builder withConfigAntiPatterns(Map configAntiPatterns) { + this.configAntiPatterns = configAntiPatterns; + return 
this; + } + + public Builder withPeriodicStatusInterval(long periodicStatusInterval) { + this.periodicStatusInterval = periodicStatusInterval; + return this; + } + + public Builder withPlatformInfo(InsightsPlatformInfo insightsPlatformInfo) { + this.platformInfo = insightsPlatformInfo; + return this; + } + + public Builder withHostName(String hostName) { + this.hostName = hostName; + return this; + } + + public Builder withDriverName(String driverName) { + this.driverName = driverName; + return this; + } + + public Builder withDriverVersion(String driverVersion) { + this.driverVersion = driverVersion; + return this; + } + + public Builder withApplicationNameWasGenerated(boolean applicationNameWasGenerated) { + this.applicationNameWasGenerated = applicationNameWasGenerated; + return this; + } + + public Builder withDataCenters(Set dataCenters) { + this.dataCenters = dataCenters; + return this; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java new file mode 100644 index 00000000000..789baea6a3d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class InsightsStatusData { + @JsonProperty("clientId") + private final String clientId; + + @JsonProperty("sessionId") + private final String sessionId; + + @JsonProperty("controlConnection") + private final String controlConnection; + + @JsonProperty("connectedNodes") + private final Map connectedNodes; + + @JsonCreator + private InsightsStatusData( + @JsonProperty("clientId") String clientId, + @JsonProperty("sessionId") String sessionId, + @JsonProperty("controlConnection") String controlConnection, + @JsonProperty("connectedNodes") Map connectedNodes) { + this.clientId = clientId; + this.sessionId = sessionId; + this.controlConnection = controlConnection; + this.connectedNodes = connectedNodes; + } + + public String getClientId() { + return clientId; + } + + public String getSessionId() { + return sessionId; + } + + public String getControlConnection() { + return controlConnection; + } + + public Map getConnectedNodes() { + return connectedNodes; + } + + @Override + public String toString() { + return "InsightsStatusData{" + + "clientId='" + + clientId + + '\'' + + ", sessionId='" + + sessionId + + '\'' + + ", controlConnection=" + + controlConnection + + ", connectedNodes=" + + connectedNodes + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof InsightsStatusData)) { + return false; + } + InsightsStatusData that = (InsightsStatusData) o; + return Objects.equals(clientId, that.clientId) + && Objects.equals(sessionId, that.sessionId) + && Objects.equals(controlConnection, that.controlConnection) + && Objects.equals(connectedNodes, that.connectedNodes); + } + + 
@Override + public int hashCode() { + return Objects.hash(clientId, sessionId, controlConnection, connectedNodes); + } + + public static InsightsStatusData.Builder builder() { + return new InsightsStatusData.Builder(); + } + + public static class Builder { + private String clientId; + private String sessionId; + private String controlConnection; + private Map connectedNodes; + + public Builder withClientId(String clientId) { + this.clientId = clientId; + return this; + } + + public Builder withSessionId(String id) { + this.sessionId = id; + return this; + } + + public Builder withControlConnection(String controlConnection) { + this.controlConnection = controlConnection; + return this; + } + + public Builder withConnectedNodes(Map connectedNodes) { + this.connectedNodes = connectedNodes; + return this; + } + + public InsightsStatusData build() { + return new InsightsStatusData(clientId, sessionId, controlConnection, connectedNodes); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java new file mode 100644 index 00000000000..ec05f3094e5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class LoadBalancingInfo { + @JsonProperty("type") + private final String type; + + @JsonProperty("options") + private final Map options; + + @JsonProperty("namespace") + private final String namespace; + + @JsonCreator + public LoadBalancingInfo( + @JsonProperty("type") String type, + @JsonProperty("options") Map options, + @JsonProperty("namespace") String namespace) { + this.type = type; + this.options = options; + this.namespace = namespace; + } + + public String getType() { + return type; + } + + public Map getOptions() { + return options; + } + + public String getNamespace() { + return namespace; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof LoadBalancingInfo)) { + return false; + } + LoadBalancingInfo that = (LoadBalancingInfo) o; + return Objects.equals(type, that.type) + && Objects.equals(options, that.options) + && Objects.equals(namespace, that.namespace); + } + + @Override + public int hashCode() { + return Objects.hash(type, options, namespace); + } + + @Override + public String toString() { + return "LoadBalancingInfo{" + + "type='" + + type + + '\'' + + ", options=" + + options + + ", namespace='" + + namespace + + '\'' + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java new file mode 100644 index 00000000000..ed99ad17b53 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java @@ -0,0 +1,74 @@ +/* + * Copyright 
DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +public class PoolSizeByHostDistance { + @JsonProperty("local") + private final int local; + + @JsonProperty("remote") + private final int remote; + + @JsonProperty("ignored") + private final int ignored; + + @JsonCreator + public PoolSizeByHostDistance( + @JsonProperty("local") int local, + @JsonProperty("remote") int remote, + @JsonProperty("ignored") int ignored) { + + this.local = local; + this.remote = remote; + this.ignored = ignored; + } + + public int getLocal() { + return local; + } + + public int getRemote() { + return remote; + } + + public int getIgnored() { + return ignored; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof PoolSizeByHostDistance)) { + return false; + } + PoolSizeByHostDistance that = (PoolSizeByHostDistance) o; + return local == that.local && remote == that.remote && ignored == that.ignored; + } + + @Override + public int hashCode() { + return Objects.hash(local, remote, ignored); + } + + @Override + public String toString() { + return "PoolSizeByHostDistance{" + + "local=" + + local + + ", remote=" + + remote + + ", ignored=" + + ignored + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java new file mode 100644 index 00000000000..428f88ac6b4 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java @@ -0,0 +1,79 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class ReconnectionPolicyInfo { + @JsonProperty("type") + private final String type; + + @JsonProperty("options") + private final Map options; + + @JsonProperty("namespace") + private final String namespace; + + @JsonCreator + public ReconnectionPolicyInfo( + @JsonProperty("type") String type, + @JsonProperty("options") Map options, + @JsonProperty("namespace") String namespace) { + + this.type = type; + this.options = options; + this.namespace = namespace; + } + + public String getType() { + return type; + } + + public Map getOptions() { + return options; + } + + public String getNamespace() { + return namespace; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ReconnectionPolicyInfo)) { + return false; + } + ReconnectionPolicyInfo that = (ReconnectionPolicyInfo) o; + return Objects.equals(type, that.type) + && Objects.equals(options, that.options) + && Objects.equals(namespace, that.namespace); + } + + @Override + public int hashCode() { + return Objects.hash(type, options, namespace); + } + + @Override + public String toString() { + return "ReconnectionPolicyInfo{" + + "type='" + + type + + '\'' + + ", options=" + + options + + ", namespace='" + + namespace + + '\'' + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java new file mode 100644 index 00000000000..6bca417f138 --- /dev/null +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +public class SSL { + @JsonProperty("enabled") + private final boolean enabled; + + @JsonProperty("certValidation") + private final boolean certValidation; + + @JsonCreator + public SSL( + @JsonProperty("enabled") boolean enabled, + @JsonProperty("certValidation") boolean certValidation) { + this.enabled = enabled; + this.certValidation = certValidation; + } + + public boolean isEnabled() { + return enabled; + } + + public boolean isCertValidation() { + return certValidation; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SSL)) { + return false; + } + SSL that = (SSL) o; + return enabled == that.enabled && certValidation == that.certValidation; + } + + @Override + public int hashCode() { + return Objects.hash(enabled, certValidation); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java new file mode 100644 index 00000000000..5fe7f82e7a6 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; + +public class SessionStateForNode { + @JsonProperty("connections") + private final Integer connections; + + @JsonProperty("inFlightQueries") + private final Integer inFlightQueries; + + @JsonCreator + public SessionStateForNode( + @JsonProperty("connections") Integer connections, + @JsonProperty("inFlightQueries") Integer inFlightQueries) { + this.connections = connections; + this.inFlightQueries = inFlightQueries; + } + + public Integer getConnections() { + return connections; + } + + public Integer getInFlightQueries() { + return inFlightQueries; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SessionStateForNode)) { + return false; + } + SessionStateForNode that = (SessionStateForNode) o; + return Objects.equals(connections, that.connections) + && Objects.equals(inFlightQueries, that.inFlightQueries); + } + + @Override + public int hashCode() { + return Objects.hash(connections, inFlightQueries); + } + + @Override + public String toString() { + return "SessionStateForNode{" + + "connections=" + + connections + + ", inFlightQueries=" + + inFlightQueries + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java new file mode 100644 index 00000000000..a911f1016ca --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +@JsonInclude(JsonInclude.Include.NON_NULL) +public class SpecificExecutionProfile { + @JsonProperty("readTimeout") + private final Integer readTimeout; + + @JsonProperty("loadBalancing") + private final LoadBalancingInfo loadBalancing; + + @JsonProperty("speculativeExecution") + private SpeculativeExecutionInfo speculativeExecution; + + @JsonProperty("consistency") + private final String consistency; + + @JsonProperty("serialConsistency") + private final String serialConsistency; + + @JsonProperty("graphOptions") + private Map graphOptions; + + @JsonCreator + public SpecificExecutionProfile( + @JsonProperty("readTimeout") Integer readTimeoutMillis, + @JsonProperty("loadBalancing") LoadBalancingInfo loadBalancing, + @JsonProperty("speculativeExecution") SpeculativeExecutionInfo speculativeExecutionInfo, + @JsonProperty("consistency") String consistency, + @JsonProperty("serialConsistency") String serialConsistency, + @JsonProperty("graphOptions") Map graphOptions) { + readTimeout = readTimeoutMillis; + this.loadBalancing = loadBalancing; + this.speculativeExecution = speculativeExecutionInfo; + this.consistency = consistency; + this.serialConsistency = serialConsistency; + this.graphOptions = graphOptions; + } + + public Integer getReadTimeout() { + return readTimeout; + } + + public LoadBalancingInfo getLoadBalancing() { + return loadBalancing; + } + + public SpeculativeExecutionInfo getSpeculativeExecution() { + return speculativeExecution; + } + + public String getConsistency() { + return consistency; + } + + public String getSerialConsistency() { + return serialConsistency; + } + + 
public Map getGraphOptions() { + return graphOptions; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SpecificExecutionProfile)) { + return false; + } + SpecificExecutionProfile that = (SpecificExecutionProfile) o; + return Objects.equals(readTimeout, that.readTimeout) + && Objects.equals(loadBalancing, that.loadBalancing) + && Objects.equals(speculativeExecution, that.speculativeExecution) + && Objects.equals(consistency, that.consistency) + && Objects.equals(serialConsistency, that.serialConsistency) + && Objects.equals(graphOptions, that.graphOptions); + } + + @Override + public int hashCode() { + return Objects.hash( + readTimeout, + loadBalancing, + speculativeExecution, + consistency, + serialConsistency, + graphOptions); + } + + @Override + public String toString() { + return "SpecificExecutionProfile{" + + "readTimeout=" + + readTimeout + + ", loadBalancing=" + + loadBalancing + + ", speculativeExecution=" + + speculativeExecution + + ", consistency='" + + consistency + + '\'' + + ", serialConsistency='" + + serialConsistency + + '\'' + + ", graphOptions=" + + graphOptions + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java new file mode 100644 index 00000000000..39a4643d041 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; +import java.util.Objects; + +public class SpeculativeExecutionInfo { + @JsonProperty("type") + private final String type; + + @JsonProperty("options") + private final Map options; + + @JsonProperty("namespace") + private String namespace; + + @JsonCreator + public SpeculativeExecutionInfo( + @JsonProperty("type") String type, + @JsonProperty("options") Map options, + @JsonProperty("namespace") String namespace) { + this.type = type; + this.options = options; + this.namespace = namespace; + } + + public String getType() { + return type; + } + + public Map getOptions() { + return options; + } + + public String getNamespace() { + return namespace; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SpeculativeExecutionInfo)) { + return false; + } + SpeculativeExecutionInfo that = (SpeculativeExecutionInfo) o; + return Objects.equals(type, that.type) + && Objects.equals(options, that.options) + && Objects.equals(namespace, that.namespace); + } + + @Override + public int hashCode() { + return Objects.hash(type, options, namespace); + } + + @Override + public String toString() { + return "SpeculativeExecutionInfo{" + + "type='" + + type + + '\'' + + ", options=" + + options + + ", namespace='" + + namespace + + '\'' + + '}'; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java new file mode 100644 index 00000000000..5f6edbc693c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java @@ 
-0,0 +1,497 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.MINUTES; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.util.ArrayUtils; +import com.datastax.oss.driver.internal.core.util.Reflection; +import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.BitSet; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Queue; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLongArray; +import java.util.function.IntUnaryOperator; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The DSE load balancing policy implementation. + * + *

      To activate this policy, modify the {@code basic.load-balancing-policy} section in the DSE + * driver configuration, for example: + * + *

      + * datastax-java-driver {
      + *   basic.load-balancing-policy {
      + *     class = com.datastax.dse.driver.internal.core.loadbalancing.DseLoadBalancingPolicy
      + *     local-datacenter = datacenter1
      + *   }
      + * }
      + * 
      + * + * See {@code reference.conf} (in the manual or OSS driver JAR) and {@code dse-reference.conf} (in + * the manual or DSE driver JAR) for more details. + */ +@ThreadSafe +public class DseLoadBalancingPolicy implements LoadBalancingPolicy, RequestTracker { + + private static final Logger LOG = LoggerFactory.getLogger(DseLoadBalancingPolicy.class); + + private static final Predicate INCLUDE_ALL_NODES = n -> true; + private static final IntUnaryOperator INCREMENT = i -> (i == Integer.MAX_VALUE) ? 0 : i + 1; + + private static final long NEWLY_UP_INTERVAL_NANOS = MINUTES.toNanos(1); + private static final int MAX_IN_FLIGHT_THRESHOLD = 10; + private static final long RESPONSE_COUNT_RESET_INTERVAL_NANOS = MILLISECONDS.toNanos(200); + + @NonNull private final String logPrefix; + @NonNull private final MetadataManager metadataManager; + @NonNull private final Predicate filter; + private final boolean isDefaultPolicy; + + @Nullable @VisibleForTesting volatile String localDc; + @NonNull private volatile DistanceReporter distanceReporter = (node, distance) -> {}; + + private final AtomicInteger roundRobinAmount = new AtomicInteger(); + @VisibleForTesting final CopyOnWriteArraySet localDcLiveNodes = new CopyOnWriteArraySet<>(); + @VisibleForTesting final Map responseTimes = new ConcurrentHashMap<>(); + @VisibleForTesting final Map upTimes = new ConcurrentHashMap<>(); + + public DseLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { + this.logPrefix = context.getSessionName() + "|" + profileName; + this.metadataManager = ((InternalDriverContext) context).getMetadataManager(); + this.isDefaultPolicy = profileName.equals(DriverExecutionProfile.DEFAULT_NAME); + this.localDc = getLocalDcFromConfig((InternalDriverContext) context, profileName); + Predicate filterFromConfig = getFilterFromConfig(context, profileName); + this.filter = + node -> { + String localDc = this.localDc; + if (localDc != null && !localDc.equals(node.getDatacenter())) { 
+ LOG.debug( + "[{}] Ignoring {} because it doesn't belong to the local DC {}", + logPrefix, + node, + localDc); + return false; + } else if (!filterFromConfig.test(node)) { + LOG.debug( + "[{}] Ignoring {} because it doesn't match the user-provided predicate", + logPrefix, + node); + return false; + } else { + return true; + } + }; + ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); + } + + @Override + public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { + this.distanceReporter = distanceReporter; + + Set contactPoints = metadataManager.getContactPoints(); + if (localDc == null) { + if (metadataManager.wasImplicitContactPoint()) { + // No explicit contact points provided => the driver used the default (127.0.0.1:9042), and + // we allow inferring the local DC in this case + assert contactPoints.size() == 1; + Node contactPoint = contactPoints.iterator().next(); + localDc = contactPoint.getDatacenter(); + LOG.debug("[{}] Local DC set from contact point {}: {}", logPrefix, contactPoint, localDc); + } else { + throw new IllegalStateException( + "You provided explicit contact points, the local DC must be specified (see " + + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() + + " in the config)"); + } + } else { + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (Node node : contactPoints) { + String datacenter = node.getDatacenter(); + if (!Objects.equals(localDc, datacenter)) { + builder.put(node, (datacenter == null) ? 
"" : datacenter); + } + } + ImmutableMap badContactPoints = builder.build(); + if (isDefaultPolicy && !badContactPoints.isEmpty()) { + LOG.warn( + "[{}] You specified {} as the local DC, but some contact points are from a different DC ({})", + logPrefix, + localDc, + badContactPoints); + } + } + + for (Node node : nodes.values()) { + if (filter.test(node)) { + distanceReporter.setDistance(node, NodeDistance.LOCAL); + if (node.getState() != NodeState.DOWN) { + // This includes state == UNKNOWN. If the node turns out to be unreachable, this will be + // detected when we try to open a pool to it, it will get marked down and this will be + // signaled back to this policy + localDcLiveNodes.add(node); + } + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + } + + @NonNull + @Override + public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { + // Take a snapshot since the set is concurrent: + Object[] currentNodes = localDcLiveNodes.toArray(); + + Set allReplicas = getReplicas(request, session); + int replicaCount = 0; // in currentNodes + + if (!allReplicas.isEmpty()) { + + // Move replicas to the beginning of the plan + for (int i = 0; i < currentNodes.length; i++) { + Node node = (Node) currentNodes[i]; + if (allReplicas.contains(node)) { + ArrayUtils.bubbleUp(currentNodes, i, replicaCount); + replicaCount++; + } + } + + if (replicaCount > 1) { + + shuffleHead(currentNodes, replicaCount); + + if (replicaCount > 2) { + + assert session != null; + + // Test replicas health + Node newestUpReplica = null; + BitSet unhealthyReplicas = null; // bit mask storing indices of unhealthy replicas + long mostRecentUpTimeNanos = -1; + long now = nanoTime(); + for (int i = 0; i < replicaCount; i++) { + Node node = (Node) currentNodes[i]; + Long upTimeNanos = upTimes.get(node); + if (upTimeNanos != null + && now - upTimeNanos - NEWLY_UP_INTERVAL_NANOS < 0 + && upTimeNanos - mostRecentUpTimeNanos > 0) { + newestUpReplica = node; + 
mostRecentUpTimeNanos = upTimeNanos; + } + if (newestUpReplica == null && isUnhealthy(node, session, now)) { + if (unhealthyReplicas == null) { + unhealthyReplicas = new BitSet(replicaCount); + } + unhealthyReplicas.set(i); + } + } + + // When: + // - there isn't any newly UP replica and + // - there is one or more unhealthy replicas and + // - there is a majority of healthy replicas + int unhealthyReplicasCount = + unhealthyReplicas == null ? 0 : unhealthyReplicas.cardinality(); + if (newestUpReplica == null + && unhealthyReplicasCount > 0 + && unhealthyReplicasCount < (replicaCount / 2.0)) { + + // Reorder the unhealthy replicas to the back of the list + // Start from the back of the replicas, then move backwards; + // stop once all unhealthy replicas are moved to the back. + int counter = 0; + for (int i = replicaCount - 1; i >= 0 && counter < unhealthyReplicasCount; i--) { + if (unhealthyReplicas.get(i)) { + ArrayUtils.bubbleDown(currentNodes, i, replicaCount - 1 - counter); + counter++; + } + } + } + + // When: + // - there is a newly UP replica and + // - the replica in first or second position is the most recent replica marked as UP and + // - dice roll 1d4 != 1 + else if ((newestUpReplica == currentNodes[0] || newestUpReplica == currentNodes[1]) + && diceRoll1d4() != 1) { + + // Send it to the back of the replicas + ArrayUtils.bubbleDown( + currentNodes, newestUpReplica == currentNodes[0] ? 
0 : 1, replicaCount - 1); + } + + // Reorder the first two replicas in the shuffled list based on the number of + // in-flight requests + if (getInFlight((Node) currentNodes[0], session) + > getInFlight((Node) currentNodes[1], session)) { + ArrayUtils.swap(currentNodes, 0, 1); + } + } + } + } + + LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount); + + // Round-robin the remaining nodes + ArrayUtils.rotate( + currentNodes, + replicaCount, + currentNodes.length - replicaCount, + roundRobinAmount.getAndUpdate(INCREMENT)); + + return new QueryPlan(currentNodes); + } + + @Override + public void onAdd(@NonNull Node node) { + if (filter.test(node)) { + LOG.debug("[{}] {} was added, setting distance to LOCAL", logPrefix, node); + // Setting to a non-ignored distance triggers the session to open a pool, which will in turn + // set the node UP when the first channel gets opened. + distanceReporter.setDistance(node, NodeDistance.LOCAL); + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + + @Override + public void onUp(@NonNull Node node) { + if (filter.test(node)) { + // Normally this is already the case, but the filter could be dynamic and have ignored the + // node previously. 
+ distanceReporter.setDistance(node, NodeDistance.LOCAL); + if (localDcLiveNodes.add(node)) { + upTimes.put(node, nanoTime()); + LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); + } + } else { + distanceReporter.setDistance(node, NodeDistance.IGNORED); + } + } + + @Override + public void onDown(@NonNull Node node) { + if (localDcLiveNodes.remove(node)) { + upTimes.remove(node); + LOG.debug("[{}] {} went DOWN, removed from live set", logPrefix, node); + } + } + + @Override + public void onRemove(@NonNull Node node) { + if (localDcLiveNodes.remove(node)) { + upTimes.remove(node); + LOG.debug("[{}] {} was removed, removed from live set", logPrefix, node); + } + } + + @Override + public void onNodeSuccess( + @NonNull Request request, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + updateResponseTimes(node); + } + + @Override + public void onNodeError( + @NonNull Request request, + @NonNull Throwable error, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + updateResponseTimes(node); + } + + @Override + public void close() {} + + @VisibleForTesting + void shuffleHead(Object[] array, int n) { + ArrayUtils.shuffleHead(array, n); + } + + @VisibleForTesting + long nanoTime() { + return System.nanoTime(); + } + + @VisibleForTesting + int diceRoll1d4() { + return ThreadLocalRandom.current().nextInt(4); + } + + private Set getReplicas(@Nullable Request request, @Nullable Session session) { + if (request == null || session == null) { + return Collections.emptySet(); + } + + // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, + // so we only call each method when strictly necessary (which is why the code below looks a bit + // weird). 
+ CqlIdentifier keyspace = request.getKeyspace(); + if (keyspace == null) { + keyspace = request.getRoutingKeyspace(); + } + if (keyspace == null && session.getKeyspace().isPresent()) { + keyspace = session.getKeyspace().get(); + } + if (keyspace == null) { + return Collections.emptySet(); + } + + Token token = request.getRoutingToken(); + ByteBuffer key = (token == null) ? request.getRoutingKey() : null; + if (token == null && key == null) { + return Collections.emptySet(); + } + + Optional maybeTokenMap = metadataManager.getMetadata().getTokenMap(); + if (maybeTokenMap.isPresent()) { + TokenMap tokenMap = maybeTokenMap.get(); + return (token != null) + ? tokenMap.getReplicas(keyspace, token) + : tokenMap.getReplicas(keyspace, key); + } else { + return Collections.emptySet(); + } + } + + private boolean isUnhealthy(@NonNull Node node, @NonNull Session session, long now) { + return isBusy(node, session) && isResponseRateInsufficient(node, now); + } + + private boolean isBusy(@NonNull Node node, @NonNull Session session) { + return getInFlight(node, session) >= MAX_IN_FLIGHT_THRESHOLD; + } + + @VisibleForTesting + boolean isResponseRateInsufficient(@NonNull Node node, long now) { + // response rate is considered insufficient when less than 2 responses were obtained in + // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS. + if (responseTimes.containsKey(node)) { + AtomicLongArray array = responseTimes.get(node); + if (array.length() == 2) { + long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS; + long leastRecent = array.get(0); + return leastRecent - threshold < 0; + } + } + return true; + } + + private void updateResponseTimes(@NonNull Node node) { + responseTimes.compute( + node, + (n, array) -> { + // The array stores at most two timestamps, since we don't need more; + // the first one is always the least recent one, and hence the one to inspect. 
+ long now = nanoTime(); + if (array == null) { + array = new AtomicLongArray(1); + array.set(0, now); + } else if (array.length() == 1) { + long previous = array.get(0); + array = new AtomicLongArray(2); + array.set(0, previous); + array.set(1, now); + } else { + array.set(0, array.get(1)); + array.set(1, now); + } + return array; + }); + } + + private String getLocalDcFromConfig( + @NonNull InternalDriverContext context, @NonNull String profileName) { + // see if the local datacenter has been set programmatically + String localDataCenter = context.getLocalDatacenter(profileName); + if (localDataCenter != null) { + LOG.debug("[{}] Local DC set from builder: {}", logPrefix, localDataCenter); + return localDataCenter; + } else { + // it's not been set programmatically, try to get it from config + DriverExecutionProfile config = context.getConfig().getProfile(profileName); + localDataCenter = config.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); + if (localDataCenter != null) { + LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDataCenter); + } + return localDataCenter; + } + } + + private static int getInFlight(@NonNull Node node, @NonNull Session session) { + // The cast will always succeed because there's no way to replace the internal session impl + ChannelPool pool = ((DefaultSession) session).getPools().get(node); + // Note: getInFlight() includes orphaned ids, which is what we want as we need to account + // for requests that were cancelled or timed out (since the node is likely to still be + // processing them). + return (pool == null) ? 
0 : pool.getInFlight(); + } + + private static Predicate getFilterFromConfig( + @NonNull DriverContext context, @NonNull String profileName) { + Predicate filterFromBuilder = + ((InternalDriverContext) context).getNodeFilter(profileName); + if (filterFromBuilder != null) { + return filterFromBuilder; + } else { + @SuppressWarnings("unchecked") + Predicate filter = + Reflection.buildFromConfig( + (InternalDriverContext) context, + profileName, + DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, + Predicate.class) + .orElse(INCLUDE_ALL_NODES); + return filter; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java new file mode 100644 index 00000000000..38f7f8046e9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java @@ -0,0 +1,74 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata; + +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNodeInfo; +import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; +import java.util.Set; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class DseTopologyMonitor extends DefaultTopologyMonitor { + + public DseTopologyMonitor(InternalDriverContext context) { + super(context); + } + + @NonNull + @Override + protected DefaultNodeInfo.Builder nodeInfoBuilder( + @NonNull AdminRow row, + @Nullable InetSocketAddress broadcastRpcAddress, + @NonNull EndPoint localEndPoint) { + + // Fill default fields from standard columns: + DefaultNodeInfo.Builder builder = + super.nodeInfoBuilder(row, broadcastRpcAddress, localEndPoint); + + // Handle DSE-specific columns + String rawVersion = row.getString("dse_version"); + if (rawVersion != null) { + builder.withExtra(DseNodeProperties.DSE_VERSION, Version.parse(rawVersion)); + } + + ImmutableSet.Builder workloadsBuilder = ImmutableSet.builder(); + Boolean legacyGraph = row.getBoolean("graph"); // DSE 5.0 + if (legacyGraph != null && legacyGraph) { + workloadsBuilder.add("Graph"); + } + String legacyWorkload = row.getString("workload"); // DSE 5.0 (other than graph) + if (legacyWorkload != null) { + workloadsBuilder.add(legacyWorkload); + } + Set 
modernWorkloads = row.getSetOfString("workloads"); // DSE 5.1+ + if (modernWorkloads != null) { + workloadsBuilder.addAll(modernWorkloads); + } + builder.withExtra(DseNodeProperties.DSE_WORKLOADS, workloadsBuilder.build()); + + builder + .withExtra(DseNodeProperties.SERVER_ID, row.getString("server_id")) + .withExtra(DseNodeProperties.NATIVE_TRANSPORT_PORT, row.getInteger("native_transport_port")) + .withExtra( + DseNodeProperties.NATIVE_TRANSPORT_PORT_SSL, + row.getInteger("native_transport_port_ssl")) + .withExtra(DseNodeProperties.STORAGE_PORT, row.getInteger("storage_port")) + .withExtra(DseNodeProperties.STORAGE_PORT_SSL, row.getInteger("storage_port_ssl")) + .withExtra(DseNodeProperties.JMX_PORT, row.getInteger("jmx_port")); + + return builder; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java new file mode 100644 index 00000000000..884f2c10089 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema; + +import com.datastax.dse.driver.api.core.metadata.schema.DseAggregateMetadata; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.internal.core.metadata.schema.DefaultAggregateMetadata; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultDseAggregateMetadata extends DefaultAggregateMetadata + implements DseAggregateMetadata { + + private final boolean deterministic; + + public DefaultDseAggregateMetadata( + @NonNull CqlIdentifier keyspace, + @NonNull FunctionSignature signature, + @Nullable FunctionSignature finalFuncSignature, + @Nullable Object initCond, + @NonNull DataType returnType, + @NonNull FunctionSignature stateFuncSignature, + @NonNull DataType stateType, + @NonNull TypeCodec stateTypeCodec, + boolean deterministic) { + super( + keyspace, + signature, + finalFuncSignature, + initCond, + returnType, + stateFuncSignature, + stateType, + stateTypeCodec); + this.deterministic = deterministic; + } + + @Override + public boolean isDeterministic() { + return this.deterministic; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof DseAggregateMetadata) { + DseAggregateMetadata that = (DseAggregateMetadata) other; + return Objects.equals(this.getKeyspace(), that.getKeyspace()) + && Objects.equals(this.getSignature(), that.getSignature()) + && Objects.equals( + this.getFinalFuncSignature().orElse(null), that.getFinalFuncSignature().orElse(null)) + && 
Objects.equals(this.getInitCond().orElse(null), that.getInitCond().orElse(null)) + && Objects.equals(this.getReturnType(), that.getReturnType()) + && Objects.equals(this.getStateFuncSignature(), that.getStateFuncSignature()) + && Objects.equals(this.getStateType(), that.getStateType()) + && this.deterministic == that.isDeterministic(); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash( + getKeyspace(), + getSignature(), + getFinalFuncSignature(), + getInitCond(), + getReturnType(), + getStateFuncSignature(), + getStateType(), + deterministic); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Aggregate Name: ") + .append(getSignature().getName().asCql(false)) + .append(", Keyspace: ") + .append(getKeyspace().asCql(false)) + .append(", Return Type: ") + .append(getReturnType().asCql(false, false)) + .append(", Deterministic: ") + .append(deterministic); + return sb.toString(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java new file mode 100644 index 00000000000..28e9357f376 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java @@ -0,0 +1,27 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema; + +import com.datastax.dse.driver.api.core.metadata.schema.DseColumnMetadata; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.metadata.schema.DefaultColumnMetadata; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public class DefaultDseColumnMetadata extends DefaultColumnMetadata implements DseColumnMetadata { + + public DefaultDseColumnMetadata( + @NonNull CqlIdentifier keyspace, + @NonNull CqlIdentifier parent, + @NonNull CqlIdentifier name, + @NonNull DataType dataType, + boolean isStatic) { + super(keyspace, parent, name, dataType, isStatic); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java new file mode 100644 index 00000000000..d215810f291 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java @@ -0,0 +1,110 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema;

import com.datastax.dse.driver.api.core.metadata.schema.DseFunctionMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.internal.core.metadata.schema.DefaultFunctionMetadata;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.List;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * Function metadata enriched with the DSE-specific attributes: {@code DETERMINISTIC},
 * {@code MONOTONIC}, and the list of arguments the function is monotonic on.
 *
 * <p>All OSS attributes are stored by the parent class; {@link #equals(Object)} and
 * {@link #hashCode()} are re-implemented here so that two instances are equal only when the DSE
 * extensions match as well.
 */
@Immutable
public class DefaultDseFunctionMetadata extends DefaultFunctionMetadata
    implements DseFunctionMetadata {

  private final boolean deterministic;
  private final boolean monotonic;
  @NonNull private final List<CqlIdentifier> monotonicArgumentNames;

  public DefaultDseFunctionMetadata(
      @NonNull CqlIdentifier keyspace,
      @NonNull FunctionSignature signature,
      @NonNull List<CqlIdentifier> parameterNames,
      @NonNull String body,
      boolean calledOnNullInput,
      @NonNull String language,
      @NonNull DataType returnType,
      boolean deterministic,
      boolean monotonic,
      @NonNull List<CqlIdentifier> monotonicArgumentNames) {
    super(keyspace, signature, parameterNames, body, calledOnNullInput, language, returnType);
    // DSE extension attributes (absent on OSS Cassandra):
    this.deterministic = deterministic;
    this.monotonic = monotonic;
    this.monotonicArgumentNames = monotonicArgumentNames;
  }

  @Override
  public boolean isDeterministic() {
    return this.deterministic;
  }

  @Override
  public boolean isMonotonic() {
    return this.monotonic;
  }

  @NonNull
  @Override
  public List<CqlIdentifier> getMonotonicArgumentNames() {
    return this.monotonicArgumentNames;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    } else if (other instanceof DseFunctionMetadata) {
      // Compare against the interface (not the concrete class) so that any conforming
      // implementation can be equal to this one.
      DseFunctionMetadata that = (DseFunctionMetadata) other;
      return Objects.equals(this.getKeyspace(), that.getKeyspace())
          && Objects.equals(this.getSignature(), that.getSignature())
          && Objects.equals(this.getParameterNames(), that.getParameterNames())
          && Objects.equals(this.getBody(), that.getBody())
          && this.isCalledOnNullInput() == that.isCalledOnNullInput()
          && Objects.equals(this.getLanguage(), that.getLanguage())
          && Objects.equals(this.getReturnType(), that.getReturnType())
          && this.deterministic == that.isDeterministic()
          && this.monotonic == that.isMonotonic()
          && Objects.equals(this.monotonicArgumentNames, that.getMonotonicArgumentNames());
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        getKeyspace(),
        getSignature(),
        getParameterNames(),
        getBody(),
        isCalledOnNullInput(),
        getLanguage(),
        getReturnType(),
        isDeterministic(),
        isMonotonic(),
        getMonotonicArgumentNames());
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("Function Name: ").append(this.getSignature().getName().asCql(false));
    sb.append(", Keyspace: ").append(this.getKeyspace());
    sb.append(", Language: ").append(this.getLanguage());
    sb.append(", Protocol Code: ").append(this.getReturnType().getProtocolCode());
    sb.append(", Deterministic: ").append(this.isDeterministic());
    sb.append(", Monotonic: ").append(this.isMonotonic());
    // Only the first monotonic argument is printed; empty string when the function is not
    // monotonic on any specific argument.
    sb.append(", Monotonic On: ")
        .append(this.monotonicArgumentNames.isEmpty() ? "" : this.monotonicArgumentNames.get(0));
    return sb.toString();
  }
}

// ─── file: DefaultDseIndexMetadata.java ───────────────────────────────────────

/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema;

import com.datastax.dse.driver.api.core.metadata.schema.DseIndexMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.IndexKind;
import com.datastax.oss.driver.internal.core.metadata.schema.DefaultIndexMetadata;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.Map;
import net.jcip.annotations.Immutable;

/**
 * DSE index metadata. DSE adds no extra index attributes, so this is a plain marker subclass of
 * the OSS implementation that also exposes the {@link DseIndexMetadata} interface.
 */
@Immutable
public class DefaultDseIndexMetadata extends DefaultIndexMetadata implements DseIndexMetadata {

  public DefaultDseIndexMetadata(
      @NonNull CqlIdentifier keyspace,
      @NonNull CqlIdentifier table,
      @NonNull CqlIdentifier name,
      @NonNull IndexKind kind,
      @NonNull String target,
      @NonNull Map<String, String> options) {
    super(keyspace, table, name, kind, target, options);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema;

import com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature;
import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.Map;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * DSE keyspace metadata: name, durability/virtual flags, replication settings, and the child
 * elements (UDTs, tables, views, functions, aggregates) keyed by their identifier or signature.
 *
 * <p>Immutable value object; all maps are expected to be immutable snapshots built by the schema
 * parser.
 */
@Immutable
public class DefaultDseKeyspaceMetadata implements DseKeyspaceMetadata {

  @NonNull private final CqlIdentifier name;
  private final boolean durableWrites;
  // true for system virtual keyspaces (Cassandra 4 / DSE 6.8+); they have no replication settings
  private final boolean virtual;
  @NonNull private final Map<String, String> replication;
  @NonNull private final Map<CqlIdentifier, UserDefinedType> types;
  @NonNull private final Map<CqlIdentifier, TableMetadata> tables;
  @NonNull private final Map<CqlIdentifier, ViewMetadata> views;
  @NonNull private final Map<FunctionSignature, FunctionMetadata> functions;
  @NonNull private final Map<FunctionSignature, AggregateMetadata> aggregates;

  public DefaultDseKeyspaceMetadata(
      @NonNull CqlIdentifier name,
      boolean durableWrites,
      boolean virtual,
      @NonNull Map<String, String> replication,
      @NonNull Map<CqlIdentifier, UserDefinedType> types,
      @NonNull Map<CqlIdentifier, TableMetadata> tables,
      @NonNull Map<CqlIdentifier, ViewMetadata> views,
      @NonNull Map<FunctionSignature, FunctionMetadata> functions,
      @NonNull Map<FunctionSignature, AggregateMetadata> aggregates) {
    this.name = name;
    this.durableWrites = durableWrites;
    this.virtual = virtual;
    this.replication = replication;
    this.types = types;
    this.tables = tables;
    this.views = views;
    this.functions = functions;
    this.aggregates = aggregates;
  }

  @NonNull
  @Override
  public CqlIdentifier getName() {
    return name;
  }

  @Override
  public boolean isDurableWrites() {
    return durableWrites;
  }

  @Override
  public boolean isVirtual() {
    return virtual;
  }

  @NonNull
  @Override
  public Map<String, String> getReplication() {
    return replication;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, UserDefinedType> getUserDefinedTypes() {
    return types;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, TableMetadata> getTables() {
    return tables;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, ViewMetadata> getViews() {
    return views;
  }

  @NonNull
  @Override
  public Map<FunctionSignature, FunctionMetadata> getFunctions() {
    return functions;
  }

  @NonNull
  @Override
  public Map<FunctionSignature, AggregateMetadata> getAggregates() {
    return aggregates;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    } else if (other instanceof DseKeyspaceMetadata) {
      DseKeyspaceMetadata that = (DseKeyspaceMetadata) other;
      return Objects.equals(this.name, that.getName())
          && this.durableWrites == that.isDurableWrites()
          && this.virtual == that.isVirtual()
          && Objects.equals(this.replication, that.getReplication())
          && Objects.equals(this.types, that.getUserDefinedTypes())
          && Objects.equals(this.tables, that.getTables())
          && Objects.equals(this.views, that.getViews())
          && Objects.equals(this.functions, that.getFunctions())
          && Objects.equals(this.aggregates, that.getAggregates());
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        name, durableWrites, virtual, replication, types, tables, views, functions, aggregates);
  }
}

// ─── file: DefaultDseTableMetadata.java ───────────────────────────────────────

/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema;

import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import net.jcip.annotations.Immutable;

/**
 * DSE table metadata: identity, primary-key layout, columns, table options and indexes.
 *
 * <p>NOTE(review): {@code options} is deliberately excluded from {@link #equals(Object)} and
 * {@link #hashCode()} (they stay mutually consistent) — presumably to mirror the OSS
 * implementation; confirm before adding it.
 */
@Immutable
public class DefaultDseTableMetadata implements DseTableMetadata {

  @NonNull private final CqlIdentifier keyspace;
  @NonNull private final CqlIdentifier name;
  // null for virtual tables, which have no server-side id
  @Nullable private final UUID id;
  private final boolean compactStorage;
  private final boolean virtual;
  @NonNull private final List<ColumnMetadata> partitionKey;
  @NonNull private final Map<ColumnMetadata, ClusteringOrder> clusteringColumns;
  @NonNull private final Map<CqlIdentifier, ColumnMetadata> columns;
  @NonNull private final Map<CqlIdentifier, Object> options;
  @NonNull private final Map<CqlIdentifier, IndexMetadata> indexes;

  public DefaultDseTableMetadata(
      @NonNull CqlIdentifier keyspace,
      @NonNull CqlIdentifier name,
      @Nullable UUID id,
      boolean compactStorage,
      boolean virtual,
      @NonNull List<ColumnMetadata> partitionKey,
      @NonNull Map<ColumnMetadata, ClusteringOrder> clusteringColumns,
      @NonNull Map<CqlIdentifier, ColumnMetadata> columns,
      @NonNull Map<CqlIdentifier, Object> options,
      @NonNull Map<CqlIdentifier, IndexMetadata> indexes) {
    this.keyspace = keyspace;
    this.name = name;
    this.id = id;
    this.compactStorage = compactStorage;
    this.virtual = virtual;
    this.partitionKey = partitionKey;
    this.clusteringColumns = clusteringColumns;
    this.columns = columns;
    this.options = options;
    this.indexes = indexes;
  }

  @NonNull
  @Override
  public CqlIdentifier getKeyspace() {
    return keyspace;
  }

  @NonNull
  @Override
  public CqlIdentifier getName() {
    return name;
  }

  @NonNull
  @Override
  public Optional<UUID> getId() {
    return Optional.ofNullable(id);
  }

  @Override
  public boolean isCompactStorage() {
    return compactStorage;
  }

  @Override
  public boolean isVirtual() {
    return virtual;
  }

  @NonNull
  @Override
  public List<ColumnMetadata> getPartitionKey() {
    return partitionKey;
  }

  @NonNull
  @Override
  public Map<ColumnMetadata, ClusteringOrder> getClusteringColumns() {
    return clusteringColumns;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, ColumnMetadata> getColumns() {
    return columns;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, Object> getOptions() {
    return options;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, IndexMetadata> getIndexes() {
    return indexes;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    } else if (other instanceof DseTableMetadata) {
      DseTableMetadata that = (DseTableMetadata) other;
      return Objects.equals(this.keyspace, that.getKeyspace())
          && Objects.equals(this.name, that.getName())
          && Objects.equals(Optional.ofNullable(this.id), that.getId())
          && this.compactStorage == that.isCompactStorage()
          && this.virtual == that.isVirtual()
          && Objects.equals(this.partitionKey, that.getPartitionKey())
          && Objects.equals(this.clusteringColumns, that.getClusteringColumns())
          && Objects.equals(this.columns, that.getColumns())
          && Objects.equals(this.indexes, that.getIndexes());
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        keyspace,
        name,
        id,
        compactStorage,
        virtual,
        partitionKey,
        clusteringColumns,
        columns,
        indexes);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema;

import com.datastax.dse.driver.api.core.metadata.schema.DseViewMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import net.jcip.annotations.Immutable;

/**
 * DSE materialized-view metadata: identity, base table, the view's WHERE clause (when available
 * from the system tables), primary-key layout, columns and options.
 */
@Immutable
public class DefaultDseViewMetadata implements DseViewMetadata {

  @NonNull private final CqlIdentifier keyspace;
  @NonNull private final CqlIdentifier name;
  @NonNull private final CqlIdentifier baseTable;
  private final boolean includesAllColumns;
  // null when the server did not expose the view's filter
  @Nullable private final String whereClause;
  @NonNull private final UUID id;
  @NonNull private final ImmutableList<ColumnMetadata> partitionKey;
  @NonNull private final ImmutableMap<ColumnMetadata, ClusteringOrder> clusteringColumns;
  @NonNull private final ImmutableMap<CqlIdentifier, ColumnMetadata> columns;
  @NonNull private final Map<CqlIdentifier, Object> options;

  public DefaultDseViewMetadata(
      @NonNull CqlIdentifier keyspace,
      @NonNull CqlIdentifier name,
      @NonNull CqlIdentifier baseTable,
      boolean includesAllColumns,
      @Nullable String whereClause,
      @NonNull UUID id,
      @NonNull ImmutableList<ColumnMetadata> partitionKey,
      @NonNull ImmutableMap<ColumnMetadata, ClusteringOrder> clusteringColumns,
      @NonNull ImmutableMap<CqlIdentifier, ColumnMetadata> columns,
      @NonNull Map<CqlIdentifier, Object> options) {
    this.keyspace = keyspace;
    this.name = name;
    this.baseTable = baseTable;
    this.includesAllColumns = includesAllColumns;
    this.whereClause = whereClause;
    this.id = id;
    this.partitionKey = partitionKey;
    this.clusteringColumns = clusteringColumns;
    this.columns = columns;
    this.options = options;
  }

  @NonNull
  @Override
  public CqlIdentifier getKeyspace() {
    return keyspace;
  }

  @NonNull
  @Override
  public CqlIdentifier getName() {
    return name;
  }

  @NonNull
  @Override
  public Optional<UUID> getId() {
    // id is non-null for views, so this Optional is always present
    return Optional.of(id);
  }

  @NonNull
  @Override
  public CqlIdentifier getBaseTable() {
    return baseTable;
  }

  @Override
  public boolean includesAllColumns() {
    return includesAllColumns;
  }

  @NonNull
  @Override
  public Optional<String> getWhereClause() {
    return Optional.ofNullable(whereClause);
  }

  @NonNull
  @Override
  public List<ColumnMetadata> getPartitionKey() {
    return partitionKey;
  }

  @NonNull
  @Override
  public Map<ColumnMetadata, ClusteringOrder> getClusteringColumns() {
    return clusteringColumns;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, ColumnMetadata> getColumns() {
    return columns;
  }

  @NonNull
  @Override
  public Map<CqlIdentifier, Object> getOptions() {
    return options;
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    } else if (other instanceof DseViewMetadata) {
      DseViewMetadata that = (DseViewMetadata) other;
      return Objects.equals(this.keyspace, that.getKeyspace())
          && Objects.equals(this.name, that.getName())
          && Objects.equals(this.baseTable, that.getBaseTable())
          && this.includesAllColumns == that.includesAllColumns()
          && Objects.equals(this.whereClause, that.getWhereClause().orElse(null))
          && Objects.equals(Optional.of(this.id), that.getId())
          && Objects.equals(this.partitionKey, that.getPartitionKey())
          && Objects.equals(this.clusteringColumns, that.getClusteringColumns())
          && Objects.equals(this.columns, that.getColumns())
          && Objects.equals(this.options, that.getOptions());
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        keyspace,
        name,
        baseTable,
        includesAllColumns,
        whereClause,
        id,
        partitionKey,
        clusteringColumns,
        columns,
        options);
  }
}

// ─── file: DseAggregateParser.java ────────────────────────────────────────────

/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.parsing;

import com.datastax.dse.driver.api.core.metadata.schema.DseAggregateMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseAggregateMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.internal.core.adminrequest.AdminRow;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.AggregateParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeParser;
import java.util.Map;
import net.jcip.annotations.ThreadSafe;

/**
 * Parses an aggregate row into DSE metadata: delegates the OSS attributes to the regular
 * {@link AggregateParser}, then reads the DSE-specific {@code deterministic} column.
 */
@ThreadSafe
public class DseAggregateParser {

  private final AggregateParser aggregateParser;
  private final InternalDriverContext context;

  public DseAggregateParser(DataTypeParser dataTypeParser, InternalDriverContext context) {
    this.aggregateParser = new AggregateParser(dataTypeParser, context);
    this.context = context;
  }

  public DseAggregateMetadata parseAggregate(
      AdminRow row,
      CqlIdentifier keyspaceId,
      Map<CqlIdentifier, UserDefinedType> userDefinedTypes) {
    AggregateMetadata aggregate = aggregateParser.parseAggregate(row, keyspaceId, userDefinedTypes);
    if (aggregate == null) {
      // the delegate can give up on malformed rows; callers already null-check our result
      return null;
    }
    // DSE extension column; absent on OSS Cassandra and older DSE versions.
    // Boolean.TRUE.equals(...) avoids an unboxing NPE when the column exists but is null.
    boolean deterministic =
        row.contains("deterministic") && Boolean.TRUE.equals(row.getBoolean("deterministic"));

    return new DefaultDseAggregateMetadata(
        aggregate.getKeyspace(),
        aggregate.getSignature(),
        aggregate.getFinalFuncSignature().orElse(null),
        aggregate.getInitCond().orElse(null),
        aggregate.getReturnType(),
        aggregate.getStateFuncSignature(),
        aggregate.getStateType(),
        context.getCodecRegistry().codecFor(aggregate.getStateType()),
        deterministic);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.parsing;

import com.datastax.dse.driver.api.core.metadata.schema.DseFunctionMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseFunctionMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.internal.core.adminrequest.AdminRow;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.FunctionParser;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import net.jcip.annotations.ThreadSafe;

/**
 * Parses a function row into DSE metadata: delegates the OSS attributes to the regular
 * {@link FunctionParser}, then reads the DSE-specific columns ({@code deterministic},
 * {@code monotonic}, {@code monotonic_on}).
 */
@ThreadSafe
public class DseFunctionParser {

  private final FunctionParser functionParser;

  public DseFunctionParser(DataTypeParser dataTypeParser, InternalDriverContext context) {
    this.functionParser = new FunctionParser(dataTypeParser, context);
  }

  public DseFunctionMetadata parseFunction(
      AdminRow row,
      CqlIdentifier keyspaceId,
      Map<CqlIdentifier, UserDefinedType> userDefinedTypes) {
    FunctionMetadata function = functionParser.parseFunction(row, keyspaceId, userDefinedTypes);
    if (function == null) {
      // the delegate can give up on malformed rows; callers already null-check our result
      return null;
    }
    // DSE extension columns; absent on OSS Cassandra and older DSE versions.
    // Boolean.TRUE.equals(...) avoids an unboxing NPE when a column exists but is null.
    boolean deterministic =
        row.contains("deterministic") && Boolean.TRUE.equals(row.getBoolean("deterministic"));
    boolean monotonic =
        row.contains("monotonic") && Boolean.TRUE.equals(row.getBoolean("monotonic"));
    // Convert the raw list of internal names into CqlIdentifiers.
    List<CqlIdentifier> monotonicOn =
        row.contains("monotonic_on")
            ? row.getListOfString("monotonic_on").stream()
                .map(CqlIdentifier::fromInternal)
                .collect(Collectors.toList())
            : Collections.emptyList();

    return new DefaultDseFunctionMetadata(
        function.getKeyspace(),
        function.getSignature(),
        function.getParameterNames(),
        function.getBody(),
        function.isCalledOnNullInput(),
        function.getLanguage(),
        function.getReturnType(),
        deterministic,
        monotonic,
        monotonicOn);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.parsing;

import com.datastax.dse.driver.api.core.metadata.schema.DseAggregateMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseFunctionMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseViewMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseKeyspaceMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature;
import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.internal.core.adminrequest.AdminRow;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SimpleJsonParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.UserDefinedTypeParser;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows;
import com.datastax.oss.driver.internal.core.metadata.schema.refresh.SchemaRefresh;
import com.datastax.oss.driver.internal.core.util.NanoTime;
import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import java.util.Collections;
import java.util.Map;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Schema parser implementation for DSE.
 *
 * <p>For modularity, the code for each element row is split into separate classes (schema stuff
 * is not on the hot path, so creating a few extra objects doesn't matter).
 */
@ThreadSafe
public class DseSchemaParser implements SchemaParser {

  // Fixed: previously obtained for CassandraSchemaParser.class (copy-paste), which misattributed
  // every log line emitted by this parser.
  private static final Logger LOG = LoggerFactory.getLogger(DseSchemaParser.class);

  private final SchemaRows rows;
  private final UserDefinedTypeParser userDefinedTypeParser;
  private final DseTableParser tableParser;
  private final DseViewParser viewParser;
  private final DseFunctionParser functionParser;
  private final DseAggregateParser aggregateParser;
  private final String logPrefix;
  private final long startTimeNs = System.nanoTime();

  public DseSchemaParser(SchemaRows rows, InternalDriverContext context) {
    this.rows = rows;
    this.logPrefix = context.getSessionName();

    this.userDefinedTypeParser = new UserDefinedTypeParser(rows.dataTypeParser(), context);
    this.tableParser = new DseTableParser(rows, context);
    this.viewParser = new DseViewParser(rows, context);
    this.functionParser = new DseFunctionParser(rows.dataTypeParser(), context);
    this.aggregateParser = new DseAggregateParser(rows.dataTypeParser(), context);
  }

  /** Parses all regular and virtual keyspaces into a single schema refresh event. */
  @Override
  public SchemaRefresh parse() {
    ImmutableMap.Builder<CqlIdentifier, KeyspaceMetadata> keyspacesBuilder =
        ImmutableMap.builder();
    for (AdminRow row : rows.keyspaces()) {
      DseKeyspaceMetadata keyspace = parseKeyspace(row);
      keyspacesBuilder.put(keyspace.getName(), keyspace);
    }
    for (AdminRow row : rows.virtualKeyspaces()) {
      DseKeyspaceMetadata keyspace = parseVirtualKeyspace(row);
      keyspacesBuilder.put(keyspace.getName(), keyspace);
    }
    SchemaRefresh refresh = new SchemaRefresh(keyspacesBuilder.build());
    LOG.debug("[{}] Schema parsing took {}", logPrefix, NanoTime.formatTimeSince(startTimeNs));
    return refresh;
  }

  private DseKeyspaceMetadata parseKeyspace(AdminRow keyspaceRow) {
    // The row layout differs by server version:
    // - Cassandra <= 2.2 (system.schema_keyspaces): durable_writes, strategy_class,
    //   strategy_options (a JSON string).
    // - Cassandra >= 3.0 (system_schema.keyspaces): durable_writes, replication (a map that
    //   already contains the "class" entry).
    CqlIdentifier keyspaceId = CqlIdentifier.fromInternal(keyspaceRow.getString("keyspace_name"));
    boolean durableWrites =
        MoreObjects.firstNonNull(keyspaceRow.getBoolean("durable_writes"), false);

    Map<String, String> replicationOptions;
    if (keyspaceRow.contains("strategy_class")) {
      // Legacy layout: merge the JSON options with the strategy class under the modern key.
      String strategyClass = keyspaceRow.getString("strategy_class");
      Map<String, String> strategyOptions =
          SimpleJsonParser.parseStringMap(keyspaceRow.getString("strategy_options"));
      replicationOptions =
          ImmutableMap.<String, String>builder()
              .putAll(strategyOptions)
              .put("class", strategyClass)
              .build();
    } else {
      replicationOptions = keyspaceRow.getMapOfStringToString("replication");
    }

    // UDTs must be parsed first: tables, views, functions and aggregates may reference them.
    Map<CqlIdentifier, UserDefinedType> types = parseTypes(keyspaceId);

    return new DefaultDseKeyspaceMetadata(
        keyspaceId,
        durableWrites,
        false,
        replicationOptions,
        types,
        parseTables(keyspaceId, types),
        parseViews(keyspaceId, types),
        parseFunctions(keyspaceId, types),
        parseAggregates(keyspaceId, types));
  }

  private Map<CqlIdentifier, UserDefinedType> parseTypes(CqlIdentifier keyspaceId) {
    return userDefinedTypeParser.parse(rows.types().get(keyspaceId), keyspaceId);
  }

  private Map<CqlIdentifier, TableMetadata> parseTables(
      CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> types) {
    ImmutableMap.Builder<CqlIdentifier, TableMetadata> tablesBuilder = ImmutableMap.builder();
    for (AdminRow tableRow : rows.tables().get(keyspaceId)) {
      DseTableMetadata table = tableParser.parseTable(tableRow, keyspaceId, types);
      // the parser returns null for rows it can't make sense of; skip those
      if (table != null) {
        tablesBuilder.put(table.getName(), table);
      }
    }
    return tablesBuilder.build();
  }

  private Map<CqlIdentifier, ViewMetadata> parseViews(
      CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> types) {
    ImmutableMap.Builder<CqlIdentifier, ViewMetadata> viewsBuilder = ImmutableMap.builder();
    for (AdminRow viewRow : rows.views().get(keyspaceId)) {
      DseViewMetadata view = viewParser.parseView(viewRow, keyspaceId, types);
      if (view != null) {
        viewsBuilder.put(view.getName(), view);
      }
    }
    return viewsBuilder.build();
  }

  private Map<FunctionSignature, FunctionMetadata> parseFunctions(
      CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> types) {
    ImmutableMap.Builder<FunctionSignature, FunctionMetadata> functionsBuilder =
        ImmutableMap.builder();
    for (AdminRow functionRow : rows.functions().get(keyspaceId)) {
      DseFunctionMetadata function = functionParser.parseFunction(functionRow, keyspaceId, types);
      if (function != null) {
        functionsBuilder.put(function.getSignature(), function);
      }
    }
    return functionsBuilder.build();
  }

  private Map<FunctionSignature, AggregateMetadata> parseAggregates(
      CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> types) {
    ImmutableMap.Builder<FunctionSignature, AggregateMetadata> aggregatesBuilder =
        ImmutableMap.builder();
    for (AdminRow aggregateRow : rows.aggregates().get(keyspaceId)) {
      DseAggregateMetadata aggregate =
          aggregateParser.parseAggregate(aggregateRow, keyspaceId, types);
      if (aggregate != null) {
        aggregatesBuilder.put(aggregate.getSignature(), aggregate);
      }
    }
    return aggregatesBuilder.build();
  }

  private DseKeyspaceMetadata parseVirtualKeyspace(AdminRow keyspaceRow) {
    CqlIdentifier keyspaceId = CqlIdentifier.fromInternal(keyspaceRow.getString("keyspace_name"));
    boolean durableWrites =
        MoreObjects.firstNonNull(keyspaceRow.getBoolean("durable_writes"), false);

    // Virtual keyspaces have no replication, UDTs, views, functions or aggregates.
    return new DefaultDseKeyspaceMetadata(
        keyspaceId,
        durableWrites,
        true,
        Collections.emptyMap(),
        Collections.emptyMap(),
        parseVirtualTables(keyspaceId),
        Collections.emptyMap(),
        Collections.emptyMap(),
        Collections.emptyMap());
  }

  private Map<CqlIdentifier, TableMetadata> parseVirtualTables(CqlIdentifier keyspaceId) {
    ImmutableMap.Builder<CqlIdentifier, TableMetadata> tablesBuilder = ImmutableMap.builder();
    for (AdminRow tableRow : rows.virtualTables().get(keyspaceId)) {
      DseTableMetadata table = tableParser.parseVirtualTable(tableRow, keyspaceId);
      if (table != null) {
        tablesBuilder.put(table.getName(), table);
      }
    }
    return tablesBuilder.build();
  }
}
a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java new file mode 100644 index 00000000000..5497c21c8a3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema.parsing; + +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParser; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class DseSchemaParserFactory implements SchemaParserFactory { + + private final InternalDriverContext context; + + public DseSchemaParserFactory(InternalDriverContext context) { + this.context = context; + } + + @Override + public SchemaParser newInstance(SchemaRows rows) { + return new DseSchemaParser(rows, context); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java new file mode 100644 index 00000000000..87fb97aaa4f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -0,0 +1,329 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.parsing;

import com.datastax.dse.driver.api.core.metadata.schema.DseColumnMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseIndexMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseColumnMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseIndexMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseTableMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.api.core.metadata.schema.IndexKind;
import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.ListType;
import com.datastax.oss.driver.api.core.type.MapType;
import com.datastax.oss.driver.api.core.type.SetType;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.internal.core.adminrequest.AdminRow;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameCompositeParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RawColumn;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.TableParser;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows;
import com.datastax.oss.driver.internal.core.util.Loggers;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Parses rows of the system schema tables into {@link DseTableMetadata} instances, handling both
 * the legacy (C* &lt;= 2.2) and modern (C* &gt;= 3.0) schema table layouts, as well as virtual
 * tables.
 */
@ThreadSafe
public class DseTableParser extends RelationParser {

  // FIX: was LoggerFactory.getLogger(TableParser.class), which attributed this class's log events
  // to the OSS TableParser and made them impossible to filter per-class.
  private static final Logger LOG = LoggerFactory.getLogger(DseTableParser.class);

  public DseTableParser(SchemaRows rows, InternalDriverContext context) {
    super(rows, context);
  }

  /**
   * Parses a raw row of the tables system table.
   *
   * @param tableRow the raw row; its schema depends on the server version (see the comments below).
   * @param keyspaceId the keyspace the table belongs to.
   * @param userTypes the keyspace's user-defined types, used to resolve column types.
   * @return the parsed metadata, or {@code null} if no column rows were found for this table (in
   *     which case a warning is logged and the refresh for this table is skipped).
   */
  public DseTableMetadata parseTable(
      AdminRow tableRow, CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> userTypes) {
    // Cassandra <= 2.2:
    // CREATE TABLE system.schema_columnfamilies (
    //     keyspace_name text,
    //     columnfamily_name text,
    //     bloom_filter_fp_chance double,
    //     caching text,
    //     cf_id uuid,
    //     column_aliases text, (2.1 only)
    //     comment text,
    //     compaction_strategy_class text,
    //     compaction_strategy_options text,
    //     comparator text,
    //     compression_parameters text,
    //     default_time_to_live int,
    //     default_validator text,
    //     dropped_columns map<text, bigint>,
    //     gc_grace_seconds int,
    //     index_interval int,
    //     is_dense boolean, (2.1 only)
    //     key_aliases text, (2.1 only)
    //     key_validator text,
    //     local_read_repair_chance double,
    //     max_compaction_threshold int,
    //     max_index_interval int,
    //     memtable_flush_period_in_ms int,
    //     min_compaction_threshold int,
    //     min_index_interval int,
    //     read_repair_chance double,
    //     speculative_retry text,
    //     subcomparator text,
    //     type text,
    //     value_alias text, (2.1 only)
    //     PRIMARY KEY (keyspace_name, columnfamily_name)
    // ) WITH CLUSTERING ORDER BY (columnfamily_name ASC)
    //
    // Cassandra 3.0:
    // CREATE TABLE system_schema.tables (
    //     keyspace_name text,
    //     table_name text,
    //     bloom_filter_fp_chance double,
    //     caching frozen<map<text, text>>,
    //     cdc boolean,
    //     comment text,
    //     compaction frozen<map<text, text>>,
    //     compression frozen<map<text, text>>,
    //     crc_check_chance double,
    //     dclocal_read_repair_chance double,
    //     default_time_to_live int,
    //     extensions frozen<map<text, blob>>,
    //     flags frozen<set<text>>,
    //     gc_grace_seconds int,
    //     id uuid,
    //     max_index_interval int,
    //     memtable_flush_period_in_ms int,
    //     min_index_interval int,
    //     read_repair_chance double,
    //     speculative_retry text,
    //     PRIMARY KEY (keyspace_name, table_name)
    // ) WITH CLUSTERING ORDER BY (table_name ASC)
    CqlIdentifier tableId =
        CqlIdentifier.fromInternal(
            tableRow.getString(
                tableRow.contains("table_name") ? "table_name" : "columnfamily_name"));

    UUID uuid = tableRow.contains("id") ? tableRow.getUuid("id") : tableRow.getUuid("cf_id");

    List<RawColumn> rawColumns =
        RawColumn.toRawColumns(
            rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId));
    if (rawColumns.isEmpty()) {
      LOG.warn(
          "[{}] Processing TABLE refresh for {}.{} but found no matching rows, skipping",
          logPrefix,
          keyspaceId,
          tableId);
      return null;
    }

    boolean isCompactStorage;
    if (tableRow.contains("flags")) {
      // C* >= 3.0: the storage layout is encoded in the 'flags' column.
      Set<String> flags = tableRow.getSetOfString("flags");
      boolean isDense = flags.contains("dense");
      boolean isSuper = flags.contains("super");
      boolean isCompound = flags.contains("compound");
      isCompactStorage = isSuper || isDense || !isCompound;
      boolean isStaticCompact = !isSuper && !isDense && !isCompound;
      if (isStaticCompact) {
        RawColumn.pruneStaticCompactTableColumns(rawColumns);
      } else if (isDense) {
        RawColumn.pruneDenseTableColumnsV3(rawColumns);
      }
    } else {
      // C* <= 2.2: infer the layout from 'is_dense' and the comparator class.
      boolean isDense = tableRow.getBoolean("is_dense");
      if (isDense) {
        RawColumn.pruneDenseTableColumnsV2(rawColumns);
      }
      DataTypeClassNameCompositeParser.ParseResult comparator =
          new DataTypeClassNameCompositeParser()
              .parseWithComposite(tableRow.getString("comparator"), keyspaceId, userTypes, context);
      isCompactStorage = isDense || !comparator.isComposite;
    }

    // Sort so that partition key / clustering columns come out in their declared order.
    Collections.sort(rawColumns);
    ImmutableMap.Builder<CqlIdentifier, ColumnMetadata> allColumnsBuilder = ImmutableMap.builder();
    ImmutableList.Builder<ColumnMetadata> partitionKeyBuilder = ImmutableList.builder();
    ImmutableMap.Builder<ColumnMetadata, ClusteringOrder> clusteringColumnsBuilder =
        ImmutableMap.builder();
    ImmutableMap.Builder<CqlIdentifier, IndexMetadata> indexesBuilder = ImmutableMap.builder();

    for (RawColumn raw : rawColumns) {
      DataType dataType = rows.dataTypeParser().parse(keyspaceId, raw.dataType, userTypes, context);
      DseColumnMetadata column =
          new DefaultDseColumnMetadata(
              keyspaceId, tableId, raw.name, dataType, raw.kind.equals(RawColumn.KIND_STATIC));
      switch (raw.kind) {
        case RawColumn.KIND_PARTITION_KEY:
          partitionKeyBuilder.add(column);
          break;
        case RawColumn.KIND_CLUSTERING_COLUMN:
          clusteringColumnsBuilder.put(
              column, raw.reversed ? ClusteringOrder.DESC : ClusteringOrder.ASC);
          break;
        default:
          // regular and static columns only go into the "all columns" map below
      }
      allColumnsBuilder.put(column.getName(), column);

      // C* <= 2.2 stores index definitions alongside the column.
      DseIndexMetadata index = buildLegacyIndex(raw, column);
      if (index != null) {
        indexesBuilder.put(index.getName(), index);
      }
    }

    Map<CqlIdentifier, Object> options;
    try {
      options = parseOptions(tableRow);
    } catch (Exception e) {
      // Options change the most often, so be especially lenient if anything goes wrong.
      Loggers.warnWithException(
          LOG,
          "[{}] Error while parsing options for {}.{}, getOptions() will be empty",
          logPrefix,
          keyspaceId,
          tableId,
          e);
      options = Collections.emptyMap();
    }

    // C* >= 3.0 stores index definitions in a dedicated table.
    Collection<AdminRow> indexRows =
        rows.indexes().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId);
    for (AdminRow indexRow : indexRows) {
      DseIndexMetadata index = buildModernIndex(keyspaceId, tableId, indexRow);
      indexesBuilder.put(index.getName(), index);
    }

    return new DefaultDseTableMetadata(
        keyspaceId,
        tableId,
        uuid,
        isCompactStorage,
        false,
        partitionKeyBuilder.build(),
        clusteringColumnsBuilder.build(),
        allColumnsBuilder.build(),
        options,
        indexesBuilder.build());
  }

  /**
   * Parses a virtual table (C* >= 4.0): no id, no options, no indexes, and column types never
   * reference user-defined types.
   */
  DseTableMetadata parseVirtualTable(AdminRow tableRow, CqlIdentifier keyspaceId) {

    CqlIdentifier tableId = CqlIdentifier.fromInternal(tableRow.getString("table_name"));

    List<RawColumn> rawColumns =
        RawColumn.toRawColumns(
            rows.virtualColumns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(tableId));
    if (rawColumns.isEmpty()) {
      LOG.warn(
          "[{}] Processing TABLE refresh for {}.{} but found no matching rows, skipping",
          logPrefix,
          keyspaceId,
          tableId);
      return null;
    }

    Collections.sort(rawColumns);
    ImmutableMap.Builder<CqlIdentifier, ColumnMetadata> allColumnsBuilder = ImmutableMap.builder();
    ImmutableList.Builder<ColumnMetadata> partitionKeyBuilder = ImmutableList.builder();
    ImmutableMap.Builder<ColumnMetadata, ClusteringOrder> clusteringColumnsBuilder =
        ImmutableMap.builder();

    for (RawColumn raw : rawColumns) {
      DataType dataType =
          rows.dataTypeParser().parse(keyspaceId, raw.dataType, Collections.emptyMap(), context);
      DseColumnMetadata column =
          new DefaultDseColumnMetadata(
              keyspaceId, tableId, raw.name, dataType, raw.kind.equals(RawColumn.KIND_STATIC));
      switch (raw.kind) {
        case RawColumn.KIND_PARTITION_KEY:
          partitionKeyBuilder.add(column);
          break;
        case RawColumn.KIND_CLUSTERING_COLUMN:
          clusteringColumnsBuilder.put(
              column, raw.reversed ? ClusteringOrder.DESC : ClusteringOrder.ASC);
          break;
        default:
          // regular and static columns only go into the "all columns" map below
      }

      allColumnsBuilder.put(column.getName(), column);
    }

    return new DefaultDseTableMetadata(
        keyspaceId,
        tableId,
        null,
        false,
        true,
        partitionKeyBuilder.build(),
        clusteringColumnsBuilder.build(),
        allColumnsBuilder.build(),
        Collections.emptyMap(),
        Collections.emptyMap());
  }

  // In C*<=2.2, index information is stored alongside the column.
  private DseIndexMetadata buildLegacyIndex(RawColumn raw, ColumnMetadata column) {
    if (raw.indexName == null) {
      return null;
    }
    return new DefaultDseIndexMetadata(
        column.getKeyspace(),
        column.getParent(),
        CqlIdentifier.fromInternal(raw.indexName),
        IndexKind.valueOf(raw.indexType),
        buildLegacyIndexTarget(column, raw.indexOptions),
        raw.indexOptions);
  }

  // Reconstructs the index target expression from legacy per-column index options.
  private static String buildLegacyIndexTarget(
      ColumnMetadata column, Map<String, String> options) {
    String columnName = column.getName().asCql(true);
    DataType columnType = column.getType();
    if (options.containsKey("index_keys")) {
      return String.format("keys(%s)", columnName);
    }
    if (options.containsKey("index_keys_and_values")) {
      return String.format("entries(%s)", columnName);
    }
    if ((columnType instanceof ListType && ((ListType) columnType).isFrozen())
        || (columnType instanceof SetType && ((SetType) columnType).isFrozen())
        || (columnType instanceof MapType && ((MapType) columnType).isFrozen())) {
      return String.format("full(%s)", columnName);
    }
    // Note: the keyword 'values' is not accepted as a valid index target function until 3.0
    return columnName;
  }

  // In C*>=3.0, index information is stored in a dedicated table:
  // CREATE TABLE system_schema.indexes (
  //     keyspace_name text,
  //     table_name text,
  //     index_name text,
  //     kind text,
  //     options frozen<map<text, text>>,
  //     PRIMARY KEY (keyspace_name, table_name, index_name)
  // ) WITH CLUSTERING ORDER BY (table_name ASC, index_name ASC)
  private DseIndexMetadata buildModernIndex(
      CqlIdentifier keyspaceId, CqlIdentifier tableId, AdminRow row) {
    CqlIdentifier name = CqlIdentifier.fromInternal(row.getString("index_name"));
    IndexKind kind = IndexKind.valueOf(row.getString("kind"));
    Map<String, String> options = row.getMapOfStringToString("options");
    String target = options.get("target");
    return new DefaultDseIndexMetadata(keyspaceId, tableId, name, kind, target, options);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.parsing;

import com.datastax.dse.driver.api.core.metadata.schema.DseColumnMetadata;
import com.datastax.dse.driver.api.core.metadata.schema.DseViewMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseColumnMetadata;
import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseViewMetadata;
import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder;
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.api.core.type.DataType;
import com.datastax.oss.driver.api.core.type.UserDefinedType;
import com.datastax.oss.driver.internal.core.adminrequest.AdminRow;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RawColumn;
import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows;
import com.datastax.oss.driver.internal.core.util.Loggers;
import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Parses rows of the {@code system_schema.views} table into {@link DseViewMetadata} instances.
 *
 * <p>NOTE(review): the generic type parameters in this block were reconstructed from the
 * surrounding imports and usage (the patch text had them stripped) — confirm against the
 * original file.
 */
@ThreadSafe
public class DseViewParser extends RelationParser {

  private static final Logger LOG = LoggerFactory.getLogger(DseViewParser.class);

  public DseViewParser(SchemaRows rows, InternalDriverContext context) {
    super(rows, context);
  }

  /**
   * Parses a raw row of the views system table.
   *
   * @param viewRow the raw row (C* 3.0+ layout; there are no materialized views in earlier
   *     versions, see the schema in the comment below).
   * @param keyspaceId the keyspace the view belongs to.
   * @param userTypes the keyspace's user-defined types, used to resolve column types.
   * @return the parsed metadata, or {@code null} if no column rows were found for this view (a
   *     warning is logged and the refresh for this view is skipped).
   */
  public DseViewMetadata parseView(
      AdminRow viewRow, CqlIdentifier keyspaceId, Map<CqlIdentifier, UserDefinedType> userTypes) {
    // Cassandra 3.0 (no views in earlier versions):
    // CREATE TABLE system_schema.views (
    //     keyspace_name text,
    //     view_name text,
    //     base_table_id uuid,
    //     base_table_name text,
    //     bloom_filter_fp_chance double,
    //     caching frozen<map<text, text>>,
    //     cdc boolean,
    //     comment text,
    //     compaction frozen<map<text, text>>,
    //     compression frozen<map<text, text>>,
    //     crc_check_chance double,
    //     dclocal_read_repair_chance double,
    //     default_time_to_live int,
    //     extensions frozen<map<text, blob>>,
    //     gc_grace_seconds int,
    //     id uuid,
    //     include_all_columns boolean,
    //     max_index_interval int,
    //     memtable_flush_period_in_ms int,
    //     min_index_interval int,
    //     read_repair_chance double,
    //     speculative_retry text,
    //     where_clause text,
    //     PRIMARY KEY (keyspace_name, view_name)
    // ) WITH CLUSTERING ORDER BY (view_name ASC)
    CqlIdentifier viewId = CqlIdentifier.fromInternal(viewRow.getString("view_name"));

    UUID uuid = viewRow.getUuid("id");
    CqlIdentifier baseTableId = CqlIdentifier.fromInternal(viewRow.getString("base_table_name"));
    // include_all_columns may come back null from the row; treat that as false.
    boolean includesAllColumns =
        MoreObjects.firstNonNull(viewRow.getBoolean("include_all_columns"), false);
    String whereClause = viewRow.getString("where_clause");

    List<RawColumn> rawColumns =
        RawColumn.toRawColumns(
            rows.columns().getOrDefault(keyspaceId, ImmutableMultimap.of()).get(viewId));
    if (rawColumns.isEmpty()) {
      LOG.warn(
          "[{}] Processing VIEW refresh for {}.{} but found no matching rows, skipping",
          logPrefix,
          keyspaceId,
          viewId);
      return null;
    }

    // Sort so that partition key / clustering columns come out in their declared order.
    Collections.sort(rawColumns);
    ImmutableMap.Builder<CqlIdentifier, ColumnMetadata> allColumnsBuilder = ImmutableMap.builder();
    ImmutableList.Builder<ColumnMetadata> partitionKeyBuilder = ImmutableList.builder();
    ImmutableMap.Builder<ColumnMetadata, ClusteringOrder> clusteringColumnsBuilder =
        ImmutableMap.builder();

    for (RawColumn raw : rawColumns) {
      DataType dataType = rows.dataTypeParser().parse(keyspaceId, raw.dataType, userTypes, context);
      DseColumnMetadata column =
          new DefaultDseColumnMetadata(
              keyspaceId, viewId, raw.name, dataType, raw.kind.equals(RawColumn.KIND_STATIC));
      switch (raw.kind) {
        case RawColumn.KIND_PARTITION_KEY:
          partitionKeyBuilder.add(column);
          break;
        case RawColumn.KIND_CLUSTERING_COLUMN:
          clusteringColumnsBuilder.put(
              column, raw.reversed ? ClusteringOrder.DESC : ClusteringOrder.ASC);
          break;
        default:
          // nothing to do
      }
      allColumnsBuilder.put(column.getName(), column);
    }

    Map<CqlIdentifier, Object> options;
    try {
      options = parseOptions(viewRow);
    } catch (Exception e) {
      // Options change the most often, so be especially lenient if anything goes wrong.
      Loggers.warnWithException(
          LOG,
          "[{}] Error while parsing options for {}.{}, getOptions() will be empty",
          logPrefix,
          keyspaceId,
          viewId,
          e);
      options = Collections.emptyMap();
    }

    return new DefaultDseViewMetadata(
        keyspaceId,
        viewId,
        baseTableId,
        includesAllColumns,
        whereClause,
        uuid,
        partitionKeyBuilder.build(),
        clusteringColumnsBuilder.build(),
        allColumnsBuilder.build(),
        options);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.schema.queries;

import com.datastax.dse.driver.api.core.metadata.DseNodeProperties;
import com.datastax.oss.driver.api.core.Version;
import com.datastax.oss.driver.api.core.config.DriverExecutionProfile;
import com.datastax.oss.driver.api.core.metadata.Metadata;
import com.datastax.oss.driver.api.core.metadata.Node;
import com.datastax.oss.driver.internal.core.channel.DriverChannel;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra21SchemaQueries;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra3SchemaQueries;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra4SchemaQueries;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.DefaultSchemaQueriesFactory;
import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueries;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.util.concurrent.CompletableFuture;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Selects the schema query implementation to use against a DSE node, based on the node's reported
 * DSE version (each DSE release line embeds a different Cassandra schema layout).
 */
@ThreadSafe
public class DseSchemaQueriesFactory extends DefaultSchemaQueriesFactory {

  // FIX: was LoggerFactory.getLogger(DefaultSchemaQueriesFactory.class), which attributed this
  // class's log events to the parent class and made them impossible to filter per-class.
  private static final Logger LOG = LoggerFactory.getLogger(DseSchemaQueriesFactory.class);

  // DSE version cut-offs for the embedded Cassandra schema layout.
  @NonNull private static final Version V5_0_0 = Version.parse("5.0.0");
  @NonNull private static final Version V6_7_0 = Version.parse("6.7.0");

  public DseSchemaQueriesFactory(InternalDriverContext context) {
    super(context);
  }

  /**
   * Returns the schema queries matching the node's DSE version, falling back to the parent's
   * Cassandra-version-based selection when the DSE version is not known.
   */
  @Override
  protected SchemaQueries newInstance(
      Node node, DriverChannel channel, CompletableFuture<Metadata> refreshFuture) {
    Object versionObj = node.getExtras().get(DseNodeProperties.DSE_VERSION);
    if (versionObj == null) {
      LOG.warn("[{}] DSE version missing for {}, deferring to C* version", logPrefix, node);
      return super.newInstance(node, channel, refreshFuture);
    }

    // Normalize pre-release versions so the range comparisons below work.
    Version version = ((Version) versionObj).nextStable();
    DriverExecutionProfile config = context.getConfig().getDefaultProfile();
    LOG.debug("[{}] Sending schema queries to {} with DSE version {}", logPrefix, node, version);
    // 4.8 is the oldest version supported, which uses C* 2.1 schema
    if (version.compareTo(V5_0_0) < 0) {
      return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix);
    } else if (version.compareTo(V6_7_0) < 0) {
      // 5.0 - 6.7 uses C* 3.0 schema
      return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix);
    } else {
      // 6.7+ uses C* 4.0 schema
      return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix);
    }
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.token;

import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metadata.token.DefaultReplicationStrategyFactory;
import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategy;
import com.datastax.oss.driver.shaded.guava.common.base.Preconditions;
import java.util.Map;
import net.jcip.annotations.ThreadSafe;

/**
 * Extends the default factory to recognize the DSE-only {@code EverywhereStrategy}; every other
 * strategy class is delegated to the parent.
 */
@ThreadSafe
public class DseReplicationStrategyFactory extends DefaultReplicationStrategyFactory {
  public DseReplicationStrategyFactory(InternalDriverContext context) {
    super(context);
  }

  @Override
  public ReplicationStrategy newInstance(Map<String, String> replicationConfig) {
    String strategyClass = replicationConfig.get("class");
    Preconditions.checkNotNull(
        strategyClass, "Missing replication strategy class in " + replicationConfig);
    if ("org.apache.cassandra.locator.EverywhereStrategy".equals(strategyClass)) {
      return new EverywhereStrategy();
    }
    return super.newInstance(replicationConfig);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metadata.token;

import com.datastax.oss.driver.api.core.metadata.Node;
import com.datastax.oss.driver.api.core.metadata.token.Token;
import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategy;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap;
import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import net.jcip.annotations.ThreadSafe;

/**
 * Replication strategy where every node is a replica for every token: each token in the ring maps
 * to the full set of known nodes.
 */
@ThreadSafe
public class EverywhereStrategy implements ReplicationStrategy {

  @Override
  public SetMultimap<Token, Node> computeReplicasByToken(
      Map<Token, Node> tokenToPrimary, List<Token> ring) {
    ImmutableSetMultimap.Builder<Token, Node> replicas = ImmutableSetMultimap.builder();
    // Every token gets the complete node set (the multimap de-duplicates repeated nodes).
    Collection<Node> allNodes = tokenToPrimary.values();
    tokenToPrimary.keySet().forEach(token -> replicas.putAll(token, allNodes));
    return replicas.build();
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metrics;

import com.datastax.dse.driver.DseSessionMetric;
import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
import com.datastax.oss.driver.api.core.config.DriverExecutionProfile;
import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric;
import com.datastax.oss.driver.api.core.metrics.Metrics;
import com.datastax.oss.driver.api.core.metrics.SessionMetric;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory;
import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater;
import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Extends the Dropwizard metrics factory to recognize DSE-specific session metrics in addition to
 * the default ones.
 */
@ThreadSafe
public class DseDropwizardMetricsFactory extends DropwizardMetricsFactory {

  // FIX: was LoggerFactory.getLogger(DropwizardMetricsFactory.class), which attributed this
  // class's log events to the parent class and made them impossible to filter per-class.
  private static final Logger LOG = LoggerFactory.getLogger(DseDropwizardMetricsFactory.class);

  private final SessionMetricUpdater dseSessionUpdater;
  private final String logPrefix;

  public DseDropwizardMetricsFactory(InternalDriverContext context) {
    super(context);
    logPrefix = context.getSessionName();
    DriverExecutionProfile config = context.getConfig().getDefaultProfile();
    Set<SessionMetric> enabledSessionMetrics =
        parseSessionMetricPaths(config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED));
    // If metrics are disabled entirely, fall back to a no-op updater.
    dseSessionUpdater =
        getMetrics()
            .map(Metrics::getRegistry)
            .map(
                registry ->
                    (SessionMetricUpdater)
                        new DseDropwizardSessionMetricUpdater(
                            enabledSessionMetrics, registry, context))
            .orElse(NoopSessionMetricUpdater.INSTANCE);
  }

  @Override
  public SessionMetricUpdater getSessionUpdater() {
    return dseSessionUpdater;
  }

  /**
   * Resolves each configured metric path against the DSE-specific metrics first, then the default
   * ones; unknown paths are logged and skipped rather than failing session initialization.
   */
  @Override
  protected Set<SessionMetric> parseSessionMetricPaths(List<String> paths) {
    Set<SessionMetric> metrics = new HashSet<>();
    for (String path : paths) {
      try {
        metrics.add(DseSessionMetric.fromPath(path));
      } catch (IllegalArgumentException e) {
        try {
          metrics.add(DefaultSessionMetric.fromPath(path));
        } catch (IllegalArgumentException e1) {
          LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path);
        }
      }
    }
    return Collections.unmodifiableSet(metrics);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.metrics;

import com.codahale.metrics.MetricRegistry;
import com.datastax.dse.driver.DseSessionMetric;
import com.datastax.dse.driver.api.core.config.DseDriverOption;
import com.datastax.oss.driver.api.core.metrics.SessionMetric;
import com.datastax.oss.driver.internal.core.context.InternalDriverContext;
import com.datastax.oss.driver.internal.core.metrics.DropwizardSessionMetricUpdater;
import java.util.Set;
import net.jcip.annotations.ThreadSafe;

/**
 * Session metric updater that, in addition to the default Dropwizard behavior, eagerly initializes
 * the HDR histogram timer for continuous-paging CQL requests using the DSE-specific configuration
 * options.
 */
@ThreadSafe
public class DseDropwizardSessionMetricUpdater extends DropwizardSessionMetricUpdater {

  public DseDropwizardSessionMetricUpdater(
      Set<SessionMetric> enabledMetrics, MetricRegistry registry, InternalDriverContext context) {
    super(enabledMetrics, registry, context);

    // Pre-create the continuous-paging request timer so that its HDR settings (highest trackable
    // latency, significant digits, refresh interval) come from the DSE driver options.
    initializeHdrTimer(
        DseSessionMetric.CONTINUOUS_CQL_REQUESTS,
        context.getConfig().getDefaultProfile(),
        DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST,
        DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS,
        DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL);
  }
}
/*
 * Copyright DataStax, Inc.
 *
 * This software can be used solely with DataStax Enterprise. Please consult the license at
 * http://www.datastax.com/terms/datastax-dse-driver-license-terms
 */
package com.datastax.dse.driver.internal.core.search;

import com.datastax.dse.driver.api.core.data.time.DateRangePrecision;
import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
import edu.umd.cs.findbugs.annotations.NonNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import java.text.ParseException;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAdjusters;
import java.util.Calendar;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;

/** Static helpers to parse and round the dates used in DSE Search date ranges. */
public class DateRangeUtil {

  // FIX: utility class with only static members — prevent instantiation.
  private DateRangeUtil() {}

  /** Sets all the fields smaller than the given unit to their lowest possible value. */
  @NonNull
  public static ZonedDateTime roundDown(@NonNull ZonedDateTime date, @NonNull ChronoUnit unit) {
    switch (unit) {
      case YEARS:
        return date.with(TemporalAdjusters.firstDayOfYear()).truncatedTo(ChronoUnit.DAYS);
      case MONTHS:
        return date.with(TemporalAdjusters.firstDayOfMonth()).truncatedTo(ChronoUnit.DAYS);
      case DAYS:
      case HOURS:
      case MINUTES:
      case SECONDS:
      case MILLIS:
        return date.truncatedTo(unit);
      default:
        throw new IllegalArgumentException("Unsupported unit for rounding: " + unit);
    }
  }

  /** Sets all the fields smaller than the given unit to their highest possible value. */
  @NonNull
  public static ZonedDateTime roundUp(@NonNull ZonedDateTime date, @NonNull ChronoUnit unit) {
    return roundDown(date, unit)
        .plus(1, unit)
        // Even though ZDT has nanosecond-precision, DSE only rounds to millisecond precision so be
        // consistent with that
        .minus(1, ChronoUnit.MILLIS);
  }

  /**
   * Parses the given string as a date in a range bound.
   *
   * <p>This method deliberately uses legacy time APIs, in order to be as close as possible to the
   * server-side parsing logic. We want the client to behave exactly like the server, i.e. parsing a
   * date locally and inlining it in a CQL query should always yield the same result as binding the
   * date as a value.
   *
   * @param source a date of the form {@code yyyy[-MM[-dd[THH[:mm[:ss[.SSS]]]]]]}, optionally
   *     suffixed with {@code Z}, or {@code "*"} for an open bound (returns an empty calendar).
   * @throws ParseException if the string is not in one of the accepted forms.
   * @throws IllegalArgumentException if the string is null or empty.
   */
  public static Calendar parseCalendar(String source) throws ParseException {
    // The contents of this method are based on Lucene's DateRangePrefixTree#parseCalendar, released
    // under the Apache License, Version 2.0.
    // Following is the original notice from that file:

    // Licensed to the Apache Software Foundation (ASF) under one or more
    // contributor license agreements.  See the NOTICE file distributed with
    // this work for additional information regarding copyright ownership.
    // The ASF licenses this file to You under the Apache License, Version 2.0
    // (the "License"); you may not use this file except in compliance with
    // the License.  You may obtain a copy of the License at
    //
    //     http://www.apache.org/licenses/LICENSE-2.0
    //
    // Unless required by applicable law or agreed to in writing, software
    // distributed under the License is distributed on an "AS IS" BASIS,
    // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    // See the License for the specific language governing permissions and
    // limitations under the License.

    if (source == null || source.isEmpty()) {
      throw new IllegalArgumentException("Can't parse a null or blank string");
    }

    Calendar calendar = newCalendar();
    if (source.equals("*")) {
      return calendar;
    }
    int offset = 0; // a pointer
    try {
      // year & era:
      int lastOffset =
          (source.charAt(source.length() - 1) == 'Z') ? source.length() - 1 : source.length();
      int hyphenIdx = source.indexOf('-', 1); // look past possible leading hyphen
      if (hyphenIdx < 0) {
        hyphenIdx = lastOffset;
      }
      int year = Integer.parseInt(source.substring(offset, hyphenIdx));
      // java.util.Calendar: ERA 0 = BC, 1 = AD; year 0 in ISO maps to 1 BC, -1 to 2 BC, etc.
      calendar.set(Calendar.ERA, year <= 0 ? 0 : 1);
      calendar.set(Calendar.YEAR, year <= 0 ? -1 * year + 1 : year);
      offset = hyphenIdx + 1;
      if (lastOffset < offset) {
        return calendar;
      }

      // NOTE: We aren't validating separator chars, and we unintentionally accept leading +/-.
      // The str.substring()'s hopefully get optimized to be stack-allocated.

      // month:
      calendar.set(
          Calendar.MONTH,
          Integer.parseInt(source.substring(offset, offset + 2)) - 1); // starts at 0
      offset += 3;
      if (lastOffset < offset) {
        return calendar;
      }
      // day:
      calendar.set(Calendar.DAY_OF_MONTH, Integer.parseInt(source.substring(offset, offset + 2)));
      offset += 3;
      if (lastOffset < offset) {
        return calendar;
      }
      // hour:
      calendar.set(Calendar.HOUR_OF_DAY, Integer.parseInt(source.substring(offset, offset + 2)));
      offset += 3;
      if (lastOffset < offset) {
        return calendar;
      }
      // minute:
      calendar.set(Calendar.MINUTE, Integer.parseInt(source.substring(offset, offset + 2)));
      offset += 3;
      if (lastOffset < offset) {
        return calendar;
      }
      // second:
      calendar.set(Calendar.SECOND, Integer.parseInt(source.substring(offset, offset + 2)));
      offset += 3;
      if (lastOffset < offset) {
        return calendar;
      }
      // ms:
      calendar.set(Calendar.MILLISECOND, Integer.parseInt(source.substring(offset, offset + 3)));
      offset += 3; // last one, move to next char
      if (lastOffset == offset) {
        return calendar;
      }
    } catch (Exception e) {
      ParseException pe = new ParseException("Improperly formatted date: " + source, offset);
      pe.initCause(e);
      throw pe;
    }
    throw new ParseException("Improperly formatted date: " + source, offset);
  }

  private static Calendar newCalendar() {
    // Locale.ROOT for locale-independent parsing; cleared so unset fields stay unset (precision
    // detection below relies on Calendar.isSet).
    Calendar calendar = Calendar.getInstance(UTC, Locale.ROOT);
    calendar.clear();
    return calendar;
  }

  private static final TimeZone UTC = TimeZone.getTimeZone("UTC");

  /**
   * Returns the precision of a calendar obtained through {@link #parseCalendar(String)}, or {@code
   * null} if no field was set.
   */
  @Nullable
  public static DateRangePrecision getPrecision(Calendar calendar) {
    DateRangePrecision lastPrecision = null;
    // Walk from coarsest to finest; stop at the first unset field.
    for (Map.Entry<DateRangePrecision, Integer> entry : FIELD_BY_PRECISION.entrySet()) {
      DateRangePrecision precision = entry.getKey();
      int field = entry.getValue();
      if (calendar.isSet(field)) {
        lastPrecision = precision;
      } else {
        break;
      }
    }
    return lastPrecision;
  }

  // Note: this could be a field on DateRangePrecision, but it's only used within this class so it's
  // better not to expose it.
  private static final ImmutableMap<DateRangePrecision, Integer> FIELD_BY_PRECISION =
      ImmutableMap.<DateRangePrecision, Integer>builder()
          .put(DateRangePrecision.YEAR, Calendar.YEAR)
          .put(DateRangePrecision.MONTH, Calendar.MONTH)
          .put(DateRangePrecision.DAY, Calendar.DAY_OF_MONTH)
          .put(DateRangePrecision.HOUR, Calendar.HOUR_OF_DAY)
          .put(DateRangePrecision.MINUTE, Calendar.MINUTE)
          .put(DateRangePrecision.SECOND, Calendar.SECOND)
          .put(DateRangePrecision.MILLISECOND, Calendar.MILLISECOND)
          .build();

  /** Converts a parsed calendar to a UTC {@link ZonedDateTime}, translating BC-era years. */
  public static ZonedDateTime toZonedDateTime(Calendar calendar) {
    int year = calendar.get(Calendar.YEAR);
    if (calendar.get(Calendar.ERA) == 0) {
      // BC era; 1 BC == 0 AD, 0 BD == -1 AD, etc
      year -= 1;
      if (year > 0) {
        year = -year;
      }
    }
    LocalDateTime localDateTime =
        LocalDateTime.of(
            year,
            calendar.get(Calendar.MONTH) + 1,
            calendar.get(Calendar.DAY_OF_MONTH),
            calendar.get(Calendar.HOUR_OF_DAY),
            calendar.get(Calendar.MINUTE),
            calendar.get(Calendar.SECOND));
    localDateTime =
        localDateTime.with(ChronoField.MILLI_OF_SECOND, calendar.get(Calendar.MILLISECOND));
    return ZonedDateTime.of(localDateTime, ZoneOffset.UTC);
  }
}
a/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java new file mode 100644 index 00000000000..0e73c41069a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java @@ -0,0 +1,24 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.session; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.session.SessionWrapper; +import net.jcip.annotations.ThreadSafe; + +/** + * Implementation note: metadata methods perform unchecked casts, relying on the fact that the + * metadata manager returns the appropriate runtime type. + */ +@ThreadSafe +public class DefaultDseSession extends SessionWrapper implements DseSession { + + public DefaultDseSession(Session delegate) { + super(delegate); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java new file mode 100644 index 00000000000..d93b53f71c9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.tracker; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; + +public class MultiplexingRequestTracker implements RequestTracker { + + private final List trackers = new CopyOnWriteArrayList<>(); + + public void register(RequestTracker tracker) { + trackers.add(tracker); + } + + @Override + public void onSuccess( + @NonNull Request request, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + for (RequestTracker tracker : trackers) { + tracker.onSuccess(request, latencyNanos, executionProfile, node, logPrefix); + } + } + + @Override + public void onError( + @NonNull Request request, + @NonNull Throwable error, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @Nullable Node node, + @NonNull String logPrefix) { + for (RequestTracker tracker : trackers) { + tracker.onError(request, error, latencyNanos, executionProfile, node, logPrefix); + } + } + + @Override + public void onNodeSuccess( + @NonNull Request request, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + for (RequestTracker tracker : trackers) { + tracker.onNodeSuccess(request, latencyNanos, executionProfile, node, logPrefix); + } + } + + @Override + public void onNodeError( + @NonNull Request request, + @NonNull Throwable error, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node 
node, + @NonNull String logPrefix) { + for (RequestTracker tracker : trackers) { + tracker.onNodeError(request, error, latencyNanos, executionProfile, node, logPrefix); + } + } + + @Override + public void close() throws Exception { + Exception toThrow = null; + for (RequestTracker tracker : trackers) { + try { + tracker.close(); + } catch (Exception e) { + if (toThrow == null) { + toThrow = e; + } else { + toThrow.addSuppressed(e); + } + } + } + if (toThrow != null) { + throw toThrow; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java new file mode 100644 index 00000000000..13f54743fed --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java @@ -0,0 +1,102 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import static com.datastax.oss.driver.internal.core.util.Strings.isQuoted; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.internal.core.util.Strings; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import net.jcip.annotations.ThreadSafe; + +/** Base class for geospatial type codecs. */ +@ThreadSafe +public abstract class GeometryCodec implements TypeCodec { + + @Nullable + @Override + public T decode(@Nullable ByteBuffer bb, @NonNull ProtocolVersion protocolVersion) { + return bb == null || bb.remaining() == 0 ? 
null : fromWellKnownBinary(bb.slice()); + } + + @Nullable + @Override + public ByteBuffer encode(@Nullable T geometry, @NonNull ProtocolVersion protocolVersion) { + return geometry == null ? null : toWellKnownBinary(geometry); + } + + @Nullable + @Override + public T parse(@Nullable String s) { + if (s == null) { + return null; + } + s = s.trim(); + if (s.isEmpty() || s.equalsIgnoreCase("NULL")) { + return null; + } + if (!isQuoted(s)) { + throw new IllegalArgumentException("Geometry values must be enclosed by single quotes"); + } + return fromWellKnownText(Strings.unquote(s)); + } + + @NonNull + @Override + public String format(@Nullable T geometry) throws IllegalArgumentException { + return geometry == null ? "NULL" : Strings.quote(toWellKnownText(geometry)); + } + + /** + * Creates an instance of this codec's geospatial type from its Well-known Text (WKT) representation. + * + * @param source the Well-known Text representation to parse. Cannot be null. + * @return A new instance of this codec's geospatial type. + * @throws IllegalArgumentException if the string does not contain a valid Well-known Text + * representation. + */ + @NonNull + protected abstract T fromWellKnownText(@NonNull String source); + + /** + * Creates an instance of a geospatial type from its Well-known Binary + * (WKB) representation. + * + * @param bb the Well-known Binary representation to parse. Cannot be null. + * @return A new instance of this codec's geospatial type. + * @throws IllegalArgumentException if the given {@link ByteBuffer} does not contain a valid + * Well-known Binary representation. + */ + @NonNull + protected abstract T fromWellKnownBinary(@NonNull ByteBuffer bb); + + /** + * Returns a Well-known Text (WKT) + * representation of the given geospatial object. + * + * @param geometry the geospatial object to convert. Cannot be null. + * @return A Well-known Text representation of the given object. 
+ */ + @NonNull + protected abstract String toWellKnownText(@NonNull T geometry); + + /** + * Returns a Well-known + * Binary (WKB) representation of the given geospatial object. + * + * @param geometry the geospatial object to convert. Cannot be null. + * @return A Well-known Binary representation of the given object. + */ + @NonNull + protected abstract ByteBuffer toWellKnownBinary(@NonNull T geometry); +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java new file mode 100644 index 00000000000..154120e921a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.esri.core.geometry.ogc.OGCLineString; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import net.jcip.annotations.ThreadSafe; + +/** + * A custom type codec to use {@link LineString} instances in driver. + * + *

      If you use {@link com.datastax.dse.driver.api.core.DseSessionBuilder} to build your cluster, + * it will automatically register this codec. + */ +@ThreadSafe +public class LineStringCodec extends GeometryCodec { + + private static final GenericType JAVA_TYPE = GenericType.of(LineString.class); + + @NonNull + @Override + public GenericType getJavaType() { + return JAVA_TYPE; + } + + @NonNull + @Override + protected LineString fromWellKnownText(@NonNull String source) { + return new DefaultLineString(DefaultGeometry.fromOgcWellKnownText(source, OGCLineString.class)); + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return javaClass == LineString.class; + } + + @Override + public boolean accepts(@NonNull Object value) { + return value instanceof LineString; + } + + @NonNull + @Override + protected LineString fromWellKnownBinary(@NonNull ByteBuffer bb) { + return new DefaultLineString(DefaultGeometry.fromOgcWellKnownBinary(bb, OGCLineString.class)); + } + + @NonNull + @Override + protected String toWellKnownText(@NonNull LineString geometry) { + return geometry.asWellKnownText(); + } + + @NonNull + @Override + protected ByteBuffer toWellKnownBinary(@NonNull LineString geometry) { + return geometry.asWellKnownBinary(); + } + + @NonNull + @Override + public DataType getCqlType() { + return DseDataTypes.LINE_STRING; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java new file mode 100644 index 00000000000..b9327d1cfe2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.esri.core.geometry.ogc.OGCPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import net.jcip.annotations.ThreadSafe; + +/** + * A custom type codec to use {@link Point} instances in the driver. + * + *

      If you use {@link com.datastax.dse.driver.api.core.DseSessionBuilder} to build your cluster, + * it will automatically register this codec. + */ +@ThreadSafe +public class PointCodec extends GeometryCodec { + + private static final GenericType JAVA_TYPE = GenericType.of(Point.class); + + @NonNull + @Override + public GenericType getJavaType() { + return JAVA_TYPE; + } + + @NonNull + @Override + public DataType getCqlType() { + return DseDataTypes.POINT; + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return javaClass == Point.class; + } + + @Override + public boolean accepts(@NonNull Object value) { + return value instanceof Point; + } + + @NonNull + @Override + protected String toWellKnownText(@NonNull Point geometry) { + return geometry.asWellKnownText(); + } + + @NonNull + @Override + protected ByteBuffer toWellKnownBinary(@NonNull Point geometry) { + return geometry.asWellKnownBinary(); + } + + @NonNull + @Override + protected Point fromWellKnownText(@NonNull String source) { + return new DefaultPoint(DefaultGeometry.fromOgcWellKnownText(source, OGCPoint.class)); + } + + @NonNull + @Override + protected Point fromWellKnownBinary(@NonNull ByteBuffer source) { + return new DefaultPoint(DefaultGeometry.fromOgcWellKnownBinary(source, OGCPoint.class)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java new file mode 100644 index 00000000000..6074fbb0b0a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultGeometry; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.esri.core.geometry.ogc.OGCPolygon; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import net.jcip.annotations.ThreadSafe; + +/** + * A custom type codec to use {@link Polygon} instances in the driver. + * + *

      If you use {@link com.datastax.dse.driver.api.core.DseSessionBuilder} to build your cluster, + * it will automatically register this codec. + */ +@ThreadSafe +public class PolygonCodec extends GeometryCodec { + + private static final GenericType JAVA_TYPE = GenericType.of(Polygon.class); + + @NonNull + @Override + public GenericType getJavaType() { + return JAVA_TYPE; + } + + @NonNull + @Override + public DataType getCqlType() { + return DseDataTypes.POLYGON; + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return javaClass == Polygon.class; + } + + @Override + public boolean accepts(@NonNull Object value) { + return value instanceof Polygon; + } + + @NonNull + @Override + protected Polygon fromWellKnownText(@NonNull String source) { + return new DefaultPolygon(DefaultGeometry.fromOgcWellKnownText(source, OGCPolygon.class)); + } + + @NonNull + @Override + protected Polygon fromWellKnownBinary(@NonNull ByteBuffer bb) { + return new DefaultPolygon(DefaultGeometry.fromOgcWellKnownBinary(bb, OGCPolygon.class)); + } + + @NonNull + @Override + protected String toWellKnownText(@NonNull Polygon geometry) { + return geometry.asWellKnownText(); + } + + @NonNull + @Override + protected ByteBuffer toWellKnownBinary(@NonNull Polygon geometry) { + return geometry.asWellKnownBinary(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java new file mode 100644 index 00000000000..133faec9ff4 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.time; + +import com.datastax.dse.driver.api.core.data.time.DateRange; +import com.datastax.dse.driver.api.core.data.time.DateRangeBound; +import com.datastax.dse.driver.api.core.data.time.DateRangePrecision; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.util.Strings; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.text.ParseException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Optional; + +public class DateRangeCodec implements TypeCodec { + + private static final GenericType JAVA_TYPE = GenericType.of(DateRange.class); + private static final DataType CQL_TYPE = DseDataTypes.DATE_RANGE; + + // e.g. [2001-01-01] + private static final byte DATE_RANGE_TYPE_SINGLE_DATE = 0x00; + // e.g. [2001-01-01 TO 2001-01-31] + private static final byte DATE_RANGE_TYPE_CLOSED_RANGE = 0x01; + // e.g. [2001-01-01 TO *] + private static final byte DATE_RANGE_TYPE_OPEN_RANGE_HIGH = 0x02; + // e.g. 
[* TO 2001-01-01] + private static final byte DATE_RANGE_TYPE_OPEN_RANGE_LOW = 0x03; + // [* TO *] + private static final byte DATE_RANGE_TYPE_BOTH_OPEN_RANGE = 0x04; + // * + private static final byte DATE_RANGE_TYPE_SINGLE_DATE_OPEN = 0x05; + + @NonNull + @Override + public GenericType getJavaType() { + return JAVA_TYPE; + } + + @NonNull + @Override + public DataType getCqlType() { + return CQL_TYPE; + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return javaClass == DateRange.class; + } + + @Nullable + @Override + public ByteBuffer encode( + @Nullable DateRange dateRange, @NonNull ProtocolVersion protocolVersion) { + if (dateRange == null) { + return null; + } + byte rangeType = encodeType(dateRange); + int bufferSize = 1; + DateRangeBound lowerBound = dateRange.getLowerBound(); + Optional maybeUpperBound = dateRange.getUpperBound(); + bufferSize += lowerBound.isUnbounded() ? 0 : 9; + bufferSize += maybeUpperBound.map(upperBound -> upperBound.isUnbounded() ? 0 : 9).orElse(0); + ByteBuffer buffer = ByteBuffer.allocate(bufferSize); + buffer.put(rangeType); + if (!lowerBound.isUnbounded()) { + put(buffer, lowerBound); + } + maybeUpperBound.ifPresent( + upperBound -> { + if (!upperBound.isUnbounded()) { + put(buffer, upperBound); + } + }); + return (ByteBuffer) buffer.flip(); + } + + private static byte encodeType(DateRange dateRange) { + if (dateRange.isSingleBounded()) { + return dateRange.getLowerBound().isUnbounded() + ? DATE_RANGE_TYPE_SINGLE_DATE_OPEN + : DATE_RANGE_TYPE_SINGLE_DATE; + } else { + DateRangeBound upperBound = + dateRange + .getUpperBound() + .orElseThrow( + () -> + new IllegalStateException("Upper bound should be set if !isSingleBounded()")); + if (dateRange.getLowerBound().isUnbounded()) { + return upperBound.isUnbounded() + ? DATE_RANGE_TYPE_BOTH_OPEN_RANGE + : DATE_RANGE_TYPE_OPEN_RANGE_LOW; + } else { + return upperBound.isUnbounded() + ? 
DATE_RANGE_TYPE_OPEN_RANGE_HIGH + : DATE_RANGE_TYPE_CLOSED_RANGE; + } + } + } + + private static void put(ByteBuffer buffer, DateRangeBound bound) { + buffer.putLong(bound.getTimestamp().toInstant().toEpochMilli()); + buffer.put(bound.getPrecision().getEncoding()); + } + + @Nullable + @Override + public DateRange decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + if (bytes == null || bytes.remaining() == 0) { + return null; + } + byte type = bytes.get(); + switch (type) { + case DATE_RANGE_TYPE_SINGLE_DATE: + return new DateRange(decodeLowerBound(bytes)); + case DATE_RANGE_TYPE_CLOSED_RANGE: + return new DateRange(decodeLowerBound(bytes), decodeUpperBound(bytes)); + case DATE_RANGE_TYPE_OPEN_RANGE_HIGH: + return new DateRange(decodeLowerBound(bytes), DateRangeBound.UNBOUNDED); + case DATE_RANGE_TYPE_OPEN_RANGE_LOW: + return new DateRange(DateRangeBound.UNBOUNDED, decodeUpperBound(bytes)); + case DATE_RANGE_TYPE_BOTH_OPEN_RANGE: + return new DateRange(DateRangeBound.UNBOUNDED, DateRangeBound.UNBOUNDED); + case DATE_RANGE_TYPE_SINGLE_DATE_OPEN: + return new DateRange(DateRangeBound.UNBOUNDED); + default: + throw new IllegalArgumentException("Unknown date range type: " + type); + } + } + + private static DateRangeBound decodeLowerBound(ByteBuffer bytes) { + long epochMilli = bytes.getLong(); + ZonedDateTime timestamp = + ZonedDateTime.ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC); + DateRangePrecision precision = DateRangePrecision.fromEncoding(bytes.get()); + return DateRangeBound.lowerBound(timestamp, precision); + } + + private static DateRangeBound decodeUpperBound(ByteBuffer bytes) { + long epochMilli = bytes.getLong(); + ZonedDateTime timestamp = + ZonedDateTime.ofInstant(Instant.ofEpochMilli(epochMilli), ZoneOffset.UTC); + DateRangePrecision precision = DateRangePrecision.fromEncoding(bytes.get()); + return DateRangeBound.upperBound(timestamp, precision); + } + + @NonNull + @Override + public String format(@Nullable 
DateRange dateRange) { + return (dateRange == null) ? "NULL" : Strings.quote(dateRange.toString()); + } + + @Nullable + @Override + public DateRange parse(@Nullable String value) { + if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) { + return null; + } + try { + return DateRange.parse(Strings.unquote(value)); + } catch (ParseException e) { + throw new IllegalArgumentException(String.format("Invalid date range literal: %s", value), e); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java b/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java new file mode 100644 index 00000000000..036c0b16cc9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java @@ -0,0 +1,123 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.util.concurrent; + +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Deque; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicReference; + +/** + * A concurrent queue with a limited size. + * + *

      Once the queue is full, the insertion of the next element is delayed until space becomes + * available again; in the meantime, additional insertions are not allowed (in other words, there + * can be at most one "pending" element waiting on a full queue). + */ +public class BoundedConcurrentQueue { + + private final Deque elements = new ConcurrentLinkedDeque<>(); + private final AtomicReference state; + + public BoundedConcurrentQueue(int maxSize) { + this.state = new AtomicReference<>(new State(maxSize)); + } + + /** + * @return a stage that completes when the element is inserted. If there was still space in the + * queue, it will be already complete; if the queue was full, it will complete at a later + * point in time (triggered by a call to {@link #poll()}). This method must not be invoked + * again until the stage has completed. + * @throws IllegalStateException if the method is invoked before the stage returned by the + * previous call has completed. + */ + @NonNull + public CompletionStage offer(@NonNull ElementT element) { + while (true) { + State oldState = state.get(); + State newState = oldState.increment(); + if (state.compareAndSet(oldState, newState)) { + if (newState.spaceAvailable != null) { + return newState.spaceAvailable.thenApply( + (aVoid) -> { + elements.offer(element); + return element; + }); + } else { + elements.offer(element); + return CompletableFuture.completedFuture(element); + } + } + } + } + + @Nullable + public ElementT poll() { + while (true) { + State oldState = state.get(); + if (oldState.size == 0) { + return null; + } + State newState = oldState.decrement(); + if (state.compareAndSet(oldState, newState)) { + if (oldState.spaceAvailable != null) { + oldState.spaceAvailable.complete(null); + } + return elements.poll(); + } + } + } + + @Nullable + public ElementT peek() { + return elements.peek(); + } + + /** + * Note that this does not complete a pending call to {@link #offer(Object)}. 
We only use this + * method for terminal states where we want to dereference the contained elements. + */ + public void clear() { + elements.clear(); + } + + private static class State { + + private final int maxSize; + + final int size; // Number of elements in the queue, + 1 if one is waiting to get in + final CompletableFuture spaceAvailable; // Not null iff size == maxSize + 1 + + State(int maxSize) { + this(0, null, maxSize); + } + + private State(int size, CompletableFuture spaceAvailable, int maxSize) { + this.maxSize = maxSize; + this.size = size; + this.spaceAvailable = spaceAvailable; + } + + State increment() { + if (size > maxSize) { + throw new IllegalStateException( + "Can't call offer() until the stage returned by the previous offer() call has completed"); + } + int newSize = size + 1; + CompletableFuture newFuture = + (newSize == maxSize + 1) ? new CompletableFuture<>() : null; + return new State(newSize, newFuture, maxSize); + } + + State decrement() { + return new State(size - 1, null, maxSize); + } + } +} diff --git a/core/src/main/resources/com/datastax/dse/driver/Driver.properties b/core/src/main/resources/com/datastax/dse/driver/Driver.properties new file mode 100644 index 00000000000..d5c744b4eee --- /dev/null +++ b/core/src/main/resources/com/datastax/dse/driver/Driver.properties @@ -0,0 +1,13 @@ +# +# Copyright DataStax, Inc. +# +# This software can be used solely with DataStax Enterprise. 
Please consult the license at +# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# + +driver.groupId=${project.groupId} +driver.artifactId=${project.artifactId} +driver.version=${project.version} +# Set the Driver name explicitly here as parent project properties don't resolve +# when bundled, so we can't use ${project.parent.name} +driver.name=DataStax Enterprise Java Driver diff --git a/core/src/main/resources/dse-reference.conf b/core/src/main/resources/dse-reference.conf new file mode 100644 index 00000000000..0fbac85b617 --- /dev/null +++ b/core/src/main/resources/dse-reference.conf @@ -0,0 +1,430 @@ +# Reference configuration for the DataStax Enterprise Java driver. +# +# This configuration inherits from the open-source Apache Cassandra® driver configuration. In other +# words, any option not explicitly defined below gets its value from the `reference.conf` file +# shipped with the java-driver-core.jar that this DSE driver depends on. +# +# You can place your own `application.conf` file in the classpath to override any option. +# +# Note that the configuration loading mechanism is pluggable (see +# DseSessionBuilder.withConfigLoader). +# +# This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md. +datastax-java-driver { + + basic { + # The name of the application using the session. + # + # It will be sent in the STARTUP protocol message for each new connection established by the + # driver, and may be used by future DSE versions for monitoring purposes. + # + # This can also be defined programmatically with DseSessionBuilder.withApplicationName(). If you + # specify both, the programmatic value takes precedence and this option is ignored. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // application.name = + + # The version of the application using the session. 
+ # + # It will be sent in the STARTUP protocol message for each new connection established by the + # driver, and may be used by future DSE versions for monitoring purposes. + # + # This can also be defined programmatically with DseSessionBuilder.withApplicationVersion(). If + # you specify both, the programmatic value takes precedence and this option is ignored. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // application.version = + + load-balancing-policy { + # The DSE driver ships with a specific load balancing policy implementation that is capable, + # among other things, of avoiding slow nodes and handling analytics queries: + # DseLoadBalancingPolicy. + # + # You can also specify any other custom implementation class, provided that it implements + # LoadBalancingPolicy and has a public constructor with two arguments: the DriverContext and a + # String representing the profile name. + class = DseLoadBalancingPolicy + + } + } + + basic.graph { + # The name of the graph targeted by graph statements. + # + # This can also be overridden programmatically with GraphStatement.setGraphName(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no. In particular, system queries -- such as creating or dropping a graph -- must be + # executed without a graph name (see also basic.graph.is-system-query). + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // name = your-graph-name + + # The traversal source to use for graph statements. + # + # This setting doesn't usually need to change, unless executing OLAP queries, which require the + # traversal source "a". + # + # This can also be overridden programmatically with GraphStatement.setTraversalSource(). If both + # are specified, the programmatic value takes precedence, and this option is ignored. 
+ # + # Required: no + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + traversal-source = "g" + + # Whether a script statement represents a system query. + # + # Script statements that access the `system` variable *must not* specify a graph name (otherwise + # `system` is not available). However, if your application executes a lot of non-system + # statements, it is convenient to configure basic.graph.name to avoid repeating it every time. + # This option allows you to ignore that global graph name, for example in a specific profile. + # + # This can also be overridden programmatically with ScriptGraphStatement.setSystemQuery(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to false) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // is-system-query = false + + # The read consistency level to use for graph statements. + # + # DSE Graph is able to distinguish between read and write timeouts for the internal storage + # queries that will be produced by a traversal. Hence the consistency level for reads and writes + # can be set separately. + # + # This can also be overridden programmatically with GraphStatement.setReadConsistencyLevel(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to request.basic.consistency) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // read-consistency-level = LOCAL_QUORUM + + # The write consistency level to use for graph statements. + # + # DSE Graph is able to distinguish between read and write timeouts for the internal storage + # queries that will be produced by a traversal. 
Hence the consistency level for reads and writes + # can be set separately. + # + # This can also be overridden programmatically with GraphStatement.setWriteConsistencyLevel(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to request.basic.consistency) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // write-consistency-level = LOCAL_ONE + + # How long the driver waits for a graph request to complete. This is a global limit on the + # duration of a session.execute() call, including any internal retries the driver might do. + # + # Graph statements behave a bit differently than regular CQL requests (hence this dedicated + # option instead of reusing basic.request.timeout): by default, the client timeout is not set, + # and the driver will just wait as long as needed until the server replies (which is itself + # governed by server-side timeout configuration). + # If you specify a client timeout with this option, then the driver will fail the request after + # the given time; note that the value is also sent along with the request, so that the server + # can also time out early and avoid wasting resources on a response that the client has already + # given up on. + # + # This can also be overridden programmatically with GraphStatement.setTimeout(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (by default, no client timeout is set) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // timeout = 10 seconds + } + + # The component that handles authentication on each new connection. + # + # Required: no. If the 'class' child option is absent, no authentication will occur. 
+ # Modifiable at runtime: no + # Overridable in a profile: no + advanced.auth-provider { + # The class of the provider. If it is not qualified, the driver assumes that it resides in one + # of the following packages: + # - com.datastax.oss.driver.internal.core.auth + # - com.datastax.dse.driver.internal.core.auth + # + # The DSE driver provides 3 implementations out of the box: + # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and + # `password` options below. Should be used only when authenticating against Apache + # Cassandra(R) clusters; not recommended when authenticating against DSE clusters. + # - DsePlainTextAuthProvider: provides SASL authentication using the PLAIN mechanism for DSE + # clusters secured with DseAuthenticator. It requires the `username` and `password` options + # below, and optionally, an `authorization-id`. + # - DseGssApiAuthProvider: provides GSSAPI authentication for DSE clusters secured with + # DseAuthenticator. Read the javadocs of this authenticator for detailed instructions. + # + # You can also specify a custom class that implements AuthProvider and has a public constructor + # with a DriverContext argument (to simplify this, the driver provides two abstract classes that + # can be extended: DsePlainTextAuthProviderBase and DseGssApiAuthProviderBase). + # + # Finally, you can configure a provider instance programmatically with + # DseSessionBuilder#withAuthProvider. In that case, it will take precedence over the + # configuration. + // class = DsePlainTextAuthProvider + # + # Sample configuration for plain-text authentication providers: + // username = cassandra + // password = cassandra + # + # Proxy authentication: allows to login as another user or role (valid for both + # DsePlainTextAuthProvider and DseGssApiAuthProvider): + // authorization-id = userOrRole + # + # The settings below are only applicable to DseGssApiAuthProvider: + # + # Service name. 
For example, if in your dse.yaml configuration file the + # "kerberos_options/service_principal" setting is "cassandra/my.host.com@MY.REALM.COM", then set + # this option to "cassandra": + //service = "cassandra" + # + # Login configuration. It is also possible to provide login configuration through a standard + # JAAS configuration file. The below configuration is just an example, see all possible options + # here: + # https://docs.oracle.com/javase/6/docs/jre/api/security/jaas/spec/com/sun/security/auth/module/Krb5LoginModule.html + // login-configuration { + // principal = "cassandra@DATASTAX.COM" + // useKeyTab = "true" + // refreshKrb5Config = "true" + // keyTab = "/path/to/keytab/file" + // } + # + # Internal SASL properties, if any, such as QOP. + // sasl-properties { + // javax.security.sasl.qop = "auth-conf" + // } + } + + advanced.graph { + # The sub-protocol the driver will use to communicate with DSE Graph, on top of the Cassandra + # native protocol. + # + # You should almost never have to change this: the driver sets it automatically, based on the + # information it has about the server. One exception is if you use the script API against a + # legacy DSE version (5.0.3 or older). In that case, you need to force the sub-protocol to + # "graphson-1.0". + # + # This can also be overridden programmatically with GraphStatement.setSubProtocol(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // sub-protocol = "graphson-2.0" + } + + advanced.metrics { + + # The session-level metrics (all disabled by default). + # + # This section only lists metrics specific to the DSE driver. Please refer to the OSS driver's + # reference.conf file to know which metrics are made available by the OSS driver. 
+ # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: no + session { + enabled = [ + + # The throughput and latency percentiles of continuous CQL requests (exposed as a Timer). + # + # This metric is a session-level metric and corresponds to the overall duration of the + # session.executeContinuously() call, including any retry. + # + # Note that this metric is analogous to the OSS driver's 'cql-requests' metrics, but for + # continuous paging requests only. Continuous paging requests do not update the + # 'cql-requests' metric, because they are usually much longer. Only the following metrics + # are updated during a continuous paging request: + # + # - At node level: all the usual metrics available for normal CQL requests, such as + # 'cql-messages' and error-related metrics; + # - At session level: only 'continuous-cql-requests' is updated (this metric). + // continuous-cql-requests, + + ] + + # Extra configuration (for the metrics that need it) + + # Required: if the 'continuous-cql-requests' metric is enabled + # Modifiable at runtime: no + # Overridable in a profile: no + continuous-cql-requests { + + # The largest latency that we expect to record for a continuous paging request. + # + # This is used to scale internal data structures. If a higher recording is encountered at + # runtime, it is discarded and a warning is logged. + highest-latency = 120 seconds + + # The number of significant decimal digits to which internal structures will maintain + # value resolution and separation (for example, 3 means that recordings up to 1 second + # will be recorded with a resolution of 1 millisecond or better). + # + # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a + # warning is logged. + significant-digits = 3 + + # The interval at which percentile data is refreshed. + # + # The driver records latency data in a "live" histogram, and serves results from a cached + # snapshot. 
Each time the snapshot gets older than the interval, the two are switched. + # Note that this switch happens upon fetching the metrics, so if you never fetch the + # recording interval might grow higher (that shouldn't be an issue in a production + # environment because you would typically have a metrics reporter that exports to a + # monitoring tool at a regular interval). + # + # In practice, this means that if you set this to 5 minutes, you're looking at data from a + # 5-minute interval in the past, that is at most 5 minutes old. If you fetch the metrics + # at a faster pace, you will observe the same data for 5 minutes until the interval + # expires. + # + # Note that this does not apply to the total count and rates (those are updated in real + # time). + refresh-interval = 5 minutes + } + } + + # The node-level metrics (all disabled by default). + # + # This section only lists metrics specific to the DSE driver. Please refer to the OSS driver's + # reference.conf file to know which metrics are made available by the OSS driver. + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: no + node { + enabled = [ + + ] + } + } + + # Options to control the execution of continuous paging requests. + advanced.continuous-paging { + + # The page size. + # + # The value specified here can be interpreted in number of rows + # or in number of bytes, depending on the unit defined with page-size-in-bytes (see below). + # + # It controls how many rows (or how much data) will be retrieved simultaneously in a single + # network roundtrip (the goal being to avoid loading too many results in memory at the same + # time). If there are more results, additional requests will be used to retrieve them (either + # automatically if you iterate with the sync API, or explicitly with the async API's + # fetchNextPage method). + # + # The default is the same as the driver's normal request page size, + # i.e., 5000 (rows). 
+ # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + page-size = ${datastax-java-driver.basic.request.page-size} + + # Whether the page-size option should be interpreted in number of rows or bytes. + # + # The default is false, i.e., the page size will be interpreted in number of rows. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + page-size-in-bytes = false + + # The maximum number of pages to return. + # + # The default is zero, which means retrieve all pages. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages = 0 + + # The maximum number of pages per second. + # + # The default is zero, which means no limit. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages-per-second = 0 + + # The maximum number of pages that can be stored in the local queue. + # + # This value must be positive. The default is 4. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-enqueued-pages = 4 + + # Timeouts for continuous paging. + # + # Note that there is no global timeout for continuous paging as there is + # for regular queries, because continuous paging queries can take an arbitrarily + # long time to complete. + # + # Instead, timeouts are applied to each exchange between the driver and the coordinator. In + # other words, if the driver decides to retry, all timeouts are reset. + timeout { + + # How long to wait for the coordinator to send the first page. 
+ # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + first-page = 2 seconds + + # How long to wait for the coordinator to send subsequent pages. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + other-pages = 1 second + + } + } + + # Options to control Insights monitoring. + advanced.monitor-reporting { + # Whether to send monitoring events. + # + # The default is true. + # + # Required: no (defaults to true) + # Modifiable at runtime: no + # Overridable in a profile: no + enabled = true + } + + profiles { + # An example configuration profile for graph requests. + // my-graph-profile-example { + // graph { + // read-consistency-level = LOCAL_QUORUM + // write-consistency-level = LOCAL_ONE + // } + // } + + # An example pre-defined configuration profile for OLAP graph queries. + // graph-olap { + // graph { + // traversal-source = "a" // traversal source needs to be set to "a" for OLAP queries. + // } + // } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java b/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java new file mode 100644 index 00000000000..474e277d83a --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java @@ -0,0 +1,30 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import static org.assertj.core.api.Assertions.fail; + +import org.junit.runner.Description; +import org.junit.runner.notification.RunListener; + +/** + * Common parent of all driver tests, to store common configuration and perform sanity checks. 
+ * + * @see "maven-surefire-plugin configuration in pom.xml" + */ +public class DriverRunListener extends RunListener { + + @Override + public void testFinished(Description description) throws Exception { + // If a test interrupted the main thread silently, this can make later tests fail. Instead, we + // fail the test and clear the interrupt status. + // Note: Thread.interrupted() also clears the flag, which is what we want. + if (Thread.interrupted()) { + fail(description.getMethodName() + " interrupted the main thread"); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java new file mode 100644 index 00000000000..37af095d5f7 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.tngtech.java.junit.dataprovider.DataProvider; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.stream.Stream; + +public class DseTestDataProviders { + + @DataProvider + public static Object[][] allDseProtocolVersions() { + return concat(DseProtocolVersion.values()); + } + + @DataProvider + public static Object[][] allOssProtocolVersions() { + return concat(DefaultProtocolVersion.values()); + } + + @DataProvider + public static Object[][] allDseAndOssProtocolVersions() { + return concat(DefaultProtocolVersion.values(), DseProtocolVersion.values()); + } + + @NonNull + private static Object[][] concat(Object[]... 
values) { + return Stream.of(values) + .flatMap(Arrays::stream) + .map(o -> new Object[] {o}) + .toArray(Object[][]::new); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java new file mode 100644 index 00000000000..558082d9e21 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.result.ColumnSpec; +import com.datastax.oss.protocol.internal.response.result.DefaultRows; +import com.datastax.oss.protocol.internal.response.result.RawType; +import com.datastax.oss.protocol.internal.response.result.Rows; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Queue; + +public class DseTestFixtures { + + // Returns a single row, with a single "message" column with the value "hello, world" + public static Rows singleDseRow() { + DseRowsMetadata metadata = + new DseRowsMetadata( + ImmutableList.of( + new ColumnSpec( + "ks", + "table", + "message", + 0, + RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + null, + new int[] {}, + null, + 1, + true); + Queue> data = new ArrayDeque<>(); + data.add(ImmutableList.of(Bytes.fromHexString("0x68656C6C6F2C20776F726C64"))); + return new DefaultRows(metadata, data); + } + + // Returns 10 rows, each with a single "message" column with the value "hello, world" + public static Rows 
tenDseRows(int page, boolean last) { + DseRowsMetadata metadata = + new DseRowsMetadata( + ImmutableList.of( + new ColumnSpec( + "ks", + "table", + "message", + 0, + RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + last ? null : ByteBuffer.wrap(new byte[] {(byte) page}), + new int[] {}, + null, + page, + last); + Queue> data = new ArrayDeque<>(); + for (int i = 0; i < 10; i++) { + data.add(ImmutableList.of(Bytes.fromHexString("0x68656C6C6F2C20776F726C64"))); + } + return new DefaultRows(metadata, data); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java new file mode 100644 index 00000000000..fc2d9942a4b --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java @@ -0,0 +1,117 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import java.io.File; +import java.net.URL; +import java.time.Duration; +import org.junit.Test; + +public class DseDriverConfigLoaderTest { + + @Test + public void should_load_from_other_classpath_resource() { + DriverConfigLoader loader = DseDriverConfigLoader.fromClasspath("config/customApplication"); + DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); + // From customApplication.conf: + assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofMillis(500)); + assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); + // From customApplication.json: + assertThat(config.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).isEqualTo(2000); + assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE)).isEqualTo(2000); + // From customApplication.properties: + assertThat(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.ONE.name()); + assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES)).isEqualTo(8); + // From reference.conf: + assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); + // From dse-reference.conf: + assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .isEqualTo(Duration.ofSeconds(2)); + } + + @Test + public void should_load_from_file() 
{ + File file = new File("src/test/resources/config/customApplication.conf"); + assertThat(file).exists(); + DriverConfigLoader loader = DseDriverConfigLoader.fromFile(file); + DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); + // From customApplication.conf: + assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofMillis(500)); + assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); + // From reference.conf: + assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); + // From dse-reference.conf: + assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .isEqualTo(Duration.ofSeconds(2)); + } + + @Test + public void should_load_from_url() throws Exception { + URL url = new File("src/test/resources/config/customApplication.conf").toURI().toURL(); + DriverConfigLoader loader = DseDriverConfigLoader.fromUrl(url); + DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); + // From customApplication.conf: + assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofMillis(500)); + assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); + // From reference.conf: + assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); + // From dse-reference.conf: + assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .isEqualTo(Duration.ofSeconds(2)); + } + + @Test + public void should_build_programmatically() { + DriverConfigLoader loader = + DseDriverConfigLoader.programmaticBuilder() + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(500)) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 10) + .startProfile("slow") + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, 
Duration.ofSeconds(30)) + .build(); + DriverConfig config = loader.getInitialConfig(); + + DriverExecutionProfile defaultProfile = config.getDefaultProfile(); + // From programmatic overrides: + assertThat(defaultProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofMillis(500)); + assertThat(defaultProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); + // From reference.conf: + assertThat(defaultProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); + // From dse-reference.conf: + assertThat(defaultProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .isEqualTo(Duration.ofSeconds(2)); + + DriverExecutionProfile slowProfile = config.getProfile("slow"); + // From programmatic override: + assertThat(slowProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofSeconds(30)); + // Inherited from the default profile (where the option was overridden programmatically) + assertThat(slowProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); + // Inherited from the default profile (where the option was pulled from reference.conf) + assertThat(slowProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); + // Inherited from the default profile (where the option was pulled from dse-reference.conf) + assertThat(slowProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .isEqualTo(Duration.ofSeconds(2)); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java new file mode 100644 index 00000000000..264dd4e4d68 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.time.ZonedDateTime; +import org.junit.Test; + +public class DateRangePrecisionTest { + + @Test + public void should_round_up() { + ZonedDateTime timestamp = ZonedDateTime.parse("2011-02-03T04:05:16.789Z"); + assertThat(DateRangePrecision.MILLISECOND.roundUp(timestamp)) + .isEqualTo("2011-02-03T04:05:16.789Z"); + assertThat(DateRangePrecision.SECOND.roundUp(timestamp)).isEqualTo("2011-02-03T04:05:16.999Z"); + assertThat(DateRangePrecision.MINUTE.roundUp(timestamp)).isEqualTo("2011-02-03T04:05:59.999Z"); + assertThat(DateRangePrecision.HOUR.roundUp(timestamp)).isEqualTo("2011-02-03T04:59:59.999Z"); + assertThat(DateRangePrecision.DAY.roundUp(timestamp)).isEqualTo("2011-02-03T23:59:59.999Z"); + assertThat(DateRangePrecision.MONTH.roundUp(timestamp)).isEqualTo("2011-02-28T23:59:59.999Z"); + assertThat(DateRangePrecision.YEAR.roundUp(timestamp)).isEqualTo("2011-12-31T23:59:59.999Z"); + } + + @Test + public void should_round_down() { + ZonedDateTime timestamp = ZonedDateTime.parse("2011-02-03T04:05:16.789Z"); + assertThat(DateRangePrecision.MILLISECOND.roundDown(timestamp)) + .isEqualTo("2011-02-03T04:05:16.789Z"); + assertThat(DateRangePrecision.SECOND.roundDown(timestamp)) + .isEqualTo("2011-02-03T04:05:16.000Z"); + assertThat(DateRangePrecision.MINUTE.roundDown(timestamp)) + .isEqualTo("2011-02-03T04:05:00.000Z"); + assertThat(DateRangePrecision.HOUR.roundDown(timestamp)).isEqualTo("2011-02-03T04:00:00.000Z"); + assertThat(DateRangePrecision.DAY.roundDown(timestamp)).isEqualTo("2011-02-03T00:00:00.000Z"); + assertThat(DateRangePrecision.MONTH.roundDown(timestamp)).isEqualTo("2011-02-01T00:00:00.000Z"); + 
assertThat(DateRangePrecision.YEAR.roundDown(timestamp)).isEqualTo("2011-01-01T00:00:00.000Z"); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java new file mode 100644 index 00000000000..74a7f5101c4 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.internal.SerializationHelper; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.text.ParseException; +import java.time.temporal.ChronoField; +import java.util.function.Predicate; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DateRangeTest { + + @Test + @UseDataProvider("rangeStrings") + public void should_parse_and_format(String source) throws Exception { + DateRange parsed = DateRange.parse(source); + assertThat(parsed.toString()).isEqualTo(source); + } + + @DataProvider + public static Object[][] rangeStrings() { + return new Object[][] { + {"[2011-01 TO 2015]"}, + {"[2010-01-02 TO 2015-05-05T13]"}, + {"[1973-06-30T13:57:28.123Z TO 1999-05-05T14:14:59]"}, + // leap year + {"[2010-01-01T15 TO 2016-02]"}, + // pre-epoch + {"[1500 TO 1501]"}, + {"[0001 TO 0001-01-02]"}, + {"[0000 TO 0000-01-02]"}, + {"[-0001 TO -0001-01-02]"}, + // unbounded + {"[* TO 2014-12-01]"}, + {"[1999 TO *]"}, + {"[* TO *]"}, + // 
single bound ranges + // AD/BC era boundary + {"0001-01-01"}, + {"-0001-01-01"}, + {"-0009"}, + {"2000-11"}, + {"*"} + }; + } + + @Test + public void should_use_proleptic_parser() throws Exception { + DateRange parsed = DateRange.parse("[0000 TO 0000-01-02]"); + assertThat(parsed.getLowerBound().getTimestamp().get(ChronoField.YEAR)).isEqualTo(0); + } + + @Test + public void should_fail_to_parse_invalid_strings() { + assertThatThrownBy(() -> DateRange.parse("foo")).matches(hasOffset(0)); + assertThatThrownBy(() -> DateRange.parse("[foo TO *]")).matches(hasOffset(1)); + assertThatThrownBy(() -> DateRange.parse("[* TO foo]")).matches(hasOffset(6)); + } + + private static Predicate hasOffset(int offset) { + return e -> ((ParseException) e).getErrorOffset() == offset; + } + + @Test + public void should_fail_to_parse_inverted_range() { + assertThatThrownBy(() -> DateRange.parse("[2001-01 TO 2000]")) + .hasMessage( + "Lower bound of a date range should be before upper bound, got: [2001-01 TO 2000]"); + } + + @Test + public void should_not_equate_single_date_open_to_both_open_range() throws Exception { + assertThat(DateRange.parse("*")).isNotEqualTo(DateRange.parse("[* TO *]")); + } + + @Test + public void should_not_equate_same_ranges_with_different_precisions() throws ParseException { + assertThat(DateRange.parse("[2001 TO 2002]")) + .isNotEqualTo(DateRange.parse("[2001-01 TO 2002-12]")); + } + + @Test + public void should_give_same_hashcode_to_equal_objects() throws ParseException { + assertThat(DateRange.parse("[2001 TO 2002]").hashCode()) + .isEqualTo(DateRange.parse("[2001 TO 2002]").hashCode()); + } + + @Test + public void should_serialize_and_deserialize() throws Exception { + DateRange initial = DateRange.parse("[1973-06-30T13:57:28.123Z TO 1999-05-05T14:14:59]"); + DateRange deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } +} diff --git 
a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java new file mode 100644 index 00000000000..431e8920d88 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.junit.Test; + +public class GeoTest { + + @Test + public void should_convert_units_to_degrees() { + assertThat(Geo.Unit.DEGREES.toDegrees(100.0)).isEqualTo(100.0); + assertThat(Geo.Unit.MILES.toDegrees(68.9722)).isEqualTo(0.9982455747535043); + assertThat(Geo.Unit.KILOMETERS.toDegrees(111.0)).isEqualTo(0.9982456082154465); + assertThat(Geo.Unit.METERS.toDegrees(111000.0)).isEqualTo(0.9982456082154464); + } + + @Test + public void should_test_if_point_is_inside_circle_with_cartesian_coordinates() { + P inside = Geo.inside(Point.fromCoordinates(30, 30), 14.142135623730951); + assertThat(inside.test(Point.fromCoordinates(40, 40))).isTrue(); + assertThat(inside.test(Point.fromCoordinates(40.1, 40))).isFalse(); + } + + @Test + public void should_test_if_point_is_inside_circle_with_geo_coordinates() { + P inside = + Geo.inside(Point.fromCoordinates(30, 30), 12.908258700131379, Geo.Unit.DEGREES); + assertThat(inside.test(Point.fromCoordinates(40, 40))).isTrue(); + assertThat(inside.test(Point.fromCoordinates(40.1, 40))).isFalse(); + } + + @Test + public void 
should_test_if_point_is_inside_polygon() { + P inside = + Geo.inside( + Polygon.builder() + .addRing( + Point.fromCoordinates(30, 30), + Point.fromCoordinates(40, 40), + Point.fromCoordinates(40, 30)) + .build()); + assertThat(inside.test(Point.fromCoordinates(35, 32))).isTrue(); + assertThat(inside.test(Point.fromCoordinates(33, 37))).isFalse(); + } + + @Test + public void should_build_line_string_from_coordinates() { + LineString lineString = Geo.lineString(1, 2, 3, 4, 5, 6); + assertThat(lineString.getPoints()) + .hasSize(3) + .contains(Point.fromCoordinates(1, 2)) + .contains(Point.fromCoordinates(3, 4)) + .contains(Point.fromCoordinates(5, 6)); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_build_line_string_if_not_enough_coordinates() { + Geo.lineString(1, 2); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_build_line_string_if_uneven_number_of_coordinates() { + Geo.lineString(1, 2, 3, 4, 5); + } + + @Test + public void should_build_polygon_from_coordinates() { + Polygon polygon = Geo.polygon(1, 2, 3, 4, 5, 6, 7, 8); + assertThat(polygon.getExteriorRing()) + .hasSize(4) + .contains(Point.fromCoordinates(1, 2)) + .contains(Point.fromCoordinates(3, 4)) + .contains(Point.fromCoordinates(5, 6)) + .contains(Point.fromCoordinates(7, 8)); + assertThat(polygon.getInteriorRings()).isEmpty(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_build_polygon_if_not_enough_coordinates() { + Geo.polygon(1, 2, 3, 4); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_build_polygon_if_uneven_number_of_coordinates() { + Geo.polygon(1, 2, 3, 4, 5, 6, 7); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java new file mode 100644 index 00000000000..3c8c4ee5a3f --- /dev/null +++ 
b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java @@ -0,0 +1,105 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.junit.Test; + +public class SearchTest { + + @Test + public void testToken() { + P p = Search.token("needle"); + assertThat(p.test("needle")).isTrue(); + assertThat(p.test("This is a needle in a haystack")).isTrue(); + assertThat(p.test("This is just the haystack")).isFalse(); + } + + @Test + public void testPrefix() { + P p = Search.prefix("abcd"); + assertThat(p.test("abcd")).isTrue(); + assertThat(p.test("abcdefg hijkl")).isTrue(); + assertThat(p.test("zabcd")).isFalse(); + } + + @Test + public void testTokenPrefix() { + P p = Search.tokenPrefix("abcd"); + assertThat(p.test("abcd")).isTrue(); + assertThat(p.test("abcdefg hijkl")).isTrue(); + assertThat(p.test("z abcd")).isTrue(); + assertThat(p.test("ab cd")).isFalse(); + } + + @Test + public void testRegex() { + P p = Search.regex("(foo|bar)"); + assertThat(p.test("foo")).isTrue(); + assertThat(p.test("bar")).isTrue(); + assertThat(p.test("foo bar")).isFalse(); + } + + @Test + public void testTokenRegex() { + P p = Search.tokenRegex("(foo|bar)"); + assertThat(p.test("foo")).isTrue(); + assertThat(p.test("bar")).isTrue(); + assertThat(p.test("foo bar")).isTrue(); + assertThat(p.test("foo bar qix")).isTrue(); + assertThat(p.test("qix")).isFalse(); + } + + @Test + public void testPhrase() { + P p = Search.phrase("Hello world", 2); + assertThat(p.test("Hello World")).isTrue(); + assertThat(p.test("Hello Big World")).isTrue(); + assertThat(p.test("Hello Big Wild World")).isTrue(); + assertThat(p.test("Hello The Big Wild 
World")).isFalse(); + assertThat(p.test("Goodbye world")).isFalse(); + } + + @Test + public void testPhraseFragment() { + // Tests JAVA-1744 + P p = Search.phrase("a b", 0); + assertThat(p.test("a b")).isTrue(); + assertThat(p.test("a")).isFalse(); + assertThat(p.test("b")).isFalse(); + } + + @Test + public void testFuzzy() { + P p = Search.fuzzy("abc", 1); + assertThat(p.test("abcd")).isTrue(); + assertThat(p.test("ab")).isTrue(); + assertThat(p.test("abce")).isTrue(); + assertThat(p.test("abdc")).isTrue(); + assertThat(p.test("badc")).isFalse(); + + // Make sure we do NOT calculate the Damerau–Levenshtein distance (2), but the optimal string + // alignment distance (3): + assertThat(Search.tokenFuzzy("ca", 2).test("abc")).isFalse(); + } + + @Test + public void testTokenFuzzy() { + P p = Search.tokenFuzzy("abc", 1); + assertThat(p.test("foo abcd")).isTrue(); + assertThat(p.test("foo ab")).isTrue(); + assertThat(p.test("foo abce")).isTrue(); + assertThat(p.test("foo abdc")).isTrue(); + assertThat(p.test("foo badc")).isFalse(); + + // Make sure we do NOT calculate the Damerau–Levenshtein distance (2), but the optimal string + // alignment distance (3): + assertThat(Search.tokenFuzzy("ca", 2).test("abc 123")).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java new file mode 100644 index 00000000000..bac353fcdd7 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java @@ -0,0 +1,124 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * Note: some tests in this class depend on the set of supported protocol versions, they will need + * to be updated as new versions are added or become non-beta. + */ +public class DseProtocolVersionRegistryTest { + + private DseProtocolVersionRegistry registry = new DseProtocolVersionRegistry("test"); + + @Test + public void should_find_version_by_code() { + assertThat(registry.fromCode(4)).isEqualTo(DefaultProtocolVersion.V4); + assertThat(registry.fromCode(65)).isEqualTo(DseProtocolVersion.DSE_V1); + } + + @Test + public void should_find_version_by_name() { + assertThat(registry.fromName("V4")).isEqualTo(DefaultProtocolVersion.V4); + assertThat(registry.fromName("DSE_V1")).isEqualTo(DseProtocolVersion.DSE_V1); + } + + @Test + public void should_downgrade_from_dse_to_oss() { + assertThat(registry.downgrade(DseProtocolVersion.DSE_V1).get()) + .isEqualTo(DefaultProtocolVersion.V4); + } + + @Test + public void should_pick_dse_v2_as_highest_common_when_all_nodes_are_dse_6() { + assertThat(registry.highestCommon(ImmutableList.of(mockDseNode("6.0"), mockDseNode("6.1")))) + .isEqualTo(DseProtocolVersion.DSE_V2); + } + + @Test + public void 
should_pick_dse_v1_as_highest_common_when_all_nodes_are_dse_5_1_or_more() { + assertThat(registry.highestCommon(ImmutableList.of(mockDseNode("5.1"), mockDseNode("6.1")))) + .isEqualTo(DseProtocolVersion.DSE_V1); + } + + @Test + public void should_pick_oss_v4_as_highest_common_when_all_nodes_are_dse_5_or_more() { + assertThat( + registry.highestCommon( + ImmutableList.of(mockDseNode("5.0"), mockDseNode("5.1"), mockDseNode("6.1")))) + .isEqualTo(DefaultProtocolVersion.V4); + } + + @Test + public void should_pick_oss_v3_as_highest_common_when_all_nodes_are_dse_4_7_or_more() { + assertThat( + registry.highestCommon( + ImmutableList.of(mockDseNode("4.7"), mockDseNode("5.1"), mockDseNode("6.1")))) + .isEqualTo(DefaultProtocolVersion.V3); + } + + @Test(expected = UnsupportedProtocolVersionException.class) + public void should_fail_to_pick_highest_common_when_one_node_is_dse_4_6() { + registry.highestCommon( + ImmutableList.of(mockDseNode("4.6"), mockDseNode("5.1"), mockDseNode("6.1"))); + } + + @Test + public void should_pick_oss_v3_as_highest_common_when_one_node_is_cassandra_2_1() { + assertThat( + registry.highestCommon( + ImmutableList.of( + mockDseNode("5.1"), // oss v4 + mockDseNode("6.1"), // oss v4 + mockCassandraNode("2.1") // oss v3 + ))) + .isEqualTo(DefaultProtocolVersion.V3); + } + + private Node mockCassandraNode(String rawVersion) { + Node node = Mockito.mock(Node.class); + if (rawVersion != null) { + Mockito.when(node.getCassandraVersion()).thenReturn(Version.parse(rawVersion)); + } + return node; + } + + private Node mockDseNode(String rawDseVersion) { + Node node = Mockito.mock(Node.class); + Version dseVersion = Version.parse(rawDseVersion); + Mockito.when(node.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, dseVersion)); + + Version cassandraVersion; + if (dseVersion.compareTo(DseProtocolVersionRegistry.DSE_6_0_0) >= 0) { + cassandraVersion = Version.parse("4.0"); + } else if 
(dseVersion.compareTo(DseProtocolVersionRegistry.DSE_5_1_0) >= 0) { + cassandraVersion = Version.parse("3.11"); + } else if (dseVersion.compareTo(Version.parse("5.0")) >= 0) { + cassandraVersion = Version.parse("3.0"); + } else if (dseVersion.compareTo(DseProtocolVersionRegistry.DSE_4_7_0) >= 0) { + cassandraVersion = Version.parse("2.1"); + } else { + cassandraVersion = Version.parse("2.0"); + } + Mockito.when(node.getCassandraVersion()).thenReturn(cassandraVersion); + + return node; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java new file mode 100644 index 00000000000..36bd546a3ad --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -0,0 +1,188 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.context; + +import static com.datastax.dse.driver.api.core.DseSession.DSE_DRIVER_COORDINATES; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import 
com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.protocol.internal.request.Startup; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.util.UUID; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; + +@RunWith(DataProviderRunner.class) +public class DseStartupOptionsBuilderTest { + + private DseDriverContext driverContext; + + // Mocks for instantiating the DSE driver context + @Mock private DriverConfigLoader configLoader; + @Mock private DriverConfig driverConfig; + @Mock private DriverExecutionProfile defaultProfile; + + @Before + public void before() { + initMocks(this); + when(configLoader.getInitialConfig()).thenReturn(driverConfig); + when(driverConfig.getDefaultProfile()).thenReturn(defaultProfile); + } + + private void buildContext(UUID clientId, String applicationName, String applicationVersion) { + this.driverContext = + new DseDriverContext( + configLoader, + ProgrammaticArguments.builder().build(), + DseProgrammaticArguments.builder() + .withStartupClientId(clientId) + .withStartupApplicationName(applicationName) + .withStartupApplicationVersion(applicationVersion) + .build()); + } + + private void assertDefaultStartupOptions(Startup startup) { + assertThat(startup.options).containsEntry(Startup.CQL_VERSION_KEY, "3.0.0"); + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.DRIVER_NAME_KEY, DSE_DRIVER_COORDINATES.getName()); + assertThat(startup.options).containsKey(DseStartupOptionsBuilder.DRIVER_VERSION_KEY); + Version version = + Version.parse(startup.options.get(DseStartupOptionsBuilder.DRIVER_VERSION_KEY)); + assertThat(version).isEqualTo(DSE_DRIVER_COORDINATES.getVersion()); + assertThat(startup.options).containsKey(DseStartupOptionsBuilder.CLIENT_ID_KEY); + } + + @Test + public void 
should_build_startup_options_with_no_compression_if_undefined() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + buildContext(null, null, null); + Startup startup = new Startup(driverContext.getStartupOptions()); + assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); + assertDefaultStartupOptions(startup); + } + + @Test + @DataProvider({"lz4", "snappy"}) + public void should_build_startup_options_with_compression(String compression) { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn(compression); + buildContext(null, null, null); + Startup startup = new Startup(driverContext.getStartupOptions()); + // assert the compression option is present + assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, compression); + assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_NAME_KEY); + assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY); + assertDefaultStartupOptions(startup); + } + + @Test + public void should_fail_to_build_startup_options_with_invalid_compression() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("foobar"); + buildContext(null, null, null); + assertThatIllegalArgumentException() + .isThrownBy(() -> new Startup(driverContext.getStartupOptions())); + } + + @Test + public void should_build_startup_options_with_client_id() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + UUID customClientId = Uuids.random(); + buildContext(customClientId, null, null); + Startup startup = new Startup(driverContext.getStartupOptions()); + // assert the client id is present + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()); + assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); 
+ assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_NAME_KEY); + assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY); + assertDefaultStartupOptions(startup); + } + + @Test + public void should_build_startup_options_with_application_version_and_name() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + buildContext(null, "Custom_App_Name", "Custom_App_Version"); + Startup startup = new Startup(driverContext.getStartupOptions()); + // assert the app name and version are present + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name"); + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); + assertDefaultStartupOptions(startup); + } + + @Test + public void should_build_startup_options_with_all_options() { + // mock config to specify "snappy" compression + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("snappy"); + + UUID customClientId = Uuids.random(); + + buildContext(customClientId, "Custom_App_Name", "Custom_App_Version"); + Startup startup = new Startup(driverContext.getStartupOptions()); + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()) + .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") + .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, "snappy"); + assertDefaultStartupOptions(startup); + } + + @Test + public void should_use_configuration_when_no_programmatic_values_provided() { + when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) + .thenReturn("Config_App_Name"); + 
when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) + .thenReturn("Config_App_Version"); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + + buildContext(null, null, null); + Startup startup = new Startup(driverContext.getStartupOptions()); + + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Config_App_Name") + .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Config_App_Version"); + } + + @Test + public void should_ignore_configuration_when_programmatic_values_provided() { + when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) + .thenReturn("Config_App_Name"); + when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) + .thenReturn("Config_App_Version"); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + + buildContext(null, "Custom_App_Name", "Custom_App_Version"); + Startup startup = new Startup(driverContext.getStartupOptions()); + + assertThat(startup.options) + .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") + .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java new file mode 100644 index 00000000000..aacccb26ed7 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java @@ -0,0 +1,162 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.never; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.concurrent.CompletionStage; +import org.junit.Test; +import org.mockito.InOrder; +import org.mockito.Mockito; + +public class ContinuousCqlRequestHandlerNodeTargetingTest + extends ContinuousCqlRequestHandlerTestBase { + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_fail_if_targeted_node_not_available(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())) + .withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())) + .withEmptyPool(node3) + .withProtocolVersion(version) + .build()) { + + LoadBalancingPolicyWrapper loadBalancingPolicy = + harness.getContext().getLoadBalancingPolicyWrapper(); + InOrder 
invocations = Mockito.inOrder(loadBalancingPolicy); + + // target node3, which should be unavailable + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT.setNode(node3), + harness.getSession(), + harness.getContext(), + "target node 3, unavailable") + .handle(); + + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(NoNodeAvailableException.class); + invocations + .verify(loadBalancingPolicy, never()) + .newQueryPlan(any(Request.class), anyString(), any(Session.class)); + }); + + resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "no node targeting, should use node 1") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + assertThat(resultSet.getExecutionInfo().getCoordinator()).isEqualTo(node1); + invocations + .verify(loadBalancingPolicy) + .newQueryPlan( + UNDEFINED_IDEMPOTENCE_STATEMENT, + DriverExecutionProfile.DEFAULT_NAME, + harness.getSession()); + }); + + resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "no node targeting, should use node 2") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + assertThat(resultSet.getExecutionInfo().getCoordinator()).isEqualTo(node2); + invocations + .verify(loadBalancingPolicy) + .newQueryPlan( + UNDEFINED_IDEMPOTENCE_STATEMENT, + DriverExecutionProfile.DEFAULT_NAME, + harness.getSession()); + }); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_target_node(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())) + .withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())) + .withResponse(node3, 
defaultFrameOf(DseTestFixtures.singleDseRow())) + .withProtocolVersion(version) + .build()) { + + LoadBalancingPolicyWrapper loadBalancingPolicy = + harness.getContext().getLoadBalancingPolicyWrapper(); + InOrder invocations = Mockito.inOrder(loadBalancingPolicy); + + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT.setNode(node3), + harness.getSession(), + harness.getContext(), + "target node 3") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + assertThat(resultSet.getExecutionInfo().getCoordinator()).isEqualTo(node3); + invocations + .verify(loadBalancingPolicy, never()) + .newQueryPlan(any(Request.class), anyString(), any(Session.class)); + }); + + resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "no node targeting") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + assertThat(resultSet.getExecutionInfo().getCoordinator()).isEqualTo(node1); + invocations + .verify(loadBalancingPolicy) + .newQueryPlan( + UNDEFINED_IDEMPOTENCE_STATEMENT, + DriverExecutionProfile.DEFAULT_NAME, + harness.getSession()); + }); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java new file mode 100644 index 00000000000..754d9decded --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java @@ -0,0 +1,186 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static com.datastax.oss.protocol.internal.Frame.NO_PAYLOAD; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.servererrors.SyntaxError; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.session.RepreparePayload; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.ProtocolConstants.ErrorCode; +import com.datastax.oss.protocol.internal.request.Prepare; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.response.error.Unprepared; +import com.datastax.oss.protocol.internal.response.result.Prepared; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.netty.util.concurrent.Future; +import java.nio.ByteBuffer; +import 
java.util.concurrent.CompletionStage; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import org.junit.Test; +import org.mockito.Mock; + +public class ContinuousCqlRequestHandlerReprepareTest extends ContinuousCqlRequestHandlerTestBase { + + private final byte[] preparedId = {1, 2, 3}; + private final ByteBuffer preparedIdBuf = ByteBuffer.wrap(preparedId); + + private final RepreparePayload repreparePayload = + new RepreparePayload(preparedIdBuf, "irrelevant", CqlIdentifier.fromCql("ks"), NO_PAYLOAD); + + private final ConcurrentMap repreparePayloads = + new ConcurrentHashMap<>(ImmutableMap.of(preparedIdBuf, repreparePayload)); + + private final Unprepared unprepared = new Unprepared("test", preparedId); + private final Prepared prepared = new Prepared(preparedId, null, null, null); + private final Error unrecoverable = + new Error(ProtocolConstants.ErrorCode.SYNTAX_ERROR, "bad query"); + private final Error recoverable = new Error(ErrorCode.SERVER_ERROR, "sorry"); + + @Mock private Future future; + + @Override + public void setup() { + super.setup(); + when(future.isSuccess()).thenReturn(true); + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_prepare_and_retry_on_same_node(DseProtocolVersion version) { + + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withResponse(node1, defaultFrameOf(unprepared)) + .withProtocolVersion(version) + .build()) { + + when(harness.getSession().getRepreparePayloads()).thenReturn(repreparePayloads); + when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) + .then( + invocation -> { + AdminRequestHandler admin = invocation.getArgument(3); + admin.onResponse(defaultFrameOf(prepared)); + return future; + }); + + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test") + .handle(); + + 
verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); + // should have attempted to execute the query twice on the same node + verify(harness.getChannel(node1), times(2)) + .write(any(Query.class), anyBoolean(), anyMap(), any()); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_abort_when_prepare_fails_with_unrecoverable_error(DseProtocolVersion version) { + + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withResponse(node1, defaultFrameOf(unprepared)) + .withProtocolVersion(version) + .build()) { + + when(harness.getSession().getRepreparePayloads()).thenReturn(repreparePayloads); + when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) + .then( + invocation -> { + AdminRequestHandler admin = invocation.getArgument(3); + admin.onResponse(defaultFrameOf(unrecoverable)); + return future; + }); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + verify(harness.getChannel(node1)).write(any(Query.class), anyBoolean(), anyMap(), any()); + verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); + + assertThat(handler.state).isEqualTo(-2); + assertThat(page1Future) + .hasFailedWithThrowableThat() + .isInstanceOf(SyntaxError.class) + .hasMessageContaining("bad query"); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_try_next_node_when_prepare_fails_with_recoverable_error( + DseProtocolVersion version) { + + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withResponse(node1, defaultFrameOf(unprepared)) + .withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())) + 
.withProtocolVersion(version) + .build()) { + + when(harness.getSession().getRepreparePayloads()).thenReturn(repreparePayloads); + when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) + .then( + invocation -> { + AdminRequestHandler admin = invocation.getArgument(3); + admin.onResponse(defaultFrameOf(recoverable)); + return future; + }); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + verify(harness.getChannel(node1)).write(any(Query.class), anyBoolean(), anyMap(), any()); + verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); + // should have tried the next host + verify(harness.getChannel(node2)).write(any(Query.class), anyBoolean(), anyMap(), any()); + + assertThat(handler.state).isEqualTo(-1); + assertThatStage(page1Future) + .isSuccess( + rs -> { + assertThat(rs.currentPage()).hasSize(1); + assertThat(rs.hasMorePages()).isFalse(); + assertThat(rs.getExecutionInfo().getCoordinator()).isEqualTo(node2); + assertThat(rs.getExecutionInfo().getErrors()) + .hasSize(1) + .allSatisfy( + entry -> { + assertThat(entry.getKey()).isEqualTo(node1); + assertThat(entry.getValue()) + .isInstanceOf(UnexpectedResponseException.class) + .hasMessageContaining(recoverable.toString()); + }); + }); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java new file mode 100644 index 00000000000..41e6ed93e1a --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java @@ -0,0 +1,608 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.dse.driver.DseTestDataProviders.allDseProtocolVersions; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static com.datastax.oss.driver.TestDataProviders.combine; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.atMost; + +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.TestDataProviders; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.connection.HeartbeatException; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.servererrors.DefaultWriteType; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import 
com.datastax.oss.driver.api.core.servererrors.ServerError; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.internal.core.cql.CqlRequestHandlerTestBase; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.response.error.ReadTimeout; +import com.datastax.oss.protocol.internal.response.error.Unavailable; +import com.datastax.oss.protocol.internal.response.error.WriteTimeout; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Iterator; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import org.junit.Assume; +import org.junit.Test; +import org.mockito.Mockito; + +public class ContinuousCqlRequestHandlerRetryTest extends ContinuousCqlRequestHandlerTestBase { + + @Test + @UseDataProvider("allIdempotenceConfigs") + public void should_always_try_next_node_if_bootstrapping( + boolean defaultIdempotence, Statement statement, DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence) + .withResponse( + node1, + defaultFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))) + .withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())) + .build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + 
assertThat(handler.state).isEqualTo(-1); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node2); + assertThat(executionInfo.getErrors()).hasSize(1); + assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); + assertThat(executionInfo.getErrors().get(0).getValue()) + .isInstanceOf(BootstrappingException.class); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + + Mockito.verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); + }); + } + } + + @Test + @UseDataProvider("allIdempotenceConfigs") + public void should_always_rethrow_query_validation_error( + boolean defaultIdempotence, Statement statement, DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence) + .withResponse( + node1, + defaultFrameOf(new Error(ProtocolConstants.ErrorCode.INVALID, "mock message"))) + .build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-2); + + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error) + 
.isInstanceOf(InvalidQueryException.class) + .hasMessage("mock message"); + Mockito.verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); + + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(DefaultNodeMetric.OTHER_ERRORS), anyString()); + Mockito.verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + @Test + @UseDataProvider("failureAndIdempotent") + public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( + FailureScenario failureScenario, + boolean defaultIdempotence, + Statement statement, + DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + RequestHandlerTestHarness.Builder harnessBuilder = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence); + failureScenario.mockRequestError(harnessBuilder, node1); + harnessBuilder.withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + failureScenario.mockRetryPolicyDecision( + harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_NEXT); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-1); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node2); + 
assertThat(executionInfo.getErrors()).hasSize(1); + assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); + + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.errorMetric), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(DefaultNodeMetric.RETRIES), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.retryMetric), anyString()); + Mockito.verify(nodeMetricUpdater1, atMost(1)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + @Test + @UseDataProvider("failureAndIdempotent") + public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( + FailureScenario failureScenario, + boolean defaultIdempotence, + Statement statement, + DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + RequestHandlerTestHarness.Builder harnessBuilder = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence); + failureScenario.mockRequestError(harnessBuilder, node1); + harnessBuilder.withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + failureScenario.mockRetryPolicyDecision( + harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_SAME); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-1); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + 
assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).hasSize(1); + assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); + + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.errorMetric), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(DefaultNodeMetric.RETRIES), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.retryMetric), anyString()); + Mockito.verify(nodeMetricUpdater1, atMost(2)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + @Test + @UseDataProvider("failureAndIdempotent") + public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( + FailureScenario failureScenario, + boolean defaultIdempotence, + Statement statement, + DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + RequestHandlerTestHarness.Builder harnessBuilder = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence); + failureScenario.mockRequestError(harnessBuilder, node1); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + failureScenario.mockRetryPolicyDecision( + harness.getContext().getRetryPolicy(anyString()), RetryDecision.IGNORE); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-1); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = 
resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isFalse(); + + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).hasSize(0); + + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.errorMetric), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(DefaultNodeMetric.IGNORES), anyString()); + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.ignoreMetric), anyString()); + Mockito.verify(nodeMetricUpdater1, atMost(1)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + @Test + @UseDataProvider("failureAndIdempotent") + public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( + FailureScenario failureScenario, + boolean defaultIdempotence, + Statement statement, + DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + RequestHandlerTestHarness.Builder harnessBuilder = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence); + failureScenario.mockRequestError(harnessBuilder, node1); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + + failureScenario.mockRetryPolicyDecision( + harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-2); + + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(failureScenario.expectedExceptionClass); + + 
Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.errorMetric), anyString()); + Mockito.verify(nodeMetricUpdater1, atMost(1)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + @Test + @UseDataProvider("failureAndNotIdempotent") + public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_rethrows( + FailureScenario failureScenario, + boolean defaultIdempotence, + Statement statement, + DseProtocolVersion version) { + + Assume.assumeFalse( + "Batch statements are not supported with continuous paging", + statement instanceof BatchStatement); + + // For two of the possible exceptions, the retry policy is called even if the statement is not + // idempotent + boolean shouldCallRetryPolicy = + (failureScenario.expectedExceptionClass.equals(UnavailableException.class) + || failureScenario.expectedExceptionClass.equals(ReadTimeoutException.class)); + + RequestHandlerTestHarness.Builder harnessBuilder = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withDefaultIdempotence(defaultIdempotence); + failureScenario.mockRequestError(harnessBuilder, node1); + harnessBuilder.withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + + if (shouldCallRetryPolicy) { + failureScenario.mockRetryPolicyDecision( + harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + } + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + statement, harness.getSession(), harness.getContext(), "test"); + CompletionStage resultSetFuture = handler.handle(); + + assertThat(handler.state).isEqualTo(-2); + + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(failureScenario.expectedExceptionClass); + // When non idempotent, the policy is bypassed 
completely: + if (!shouldCallRetryPolicy) { + Mockito.verifyNoMoreInteractions( + harness.getContext().getRetryPolicy(anyString())); + } + + Mockito.verify(nodeMetricUpdater1) + .incrementCounter(eq(failureScenario.errorMetric), anyString()); + Mockito.verify(nodeMetricUpdater1, atMost(1)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + }); + } + } + + /** + * Sets up the mocks to simulate an error from a node, and make the retry policy return a given + * decision for that error. + */ + private abstract static class FailureScenario { + private final Class expectedExceptionClass; + final DefaultNodeMetric errorMetric; + final DefaultNodeMetric retryMetric; + final DefaultNodeMetric ignoreMetric; + + FailureScenario( + Class expectedExceptionClass, + DefaultNodeMetric errorMetric, + DefaultNodeMetric retryMetric, + DefaultNodeMetric ignoreMetric) { + this.expectedExceptionClass = expectedExceptionClass; + this.errorMetric = errorMetric; + this.retryMetric = retryMetric; + this.ignoreMetric = ignoreMetric; + } + + abstract void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node); + + abstract void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision); + } + + @DataProvider + public static Object[][] failure() { + return TestDataProviders.fromList( + new FailureScenario( + ReadTimeoutException.class, + DefaultNodeMetric.READ_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT) { + @Override + public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node) { + builder.withResponse( + node, + defaultFrameOf( + new ReadTimeout( + "mock message", ProtocolConstants.ConsistencyLevel.LOCAL_ONE, 1, 2, true))); + } + + @Override + public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { + Mockito.when( + policy.onReadTimeout( + 
any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(2), + eq(1), + eq(true), + eq(0))) + .thenReturn(decision); + } + }, + new FailureScenario( + WriteTimeoutException.class, + DefaultNodeMetric.WRITE_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT) { + @Override + public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node) { + builder.withResponse( + node, + defaultFrameOf( + new WriteTimeout( + "mock message", + ProtocolConstants.ConsistencyLevel.LOCAL_ONE, + 1, + 2, + ProtocolConstants.WriteType.SIMPLE))); + } + + @Override + public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { + Mockito.when( + policy.onWriteTimeout( + any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(DefaultWriteType.SIMPLE), + eq(2), + eq(1), + eq(0))) + .thenReturn(decision); + } + }, + new FailureScenario( + UnavailableException.class, + DefaultNodeMetric.UNAVAILABLES, + DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, + DefaultNodeMetric.IGNORES_ON_UNAVAILABLE) { + @Override + public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node) { + builder.withResponse( + node, + defaultFrameOf( + new Unavailable( + "mock message", ProtocolConstants.ConsistencyLevel.LOCAL_ONE, 2, 1))); + } + + @Override + public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { + Mockito.when( + policy.onUnavailable( + any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(2), + eq(1), + eq(0))) + .thenReturn(decision); + } + }, + new FailureScenario( + ServerError.class, + DefaultNodeMetric.OTHER_ERRORS, + DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, + DefaultNodeMetric.IGNORES_ON_OTHER_ERROR) { + @Override + public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node) { + builder.withResponse( + node, + defaultFrameOf( + new Error(ProtocolConstants.ErrorCode.SERVER_ERROR, "mock server 
error"))); + } + + @Override + public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { + Mockito.when( + policy.onErrorResponse( + any(SimpleStatement.class), any(ServerError.class), eq(0))) + .thenReturn(decision); + } + }, + new FailureScenario( + HeartbeatException.class, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED) { + @Override + public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node) { + builder.withResponseFailure(node, Mockito.mock(HeartbeatException.class)); + } + + @Override + public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { + Mockito.when( + policy.onRequestAborted( + any(SimpleStatement.class), any(HeartbeatException.class), eq(0))) + .thenReturn(decision); + } + }); + } + + @DataProvider + public static Object[][] failureAndIdempotent() { + return combine(failure(), idempotentConfig(), allDseProtocolVersions()); + } + + @DataProvider + public static Object[][] failureAndNotIdempotent() { + return combine(failure(), nonIdempotentConfig(), allDseProtocolVersions()); + } + + @DataProvider + public static Object[][] allIdempotenceConfigs() { + return combine(CqlRequestHandlerTestBase.allIdempotenceConfigs(), allDseProtocolVersions()); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java new file mode 100644 index 00000000000..751b0316097 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java @@ -0,0 +1,519 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V1; +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V2; +import static com.datastax.dse.protocol.internal.DseProtocolConstants.RevisionType.CANCEL_CONTINUOUS_PAGING; +import static com.datastax.dse.protocol.internal.DseProtocolConstants.RevisionType.MORE_CONTINUOUS_PAGES; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyMap; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.matches; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.protocol.internal.request.Revise; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import 
com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.ProtocolFeature; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Iterator; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; +import org.junit.Test; +import org.mockito.Mockito; + +public class ContinuousCqlRequestHandlerTest extends ContinuousCqlRequestHandlerTestBase { + + private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test\\|\\d*"); + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_single_page_result(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())) + .build()) { + + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + ExecutionInfo executionInfo = 
resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + }); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_multi_page_result(DseProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + assertThat(handler.pendingResult).isNotNull(); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + + assertThatStage(page1Future) + .isSuccess( + page1 -> { + assertThat(page1.hasMorePages()).isTrue(); + assertThat(page1.pageNumber()).isEqualTo(1); + Iterator rows = page1.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows).toIterable().hasSize(10); + ExecutionInfo executionInfo = page1.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNotNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + 
assertThat(executionInfo.getWarnings()).isEmpty(); + }); + + ContinuousAsyncResultSet page1 = CompletableFutures.getCompleted(page1Future); + assertThat(handler.pendingResult).isNull(); + CompletionStage page2Future = page1.fetchNextPage(); + assertThat(handler.pendingResult).isNotNull(); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, true))); + + assertThatStage(page2Future) + .isSuccess( + page2 -> { + assertThat(page2.hasMorePages()).isFalse(); + assertThat(page2.pageNumber()).isEqualTo(2); + Iterator rows = page2.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows).toIterable().hasSize(10); + ExecutionInfo executionInfo = page2.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + }); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_fail_if_no_node_available(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + // Mock no responses => this will produce an empty query plan + .build()) { + + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + assertThatStage(resultSetFuture) + .isFailed(error -> assertThat(error).isInstanceOf(NoNodeAvailableException.class)); + } + } + + @Test + @UseDataProvider(value = "allOssProtocolVersions", location = DseTestDataProviders.class) + public void 
should_throw_if_protocol_version_does_not_support_continuous_paging( + ProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder().withProtocolVersion(version).build()) { + Mockito.when( + harness + .getContext() + .getProtocolVersionRegistry() + .supports(any(DefaultProtocolVersion.class), any(ProtocolFeature.class))) + .thenReturn(false); + assertThatThrownBy( + () -> + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle()) + .isInstanceOf(IllegalStateException.class) + .hasMessage("Cannot execute continuous paging requests with protocol version " + version); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_time_out_if_first_page_takes_too_long(DseProtocolVersion version) + throws Exception { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + // mark the initial request as successful, which should schedule a timeout for the first page + node1Behavior.setWriteSuccess(); + CapturedTimeout page1Timeout = harness.nextScheduledTimeout(); + assertThat(page1Timeout.getDelay(TimeUnit.NANOSECONDS)) + .isEqualTo(TIMEOUT_FIRST_PAGE.toNanos()); + + page1Timeout.task().run(page1Timeout); + + assertThatStage(resultSetFuture) + .isFailed( + t -> + assertThat(t) + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Timed out waiting for page 1")); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void 
should_time_out_if_other_page_takes_too_long(DseProtocolVersion version) + throws Exception { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + + try (RequestHandlerTestHarness harness = builder.build()) { + + CompletionStage page1Future = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + // mark the initial request as successful, which should schedule a timeout for the first page + node1Behavior.setWriteSuccess(); + CapturedTimeout page1Timeout = harness.nextScheduledTimeout(); + assertThat(page1Timeout.getDelay(TimeUnit.NANOSECONDS)) + .isEqualTo(TIMEOUT_FIRST_PAGE.toNanos()); + + // the server replies with page 1, the corresponding timeout should be cancelled + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + assertThat(page1Timeout.isCancelled()).isTrue(); + + // request page 2, the queue is empty so this should request more pages and schedule another + // timeout + ContinuousAsyncResultSet page1 = CompletableFutures.getUninterruptibly(page1Future); + CompletionStage page2Future = page1.fetchNextPage(); + CapturedTimeout page2Timeout = harness.nextScheduledTimeout(); + assertThat(page2Timeout.getDelay(TimeUnit.NANOSECONDS)) + .isEqualTo(TIMEOUT_OTHER_PAGES.toNanos()); + + page2Timeout.task().run(page2Timeout); + + assertThatStage(page2Future) + .isFailed( + t -> + assertThat(t) + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Timed out waiting for page 2")); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_cancel_future_if_session_cancelled(DseProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = 
builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + // will be discarded + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, false))); + + ContinuousAsyncResultSet page1 = CompletableFutures.getUninterruptibly(page1Future); + page1.cancel(); + + assertThat(handler.state).isEqualTo(-2); + assertThat(page1.fetchNextPage()).isCancelled(); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_cancel_session_if_future_cancelled(DseProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + page1Future.toCompletableFuture().cancel(true); + // this should be ignored + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + assertThat(handler.state).isEqualTo(-2); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_not_cancel_session_if_future_cancelled_but_already_done( + DseProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = 
builder.build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + // this will complete page 1 future + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, true))); + + // to late + page1Future.toCompletableFuture().cancel(true); + assertThat(handler.state).isEqualTo(-1); + } + } + + @Test + public void should_send_cancel_request_if_dse_v2() { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(DSE_V2); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + ContinuousCqlRequestHandler handler = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); + CompletionStage page1Future = handler.handle(); + + page1Future.toCompletableFuture().cancel(true); + assertThat(handler.state).isEqualTo(-2); + verify(node1Behavior.getChannel()) + .write(argThat(this::isCancelRequest), anyBoolean(), anyMap(), any()); + } + } + + @Test + public void should_toggle_channel_autoread_if_dse_v1() { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(DSE_V1); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + CompletionStage page1Future = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + // simulate the arrival of 5 pages, the first one will complete page1 future above, + // the following 4 will be enqueued and should trigger autoread off + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, 
false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(3, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(4, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(5, false))); + + verify(node1Behavior.getChannel().config()).setAutoRead(false); + + // simulate the retrieval of 2 pages, this should dequeue page 2 + // and trigger autoread on + ContinuousAsyncResultSet page1 = CompletableFutures.getCompleted(page1Future); + CompletableFutures.getCompleted(page1.fetchNextPage()); + + verify(node1Behavior.getChannel().config()).setAutoRead(true); + + // in DSE_V1, the backpressure request should not have been sent + verify(node1Behavior.getChannel(), never()) + .write(any(Revise.class), anyBoolean(), anyMap(), any()); + } + } + + @Test + public void should_send_backpressure_request_if_dse_v2() { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(DSE_V2); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + CompletionStage page1Future = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + // simulate the arrival of 4 pages, the first one will complete page1 future above, + // the following 3 will be enqueued + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(3, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(4, false))); + + // simulate the retrieval of 2 pages, this should dequeue page 2 + // and trigger a backpressure request as the queue is now half empty (2/4) + ContinuousAsyncResultSet page1 = 
CompletableFutures.getCompleted(page1Future); + CompletableFutures.getCompleted(page1.fetchNextPage()); + + verify(node1Behavior.getChannel()) + .write(argThat(this::isBackpressureRequest), anyBoolean(), anyMap(), any()); + // should not mess with autoread in dse v2 + verify(node1Behavior.getChannel().config(), never()).setAutoRead(anyBoolean()); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_invoke_request_tracker(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withResponse( + node1, + defaultFrameOf( + new com.datastax.oss.protocol.internal.response.Error( + ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))) + .withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())) + .build()) { + + RequestTracker requestTracker = mock(RequestTracker.class); + when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); + + CompletionStage resultSetFuture = + new ContinuousCqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node2); + assertThat(executionInfo.getErrors()).isNotEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + + verify(requestTracker) + .onNodeError( + 
eq(UNDEFINED_IDEMPOTENCE_STATEMENT), + any(BootstrappingException.class), + anyLong(), + any(DriverExecutionProfile.class), + eq(node1), + matches(LOG_PREFIX_PER_REQUEST)); + verify(requestTracker) + .onNodeSuccess( + eq(UNDEFINED_IDEMPOTENCE_STATEMENT), + anyLong(), + any(DriverExecutionProfile.class), + eq(node2), + matches(LOG_PREFIX_PER_REQUEST)); + verify(requestTracker) + .onSuccess( + eq(UNDEFINED_IDEMPOTENCE_STATEMENT), + anyLong(), + any(DriverExecutionProfile.class), + eq(node2), + matches(LOG_PREFIX_PER_REQUEST)); + verifyNoMoreInteractions(requestTracker); + }); + } + } + + private boolean isBackpressureRequest(Message argument) { + return argument instanceof Revise && ((Revise) argument).revisionType == MORE_CONTINUOUS_PAGES; + } + + private boolean isCancelRequest(Message argument) { + return argument instanceof Revise + && ((Revise) argument).revisionType == CANCEL_CONTINUOUS_PAGING; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java new file mode 100644 index 00000000000..2679c7567ab --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES; +import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE; +import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.cql.CqlRequestHandlerTestBase; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import java.time.Duration; + +public abstract class ContinuousCqlRequestHandlerTestBase extends CqlRequestHandlerTestBase { + + static final Duration TIMEOUT_FIRST_PAGE = Duration.ofSeconds(2); + static final Duration TIMEOUT_OTHER_PAGES = Duration.ofSeconds(1); + + protected RequestHandlerTestHarness.Builder continuousHarnessBuilder() { + return new RequestHandlerTestHarness.Builder() { + @Override + public RequestHandlerTestHarness build() { + RequestHandlerTestHarness harness = super.build(); + DriverExecutionProfile config = harness.getContext().getConfig().getDefaultProfile(); + when(config.getDuration(CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) + .thenReturn(TIMEOUT_FIRST_PAGE); + when(config.getDuration(CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES)) + .thenReturn(TIMEOUT_OTHER_PAGES); + when(config.getInt(CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES)).thenReturn(4); + return harness; + } + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java new file mode 100644 index 00000000000..ed2d56e1473 --- /dev/null +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.verify; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import org.assertj.core.api.ThrowableAssert.ThrowingCallable; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +public class DefaultContinuousAsyncResultSetTest { + + @Mock private ColumnDefinitions columnDefinitions; + @Mock private ExecutionInfo executionInfo; + @Mock private ContinuousCqlRequestHandler handler; + @Mock private CountingIterator rows; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + } + + @Test + public void should_fail_to_fetch_next_page_if_last() { + // Given + given(executionInfo.getPagingState()).willReturn(null); + DefaultContinuousAsyncResultSet resultSet = + new DefaultContinuousAsyncResultSet( + rows, columnDefinitions, 1, false, executionInfo, handler); + + // When + boolean hasMorePages = resultSet.hasMorePages(); + ThrowingCallable nextPage = resultSet::fetchNextPage; + + // Then + assertThat(hasMorePages).isFalse(); + 
assertThatThrownBy(nextPage) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Can't call fetchNextPage() on the last page"); + } + + @Test + public void should_invoke_handler_to_fetch_next_page() { + // Given + CompletableFuture mockResultFuture = new CompletableFuture<>(); + given(handler.dequeueOrCreatePending()).willReturn(mockResultFuture); + DefaultContinuousAsyncResultSet resultSet = + new DefaultContinuousAsyncResultSet( + rows, columnDefinitions, 1, true, executionInfo, handler); + + // When + boolean hasMorePages = resultSet.hasMorePages(); + CompletionStage nextPageFuture = resultSet.fetchNextPage(); + + // Then + assertThat(hasMorePages).isTrue(); + verify(handler).dequeueOrCreatePending(); + assertThat(nextPageFuture).isEqualTo(mockResultFuture); + } + + @Test + public void should_invoke_handler_to_cancel() { + // Given + DefaultContinuousAsyncResultSet resultSet = + new DefaultContinuousAsyncResultSet( + rows, columnDefinitions, 1, true, executionInfo, handler); + // When + resultSet.cancel(); + + // Then + verify(handler).cancel(); + } + + @Test + public void should_report_remaining_rows() { + // Given + given(rows.remaining()).willReturn(42); + DefaultContinuousAsyncResultSet resultSet = + new DefaultContinuousAsyncResultSet( + rows, columnDefinitions, 1, true, executionInfo, handler); + + // When + int remaining = resultSet.remaining(); + Iterable currentPage = resultSet.currentPage(); + + // Then + assertThat(remaining).isEqualTo(42); + assertThat(currentPage.iterator()).isSameAs(rows); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java new file mode 100644 index 00000000000..188cdb21be6 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java @@ -0,0 +1,143 @@ +/* + * 
Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import java.util.Arrays; +import java.util.Iterator; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import org.junit.Test; +import org.mockito.Mockito; + +public class DefaultContinuousResultSetTest { + + @Test + public void should_create_result_set_from_single_page() { + // Given + ContinuousAsyncResultSet page1 = mockPage(false, 0, 1, 2); + + // When + ResultSet resultSet = new DefaultContinuousResultSet(page1); + + // Then + assertThat(resultSet.getColumnDefinitions()).isSameAs(page1.getColumnDefinitions()); + assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); + assertThat(resultSet.getExecutionInfos()).containsExactly(page1.getExecutionInfo()); + + Iterator iterator = resultSet.iterator(); + + assertNextRow(iterator, 0); + assertNextRow(iterator, 1); + assertNextRow(iterator, 2); + + assertThat(iterator.hasNext()).isFalse(); + } + + @Test + public void should_create_result_set_from_multiple_pages() { + // Given + ContinuousAsyncResultSet page1 = mockPage(true, 0, 1, 2); + ContinuousAsyncResultSet page2 = mockPage(true, 3, 4, 5); + ContinuousAsyncResultSet page3 = mockPage(false, 6, 7, 8); + + complete(page1.fetchNextPage(), page2); + complete(page2.fetchNextPage(), page3); + + // When + ResultSet 
resultSet = new DefaultContinuousResultSet(page1); + + // Then + assertThat(resultSet.iterator().hasNext()).isTrue(); + + assertThat(resultSet.getColumnDefinitions()).isSameAs(page1.getColumnDefinitions()); + assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); + assertThat(resultSet.getExecutionInfos()).containsExactly(page1.getExecutionInfo()); + + Iterator iterator = resultSet.iterator(); + + assertNextRow(iterator, 0); + assertNextRow(iterator, 1); + assertNextRow(iterator, 2); + + assertThat(iterator.hasNext()).isTrue(); + // This should have triggered the fetch of page2 + assertThat(resultSet.getExecutionInfo()).isEqualTo(page2.getExecutionInfo()); + assertThat(resultSet.getExecutionInfos()) + .containsExactly(page1.getExecutionInfo(), page2.getExecutionInfo()); + + assertNextRow(iterator, 3); + assertNextRow(iterator, 4); + assertNextRow(iterator, 5); + + assertThat(iterator.hasNext()).isTrue(); + // This should have triggered the fetch of page3 + assertThat(resultSet.getExecutionInfo()).isEqualTo(page3.getExecutionInfo()); + assertThat(resultSet.getExecutionInfos()) + .containsExactly( + page1.getExecutionInfo(), page2.getExecutionInfo(), page3.getExecutionInfo()); + + assertNextRow(iterator, 6); + assertNextRow(iterator, 7); + assertNextRow(iterator, 8); + } + + private static ContinuousAsyncResultSet mockPage(boolean nextPage, Integer... 
data) { + ContinuousAsyncResultSet page = Mockito.mock(ContinuousAsyncResultSet.class); + + ColumnDefinitions columnDefinitions = Mockito.mock(ColumnDefinitions.class); + Mockito.when(page.getColumnDefinitions()).thenReturn(columnDefinitions); + + ExecutionInfo executionInfo = Mockito.mock(ExecutionInfo.class); + Mockito.when(page.getExecutionInfo()).thenReturn(executionInfo); + + if (nextPage) { + Mockito.when(page.hasMorePages()).thenReturn(true); + Mockito.when(page.fetchNextPage()).thenReturn(Mockito.spy(new CompletableFuture<>())); + } else { + Mockito.when(page.hasMorePages()).thenReturn(false); + Mockito.when(page.fetchNextPage()).thenThrow(new IllegalStateException()); + } + + Iterator rows = Arrays.asList(data).iterator(); + CountingIterator iterator = + new CountingIterator(data.length) { + @Override + protected Row computeNext() { + return rows.hasNext() ? mockRow(rows.next()) : endOfData(); + } + }; + Mockito.when(page.currentPage()).thenReturn(() -> iterator); + Mockito.when(page.remaining()).thenAnswer(invocation -> iterator.remaining()); + + return page; + } + + private static Row mockRow(int index) { + Row row = Mockito.mock(Row.class); + Mockito.when(row.getInt(0)).thenReturn(index); + return row; + } + + private static void complete( + CompletionStage stage, ContinuousAsyncResultSet result) { + stage.toCompletableFuture().complete(result); + } + + private static void assertNextRow(Iterator iterator, int expectedValue) { + assertThat(iterator.hasNext()).isTrue(); + Row row0 = iterator.next(); + assertThat(row0.getInt(0)).isEqualTo(expectedValue); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java new file mode 100644 index 00000000000..30f630b284c --- /dev/null +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java @@ -0,0 +1,162 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.continuous.reactive; + +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V1; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestHandlerTestBase; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.reactivex.Flowable; +import java.util.List; +import org.junit.Test; + +public class ContinuousCqlRequestReactiveProcessorTest extends ContinuousCqlRequestHandlerTestBase { + + @Test + public void should_be_able_to_process_reactive_result_set() { + ContinuousCqlRequestReactiveProcessor processor = + new ContinuousCqlRequestReactiveProcessor(new ContinuousCqlRequestAsyncProcessor()); + assertThat( + processor.canProcess( + 
UNDEFINED_IDEMPOTENCE_STATEMENT, + ContinuousCqlRequestReactiveProcessor.CONTINUOUS_REACTIVE_RESULT_SET)) + .isTrue(); + } + + @Test + public void should_create_request_handler() { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(DSE_V1); + try (RequestHandlerTestHarness harness = builder.build()) { + ContinuousCqlRequestReactiveProcessor processor = + new ContinuousCqlRequestReactiveProcessor(new ContinuousCqlRequestAsyncProcessor()); + assertThat( + processor.process( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test")) + .isInstanceOf(DefaultContinuousReactiveResultSet.class); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_single_page_result(DseProtocolVersion version) { + try (RequestHandlerTestHarness harness = + continuousHarnessBuilder() + .withProtocolVersion(version) + .withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())) + .build()) { + + DefaultSession session = harness.getSession(); + InternalDriverContext context = harness.getContext(); + + ContinuousReactiveResultSet publisher = + new ContinuousCqlRequestReactiveProcessor(new ContinuousCqlRequestAsyncProcessor()) + .process(UNDEFINED_IDEMPOTENCE_STATEMENT, session, context, "test"); + + List rows = Flowable.fromPublisher(publisher).toList().blockingGet(); + + assertThat(rows).hasSize(1); + ReactiveRow row = rows.get(0); + assertThat(row.getString("message")).isEqualTo("hello, world"); + ExecutionInfo executionInfo = row.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + 
assertThat(executionInfo.getWarnings()).isEmpty(); + + Flowable execInfosFlowable = + Flowable.fromPublisher(publisher.getExecutionInfos()); + assertThat(execInfosFlowable.toList().blockingGet()).containsExactly(executionInfo); + + Flowable colDefsFlowable = + Flowable.fromPublisher(publisher.getColumnDefinitions()); + assertThat(colDefsFlowable.toList().blockingGet()) + .containsExactly(row.getColumnDefinitions()); + + Flowable wasAppliedFlowable = Flowable.fromPublisher(publisher.wasApplied()); + assertThat(wasAppliedFlowable.toList().blockingGet()).containsExactly(row.wasApplied()); + } + } + + @Test + @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_multi_page_result(DseProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + continuousHarnessBuilder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + DefaultSession session = harness.getSession(); + InternalDriverContext context = harness.getContext(); + + ContinuousReactiveResultSet publisher = + new ContinuousCqlRequestReactiveProcessor(new ContinuousCqlRequestAsyncProcessor()) + .process(UNDEFINED_IDEMPOTENCE_STATEMENT, session, context, "test"); + + Flowable rowsPublisher = Flowable.fromPublisher(publisher).cache(); + rowsPublisher.subscribe(); + + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, true))); + + List rows = rowsPublisher.toList().blockingGet(); + assertThat(rows).hasSize(20); + + ReactiveRow first = rows.get(0); + ExecutionInfo firstExecutionInfo = first.getExecutionInfo(); + assertThat(firstExecutionInfo.getCoordinator()).isEqualTo(node1); + assertThat(firstExecutionInfo.getErrors()).isEmpty(); + assertThat(firstExecutionInfo.getIncomingPayload()).isEmpty(); + 
assertThat(firstExecutionInfo.getPagingState()).isNotNull(); + assertThat(firstExecutionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(firstExecutionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(firstExecutionInfo.getWarnings()).isEmpty(); + + ReactiveRow inSecondPage = rows.get(10); + ExecutionInfo secondExecutionInfo = inSecondPage.getExecutionInfo(); + assertThat(secondExecutionInfo.getCoordinator()).isEqualTo(node1); + assertThat(secondExecutionInfo.getErrors()).isEmpty(); + assertThat(secondExecutionInfo.getIncomingPayload()).isEmpty(); + assertThat(secondExecutionInfo.getPagingState()).isNull(); + assertThat(secondExecutionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(secondExecutionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(secondExecutionInfo.getWarnings()).isEmpty(); + + Flowable execInfosFlowable = + Flowable.fromPublisher(publisher.getExecutionInfos()); + assertThat(execInfosFlowable.toList().blockingGet()) + .containsExactly(firstExecutionInfo, secondExecutionInfo); + + Flowable colDefsFlowable = + Flowable.fromPublisher(publisher.getColumnDefinitions()); + assertThat(colDefsFlowable.toList().blockingGet()) + .containsExactly(first.getColumnDefinitions()); + + Flowable wasAppliedFlowable = Flowable.fromPublisher(publisher.wasApplied()); + assertThat(wasAppliedFlowable.toList().blockingGet()).containsExactly(first.wasApplied()); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java new file mode 100644 index 00000000000..9555e52a2ed --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static com.datastax.dse.driver.DseTestFixtures.singleDseRow; +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V1; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.DseTestFixtures; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestHandlerTestBase; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.reactivex.Flowable; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import org.junit.Test; + +public class CqlRequestReactiveProcessorTest extends CqlRequestHandlerTestBase { + + @Test + public void should_be_able_to_process_reactive_result_set() { + CqlRequestReactiveProcessor processor = + new CqlRequestReactiveProcessor(new 
CqlRequestAsyncProcessor()); + assertThat( + processor.canProcess( + UNDEFINED_IDEMPOTENCE_STATEMENT, CqlRequestReactiveProcessor.REACTIVE_RESULT_SET)) + .isTrue(); + } + + @Test + public void should_create_request_handler() { + RequestHandlerTestHarness.Builder builder = + RequestHandlerTestHarness.builder().withProtocolVersion(DSE_V1); + try (RequestHandlerTestHarness harness = builder.build()) { + CqlRequestReactiveProcessor processor = + new CqlRequestReactiveProcessor(new CqlRequestAsyncProcessor()); + assertThat( + processor.process( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test")) + .isInstanceOf(DefaultReactiveResultSet.class); + } + } + + @Test + @UseDataProvider(value = "allDseAndOssProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_single_page_result(ProtocolVersion version) { + try (RequestHandlerTestHarness harness = + RequestHandlerTestHarness.builder() + .withProtocolVersion(version) + .withResponse(node1, defaultFrameOf(singleDseRow())) + .build()) { + + DefaultSession session = harness.getSession(); + InternalDriverContext context = harness.getContext(); + + ReactiveResultSet publisher = + new CqlRequestReactiveProcessor(new CqlRequestAsyncProcessor()) + .process(UNDEFINED_IDEMPOTENCE_STATEMENT, session, context, "test"); + + List rows = Flowable.fromPublisher(publisher).toList().blockingGet(); + + assertThat(rows).hasSize(1); + ReactiveRow row = rows.get(0); + assertThat(row.getString("message")).isEqualTo("hello, world"); + ExecutionInfo executionInfo = row.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node1); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + 
assertThat(executionInfo.getWarnings()).isEmpty(); + + Flowable execInfosFlowable = + Flowable.fromPublisher(publisher.getExecutionInfos()); + assertThat(execInfosFlowable.toList().blockingGet()).containsExactly(executionInfo); + + Flowable colDefsFlowable = + Flowable.fromPublisher(publisher.getColumnDefinitions()); + assertThat(colDefsFlowable.toList().blockingGet()) + .containsExactly(row.getColumnDefinitions()); + + Flowable wasAppliedFlowable = Flowable.fromPublisher(publisher.wasApplied()); + assertThat(wasAppliedFlowable.toList().blockingGet()).containsExactly(row.wasApplied()); + } + } + + @Test + @UseDataProvider(value = "allDseAndOssProtocolVersions", location = DseTestDataProviders.class) + public void should_complete_multi_page_result(ProtocolVersion version) { + RequestHandlerTestHarness.Builder builder = + RequestHandlerTestHarness.builder().withProtocolVersion(version); + PoolBehavior node1Behavior = builder.customBehavior(node1); + try (RequestHandlerTestHarness harness = builder.build()) { + + DefaultSession session = harness.getSession(); + InternalDriverContext context = harness.getContext(); + + // The 2nd page is obtained by an "external" call to session.executeAsync(), + // so we need to mock that. 
+ CompletableFuture page2Future = new CompletableFuture<>(); + when(session.executeAsync(any(Statement.class))).thenAnswer(invocation -> page2Future); + ExecutionInfo mockInfo = mock(ExecutionInfo.class); + + ReactiveResultSet publisher = + new CqlRequestReactiveProcessor(new CqlRequestAsyncProcessor()) + .process(UNDEFINED_IDEMPOTENCE_STATEMENT, session, context, "test"); + + Flowable rowsPublisher = Flowable.fromPublisher(publisher).cache(); + rowsPublisher.subscribe(); + + // emulate arrival of page 1 + node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); + + // emulate arrival of page 2 following the call to session.executeAsync() + page2Future.complete( + Conversions.toResultSet( + DseTestFixtures.tenDseRows(2, true), + mockInfo, + harness.getSession(), + harness.getContext())); + + List rows = rowsPublisher.toList().blockingGet(); + assertThat(rows).hasSize(20); + + ReactiveRow first = rows.get(0); + ExecutionInfo firstExecutionInfo = first.getExecutionInfo(); + assertThat(firstExecutionInfo.getCoordinator()).isEqualTo(node1); + assertThat(firstExecutionInfo.getErrors()).isEmpty(); + assertThat(firstExecutionInfo.getIncomingPayload()).isEmpty(); + assertThat(firstExecutionInfo.getPagingState()).isNotNull(); + assertThat(firstExecutionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(firstExecutionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(firstExecutionInfo.getWarnings()).isEmpty(); + + ReactiveRow inSecondPage = rows.get(10); + ExecutionInfo secondExecutionInfo = inSecondPage.getExecutionInfo(); + assertThat(secondExecutionInfo).isSameAs(mockInfo); + + Flowable execInfosFlowable = + Flowable.fromPublisher(publisher.getExecutionInfos()); + assertThat(execInfosFlowable.toList().blockingGet()) + .containsExactly(firstExecutionInfo, secondExecutionInfo); + + Flowable colDefsFlowable = + Flowable.fromPublisher(publisher.getColumnDefinitions()); + 
assertThat(colDefsFlowable.toList().blockingGet()) + .containsExactly(first.getColumnDefinitions()); + + Flowable wasAppliedFlowable = Flowable.fromPublisher(publisher.wasApplied()); + assertThat(wasAppliedFlowable.toList().blockingGet()).containsExactly(first.wasApplied()); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java new file mode 100644 index 00000000000..fe12243cf0a --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import io.reactivex.Flowable; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import org.reactivestreams.Publisher; +import org.reactivestreams.tck.PublisherVerification; +import org.reactivestreams.tck.TestEnvironment; + +public class DefaultReactiveResultSetTckTest extends PublisherVerification { + + public DefaultReactiveResultSetTckTest() { + super(new TestEnvironment()); + } + + @Override + public Publisher createPublisher(long elements) { + // The TCK usually requests between 0 and 20 items, or Long.MAX_VALUE. + // Past 3 elements it never checks how many elements have been effectively produced, + // so we can safely cap at, say, 20. 
+ int effective = (int) Math.min(elements, 20L); + return new DefaultReactiveResultSet(() -> createResults(effective)); + } + + @Override + public Publisher createFailedPublisher() { + DefaultReactiveResultSet publisher = new DefaultReactiveResultSet(() -> createResults(1)); + // Since our publisher does not support multiple + // subscriptions, we use that to create a failed publisher. + publisher.subscribe(new TestSubscriber<>()); + return publisher; + } + + private static CompletableFuture createResults(int elements) { + CompletableFuture previous = null; + if (elements > 0) { + // create pages of 5 elements each to exercise pagination + List pages = + Flowable.range(0, elements).buffer(5).map(List::size).toList().blockingGet(); + Collections.reverse(pages); + for (Integer size : pages) { + CompletableFuture future = new CompletableFuture<>(); + future.complete(new MockAsyncResultSet(size, previous)); + previous = future; + } + } else { + previous = new CompletableFuture<>(); + previous.complete(new MockAsyncResultSet(0, null)); + } + return previous; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java new file mode 100644 index 00000000000..ea8ad0eb938 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java @@ -0,0 +1,88 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static org.mockito.Mockito.mock; + +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import org.assertj.core.util.Lists; + +public class MockAsyncResultSet implements AsyncResultSet { + + private final List rows; + private final Iterator iterator; + private final CompletionStage nextPage; + private final ExecutionInfo executionInfo = mock(ExecutionInfo.class); + private final ColumnDefinitions columnDefinitions = mock(ColumnDefinitions.class); + private int remaining; + + public MockAsyncResultSet(int size, CompletionStage nextPage) { + this(IntStream.range(0, size).boxed().map(MockRow::new).collect(Collectors.toList()), nextPage); + } + + public MockAsyncResultSet(List rows, CompletionStage nextPage) { + this.rows = rows; + iterator = rows.iterator(); + remaining = rows.size(); + this.nextPage = nextPage; + } + + @Override + public Row one() { + Row next = iterator.next(); + remaining--; + return next; + } + + @Override + public int remaining() { + return remaining; + } + + @NonNull + @Override + public List currentPage() { + return Lists.newArrayList(rows); + } + + @Override + public boolean hasMorePages() { + return nextPage != null; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws IllegalStateException { + return nextPage; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return columnDefinitions; + } + + @NonNull + @Override + 
public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @Override + public boolean wasApplied() { + return true; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java new file mode 100644 index 00000000000..00a973d5ba5 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static org.mockito.Mockito.mock; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; + +class MockRow implements Row { + + private int index; + + MockRow(int index) { + this.index = index; + } + + @Override + public int size() { + return 0; + } + + @NonNull + @Override + public CodecRegistry codecRegistry() { + return mock(CodecRegistry.class); + } + + @NonNull + @Override + public ProtocolVersion protocolVersion() { + return DefaultProtocolVersion.V4; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return EmptyColumnDefinitions.INSTANCE; + } 
+ + @Override + public int firstIndexOf(@NonNull String name) { + return 0; + } + + @Override + public int firstIndexOf(@NonNull CqlIdentifier id) { + return 0; + } + + @NonNull + @Override + public DataType getType(int i) { + return DataTypes.INT; + } + + @NonNull + @Override + public DataType getType(@NonNull String name) { + return DataTypes.INT; + } + + @NonNull + @Override + public DataType getType(@NonNull CqlIdentifier id) { + return DataTypes.INT; + } + + @Override + public ByteBuffer getBytesUnsafe(int i) { + return null; + } + + @Override + public boolean isDetached() { + return false; + } + + @Override + public void attach(@NonNull AttachmentPoint attachmentPoint) {} + + // equals and hashCode required for TCK tests that check that two subscribers + // receive the exact same set of items. + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof MockRow)) { + return false; + } + MockRow mockRow = (MockRow) o; + return index == mockRow.index; + } + + @Override + public int hashCode() { + return index; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java new file mode 100644 index 00000000000..0f5bed7b581 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java @@ -0,0 +1,139 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import org.junit.Test; + +public class ReactiveResultSetSubscriptionTest { + + @Test + public void should_retrieve_entire_result_set() { + CompletableFuture future1 = new CompletableFuture<>(); + CompletableFuture future2 = new CompletableFuture<>(); + CompletableFuture future3 = new CompletableFuture<>(); + MockAsyncResultSet page1 = new MockAsyncResultSet(3, future2); + MockAsyncResultSet page2 = new MockAsyncResultSet(3, future3); + MockAsyncResultSet page3 = new MockAsyncResultSet(3, null); + TestSubscriber mainSubscriber = new TestSubscriber<>(); + TestSubscriber colDefsSubscriber = new TestSubscriber<>(); + TestSubscriber execInfosSubscriber = new TestSubscriber<>(); + TestSubscriber wasAppliedSubscriber = new TestSubscriber<>(); + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + mainSubscriber, colDefsSubscriber, execInfosSubscriber, wasAppliedSubscriber); + mainSubscriber.onSubscribe(subscription); + subscription.start(() -> future1); + future1.complete(page1); + future2.complete(page2); + future3.complete(page3); + mainSubscriber.awaitTermination(); + List expected = new ArrayList<>(page1.currentPage()); + expected.addAll(page2.currentPage()); + expected.addAll(page3.currentPage()); + 
assertThat(mainSubscriber.getElements()).extracting("row").isEqualTo(expected); + assertThat(colDefsSubscriber.getElements()) + .hasSize(1) + .containsExactly(page1.getColumnDefinitions()); + assertThat(execInfosSubscriber.getElements()) + .hasSize(3) + .containsExactly( + page1.getExecutionInfo(), page2.getExecutionInfo(), page3.getExecutionInfo()); + assertThat(wasAppliedSubscriber.getElements()).hasSize(1).containsExactly(true); + } + + @Test + public void should_report_error_on_first_page() { + CompletableFuture future1 = new CompletableFuture<>(); + TestSubscriber mainSubscriber = new TestSubscriber<>(); + TestSubscriber colDefsSubscriber = new TestSubscriber<>(); + TestSubscriber execInfosSubscriber = new TestSubscriber<>(); + TestSubscriber wasAppliedSubscriber = new TestSubscriber<>(); + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + mainSubscriber, colDefsSubscriber, execInfosSubscriber, wasAppliedSubscriber); + mainSubscriber.onSubscribe(subscription); + subscription.start(() -> future1); + future1.completeExceptionally(new UnavailableException(null, null, 0, 0)); + mainSubscriber.awaitTermination(); + assertThat(mainSubscriber.getError()).isNotNull().isInstanceOf(UnavailableException.class); + assertThat(colDefsSubscriber.getError()).isNotNull().isInstanceOf(UnavailableException.class); + assertThat(execInfosSubscriber.getError()).isNotNull().isInstanceOf(UnavailableException.class); + assertThat(wasAppliedSubscriber.getError()) + .isNotNull() + .isInstanceOf(UnavailableException.class); + } + + @Test + public void should_report_synchronous_failure_on_first_page() { + TestSubscriber mainSubscriber = new TestSubscriber<>(); + TestSubscriber colDefsSubscriber = new TestSubscriber<>(); + TestSubscriber execInfosSubscriber = new TestSubscriber<>(); + TestSubscriber wasAppliedSubscriber = new TestSubscriber<>(); + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + mainSubscriber, 
colDefsSubscriber, execInfosSubscriber, wasAppliedSubscriber); + mainSubscriber.onSubscribe(subscription); + subscription.start( + () -> { + throw new IllegalStateException(); + }); + mainSubscriber.awaitTermination(); + assertThat(mainSubscriber.getError()).isNotNull().isInstanceOf(IllegalStateException.class); + assertThat(colDefsSubscriber.getError()).isNotNull().isInstanceOf(IllegalStateException.class); + assertThat(execInfosSubscriber.getError()) + .isNotNull() + .isInstanceOf(IllegalStateException.class); + assertThat(wasAppliedSubscriber.getError()) + .isNotNull() + .isInstanceOf(IllegalStateException.class); + } + + @Test + public void should_report_error_on_intermediary_page() { + CompletableFuture future1 = new CompletableFuture<>(); + CompletableFuture future2 = new CompletableFuture<>(); + MockAsyncResultSet page1 = new MockAsyncResultSet(3, future2); + TestSubscriber mainSubscriber = new TestSubscriber<>(); + TestSubscriber colDefsSubscriber = new TestSubscriber<>(); + TestSubscriber execInfosSubscriber = new TestSubscriber<>(); + TestSubscriber wasAppliedSubscriber = new TestSubscriber<>(); + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + mainSubscriber, colDefsSubscriber, execInfosSubscriber, wasAppliedSubscriber); + mainSubscriber.onSubscribe(subscription); + subscription.start(() -> future1); + future1.complete(page1); + future2.completeExceptionally(new UnavailableException(null, null, 0, 0)); + mainSubscriber.awaitTermination(); + assertThat(mainSubscriber.getElements()).extracting("row").isEqualTo(page1.currentPage()); + assertThat(mainSubscriber.getError()).isNotNull().isInstanceOf(UnavailableException.class); + // colDefsSubscriber completed normally when page1 arrived + assertThat(colDefsSubscriber.getError()).isNull(); + assertThat(colDefsSubscriber.getElements()) + .hasSize(1) + .containsExactly(page1.getColumnDefinitions()); + // execInfosSubscriber completed with error, but should have emitted 1 
item for page1 + assertThat(execInfosSubscriber.getElements()) + .hasSize(1) + .containsExactly(page1.getExecutionInfo()); + assertThat(execInfosSubscriber.getError()).isNotNull().isInstanceOf(UnavailableException.class); + // colDefsSubscriber completed normally when page1 arrived + assertThat(wasAppliedSubscriber.getElements()).hasSize(1).containsExactly(true); + assertThat(wasAppliedSubscriber.getError()).isNull(); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java new file mode 100644 index 00000000000..b7238812913 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import org.reactivestreams.Publisher; +import org.reactivestreams.tck.PublisherVerification; +import org.reactivestreams.tck.TestEnvironment; + +public class SimpleUnicastProcessorTckTest extends PublisherVerification { + + public SimpleUnicastProcessorTckTest() { + super(new TestEnvironment()); + } + + @Override + public Publisher createPublisher(long elements) { + // The TCK usually requests between 0 and 20 items, or Long.MAX_VALUE. + // Past 3 elements it never checks how many elements have been effectively produced, + // so we can safely cap at, say, 20. 
+ int effective = (int) Math.min(elements, 20L); + SimpleUnicastProcessor processor = new SimpleUnicastProcessor<>(); + for (int i = 0; i < effective; i++) { + processor.onNext(i); + } + processor.onComplete(); + return processor; + } + + @Override + public Publisher createFailedPublisher() { + SimpleUnicastProcessor processor = new SimpleUnicastProcessor<>(); + // Since our publisher does not support multiple + // subscriptions, we use that to create a failed publisher. + processor.subscribe(new TestSubscriber<>()); + return processor; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java new file mode 100644 index 00000000000..1f44fa5c6cc --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.Test; + +public class SimpleUnicastProcessorTest { + + /** Test for JAVA-2387. 
*/ + @Test + public void should_propagate_upstream_signals_when_downstream_already_subscribed() { + // given + SimpleUnicastProcessor processor = new SimpleUnicastProcessor<>(); + TestSubscriber subscriber = new TestSubscriber<>(); + // when + processor.subscribe(subscriber); // subscription happens before signals arrive + processor.onNext(1); + processor.onComplete(); + subscriber.awaitTermination(); + // then + assertThat(subscriber.getElements()).hasSize(1).containsExactly(1); + assertThat(subscriber.getError()).isNull(); + } + + @Test + public void should_delay_upstream_signals_until_downstream_is_subscribed() { + // given + SimpleUnicastProcessor processor = new SimpleUnicastProcessor<>(); + TestSubscriber subscriber = new TestSubscriber<>(); + // when + processor.onNext(1); + processor.onComplete(); + processor.subscribe(subscriber); // subscription happens after signals arrive + subscriber.awaitTermination(); + // then + assertThat(subscriber.getElements()).hasSize(1).containsExactly(1); + assertThat(subscriber.getError()).isNull(); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java new file mode 100644 index 00000000000..eaa2d8a5dbf --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.cql.reactive; + +import static org.assertj.core.api.Fail.fail; + +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +public class TestSubscriber implements Subscriber { + + private final List elements = new ArrayList<>(); + private final CountDownLatch latch = new CountDownLatch(1); + private Subscription subscription; + private Throwable error; + + @Override + public void onSubscribe(Subscription s) { + if (subscription != null) { + fail("already subscribed"); + } + subscription = s; + s.request(Long.MAX_VALUE); + } + + @Override + public void onNext(T t) { + elements.add(t); + } + + @Override + public void onError(Throwable t) { + error = t; + latch.countDown(); + } + + @Override + public void onComplete() { + latch.countDown(); + } + + @Nullable + public Throwable getError() { + return error; + } + + @NonNull + public List getElements() { + return elements; + } + + public void awaitTermination() { + Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java new file mode 100644 index 00000000000..d3137071471 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java @@ -0,0 +1,201 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.esri.core.geometry.ogc.OGCLineString; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import org.junit.Test; + +public class DefaultLineStringTest { + private final LineString lineString = + LineString.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)); + + private final String wkt = "LINESTRING (30 10, 10 30, 40 40)"; + + private final String json = + "{\"type\":\"LineString\",\"coordinates\":[[30.0,10.0],[10.0,30.0],[40.0,40.0]]}"; + + @Test + public void should_parse_valid_well_known_text() { + assertThat(LineString.fromWellKnownText(wkt)).isEqualTo(lineString); + } + + @Test + public void should_fail_to_parse_invalid_well_known_text() { + assertInvalidWkt("linestring()"); + assertInvalidWkt("linestring(30 10 20, 10 30 20)"); // 3d + assertInvalidWkt("linestring(0 0, 1 1, 0 1, 1 0)"); // crossing itself + assertInvalidWkt("superlinestring(30 10, 10 30, 40 40)"); + } + + @Test + public void should_convert_to_well_known_text() { + assertThat(lineString.toString()).isEqualTo(wkt); + } + + @Test + public void should_convert_to_well_known_binary() { + ByteBuffer actual = lineString.asWellKnownBinary(); + + ByteBuffer expected = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + expected.position(0); + expected.put((byte) (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 
1 : 0)); // endianness + expected.putInt(2); // type + expected.putInt(3); // num lineStrings + expected.putDouble(30); // x1 + expected.putDouble(10); // y1 + expected.putDouble(10); // x2 + expected.putDouble(30); // y2 + expected.putDouble(40); // x3 + expected.putDouble(40); // y3 + expected.flip(); + + assertThat(actual).isEqualTo(expected); + } + + @Test + public void should_load_from_well_known_binary() { + ByteBuffer bb = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + bb.position(0); + bb.put((byte) (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 1 : 0)); // endianness + bb.putInt(2); // type + bb.putInt(3); // num lineStrings + bb.putDouble(30); // x1 + bb.putDouble(10); // y1 + bb.putDouble(10); // x2 + bb.putDouble(30); // y2 + bb.putDouble(40); // x3 + bb.putDouble(40); // y3 + bb.flip(); + + assertThat(LineString.fromWellKnownBinary(bb)).isEqualTo(lineString); + } + + @Test + public void should_parse_valid_geo_json() { + assertThat(LineString.fromGeoJson(json)).isEqualTo(lineString); + } + + @Test + public void should_convert_to_geo_json() { + assertThat(lineString.asGeoJson()).isEqualTo(json); + } + + @Test + public void should_convert_to_ogc_line_string() { + assertThat(((DefaultLineString) lineString).getOgcGeometry()).isInstanceOf(OGCLineString.class); + } + + @Test + public void should_produce_same_hashCode_for_equal_objects() { + LineString line1 = + LineString.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)); + LineString line2 = LineString.fromWellKnownText(wkt); + assertThat(line1).isEqualTo(line2); + assertThat(line1.hashCode()).isEqualTo(line2.hashCode()); + } + + @Test + public void should_expose_points() { + assertThat(lineString.getPoints()) + .containsOnly( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)); + assertThat(LineString.fromWellKnownText(wkt).getPoints()) + .containsOnly( + Point.fromCoordinates(30, 
10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)); + } + + @Test + public void should_encode_and_decode() throws Exception { + assertThat(SerializationUtils.serializeAndDeserialize(lineString)).isEqualTo(lineString); + } + + @Test + public void should_contain_self() { + assertThat(lineString.contains(lineString)).isTrue(); + } + + @Test + public void should_contain_all_intersected_points_except_start_and_end() { + LineString s = + LineString.fromPoints( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 30), + Point.fromCoordinates(30, 30)); + assertThat(s.contains(Point.fromCoordinates(0, 0))).isFalse(); + assertThat(s.contains(Point.fromCoordinates(0, 15))).isTrue(); + assertThat(s.contains(Point.fromCoordinates(0, 30))).isTrue(); + assertThat(s.contains(Point.fromCoordinates(15, 30))).isTrue(); + assertThat(s.contains(Point.fromCoordinates(30, 30))).isFalse(); + } + + @Test + public void should_contain_substring() { + assertThat( + lineString.contains( + LineString.fromPoints( + Point.fromCoordinates(30, 10), Point.fromCoordinates(10, 30)))) + .isTrue(); + } + + @Test + public void should_not_contain_unrelated_string() { + assertThat( + lineString.contains( + LineString.fromPoints( + Point.fromCoordinates(10, 10), Point.fromCoordinates(30, 30)))) + .isFalse(); + } + + @Test + public void should_not_contain_polygon() { + LineString s = + LineString.fromPoints( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 30), + Point.fromCoordinates(30, 30), + Point.fromCoordinates(30, 0)); + LineString p = + LineString.fromPoints( + Point.fromCoordinates(10, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 20), + Point.fromCoordinates(20, 10)); + assertThat(s.contains(p)).isFalse(); + } + + @Test + public void should_accept_empty_shape() throws Exception { + DefaultLineString s = ((DefaultLineString) LineString.fromWellKnownText("LINESTRING EMPTY")); + assertThat(s.getOgcGeometry().isEmpty()).isTrue(); + } + + private 
void assertInvalidWkt(String s) { + try { + LineString.fromWellKnownText(s); + fail("Should have thrown InvalidTypeException"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java new file mode 100644 index 00000000000..558d49173d7 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.esri.core.geometry.ogc.OGCPoint; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import org.junit.Test; + +public class DefaultPointTest { + + private DefaultPoint point = new DefaultPoint(1.1, 2.2); + + private final String wkt = "POINT (1.1 2.2)"; + + private final String json = "{\"type\":\"Point\",\"coordinates\":[1.1,2.2]}"; + + @Test + public void should_parse_valid_well_known_text() { + assertThat(Point.fromWellKnownText(wkt)).isEqualTo(point); + } + + @Test + public void should_fail_to_parse_invalid_well_known_text() { + assertInvalidWkt("superpoint(1.1 2.2 3.3)"); + } + + @Test + public void should_convert_to_well_known_text() { + assertThat(point.toString()).isEqualTo(wkt); + } + + @Test + public void should_convert_to_well_knowm_binary() { + ByteBuffer actual = point.asWellKnownBinary(); + + ByteBuffer expected = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + expected.position(0); + expected.put((byte) 
(ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 1 : 0)); // endianness + expected.putInt(1); // type + expected.putDouble(1.1); // x + expected.putDouble(2.2); // y + expected.flip(); + + assertThat(actual).isEqualTo(expected); + } + + @Test + public void should_load_from_well_known_binary() { + ByteBuffer bb = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + bb.position(0); + bb.put((byte) (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 1 : 0)); // endianness + bb.putInt(1); // type + bb.putDouble(1.1); // x + bb.putDouble(2.2); // y + bb.flip(); + + assertThat(Point.fromWellKnownBinary(bb)).isEqualTo(point); + } + + @Test + public void should_parse_valid_geo_json() { + assertThat(Point.fromGeoJson(json)).isEqualTo(point); + } + + @Test + public void should_convert_to_geo_json() { + assertThat(point.asGeoJson()).isEqualTo(json); + } + + @Test + public void should_convert_to_ogc_point() { + assertThat(point.getOgcGeometry()).isInstanceOf(OGCPoint.class); + } + + @Test + public void should_produce_same_hashCode_for_equal_objects() { + Point point1 = new DefaultPoint(10, 20); + Point point2 = Point.fromWellKnownText("POINT (10 20)"); + assertThat(point1).isEqualTo(point2); + assertThat(point1.hashCode()).isEqualTo(point2.hashCode()); + } + + @Test + public void should_encode_and_decode() throws Exception { + assertThat(SerializationUtils.serializeAndDeserialize(point)).isEqualTo(point); + } + + @Test + public void should_contain_self() { + assertThat(point.contains(point)).isTrue(); + } + + @Test + public void should_not_contain_any_other_shape_than_self() { + DefaultPoint point2 = new DefaultPoint(1, 2); + DefaultPoint point3 = new DefaultPoint(1, 3); + assertThat(point.contains(point2)).isFalse(); + assertThat(point.contains(new DefaultLineString(point, point2))).isFalse(); + assertThat(point.contains(new DefaultPolygon(point, point2, point3))).isFalse(); + } + + @Test + public void should_accept_empty_shape() throws Exception { + 
DefaultPoint point = ((DefaultPoint) Point.fromWellKnownText("POINT EMPTY")); + assertThat(point.getOgcGeometry().isEmpty()).isTrue(); + } + + private void assertInvalidWkt(String s) { + try { + Point.fromWellKnownText(s); + fail("Should have thrown InvalidTypeException"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java new file mode 100644 index 00000000000..e015ce5cc33 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java @@ -0,0 +1,331 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.esri.core.geometry.ogc.OGCPolygon; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import org.junit.Test; + +public class DefaultPolygonTest { + + private Polygon polygon = + Polygon.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 40), + Point.fromCoordinates(40, 40)); + + private String wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))"; + + private String json = + "{\"type\":\"Polygon\",\"coordinates\":[[[30.0,10.0],[10.0,20.0],[20.0,40.0],[40.0,40.0],[30.0,10.0]]]}"; + + @Test + public void should_parse_valid_well_known_text() { + assertThat(Polygon.fromWellKnownText(wkt)).isEqualTo(polygon); + } + + @Test + 
public void should_fail_to_parse_invalid_well_known_text() { + assertInvalidWkt("polygon(())"); // malformed + assertInvalidWkt("polygon((30 10 1, 40 40 1, 20 40 1, 10 20 1, 30 10 1))"); // 3d + assertInvalidWkt("polygon((0 0, 1 1, 0 1, 1 0, 0 0))"); // crosses itself + assertInvalidWkt("polygon123((30 10, 40 40, 20 40, 10 20, 30 10))"); // malformed + } + + @Test + public void should_convert_to_well_known_binary() { + ByteBuffer actual = polygon.asWellKnownBinary(); + + ByteBuffer expected = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + expected.position(0); + expected.put((byte) (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 1 : 0)); // endianness + expected.putInt(3); // type + expected.putInt(1); // num rings + expected.putInt(5); // num polygons (ring 1/1) + expected.putDouble(30); // x1 + expected.putDouble(10); // y1 + expected.putDouble(40); // x2 + expected.putDouble(40); // y2 + expected.putDouble(20); // x3 + expected.putDouble(40); // y3 + expected.putDouble(10); // x4 + expected.putDouble(20); // y4 + expected.putDouble(30); // x5 + expected.putDouble(10); // y5 + expected.flip(); + + assertThat(actual).isEqualTo(expected); + } + + @Test + public void should_load_from_well_known_binary() { + ByteBuffer bb = ByteBuffer.allocate(1024).order(ByteOrder.nativeOrder()); + bb.position(0); + bb.put((byte) (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN ? 
1 : 0)); // endianness + bb.putInt(3); // type + bb.putInt(1); // num rings + bb.putInt(5); // num polygons (ring 1/1) + bb.putDouble(30); // x1 + bb.putDouble(10); // y1 + bb.putDouble(40); // x2 + bb.putDouble(40); // y2 + bb.putDouble(20); // x3 + bb.putDouble(40); // y3 + bb.putDouble(10); // x4 + bb.putDouble(20); // y4 + bb.putDouble(30); // x5 + bb.putDouble(10); // y5 + bb.flip(); + + assertThat(Polygon.fromWellKnownBinary(bb)).isEqualTo(polygon); + } + + @Test + public void should_parse_valid_geo_json() { + assertThat(Polygon.fromGeoJson(json)).isEqualTo(polygon); + } + + @Test + public void should_convert_to_geo_json() { + assertThat(polygon.asGeoJson()).isEqualTo(json); + } + + @Test + public void should_convert_to_ogc_polygon() { + assertThat(((DefaultPolygon) polygon).getOgcGeometry()).isInstanceOf(OGCPolygon.class); + } + + @Test + public void should_produce_same_hashCode_for_equal_objects() { + Polygon polygon1 = + Polygon.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 40), + Point.fromCoordinates(40, 40)); + Polygon polygon2 = Polygon.fromWellKnownText(wkt); + assertThat(polygon1).isEqualTo(polygon2); + assertThat(polygon1.hashCode()).isEqualTo(polygon2.hashCode()); + } + + @Test + public void should_build_with_constructor_without_checking_orientation() { + // By default, OGC requires outer rings to be clockwise and inner rings to be counterclockwise. + // We disable that in our constructors. + // This polygon has a single outer ring that is counterclockwise. 
+ Polygon polygon = + Polygon.fromPoints( + Point.fromCoordinates(5, 0), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(0, 0)); + assertThat(polygon.asWellKnownText()).isEqualTo("POLYGON ((0 0, 5 0, 5 3, 0 3, 0 0))"); + } + + @Test + public void should_build_complex_polygon_with_builder() { + Polygon polygon = + Polygon.builder() + .addRing( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)) + .addRing( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)) + .addRing( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)) + .build(); + assertThat(polygon.asWellKnownText()) + .isEqualTo( + "POLYGON ((0 0, 5 0, 5 3, 0 3, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1), (3 1, 3 2, 4 2, 4 1, 3 1))"); + } + + @Test + public void should_expose_rings() { + assertThat(polygon.getExteriorRing()) + .containsOnly( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 40), + Point.fromCoordinates(40, 40)); + assertThat(polygon.getInteriorRings().isEmpty()).isTrue(); + + Polygon fromWkt = Polygon.fromWellKnownText(wkt); + assertThat(fromWkt.getExteriorRing()) + .containsOnly( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 40), + Point.fromCoordinates(40, 40)); + assertThat(fromWkt.getInteriorRings().isEmpty()).isTrue(); + + Polygon complex = + Polygon.builder() + .addRing( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)) + .addRing( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)) + .addRing( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)) 
+ .build(); + assertThat(complex.getExteriorRing()) + .containsOnly( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)); + assertThat(complex.getInteriorRings()).hasSize(2); + assertThat(complex.getInteriorRings().get(0)) + .containsOnly( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)); + assertThat(complex.getInteriorRings().get(1)) + .containsOnly( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)); + + Polygon complexFromWkt = + Polygon.fromWellKnownText( + "POLYGON ((0 0, 5 0, 5 3, 0 3, 0 0), (1 1, 1 2, 2 2, 2 1, 1 1), (3 1, 3 2, 4 2, 4 1, 3 1))"); + assertThat(complexFromWkt.getExteriorRing()) + .containsOnly( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)); + assertThat(complexFromWkt.getInteriorRings()).hasSize(2); + assertThat(complexFromWkt.getInteriorRings().get(0)) + .containsOnly( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)); + assertThat(complexFromWkt.getInteriorRings().get(1)) + .containsOnly( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)); + } + + @Test + public void should_encode_and_decode() throws Exception { + assertThat(SerializationUtils.serializeAndDeserialize(polygon)).isEqualTo(polygon); + } + + @Test + public void should_contain_self() { + assertThat(polygon.contains(polygon)).isTrue(); + } + + @Test + public void should_not_contain_point_or_linestring_on_exterior_ring() { + assertThat(polygon.contains(Point.fromCoordinates(30, 10))).isFalse(); + assertThat(polygon.contains(Point.fromCoordinates(30, 40))).isFalse(); + assertThat( + polygon.contains( + LineString.fromPoints( + 
Point.fromCoordinates(35, 40), Point.fromCoordinates(25, 40)))) + .isFalse(); + } + + @Test + public void should_contain_interior_shape() { + assertThat(polygon.contains(Point.fromCoordinates(20, 20))).isTrue(); + assertThat( + polygon.contains( + LineString.fromPoints( + Point.fromCoordinates(20, 20), Point.fromCoordinates(30, 20)))) + .isTrue(); + assertThat( + polygon.contains( + Polygon.fromPoints( + Point.fromCoordinates(20, 20), + Point.fromCoordinates(30, 20), + Point.fromCoordinates(20, 30)))) + .isTrue(); + } + + @Test + public void should_not_contain_exterior_shape() { + assertThat(polygon.contains(Point.fromCoordinates(10, 10))).isFalse(); + assertThat( + polygon.contains( + LineString.fromPoints( + Point.fromCoordinates(10, 10), Point.fromCoordinates(20, 20)))) + .isFalse(); + assertThat( + polygon.contains( + Polygon.fromPoints( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 10), + Point.fromCoordinates(10, 10)))) + .isFalse(); + } + + @Test + public void should_not_contain_shapes_in_interior_hole() { + Polygon complex = + Polygon.builder() + .addRing( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(30, 0), + Point.fromCoordinates(30, 30), + Point.fromCoordinates(0, 30)) + .addRing( + Point.fromCoordinates(10, 10), + Point.fromCoordinates(20, 10), + Point.fromCoordinates(20, 20), + Point.fromCoordinates(10, 20)) + .build(); + assertThat(complex.contains(Point.fromCoordinates(15, 15))).isFalse(); + } + + @Test + public void should_accept_empty_shape() throws Exception { + Polygon polygon = Polygon.fromWellKnownText("POLYGON EMPTY"); + assertThat(polygon.getExteriorRing()).isEmpty(); + assertThat(((DefaultPolygon) polygon).getOgcGeometry().isEmpty()).isTrue(); + } + + private void assertInvalidWkt(String s) { + try { + Polygon.fromWellKnownText(s); + fail("Should have thrown InvalidTypeException"); + } catch (IllegalArgumentException e) { + // expected + } + } +} diff --git 
a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java new file mode 100644 index 00000000000..1279390b491 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import org.junit.Test; + +public class DistanceTest { + + private final Point point = Point.fromCoordinates(1.1, 2.2); + private final Distance distance = new Distance(point, 7.0); + private final String wkt = "DISTANCE((1.1 2.2) 7.0)"; + + @Test + public void should_parse_valid_well_known_text() { + Distance fromWkt = Distance.fromWellKnownText(wkt); + assertThat(fromWkt.getRadius()).isEqualTo(7.0); + assertThat(fromWkt.getCenter()).isEqualTo(point); + assertThat(Distance.fromWellKnownText(wkt)).isEqualTo(distance); + // whitespace doesn't matter between distance and spec. + assertThat(Distance.fromWellKnownText("DISTANCE ((1.1 2.2) 7.0)")).isEqualTo(distance); + // case doesn't matter. 
+ assertThat(Distance.fromWellKnownText("distance((1.1 2.2) 7.0)")).isEqualTo(distance); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_well_known_text() { + Distance.fromWellKnownText("dist((1.1 2.2) 3.3)"); + } + + @Test + public void should_convert_to_well_known_text() { + assertThat(distance.asWellKnownText()).isEqualTo(wkt); + } + + @Test + public void should_contain_point() { + assertThat(distance.contains(Point.fromCoordinates(2.0, 3.0))).isTrue(); + } + + @Test + public void should_not_contain_point() { + // y axis falls outside of distance + assertThat(distance.contains(Point.fromCoordinates(2.0, 9.3))).isFalse(); + } + + @Test + public void should_contain_linestring() { + assertThat( + distance.contains( + LineString.fromPoints( + Point.fromCoordinates(2.0, 3.0), + Point.fromCoordinates(3.1, 6.2), + Point.fromCoordinates(-1.0, -2.0)))) + .isTrue(); + } + + @Test + public void should_not_contain_linestring() { + // second point falls outside of distance at y axis. + assertThat( + distance.contains( + LineString.fromPoints( + Point.fromCoordinates(2.0, 3.0), + Point.fromCoordinates(3.1, 9.2), + Point.fromCoordinates(-1.0, -2.0)))) + .isFalse(); + } + + @Test + public void should_contain_polygon() { + Polygon polygon = + Polygon.fromPoints( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 4), + Point.fromCoordinates(4, 4)); + assertThat(distance.contains(polygon)).isTrue(); + } + + @Test + public void should_not_contain_polygon() { + Polygon polygon = + Polygon.fromPoints( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 4), + Point.fromCoordinates(10, 4)); + // final point falls outside of distance at x axis. 
+ assertThat(distance.contains(polygon)).isFalse(); + } + + @Test(expected = UnsupportedOperationException.class) + public void should_fail_to_convert_to_ogc() { + distance.getOgcGeometry(); + } + + @Test(expected = UnsupportedOperationException.class) + public void should_fail_to_convert_to_wkb() { + distance.asWellKnownBinary(); + } + + @Test(expected = UnsupportedOperationException.class) + public void should_fail_to_convert_to_geo_json() { + distance.asGeoJson(); + } + + @Test + public void should_serialize_and_deserialize() throws Exception { + assertThat(SerializationUtils.serializeAndDeserialize(distance)).isEqualTo(distance); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java new file mode 100644 index 00000000000..0bedb9e5821 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; + +public class SerializationUtils { + + public static Object serializeAndDeserialize(Geometry geometry) + throws IOException, ClassNotFoundException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ObjectOutputStream out = new ObjectOutputStream(baos); + + out.writeObject(geometry); + + byte[] bytes = baos.toByteArray(); + if (!(geometry instanceof Distance)) { + byte[] wkb = Bytes.getArray(geometry.asWellKnownBinary()); + assertThat(bytes).containsSequence(wkb); + } + ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes)); + return in.readObject(); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java new file mode 100644 index 00000000000..2d34ba5006b --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java @@ -0,0 +1,377 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_3_0; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyPath; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class GraphNodeTest { + + @Test + public void should_create_graph_node_for_set_for_graphson_3_0() throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_3_0)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, 
GRAPHSON_3_0); + + // then + assertThat(graphNode.isSet()).isTrue(); + Set set = graphNode.asSet(); + assertThat(set).isEqualTo(ImmutableSet.of("value")); + } + + @Test + public void should_not_support_set_for_graphson_2_0() throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_2_0)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_2_0); + + // then + assertThat(graphNode.isSet()).isFalse(); + } + + @Test + public void should_throw_for_set_for_graphson_1_0() throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_1_0)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_1_0); + + // then + assertThat(graphNode.isSet()).isFalse(); + assertThatThrownBy(graphNode::asSet).isExactlyInstanceOf(UnsupportedOperationException.class); + } + + @Test + @UseDataProvider(value = "graphsonAllVersions") + public void should_create_graph_node_for_list(String graphVersion) throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableList.of("value"), graphVersion)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphVersion); + + // then + assertThat(graphNode.isList()).isTrue(); + List result = graphNode.asList(); + assertThat(result).isEqualTo(ImmutableList.of("value")); + } + + @Test + public void should_create_graph_node_for_map_for_graphson_3_0() throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), GRAPHSON_3_0)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_3_0); + + // then + assertThat(graphNode.isMap()).isTrue(); + Map result = graphNode.asMap(); + assertThat(result).isEqualTo(ImmutableMap.of(12, 1234)); + } + + 
@Test + @UseDataProvider("graphsonAllVersions") + public void should_create_graph_node_for_map(String graphsonVersion) throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableMap.of("value", 1234), graphsonVersion)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphsonVersion); + + // then + assertThat(graphNode.isMap()).isTrue(); + Map result = graphNode.asMap(); + assertThat(result).isEqualTo(ImmutableMap.of("value", 1234)); + } + + @Test + @UseDataProvider("graphson1_0and2_0") + public void should_create_graph_node_for_map_for_non_string_key(String graphsonVersion) + throws IOException { + // given + ImmutableList bytes = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphsonVersion)); + + // when + GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphsonVersion); + + // then + assertThat(graphNode.isMap()).isTrue(); + Map result = graphNode.asMap(); + assertThat(result).isEqualTo(ImmutableMap.of("12", 1234)); + } + + @Test + @UseDataProvider(value = "graphsonAllVersions") + public void should_calculate_size_of_collection_types(String graphVersion) throws IOException { + // given + ImmutableList map = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphVersion)); + + ImmutableList set = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableSet.of(12, 1234), graphVersion)); + + ImmutableList list = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableList.of(12, 1234, 99999), graphVersion)); + + // when + GraphNode mapNode = GraphSONUtils.createGraphNode(map, graphVersion); + GraphNode setNode = GraphSONUtils.createGraphNode(set, graphVersion); + GraphNode listNode = GraphSONUtils.createGraphNode(list, graphVersion); + + // then + assertThat(mapNode.size()).isEqualTo(1); + assertThat(setNode.size()).isEqualTo(2); + assertThat(listNode.size()).isEqualTo(3); 
+ } + + @Test + @UseDataProvider(value = "graphsonAllVersions") + public void should_return_is_value_only_for_scalar_value(String graphVersion) throws IOException { + // given + ImmutableList map = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphVersion)); + + ImmutableList set = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableSet.of(12, 1234), graphVersion)); + + ImmutableList list = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(ImmutableList.of(12, 1234, 99999), graphVersion)); + + ImmutableList vertex = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(new DetachedVertex("a", "l", null), graphVersion)); + + ImmutableList edge = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + new DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), + graphVersion)); + + ImmutableList path = + ImmutableList.of(GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), graphVersion)); + + ImmutableList property = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), graphVersion)); + + ImmutableList vertexProperty = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + new DetachedVertexProperty<>( + "id", "l", "v", null, new DetachedVertex("a", "l", null)), + graphVersion)); + + ImmutableList scalarValue = + ImmutableList.of(GraphSONUtils.serializeToByteBuffer(true, graphVersion)); + + // when + GraphNode mapNode = GraphSONUtils.createGraphNode(map, graphVersion); + GraphNode setNode = GraphSONUtils.createGraphNode(set, graphVersion); + GraphNode listNode = GraphSONUtils.createGraphNode(list, graphVersion); + GraphNode vertexNode = GraphSONUtils.createGraphNode(vertex, graphVersion); + GraphNode edgeNode = GraphSONUtils.createGraphNode(edge, graphVersion); + GraphNode pathNode = GraphSONUtils.createGraphNode(path, graphVersion); + GraphNode propertyNode = GraphSONUtils.createGraphNode(property, graphVersion); + GraphNode 
vertexPropertyNode = GraphSONUtils.createGraphNode(vertexProperty, graphVersion); + GraphNode scalarValueNode = GraphSONUtils.createGraphNode(scalarValue, graphVersion); + + // then + assertThat(mapNode.isValue()).isFalse(); + assertThat(setNode.isValue()).isFalse(); + assertThat(listNode.isValue()).isFalse(); + assertThat(vertexNode.isValue()).isFalse(); + assertThat(edgeNode.isValue()).isFalse(); + assertThat(pathNode.isValue()).isFalse(); + assertThat(propertyNode.isValue()).isFalse(); + assertThat(vertexPropertyNode.isValue()).isFalse(); + assertThat(scalarValueNode.isValue()).isTrue(); + } + + @Test + @UseDataProvider("graphson2_0and3_0") + public void should_check_if_node_is_property_not_map(String graphVersion) throws IOException { + // given + ImmutableList property = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), graphVersion)); + + // when + GraphNode propertyNode = GraphSONUtils.createGraphNode(property, graphVersion); + + // then + assertThat(propertyNode.isProperty()).isTrue(); + assertThat(propertyNode.isMap()).isFalse(); + assertThat(propertyNode.asProperty()).isNotNull(); + } + + @Test + public void should_check_if_node_is_property_or_map_for_1_0() throws IOException { + // given + ImmutableList property = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), GRAPHSON_1_0)); + + // when + GraphNode propertyNode = GraphSONUtils.createGraphNode(property, GRAPHSON_1_0); + + // then + assertThat(propertyNode.isProperty()).isTrue(); + assertThat(propertyNode.isMap()).isTrue(); + assertThat(propertyNode.asProperty()).isNotNull(); + } + + @Test + @UseDataProvider("graphsonAllVersions") + public void should_check_if_node_is_vertex_property(String graphVersion) throws IOException { + // given + ImmutableList vertexProperty = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + new DetachedVertexProperty<>( + "id", "l", "v", null, new DetachedVertex("a", "l", null)), + 
graphVersion)); + + // when + GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(vertexProperty, graphVersion); + + // then + assertThat(vertexPropertyNode.isVertexProperty()).isTrue(); + assertThat(vertexPropertyNode.isVertexProperty()).isNotNull(); + } + + @Test + public void should_check_if_node_is_path_for_graphson_1_0() throws IOException { + // given + ImmutableList path = + ImmutableList.of(GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), GRAPHSON_1_0)); + + // when + GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(path, GRAPHSON_1_0); + + // then + assertThat(vertexPropertyNode.isPath()).isFalse(); + assertThatThrownBy(vertexPropertyNode::asPath) + .isExactlyInstanceOf(UnsupportedOperationException.class); + } + + @Test + @UseDataProvider("graphson2_0and3_0") + public void should_check_if_node_is_path(String graphsonVersion) throws IOException { + // given + ImmutableList path = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), graphsonVersion)); + + // when + GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(path, graphsonVersion); + + // then + assertThat(vertexPropertyNode.isPath()).isTrue(); + assertThat(vertexPropertyNode.asPath()).isNotNull(); + } + + @Test + @UseDataProvider("graphsonAllVersions") + public void should_check_if_node_is_vertex(String graphsonVersion) throws IOException { + // given + ImmutableList vertex = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + new DetachedVertex("a", "l", null), graphsonVersion)); + + // when + GraphNode vertexNode = GraphSONUtils.createGraphNode(vertex, graphsonVersion); + + // then + assertThat(vertexNode.isVertex()).isTrue(); + assertThat(vertexNode.asVertex()).isNotNull(); + } + + @Test + @UseDataProvider("graphsonAllVersions") + public void should_check_if_node_is_edge(String graphsonVersion) throws IOException { + // given + ImmutableList edge = + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + new 
DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), + graphsonVersion)); + + // when + GraphNode edgeNode = GraphSONUtils.createGraphNode(edge, graphsonVersion); + + // then + assertThat(edgeNode.isEdge()).isTrue(); + assertThat(edgeNode.asEdge()).isNotNull(); + } + + @DataProvider + public static Object[][] graphsonAllVersions() { + return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPHSON_3_0}}; + } + + @DataProvider + public static Object[][] graphson1_0and2_0() { + return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}}; + } + + @DataProvider + public static Object[][] graphson2_0and3_0() { + return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}}; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java new file mode 100644 index 00000000000..b658903dd79 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -0,0 +1,414 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.BIGINT; +import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.TEXT; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.matches; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.protocol.internal.request.RawBytesQuery; +import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import 
com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.protocol.internal.response.result.ColumnSpec; +import com.datastax.oss.protocol.internal.response.result.DefaultRows; +import com.datastax.oss.protocol.internal.response.result.RawType; +import com.datastax.oss.protocol.internal.response.result.RowsMetadata; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ExecutionException; +import java.util.regex.Pattern; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; + +// TODO subProtocol is hard-coded to graphson-2.0 everywhere, we could parameterize the tests +public class GraphRequestHandlerTest { + + private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test-graph\\|\\d*\\|\\d*"); + + @Mock DefaultNode node; + + @Mock protected NodeMetricUpdater nodeMetricUpdater1; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + Mockito.when(node.getMetricUpdater()).thenReturn(nodeMetricUpdater1); + } + + @Test + public void 
should_create_query_message_from_script_statement() { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + String subProtocol = "graphson-2.0"; + + // when + Message m = + GraphConversions.createMessageFromGraphStatement( + graphStatement, + subProtocol, + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), + harness.getContext()); + + // checks + assertThat(m).isInstanceOf(Query.class); + assertThat(((Query) m).query).isEqualTo("mockQuery"); + } + + @Test + public void should_create_query_message_from_fluent_statement() throws IOException { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphTraversal traversalTest = DseGraph.g.V().has("name", "marko"); + GraphStatement graphStatement = FluentGraphStatement.newInstance(traversalTest); + String subProtocol = "graphson-2.0"; + + // when + Message m = + GraphConversions.createMessageFromGraphStatement( + graphStatement, + subProtocol, + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), + harness.getContext()); + + // checks + assertThat(m).isInstanceOf(RawBytesQuery.class); + assertThat(((RawBytesQuery) m).query) + .isEqualTo(GraphSONUtils.serializeToBytes(traversalTest, subProtocol)); + } + + @Test + public void should_create_query_message_from_batch_statement() throws IOException { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + List traversalsTest = + ImmutableList.of( + DseGraph.g.addV("person").property("key1", "value1"), + DseGraph.g.addV("software").property("key2", "value2")); + GraphStatement graphStatement = + BatchGraphStatement.builder().addTraversals(traversalsTest).build(); + String subProtocol = "graphson-2.0"; + + // when + Message m = + GraphConversions.createMessageFromGraphStatement( 
+ graphStatement, + subProtocol, + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), + harness.getContext()); + + // checks + assertThat(m).isInstanceOf(RawBytesQuery.class); + assertThat(((RawBytesQuery) m).query) + .isEqualTo(GraphSONUtils.serializeToBytes(traversalsTest, subProtocol)); + } + + @Test + public void should_set_correct_query_options_from_graph_statement() throws IOException { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setQueryParam("name", "value"); + String subProtocol = "graphson-2.0"; + + // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Message m = + GraphConversions.createMessageFromGraphStatement( + graphStatement, subProtocol, executionProfile, harness.getContext()); + + // checks + Query query = ((Query) m); + DseQueryOptions options = ((DseQueryOptions) query.options); + assertThat(options.consistency) + .isEqualTo( + DefaultConsistencyLevel.valueOf( + executionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .getProtocolCode()); + // set by the mock timestamp generator + assertThat(options.defaultTimestamp).isEqualTo(-9223372036854775808L); + assertThat(options.positionalValues) + .isEqualTo( + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + ImmutableMap.of("name", "value"), subProtocol))); + + m = + GraphConversions.createMessageFromGraphStatement( + graphStatement.setTimestamp(2L), + subProtocol, + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), + harness.getContext()); + query = ((Query) m); + options = ((DseQueryOptions) query.options); + assertThat(options.defaultTimestamp).isEqualTo(2L); + } + + @Test + public void should_create_payload_from_config_options() { + // initialization + RequestHandlerTestHarness harness = 
GraphRequestHandlerTestHarness.builder().build(); + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); + String subProtocol = "graphson-2.0"; + + // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + + Map requestPayload = + GraphConversions.createCustomPayload( + graphStatement, subProtocol, executionProfile, harness.getContext()); + + // checks + Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null); + Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_NAME, null); + Mockito.verify(executionProfile).getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); + Mockito.verify(executionProfile).getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); + Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null); + + assertThat(requestPayload.get(GraphConversions.GRAPH_SOURCE_OPTION_KEY)) + .isEqualTo(TEXT.encode("a", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_RESULTS_OPTION_KEY)) + .isEqualTo(TEXT.encode(subProtocol, harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)) + .isEqualTo(TEXT.encode("mockGraph", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_LANG_OPTION_KEY)) + .isEqualTo(TEXT.encode("gremlin-groovy", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_TIMEOUT_OPTION_KEY)) + .isEqualTo(BIGINT.encode(2L, harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY)) + .isEqualTo(TEXT.encode("LOCAL_TWO", harness.getContext().getProtocolVersion())); 
+ assertThat(requestPayload.get(GraphConversions.GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY)) + .isEqualTo(TEXT.encode("LOCAL_THREE", harness.getContext().getProtocolVersion())); + } + + @Test + public void should_create_payload_from_statement_options() { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphStatement graphStatement = + ScriptGraphStatement.builder("mockQuery") + .setGraphName("mockGraph") + .setTraversalSource("a") + .setTimeout(Duration.ofMillis(2)) + .setReadConsistencyLevel(DefaultConsistencyLevel.TWO) + .setWriteConsistencyLevel(DefaultConsistencyLevel.THREE) + .setSystemQuery(false) + .build(); + String subProtocol = "graphson-2.0"; + + // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + + Map requestPayload = + GraphConversions.createCustomPayload( + graphStatement, subProtocol, executionProfile, harness.getContext()); + + // checks + Mockito.verify(executionProfile, never()) + .getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null); + Mockito.verify(executionProfile, never()).getString(DseDriverOption.GRAPH_NAME, null); + Mockito.verify(executionProfile, never()) + .getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); + Mockito.verify(executionProfile, never()).getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + Mockito.verify(executionProfile, never()) + .getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); + Mockito.verify(executionProfile, never()) + .getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null); + + assertThat(requestPayload.get(GraphConversions.GRAPH_SOURCE_OPTION_KEY)) + .isEqualTo(TEXT.encode("a", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_RESULTS_OPTION_KEY)) + .isEqualTo(TEXT.encode(subProtocol, harness.getContext().getProtocolVersion())); + 
assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)) + .isEqualTo(TEXT.encode("mockGraph", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_LANG_OPTION_KEY)) + .isEqualTo(TEXT.encode("gremlin-groovy", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_TIMEOUT_OPTION_KEY)) + .isEqualTo(BIGINT.encode(2L, harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY)) + .isEqualTo(TEXT.encode("TWO", harness.getContext().getProtocolVersion())); + assertThat(requestPayload.get(GraphConversions.GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY)) + .isEqualTo(TEXT.encode("THREE", harness.getContext().getProtocolVersion())); + } + + @Test + public void should_not_set_graph_name_on_system_queries() { + // initialization + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setSystemQuery(true); + String subProtocol = "graphson-2.0"; + + // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + + Map requestPayload = + GraphConversions.createCustomPayload( + graphStatement, subProtocol, executionProfile, harness.getContext()); + + // checks + assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)).isNull(); + assertThat(requestPayload.get(GraphConversions.GRAPH_SOURCE_OPTION_KEY)).isNull(); + } + + @Test + public void should_return_results_for_statements() + throws IOException, ExecutionException, InterruptedException { + RequestHandlerTestHarness harness = + GraphRequestHandlerTestHarness.builder() + .withResponse(node, defaultDseFrameOf(singleGraphRow())) + .build(); + + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphResultSet grs = + new 
GraphRequestSyncProcessor(new GraphRequestAsyncProcessor()) + .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); + + List nodes = grs.all(); + assertThat(nodes.size()).isEqualTo(1); + + GraphNode node = nodes.get(0); + assertThat(node.isVertex()).isTrue(); + + Vertex v = node.asVertex(); + assertThat(v.label()).isEqualTo("person"); + assertThat(v.id()).isEqualTo(1); + assertThat(v.property("name").id()).isEqualTo(11); + assertThat(v.property("name").value()).isEqualTo("marko"); + } + + @Test + public void should_invoke_request_tracker() + throws IOException, ExecutionException, InterruptedException { + RequestHandlerTestHarness harness = + GraphRequestHandlerTestHarness.builder() + .withResponse(node, defaultDseFrameOf(singleGraphRow())) + .build(); + + RequestTracker requestTracker = mock(RequestTracker.class); + when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); + + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphResultSet grs = + new GraphRequestSyncProcessor(new GraphRequestAsyncProcessor()) + .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); + + List nodes = grs.all(); + assertThat(nodes.size()).isEqualTo(1); + + GraphNode graphNode = nodes.get(0); + assertThat(graphNode.isVertex()).isTrue(); + + Vertex v = graphNode.asVertex(); + assertThat(v.label()).isEqualTo("person"); + assertThat(v.id()).isEqualTo(1); + assertThat(v.property("name").id()).isEqualTo(11); + assertThat(v.property("name").value()).isEqualTo("marko"); + + verify(requestTracker) + .onSuccess( + eq(graphStatement), + anyLong(), + any(DriverExecutionProfile.class), + eq(node), + matches(LOG_PREFIX_PER_REQUEST)); + verifyNoMoreInteractions(requestTracker); + } + + private static Frame defaultDseFrameOf(Message responseMessage) { + return Frame.forResponse( + DseProtocolVersion.DSE_V2.getCode(), + 0, + null, + Frame.NO_PAYLOAD, + Collections.emptyList(), + 
responseMessage); + } + + // Returns a single row, with a single "message" column with the value "hello, world" + private static Message singleGraphRow() throws IOException { + RowsMetadata metadata = + new RowsMetadata( + ImmutableList.of( + new ColumnSpec( + "ks", + "table", + "gremlin", + 0, + RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + null, + new int[] {}, + null); + Queue> data = new ArrayDeque<>(); + data.add( + ImmutableList.of( + GraphSONUtils.serializeToByteBuffer( + ImmutableMap.of( + "result", + DetachedVertex.build() + .setId(1) + .setLabel("person") + .addProperty( + DetachedVertexProperty.build() + .setId(11) + .setLabel("name") + .setValue("marko") + .create()) + .create()), + "graphson-2.0"))); + return new DefaultRows(metadata, data); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java new file mode 100644 index 00000000000..b423041588c --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -0,0 +1,112 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import java.time.Duration; +import org.mockito.Mock; +import org.mockito.Mockito; + +/** + * Provides the environment to test a request handler, where a query plan can be defined, and the + * behavior of each successive node simulated. + */ +public class GraphRequestHandlerTestHarness extends RequestHandlerTestHarness { + + @Mock DriverExecutionProfile testProfile; + + @Mock DriverExecutionProfile systemQueryExecutionProfile; + + protected GraphRequestHandlerTestHarness(Builder builder) { + super(builder); + + // default graph options as in the reference.conf file + Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)) + .thenReturn("g"); + Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) + .thenReturn("graphson-2.0"); + Mockito.when(defaultProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) + .thenReturn(false); + Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_NAME, null)) + .thenReturn("mockGraph"); + + Mockito.when(testProfile.getName()).thenReturn("default"); + Mockito.when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + .thenReturn(Duration.ofMillis(500L)); + Mockito.when(testProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); + Mockito.when(testProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).thenReturn(5000); + 
Mockito.when(testProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .thenReturn(DefaultConsistencyLevel.SERIAL.name()); + Mockito.when(testProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)) + .thenReturn(false); + Mockito.when(testProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)).thenReturn(true); + Mockito.when(testProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)) + .thenReturn("a"); + Mockito.when(testProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) + .thenReturn("testMock"); + Mockito.when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + .thenReturn(Duration.ofMillis(2)); + Mockito.when(testProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) + .thenReturn(false); + Mockito.when(testProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); + Mockito.when(testProfile.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) + .thenReturn("LOCAL_TWO"); + Mockito.when(testProfile.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) + .thenReturn("LOCAL_THREE"); + + Mockito.when(config.getProfile("test-graph")).thenReturn(testProfile); + + Mockito.when(systemQueryExecutionProfile.getName()).thenReturn("default"); + Mockito.when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + .thenReturn(Duration.ofMillis(500L)); + Mockito.when(systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); + Mockito.when(systemQueryExecutionProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)) + .thenReturn(5000); + Mockito.when( + systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + .thenReturn(DefaultConsistencyLevel.SERIAL.name()); + Mockito.when( + systemQueryExecutionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)) + .thenReturn(false); + 
Mockito.when(systemQueryExecutionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)) + .thenReturn(true); + Mockito.when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); + Mockito.when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + .thenReturn(Duration.ofMillis(2)); + Mockito.when( + systemQueryExecutionProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) + .thenReturn(true); + Mockito.when( + systemQueryExecutionProfile.getString( + DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) + .thenReturn("LOCAL_TWO"); + Mockito.when( + systemQueryExecutionProfile.getString( + DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) + .thenReturn("LOCAL_THREE"); + + Mockito.when(config.getProfile("graph-system-query")).thenReturn(systemQueryExecutionProfile); + } + + public static GraphRequestHandlerTestHarness.Builder builder() { + return new GraphRequestHandlerTestHarness.Builder(); + } + + public static class Builder extends RequestHandlerTestHarness.Builder { + + @Override + public RequestHandlerTestHarness build() { + return new GraphRequestHandlerTestHarness(this); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java new file mode 100644 index 00000000000..cbaf6f1995d --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java @@ -0,0 +1,42 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class AddressFormatterTest { + + @Test + @UseDataProvider("addressesProvider") + public void should_format_addresses(Object address, String expected) { + // when + String result = AddressFormatter.nullSafeToString(address); + + // then + assertThat(result).isEqualTo(expected); + } + + @DataProvider + public static Object[][] addressesProvider() throws UnknownHostException { + return new Object[][] { + {new InetSocketAddress(8888), "0.0.0.0:8888"}, + {new InetSocketAddress("127.0.0.1", 8888), "127.0.0.1:8888"}, + {InetSocketAddress.createUnresolved("127.0.0.2", 8080), "127.0.0.2:8080"}, + {InetAddress.getByName("127.0.0.1"), "127.0.0.1"}, + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java new file mode 100644 index 00000000000..064d030d4d9 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java @@ -0,0 +1,74 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Collections; +import java.util.Map; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class ConfigAntiPatternsFinderTest { + + private static final ImmutableMap SSL_ANTI_PATTERN = + ImmutableMap.of( + "sslWithoutCertValidation", + "Client-to-node encryption is enabled but server certificate validation is disabled"); + + @Test + @UseDataProvider("sslConfigProvider") + public void should_find_ssl_anti_pattern( + boolean sslEngineFactoryClassDefined, + boolean hostnameValidation, + Map expected) { + // given + DseDriverContext dseDriverContext = + mockDefaultProfile(sslEngineFactoryClassDefined, hostnameValidation); + + // when + Map antiPatterns = + new ConfigAntiPatternsFinder().findAntiPatterns(dseDriverContext); + + // then + assertThat(antiPatterns).isEqualTo(expected); + } + + private DseDriverContext mockDefaultProfile( + boolean sslEngineFactoryClassDefined, boolean hostnameValidation) { + DseDriverContext dseDriverContext = 
mock(DseDriverContext.class); + DriverConfig driverConfig = mock(DriverConfig.class); + when(dseDriverContext.getConfig()).thenReturn(driverConfig); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + when(profile.isDefined(SSL_ENGINE_FACTORY_CLASS)).thenReturn(sslEngineFactoryClassDefined); + when(profile.getBoolean(SSL_HOSTNAME_VALIDATION, false)).thenReturn(hostnameValidation); + when(driverConfig.getDefaultProfile()).thenReturn(profile); + return dseDriverContext; + } + + @DataProvider + public static Object[][] sslConfigProvider() { + return new Object[][] { + {true, true, Collections.emptyMap()}, + {true, false, SSL_ANTI_PATTERN}, + {false, false, Collections.emptyMap()}, + {false, true, Collections.emptyMap()} + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java new file mode 100644 index 00000000000..dfcf0cc04a6 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Collection; +import java.util.Set; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DataCentersFinderTest { + + @Test + @UseDataProvider("hostProvider") + public void should_detect_data_centers( + int numberOfRemoteHosts, + String dc1, + NodeDistance h1Distance, + String dc2, + NodeDistance h2Distance, + Set expected) { + // given + DriverExecutionProfile executionProfile = mock(DriverExecutionProfile.class); + when(executionProfile.getInt(CONNECTION_POOL_REMOTE_SIZE)).thenReturn(numberOfRemoteHosts); + Collection nodes = mockNodes(dc1, h1Distance, dc2, h2Distance); + + // when + Set dataCenters = new DataCentersFinder().getDataCenters(nodes, executionProfile); + + // then + assertThat(dataCenters).isEqualTo(Sets.newHashSet(expected)); + } + + @DataProvider + public static Object[][] hostProvider() { + return new Object[][] { + {1, "dc1", NodeDistance.LOCAL, "dc2", NodeDistance.REMOTE, Sets.newHashSet("dc1", "dc2")}, + {1, "dc1", NodeDistance.LOCAL, "dc1", NodeDistance.REMOTE, 
Sets.newHashSet("dc1")}, + {0, "dc1", NodeDistance.LOCAL, "dc2", NodeDistance.REMOTE, Sets.newHashSet("dc1")}, + {0, "dc1", NodeDistance.IGNORED, "dc2", NodeDistance.REMOTE, Sets.newHashSet()}, + {1, "dc1", NodeDistance.IGNORED, "dc2", NodeDistance.REMOTE, Sets.newHashSet("dc2")}, + {1, "dc1", NodeDistance.LOCAL, "dc2", NodeDistance.IGNORED, Sets.newHashSet("dc1")}, + {0, "dc1", NodeDistance.IGNORED, "dc2", NodeDistance.REMOTE, Sets.newHashSet()}, + {0, "dc1", NodeDistance.LOCAL, "dc2", NodeDistance.IGNORED, Sets.newHashSet("dc1")}, + }; + } + + private Collection mockNodes( + String dc1, NodeDistance h1Distance, String dc2, NodeDistance h2Distance) { + Node n1 = mock(Node.class); + when(n1.getDatacenter()).thenReturn(dc1); + when(n1.getDistance()).thenReturn(h1Distance); + + Node n2 = mock(Node.class); + when(n2.getDatacenter()).thenReturn(dc2); + when(n2.getDistance()).thenReturn(h2Distance); + + return ImmutableSet.of(n1, n2); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java new file mode 100644 index 00000000000..523aac1d6a3 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java @@ -0,0 +1,101 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.api.core.config.DseDriverOption.GRAPH_TRAVERSAL_SOURCE; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.AUTH_PROVIDER_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.HEARTBEAT_INTERVAL; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.PROTOCOL_COMPRESSION; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.RECONNECTION_BASE_DELAY; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.REQUEST_CONSISTENCY; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.REQUEST_TIMEOUT; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SPECULATIVE_EXECUTION_DELAY; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SPECULATIVE_EXECUTION_MAX; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static 
org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import java.time.Duration; + +class ExecutionProfileMockUtil { + static final String DEFAULT_LOCAL_DC = "local-dc"; + static final int SPECEX_MAX_DEFAULT = 100; + static final int SPECEX_DELAY_DEFAULT = 20; + + static DriverExecutionProfile mockDefaultExecutionProfile() { + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + + when(profile.getDuration(REQUEST_TIMEOUT)).thenReturn(Duration.ofMillis(100)); + when(profile.getString(LOAD_BALANCING_POLICY_CLASS)).thenReturn("LoadBalancingPolicyImpl"); + when(profile.isDefined(LOAD_BALANCING_FILTER_CLASS)).thenReturn(true); + when(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(DEFAULT_LOCAL_DC); + when(profile.isDefined(SPECULATIVE_EXECUTION_MAX)).thenReturn(true); + when(profile.getInt(SPECULATIVE_EXECUTION_MAX)).thenReturn(SPECEX_MAX_DEFAULT); + when(profile.isDefined(SPECULATIVE_EXECUTION_DELAY)).thenReturn(true); + when(profile.getInt(SPECULATIVE_EXECUTION_DELAY)).thenReturn(SPECEX_DELAY_DEFAULT); + when(profile.getString(SPECULATIVE_EXECUTION_POLICY_CLASS)) + .thenReturn("SpeculativeExecutionImpl"); + when(profile.getString(REQUEST_CONSISTENCY)).thenReturn("LOCAL_ONE"); + when(profile.getString(REQUEST_SERIAL_CONSISTENCY)).thenReturn("SERIAL"); + when(profile.getInt(CONNECTION_POOL_LOCAL_SIZE)).thenReturn(2); + when(profile.getInt(CONNECTION_POOL_REMOTE_SIZE)).thenReturn(1); + when(profile.getString(eq(PROTOCOL_COMPRESSION), any())).thenReturn("none"); + when(profile.getDuration(HEARTBEAT_INTERVAL)).thenReturn(Duration.ofMillis(100)); + when(profile.getDuration(RECONNECTION_BASE_DELAY)).thenReturn(Duration.ofMillis(100)); + when(profile.isDefined(SSL_ENGINE_FACTORY_CLASS)).thenReturn(true); + when(profile.getString(eq(AUTH_PROVIDER_CLASS), any())).thenReturn("AuthProviderImpl"); + when(profile.getString(GRAPH_TRAVERSAL_SOURCE, 
null)).thenReturn("src-graph"); + return profile; + } + + static DriverExecutionProfile mockNonDefaultRequestTimeoutExecutionProfile() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getDuration(REQUEST_TIMEOUT)).thenReturn(Duration.ofMillis(50)); + return profile; + } + + static DriverExecutionProfile mockNonDefaultLoadBalancingExecutionProfile() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getString(LOAD_BALANCING_POLICY_CLASS)).thenReturn("NonDefaultLoadBalancing"); + return profile; + } + + static DriverExecutionProfile mockNonDefaultSpeculativeExecutionInfo() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getString(SPECULATIVE_EXECUTION_POLICY_CLASS)) + .thenReturn("NonDefaultSpecexPolicy"); + return profile; + } + + static DriverExecutionProfile mockNonDefaultConsistency() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getString(REQUEST_CONSISTENCY)).thenReturn("ALL"); + return profile; + } + + static DriverExecutionProfile mockNonDefaultSerialConsistency() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getString(REQUEST_SERIAL_CONSISTENCY)).thenReturn("ONE"); + return profile; + } + + static DriverExecutionProfile mockNonDefaultGraphOptions() { + DriverExecutionProfile profile = mockDefaultExecutionProfile(); + when(profile.getString(GRAPH_TRAVERSAL_SOURCE, null)).thenReturn("non-default-graph"); + return profile; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java new file mode 100644 index 00000000000..0d4d1bdc198 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java @@ -0,0 +1,210 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.DEFAULT_LOCAL_DC; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.SPECEX_DELAY_DEFAULT; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.SPECEX_MAX_DEFAULT; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockDefaultExecutionProfile; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultConsistency; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultGraphOptions; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultLoadBalancingExecutionProfile; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultRequestTimeoutExecutionProfile; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultSerialConsistency; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultSpeculativeExecutionInfo; +import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_LOAD_BALANCING_PACKAGE; +import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_SPECULATIVE_EXECUTION_PACKAGE; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.schema.LoadBalancingInfo; +import 
com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; +import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Map; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; + +@RunWith(DataProviderRunner.class) +public class ExecutionProfilesInfoFinderTest { + + @Test + public void should_include_info_about_default_profile() { + // given + DriverExecutionProfile defaultExecutionProfile = mockDefaultExecutionProfile(); + Map profiles = + ImmutableMap.of("default", defaultExecutionProfile); + + DseDriverContext dseDriverContext = + mockDriverContextWithProfiles(defaultExecutionProfile, profiles); + + // when + Map executionProfilesInfo = + new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(dseDriverContext); + + // then + assertThat(executionProfilesInfo) + .isEqualTo( + ImmutableMap.of( + "default", + new SpecificExecutionProfile( + 100, + new LoadBalancingInfo( + "LoadBalancingPolicyImpl", + ImmutableMap.of("localDataCenter", "local-dc", "filterFunction", true), + DEFAULT_LOAD_BALANCING_PACKAGE), + new SpeculativeExecutionInfo( + "SpeculativeExecutionImpl", + ImmutableMap.of("maxSpeculativeExecutions", 100, "delay", 20), + DEFAULT_SPECULATIVE_EXECUTION_PACKAGE), + "LOCAL_ONE", + "SERIAL", + ImmutableMap.of("source", "src-graph")))); + } + + @Test + @UseDataProvider("executionProfileProvider") + public void 
should_include_info_about_default_profile_and_only_difference_for_specific_profile( + DriverExecutionProfile nonDefaultExecutionProfile, SpecificExecutionProfile expected) { + // given + + DriverExecutionProfile defaultExecutionProfile = mockDefaultExecutionProfile(); + Map profiles = + ImmutableMap.of( + "default", defaultExecutionProfile, "non-default", nonDefaultExecutionProfile); + DseDriverContext dseDriverContext = + mockDriverContextWithProfiles(defaultExecutionProfile, profiles); + // when + Map executionProfilesInfo = + new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(dseDriverContext); + + // then + assertThat(executionProfilesInfo) + .isEqualTo( + ImmutableMap.of( + "default", + new SpecificExecutionProfile( + 100, + new LoadBalancingInfo( + "LoadBalancingPolicyImpl", + ImmutableMap.of("localDataCenter", "local-dc", "filterFunction", true), + DEFAULT_LOAD_BALANCING_PACKAGE), + new SpeculativeExecutionInfo( + "SpeculativeExecutionImpl", + ImmutableMap.of("maxSpeculativeExecutions", 100, "delay", 20), + DEFAULT_SPECULATIVE_EXECUTION_PACKAGE), + "LOCAL_ONE", + "SERIAL", + ImmutableMap.of("source", "src-graph")), + "non-default", + expected)); + } + + @DataProvider + public static Object[][] executionProfileProvider() { + return new Object[][] { + { + mockNonDefaultRequestTimeoutExecutionProfile(), + new SpecificExecutionProfile(50, null, null, null, null, null) + }, + { + mockNonDefaultLoadBalancingExecutionProfile(), + new SpecificExecutionProfile( + null, + new LoadBalancingInfo( + "NonDefaultLoadBalancing", + ImmutableMap.of("localDataCenter", DEFAULT_LOCAL_DC, "filterFunction", true), + DEFAULT_LOAD_BALANCING_PACKAGE), + null, + null, + null, + null) + }, + { + mockNonDefaultSpeculativeExecutionInfo(), + new SpecificExecutionProfile( + null, + null, + new SpeculativeExecutionInfo( + "NonDefaultSpecexPolicy", + ImmutableMap.of( + "maxSpeculativeExecutions", SPECEX_MAX_DEFAULT, "delay", SPECEX_DELAY_DEFAULT), + 
DEFAULT_SPECULATIVE_EXECUTION_PACKAGE), + null, + null, + null) + }, + { + mockNonDefaultConsistency(), + new SpecificExecutionProfile(null, null, null, "ALL", null, null) + }, + { + mockNonDefaultSerialConsistency(), + new SpecificExecutionProfile(null, null, null, null, "ONE", null) + }, + { + mockNonDefaultGraphOptions(), + new SpecificExecutionProfile( + null, null, null, null, null, ImmutableMap.of("source", "non-default-graph")) + }, + { + mockDefaultExecutionProfile(), + new SpecificExecutionProfile(null, null, null, null, null, null) + } + }; + } + + @Test + public void should_not_include_null_fields_in_json() throws JsonProcessingException { + // given + SpecificExecutionProfile specificExecutionProfile = + new SpecificExecutionProfile(50, null, null, "ONE", null, ImmutableMap.of("a", "b")); + + // when + String result = new ObjectMapper().writeValueAsString(specificExecutionProfile); + + // then + assertThat(result) + .isEqualTo("{\"readTimeout\":50,\"consistency\":\"ONE\",\"graphOptions\":{\"a\":\"b\"}}"); + } + + @Test + public void should_include_empty_execution_profile_if_has_all_nulls() + throws JsonProcessingException { + // given + Map executionProfiles = + ImmutableMap.of("p", new SpecificExecutionProfile(null, null, null, null, null, null)); + + // when + String result = new ObjectMapper().writeValueAsString(executionProfiles); + + // then + assertThat(result).isEqualTo("{\"p\":{}}"); + } + + private DseDriverContext mockDriverContextWithProfiles( + DriverExecutionProfile defaultExecutionProfile, + Map profiles) { + DseDriverContext dseDriverContext = mock(DseDriverContext.class); + DriverConfig driverConfig = mock(DriverConfig.class); + Mockito.>when(driverConfig.getProfiles()) + .thenReturn(profiles); + when(driverConfig.getDefaultProfile()).thenReturn(defaultExecutionProfile); + when(dseDriverContext.getConfig()).thenReturn(driverConfig); + return dseDriverContext; + } +} diff --git 
a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java new file mode 100644 index 00000000000..1b99a29fec5 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -0,0 +1,484 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V2; +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_NAME_KEY; +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_VERSION_KEY; +import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.CLIENT_ID_KEY; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockDefaultExecutionProfile; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultRequestTimeoutExecutionProfile; +import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_AUTH_PROVIDER_PACKAGE; +import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_LOAD_BALANCING_PACKAGE; +import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_SPECULATIVE_EXECUTION_PACKAGE; +import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_NAME_KEY; +import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_VERSION_KEY; +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; +import static org.awaitility.Duration.ONE_SECOND; +import static 
org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; +import com.datastax.dse.driver.internal.core.insights.schema.AuthProviderType; +import com.datastax.dse.driver.internal.core.insights.schema.Insight; +import com.datastax.dse.driver.internal.core.insights.schema.InsightMetadata; +import com.datastax.dse.driver.internal.core.insights.schema.InsightType; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsPlatformInfo; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsPlatformInfo.CPUS; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsPlatformInfo.OS; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsPlatformInfo.RuntimeAndCompileTimeVersions; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsStartupData; +import com.datastax.dse.driver.internal.core.insights.schema.InsightsStatusData; +import com.datastax.dse.driver.internal.core.insights.schema.LoadBalancingInfo; +import com.datastax.dse.driver.internal.core.insights.schema.PoolSizeByHostDistance; +import com.datastax.dse.driver.internal.core.insights.schema.ReconnectionPolicyInfo; +import com.datastax.dse.driver.internal.core.insights.schema.SSL; +import com.datastax.dse.driver.internal.core.insights.schema.SessionStateForNode; +import com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; +import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import 
com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.control.ControlConnection; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.PoolManager; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.netty.channel.DefaultEventLoop; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; + +@RunWith(DataProviderRunner.class) +public class InsightsClientTest { + private static final StackTraceElement[] EMPTY_STACK_TRACE = {}; + private static final Map EMPTY_OBJECT_MAP = Collections.emptyMap(); + private static final Supplier MOCK_TIME_SUPPLIER = () -> 1L; + private static final InsightsConfiguration INSIGHTS_CONFIGURATION = + new InsightsConfiguration(true, 300000L, new DefaultEventLoop()); + + @Test + public void should_construct_json_event_startup_message() throws IOException { + // given + 
DseDriverContext dseDriverContext = mockDseDriverContext(); + PlatformInfoFinder platformInfoFinder = mock(PlatformInfoFinder.class); + OS os = new OS("linux", "1.2", "x64"); + CPUS cpus = new CPUS(8, "intel i7"); + Map javaDeps = + ImmutableMap.of("version", new RuntimeAndCompileTimeVersions("1.8.0", "1.8.0", false)); + Map> runtimeInfo = + ImmutableMap.of("java", javaDeps); + InsightsPlatformInfo insightsPlatformInfo = new InsightsPlatformInfo(os, cpus, runtimeInfo); + when(platformInfoFinder.getInsightsPlatformInfo()).thenReturn(insightsPlatformInfo); + + ConfigAntiPatternsFinder configAntiPatternsFinder = mock(ConfigAntiPatternsFinder.class); + when(configAntiPatternsFinder.findAntiPatterns(any(DseDriverContext.class))) + .thenReturn( + ImmutableMap.of( + "contactPointsMultipleDCs", + "Contact points contain hosts from multiple data centers")); + + DataCentersFinder dataCentersFinder = mock(DataCentersFinder.class); + when(dataCentersFinder.getDataCenters(any(DseDriverContext.class))) + .thenReturn(Sets.newHashSet("dc1", "dc2")); + ReconnectionPolicyInfoFinder reconnectionPolicyInfoFinder = + mock(ReconnectionPolicyInfoFinder.class); + when(reconnectionPolicyInfoFinder.getReconnectionPolicyInfo(any(), any())) + .thenReturn( + new ReconnectionPolicyInfo( + "reconnection-policy-a", ImmutableMap.of("opt-a", 1), "com.datastax.dse")); + + InsightsClient insightsClient = + new InsightsClient( + dseDriverContext, + MOCK_TIME_SUPPLIER, + INSIGHTS_CONFIGURATION, + platformInfoFinder, + reconnectionPolicyInfoFinder, + new ExecutionProfilesInfoFinder(), + configAntiPatternsFinder, + dataCentersFinder, + EMPTY_STACK_TRACE); + + // when + String startupMessage = insightsClient.createStartupMessage(); + Insight insight = + new ObjectMapper() + .readValue(startupMessage, new TypeReference>() {}); + + // then + assertThat(insight.getMetadata()) + .isEqualTo( + new InsightMetadata( + "driver.startup", + 1L, + ImmutableMap.of("language", "java"), + InsightType.EVENT, + "v1")); + 
+ InsightsStartupData insightData = insight.getInsightData(); + assertThat(insightData.getClientId()).isEqualTo("client-id"); + assertThat(insightData.getSessionId()).isNotNull(); + assertThat(insightData.getDriverName()).isEqualTo("DataStax Enterprise Java Driver"); + assertThat(insightData.getDriverVersion()).isNotEmpty(); + assertThat(insightData.getApplicationName()).isEqualTo("app-name"); + assertThat(insightData.getApplicationVersion()).isEqualTo("1.0.0"); + assertThat(insightData.isApplicationNameWasGenerated()).isEqualTo(false); + assertThat(insightData.getContactPoints()) + .isEqualTo(ImmutableMap.of("localhost", Collections.singletonList("127.0.0.1:9999"))); + + assertThat(insightData.getInitialControlConnection()).isEqualTo("127.0.0.1:10"); + assertThat(insightData.getLocalAddress()).isEqualTo("127.0.0.1"); + assertThat(insightData.getHostName()).isNotEmpty(); + assertThat(insightData.getProtocolVersion()).isEqualTo(DSE_V2.getCode()); + assertThat(insightData.getExecutionProfiles()) + .isEqualTo( + ImmutableMap.of( + "default", + new SpecificExecutionProfile( + 100, + new LoadBalancingInfo( + "LoadBalancingPolicyImpl", + ImmutableMap.of("localDataCenter", "local-dc", "filterFunction", true), + DEFAULT_LOAD_BALANCING_PACKAGE), + new SpeculativeExecutionInfo( + "SpeculativeExecutionImpl", + ImmutableMap.of("maxSpeculativeExecutions", 100, "delay", 20), + DEFAULT_SPECULATIVE_EXECUTION_PACKAGE), + "LOCAL_ONE", + "SERIAL", + ImmutableMap.of("source", "src-graph")), + "non-default", + new SpecificExecutionProfile(50, null, null, null, null, null))); + assertThat(insightData.getPoolSizeByHostDistance()) + .isEqualTo(new PoolSizeByHostDistance(2, 1, 0)); + assertThat(insightData.getHeartbeatInterval()).isEqualTo(100); + assertThat(insightData.getCompression()).isEqualTo("none"); + assertThat(insightData.getReconnectionPolicy()) + .isEqualTo( + new ReconnectionPolicyInfo( + "reconnection-policy-a", ImmutableMap.of("opt-a", 1), "com.datastax.dse")); + 
assertThat(insightData.getSsl()).isEqualTo(new SSL(true, false)); + assertThat(insightData.getAuthProvider()) + .isEqualTo(new AuthProviderType("AuthProviderImpl", DEFAULT_AUTH_PROVIDER_PACKAGE)); + assertThat(insightData.getOtherOptions()).isEqualTo(EMPTY_OBJECT_MAP); + assertThat(insightData.getPlatformInfo()).isEqualTo(insightsPlatformInfo); + assertThat(insightData.getConfigAntiPatterns()) + .isEqualTo( + ImmutableMap.of( + "contactPointsMultipleDCs", + "Contact points contain hosts from multiple data centers")); + assertThat(insightData.getPeriodicStatusInterval()).isEqualTo(300); + assertThat(insightData.getDataCenters()).isEqualTo(Sets.newHashSet("dc1", "dc2")); + } + + @Test + public void should_group_contact_points_by_host_name() { + // given + Set contactPoints = + ImmutableSet.of( + InetSocketAddress.createUnresolved("127.0.0.1", 8080), + InetSocketAddress.createUnresolved("127.0.0.1", 8081), + InetSocketAddress.createUnresolved("127.0.0.2", 8081)); + + Map> expected = + ImmutableMap.of( + "127.0.0.1", + ImmutableList.of("127.0.0.1:8080", "127.0.0.1:8081"), + "127.0.0.2", + ImmutableList.of("127.0.0.2:8081")); + + // when + Map> resolvedContactPoints = + InsightsClient.getResolvedContactPoints(contactPoints); + + // then + assertThat(resolvedContactPoints).isEqualTo(expected); + } + + @Test + public void should_construct_json_event_status_message() throws IOException { + // given + InsightsClient insightsClient = + new InsightsClient( + mockDseDriverContext(), + MOCK_TIME_SUPPLIER, + INSIGHTS_CONFIGURATION, + null, + null, + null, + null, + null, + EMPTY_STACK_TRACE); + + // when + String statusMessage = insightsClient.createStatusMessage(); + + // then + Insight insight = + new ObjectMapper() + .readValue(statusMessage, new TypeReference>() {}); + assertThat(insight.getMetadata()) + .isEqualTo( + new InsightMetadata( + "driver.status", 1L, ImmutableMap.of("language", "java"), InsightType.EVENT, "v1")); + InsightsStatusData insightData = 
insight.getInsightData(); + assertThat(insightData.getClientId()).isEqualTo("client-id"); + assertThat(insightData.getSessionId()).isNotNull(); + assertThat(insightData.getControlConnection()).isEqualTo("127.0.0.1:10"); + assertThat(insightData.getConnectedNodes()) + .isEqualTo( + ImmutableMap.of( + "127.0.0.1:10", new SessionStateForNode(1, 10), + "127.0.0.1:20", new SessionStateForNode(2, 20))); + } + + @Test + public void should_schedule_task_with_initial_delay() { + // given + final AtomicInteger counter = new AtomicInteger(); + Runnable runnable = counter::incrementAndGet; + + // when + InsightsClient.scheduleInsightsTask(100L, Executors.newScheduledThreadPool(1), runnable); + + // then + await().atMost(ONE_SECOND).until(() -> counter.get() >= 1); + } + + @Test + @UseDataProvider(value = "stackTraceProvider") + public void should_get_caller_of_create_cluster(StackTraceElement[] stackTrace, String expected) { + // when + String result = InsightsClient.getClusterCreateCaller(stackTrace); + + // then + assertThat(result).isEqualTo(expected); + } + + @DataProvider + public static Object[][] stackTraceProvider() { + StackTraceElement[] onlyInitCall = + new StackTraceElement[] { + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + }; + + StackTraceElement[] stackTraceElementsWithoutInitCall = + new StackTraceElement[] { + new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), + new StackTraceElement( + "com.datastax.driver.core.InsightsClient", + "getClusterCreateCaller", + "InsightsClient.java", + 302) + }; + StackTraceElement[] stackTraceWithOneInitCall = + new StackTraceElement[] { + new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + }; + StackTraceElement[] stackTraceWithOneInitCallAndCaller = 
+ new StackTraceElement[] { + new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + new StackTraceElement( + "com.example.ActualCallerNameApp", "main", "ActualCallerNameApp.java", 1) + }; + + StackTraceElement[] stackTraceWithTwoInitCallsAndCaller = + new StackTraceElement[] { + new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "buildDefaultSessionAsync", + "SessionBuilder.java", + 300), + new StackTraceElement( + "com.example.ActualCallerNameApp", "main", "ActualCallerNameApp.java", 1) + }; + StackTraceElement[] stackTraceWithChainOfInitCalls = + new StackTraceElement[] { + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + new StackTraceElement( + "com.datastax.dse.driver.api.core.DseSessionBuilder", + "buildContext", + "DseSessionBuilder.java", + 100), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "buildDefaultSessionAsync", + "SessionBuilder.java", + 332), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "buildAsync", + "SessionBuilder.java", + 291), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "build", + "SessionBuilder.java", + 306) + }; + StackTraceElement[] stackTraceWithChainOfInitCallsAndCaller = + new StackTraceElement[] { + new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), + new StackTraceElement( + "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "", + "DseDriverContext.java", + 94), + new 
StackTraceElement( + "com.datastax.dse.driver.api.core.DseSessionBuilder", + "buildContext", + "DseSessionBuilder.java", + 100), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "buildDefaultSessionAsync", + "SessionBuilder.java", + 332), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "buildAsync", + "SessionBuilder.java", + 291), + new StackTraceElement( + "com.datastax.oss.driver.api.core.session.SessionBuilder", + "build", + "SessionBuilder.java", + 306), + new StackTraceElement( + "com.example.ActualCallerNameApp", "main", "ActualCallerNameApp.java", 8) + }; + + return new Object[][] { + {new StackTraceElement[] {}, InsightsClient.DEFAULT_JAVA_APPLICATION}, + {stackTraceElementsWithoutInitCall, InsightsClient.DEFAULT_JAVA_APPLICATION}, + {stackTraceWithOneInitCall, InsightsClient.DEFAULT_JAVA_APPLICATION}, + {onlyInitCall, InsightsClient.DEFAULT_JAVA_APPLICATION}, + {stackTraceWithOneInitCallAndCaller, "com.example.ActualCallerNameApp"}, + {stackTraceWithTwoInitCallsAndCaller, "com.example.ActualCallerNameApp"}, + {stackTraceWithChainOfInitCalls, InsightsClient.DEFAULT_JAVA_APPLICATION}, + {stackTraceWithChainOfInitCallsAndCaller, "com.example.ActualCallerNameApp"} + }; + } + + private DseDriverContext mockDseDriverContext() throws UnknownHostException { + DseDriverContext dseDriverContext = mock(DseDriverContext.class); + mockConnectionPools(dseDriverContext); + MetadataManager manager = mock(MetadataManager.class); + when(dseDriverContext.getMetadataManager()).thenReturn(manager); + DriverExecutionProfile defaultExecutionProfile = mockDefaultExecutionProfile(); + DriverExecutionProfile nonDefaultExecutionProfile = + mockNonDefaultRequestTimeoutExecutionProfile(); + + Map startupOptions = new HashMap<>(); + startupOptions.put(CLIENT_ID_KEY, "client-id"); + startupOptions.put(APPLICATION_VERSION_KEY, "1.0.0"); + startupOptions.put(APPLICATION_NAME_KEY, "app-name"); + 
startupOptions.put(DRIVER_VERSION_KEY, "2.x"); + startupOptions.put(DRIVER_NAME_KEY, "DataStax Enterprise Java Driver"); + + when(dseDriverContext.getStartupOptions()).thenReturn(startupOptions); + when(dseDriverContext.getProtocolVersion()).thenReturn(DSE_V2); + DefaultNode contactPoint = mock(DefaultNode.class); + EndPoint contactEndPoint = mock(EndPoint.class); + when(contactEndPoint.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 9999)); + when(contactPoint.getEndPoint()).thenReturn(contactEndPoint); + when(manager.getContactPoints()).thenReturn(ImmutableSet.of(contactPoint)); + + DriverConfig driverConfig = mock(DriverConfig.class); + when(dseDriverContext.getConfig()).thenReturn(driverConfig); + Map profiles = + ImmutableMap.of( + "default", defaultExecutionProfile, "non-default", nonDefaultExecutionProfile); + Mockito.>when(driverConfig.getProfiles()) + .thenReturn(profiles); + when(driverConfig.getDefaultProfile()).thenReturn(defaultExecutionProfile); + + ControlConnection controlConnection = mock(ControlConnection.class); + DriverChannel channel = mock(DriverChannel.class); + EndPoint controlConnectionEndpoint = mock(EndPoint.class); + when(controlConnectionEndpoint.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); + + when(channel.getEndPoint()).thenReturn(controlConnectionEndpoint); + when(channel.localAddress()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); + when(controlConnection.channel()).thenReturn(channel); + when(dseDriverContext.getControlConnection()).thenReturn(controlConnection); + return dseDriverContext; + } + + private void mockConnectionPools(DseDriverContext driverContext) { + Node node1 = mock(Node.class); + EndPoint endPoint1 = mock(EndPoint.class); + when(endPoint1.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); + when(node1.getEndPoint()).thenReturn(endPoint1); + when(node1.getOpenConnections()).thenReturn(1); + ChannelPool channelPool1 = mock(ChannelPool.class); + 
when(channelPool1.getInFlight()).thenReturn(10); + + Node node2 = mock(Node.class); + EndPoint endPoint2 = mock(EndPoint.class); + when(endPoint2.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 20)); + when(node2.getEndPoint()).thenReturn(endPoint2); + when(node2.getOpenConnections()).thenReturn(2); + ChannelPool channelPool2 = mock(ChannelPool.class); + when(channelPool2.getInFlight()).thenReturn(20); + + Map channelPools = ImmutableMap.of(node1, channelPool1, node2, channelPool2); + PoolManager poolManager = mock(PoolManager.class); + when(poolManager.getPools()).thenReturn(channelPools); + when(driverContext.getPoolManager()).thenReturn(poolManager); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java new file mode 100644 index 00000000000..ce00d27d960 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Collection; +import java.util.Collections; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class InsightsSupportVerifierTest { + + @Test + @UseDataProvider(value = "dseHostsProvider") + public void should_detect_DSE_versions_that_supports_insights( + Collection hosts, boolean expected) { + // when + boolean result = InsightsSupportVerifier.supportsInsights(hosts); + + // then + assertThat(result).isEqualTo(expected); + } + + @DataProvider + public static Object[][] dseHostsProvider() { + Node dse605 = mock(Node.class); + when(dse605.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, Version.parse("6.0.5"))); + Node dse604 = mock(Node.class); + when(dse604.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, Version.parse("6.0.4"))); + Node dse600 = mock(Node.class); + when(dse600.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, Version.parse("6.0.0"))); + Node dse5113 = mock(Node.class); + when(dse5113.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, Version.parse("5.1.13"))); + Node dse500 = 
mock(Node.class); + when(dse500.getExtras()) + .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, Version.parse("5.0.0"))); + Node nodeWithoutExtras = mock(Node.class); + when(nodeWithoutExtras.getExtras()).thenReturn(Collections.emptyMap()); + + return new Object[][] { + {ImmutableList.of(dse605), true}, + {ImmutableList.of(dse604), false}, + {ImmutableList.of(dse600), false}, + {ImmutableList.of(dse5113), true}, + {ImmutableList.of(dse500), false}, + {ImmutableList.of(dse5113, dse605), true}, + {ImmutableList.of(dse5113, dse600), false}, + {ImmutableList.of(dse500, dse600), false}, + {ImmutableList.of(), false}, + {ImmutableList.of(nodeWithoutExtras), false} + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java new file mode 100644 index 00000000000..9e7fdc72e0d --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.insights; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class PackageUtilTest { + + private static final String DEFAULT_PACKAGE = "default.package"; + + @Test + public void should_find_package_name_for_class() { + // given + TestClass testClass = new TestClass(); + + // then + String namespace = PackageUtil.getNamespace(testClass.getClass()); + + // then + assertThat(namespace).isEqualTo("com.datastax.dse.driver.internal.core.insights"); + } + + @Test + @UseDataProvider("packagesProvider") + public void should_get_full_package_or_return_default(String fullClassSetting, String expected) { + // when + String result = PackageUtil.getFullPackageOrDefault(fullClassSetting, DEFAULT_PACKAGE); + + // then + assertThat(result).isEqualTo(expected); + } + + @Test + @UseDataProvider("classesProvider") + public void should_get_class_name_from_full_class_setting( + String fullClassSetting, String expected) { + // when + String result = PackageUtil.getClassName(fullClassSetting); + + // then + assertThat(result).isEqualTo(expected); + } + + @DataProvider + public static Object[][] packagesProvider() { + return new Object[][] { + {"com.P", "com"}, + {"ClassName", DEFAULT_PACKAGE}, + {"", DEFAULT_PACKAGE}, + {"com.p.a.2.x.12.Class", "com.p.a.2.x.12"}, + }; + } + + @DataProvider + public static Object[][] classesProvider() { + return new Object[][] { + {"com.P", "P"}, + {"ClassName", "ClassName"}, + {"", ""}, + {"com.p.a.2.x.12.Class", "Class"}, + }; + } + + private static class TestClass {} +} diff --git 
a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java new file mode 100644 index 00000000000..b41f2057f43 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java @@ -0,0 +1,222 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ + +package com.datastax.dse.driver.internal.core.insights; + +import static com.datastax.dse.driver.internal.core.insights.PlatformInfoFinder.UNVERIFIED_RUNTIME_VERSION; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.internal.core.insights.schema.InsightsPlatformInfo.RuntimeAndCompileTimeVersions; +import java.io.InputStream; +import java.net.URL; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.Function; +import org.junit.Test; + +public class PlatformInfoFinderTest { + private static final Function NULL_URL_PROVIDER = + d -> null; + + private static final Function NETTY_URL_PROVIDER = + new Function() { + @Override + public URL apply(PlatformInfoFinder.DependencyFromFile d) { + return this.getClass().getResource("/insights/pom.properties"); + } + }; + + private static final Function MALFORMED_URL_PROVIDER = + new Function() { + @Override + public URL apply(PlatformInfoFinder.DependencyFromFile d) { + return this.getClass().getResource("/insights/malformed-pom.properties"); + } + }; + + private static final Function + NON_EXISTING_URL_PROVIDER = + new Function() { + @Override + public URL apply(PlatformInfoFinder.DependencyFromFile d) { + return this.getClass().getResource("/insights/non-existing.pom"); + } + }; + + @Test + public void 
should_find_dependencies_from_file() { + // given + InputStream inputStream = + this.getClass().getResourceAsStream("/insights/test-dependencies.txt"); + Map expected = new HashMap<>(); + expected.put( + "io.netty:netty-transport-native-epoll", + withUnverifiedRuntimeVersionOptional("4.0.56.Final")); + expected.put("org.slf4j:slf4j-api", withUnverifiedRuntimeVersion("1.7.25")); + expected.put("org.ow2.asm:asm", withUnverifiedRuntimeVersion("5.0.3")); + expected.put("com.esri.geometry:esri-geometry-api", withUnverifiedRuntimeVersion("1.2.1")); + expected.put("io.netty:netty-transport", withUnverifiedRuntimeVersion("4.0.56.Final")); + expected.put("com.github.jnr:jnr-x86asm", withUnverifiedRuntimeVersion("1.0.2")); + expected.put("org.ow2.asm:asm-analysis", withUnverifiedRuntimeVersion("5.0.3")); + expected.put("com.github.jnr:jnr-constants", withUnverifiedRuntimeVersion("0.9.9")); + expected.put("io.netty:netty-common", withUnverifiedRuntimeVersion("4.0.56.Final")); + expected.put("com.google.guava:guava", withUnverifiedRuntimeVersion("19.0")); + expected.put("org.xerial.snappy:snappy-java", withUnverifiedRuntimeVersionOptional("1.1.2.6")); + expected.put("io.dropwizard.metrics:metrics-core", withUnverifiedRuntimeVersion("3.2.2")); + expected.put("org.ow2.asm:asm-tree", withUnverifiedRuntimeVersion("5.0.3")); + expected.put("com.github.jnr:jnr-posix", withUnverifiedRuntimeVersion("3.0.44")); + expected.put("org.codehaus.jackson:jackson-core-asl", withUnverifiedRuntimeVersion("1.9.12")); + expected.put( + "com.fasterxml.jackson.core:jackson-databind", withUnverifiedRuntimeVersion("2.7.9.3")); + expected.put("io.netty:netty-codec", withUnverifiedRuntimeVersion("4.0.56.Final")); + expected.put( + "com.fasterxml.jackson.core:jackson-annotations", withUnverifiedRuntimeVersion("2.8.11")); + expected.put("com.fasterxml.jackson.core:jackson-core", withUnverifiedRuntimeVersion("2.8.11")); + expected.put("io.netty:netty-handler", withUnverifiedRuntimeVersion("4.0.56.Final")); 
+ expected.put("org.lz4:lz4-java", withUnverifiedRuntimeVersionOptional("1.4.1")); + expected.put("org.hdrhistogram:HdrHistogram", withUnverifiedRuntimeVersionOptional("2.1.10")); + expected.put("com.github.jnr:jffi", withUnverifiedRuntimeVersion("1.2.16")); + expected.put("io.netty:netty-buffer", withUnverifiedRuntimeVersion("4.0.56.Final")); + expected.put("org.ow2.asm:asm-commons", withUnverifiedRuntimeVersion("5.0.3")); + expected.put("org.json:json", withUnverifiedRuntimeVersion("20090211")); + expected.put("org.ow2.asm:asm-util", withUnverifiedRuntimeVersion("5.0.3")); + expected.put("com.github.jnr:jnr-ffi", withUnverifiedRuntimeVersion("2.1.7")); + + // when + Map stringStringMap = + new PlatformInfoFinder(NULL_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).hasSize(28); + assertThat(stringStringMap).isEqualTo(expected); + } + + @Test + public void should_find_dependencies_from_file_without_duplicate() { + // given + InputStream inputStream = + this.getClass().getResourceAsStream("/insights/duplicate-dependencies.txt"); + + // when + Map stringStringMap = + new PlatformInfoFinder(NULL_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).hasSize(1); + } + + @Test + public void should_keep_order_of_dependencies() { + // given + InputStream inputStream = + this.getClass().getResourceAsStream("/insights/ordered-dependencies.txt"); + Map expected = new LinkedHashMap<>(); + expected.put("b-org.com:art1", withUnverifiedRuntimeVersion("1.0")); + expected.put("a-org.com:art1", withUnverifiedRuntimeVersion("2.0")); + expected.put("c-org.com:art1", withUnverifiedRuntimeVersion("3.0")); + + // when + Map stringStringMap = + new PlatformInfoFinder(NULL_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).isEqualTo(expected); + Iterator iterator = expected.keySet().iterator(); + assertThat(iterator.next()).isEqualTo("b-org.com:art1"); + 
assertThat(iterator.next()).isEqualTo("a-org.com:art1"); + assertThat(iterator.next()).isEqualTo("c-org.com:art1"); + } + + @Test + public void should_add_information_about_java_platform() { + // given + Map> runtimeDependencies = new HashMap<>(); + + // when + new PlatformInfoFinder(NULL_URL_PROVIDER).addJavaVersion(runtimeDependencies); + + // then + Map javaDependencies = runtimeDependencies.get("java"); + assertThat(javaDependencies.size()).isEqualTo(3); + } + + @Test + public void should_load_runtime_version_from_pom_properties_URL() { + // given + InputStream inputStream = this.getClass().getResourceAsStream("/insights/netty-dependency.txt"); + Map expected = new LinkedHashMap<>(); + expected.put( + "io.netty:netty-handler", + new RuntimeAndCompileTimeVersions("4.0.56.Final", "4.0.0.Final", false)); + + // when + Map stringStringMap = + new PlatformInfoFinder(NETTY_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).isEqualTo(expected); + } + + @Test + public void should_load_runtime_version_of_optional_dependency_from_pom_properties_URL() { + // given + InputStream inputStream = + this.getClass().getResourceAsStream("/insights/netty-dependency-optional.txt"); + Map expected = new LinkedHashMap<>(); + expected.put( + "io.netty:netty-handler", + new RuntimeAndCompileTimeVersions("4.0.56.Final", "4.0.0.Final", true)); + + // when + Map stringStringMap = + new PlatformInfoFinder(NETTY_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).isEqualTo(expected); + } + + @Test + public void should_not_load_runtime_dependency_from_malformed_pom_properties() { + // given + InputStream inputStream = this.getClass().getResourceAsStream("/insights/netty-dependency.txt"); + Map expected = new LinkedHashMap<>(); + expected.put("io.netty:netty-handler", withUnverifiedRuntimeVersion("4.0.0.Final")); + + // when + Map stringStringMap = + new 
PlatformInfoFinder(MALFORMED_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).isEqualTo(expected); + } + + @Test + public void should_not_load_runtime_dependency_from_non_existing_pom_properties() { + // given + InputStream inputStream = this.getClass().getResourceAsStream("/insights/netty-dependency.txt"); + Map expected = new LinkedHashMap<>(); + expected.put("io.netty:netty-handler", withUnverifiedRuntimeVersion("4.0.0.Final")); + + // when + Map stringStringMap = + new PlatformInfoFinder(NON_EXISTING_URL_PROVIDER).fetchDependenciesFromFile(inputStream); + + // then + assertThat(stringStringMap).isEqualTo(expected); + } + + private RuntimeAndCompileTimeVersions withUnverifiedRuntimeVersion(String compileVersion) { + return new RuntimeAndCompileTimeVersions(UNVERIFIED_RUNTIME_VERSION, compileVersion, false); + } + + private RuntimeAndCompileTimeVersions withUnverifiedRuntimeVersionOptional( + String compileVersion) { + return new RuntimeAndCompileTimeVersions(UNVERIFIED_RUNTIME_VERSION, compileVersion, true); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java new file mode 100644 index 00000000000..8bed7d9e288 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ + +package com.datastax.dse.driver.internal.core.insights; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.insights.schema.ReconnectionPolicyInfo; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.connection.ReconnectionPolicy; +import com.datastax.oss.driver.internal.core.connection.ConstantReconnectionPolicy; +import com.datastax.oss.driver.internal.core.connection.ExponentialReconnectionPolicy; +import java.time.Duration; +import org.assertj.core.data.MapEntry; +import org.junit.Test; + +public class ReconnectionPolicyInfoFinderTest { + + @Test + public void should_find_an_info_about_constant_reconnection_policy() { + // given + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + when(driverExecutionProfile.getDuration(DefaultDriverOption.RECONNECTION_BASE_DELAY)) + .thenReturn(Duration.ofMillis(100)); + ReconnectionPolicy constantReconnectionPolicy = mock(ConstantReconnectionPolicy.class); + + // when + ReconnectionPolicyInfo reconnectionPolicyInfo = + new ReconnectionPolicyInfoFinder() + .getReconnectionPolicyInfo(constantReconnectionPolicy, driverExecutionProfile); + + // then + assertThat(reconnectionPolicyInfo.getOptions()).contains(MapEntry.entry("delayMs", 100L)); + assertThat(reconnectionPolicyInfo.getType()).contains("ConstantReconnectionPolicy"); + } + + @Test + public void should_find_an_info_about_exponential_reconnection_policy() { + ExponentialReconnectionPolicy exponentialReconnectionPolicy = + mock(ExponentialReconnectionPolicy.class); + when(exponentialReconnectionPolicy.getBaseDelayMs()).thenReturn(100L); + 
when(exponentialReconnectionPolicy.getMaxAttempts()).thenReturn(10L); + when(exponentialReconnectionPolicy.getMaxDelayMs()).thenReturn(200L); + + // when + ReconnectionPolicyInfo reconnectionPolicyInfo = + new ReconnectionPolicyInfoFinder() + .getReconnectionPolicyInfo(exponentialReconnectionPolicy, null); + + // then + assertThat(reconnectionPolicyInfo.getOptions()).contains(MapEntry.entry("baseDelayMs", 100L)); + assertThat(reconnectionPolicyInfo.getOptions()).contains(MapEntry.entry("maxAttempts", 10L)); + assertThat(reconnectionPolicyInfo.getOptions()).contains(MapEntry.entry("maxDelayMs", 200L)); + assertThat(reconnectionPolicyInfo.getType()).contains("ExponentialReconnectionPolicy"); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java new file mode 100644 index 00000000000..e8063c219fb --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java @@ -0,0 +1,146 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.IGNORED; +import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.LOCAL; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.then; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.reset; + +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.UUID; +import org.junit.Test; + +public class DseLoadBalancingPolicyEventsTest extends DseLoadBalancingPolicyTestBase { + + @Test + public void should_remove_down_node_from_live_set() { + // Given + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onDown(node2); + + // Then + then(distanceReporter).should(never()).setDistance(eq(node2), any(NodeDistance.class)); + assertThat(policy.localDcLiveNodes).containsOnly(node1); + } + + @Test + public void should_remove_removed_node_from_live_set() { + // Given + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onRemove(node2); + + // Then + then(distanceReporter).should(never()).setDistance(eq(node2), any(NodeDistance.class)); + assertThat(policy.localDcLiveNodes).containsOnly(node1); + } + + @Test + public void should_set_added_node_to_local() { + // Given + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onAdd(node3); + + // Then + // Not added to the live set yet, 
we're waiting for the pool to open + then(distanceReporter).should().setDistance(node3, LOCAL); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + } + + @Test + public void should_ignore_added_node_when_filtered() { + // Given + given(filter.test(node3)).willReturn(false); + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onAdd(node3); + + // Then + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + } + + @Test + public void should_ignore_added_node_when_remote_dc() { + // Given + given(node3.getDatacenter()).willReturn("dc2"); + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onAdd(node3); + + // Then + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + } + + @Test + public void should_add_up_node_to_live_set() { + // Given + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onUp(node3); + + // Then + then(distanceReporter).should().setDistance(node3, LOCAL); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2, node3); + } + + @Test + public void should_ignore_up_node_when_filtered() { + // Given + given(filter.test(node3)).willReturn(false); + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onUp(node3); + + // Then + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + } + + @Test + public void should_ignore_up_node_when_remote_dc() { + // Given + given(node3.getDatacenter()).willReturn("dc2"); + DseLoadBalancingPolicy policy = createAndInitPolicy(); + + // When + policy.onUp(node3); + + // Then + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + } + + private DseLoadBalancingPolicy createAndInitPolicy() { + 
given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); + reset(distanceReporter); + return policy; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java new file mode 100644 index 00000000000..0c5babc6d85 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java @@ -0,0 +1,246 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.IGNORED; +import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.LOCAL; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.assertj.core.api.Assertions.filter; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.then; +import static org.mockito.BDDMockito.verify; +import static org.mockito.BDDMockito.when; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; + +import 
ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.UUID; +import java.util.function.Predicate; +import org.junit.Test; + +public class DseLoadBalancingPolicyInitTest extends DseLoadBalancingPolicyTestBase { + + @Test + public void should_infer_local_dc_if_no_explicit_contact_points() { + // Given + given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn(null); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + given(metadataManager.wasImplicitContactPoint()).willReturn(true); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); + + // Then + assertThat(policy.localDc).isEqualTo("dc1"); + } + + @Test + public void should_require_local_dc_if_explicit_contact_points() { + // Given + given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn(null); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node2)); + given(metadataManager.wasImplicitContactPoint()).willReturn(false); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + Throwable error = + catchThrowable( + () -> policy.init(ImmutableMap.of(UUID.randomUUID(), node2), distanceReporter)); + + // Then + assertThat(error) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "You provided explicit contact points, the local DC must be specified"); + } + + @Test + public void should_warn_if_contact_points_not_in_local_dc() { + // Given + 
given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc3"); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2, node3)); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + then(appender).should(atLeast(1)).doAppend(loggingEventCaptor.capture()); + Iterable warnLogs = + filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); + assertThat(warnLogs).hasSize(1); + assertThat(warnLogs.iterator().next().getFormattedMessage()) + .contains( + "You specified dc1 as the local DC, but some contact points are from a different DC") + .contains("node2=dc2") + .contains("node3=dc3"); + } + + @Test + public void should_not_warn_if_contact_points_not_in_local_dc_and_profile_not_default() { + // Given + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc3"); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2, node3)); + given(config.getProfile("Non default")).willReturn(profile); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, "Non default"); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + then(appender).should(never()).doAppend(loggingEventCaptor.capture()); + Iterable warnLogs = + filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); + assertThat(warnLogs).isEmpty(); + } + + @Test + public void should_include_nodes_from_local_dc() { + // Given + // make node3 not a contact point to cover all cases + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2)); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + 
given(node1.getState()).willReturn(NodeState.UP); + given(node2.getState()).willReturn(NodeState.DOWN); + given(node3.getState()).willReturn(NodeState.UNKNOWN); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + // Set distance for all nodes in the local DC + then(distanceReporter).should().setDistance(node1, LOCAL); + then(distanceReporter).should().setDistance(node2, LOCAL); + then(distanceReporter).should().setDistance(node3, LOCAL); + // But only include UP or UNKNOWN nodes in the live set + assertThat(policy.localDcLiveNodes).containsExactly(node1, node3); + } + + @Test + public void should_ignore_nodes_from_remote_dcs() { + // Given + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc3"); + // make node3 not a contact point to cover all cases + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2)); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + then(distanceReporter).should().setDistance(node1, LOCAL); + then(distanceReporter).should().setDistance(node2, IGNORED); + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsExactly(node1); + } + + @Test + public void should_ignore_nodes_excluded_by_programmatic_filter() { + // Given + given(filter.test(node2)).willReturn(false); + given(filter.test(node3)).willReturn(false); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + 
// Then + then(distanceReporter).should().setDistance(node1, LOCAL); + then(distanceReporter).should().setDistance(node2, IGNORED); + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsExactly(node1); + } + + @Test + public void should_ignore_nodes_excluded_by_configured_filter() { + // Given + given(context.getNodeFilter(DEFAULT_NAME)).willReturn(null); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + given(profile.isDefined(LOAD_BALANCING_FILTER_CLASS)).willReturn(true); + given(profile.getString(LOAD_BALANCING_FILTER_CLASS)).willReturn(MyFilter.class.getName()); + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // When + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); + + // Then + then(distanceReporter).should().setDistance(node1, LOCAL); + then(distanceReporter).should().setDistance(node2, IGNORED); + then(distanceReporter).should().setDistance(node3, IGNORED); + assertThat(policy.localDcLiveNodes).containsExactly(node1); + } + + @Test + public void should_use_local_dc_if_provided_via_config() { + // Given + // the parent class sets the config option to "dc1" + + // When + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // Then + assertThat(policy.localDc).isEqualTo("dc1"); + } + + @Test + public void should_use_local_dc_if_provided_via_context() { + // Given + when(context.getLocalDatacenter(DEFAULT_NAME)).thenReturn("dc1"); + // note: programmatic takes priority, the config won't even be inspected so no need to stub the + // option to null + + // When + DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); + + // Then + assertThat(policy.localDc).isEqualTo("dc1"); + verify(profile, never()).getString(LOAD_BALANCING_LOCAL_DATACENTER, null); + } + + public static class MyFilter 
implements Predicate { + @SuppressWarnings("unused") + public MyFilter(DriverContext context, String profileName) {} + + @Override + public boolean test(Node node) { + return node.toString().equals("node1"); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java new file mode 100644 index 00000000000..c5d2b6ae1f6 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java @@ -0,0 +1,502 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static java.util.Collections.emptySet; +import static java.util.stream.Collectors.toList; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.then; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.core.metadata.token.Token; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Optional; +import java.util.Queue; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicLongArray; +import java.util.stream.IntStream; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +public class DseLoadBalancingPolicyQueryPlanTest extends DseLoadBalancingPolicyTestBase { + + private static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); + private static final ByteBuffer ROUTING_KEY = Bytes.fromHexString("0xdeadbeef"); + + private static final long T0 = Long.MIN_VALUE; + private static final long T1 = 100; + private static final long T2 = 200; + private static final long T3 = 300; + + @Mock private Node node4; + @Mock private Node node5; + @Mock private ChannelPool pool1; + @Mock private ChannelPool pool2; + @Mock private ChannelPool pool3; + @Mock private ChannelPool pool4; + @Mock private ChannelPool pool5; + @Mock private DefaultSession session; + @Mock private Metadata metadata; + @Mock private TokenMap tokenMap; + @Mock private Token routingToken; + + private DseLoadBalancingPolicy policy; + private long nanoTime; + private int diceRoll; + + @Before + @Override + public void setUp() { + super.setUp(); + nanoTime = T1; + diceRoll = 4; + given(node4.getDatacenter()).willReturn("dc1"); + given(node5.getDatacenter()).willReturn("dc1"); + given(session.getPools()) + .willReturn( + ImmutableMap.of( + node1, pool1, + node2, pool2, + node3, pool3, + node4, pool4, + node5, pool5)); + given(context.getMetadataManager()).willReturn(metadataManager); + given(metadataManager.getMetadata()).willReturn(metadata); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + given(metadata.getTokenMap()).willAnswer(invocation -> 
Optional.of(tokenMap)); + policy = + spy( + new DseLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + void shuffleHead(Object[] array, int n) {} + + @Override + long nanoTime() { + return nanoTime; + } + + @Override + int diceRoll1d4() { + return diceRoll; + } + }); + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, + UUID.randomUUID(), node2, + UUID.randomUUID(), node3, + UUID.randomUUID(), node4, + UUID.randomUUID(), node5), + distanceReporter); + + // Note: tests in this class rely on the fact that the policy uses a CopyOnWriteArraySet which + // preserves insertion order, which is why we can use containsExactly() throughout this class. + assertThat(policy.localDcLiveNodes).containsExactly(node1, node2, node3, node4, node5); + } + + @Test + public void should_use_round_robin_when_no_request() { + // Given + request = null; + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_no_session() { + // Given + session = null; + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(request).should(never()).getRoutingKey(); + then(request).should(never()).getRoutingToken(); + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_request_has_no_routing_keyspace() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(null); + given(session.getKeyspace()).willReturn(Optional.empty()); + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(request).should(never()).getRoutingKey(); + then(request).should(never()).getRoutingToken(); + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_request_has_no_routing_key_or_token() { + // Given + 
given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(null); + given(request.getRoutingToken()).willReturn(null); + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(metadataManager).should(never()).getMetadata(); + } + + @Test + public void should_use_round_robin_when_token_map_absent() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(metadata.getTokenMap()).willReturn(Optional.empty()); + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(metadata).should(atLeast(1)).getTokenMap(); + } + + @Test + public void + should_use_round_robin_when_token_map_returns_no_replicas_using_request_keyspace_and_routing_token() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingToken()).willReturn(routingToken); + given(tokenMap.getReplicas(KEYSPACE, routingToken)).willReturn(emptySet()); + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, routingToken); + } + + @Test + public void + should_use_round_robin_when_token_map_returns_no_replicas_using_session_keyspace_and_routing_key() { + // Given + given(request.getKeyspace()).willReturn(null); + given(request.getRoutingKeyspace()).willReturn(null); + given(session.getKeyspace()).willReturn(Optional.of(KEYSPACE)); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(emptySet()); + // When + List> plans = generateQueryPlans(); + // Then + thenAssertRoundRobinQueryPlans(plans); + 
then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); + } + + @Test + public void should_prioritize_single_replica() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(ImmutableSet.of(node3)); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + Queue plan3 = policy.newQueryPlan(request, session); + Queue plan4 = policy.newQueryPlan(request, session); + + // Then + // node3 always first, round-robin on the rest + assertThat(plan1).containsExactly(node3, node1, node2, node4, node5); + assertThat(plan2).containsExactly(node3, node2, node4, node5, node1); + assertThat(plan3).containsExactly(node3, node4, node5, node1, node2); + assertThat(plan4).containsExactly(node3, node5, node1, node2, node4); + + then(policy).should(never()).shuffleHead(any(), anyInt()); + then(policy).should(never()).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_2_replicas() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(ImmutableSet.of(node3, node5)); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + Queue plan3 = policy.newQueryPlan(request, session); + + // Then + // node3 and node5 always first, round-robin on the rest + assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node2, node4, node1); + assertThat(plan3).containsExactly(node3, node5, node4, node1, node2); + + then(policy).should(times(3)).shuffleHead(any(), anyInt()); + then(policy).should(never()).nanoTime(); + 
then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_all_newly_up() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + policy.upTimes.put(node1, T1); + policy.upTimes.put(node3, T2); + policy.upTimes.put(node5, T3); // newest up replica + given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // newest up replica is 5, not in first or second position + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void + should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_4() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + policy.upTimes.put(node1, T2); // newest up replica + policy.upTimes.put(node3, T1); + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // newest up replica is node1 in first position and diceRoll = 4 -> bubbles down + 
assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(times(2)).diceRoll1d4(); + } + + @Test + public void + should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_1() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + policy.upTimes.put(node1, T2); // newest up replica + policy.upTimes.put(node3, T1); + given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + diceRoll = 1; + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // newest up replica is node1 in first position and diceRoll = 1 -> does not bubble down + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(times(2)).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_first_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); // unhealthy + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + policy.responseTimes.put(node1, new AtomicLongArray(new long[] {T0, 
T0})); // unhealthy + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 is unhealthy = 1 -> bubbles down + assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void + should_not_treat_node_as_unhealthy_if_has_in_flight_exceeded_but_response_times_normal() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); // unhealthy + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + policy.responseTimes.put(node1, new AtomicLongArray(new long[] {T1, T1})); // healthy + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 has more in-flight than node3 -> swap + assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); + assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_last_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + 
.willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(100); // unhealthy + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node5 is unhealthy -> noop + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_majority_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); + given(pool3.getInFlight()).willReturn(100); + given(pool5.getInFlight()).willReturn(0); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // majority of nodes unhealthy -> noop + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + @Test + public void should_reorder_first_two_replicas_when_first_has_more_in_flight_than_second() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + 
given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(200); + given(pool3.getInFlight()).willReturn(100); + + // When + Queue plan1 = policy.newQueryPlan(request, session); + Queue plan2 = policy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 has more in-flight than node3 -> swap + assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); + assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); + + then(policy).should(times(2)).shuffleHead(any(), anyInt()); + then(policy).should(times(2)).nanoTime(); + then(policy).should(never()).diceRoll1d4(); + } + + private List> generateQueryPlans() { + return IntStream.range(0, 10) + .mapToObj(i -> policy.newQueryPlan(request, session)) + .collect(toList()); + } + + private void thenAssertRoundRobinQueryPlans(List> plans) { + assertThat(plans.get(0)).containsExactly(node1, node2, node3, node4, node5); + assertThat(plans.get(1)).containsExactly(node2, node3, node4, node5, node1); + assertThat(plans.get(2)).containsExactly(node3, node4, node5, node1, node2); + assertThat(plans.get(3)).containsExactly(node4, node5, node1, node2, node3); + assertThat(plans.get(4)).containsExactly(node5, node1, node2, node3, node4); + assertThat(plans.get(5)).containsExactly(node1, node2, node3, node4, node5); + assertThat(plans.get(6)).containsExactly(node2, node3, node4, node5, node1); + assertThat(plans.get(7)).containsExactly(node3, node4, node5, node1, node2); + assertThat(plans.get(8)).containsExactly(node4, node5, node1, node2, node3); + assertThat(plans.get(9)).containsExactly(node5, node1, node2, node3, node4); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java new file mode 100644 index 00000000000..57073f72198 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java @@ -0,0 +1,180 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.UUID; +import org.junit.Before; +import org.junit.Test; + +public class DseLoadBalancingPolicyRequestTrackerTest extends DseLoadBalancingPolicyTestBase { + + private DseLoadBalancingPolicy policy; + private long nextNanoTime; + + @Before + @Override + public void setUp() { + super.setUp(); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + policy = + new DseLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + long nanoTime() { + return nextNanoTime; + } + }; + policy.init( + ImmutableMap.of( + UUID.randomUUID(), node1, + UUID.randomUUID(), node2, + UUID.randomUUID(), node3), + distanceReporter); + } + + @Test + public void should_record_first_response_time_on_node_success() { + // Given + nextNanoTime = 123; + + // When + policy.onNodeSuccess(request, 0, profile, node1, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying(node1, value -> assertThat(value.get(0)).isEqualTo(123L)) + .doesNotContainKeys(node2, node3); + 
assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } + + @Test + public void should_record_second_response_time_on_node_success() { + // Given + should_record_first_response_time_on_node_success(); + nextNanoTime = 456; + + // When + policy.onNodeSuccess(request, 0, profile, node1, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying( + node1, + value -> { + // oldest value first + assertThat(value.get(0)).isEqualTo(123); + assertThat(value.get(1)).isEqualTo(456); + }) + .doesNotContainKeys(node2, node3); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } + + @Test + public void should_record_further_response_times_on_node_success() { + // Given + should_record_second_response_time_on_node_success(); + nextNanoTime = 789; + + // When + policy.onNodeSuccess(request, 0, profile, node1, logPrefix); + policy.onNodeSuccess(request, 0, profile, node2, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying( + node1, + value -> { + // values should rotate left (bubble up) + assertThat(value.get(0)).isEqualTo(456); + assertThat(value.get(1)).isEqualTo(789); + }) + .hasEntrySatisfying(node2, value -> assertThat(value.get(0)).isEqualTo(789)) + .doesNotContainKey(node3); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } + + @Test + public void should_record_first_response_time_on_node_error() { + // Given + nextNanoTime = 123; + Throwable iae = new 
IllegalArgumentException(); + + // When + policy.onNodeError(request, iae, 0, profile, node1, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying(node1, value -> assertThat(value.get(0)).isEqualTo(123L)) + .doesNotContainKeys(node2, node3); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } + + @Test + public void should_record_second_response_time_on_node_error() { + // Given + should_record_first_response_time_on_node_error(); + nextNanoTime = 456; + Throwable iae = new IllegalArgumentException(); + + // When + policy.onNodeError(request, iae, 0, profile, node1, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying( + node1, + value -> { + // oldest value first + assertThat(value.get(0)).isEqualTo(123); + assertThat(value.get(1)).isEqualTo(456); + }) + .doesNotContainKeys(node2, node3); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } + + @Test + public void should_record_further_response_times_on_node_error() { + // Given + should_record_second_response_time_on_node_error(); + nextNanoTime = 789; + Throwable iae = new IllegalArgumentException(); + + // When + policy.onNodeError(request, iae, 0, profile, node1, logPrefix); + policy.onNodeError(request, iae, 0, profile, node2, logPrefix); + + // Then + assertThat(policy.responseTimes) + .hasEntrySatisfying( + node1, + value -> { + // values should rotate left (bubble up) + assertThat(value.get(0)).isEqualTo(456); + assertThat(value.get(1)).isEqualTo(789); + }) + .hasEntrySatisfying(node2, value -> assertThat(value.get(0)).isEqualTo(789)) + .doesNotContainKey(node3); + 
assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java new file mode 100644 index 00000000000..d2049379477 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.BDDMockito.given; + +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import 
java.util.function.Predicate; +import org.junit.After; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.LoggerFactory; + +@RunWith(MockitoJUnitRunner.class) +public abstract class DseLoadBalancingPolicyTestBase { + + @Mock DefaultNode node1; + @Mock DefaultNode node2; + @Mock DefaultNode node3; + @Mock InternalDriverContext context; + @Mock DriverConfig config; + @Mock DriverExecutionProfile profile; + @Mock Predicate filter; + @Mock LoadBalancingPolicy.DistanceReporter distanceReporter; + @Mock Appender appender; + @Mock Request request; + @Mock MetadataManager metadataManager; + final String logPrefix = "lbp-test-log-prefix"; + + @Captor ArgumentCaptor loggingEventCaptor; + + private Logger logger; + + @Before + public void setUp() { + logger = (Logger) LoggerFactory.getLogger(DseLoadBalancingPolicy.class); + logger.addAppender(appender); + given(node1.getDatacenter()).willReturn("dc1"); + given(node2.getDatacenter()).willReturn("dc1"); + given(node3.getDatacenter()).willReturn("dc1"); + given(filter.test(any(Node.class))).willReturn(true); + given(context.getNodeFilter(DEFAULT_NAME)).willReturn(filter); + given(context.getConfig()).willReturn(config); + given(config.getProfile(DEFAULT_NAME)).willReturn(profile); + given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn("dc1"); + given(context.getMetadataManager()).willReturn(metadataManager); + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + } + + @After + public void tearDown() { + logger.detachAppender(appender); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java new file mode 100644 index 00000000000..afb063182de --- /dev/null 
+++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; + +public abstract class GeometryCodecTest> { + + private C codec; + + protected GeometryCodecTest(C codec) { + this.codec = codec; + } + + public void should_format(G input, String expected) { + assertThat(codec.format(input)).isEqualTo(expected); + } + + public void should_parse(String input, G expected) { + assertThat(codec.parse(input)).isEqualTo(expected); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java new file mode 100644 index 00000000000..ed3bf66e8c8 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java @@ -0,0 +1,65 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class LineStringCodecTest extends GeometryCodecTest { + + private static DefaultLineString lineString = + new DefaultLineString( + new DefaultPoint(30, 10), new DefaultPoint(10, 30), new DefaultPoint(40, 40)); + + public LineStringCodecTest() { + super(new LineStringCodec()); + } + + @DataProvider + public static Object[][] serde() { + return new Object[][] {{null, null}, {lineString, lineString}}; + } + + @DataProvider + public static Object[][] format() { + return new Object[][] {{null, "NULL"}, {lineString, "'LINESTRING (30 10, 10 30, 40 40)'"}}; + } + + @DataProvider + public static Object[][] parse() { + return new Object[][] { + {null, null}, + {"", null}, + {" ", null}, + {"NULL", null}, + {" NULL ", null}, + {"'LINESTRING (30 10, 10 30, 40 40)'", lineString}, + {" ' LineString (30 10, 10 30, 40 40 ) ' ", lineString} + }; + } + + @Test + @UseDataProvider("format") + @Override + public void should_format(LineString input, String expected) { + super.should_format(input, expected); + } + + @Test + @UseDataProvider("parse") + @Override + public void should_parse(String input, LineString expected) { + super.should_parse(input, expected); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java new file mode 100644 index 00000000000..5230ea2f8d5 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class PointCodecTest extends GeometryCodecTest { + + public PointCodecTest() { + super(new PointCodec()); + } + + @DataProvider + public static Object[][] serde() { + return new Object[][] { + {null, null}, + {Point.fromCoordinates(1, 2), Point.fromCoordinates(1, 2)}, + {Point.fromCoordinates(-1.1, -2.2), Point.fromCoordinates(-1.1, -2.2)} + }; + } + + @DataProvider + public static Object[][] format() { + return new Object[][] { + {null, "NULL"}, + {Point.fromCoordinates(1, 2), "'POINT (1 2)'"}, + {Point.fromCoordinates(-1.1, -2.2), "'POINT (-1.1 -2.2)'"} + }; + } + + @DataProvider + public static Object[][] parse() { + return new Object[][] { + {null, null}, + {"", null}, + {" ", null}, + {"NULL", null}, + {" NULL ", null}, + {"'POINT ( 1 2 )'", Point.fromCoordinates(1, 2)}, + {"'POINT ( 1.0 2.0 )'", Point.fromCoordinates(1, 2)}, + {"' point ( -1.1 -2.2 )'", Point.fromCoordinates(-1.1, -2.2)}, + {" ' Point ( -1.1 -2.2 ) ' ", Point.fromCoordinates(-1.1, -2.2)} + }; + } + + @Test + @UseDataProvider("format") + @Override + public void should_format(Point input, String expected) 
{ + super.should_format(input, expected); + } + + @Test + @UseDataProvider("parse") + @Override + public void should_parse(String input, Point expected) { + super.should_parse(input, expected); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java new file mode 100644 index 00000000000..e39097e5451 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.geometry; + +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; +import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class PolygonCodecTest extends GeometryCodecTest { + private static Polygon polygon = + new DefaultPolygon( + new DefaultPoint(30, 10), + new DefaultPoint(10, 20), + new DefaultPoint(20, 40), + new DefaultPoint(40, 40)); + + public PolygonCodecTest() { + super(new PolygonCodec()); + } + + @DataProvider + public static Object[][] serde() { + return new Object[][] {{null, null}, {polygon, polygon}}; + } + + @DataProvider + public static Object[][] format() { + return new Object[][] { + {null, "NULL"}, {polygon, "'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'"} + }; + } + + @DataProvider + public static Object[][] 
parse() { + return new Object[][] { + {null, null}, + {"", null}, + {" ", null}, + {"NULL", null}, + {" NULL ", null}, + {"'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'", polygon}, + {" ' Polygon ( ( 30 10, 40 40, 20 40, 10 20, 30 10 ) ) ' ", polygon} + }; + } + + @Test + @UseDataProvider("format") + @Override + public void should_format(Polygon input, String expected) { + super.should_format(input, expected); + } + + @Test + @UseDataProvider("parse") + @Override + public void should_parse(String input, Polygon expected) { + super.should_parse(input, expected); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java new file mode 100644 index 00000000000..7ff553628d4 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java @@ -0,0 +1,102 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.type.codec.time; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.time.DateRange; +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.nio.ByteBuffer; +import java.text.ParseException; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DateRangeCodecTest { + + @Test + @UseDataProvider("dateRanges") + public void should_encode_and_decode(DateRange dateRange) { + TypeCodec codec = DseTypeCodecs.DATE_RANGE; + DateRange decoded = + codec.decode(codec.encode(dateRange, ProtocolVersion.DEFAULT), ProtocolVersion.DEFAULT); + assertThat(decoded).isEqualTo(dateRange); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_encode_unknown_date_range_type() { + DseTypeCodecs.DATE_RANGE.decode(ByteBuffer.wrap(new byte[] {127}), ProtocolVersion.DEFAULT); + } + + @Test + @UseDataProvider("dateRangeStrings") + public void should_format_and_parse(String dateRangeString) { + TypeCodec codec = DseTypeCodecs.DATE_RANGE; + String formatted = codec.format(codec.parse(dateRangeString)); + assertThat(formatted).isEqualTo(MoreObjects.firstNonNull(dateRangeString, "NULL")); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_string() { + DseTypeCodecs.DATE_RANGE.parse("foo"); + } + + @DataProvider + public static Object[][] dateRanges() throws 
ParseException { + return new Object[][] { + {null}, + {DateRange.parse("[2011-01 TO 2015]")}, + {DateRange.parse("[2010-01-02 TO 2015-05-05T13]")}, + {DateRange.parse("[1973-06-30T13:57:28.123Z TO 1999-05-05T14:14:59]")}, + {DateRange.parse("[2010-01-01T15 TO 2016-02]")}, + {DateRange.parse("[1500 TO 1501]")}, + {DateRange.parse("[0001-01-01 TO 0001-01-01]")}, + {DateRange.parse("[0001-01-01 TO 0001-01-02]")}, + {DateRange.parse("[0000-01-01 TO 0000-01-01]")}, + {DateRange.parse("[0000-01-01 TO 0000-01-02]")}, + {DateRange.parse("[-0001-01-01 TO -0001-01-01]")}, + {DateRange.parse("[-0001-01-01 TO -0001-01-02]")}, + {DateRange.parse("[* TO 2014-12-01]")}, + {DateRange.parse("[1999 TO *]")}, + {DateRange.parse("[* TO *]")}, + {DateRange.parse("-0009")}, + {DateRange.parse("2000-11")}, + {DateRange.parse("*")} + }; + } + + @DataProvider + public static Object[][] dateRangeStrings() { + return new Object[][] { + {null}, + {"NULL"}, + {"'[2011-01 TO 2015]'"}, + {"'[2010-01-02 TO 2015-05-05T13]'"}, + {"'[1973-06-30T13:57:28.123Z TO 1999-05-05T14:14:59]'"}, + {"'[2010-01-01T15 TO 2016-02]'"}, + {"'[1500 TO 1501]'"}, + {"'[0001-01-01 TO 0001-01-01]'"}, + {"'[0001-01-01 TO 0001-01-02]'"}, + {"'[0000-01-01 TO 0000-01-01]'"}, + {"'[0000-01-01 TO 0000-01-02]'"}, + {"'[-0001-01-01 TO -0001-01-01]'"}, + {"'[-0001-01-01 TO -0001-01-02]'"}, + {"'[* TO 2014-12-01]'"}, + {"'[1999 TO *]'"}, + {"'[* TO *]'"}, + {"'-0009'"}, + {"'2000-11'"}, + {"'*'"} + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java new file mode 100644 index 00000000000..79297fb9caa --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.util.concurrent; + +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.concurrent.CompletionStage; +import org.junit.Test; + +public class BoundedConcurrentQueueTest { + + @Test + public void should_dequeue_null_when_empty() { + BoundedConcurrentQueue queue = new BoundedConcurrentQueue<>(4); + assertThat(queue.peek()).isNull(); + assertThat(queue.poll()).isNull(); + } + + @Test + public void should_enqueue_and_dequeue_while_not_full() { + BoundedConcurrentQueue queue = new BoundedConcurrentQueue<>(4); + + assertThatStage(queue.offer(1)).isSuccess(e -> assertThat(e).isEqualTo(1)); + assertThat(queue.peek()).isEqualTo(1); + assertThat(queue.poll()).isEqualTo(1); + + assertThatStage(queue.offer(2)).isSuccess(e -> assertThat(e).isEqualTo(2)); + assertThatStage(queue.offer(3)).isSuccess(e -> assertThat(e).isEqualTo(3)); + assertThatStage(queue.offer(4)).isSuccess(e -> assertThat(e).isEqualTo(4)); + + assertThat(queue.peek()).isEqualTo(2); + assertThat(queue.poll()).isEqualTo(2); + assertThat(queue.peek()).isEqualTo(3); + assertThat(queue.poll()).isEqualTo(3); + assertThat(queue.peek()).isEqualTo(4); + assertThat(queue.poll()).isEqualTo(4); + assertThat(queue.poll()).isNull(); + } + + @Test + public void should_delay_insertion_when_full_until_space_available() { + BoundedConcurrentQueue queue = new BoundedConcurrentQueue<>(4); + + assertThatStage(queue.offer(1)).isSuccess(e -> assertThat(e).isEqualTo(1)); + assertThatStage(queue.offer(2)).isSuccess(e -> assertThat(e).isEqualTo(2)); + assertThatStage(queue.offer(3)).isSuccess(e -> assertThat(e).isEqualTo(3)); + assertThatStage(queue.offer(4)).isSuccess(e -> assertThat(e).isEqualTo(4)); + + CompletionStage enqueue5 = queue.offer(5); + assertThat(enqueue5).isNotDone(); + + 
assertThat(queue.poll()).isEqualTo(1); + assertThatStage(enqueue5).isSuccess(e -> assertThat(e).isEqualTo(5)); + } + + @Test(expected = IllegalStateException.class) + public void should_fail_to_insert_when_other_insert_already_pending() { + BoundedConcurrentQueue queue = new BoundedConcurrentQueue<>(1); + assertThatStage(queue.offer(1)).isSuccess(); + assertThatStage(queue.offer(2)).isNotDone(); + queue.offer(3); + } +} diff --git a/core/src/test/resources/config/customApplication.conf b/core/src/test/resources/config/customApplication.conf index dc0c6d19b45..92b5f492b9c 100644 --- a/core/src/test/resources/config/customApplication.conf +++ b/core/src/test/resources/config/customApplication.conf @@ -1,4 +1,6 @@ datastax-java-driver { // Check that references to other options in `reference.conf` are correctly resolved basic.request.timeout = ${datastax-java-driver.advanced.connection.init-query-timeout} + + advanced.continuous-paging.max-pages = 10 } diff --git a/core/src/test/resources/config/customApplication.json b/core/src/test/resources/config/customApplication.json index 2527d9908d8..4988a72cd9a 100644 --- a/core/src/test/resources/config/customApplication.json +++ b/core/src/test/resources/config/customApplication.json @@ -4,6 +4,11 @@ "request": { "page-size": "2000" } + }, + "advanced": { + "continuous-paging": { + "page-size": 2000 + } } } } diff --git a/core/src/test/resources/config/customApplication.properties b/core/src/test/resources/config/customApplication.properties index 6e971ef1d84..4956c960b66 100644 --- a/core/src/test/resources/config/customApplication.properties +++ b/core/src/test/resources/config/customApplication.properties @@ -1,17 +1,9 @@ # # Copyright DataStax, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This software can be used solely with DataStax Enterprise. Please consult the license at +# http://www.datastax.com/terms/datastax-dse-driver-license-terms # datastax-java-driver.basic.request.consistency=ONE +datastax-java-driver.advanced.continuous-paging.max-enqueued-pages = 8 \ No newline at end of file diff --git a/core/src/test/resources/insights/duplicate-dependencies.txt b/core/src/test/resources/insights/duplicate-dependencies.txt new file mode 100644 index 00000000000..a808dff3f57 --- /dev/null +++ b/core/src/test/resources/insights/duplicate-dependencies.txt @@ -0,0 +1,2 @@ +io.netty:netty-handler:jar:4.0.56.Final:compile +io.netty:netty-handler:jar:4.1.2.Final:compile \ No newline at end of file diff --git a/core/src/test/resources/insights/malformed-pom.properties b/core/src/test/resources/insights/malformed-pom.properties new file mode 100644 index 00000000000..db049766ebc --- /dev/null +++ b/core/src/test/resources/insights/malformed-pom.properties @@ -0,0 +1,11 @@ +# +# Copyright DataStax, Inc. +# +# This software can be used solely with DataStax Enterprise. 
Please consult the license at +# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# + +#Created by Apache Maven 3.5.0 +#no version +groupId=io.netty +artifactId=netty-handler \ No newline at end of file diff --git a/core/src/test/resources/insights/netty-dependency-optional.txt b/core/src/test/resources/insights/netty-dependency-optional.txt new file mode 100644 index 00000000000..2bd0cd21a0c --- /dev/null +++ b/core/src/test/resources/insights/netty-dependency-optional.txt @@ -0,0 +1 @@ +io.netty:netty-handler:jar:4.0.0.Final:compile (optional) \ No newline at end of file diff --git a/core/src/test/resources/insights/netty-dependency.txt b/core/src/test/resources/insights/netty-dependency.txt new file mode 100644 index 00000000000..69c350c30e8 --- /dev/null +++ b/core/src/test/resources/insights/netty-dependency.txt @@ -0,0 +1 @@ +io.netty:netty-handler:jar:4.0.0.Final:runtime \ No newline at end of file diff --git a/core/src/test/resources/insights/ordered-dependencies.txt b/core/src/test/resources/insights/ordered-dependencies.txt new file mode 100644 index 00000000000..a5518f89736 --- /dev/null +++ b/core/src/test/resources/insights/ordered-dependencies.txt @@ -0,0 +1,3 @@ +b-org.com:art1:jar:1.0:compile +a-org.com:art1:jar:2.0:compile +c-org.com:art1:jar:3.0:compile \ No newline at end of file diff --git a/core/src/test/resources/insights/pom.properties b/core/src/test/resources/insights/pom.properties new file mode 100644 index 00000000000..cb4f891bd9d --- /dev/null +++ b/core/src/test/resources/insights/pom.properties @@ -0,0 +1,12 @@ +# +# Copyright DataStax, Inc. +# +# This software can be used solely with DataStax Enterprise. 
Please consult the license at +# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# + +#Created by Apache Maven 3.5.0 +version=4.0.56.Final +groupId=io.netty +artifactId=netty-handler + diff --git a/core/src/test/resources/insights/test-dependencies.txt b/core/src/test/resources/insights/test-dependencies.txt new file mode 100644 index 00000000000..6cabe8b257d --- /dev/null +++ b/core/src/test/resources/insights/test-dependencies.txt @@ -0,0 +1,31 @@ + +The following files have been resolved: + com.github.jnr:jffi:jar:1.2.16:compile + org.ow2.asm:asm:jar:5.0.3:compile + com.github.jnr:jnr-constants:jar:0.9.9:compile + com.esri.geometry:esri-geometry-api:jar:1.2.1:compile + com.google.guava:guava:jar:19.0:compile + com.fasterxml.jackson.core:jackson-annotations:jar:2.8.11:compile + com.github.jnr:jnr-posix:jar:3.0.44:compile + org.codehaus.jackson:jackson-core-asl:jar:1.9.12:compile + io.netty:netty-handler:jar:4.0.56.Final:compile + org.ow2.asm:asm-commons:jar:5.0.3:compile + org.ow2.asm:asm-util:jar:5.0.3:compile + org.xerial.snappy:snappy-java:jar:1.1.2.6:compile (optional) + io.netty:netty-buffer:jar:4.0.56.Final:compile + com.github.jnr:jnr-ffi:jar:2.1.7:compile + com.fasterxml.jackson.core:jackson-core:jar:2.8.11:compile + org.hdrhistogram:HdrHistogram:jar:2.1.10:compile (optional) + org.ow2.asm:asm-tree:jar:5.0.3:compile + org.lz4:lz4-java:jar:1.4.1:compile (optional) + io.netty:netty-transport:jar:4.0.56.Final:compile + io.dropwizard.metrics:metrics-core:jar:3.2.2:compile + io.netty:netty-common:jar:4.0.56.Final:compile + com.fasterxml.jackson.core:jackson-databind:jar:2.7.9.3:compile + org.slf4j:slf4j-api:jar:1.7.25:compile + io.netty:netty-transport-native-epoll:jar:4.0.56.Final:compile (optional) + org.ow2.asm:asm-analysis:jar:5.0.3:compile + com.github.jnr:jnr-x86asm:jar:1.0.2:compile + io.netty:netty-codec:jar:4.0.56.Final:compile + org.json:json:jar:20090211:compile + com.github.jnr:jffi:jar:native:1.2.16:runtime \ No newline at end of 
file diff --git a/core/src/test/resources/logback-test.xml b/core/src/test/resources/logback-test.xml index 1aa52f9527d..52c8cca374d 100644 --- a/core/src/test/resources/logback-test.xml +++ b/core/src/test/resources/logback-test.xml @@ -3,28 +3,19 @@ Copyright DataStax, Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + This software can be used solely with DataStax Enterprise. Please consult the license at + http://www.datastax.com/terms/datastax-dse-driver-license-terms --> - - - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java new file mode 100644 index 00000000000..1270b0d26c0 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import org.junit.ClassRule; +import org.junit.Test; + +@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +public class DseGssApiAuthProviderAlternateIT { + @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(true); + + @Test + public void + should_authenticate_using_kerberos_with_keytab_and_alternate_service_principal_using_system_property() { + System.setProperty("dse.sasl.service", "alternate"); + try (DseSession session = + SessionUtils.newSession( + ads.getCcm(), + SessionUtils.configLoaderBuilder() + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DseGssApiAuthProvider.class) + .withStringMap( + DseDriverOption.AUTH_PROVIDER_SASL_PROPERTIES, + ImmutableMap.of("javax.security.sasl.qop", "auth-conf")) + .withStringMap( + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, + ImmutableMap.of( + "principal", + ads.getUserPrincipal(), + "useKeyTab", + "true", + "refreshKrb5Config", + "true", + "keyTab", + ads.getUserKeytab().getAbsolutePath())) + .build())) { + Row row = session.execute("select * from system.local").one(); + assertThat(row).isNotNull(); + } finally { + System.clearProperty("dse.sasl.service"); + } + } + + @Test + public void 
should_authenticate_using_kerberos_with_keytab_and_alternate_service_principal() { + try (DseSession session = + SessionUtils.newSession( + ads.getCcm(), + SessionUtils.configLoaderBuilder() + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DseGssApiAuthProvider.class) + .withString(DseDriverOption.AUTH_PROVIDER_SERVICE, "alternate") + .withStringMap( + DseDriverOption.AUTH_PROVIDER_SASL_PROPERTIES, + ImmutableMap.of("javax.security.sasl.qop", "auth-conf")) + .withStringMap( + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, + ImmutableMap.of( + "principal", + ads.getUserPrincipal(), + "useKeyTab", + "true", + "refreshKrb5Config", + "true", + "keyTab", + ads.getUserKeytab().getAbsolutePath())) + .build())) { + Row row = session.execute("select * from system.local").one(); + assertThat(row).isNotNull(); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java new file mode 100644 index 00000000000..9acb71ca26b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import static com.datastax.dse.driver.api.core.auth.KerberosUtils.acquireTicket; +import static com.datastax.dse.driver.api.core.auth.KerberosUtils.destroyTicket; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import org.junit.Assume; +import org.junit.ClassRule; +import org.junit.Test; + +@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +public class DseGssApiAuthProviderIT { + + @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(); + + /** + * Ensures that a Session can be established to a DSE server secured with Kerberos and that simple + * queries can be made using a client configuration that provides a keytab file. + */ + @Test + public void should_authenticate_using_kerberos_with_keytab() { + try (DseSession session = ads.newKeyTabSession()) { + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + } + + /** + * Ensures that a Session can be established to a DSE server secured with Kerberos and that simple + * queries can be made using a client configuration that uses the ticket cache. This test will + * only run on unix platforms since it uses kinit to acquire tickets and kdestroy to destroy them. 
+ */ + @Test + public void should_authenticate_using_kerberos_with_ticket() throws Exception { + String osName = System.getProperty("os.name", "").toLowerCase(); + boolean isUnix = osName.contains("mac") || osName.contains("darwin") || osName.contains("nux"); + Assume.assumeTrue(isUnix); + acquireTicket(ads.getUserPrincipal(), ads.getUserKeytab(), ads.getAdsServer()); + try (DseSession session = ads.newTicketSession()) { + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } finally { + destroyTicket(ads); + } + } + + /** + * Validates that an AllNodesFailedException is thrown when using a ticket-based configuration and + * no such ticket exists in the user's cache. This is expected because we shouldn't be able to + * establish connection to a cassandra node if we cannot authenticate. + * + * @test_category dse:authentication + */ + @SuppressWarnings("unused") + @Test + public void should_not_authenticate_if_no_ticket_in_cache() { + try (DseSession session = ads.newTicketSession()) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException e) { + assertThat(e.getErrors().size()).isEqualTo(1); + for (Throwable t : e.getErrors().values()) { + assertThat(t).isInstanceOf(AuthenticationException.class); + } + } + } + + /** + * Validates that an AllNodesFailedException is thrown when using a keytab-based configuration and + * no such user exists for the given principal. This is expected because we shouldn't be able to + * establish connection to a cassandra node if we cannot authenticate. 
+ * + * @test_category dse:authentication + */ + @SuppressWarnings("unused") + @Test + public void should_not_authenticate_if_keytab_does_not_map_to_valid_principal() { + try (DseSession session = + ads.newKeyTabSession(ads.getUnknownPrincipal(), ads.getUnknownKeytab().getAbsolutePath())) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException e) { + assertThat(e.getErrors().size()).isEqualTo(1); + for (Throwable t : e.getErrors().values()) { + assertThat(t).isInstanceOf(AuthenticationException.class); + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java new file mode 100644 index 00000000000..674e32b4391 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import java.util.concurrent.TimeUnit; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +public class DsePlainTextAuthProviderIT { + + @ClassRule + public static CustomCcmRule ccm = + CustomCcmRule.builder() + .withCassandraConfiguration( + "authenticator", "com.datastax.bdp.cassandra.auth.DseAuthenticator") + .withDseConfiguration("authentication_options.enabled", true) + .withDseConfiguration("authentication_options.default_scheme", "internal") + .withJvmArgs("-Dcassandra.superuser_setup_delay_ms=0") + .build(); + + @BeforeClass + public static void sleepForAuth() { + if (ccm.getCassandraVersion().compareTo(Version.V2_2_0) < 0) { + // Sleep for 1 second to allow C* auth to do its work. 
This is only needed for 2.1 + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + } + } + + @Test + public void should_connect_dse_plaintext_auth() { + try (DseSession session = + SessionUtils.newSession( + ccm, + SessionUtils.configLoaderBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + session.execute("select * from system.local"); + } + } + + @Test + public void should_connect_dse_plaintext_auth_programmatically() { + try (DseSession session = + DseSession.builder() + .addContactEndPoints(ccm.getContactPoints()) + .withAuthCredentials("cassandra", "cassandra") + .build()) { + session.execute("select * from system.local"); + } + } + + @SuppressWarnings("unused") + @Test + public void should_not_connect_with_invalid_credentials() { + try (CqlSession session = + SessionUtils.newSession( + ccm, + SessionUtils.configLoaderBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "NotARealPassword") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException e) { + assertThat(e.getErrors().size()).isEqualTo(1); + for (Throwable t : e.getErrors().values()) { + assertThat(t).isInstanceOf(AuthenticationException.class); + } + } + } + + @SuppressWarnings("unused") + @Test + public void should_not_connect_without_credentials() { + try (DseSession session = + SessionUtils.newSession( + ccm, + SessionUtils.configLoaderBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") + 
.withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + fail("Expected AllNodesFailedException"); + } catch (AllNodesFailedException e) { + assertThat(e.getErrors().size()).isEqualTo(1); + for (Throwable t : e.getErrors().values()) { + assertThat(t).isInstanceOf(AuthenticationException.class); + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java new file mode 100644 index 00000000000..e7fc9dd3f03 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -0,0 +1,253 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider; +import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.servererrors.UnauthorizedException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import org.junit.Before; +import org.junit.BeforeClass; +import 
org.junit.ClassRule; +import org.junit.Test; + +@DseRequirement(min = "5.1", description = "Required for DseAuthenticator with proxy") +public class DseProxyAuthenticationIT { + private static String bobPrincipal; + private static String charliePrincipal; + @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(); + + @BeforeClass + public static void addUsers() { + bobPrincipal = ads.addUserAndCreateKeyTab("bob", "bob"); + charliePrincipal = ads.addUserAndCreateKeyTab("charlie", "charlie"); + } + + @Before + public void setupRoles() { + + try (DseSession session = ads.newKeyTabSession()) { + session.execute("CREATE ROLE IF NOT EXISTS alice WITH PASSWORD = 'alice' AND LOGIN = FALSE"); + session.execute("CREATE ROLE IF NOT EXISTS ben WITH PASSWORD = 'ben' AND LOGIN = TRUE"); + session.execute("CREATE ROLE IF NOT EXISTS 'bob@DATASTAX.COM' WITH LOGIN = TRUE"); + session.execute( + "CREATE ROLE IF NOT EXISTS 'charlie@DATASTAX.COM' WITH PASSWORD = 'charlie' AND LOGIN = TRUE"); + session.execute("CREATE ROLE IF NOT EXISTS steve WITH PASSWORD = 'steve' AND LOGIN = TRUE"); + session.execute( + "CREATE KEYSPACE IF NOT EXISTS aliceks WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':'1'}"); + session.execute( + "CREATE TABLE IF NOT EXISTS aliceks.alicetable (key text PRIMARY KEY, value text)"); + session.execute("INSERT INTO aliceks.alicetable (key, value) VALUES ('hello', 'world')"); + session.execute("GRANT ALL ON KEYSPACE aliceks TO alice"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'ben'"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'bob@DATASTAX.COM'"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'steve'"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'charlie@DATASTAX.COM'"); + session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'ben'"); + session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'bob@DATASTAX.COM'"); + session.execute("GRANT 
PROXY.EXECUTE ON ROLE 'alice' TO 'steve'"); + session.execute("GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'charlie@DATASTAX.COM'"); + // ben and bob are allowed to login as alice, but not execute as alice. + // charlie and steve are allowed to execute as alice, but not login as alice. + } + } + /** + * Validates that a connection may be successfully made as user 'alice' using the credentials of a + * user 'ben' using {@link DsePlainTextAuthProvider} assuming ben has PROXY.LOGIN authorization on + * alice. + */ + @Test + public void should_allow_plain_text_authorized_user_to_login_as() { + try (DseSession session = + SessionUtils.newSession( + ads.ccm, + SessionUtils.configLoaderBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "alice") + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "ben") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "ben") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + ResultSet set = session.execute(select); + assertThat(set).isNotNull(); + } + } + + @Test + public void should_allow_plain_text_authorized_user_to_login_as_programmatically() { + try (DseSession session = + DseSession.builder() + .addContactEndPoints(ads.ccm.getContactPoints()) + .withAuthCredentials("ben", "ben", "alice") + .build()) { + session.execute("select * from system.local"); + } + } + + /** + * Validates that a connection may successfully made as user 'alice' using the credentials of a + * principal 'bob@DATASTAX.COM' using {@link DseGssApiAuthProvider} assuming 'bob@DATASTAX.COM' + * has PROXY.LOGIN authorization on alice. 
+ */ + @Test + public void should_allow_kerberos_authorized_user_to_login_as() { + try (DseSession session = + ads.newKeyTabSession( + bobPrincipal, ads.getKeytabForPrincipal(bobPrincipal).getAbsolutePath(), "alice")) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + ResultSet set = session.execute(select); + assertThat(set).isNotNull(); + } + } + + /** + * Validates that a connection does not succeed as user 'alice' using the credentials of a user + * 'steve' assuming 'steve' does not have PROXY.LOGIN authorization on alice. + */ + @Test + public void should_not_allow_plain_text_unauthorized_user_to_login_as() { + try (DseSession session = + SessionUtils.newSession( + ads.ccm, + SessionUtils.configLoaderBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "alice") + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "steve") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "steve") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + session.execute(select); + fail("Should have thrown AllNodesFailedException on login."); + } catch (AllNodesFailedException anfe) { + verifyException(anfe); + } + } + /** + * Validates that a connection does not succeed as user 'alice' using the credentials of a + * principal 'charlie@DATASTAX.COM' assuming 'charlie@DATASTAX.COM' does not have PROXY.LOGIN + * authorization on alice. 
+ */ + @Test + public void should_not_allow_kerberos_unauthorized_user_to_login_as() throws Exception { + try (DseSession session = + ads.newKeyTabSession( + charliePrincipal, + ads.getKeytabForPrincipal(charliePrincipal).getAbsolutePath(), + "alice")) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + session.execute(select); + fail("Should have thrown AllNodesFailedException on login."); + } catch (AllNodesFailedException anfe) { + verifyException(anfe); + } + } + /** + * Validates that a query may be successfully made as user 'alice' using a {@link DseSession} that + * is authenticated to user 'steve' using {@link DsePlainTextAuthProvider} assuming steve has + * PROXY.EXECUTE authorization on alice. + */ + @Test + public void should_allow_plain_text_authorized_user_to_execute_as() { + try (DseSession session = + SessionUtils.newSession( + ads.ccm, + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "steve") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "steve") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); + ResultSet set = session.execute(statementAsAlice); + assertThat(set).isNotNull(); + } + } + /** + * Validates that a query may be successfully made as user 'alice' using a {@link DseSession} that + * is authenticated to principal 'charlie@DATASTAX.COM' using {@link DseGssApiAuthProvider} + * assuming charlie@DATASTAX.COM has PROXY.EXECUTE authorization on alice. 
+ */ + @Test + public void should_allow_kerberos_authorized_user_to_execute_as() { + try (DseSession session = + ads.newKeyTabSession( + charliePrincipal, ads.getKeytabForPrincipal(charliePrincipal).getAbsolutePath())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); + ResultSet set = session.execute(statementAsAlice); + assertThat(set).isNotNull(); + } + } + /** + * Validates that a query may not be made as user 'alice' using a {@link DseSession} that is + * authenticated to user 'ben' if ben does not have PROXY.EXECUTE authorization on alice. + */ + @Test + public void should_not_allow_plain_text_unauthorized_user_to_execute_as() { + try (DseSession session = + SessionUtils.newSession( + ads.ccm, + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "ben") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "ben") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); + session.execute(statementAsAlice); + fail("Should have thrown UnauthorizedException on executeAs."); + } catch (UnauthorizedException ue) { + verifyException(ue, "ben"); + } + } + /** + * Validates that a query may not be made as user 'alice' using a {@link DseSession} that is + * authenticated to principal 'bob@DATASTAX.COM' using {@link DseGssApiAuthProvider} if + * bob@DATASTAX.COM does not have PROXY.EXECUTE authorization on alice. 
+ */ + @Test + public void should_not_allow_kerberos_unauthorized_user_to_execute_as() { + try (DseSession session = + ads.newKeyTabSession( + bobPrincipal, ads.getKeytabForPrincipal(bobPrincipal).getAbsolutePath())) { + SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); + SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); + session.execute(statementAsAlice); + fail("Should have thrown UnauthorizedException on executeAs."); + } catch (UnauthorizedException ue) { + verifyException(ue, "bob@DATASTAX.COM"); + } + } + + private void verifyException(AllNodesFailedException anfe) { + Throwable firstError = anfe.getErrors().values().iterator().next(); + assertThat(firstError).isInstanceOf(AuthenticationException.class); + assertThat(firstError.getMessage()) + .contains( + "Authentication error on node /127.0.0.1:9042: server replied 'Failed to login. Please re-try.'"); + } + + private void verifyException(UnauthorizedException ue, String user) { + assertThat(ue.getMessage()) + .contains( + String.format( + "Either '%s' does not have permission to execute queries as 'alice' " + + "or that role does not exist.", + user)); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java new file mode 100644 index 00000000000..6f42c05a997 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java @@ -0,0 +1,596 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import com.datastax.oss.driver.shaded.guava.common.io.Files; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.net.InetAddress; +import java.net.ServerSocket; +import java.net.UnknownHostException; +import java.nio.charset.Charset; +import java.util.Collections; +import java.util.Map; +import java.util.UUID; +import org.apache.directory.api.ldap.model.constants.SchemaConstants; +import org.apache.directory.api.ldap.model.constants.SupportedSaslMechanisms; +import org.apache.directory.api.ldap.model.csn.CsnFactory; +import org.apache.directory.api.ldap.model.entry.Entry; +import org.apache.directory.api.ldap.model.exception.LdapException; +import org.apache.directory.api.ldap.model.exception.LdapInvalidDnException; +import org.apache.directory.api.ldap.model.name.Dn; +import org.apache.directory.api.ldap.model.schema.SchemaManager; +import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager; +import org.apache.directory.server.constants.ServerDNConstants; +import org.apache.directory.server.core.DefaultDirectoryService; +import org.apache.directory.server.core.api.CacheService; +import org.apache.directory.server.core.api.DirectoryService; +import org.apache.directory.server.core.api.DnFactory; +import org.apache.directory.server.core.api.InstanceLayout; +import org.apache.directory.server.core.api.schema.SchemaPartition; +import org.apache.directory.server.core.kerberos.KeyDerivationInterceptor; +import 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition; +import org.apache.directory.server.core.partition.ldif.LdifPartition; +import org.apache.directory.server.core.shared.DefaultDnFactory; +import org.apache.directory.server.kerberos.KerberosConfig; +import org.apache.directory.server.kerberos.kdc.KdcServer; +import org.apache.directory.server.kerberos.shared.crypto.encryption.KerberosKeyFactory; +import org.apache.directory.server.kerberos.shared.keytab.Keytab; +import org.apache.directory.server.kerberos.shared.keytab.KeytabEntry; +import org.apache.directory.server.ldap.LdapServer; +import org.apache.directory.server.ldap.handlers.sasl.MechanismHandler; +import org.apache.directory.server.ldap.handlers.sasl.cramMD5.CramMd5MechanismHandler; +import org.apache.directory.server.ldap.handlers.sasl.digestMD5.DigestMd5MechanismHandler; +import org.apache.directory.server.ldap.handlers.sasl.gssapi.GssapiMechanismHandler; +import org.apache.directory.server.ldap.handlers.sasl.plain.PlainMechanismHandler; +import org.apache.directory.server.protocol.shared.transport.TcpTransport; +import org.apache.directory.server.protocol.shared.transport.UdpTransport; +import org.apache.directory.shared.kerberos.KerberosTime; +import org.apache.directory.shared.kerberos.codec.types.EncryptionType; +import org.apache.directory.shared.kerberos.components.EncryptionKey; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A convenience utility for running an Embedded Apache Directory Service with LDAP and optionally a + * Kerberos Key Distribution Server. By default listens for LDAP on 10389 and Kerberos on 60088. You + * can use something like Apache Directory Studio + * to verify the server is configured and running correctly by connecting to localhost:10389 with + * username 'uid=admin,ou=system' and password 'secret'. + * + *

      Note: This should only be used for development and testing purposes. + */ +public class EmbeddedAds { + + private static final Logger LOG = LoggerFactory.getLogger(EmbeddedAds.class); + + private final String dn; + + private final String realm; + + private int kdcPort; + + private int ldapPort; + + private final boolean kerberos; + + private InetAddress address; + + private String hostname; + + private File confDir; + + private volatile boolean isInit = false; + + private DirectoryService service; + + private LdapServer ldapServer; + + private KdcServer kdcServer; + + private Dn usersDN; + + private File krb5Conf; + + private EmbeddedAds( + String dn, + String realm, + String address, + int ldapPort, + boolean kerberos, + int kdcPort, + File confDir) { + this.dn = dn; + this.realm = realm; + try { + this.address = InetAddress.getByName(address); + } catch (UnknownHostException e) { + LOG.error("Failure resolving address '{}', falling back to loopback.", address, e); + this.address = InetAddress.getLoopbackAddress(); + } + this.hostname = this.address.getHostName().toLowerCase(); + this.ldapPort = ldapPort; + this.kerberos = kerberos; + this.kdcPort = kdcPort; + this.confDir = confDir; + } + + public void start() throws Exception { + if (isInit) { + return; + } + isInit = true; + File workDir = Files.createTempDir(); + // Set confDir = workDir if not defined. + if (confDir == null) { + confDir = workDir; + } + + if (kerberos) { + kdcPort = kdcPort != -1 ? kdcPort : findAvailablePort(60088); + + // Set system properties required for kerberos auth to work. Unfortunately admin_server + // cannot be expressed via System properties (like realm and kdc can), thus we must create a + // config file. + krb5Conf = createKrb5Conf(); + + System.setProperty("java.security.krb5.conf", krb5Conf.getAbsolutePath()); + // Useful options for debugging. 
+ // System.setProperty("sun.security.krb5.debug", "true"); + // System.setProperty("java.security.debug", "configfile,configparser,gssloginconfig"); + } + + // Initialize service and set its filesystem layout. + service = new DefaultDirectoryService(); + InstanceLayout layout = new InstanceLayout(workDir); + service.setInstanceLayout(layout); + + // Disable ChangeLog as we don't need change tracking. + service.getChangeLog().setEnabled(false); + // Denormalizes attribute DNs to be human readable, i.e uid=admin,ou=system instead of + // 0.9.2.3=admin,2.5=system) + service.setDenormalizeOpAttrsEnabled(true); + + // Create and init cache service which will be used for caching DNs, among other things. + CacheService cacheService = new CacheService(); + cacheService.initialize(layout); + + // Create and load SchemaManager which will create the default schema partition. + SchemaManager schemaManager = new DefaultSchemaManager(); + service.setSchemaManager(schemaManager); + schemaManager.loadAllEnabled(); + + // Create SchemaPartition from schema manager and load ldif from schema directory. + SchemaPartition schemaPartition = new SchemaPartition(schemaManager); + LdifPartition ldifPartition = new LdifPartition(schemaManager, service.getDnFactory()); + ldifPartition.setPartitionPath(new File(layout.getPartitionsDirectory(), "schema").toURI()); + schemaPartition.setWrappedPartition(ldifPartition); + service.setSchemaPartition(schemaPartition); + + // Create a DN factory which can be used to create and cache DNs. + DnFactory dnFactory = new DefaultDnFactory(schemaManager, cacheService.getCache("dnCache")); + service.setDnFactory(dnFactory); + + // Create mandatory system partition. This is used for storing server configuration. + JdbmPartition systemPartition = + createPartition("system", dnFactory.create(ServerDNConstants.SYSTEM_DN)); + service.setSystemPartition(systemPartition); + + // Now that we have a schema and system partition, start up the directory service. 
+ service.startup(); + + // Create partition where user, tgt and ldap principals will live. + Dn partitionDn = dnFactory.create(dn); + String dnName = partitionDn.getRdn().getValue().getString(); + JdbmPartition partition = createPartition(dnName, partitionDn); + + // Add a context entry so the partition can be referenced by entries. + Entry context = service.newEntry(partitionDn); + context.add("objectClass", "top", "domain", "extensibleObject"); + context.add(partitionDn.getRdn().getType(), dnName); + partition.setContextEntry(context); + service.addPartition(partition); + + // Create users domain. + usersDN = partitionDn.add(dnFactory.create("ou=users")); + Entry usersEntry = service.newEntry(usersDN); + usersEntry.add("objectClass", "organizationalUnit", "top"); + usersEntry.add("ou", "users"); + if (kerberos) { + usersEntry = kerberize(usersEntry); + } + service.getAdminSession().add(usersEntry); + + // Uncomment to allow to connect to ldap server without credentials for convenience. + // service.setAllowAnonymousAccess(true); + + startLdap(); + + // Create sasl and krbtgt principals and start KDC if kerberos is enabled. + if (kerberos) { + // Ticket Granting Ticket entry. + Dn tgtDN = usersDN.add(dnFactory.create("uid=krbtgt")); + String servicePrincipal = "krbtgt/" + realm + "@" + realm; + Entry tgtEntry = service.newEntry(tgtDN); + tgtEntry.add( + "objectClass", + "person", + "inetOrgPerson", + "top", + "krb5KDCEntry", + "uidObject", + "krb5Principal"); + tgtEntry.add("krb5KeyVersionNumber", "0"); + tgtEntry.add("krb5PrincipalName", servicePrincipal); + tgtEntry.add("uid", "krbtgt"); + tgtEntry.add("userPassword", "secret"); + tgtEntry.add("sn", "Service"); + tgtEntry.add("cn", "KDC Service"); + service.getAdminSession().add(kerberize(tgtEntry)); + + // LDAP SASL principal. 
+ String saslPrincipal = "ldap/" + hostname + "@" + realm; + ldapServer.setSaslPrincipal(saslPrincipal); + Dn ldapDN = usersDN.add(dnFactory.create("uid=ldap")); + Entry ldapEntry = service.newEntry(ldapDN); + ldapEntry.add( + "objectClass", + "top", + "person", + "inetOrgPerson", + "krb5KDCEntry", + "uidObject", + "krb5Principal"); + ldapEntry.add("krb5KeyVersionNumber", "0"); + ldapEntry.add("krb5PrincipalName", saslPrincipal); + ldapEntry.add("uid", "ldap"); + ldapEntry.add("userPassword", "secret"); + ldapEntry.add("sn", "Service"); + ldapEntry.add("cn", "LDAP Service"); + service.getAdminSession().add(kerberize(ldapEntry)); + + startKDC(servicePrincipal); + } + } + + public boolean isStarted() { + return this.isInit; + } + + private File createKrb5Conf() throws IOException { + File krb5Conf = new File(confDir, "krb5.conf"); + String config = + String.format( + "[libdefaults]%n" + + "default_realm = %s%n" + + "default_tgs_enctypes = aes128-cts-hmac-sha1-96 aes256-cts-hmac-sha1-96%n%n" + + "[realms]%n" + + "%s = {%n" + + " kdc = %s:%d%n" + + " admin_server = %s:%d%n" + + "}%n", + realm, realm, hostname, kdcPort, hostname, kdcPort); + + try (FileOutputStream fios = new FileOutputStream(krb5Conf)) { + PrintWriter pw = + new PrintWriter( + new BufferedWriter(new OutputStreamWriter(fios, Charset.defaultCharset()))); + pw.write(config); + pw.close(); + } + return krb5Conf; + } + + /** + * @return A specialized krb5.conf file that defines and defaults to the domain expressed by this + * server. + */ + public File getKrb5Conf() { + return krb5Conf; + } + + /** + * Adds a user with the given password and principal name and creates a keytab file for + * authenticating with that user's principal. + * + * @param user Username to login with (i.e. cassandra). + * @param password Password to authenticate with. + * @param principal Principal representing the server (i.e. cassandra@DATASTAX.COM). + * @return Generated keytab file for this user. 
+ */ + public File addUserAndCreateKeytab(String user, String password, String principal) + throws IOException, LdapException { + addUser(user, password, principal); + return createKeytab(user, password, principal); + } + + /** + * Creates a keytab file for authenticating with a given principal. + * + * @param user Username to login with (i.e. cassandra). + * @param password Password to authenticate with. + * @param principal Principal representing the server (i.e. cassandra@DATASTAX.COM). + * @return Generated keytab file for this user. + */ + public File createKeytab(String user, String password, String principal) throws IOException { + File keytabFile = new File(confDir, user + ".keytab"); + Keytab keytab = Keytab.getInstance(); + + KerberosTime timeStamp = new KerberosTime(System.currentTimeMillis()); + + Map keys = + KerberosKeyFactory.getKerberosKeys(principal, password); + + KeytabEntry keytabEntry = + new KeytabEntry( + principal, 0, timeStamp, (byte) 0, keys.get(EncryptionType.AES128_CTS_HMAC_SHA1_96)); + + keytab.setEntries(Collections.singletonList(keytabEntry)); + keytab.write(keytabFile); + return keytabFile; + } + + /** + * Adds a user with the given password, does not create necessary kerberos attributes. + * + * @param user Username to login with (i.e. cassandra). + * @param password Password to authenticate with. + */ + public void addUser(String user, String password) throws LdapException { + addUser(user, password, null); + } + + /** + * Adds a user with the given password and principal. If principal is specified and kerberos is + * enabled, user is created with the necessary attributes to authenticate with kerberos (entryCsn, + * entryUuid, etc.). + * + * @param user Username to login with (i.e. cassandra). + * @param password Password to authenticate with. + * @param principal Principal representing the server (i.e. cassandra@DATASTAX.COM). 
+ */ + public void addUser(String user, String password, String principal) throws LdapException { + Preconditions.checkState(isInit); + Dn userDN = usersDN.add("uid=" + user); + Entry userEntry = service.newEntry(userDN); + if (kerberos && principal != null) { + userEntry.add( + "objectClass", + "organizationalPerson", + "person", + "extensibleObject", + "inetOrgPerson", + "top", + "krb5KDCEntry", + "uidObject", + "krb5Principal"); + userEntry.add("krb5KeyVersionNumber", "0"); + userEntry.add("krb5PrincipalName", principal); + userEntry = kerberize(userEntry); + } else { + userEntry.add( + "objectClass", + "organizationalPerson", + "person", + "extensibleObject", + "inetOrgPerson", + "top", + "uidObject"); + } + userEntry.add("uid", user); + userEntry.add("sn", user); + userEntry.add("cn", user); + userEntry.add("userPassword", password); + service.getAdminSession().add(userEntry); + } + + /** Stops the server(s) if running. */ + public void stop() { + if (ldapServer != null) { + ldapServer.stop(); + } + if (kdcServer != null) { + kdcServer.stop(); + } + } + + /** @return The evaluated hostname that the server is listening with. */ + public String getHostname() { + return this.hostname; + } + + /** + * Adds attributes to the given Entry which will enable krb5key attributes to be added to them. + * + * @param entry Entry to add attributes to. + * @return The provided entry. + */ + private Entry kerberize(Entry entry) throws LdapException { + // Add csn and uuids for kerberos, this is needed to generate krb5keys. + entry.add(SchemaConstants.ENTRY_CSN_AT, new CsnFactory(0).newInstance().toString()); + entry.add(SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString()); + return entry; + } + + /** + * Creates a {@link JdbmPartition} with the given id and DN. + * + * @param id Id to create partition with. + * @param dn Distinguished Name to use to create partition. + * @return Created partition. 
+ */ + private JdbmPartition createPartition(String id, Dn dn) throws LdapInvalidDnException { + JdbmPartition partition = new JdbmPartition(service.getSchemaManager(), service.getDnFactory()); + partition.setId(id); + partition.setPartitionPath( + new File(service.getInstanceLayout().getPartitionsDirectory(), id).toURI()); + partition.setSuffixDn(dn); + partition.setSchemaManager(service.getSchemaManager()); + return partition; + } + + /** Starts the LDAP Server with SASL enabled. */ + private void startLdap() throws Exception { + // Create and start LDAP server. + ldapServer = new LdapServer(); + + // Enable SASL layer, this is useful with or without kerberos. + Map mechanismHandlerMap = Maps.newHashMap(); + mechanismHandlerMap.put(SupportedSaslMechanisms.PLAIN, new PlainMechanismHandler()); + mechanismHandlerMap.put(SupportedSaslMechanisms.CRAM_MD5, new CramMd5MechanismHandler()); + mechanismHandlerMap.put(SupportedSaslMechanisms.DIGEST_MD5, new DigestMd5MechanismHandler()); + // GSSAPI is required for kerberos. + mechanismHandlerMap.put(SupportedSaslMechanisms.GSSAPI, new GssapiMechanismHandler()); + ldapServer.setSaslMechanismHandlers(mechanismHandlerMap); + ldapServer.setSaslHost(hostname); + // Realms only used by DIGEST_MD5 and GSSAPI. + ldapServer.setSaslRealms(Collections.singletonList(realm)); + ldapServer.setSearchBaseDn(dn); + + ldapPort = ldapPort != -1 ? ldapPort : findAvailablePort(10389); + ldapServer.setTransports(new TcpTransport(address.getHostAddress(), ldapPort)); + ldapServer.setDirectoryService(service); + if (kerberos) { + // Add an interceptor to attach krb5keys to created principals. + KeyDerivationInterceptor interceptor = new KeyDerivationInterceptor(); + interceptor.init(service); + service.addLast(interceptor); + } + ldapServer.start(); + } + + /** + * Starts the Kerberos Key Distribution Server supporting AES128 using the given principal for the + * Ticket-granting ticket. + * + * @param servicePrincipal TGT principcal service. 
+ */ + private void startKDC(String servicePrincipal) throws Exception { + KerberosConfig config = new KerberosConfig(); + // We choose AES128_CTS_HMAC_SHA1_96 for our generated keytabs so we don't need JCE. + config.setEncryptionTypes(Sets.newHashSet(EncryptionType.AES128_CTS_HMAC_SHA1_96)); + config.setSearchBaseDn(dn); + config.setServicePrincipal(servicePrincipal); + + kdcServer = new KdcServer(config); + kdcServer.setDirectoryService(service); + + kdcServer.setTransports( + new TcpTransport(address.getHostAddress(), kdcPort), + new UdpTransport(address.getHostAddress(), kdcPort)); + kdcServer.start(); + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + + private String dn = "dc=datastax,dc=com"; + + private String realm = "DATASTAX.COM"; + + private boolean kerberos = false; + + private int kdcPort = -1; + + private int ldapPort = -1; + + private String address = "127.0.0.1"; + + private File confDir = null; + + private Builder() {} + + public EmbeddedAds build() { + return new EmbeddedAds(dn, realm, address, ldapPort, kerberos, kdcPort, confDir); + } + + /** + * Configures the base DN to create users under. Defaults to dc=datastax,dc=com. + */ + public Builder withBaseDn(String dn) { + this.dn = dn; + return this; + } + + /** Configures the realm to use for SASL and Kerberos. Defaults to DATASTAX.COM. */ + public Builder withRealm(String realm) { + this.realm = realm; + return this; + } + + /** + * Sets the directory where krb5.conf and generated keytabs are created. Defaults to current + * directory. + */ + public Builder withConfDir(File confDir) { + this.confDir = confDir; + return this; + } + + /** + * Configures the port to use for LDAP. Defaults to the first available port from 10389+. Must + * be greater than 0. + */ + public Builder withLdapPort(int port) { + Preconditions.checkArgument(port > 0); + this.ldapPort = port; + return this; + } + + /** + * Configures the port to use for Kerberos KDC. 
Defaults to the first available port from 60088+.
+ * Must be greater than 0.
+ */
+ public Builder withKerberos(int port) {
+ Preconditions.checkArgument(port > 0);
+ this.kdcPort = port;
+ return withKerberos();
+ }
+
+ /**
+ * Configures the server to run with a Kerberos KDC using the first available port from 60088+.
+ */
+ public Builder withKerberos() {
+ this.kerberos = true;
+ return this;
+ }
+
+ /**
+ * Configures the server to listen with the given address. Defaults to
+ * 127.0.0.1. You shouldn't need to change this.
+ */
+ public Builder withAddress(String address) {
+ this.address = address;
+ return this;
+ }
+ }
+
+ private static int findAvailablePort(int startingWith) {
+ IOException last = null;
+ for (int port = startingWith; port < startingWith + 100; port++) {
+ try {
+ ServerSocket s = new ServerSocket(port);
+ s.close();
+ return port;
+ } catch (IOException e) {
+ last = e;
+ }
+ }
+ // If for whatever reason a port could not be acquired throw the last encountered exception.
+ throw new RuntimeException("Could not acquire an available port", last);
+ }
+}
diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java
new file mode 100644
index 00000000000..eb8e18cf908
--- /dev/null
+++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java
@@ -0,0 +1,319 @@
+/*
+ * Copyright DataStax, Inc.
+ *
+ * This software can be used solely with DataStax Enterprise.
Please consult the license at
+ * http://www.datastax.com/terms/datastax-dse-driver-license-terms
+ */
+package com.datastax.dse.driver.api.core.auth;
+
+import com.datastax.dse.driver.api.core.DseSession;
+import com.datastax.dse.driver.api.core.config.DseDriverOption;
+import com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider;
+import com.datastax.oss.driver.api.core.Version;
+import com.datastax.oss.driver.api.core.config.DefaultDriverOption;
+import com.datastax.oss.driver.api.testinfra.DseRequirement;
+import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge;
+import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule;
+import com.datastax.oss.driver.api.testinfra.session.SessionUtils;
+import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap;
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.AssumptionViolatedException;
+import org.junit.rules.ExternalResource;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A testing rule that wraps the EmbeddedAds server, and ccmRule into one rule. This is needed
+ * because ccm needs to be aware of the kerberos server settings prior to its initialization.
+ */
+public class EmbeddedAdsRule extends ExternalResource {
+
+ private static final Logger LOG = LoggerFactory.getLogger(EmbeddedAdsRule.class);
+
+ public CustomCcmRule ccm;
+ // Realm for the KDC.
+ private final String realm = "DATASTAX.COM"; + private final String address = "127.0.0.1"; + + private final EmbeddedAds adsServer = + EmbeddedAds.builder().withKerberos().withRealm(realm).withAddress(address).build(); + + // Principal for DSE service ( = kerberos_options.service_principal) + private final String servicePrincipal = "dse/" + adsServer.getHostname() + "@" + realm; + + // A non-standard principal for DSE service, to test SASL protocol names + private final String alternateServicePrincipal = + "alternate/" + adsServer.getHostname() + "@" + realm; + + // Principal for the default cassandra user. + private final String userPrincipal = "cassandra@" + realm; + + // Principal for a user that doesn't exist. + private final String unknownPrincipal = "unknown@" + realm; + + // Keytabs to use for auth. + private static File userKeytab; + private static File unknownKeytab; + private static File dseKeytab; + private static File alternateKeytab; + private static Map customKeytabs = new HashMap<>(); + + private boolean alternate = false; + + public EmbeddedAdsRule(boolean alternate) { + this.alternate = alternate; + } + + public EmbeddedAdsRule() { + this(false); + } + + @Override + protected void before() { + try { + if (adsServer.isStarted()) { + return; + } + // Start ldap/kdc server. + adsServer.start(); + + // Create users and keytabs for the DSE principal and cassandra user. 
+ dseKeytab = adsServer.addUserAndCreateKeytab("dse", "dse", servicePrincipal); + alternateKeytab = + adsServer.addUserAndCreateKeytab("alternate", "alternate", alternateServicePrincipal); + userKeytab = adsServer.addUserAndCreateKeytab("cassandra", "cassandra", userPrincipal); + unknownKeytab = adsServer.createKeytab("unknown", "unknown", unknownPrincipal); + + String authenticationOptions = + "" + + "authentication_options:\n" + + " enabled: true\n" + + " default_scheme: kerberos\n" + + " other_schemes:\n" + + " - internal"; + + if (alternate) { + ccm = + CustomCcmRule.builder() + .withCassandraConfiguration( + "authorizer", "com.datastax.bdp.cassandra.auth.DseAuthorizer") + .withCassandraConfiguration( + "authenticator", "com.datastax.bdp.cassandra.auth.DseAuthenticator") + .withDseConfiguration("authorization_options.enabled", true) + .withDseConfiguration(authenticationOptions) + .withDseConfiguration("kerberos_options.qop", "auth-conf") + .withDseConfiguration( + "kerberos_options.keytab", getAlternateKeytab().getAbsolutePath()) + .withDseConfiguration( + "kerberos_options.service_principal", "alternate/_HOST@" + getRealm()) + .withJvmArgs( + "-Dcassandra.superuser_setup_delay_ms=0", + "-Djava.security.krb5.conf=" + getAdsServer().getKrb5Conf().getAbsolutePath()) + .build(); + } else { + ccm = + CustomCcmRule.builder() + .withCassandraConfiguration( + "authorizer", "com.datastax.bdp.cassandra.auth.DseAuthorizer") + .withCassandraConfiguration( + "authenticator", "com.datastax.bdp.cassandra.auth.DseAuthenticator") + .withDseConfiguration("authorization_options.enabled", true) + .withDseConfiguration(authenticationOptions) + .withDseConfiguration("kerberos_options.qop", "auth") + .withDseConfiguration("kerberos_options.keytab", getDseKeytab().getAbsolutePath()) + .withDseConfiguration( + "kerberos_options.service_principal", "dse/_HOST@" + getRealm()) + .withJvmArgs( + "-Dcassandra.superuser_setup_delay_ms=0", + "-Djava.security.krb5.conf=" + 
getAdsServer().getKrb5Conf().getAbsolutePath()) + .build(); + } + ccm.getCcmBridge().create(); + ccm.getCcmBridge().start(); + + } catch (Exception e) { + LOG.error("Unable to start ads server ", e); + } + } + + private Statement buildErrorStatement( + Version requirement, Version actual, String description, boolean lessThan) { + return new Statement() { + + @Override + public void evaluate() { + throw new AssumptionViolatedException( + String.format( + "Test requires %s %s %s but %s is configured. Description: %s", + lessThan ? "less than" : "at least", "DSE", requirement, actual, description)); + } + }; + } + + @Override + public Statement apply(Statement base, Description description) { + DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); + if (dseRequirement != null) { + if (!CcmBridge.DSE_ENABLEMENT) { + return new Statement() { + @Override + public void evaluate() { + throw new AssumptionViolatedException("Test Requires DSE but C* is configured."); + } + }; + } else { + Version dseVersion = CcmBridge.VERSION; + if (!dseRequirement.min().isEmpty()) { + Version minVersion = Version.parse(dseRequirement.min()); + if (minVersion.compareTo(dseVersion) > 0) { + return buildErrorStatement(dseVersion, dseVersion, dseRequirement.description(), false); + } + } + + if (!dseRequirement.max().isEmpty()) { + Version maxVersion = Version.parse(dseRequirement.max()); + + if (maxVersion.compareTo(dseVersion) <= 0) { + return buildErrorStatement(dseVersion, dseVersion, dseRequirement.description(), true); + } + } + } + } + return super.apply(base, description); + } + + @Override + protected void after() { + adsServer.stop(); + ccm.getCcmBridge().stop(); + } + + public DseSession newKeyTabSession(String userPrincipal, String keytabPath) { + return SessionUtils.newSession( + getCcm(), + SessionUtils.configLoaderBuilder() + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DseGssApiAuthProvider.class) + .withStringMap( + 
DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, + ImmutableMap.of( + "principal", + userPrincipal, + "useKeyTab", + "true", + "refreshKrb5Config", + "true", + "keyTab", + keytabPath)) + .build()); + } + + public DseSession newKeyTabSession(String userPrincipal, String keytabPath, String authId) { + return SessionUtils.newSession( + getCcm(), + SessionUtils.configLoaderBuilder() + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DseGssApiAuthProvider.class) + .withStringMap( + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, + ImmutableMap.of( + "principal", + userPrincipal, + "useKeyTab", + "true", + "refreshKrb5Config", + "true", + "keyTab", + keytabPath)) + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, authId) + .build()); + } + + public DseSession newKeyTabSession() { + return newKeyTabSession(getUserPrincipal(), getUserKeytab().getAbsolutePath()); + } + + public DseSession newTicketSession() { + return SessionUtils.newSession( + getCcm(), + SessionUtils.configLoaderBuilder() + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DseGssApiAuthProvider.class) + .withStringMap( + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, + ImmutableMap.of( + "principal", + userPrincipal, + "useTicketCache", + "true", + "refreshKrb5Config", + "true", + "renewTGT", + "true")) + .build()); + } + + public CustomCcmRule getCcm() { + return ccm; + } + + public String getRealm() { + return realm; + } + + public String getAddress() { + return address; + } + + public EmbeddedAds getAdsServer() { + return adsServer; + } + + public String getServicePrincipal() { + return servicePrincipal; + } + + public String getAlternateServicePrincipal() { + return alternateServicePrincipal; + } + + public String getUserPrincipal() { + return userPrincipal; + } + + public String getUnknownPrincipal() { + return unknownPrincipal; + } + + public File getUserKeytab() { + return userKeytab; + } + + public File getUnknownKeytab() { + return unknownKeytab; + } + + public File 
getDseKeytab() { + return dseKeytab; + } + + public File getAlternateKeytab() { + return alternateKeytab; + } + + public String addUserAndCreateKeyTab(String user, String password) { + String principal = user + "@" + realm; + try { + File keytabFile = adsServer.addUserAndCreateKeytab(user, password, principal); + customKeytabs.put(principal, keytabFile); + } catch (Exception e) { + LOG.error("Unable to add user and create keytab for " + user + " ", e); + } + return principal; + } + + public File getKeytabForPrincipal(String prinicipal) { + return customKeytabs.get(prinicipal); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java new file mode 100644 index 00000000000..3ba295b6fda --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java @@ -0,0 +1,49 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.auth; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.io.File; +import java.io.IOException; +import java.util.Map; +import org.apache.commons.exec.CommandLine; +import org.apache.commons.exec.DefaultExecutor; +import org.apache.commons.exec.Executor; + +public class KerberosUtils { + /** + * Executes the given command with KRB5_CONFIG environment variable pointing to the specialized + * config file for the embedded KDC server. 
+ */ + public static void executeCommand(String command, EmbeddedAds adsServer) throws IOException { + Map environmentMap = + ImmutableMap.builder() + .put("KRB5_CONFIG", adsServer.getKrb5Conf().getAbsolutePath()) + .build(); + CommandLine cli = CommandLine.parse(command); + Executor executor = new DefaultExecutor(); + int retValue = executor.execute(cli, environmentMap); + assertThat(retValue).isZero(); + } + + /** + * Acquires a ticket into the cache with the tgt using kinit command with the given principal and + * keytab file. + */ + public static void acquireTicket(String principal, File keytab, EmbeddedAds adsServer) + throws IOException { + executeCommand( + String.format("kinit -t %s -k %s", keytab.getAbsolutePath(), principal), adsServer); + } + + /** Destroys all tickets in the cache with given principal. */ + public static void destroyTicket(EmbeddedAdsRule ads) throws IOException { + executeCommand("kdestroy", ads.getAdsServer()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java new file mode 100644 index 00000000000..178b2915005 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -0,0 +1,687 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Collections; +import java.util.Iterator; +import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; 
+import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@DseRequirement( + min = "5.1.0", + description = "Continuous paging is only available from 5.1.0 onwards") +@Category(ParallelizableTests.class) +@RunWith(DataProviderRunner.class) +public class ContinuousPagingIT extends ContinuousPagingITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + Collections.singletonList(DseSessionMetric.CONTINUOUS_CQL_REQUESTS.getPath())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Collections.singletonList(DefaultNodeMetric.CQL_MESSAGES.getPath())) + .build()) + .build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setUp() { + initialize(sessionRule.session(), sessionRule.slowProfile()); + } + + /** + * Validates {@link ContinuousSession#executeContinuously(Statement)} with a variety of paging + * options and ensures in all cases the expected number of rows come back. 
+ * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + @UseDataProvider("pagingOptions") + public void should_execute_synchronously(Options options) { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + DriverExecutionProfile profile = options.asProfile(session); + ContinuousResultSet result = + session.executeContinuously(statement.setExecutionProfile(profile)); + int i = 0; + for (Row row : result) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + assertThat(i).isEqualTo(options.expectedRows); + validateMetrics(session); + } + + /** + * Validates {@link ContinuousSession#executeContinuously(Statement)} with a variety of paging + * options using a prepared statement and ensures in all cases the expected number of rows come + * back. + * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + @UseDataProvider("pagingOptions") + public void should_execute_prepared_statement_synchronously(Options options) { + DseSession session = sessionRule.session(); + DriverExecutionProfile profile = options.asProfile(session); + ContinuousResultSet result = + session.executeContinuously(prepared.bind(KEY).setExecutionProfile(profile)); + int i = 0; + for (Row row : result) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + assertThat(i).isEqualTo(options.expectedRows); + validateMetrics(session); + } + + /** + * Validates {@link ContinuousSession#executeContinuouslyAsync(Statement)} with a variety of + * paging options and ensures in all cases the expected number of rows come back and the expected + * number of pages are received. 
+ * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + @UseDataProvider("pagingOptions") + public void should_execute_asynchronously(Options options) { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + DriverExecutionProfile profile = options.asProfile(session); + PageStatistics stats = + CompletableFutures.getUninterruptibly( + session + .executeContinuouslyAsync(statement.setExecutionProfile(profile)) + .thenCompose(new AsyncContinuousPagingFunction())); + assertThat(stats.rows).isEqualTo(options.expectedRows); + assertThat(stats.pages).isEqualTo(options.expectedPages); + validateMetrics(session); + } + + /** + * Validates that continuous paging is resilient to a schema change being made in the middle of + * producing pages for the driver if the query was a simple statement. + * + *

      Adds a column 'b' after paging the first row in. This column should not be present in the + * in-flight queries' rows, but should be present for subsequent queries. + * + * @test_category queries + * @jira_ticket JAVA-1653 + * @since 1.2.0 + */ + @Test + public void simple_statement_paging_should_be_resilient_to_schema_change() { + DseSession session = sessionRule.session(); + SimpleStatement simple = SimpleStatement.newInstance("select * from test_prepare"); + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 1) + .withInt(DefaultDriverOption.REQUEST_TIMEOUT, 120000000); + ContinuousResultSet result = session.executeContinuously(simple.setExecutionProfile(profile)); + Iterator it = result.iterator(); + // First row should have a non-null values. + Row row0 = it.next(); + assertThat(row0.getString("k")).isNotNull(); + assertThat(row0.isNull("v")).isFalse(); + // Make schema change to add b, its metadata should NOT be present in subsequent rows. + DseSession schemaChangeSession = + SessionUtils.newSession( + ccmRule, + session.getKeyspace().orElseThrow(IllegalStateException::new), + SessionUtils.configLoaderBuilder() + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .build()); + SimpleStatement statement = + SimpleStatement.newInstance("ALTER TABLE test_prepare add b int") + .setExecutionProfile(sessionRule.slowProfile()); + schemaChangeSession.execute(statement); + schemaChangeSession.checkSchemaAgreement(); + while (it.hasNext()) { + // Each row should have a value for k and v, but b should not be present as it was not part + // of the original metadata. 
+ Row row = it.next(); + assertThat(row.getString("k")).isNotNull(); + assertThat(row.isNull("v")).isFalse(); + assertThat(row.getColumnDefinitions().contains("b")).isFalse(); + } + // Subsequent queries should contain b in metadata since its a new query. + result = session.executeContinuously(simple); + it = result.iterator(); + while (it.hasNext()) { + Row row = it.next(); + assertThat(row.getString("k")).isNotNull(); + assertThat(row.isNull("v")).isFalse(); + // b should be null, but present in metadata. + assertThat(row.isNull("b")).isTrue(); + assertThat(row.getColumnDefinitions().contains("b")).isTrue(); + } + } + + /** + * Validates that continuous paging is resilient to a schema change being made in the middle of + * producing pages for the driver if the query was prepared. + * + *

      Drops column 'v' after paging the first row in. This column should still be present in the + * in-flight queries' rows, but it's value should be null. The column should not be present in + * subsequent queries. + * + * @test_category queries + * @jira_ticket JAVA-1653 + * @since 1.2.0 + */ + @Test + public void prepared_statement_paging_should_be_resilient_to_schema_change() { + DseSession session = sessionRule.session(); + // Create table and prepare select * query against it. + session.execute("CREATE TABLE test_prep (k text PRIMARY KEY, v int)"); + for (int i = 0; i < 100; i++) { + session.execute(String.format("INSERT INTO test_prep (k, v) VALUES ('foo', %d)", i)); + } + PreparedStatement prepared = session.prepare("SELECT * FROM test_prep WHERE k = ?"); + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 1); + ContinuousResultSet result = + session.executeContinuously(prepared.bind("foo").setExecutionProfile(profile)); + Iterator it = result.iterator(); + // First row should have a non-null value for v. + Row row0 = it.next(); + assertThat(row0.getString("k")).isNotNull(); + assertThat(row0.isNull("v")).isFalse(); + // Make schema change to drop v, its metadata should be present, values will be null. + DseSession schemaChangeSession = + SessionUtils.newSession( + ccmRule, + session.getKeyspace().orElseThrow(IllegalStateException::new), + SessionUtils.configLoaderBuilder() + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .build()); + schemaChangeSession.execute("ALTER TABLE test_prep DROP v;"); + while (it.hasNext()) { + // Each row should have a value for k, v should still be present, but null since column was + // dropped. 
+ Row row = it.next(); + assertThat(row.getString("k")).isNotNull(); + if (ccmRule + .getDseVersion() + .orElseThrow(IllegalStateException::new) + .compareTo(Version.parse("6.0.0")) + >= 0) { + // DSE 6 only, v should be null here since dropped. + // Not reliable for 5.1 since we may have gotten page queued before schema changed. + assertThat(row.isNull("v")).isTrue(); + } + assertThat(row.getColumnDefinitions().contains("v")).isTrue(); + } + // Subsequent queries should lack v from metadata as it was dropped. + prepared = session.prepare("SELECT * FROM test_prep WHERE k = ?"); + result = session.executeContinuously(prepared.bind("foo").setExecutionProfile(profile)); + it = result.iterator(); + while (it.hasNext()) { + Row row = it.next(); + assertThat(row.getString("k")).isNotNull(); + assertThat(row.getColumnDefinitions().contains("v")).isFalse(); + } + } + + /** + * Validates that {@link ContinuousResultSet#cancel()} will cancel a continuous paging session by + * setting maxPagesPerSecond to 1 and sending a cancel immediately and ensuring the total number + * of rows iterated over is equal to the size of pageSize. + * + *

      Also validates that it is possible to resume the operation using the paging state, as + * described in the javadocs of {@link ContinuousResultSet#cancel()}. + * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + public void should_cancel_with_synchronous_paging() { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // create options and throttle at a page per second so + // cancel can go out before the next page is sent. + // Note that this might not be perfect if there are pauses + // in the JVM and cancel isn't sent soon enough. + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1); + ContinuousResultSet pagingResult = + session.executeContinuously(statement.setExecutionProfile(profile)); + pagingResult.cancel(); + int i = 0; + for (Row row : pagingResult) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + // Expect only 10 rows as paging was cancelled immediately. 
+ assertThat(i).isEqualTo(10); + // attempt to resume the operation from where we left + ByteBuffer pagingState = pagingResult.getExecutionInfo().getPagingState(); + ContinuousResultSet pagingResultResumed = + session.executeContinuously( + statement + .setExecutionProfile( + profile.withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0)) + .setPagingState(pagingState)); + for (Row row : pagingResultResumed) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + assertThat(i).isEqualTo(100); + } + + /** + * Validates that {@link ContinuousAsyncResultSet#cancel()} will cancel a continuous paging + * session by setting maxPagesPerSecond to 1 and sending a cancel after the first page is received + * and then ensuring that the future returned from {@link + * ContinuousAsyncResultSet#fetchNextPage()} fails. + * + *

      Also validates that it is possible to resume the operation using the paging state, as + * described in the javadocs of {@link ContinuousAsyncResultSet#cancel()}. + * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + public void should_cancel_with_asynchronous_paging() { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // create options and throttle at a page per second so + // cancel can go out before the next page is sent. + // Note that this might not be perfect if there are pauses + // in the JVM and cancel isn't sent soon enough. + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1); + CompletionStage future = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + ContinuousAsyncResultSet pagingResult = CompletableFutures.getUninterruptibly(future); + // Calling cancel on the previous result should cause the next future to timeout. + pagingResult.cancel(); + CompletionStage fetchNextPageFuture = pagingResult.fetchNextPage(); + try { + // Expect future to fail since it was cancelled. + CompletableFutures.getUninterruptibly(fetchNextPageFuture); + fail("Expected an execution exception since paging was cancelled."); + } catch (CancellationException e) { + assertThat(e) + .hasMessageContaining("Can't get more results") + .hasMessageContaining("query was cancelled"); + } + int i = 0; + for (Row row : pagingResult.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + // Expect only 10 rows as this is the defined page size. 
+ assertThat(i).isEqualTo(10); + // attempt to resume the operation from where we left + ByteBuffer pagingState = pagingResult.getExecutionInfo().getPagingState(); + future = + session.executeContinuouslyAsync( + statement + .setExecutionProfile( + profile.withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0)) + .setPagingState(pagingState)); + ContinuousAsyncResultSet pagingResultResumed; + do { + pagingResultResumed = CompletableFutures.getUninterruptibly(future); + for (Row row : pagingResultResumed.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + if (pagingResultResumed.hasMorePages()) { + future = pagingResultResumed.fetchNextPage(); + } + } while (pagingResultResumed.hasMorePages()); + // expect 10 more rows + assertThat(i).isEqualTo(100); + } + + /** + * Validates that {@link ContinuousAsyncResultSet#cancel()} will cancel a continuous paging + * session and current tracked {@link CompletionStage} tied to the paging session. + * + *

      Also validates that it is possible to resume the operation using the paging state, as + * described in the javadocs of {@link ContinuousAsyncResultSet#cancel()}. + * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + public void should_cancel_future_when_cancelling_previous_result() { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // create options and throttle at a page per second so + // cancel can go out before the next page is sent. + // Note that this might not be perfect if there are pauses + // in the JVM and cancel isn't sent soon enough. + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1); + CompletionStage future = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + ContinuousAsyncResultSet pagingResult = CompletableFutures.getUninterruptibly(future); + CompletionStage fetchNextPageFuture = pagingResult.fetchNextPage(); + // Calling cancel on the previous result should cause the current future to be cancelled. + pagingResult.cancel(); + assertThat(fetchNextPageFuture.toCompletableFuture().isCancelled()).isTrue(); + try { + // Expect future to be cancelled since the previous result was cancelled. + CompletableFutures.getUninterruptibly(fetchNextPageFuture); + fail("Expected a cancellation exception since previous result was cancelled."); + } catch (CancellationException ce) { + // expected + } + int i = 0; + for (Row row : pagingResult.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + // Expect only 10 rows as this is the defined page size. 
+ assertThat(i).isEqualTo(10); + // attempt to resume the operation from where we left + ByteBuffer pagingState = pagingResult.getExecutionInfo().getPagingState(); + future = + session.executeContinuouslyAsync( + statement + .setExecutionProfile( + profile.withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0)) + .setPagingState(pagingState)); + ContinuousAsyncResultSet pagingResultResumed; + do { + pagingResultResumed = CompletableFutures.getUninterruptibly(future); + for (Row row : pagingResultResumed.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + if (pagingResultResumed.hasMorePages()) { + future = pagingResultResumed.fetchNextPage(); + } + } while (pagingResultResumed.hasMorePages()); + // expect 10 more rows + assertThat(i).isEqualTo(100); + } + + /** + * Validates that {@link CompletableFuture#cancel(boolean)} will cancel a continuous paging + * session by setting maxPagesPerSecond to 1 and sending a cancel after the first page is received + * and then ensuring that the future returned from {@link + * ContinuousAsyncResultSet#fetchNextPage()} is cancelled. + * + *

      Also validates that it is possible to resume the operation using the paging state, as + * described in the javadocs of {@link ContinuousAsyncResultSet#cancel()}. + * + * @test_category queries + * @jira_ticket JAVA-1322 + * @since 1.2.0 + */ + @Test + public void should_cancel_when_future_is_cancelled() { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // create options and throttle at a page per second so + // cancel can go out before the next page is sent. + // Note that this might not be perfect if there are pauses + // in the JVM and cancel isn't sent soon enough. + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1); + CompletionStage future = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + ContinuousAsyncResultSet pagingResult = CompletableFutures.getUninterruptibly(future); + CompletableFuture fetchNextPageFuture = pagingResult.fetchNextPage().toCompletableFuture(); + fetchNextPageFuture.cancel(false); + assertThat(fetchNextPageFuture.isCancelled()).isTrue(); + try { + // Expect cancellation. + CompletableFutures.getUninterruptibly(fetchNextPageFuture); + fail("Expected a cancellation exception since future was cancelled."); + } catch (CancellationException ce) { + // expected + } + int i = 0; + for (Row row : pagingResult.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + // Expect only 10 rows as this is the defined page size. 
+ assertThat(i).isEqualTo(10); + // attempt to resume the operation from where we left + ByteBuffer pagingState = pagingResult.getExecutionInfo().getPagingState(); + future = + session.executeContinuouslyAsync( + statement + .setExecutionProfile( + profile.withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0)) + .setPagingState(pagingState)); + ContinuousAsyncResultSet pagingResultResumed; + do { + pagingResultResumed = CompletableFutures.getUninterruptibly(future); + for (Row row : pagingResultResumed.currentPage()) { + assertThat(row.getInt("v")).isEqualTo(i); + i++; + } + if (pagingResultResumed.hasMorePages()) { + future = pagingResultResumed.fetchNextPage(); + } + } while (pagingResultResumed.hasMorePages()); + // expect 10 more rows + assertThat(i).isEqualTo(100); + } + + /** + * Validates that a client-side timeout is correctly reported to the caller. + * + * @test_category queries + * @jira_ticket JAVA-1390 + * @since 1.2.0 + */ + @Test + public void should_time_out_when_server_does_not_produce_pages_fast_enough() throws Exception { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // Throttle server at a page per second and set client timeout much lower so that the client + // will experience a timeout. + // Note that this might not be perfect if there are pauses in the JVM and the timeout + // doesn't fire soon enough. 
+ DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1) + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofMillis(100)); + CompletionStage future = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + ContinuousAsyncResultSet pagingResult = CompletableFutures.getUninterruptibly(future); + try { + pagingResult.fetchNextPage().toCompletableFuture().get(); + fail("Expected a timeout"); + } catch (ExecutionException e) { + assertThat(e.getCause()) + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Timed out waiting for page 2"); + } + } + + /** + * Validates that the driver behaves appropriately when the client gets behind while paging rows + * in a continuous paging session. The driver should set autoread to false on the channel for that + * connection until the client consumes enough pages, at which point it will reenable autoread and + * continue reading. + * + *

      There is not really a direct way to verify that autoread is disabled, but delaying + * immediately after executing a continuous paging query should produce this effect. + * + * @test_category queries + * @jira_ticket JAVA-1375 + * @since 1.2.0 + */ + @Test + public void should_resume_reading_when_client_catches_up() { + DseSession session = sessionRule.session(); + SimpleStatement statement = + SimpleStatement.newInstance("SELECT * from test_autoread where k=?", KEY); + DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 100); + CompletionStage result = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + // Defer consuming of rows for a second, this should cause autoread to be disabled. + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + // Start consuming rows, this should cause autoread to be reenabled once we consume some pages. + PageStatistics stats = + CompletableFutures.getUninterruptibly( + result.thenCompose(new AsyncContinuousPagingFunction())); + // 20k rows in this table. + assertThat(stats.rows).isEqualTo(20000); + // 200 * 100 = 20k. + assertThat(stats.pages).isEqualTo(200); + } + + private static class PageStatistics { + int rows; + int pages; + + PageStatistics(int rows, int pages) { + this.rows = rows; + this.pages = pages; + } + } + + /** + * A function that when invoked, will return a transformed future with another {@link + * AsyncContinuousPagingFunction} wrapping {@link ContinuousAsyncResultSet#fetchNextPage()} if + * there are more pages, otherwise returns an immediate future that shares {@link PageStatistics} + * about how many rows were returned and how many pages were encountered. + * + *

      Note that if observe that data is not parsed in order this future fails with an Exception. + */ + private static class AsyncContinuousPagingFunction + implements Function> { + + private final int rowsSoFar; + + AsyncContinuousPagingFunction() { + this(0); + } + + AsyncContinuousPagingFunction(int rowsSoFar) { + this.rowsSoFar = rowsSoFar; + } + + @Override + public CompletionStage apply(ContinuousAsyncResultSet input) { + int rows = rowsSoFar; + // Iterate over page and ensure data is in order. + for (Row row : input.currentPage()) { + int v = row.getInt("v"); + if (v != rows) { + fail(String.format("Expected v == %d, got %d.", rows, v)); + } + rows++; + } + // If on last page, complete future, otherwise keep iterating. + if (!input.hasMorePages()) { + // DSE may send an empty page as it can't always know if it's done paging or not yet. + // See: CASSANDRA-8871. In this case, don't count this page. + int pages = rows == rowsSoFar ? input.pageNumber() - 1 : input.pageNumber(); + CompletableFuture future = new CompletableFuture<>(); + future.complete(new PageStatistics(rows, pages)); + return future; + } else { + return input.fetchNextPage().thenCompose(new AsyncContinuousPagingFunction(rows)); + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java new file mode 100644 index 00000000000..eb79035116f --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java @@ -0,0 +1,162 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.codahale.metrics.Timer; +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.tngtech.java.junit.dataprovider.DataProvider; +import java.util.UUID; + +public abstract class ContinuousPagingITBase { + + protected static final String KEY = "k"; + + static PreparedStatement prepared; + + protected static void initialize(DseSession session, DriverExecutionProfile slowProfile) { + session.execute( + SimpleStatement.newInstance("CREATE TABLE test (k text, v int, PRIMARY KEY (k, v))") + .setExecutionProfile(slowProfile)); + // Load enough rows to cause TCP Zero Window. Default window size is 65535 bytes, each row + // is at least 48 bytes, so it would take ~1365 enqueued rows to zero window. + // Conservatively load 20k rows. 
+ session.execute( + SimpleStatement.newInstance( + "CREATE TABLE test_autoread (k text, v int, v0 uuid, v1 uuid, PRIMARY KEY (k, v, v0))") + .setExecutionProfile(slowProfile)); + session.execute( + SimpleStatement.newInstance("CREATE TABLE test_prepare (k text PRIMARY KEY, v int)") + .setExecutionProfile(slowProfile)); + session.checkSchemaAgreement(); + prepared = session.prepare("SELECT v from test where k = ?"); + for (int i = 0; i < 100; i++) { + session.execute(String.format("INSERT INTO test (k, v) VALUES ('%s', %d)", KEY, i)); + } + int count = 0; + for (int i = 0; i < 200; i++) { + BatchStatement batch = BatchStatement.newInstance(DefaultBatchType.UNLOGGED); + for (int j = 0; j < 100; j++) { + batch = + batch.add( + SimpleStatement.newInstance( + "INSERT INTO test_autoread (k, v, v0, v1) VALUES (?, ?, ?, ?)", + KEY, + count++, + UUID.randomUUID(), + UUID.randomUUID())); + } + session.execute(batch); + } + for (int i = 0; i < 100; i++) { + session.execute(String.format("INSERT INTO test_prepare (k, v) VALUES ('%d', %d)", i, i)); + } + } + + @DataProvider(format = "%m[%p[0]]") + public static Object[][] pagingOptions() { + return new Object[][] { + // exact # of rows. + {new Options(100, false, 0, 0, 100, 1)}, + // # of rows - 1. + {new Options(99, false, 0, 0, 100, 2)}, + // # of rows / 2. + {new Options(50, false, 0, 0, 100, 2)}, + // # 1 row per page. + {new Options(1, false, 0, 0, 100, 100)}, + // 10 rows per page, 10 pages overall = 100 (exact). + {new Options(10, false, 10, 0, 100, 10)}, + // 10 rows per page, 9 pages overall = 90 (less than exact number of pages). + {new Options(10, false, 9, 0, 90, 9)}, + // 10 rows per page, 2 pages per second should take ~5secs. + {new Options(10, false, 0, 2, 100, 10)}, + // 8 bytes per page == 1 row per page as len(4) + int(4) for each row. + {new Options(8, true, 0, 0, 100, 100)}, + // 16 bytes per page == 2 rows page per page. + {new Options(16, true, 0, 0, 100, 50)}, + // 32 bytes per page == 4 rows per page. 
+ {new Options(32, true, 0, 0, 100, 25)} + }; + } + + protected void validateMetrics(DseSession session) { + Node node = session.getMetadata().getNodes().values().iterator().next(); + assertThat(session.getMetrics()).isPresent(); + Metrics metrics = session.getMetrics().get(); + assertThat(metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES)).isPresent(); + Timer messages = (Timer) metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES).get(); + assertThat(messages.getCount()).isGreaterThan(0); + assertThat(messages.getMeanRate()).isGreaterThan(0); + assertThat(metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS)).isPresent(); + Timer requests = + (Timer) metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS).get(); + assertThat(requests.getCount()).isGreaterThan(0); + assertThat(requests.getMeanRate()).isGreaterThan(0); + } + + public static class Options { + public int pageSize; + public boolean sizeInBytes; + public int maxPages; + public int maxPagesPerSecond; + public int expectedRows; + public int expectedPages; + + Options( + int pageSize, + boolean sizeInBytes, + int maxPages, + int maxPagesPerSecond, + int expectedRows, + int expectedPages) { + this.pageSize = pageSize; + this.sizeInBytes = sizeInBytes; + this.maxPages = maxPages; + this.maxPagesPerSecond = maxPagesPerSecond; + this.expectedRows = expectedRows; + this.expectedPages = expectedPages; + } + + public DriverExecutionProfile asProfile(DseSession session) { + return session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, pageSize) + .withBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES, sizeInBytes) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES, maxPages) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, maxPagesPerSecond); + } + + @Override + public String toString() { + return "pageSize=" + + pageSize + + ", sizeInBytes=" + + sizeInBytes + + ", maxPages=" + + 
maxPages + + ", maxPagesPerSecond=" + + maxPagesPerSecond + + ", expectedRows=" + + expectedRows + + ", expectedPages=" + + expectedPages; + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java new file mode 100644 index 00000000000..5bf2c3a2b86 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.continuous.reactive; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; 
+import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.reactivex.Flowable; +import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@DseRequirement( + min = "5.1.0", + description = "Continuous paging is only available from 5.1.0 onwards") +@Category(ParallelizableTests.class) +@RunWith(DataProviderRunner.class) +public class ContinuousPagingReactiveIT extends ContinuousPagingITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + Collections.singletonList(DseSessionMetric.CONTINUOUS_CQL_REQUESTS.getPath())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Collections.singletonList(DefaultNodeMetric.CQL_MESSAGES.getPath())) + .build()) + .build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setUp() { + initialize(sessionRule.session(), sessionRule.slowProfile()); + } + + @Test + @UseDataProvider("pagingOptions") + public void should_execute_reactively(Options options) { + DseSession session = sessionRule.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + DriverExecutionProfile profile = options.asProfile(session); + ContinuousReactiveResultSet rs = + session.executeContinuouslyReactive(statement.setExecutionProfile(profile)); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + 
assertThat(results).hasSize(options.expectedRows); + Set expectedExecInfos = new LinkedHashSet<>(); + for (int i = 0; i < results.size(); i++) { + ReactiveRow row = results.get(i); + assertThat(row.getInt("v")).isEqualTo(i); + expectedExecInfos.add(row.getExecutionInfo()); + } + + List execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + // DSE may send an empty page as it can't always know if it's done paging or not yet. + // See: CASSANDRA-8871. In this case, this page's execution info appears in + // rs.getExecutionInfos(), but is not present in expectedExecInfos since the page did not + // contain any rows. + assertThat(execInfos).containsAll(expectedExecInfos); + + List colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + ReactiveRow first = results.get(0); + assertThat(colDefs).hasSize(1).containsExactly(first.getColumnDefinitions()); + + List wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(first.wasApplied()); + + validateMetrics(session); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java new file mode 100644 index 00000000000..d9b32344c96 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java @@ -0,0 +1,300 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.reactivex.Flowable; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +@Category(ParallelizableTests.class) +public class DefaultReactiveResultSetIT { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = new 
DseSessionRuleBuilder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void initialize() { + DseSession session = sessionRule.session(); + session.execute("DROP TABLE IF EXISTS test_reactive_read"); + session.execute("DROP TABLE IF EXISTS test_reactive_write"); + session.checkSchemaAgreement(); + session.execute( + SimpleStatement.builder( + "CREATE TABLE test_reactive_read (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder( + "CREATE TABLE test_reactive_write (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.checkSchemaAgreement(); + for (int i = 0; i < 1000; i++) { + session.execute( + SimpleStatement.builder("INSERT INTO test_reactive_read (pk, cc, v) VALUES (0, ?, ?)") + .addPositionalValue(i) + .addPositionalValue(i) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + } + + @Before + public void truncateTables() throws Exception { + DseSession session = sessionRule.session(); + session.execute( + SimpleStatement.builder("TRUNCATE test_reactive_write") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + + @Test + @DataProvider( + value = {"1", "10", "100", "999", "1000", "1001", "2000"}, + format = "%m [page size %p[0]]") + public void should_retrieve_all_rows(int pageSize) { + DriverExecutionProfile profile = + sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, pageSize); + SimpleStatement statement = + SimpleStatement.builder("SELECT cc, v FROM test_reactive_read WHERE pk = 0") + .setExecutionProfile(profile) + .build(); + ReactiveResultSet rs = sessionRule.session().executeReactive(statement); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + 
assertThat(results.size()).isEqualTo(1000); + Set expectedExecInfos = new LinkedHashSet<>(); + for (int i = 0; i < results.size(); i++) { + ReactiveRow row = results.get(i); + assertThat(row.getColumnDefinitions()).isNotNull(); + assertThat(row.getExecutionInfo()).isNotNull(); + assertThat(row.wasApplied()).isTrue(); + assertThat(row.getInt("cc")).isEqualTo(i); + assertThat(row.getInt("v")).isEqualTo(i); + expectedExecInfos.add(row.getExecutionInfo()); + } + + List execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + // DSE may send an empty page as it can't always know if it's done paging or not yet. + // See: CASSANDRA-8871. In this case, this page's execution info appears in + // rs.getExecutionInfos(), but is not present in expectedExecInfos since the page did not + // contain any rows. + assertThat(execInfos).containsAll(expectedExecInfos); + + List colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + ReactiveRow first = results.get(0); + assertThat(colDefs).hasSize(1).containsExactly(first.getColumnDefinitions()); + + List wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(first.wasApplied()); + } + + @Test + public void should_write() { + SimpleStatement statement = + SimpleStatement.builder("INSERT INTO test_reactive_write (pk, cc, v) VALUES (?, ?, ?)") + .addPositionalValue(0) + .addPositionalValue(1) + .addPositionalValue(2) + .setExecutionProfile(sessionRule.slowProfile()) + .build(); + ReactiveResultSet rs = sessionRule.session().executeReactive(statement); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).isEmpty(); + + List execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + assertThat(execInfos).hasSize(1); + + List colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + 
assertThat(colDefs).hasSize(1).containsExactly(EmptyColumnDefinitions.INSTANCE); + + List wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(true); + } + + @Test + public void should_write_cas() { + SimpleStatement statement = + SimpleStatement.builder( + "INSERT INTO test_reactive_write (pk, cc, v) VALUES (?, ?, ?) IF NOT EXISTS") + .addPositionalValue(0) + .addPositionalValue(1) + .addPositionalValue(2) + .setExecutionProfile(sessionRule.slowProfile()) + .build(); + // execute statement for the first time: the insert should succeed and the server should return + // only one acknowledgement row with just the [applied] column = true + ReactiveResultSet rs = sessionRule.session().executeReactive(statement); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).hasSize(1); + ReactiveRow row = results.get(0); + assertThat(row.getExecutionInfo()).isNotNull(); + assertThat(row.getColumnDefinitions()).hasSize(1); + assertThat(row.wasApplied()).isTrue(); + assertThat(row.getBoolean("[applied]")).isTrue(); + + List execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + assertThat(execInfos).hasSize(1).containsExactly(row.getExecutionInfo()); + + List colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + assertThat(colDefs).hasSize(1).containsExactly(row.getColumnDefinitions()); + + List wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(row.wasApplied()); + + // re-execute same statement: server should return one row with data that failed to be inserted, + // with [applied] = false + rs = sessionRule.session().executeReactive(statement); + results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).hasSize(1); + row = results.get(0); + assertThat(row.getExecutionInfo()).isNotNull(); + 
assertThat(row.getColumnDefinitions()).hasSize(4); + assertThat(row.wasApplied()).isFalse(); + assertThat(row.getBoolean("[applied]")).isFalse(); + assertThat(row.getInt("pk")).isEqualTo(0); + assertThat(row.getInt("cc")).isEqualTo(1); + assertThat(row.getInt("v")).isEqualTo(2); + + execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + assertThat(execInfos).hasSize(1).containsExactly(row.getExecutionInfo()); + + colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + assertThat(colDefs).hasSize(1).containsExactly(row.getColumnDefinitions()); + + wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(row.wasApplied()); + } + + @Test + public void should_write_batch_cas() { + BatchStatement batch = createCASBatch(); + DseSession session = sessionRule.session(); + // execute batch for the first time: all inserts should succeed and the server should return + // only one acknowledgement row with just the [applied] column = true + ReactiveResultSet rs = session.executeReactive(batch); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).hasSize(1); + ReactiveRow row = results.get(0); + assertThat(row.getExecutionInfo()).isNotNull(); + assertThat(row.getColumnDefinitions()).hasSize(1); + assertThat(row.wasApplied()).isTrue(); + assertThat(row.getBoolean("[applied]")).isTrue(); + + List execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + assertThat(execInfos).hasSize(1).containsExactly(row.getExecutionInfo()); + + List colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + assertThat(colDefs).hasSize(1).containsExactly(row.getColumnDefinitions()); + + List wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(row.wasApplied()); + + // delete 5 out of 10 rows + 
partiallyDeleteInsertedRows(); + + // re-execute same statement: server should return 5 rows for the 5 failed inserts, each one + // with [applied] = false + rs = session.executeReactive(batch); + results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).hasSize(5); + for (int i = 0; i < 5; i++) { + row = results.get(i); + assertThat(row.getExecutionInfo()).isNotNull(); + assertThat(row.getColumnDefinitions()).hasSize(4); + assertThat(row.wasApplied()).isFalse(); + assertThat(row.getBoolean("[applied]")).isFalse(); + assertThat(row.getInt("pk")).isEqualTo(0); + assertThat(row.getInt("cc")).isEqualTo(i); + assertThat(row.getInt("v")).isEqualTo(i + 1); + } + + execInfos = + Flowable.fromPublisher(rs.getExecutionInfos()).toList().blockingGet(); + assertThat(execInfos).hasSize(1).containsExactly(row.getExecutionInfo()); + + colDefs = + Flowable.fromPublisher(rs.getColumnDefinitions()).toList().blockingGet(); + assertThat(colDefs).hasSize(1).containsExactly(row.getColumnDefinitions()); + + wasApplied = Flowable.fromPublisher(rs.wasApplied()).toList().blockingGet(); + assertThat(wasApplied).hasSize(1).containsExactly(row.wasApplied()); + } + + @NonNull + private static BatchStatement createCASBatch() { + // Build a batch with CAS operations on the same partition (conditional batch updates cannot + // span multiple partitions). + BatchStatementBuilder builder = BatchStatement.builder(DefaultBatchType.UNLOGGED); + SimpleStatement insert = + SimpleStatement.builder( + "INSERT INTO test_reactive_write (pk, cc, v) VALUES (0, ?, ?) 
IF NOT EXISTS") + .setExecutionProfile(sessionRule.slowProfile()) + .build(); + PreparedStatement preparedStatement = sessionRule.session().prepare(insert); + for (int i = 0; i < 10; i++) { + builder.addStatement(preparedStatement.bind(i, i + 1)); + } + return builder.build(); + } + + private static void partiallyDeleteInsertedRows() { + DseSession session = sessionRule.session(); + session.execute(" DELETE FROM test_reactive_write WHERE pk = 0 and cc = 5"); + session.execute(" DELETE FROM test_reactive_write WHERE pk = 0 and cc = 6"); + session.execute(" DELETE FROM test_reactive_write WHERE pk = 0 and cc = 7"); + session.execute(" DELETE FROM test_reactive_write WHERE pk = 0 and cc = 8"); + session.execute(" DELETE FROM test_reactive_write WHERE pk = 0 and cc = 9"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java new file mode 100644 index 00000000000..b230b09d6e3 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java @@ -0,0 +1,394 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchStatementBuilder; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.DefaultBatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import org.assertj.core.util.Preconditions; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category({ParallelizableTests.class}) +public abstract class GeometryIT { + + private final Class genericType; + private final T baseSample; + private final List sampleData; + private final SessionRule sessionRule; + + @SuppressWarnings("unchecked") 
+ GeometryIT(List sampleData, Class genericType, SessionRule sessionRule) { + Preconditions.checkArgument( + sampleData.size() >= 3, "Must be at least 3 samples, was given " + sampleData.size()); + this.baseSample = sampleData.get(0); + this.genericType = genericType; + this.sampleData = sampleData; + this.sessionRule = sessionRule; + } + + static void onTestContextInitialized(String cqlTypeName, SessionRule sessionRule) { + sessionRule + .session() + .execute( + SimpleStatement.builder(String.format("CREATE TYPE udt1 (g '%s')", cqlTypeName)) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + sessionRule + .session() + .execute( + SimpleStatement.builder( + String.format( + "CREATE TABLE tbl (k uuid PRIMARY KEY, g '%s', l list<'%s'>, s set<'%s'>, m0 map<'%s',int>, m1 map, t tuple<'%s','%s','%s'>, u frozen)", + cqlTypeName, + cqlTypeName, + cqlTypeName, + cqlTypeName, + cqlTypeName, + cqlTypeName, + cqlTypeName, + cqlTypeName)) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + sessionRule + .session() + .execute( + SimpleStatement.builder( + String.format("CREATE TABLE tblpk (k '%s' primary key, v int)", cqlTypeName)) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + sessionRule + .session() + .execute( + SimpleStatement.builder( + String.format( + "CREATE TABLE tblclustering (k0 int, k1 '%s', v int, primary key (k0, k1))", + cqlTypeName)) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + + private void validate(UUID key, String columnName, V expected, GenericType type) { + ResultSet result = + sessionRule + .session() + .execute( + SimpleStatement.builder( + String.format("SELECT k,%s FROM tbl where k =? 
", columnName)) + .addPositionalValue(key) + .build()); + Row row = result.iterator().next(); + assertThat(row.getUuid("k")).isEqualTo(key); + assertThat(row.get(columnName, type)).isEqualTo(expected); + assertThat(row.get(1, type)).isEqualTo(expected); + } + + private void validate(UUID key, T expected) { + validate(key, "g", expected, GenericType.of(genericType)); + } + + /** + * Validates that a given geometry value can be inserted into a column using codec.format() and + * verifies that it is stored correctly by retrieving it and ensuring it matches. + */ + @Test + public void should_insert_using_format() { + for (T expected : sampleData) { + + String val = null; + if (expected != null) { + TypeCodec codec = + sessionRule.session().getContext().getCodecRegistry().codecFor(expected); + val = codec.format(expected); + } + UUID key = Uuids.random(); + sessionRule + .session() + .execute(String.format("INSERT INTO tbl (k, g) VALUES (%s, %s)", key, val)); + validate(key, expected); + } + } + + /** + * Validates that a given geometry value can be inserted into a column by providing it as a simple + * statement parameter and verifies that it is stored correctly by retrieving it and ensuring it + * matches. + */ + @Test + public void should_insert_using_simple_statement_with_parameters() { + for (T expected : sampleData) { + UUID key = Uuids.random(); + sessionRule + .session() + .execute( + SimpleStatement.builder("INSERT INTO tbl (k, g) VALUES (?, ?)") + .addPositionalValues(key, expected) + .build()); + validate(key, expected); + } + } + /** + * Validates that a given geometry value can be inserted into a column by providing it as a bound + * parameter in a BoundStatement and verifies that it is stored correctly by retrieving it and + * ensuring it matches. 
+ */ + @Test + public void should_insert_using_prepared_statement_with_parameters() { + for (T expected : sampleData) { + UUID key = Uuids.random(); + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, g) values (?, ?)"); + BoundStatement bs = + prepared.boundStatementBuilder().setUuid(0, key).set(1, expected, genericType).build(); + sessionRule.session().execute(bs); + validate(key, expected); + } + } + /** + * Validates that geometry values can be inserted as a list and verifies that the list is stored + * correctly by retrieving it and ensuring it matches. + */ + @Test + public void should_insert_as_list() { + UUID key = Uuids.random(); + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, l) values (?, ?)"); + + BoundStatement bs = + prepared + .boundStatementBuilder() + .setUuid(0, key) + .setList(1, sampleData, genericType) + .build(); + sessionRule.session().execute(bs); + validate(key, "l", sampleData, GenericType.listOf(genericType)); + } + /** + * Validates that geometry values can be inserted as a set and verifies that the set is stored + * correctly by retrieving it and ensuring it matches. + */ + @Test + public void should_insert_as_set() { + UUID key = Uuids.random(); + Set asSet = Sets.newHashSet(sampleData); + + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, s) values (?, ?)"); + BoundStatement bs = + prepared.boundStatementBuilder().setUuid(0, key).setSet(1, asSet, genericType).build(); + + sessionRule.session().execute(bs); + validate(key, "s", asSet, GenericType.setOf(genericType)); + } + + /** + * Validates that geometry values can be inserted into a map as keys and verifies that the map is + * stored correctly by retrieving it and ensuring it matches. 
+ */ + @Test + public void should_insert_as_map_keys() { + UUID key = Uuids.random(); + ImmutableMap.Builder builder = ImmutableMap.builder(); + int count = 0; + for (T val : sampleData) { + builder = builder.put(val, count++); + } + Map asMapKeys = builder.build(); + + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, m0) values (?, ?)"); + BoundStatement bs = + prepared + .boundStatementBuilder() + .setUuid(0, key) + .setMap(1, asMapKeys, genericType, Integer.class) + .build(); + sessionRule.session().execute(bs); + validate(key, "m0", asMapKeys, GenericType.mapOf(genericType, Integer.class)); + } + + /** + * Validates that geometry values can be inserted into a map as values and verifies that the map + * is stored correctly by retrieving it and ensuring it matches. + */ + @Test + public void should_insert_as_map_values() { + UUID key = Uuids.random(); + ImmutableMap.Builder builder = ImmutableMap.builder(); + int count = 0; + for (T val : sampleData) { + builder = builder.put(count++, val); + } + Map asMapValues = builder.build(); + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, m1) values (?, ?)"); + BoundStatement bs = + prepared + .boundStatementBuilder() + .setUuid(0, key) + .setMap(1, asMapValues, Integer.class, genericType) + .build(); + sessionRule.session().execute(bs); + validate(key, "m1", asMapValues, GenericType.mapOf(Integer.class, genericType)); + } + + /** + * Validates that geometry values can be inserted as a tuple and verifies that the tuple is stored + * correctly by retrieving it and ensuring it matches. 
+ */ + @Test + @Ignore + public void should_insert_as_tuple() { + UUID key = Uuids.random(); + + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, t) values (?, ?)"); + TupleType tupleType = (TupleType) prepared.getVariableDefinitions().get(1).getType(); + TupleValue tuple = tupleType.newValue(); + tuple = tuple.set(0, sampleData.get(0), genericType); + tuple = tuple.set(1, sampleData.get(1), genericType); + tuple = tuple.set(2, sampleData.get(2), genericType); + BoundStatement bs = + prepared.boundStatementBuilder().setUuid(0, key).setTupleValue(1, tuple).build(); + sessionRule.session().execute(bs); + ResultSet rs = + sessionRule + .session() + .execute( + SimpleStatement.builder("SELECT k,t FROM tbl where k=?") + .addPositionalValues(key) + .build()); + Row row = rs.iterator().next(); + assertThat(row.getUuid("k")).isEqualTo(key); + assertThat(row.getTupleValue("t")).isEqualTo(tuple); + assertThat(row.getTupleValue(1)).isEqualTo(tuple); + } + /** + * Validates that a geometry value can be inserted as a field in a UDT and verifies that the UDT + * is stored correctly by retrieving it and ensuring it matches. 
+ */ + @Test + @Ignore + public void should_insert_as_field_in_udt() { + UUID key = Uuids.random(); + UserDefinedType udtType = + sessionRule + .session() + .getMetadata() + .getKeyspace(sessionRule.session().getKeyspace().orElseThrow(AssertionError::new)) + .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("udt1"))) + .orElseThrow(AssertionError::new); + assertThat(udtType).isNotNull(); + UdtValue value = udtType.newValue(); + value = value.set("g", sampleData.get(0), genericType); + + PreparedStatement prepared = + sessionRule.session().prepare("INSERT INTO tbl (k, u) values (?, ?)"); + BoundStatement bs = + prepared.boundStatementBuilder().setUuid(0, key).setUdtValue(1, value).build(); + sessionRule.session().execute(bs); + + ResultSet rs = + sessionRule + .session() + .execute( + SimpleStatement.builder("SELECT k,u FROM tbl where k=?") + .addPositionalValues(key) + .build()); + Row row = rs.iterator().next(); + assertThat(row.getUuid("k")).isEqualTo(key); + assertThat(row.getUdtValue("u")).isEqualTo(value); + assertThat(row.getUdtValue(1)).isEqualTo(value); + } + + /** + * Validates that a geometry value can be inserted into a column that is the partition key and + * then validates that it can be queried back by partition key. + */ + @Test + public void should_accept_as_partition_key() { + sessionRule + .session() + .execute( + SimpleStatement.builder("INSERT INTO tblpk (k, v) VALUES (?,?)") + .addPositionalValues(baseSample, 1) + .build()); + ResultSet results = sessionRule.session().execute("SELECT k,v FROM tblpk"); + Row row = results.one(); + T key = row.get("k", genericType); + assertThat(key).isEqualTo(baseSample); + } + + /** + * Validates that geometry values can be inserted into a column that is a clustering key in rows + * sharing a partition key and then validates that the rows can be retrieved by partition key. 
+ * + * @test_category dse:geospatial + */ + @Test + public void should_accept_as_clustering_key() { + PreparedStatement insert = + sessionRule.session().prepare("INSERT INTO tblclustering (k0, k1, v) values (?,?,?)"); + BatchStatementBuilder batchbuilder = BatchStatement.builder(DefaultBatchType.UNLOGGED); + + int count = 0; + for (T value : sampleData) { + BoundStatement bound = + insert + .boundStatementBuilder() + .setInt(0, 0) + .set(1, value, genericType) + .setInt(2, count++) + .build(); + batchbuilder.addStatement(bound); + } + sessionRule.session().execute(batchbuilder.build()); + + ResultSet result = + sessionRule + .session() + .execute( + SimpleStatement.builder("SELECT * from tblclustering where k0=?") + .addPositionalValue(0) + .build()); + + // The order of rows returned is not significant for geospatial types since it is stored in + // lexicographic byte order (8 bytes at a time). Thus we pull them all sort and extract and + // ensure all values were returned. + List rows = result.all(); + + assertThat(rows) + .extracting(row -> row.get("k1", genericType)) + .containsOnlyElementsOf(sampleData) + .hasSameSizeAs(sampleData); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java new file mode 100644 index 00000000000..0f4b7fac931 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.List; +import java.util.UUID; +import org.assertj.core.util.Lists; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0") +public class LineStringIT extends GeometryIT { + + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = new DseSessionRuleBuilder(ccm).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + private static final String LINE_STRING_TYPE = "LineStringType"; + + public LineStringIT() { + super( + Lists.newArrayList( + LineString.fromPoints(Point.fromCoordinates(0, 10), Point.fromCoordinates(10, 0)), + LineString.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)), + LineString.fromPoints( + Point.fromCoordinates(-5, 0), + Point.fromCoordinates(0, 10), + Point.fromCoordinates(10, 5))), + LineString.class, + sessionRule); + } + + @BeforeClass + public static void initialize() { + onTestContextInitialized(LINE_STRING_TYPE, sessionRule); + } + + @Test + public void should_insert_and_retrieve_empty_linestring() 
{ + LineString empty = LineString.fromWellKnownText("LINESTRING EMPTY"); + UUID key = Uuids.random(); + sessionRule + .session() + .execute( + SimpleStatement.builder("INSERT INTO tbl (k, g) VALUES (?, ?)") + .addPositionalValues(key, empty) + .build()); + + ResultSet result = + sessionRule + .session() + .execute( + SimpleStatement.builder("SELECT g from tbl where k=?") + .addPositionalValues(key) + .build()); + Row row = result.iterator().next(); + List points = row.get("g", LineString.class).getPoints(); + assertThat(points.isEmpty()).isTrue(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java new file mode 100644 index 00000000000..1aebb0c1704 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.assertj.core.util.Lists; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0") +public class PointIT extends GeometryIT { + + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = new DseSessionRuleBuilder(ccm).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + private static final String POINT_TYPE = "PointType"; + + public PointIT() { + super( + Lists.newArrayList( + Point.fromCoordinates(-1.0, -5), + Point.fromCoordinates(0, 0), + Point.fromCoordinates(1.1, 2.2), + Point.fromCoordinates(Double.MIN_VALUE, 0), + Point.fromCoordinates(Double.MAX_VALUE, Double.MIN_VALUE)), + Point.class, + sessionRule); + } + + @BeforeClass + public static void initialize() { + onTestContextInitialized(POINT_TYPE, sessionRule); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java new file mode 100644 index 00000000000..556b76628f2 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java @@ -0,0 +1,111 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.geometry; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.UUID; +import org.assertj.core.util.Lists; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0") +public class PolygonIT extends GeometryIT { + + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = new DseSessionRuleBuilder(ccm).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + private static final String POLYGON_TYPE = "PolygonType"; + + private static Polygon squareInMinDomain = + Polygon.fromPoints( + Point.fromCoordinates(Double.MIN_VALUE, Double.MIN_VALUE), + Point.fromCoordinates(Double.MIN_VALUE, Double.MIN_VALUE + 1), + Point.fromCoordinates(Double.MIN_VALUE + 1, Double.MIN_VALUE + 1), + Point.fromCoordinates(Double.MIN_VALUE + 1, Double.MIN_VALUE)); + + private static Polygon triangle = + Polygon.fromPoints( + Point.fromCoordinates(-5, 10), + Point.fromCoordinates(5, 5), + Point.fromCoordinates(10, -5)); + + private static Polygon complexPolygon = + Polygon.builder() + .addRing( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + 
Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)) + .addRing( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)) + .addRing( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)) + .build(); + + public PolygonIT() { + super( + Lists.newArrayList(squareInMinDomain, complexPolygon, triangle), + Polygon.class, + sessionRule); + } + + @BeforeClass + public static void initialize() { + onTestContextInitialized(POLYGON_TYPE, sessionRule); + } + + /** + * Validates that an empty {@link Polygon} can be inserted and retrieved. + * + * @jira_ticket JAVA-1076 + * @test_category dse:graph + */ + @Test + public void should_insert_and_retrieve_empty_polygon() { + Polygon empty = Polygon.builder().build(); + UUID key = Uuids.random(); + sessionRule + .session() + .execute( + SimpleStatement.builder("INSERT INTO tbl (k, g) VALUES (?, ?)") + .addPositionalValues(key, empty) + .build()); + + ResultSet result = + sessionRule + .session() + .execute( + SimpleStatement.builder("SELECT g from tbl where k=?") + .addPositionalValues(key) + .build()); + Row row = result.iterator().next(); + assertThat(row.get("g", Polygon.class).getInteriorRings()).isEmpty(); + assertThat(row.get("g", Polygon.class).getExteriorRing()).isEmpty(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java new file mode 100644 index 00000000000..f83ca1c168c --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java @@ -0,0 +1,345 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.data.time; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.google.common.collect.Sets; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestName; +import org.junit.rules.TestRule; + +@Category({ParallelizableTests.class}) +@DseRequirement(min = "5.1") +public class DateRangeIT { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .build()) + .build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Rule public 
TestName testName = new TestName(); + + /** + * Validates that data can be retrieved by primary key where its primary key is a 'DateRangeType' + * column, and that the data returned properly parses into the expected {@link DateRange}. + */ + @Test + public void should_use_date_range_as_primary_key() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format("CREATE TABLE %s (k 'DateRangeType' PRIMARY KEY, v int)", tableName)); + session.execute( + String.format("INSERT INTO %s (k, v) VALUES ('[2010-12-03 TO 2010-12-04]', 1)", tableName)); + session.execute( + String.format( + "INSERT INTO %s (k, v) VALUES ('[2015-12-03T10:15:30.001Z TO 2016-01-01T00:05:11.967Z]', 2)", + tableName)); + + List rows = session.execute("SELECT * FROM " + tableName).all(); + + assertThat(rows).hasSize(2); + assertThat(rows.get(0).get("k", DateRange.class)) + .isEqualTo(DateRange.parse("[2010-12-03 TO 2010-12-04]")); + assertThat(rows.get(1).get("k", DateRange.class)) + .isEqualTo(DateRange.parse("[2015-12-03T10:15:30.001Z TO 2016-01-01T00:05:11.967Z]")); + + rows = + session + .execute( + String.format( + "SELECT * FROM %s WHERE k = '[2015-12-03T10:15:30.001Z TO 2016-01-01T00:05:11.967]'", + tableName)) + .all(); + assertThat(rows.size()).isEqualTo(1); + assertThat(rows.get(0).getInt("v")).isEqualTo(2); + } + + /** + * Validates that a 'DateRangeType' column can take a variety of {@link DateRange} inputs: + * + *

+   * <ol>
+   *   <li>Upper bound unbounded
+   *   <li>Lower bound unbounded
+   *   <li>Unbounded
+   *   <li>Bounded
+   *   <li>null
+   *   <li>unset
+   * </ol>
      + */ + @Test + public void should_store_date_range() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format("CREATE TABLE %s (k int PRIMARY KEY, v 'DateRangeType')", tableName)); + session.execute( + String.format( + "INSERT INTO %s (k, v) VALUES (1, '[2000-01-01T10:15:30.301Z TO *]')", tableName)); + session.execute( + String.format("INSERT INTO %s (k, v) VALUES (2, '[2000-02 TO 2000-03]')", tableName)); + session.execute(String.format("INSERT INTO %s (k, v) VALUES (3, '[* TO 2020]')", tableName)); + session.execute(String.format("INSERT INTO %s (k, v) VALUES (4, null)", tableName)); + session.execute(String.format("INSERT INTO %s (k) VALUES (5)", tableName)); + session.execute(String.format("INSERT INTO %s (k, v) VALUES (6, '*')", tableName)); + + List rows = session.execute("SELECT * FROM " + tableName).all(); + + assertThat(rows) + .extracting(input -> input.get("v", DateRange.class)) + .containsOnly( + DateRange.parse("[2000-01-01T10:15:30.301Z TO *]"), + DateRange.parse("[2000-02 TO 2000-03]"), + DateRange.parse("[* TO 2020]"), + null, + DateRange.parse("*")); + } + + /** + * Validates that if a provided {@link DateRange} for a 'DateRangeType' column has the bounds + * reversed (lower bound is later than upper bound), then an {@link InvalidQueryException} is + * thrown. 
+ */ + @Test + public void should_disallow_invalid_order() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format("CREATE TABLE %s (k int PRIMARY KEY, v 'DateRangeType')", tableName)); + + assertThatThrownBy( + () -> + session.execute( + String.format( + "INSERT INTO %s (k, v) " + + "VALUES (1, '[2020-01-01T10:15:30.009Z TO 2010-01-01T00:05:11.031Z]')", + tableName))) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("Wrong order: 2020-01-01T10:15:30.009Z TO 2010-01-01T00:05:11.031Z") + .hasMessageContaining( + "Could not parse date range: [2020-01-01T10:15:30.009Z TO 2010-01-01T00:05:11.031Z]"); + } + + /** Validates that {@link DateRange} can be used in UDT and Tuple types. */ + @Test + public void should_allow_date_range_in_udt_and_tuple() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute("CREATE TYPE IF NOT EXISTS test_udt (i int, range 'DateRangeType')"); + session.execute( + String.format( + "CREATE TABLE %s (k int PRIMARY KEY, u test_udt, uf frozen, " + + "t tuple<'DateRangeType', int>, tf frozen>)", + tableName)); + session.execute( + String.format( + "INSERT INTO %s (k, u, uf, t, tf) VALUES (" + + "1, " + + "{i: 10, range: '[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]'}, " + + "{i: 20, range: '[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]'}, " + + "('[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]', 30), " + + "('[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]', 40))", + tableName)); + + DateRange expected = DateRange.parse("[2000-01-01T10:15:30.003Z TO 2020-01-01T10:15:30.001Z]"); + + List rows = session.execute("SELECT * FROM " + tableName).all(); + assertThat(rows).hasSize(1); + + UdtValue u = rows.get(0).get("u", UdtValue.class); + DateRange dateRange = u.get("range", DateRange.class); + 
assertThat(dateRange).isEqualTo(expected); + assertThat(u.getInt("i")).isEqualTo(10); + + u = rows.get(0).get("uf", UdtValue.class); + dateRange = u.get("range", DateRange.class); + assertThat(dateRange).isEqualTo(expected); + assertThat(u.getInt("i")).isEqualTo(20); + + TupleValue t = rows.get(0).get("t", TupleValue.class); + dateRange = t.get(0, DateRange.class); + assertThat(dateRange).isEqualTo(expected); + assertThat(t.getInt(1)).isEqualTo(30); + + t = rows.get(0).get("tf", TupleValue.class); + dateRange = t.get(0, DateRange.class); + assertThat(dateRange).isEqualTo(expected); + assertThat(t.getInt(1)).isEqualTo(40); + } + + /** Validates that {@link DateRange} can be used in Collection types (Map, Set, List). */ + @Test + public void should_allow_date_range_in_collections() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format( + "CREATE TABLE %s (k int PRIMARY KEY, l list<'DateRangeType'>, s set<'DateRangeType'>, " + + "dr2i map<'DateRangeType', int>, i2dr map)", + tableName)); + session.execute( + String.format( + "INSERT INTO %s (k, l, s, i2dr, dr2i) VALUES (" + + "1, " + // l + + "['[2000-01-01T10:15:30.001Z TO 2020]', '[2010-01-01T10:15:30.001Z TO 2020]'," + + " '2001-01-02'], " + // s + + "{'[2000-01-01T10:15:30.001Z TO 2020]', '[2000-01-01T10:15:30.001Z TO 2020]', " + + "'[2010-01-01T10:15:30.001Z TO 2020]'}, " + // i2dr + + "{1: '[2000-01-01T10:15:30.001Z TO 2020]', " + + "2: '[2010-01-01T10:15:30.001Z TO 2020]'}, " + // dr2i + + "{'[2000-01-01T10:15:30.001Z TO 2020]': 1, " + + "'[2010-01-01T10:15:30.001Z TO 2020]': 2})", + tableName)); + + List rows = session.execute("SELECT * FROM " + tableName).all(); + assertThat(rows.size()).isEqualTo(1); + + List drList = rows.get(0).getList("l", DateRange.class); + assertThat(drList.size()).isEqualTo(3); + assertThat(drList.get(0)).isEqualTo(DateRange.parse("[2000-01-01T10:15:30.001Z TO 2020]")); + 
assertThat(drList.get(1)).isEqualTo(DateRange.parse("[2010-01-01T10:15:30.001Z TO 2020]")); + assertThat(drList.get(2)).isEqualTo(DateRange.parse("2001-01-02")); + + Set drSet = rows.get(0).getSet("s", DateRange.class); + assertThat(drSet.size()).isEqualTo(2); + assertThat(drSet) + .isEqualTo( + Sets.newHashSet( + DateRange.parse("[2000-01-01T10:15:30.001Z TO 2020]"), + DateRange.parse("[2010-01-01T10:15:30.001Z TO 2020]"))); + + Map dr2i = rows.get(0).getMap("dr2i", DateRange.class, Integer.class); + assertThat(dr2i.size()).isEqualTo(2); + assertThat((int) dr2i.get(DateRange.parse("[2000-01-01T10:15:30.001Z TO 2020]"))).isEqualTo(1); + assertThat((int) dr2i.get(DateRange.parse("[2010-01-01T10:15:30.001Z TO 2020]"))).isEqualTo(2); + + Map i2dr = rows.get(0).getMap("i2dr", Integer.class, DateRange.class); + assertThat(i2dr.size()).isEqualTo(2); + assertThat(i2dr.get(1)).isEqualTo(DateRange.parse("[2000-01-01T10:15:30.001Z TO 2020]")); + assertThat(i2dr.get(2)).isEqualTo(DateRange.parse("[2010-01-01T10:15:30.001Z TO 2020]")); + } + + /** + * Validates that a 'DateRangeType' column can take a {@link DateRange} inputs as a prepared + * statement parameter. 
+ */ + @Test + public void should_bind_date_range_in_prepared_statement() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format("CREATE TABLE %s (k int PRIMARY KEY, v 'DateRangeType')", tableName)); + PreparedStatement statement = + session.prepare(String.format("INSERT INTO %s (k,v) VALUES(?,?)", tableName)); + + DateRange expected = DateRange.parse("[2007-12-03 TO 2007-12]"); + session.execute(statement.bind(1, expected)); + List rows = session.execute("SELECT * FROM " + tableName).all(); + assertThat(rows.size()).isEqualTo(1); + DateRange actual = rows.get(0).get("v", DateRange.class); + assertThat(actual).isEqualTo(expected); + assertThat(actual.getLowerBound().getPrecision()).isEqualTo(DateRangePrecision.DAY); + assertThat(actual.getUpperBound()) + .hasValueSatisfying( + upperBound -> + assertThat(upperBound.getPrecision()).isEqualTo(DateRangePrecision.MONTH)); + assertThat(actual.toString()).isEqualTo("[2007-12-03 TO 2007-12]"); + + expected = DateRange.parse("[* TO *]"); + session.execute(statement.bind(1, expected)); + rows = session.execute("SELECT * FROM " + tableName).all(); + assertThat(rows.size()).isEqualTo(1); + actual = rows.get(0).get("v", DateRange.class); + assertThat(actual).isEqualTo(expected); + assertThat(actual.getLowerBound().isUnbounded()).isTrue(); + assertThat(actual.isSingleBounded()).isFalse(); + assertThat(actual.getUpperBound()) + .hasValueSatisfying(upperBound -> assertThat(upperBound.isUnbounded()).isTrue()); + assertThat(actual.toString()).isEqualTo("[* TO *]"); + + expected = DateRange.parse("*"); + session.execute(statement.bind(1, expected)); + rows = session.execute("SELECT * FROM " + tableName).all(); + assertThat(rows.size()).isEqualTo(1); + actual = rows.get(0).get("v", DateRange.class); + assertThat(actual).isEqualTo(expected); + assertThat(actual.getLowerBound().isUnbounded()).isTrue(); + 
assertThat(actual.isSingleBounded()).isTrue(); + assertThat(actual.toString()).isEqualTo("*"); + } + + /** + * Validates that 'DateRangeType' columns are retrievable using SELECT JSON queries + * and that their value representations match their input. + */ + @Test + public void should_select_date_range_using_json() throws Exception { + DseSession session = sessionRule.session(); + String tableName = testName.getMethodName(); + + session.execute( + String.format("CREATE TABLE %s (k int PRIMARY KEY, v 'DateRangeType')", tableName)); + PreparedStatement statement = + session.prepare(String.format("INSERT INTO %s (k,v) VALUES(?,?)", tableName)); + + DateRange expected = DateRange.parse("[2007-12-03 TO 2007-12]"); + session.execute(statement.bind(1, expected)); + List rows = session.execute("SELECT JSON * FROM " + tableName).all(); + assertThat(rows.get(0).getString(0)) + .isEqualTo("{\"k\": 1, \"v\": \"[2007-12-03 TO 2007-12]\"}"); + + expected = DateRange.parse("[* TO *]"); + session.execute(statement.bind(1, expected)); + rows = session.execute("SELECT JSON * FROM " + tableName).all(); + assertThat(rows.get(0).getString(0)).isEqualTo("{\"k\": 1, \"v\": \"[* TO *]\"}"); + + expected = DateRange.parse("*"); + session.execute(statement.bind(1, expected)); + rows = session.execute("SELECT JSON * FROM " + tableName).all(); + assertThat(rows.get(0).getString(0)).isEqualTo("{\"k\": 1, \"v\": \"*\"}"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java new file mode 100644 index 00000000000..96c56637e47 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import java.util.concurrent.TimeUnit; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") +public class GraphAuthenticationIT { + + @ClassRule + public static CustomCcmRule ccm = + CustomCcmRule.builder() + .withDseConfiguration("authentication_options.enabled", true) + .withJvmArgs("-Dcassandra.superuser_setup_delay_ms=0") + .withDseWorkloads("graph") + .build(); + + @BeforeClass + public static void sleepForAuth() { + if (ccm.getCassandraVersion().compareTo(Version.V2_2_0) < 0) { + // Sleep for 1 second to allow C* auth to do its work. 
This is only needed for 2.1 + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + } + } + + @Test + public void should_execute_graph_query_on_authenticated_connection() { + DseSession dseSession = + SessionUtils.newSession( + ccm, + DseDriverConfigLoader.programmaticBuilder() + .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") + .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") + .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .build()); + + GraphNode gn = + dseSession.execute(ScriptGraphStatement.newInstance("1+1").setSystemQuery(true)).one(); + assertThat(gn).isNotNull(); + assertThat(gn.asInt()).isEqualTo(2); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java new file mode 100644 index 00000000000..3f7f1c942f9 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java @@ -0,0 +1,165 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.shaded.guava.common.net.InetAddresses; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.AssumptionViolatedException; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public abstract class GraphDataTypeITBase { + + private static final boolean IS_DSE50 = CcmBridge.VERSION.compareTo(Version.parse("5.1")) < 0; + private static final Set TYPES_REQUIRING_DSE51 = + ImmutableSet.of("Date()", "Time()", "Point()", "Linestring()", "Polygon()"); + + private static AtomicInteger schemaCounter = new AtomicInteger(); + + @DataProvider + public static Object[][] typeSamples() { + return new Object[][] { + // Types that DSE supports. 
+ {"Boolean()", true}, + {"Boolean()", false}, + {"Smallint()", Short.MAX_VALUE}, + {"Smallint()", Short.MIN_VALUE}, + {"Smallint()", (short) 0}, + {"Smallint()", (short) 42}, + {"Int()", Integer.MAX_VALUE}, + {"Int()", Integer.MIN_VALUE}, + {"Int()", 0}, + {"Int()", 42}, + {"Bigint()", Long.MAX_VALUE}, + {"Bigint()", Long.MIN_VALUE}, + {"Bigint()", 0L}, + {"Double()", Double.MAX_VALUE}, + {"Double()", Double.MIN_VALUE}, + {"Double()", 0.0d}, + {"Double()", Math.PI}, + {"Float()", Float.MAX_VALUE}, + {"Float()", Float.MIN_VALUE}, + {"Float()", 0.0f}, + {"Text()", ""}, + {"Text()", "75"}, + {"Text()", "Lorem Ipsum"}, + // Inet, UUID, Date + {"Inet()", InetAddresses.forString("127.0.0.1")}, + {"Inet()", InetAddresses.forString("0:0:0:0:0:0:0:1")}, + {"Inet()", InetAddresses.forString("2001:db8:85a3:0:0:8a2e:370:7334")}, + {"Uuid()", UUID.randomUUID()}, + // Timestamps + {"Timestamp()", Instant.ofEpochMilli(123)}, + {"Timestamp()", Instant.ofEpochMilli(1488313909)}, + {"Duration()", java.time.Duration.parse("P2DT3H4M")}, + {"Date()", LocalDate.of(2016, 5, 12)}, + {"Time()", LocalTime.parse("18:30:41.554")}, + {"Time()", LocalTime.parse("18:30:41.554010034")}, + // Blob + {"Blob()", "Hello World!".getBytes(Charsets.UTF_8)}, + // BigDecimal/BigInteger + {"Decimal()", new BigDecimal("8675309.9998")}, + {"Varint()", new BigInteger("8675309")}, + // Geospatial types + {"Point().withBounds(-2, -2, 2, 2)", Point.fromCoordinates((double) 0, (double) 1)}, + {"Point().withBounds(-40, -40, 40, 40)", Point.fromCoordinates((double) -5, (double) 20)}, + { + "Linestring().withGeoBounds()", + LineString.fromPoints( + Point.fromCoordinates((double) 30, (double) 10), + Point.fromCoordinates((double) 10, (double) 30), + Point.fromCoordinates((double) 40, (double) 40)) + }, + { + "Polygon().withGeoBounds()", + Polygon.builder() + .addRing( + Point.fromCoordinates((double) 35, (double) 10), + Point.fromCoordinates((double) 45, (double) 45), + Point.fromCoordinates((double) 15, (double) 
40), + Point.fromCoordinates((double) 10, (double) 20), + Point.fromCoordinates((double) 35, (double) 10)) + .addRing( + Point.fromCoordinates((double) 20, (double) 30), + Point.fromCoordinates((double) 35, (double) 35), + Point.fromCoordinates((double) 30, (double) 20), + Point.fromCoordinates((double) 20, (double) 30)) + .build() + } + }; + } + + @UseDataProvider("typeSamples") + @Test + public void should_create_and_retrieve_vertex_property_with_correct_type( + String type, Object value) { + if (IS_DSE50 && requiresDse51(type)) { + throw new AssumptionViolatedException(type + " not supported in DSE " + CcmBridge.VERSION); + } + + int id = schemaCounter.getAndIncrement(); + + String vertexLabel = "vertex" + id; + String propertyName = "prop" + id; + GraphStatement addVertexLabelAndProperty = + ScriptGraphStatement.builder( + "schema.propertyKey(property)." + + type + + ".create()\n" + + "schema.vertexLabel(vertexLabel).properties(property).create()") + .setQueryParam("vertexLabel", vertexLabel) + .setQueryParam("property", propertyName) + .build(); + + session().execute(addVertexLabelAndProperty); + + Vertex v = insertVertexAndReturn(vertexLabel, propertyName, value); + + assertThat(v).hasProperty(propertyName, value); + } + + private boolean requiresDse51(String type) { + for (String prefix : TYPES_REQUIRING_DSE51) { + if (type.startsWith(prefix)) { + return true; + } + } + return false; + } + + public abstract Vertex insertVertexAndReturn( + String vertexLabel, String propertyName, Object value); + + /** + * Note that the {@link SessionRule} (and setupSchema method) must be redeclared in each subclass, + * since it depends on the CCM rule that can't be shared across serial tests. 
+ */ + public abstract DseSession session(); +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java new file mode 100644 index 00000000000..d741a478918 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java @@ -0,0 +1,257 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.ArrayList; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import 
org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +@RunWith(DataProviderRunner.class) +public class GraphGeoSearchIndexIT { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + /** + * A schema representing an address book with search enabled on name, description, and + * coordinates. + */ + public static Collection geoIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = new StringBuilder("schema.propertyKey('full_name').Text().create()\n"); + StringBuilder propertyKeys = new StringBuilder(""); + StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); + StringBuilder indices = new StringBuilder(""); + StringBuilder vertex0 = + new StringBuilder("g.addV('user').property('full_name', 'Paul Thomas Joe')"); + StringBuilder vertex1 = + new StringBuilder("g.addV('user').property('full_name', 'George Bill Steve')"); + String vertex2 = "g.addV('user').property('full_name', 'James Paul Joe')"; + StringBuilder vertex3 = new StringBuilder("g.addV('user').property('full_name', 'Jill Alice')"); + + ArrayList propertyNames = new ArrayList(); + propertyNames.add("'full_name'"); + + for (String indexType : indexTypes) { + + propertyKeys.append( + String.format( + 
"schema.propertyKey('pointPropWithBounds_%s').%s.create()\n", + indexType, geoTypeWithBounds("Point()", 0, 0, 100, 100))); + + propertyKeys.append( + String.format( + "schema.propertyKey('pointPropWithGeoBounds_%s').%s.create()\n", + indexType, geoType("Point()"))); + + propertyNames.add("'pointPropWithBounds_" + indexType + "'"); + propertyNames.add("'pointPropWithGeoBounds_" + indexType + "'"); + + if (indexType.equals("search")) { + + indices.append( + String.format( + "schema.vertexLabel('user').index('search').search().by('pointPropWithBounds_%s').withError(0.00001, 0.0).by('pointPropWithGeoBounds_%s').withError(0.00001, 0.0).add()\n", + indexType, indexType)); + } else { + + indices.append( + String.format( + "schema.vertexLabel('user').index('by_pointPropWithBounds_%s').%s().by('pointPropWithBounds_%s').add()\n", + indexType, indexType, indexType)); + + indices.append( + String.format( + "schema.vertexLabel('user').index('by_pointPropWithGeoBounds_%s').%s().by('pointPropWithGeoBounds_%s').add()\n", + indexType, indexType, indexType)); + } + + vertex0.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(40.0001 40)').property('pointPropWithGeoBounds_%s', 'POINT(40.0001 40)')", + indexType, indexType)); + vertex1.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(40 40)').property('pointPropWithGeoBounds_%s', 'POINT(40 40)')", + indexType, indexType)); + vertex3.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(30 30)').property('pointPropWithGeoBounds_%s', 'POINT(30 30)')", + indexType, indexType)); + } + + vertexLabel.append(Joiner.on(", ").join(propertyNames)); + vertexLabel.append(").create()\n"); + + schema.append(propertyKeys).append(vertexLabel).append(indices); + + return Lists.newArrayList( + SampleGraphScripts.MAKE_STRICT, + schema.toString(), + vertex0.toString(), + vertex1.toString(), + vertex2, + vertex3.toString()); + } + + private static String geoTypeWithBounds( + String baseName, + 
double lowerLimitX, + double lowerLimitY, + double higherLimitX, + double higherLimitY) { + return baseName + + String.format( + ".withBounds(%f, %f, %f, %f)", lowerLimitX, lowerLimitY, higherLimitX, higherLimitY); + } + + private static String geoType(String baseName) { + return baseName + ".withGeoBounds()"; + } + + @BeforeClass + public static void setup() { + for (String setupQuery : geoIndices()) { + sessionRule.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + ccmRule.getCcmBridge().reloadCore(1, sessionRule.getGraphName(), "user_p", true); + } + + @DataProvider + public static Object[][] indexTypes() { + return new Object[][] {{"search"} + + // for some reason, materialized and secondary indices have decided not to work + // I get an exception saying "there is no index for this query, here is the defined + // indices: " and the list contains the indices that are needed. Mysterious. + // There may be something to do with differences in the CCMBridge adapter of the new + // driver, some changes make materialized views and secondary indices to be not + // considered for graph: + // + // , {"materialized"} + // , {"secondary"} + }; + } + + @UseDataProvider("indexTypes") + @Test + public void search_by_distance_cartesian(String indexType) { + // in cartesian geometry, the distance between POINT(30 30) and POINT(40 40) is exactly + // 14.142135623730951 + // any point further than that should be detected outside of the range. + // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the + // range. 
+ GraphTraversal traversal = + g.V() + .has( + "user", + "pointPropWithBounds_" + indexType, + Geo.inside(Point.fromCoordinates((double) 30, (double) 30), 14.142135623730951)) + .values("full_name"); + assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); + } + + @UseDataProvider("indexTypes") + @Test + public void search_by_distance_geodetic(String indexType) { + // in geodetic geometry, the distance between POINT(30 30) and POINT(40 40) is exactly + // 12.908258700131379 + // any point further than that should be detected outside of the range. + // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the + // range. + GraphTraversal traversal = + g.V() + .has( + "user", + "pointPropWithGeoBounds_" + indexType, + Geo.inside( + Point.fromCoordinates((double) 30, (double) 30), + 12.908258700131379, + Geo.Unit.DEGREES)) + .values("full_name"); + assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); + } + + @Test + public void + should_fail_if_geodetic_predicate_used_against_cartesian_property_with_search_index() { + try { + GraphTraversal traversal = + g.V() + .has( + "user", + "pointPropWithBounds_search", + Geo.inside( + Point.fromCoordinates((double) 30, (double) 30), + 12.908258700131379, + Geo.Unit.DEGREES)) + .values("full_name"); + traversal.toList(); + fail("Should have failed executing the traversal because the property type is incorrect"); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains("Distance units cannot be used in queries against non-geodetic points."); + } + } + + @Test + public void + should_fail_if_cartesian_predicate_used_against_geodetic_property_with_search_index() { + try { + GraphTraversal traversal = + g.V() + .has( + "user", + "pointPropWithGeoBounds_search", + Geo.inside(Point.fromCoordinates((double) 30, (double) 30), 14.142135623730951)) + .values("full_name"); + traversal.toList(); + fail("Should have failed executing the 
traversal because the property type is incorrect"); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains("Distance units are required for queries against geodetic points."); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java new file mode 100644 index 00000000000..0e64eb568e7 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java @@ -0,0 +1,320 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.graph.predicates.Search; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.ArrayList; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import 
org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +@RunWith(DataProviderRunner.class) +public class GraphTextSearchIndexIT { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + /** + * A schema representing an address book with 3 properties (full_name_*, description_*, alias_*) + * created for each type of index (search, secondary, materialized). + */ + public static Collection textIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = new StringBuilder(""); + StringBuilder propertyKeys = new StringBuilder(""); + StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); + StringBuilder indices = new StringBuilder(""); + StringBuilder vertex0 = new StringBuilder("g.addV('user')"); + StringBuilder vertex1 = new StringBuilder("g.addV('user')"); + StringBuilder vertex2 = new StringBuilder("g.addV('user')"); + StringBuilder vertex3 = new StringBuilder("g.addV('user')"); + + ArrayList propertyNames = new ArrayList(); + for (String indexType : indexTypes) { + propertyKeys.append( + String.format( + "schema.propertyKey('full_name_%s').Text().create()\n" + + "schema.propertyKey('description_%s').Text().create()\n" + + "schema.propertyKey('alias_%s').Text().create()\n", + indexType, indexType, indexType)); + + propertyNames.add("'full_name_" + 
indexType + "'"); + propertyNames.add("'description_" + indexType + "'"); + propertyNames.add("'alias_" + indexType + "'"); + + if (indexType.equals("search")) { + indices.append( + "schema.vertexLabel('user').index('search').search().by('full_name_search').asString().by('description_search').asText().by('alias_search').asString().add()\n"); + } else { + indices.append( + String.format( + "schema.vertexLabel('user').index('by_full_name_%s').%s().by('full_name_%s').add()\n", + indexType, indexType, indexType)); + indices.append( + String.format( + "schema.vertexLabel('user').index('by_description_%s').%s().by('description_%s').add()\n", + indexType, indexType, indexType)); + indices.append( + String.format( + "schema.vertexLabel('user').index('by_alias_name_%s').%s().by('alias_%s').add()\n", + indexType, indexType, indexType)); + } + + vertex0.append( + String.format( + ".property('full_name_%s', 'Paul Thomas Joe').property('description_%s', 'Lives by the hospital').property('alias_%s', 'mario')", + indexType, indexType, indexType)); + vertex1.append( + String.format( + ".property('full_name_%s', 'George Bill Steve').property('description_%s', 'A cold dude').property('alias_%s', 'wario')", + indexType, indexType, indexType)); + vertex2.append( + String.format( + ".property('full_name_%s', 'James Paul Joe').property('description_%s', 'Likes to hang out').property('alias_%s', 'bowser')", + indexType, indexType, indexType)); + vertex3.append( + String.format( + ".property('full_name_%s', 'Jill Alice').property('description_%s', 'Enjoys a very nice cold coca cola').property('alias_%s', 'peach')", + indexType, indexType, indexType)); + } + + vertexLabel.append(Joiner.on(", ").join(propertyNames)); + vertexLabel.append(").create()\n"); + + schema.append(propertyKeys).append(vertexLabel).append(indices); + + return Lists.newArrayList( + SampleGraphScripts.MAKE_STRICT, + schema.toString(), + vertex0.toString(), + vertex1.toString(), + vertex2.toString(), + 
vertex3.toString()); + } + + @BeforeClass + public static void setup() { + for (String setupQuery : textIndices()) { + sessionRule.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + ccmRule.getCcmBridge().reloadCore(1, sessionRule.getGraphName(), "user_p", true); + } + + @DataProvider + public static Object[][] indexTypes() { + return new Object[][] {{"search"} + + // for some reason, materialized and secondary indices have decided not to work + // I get an exception saying "there is no index for this query, here is the defined + // indices: " and the list contains the indices that are needed. Mysterious. + // There may be something to do with differences in the CCMBridge adapter of the new + // driver, some changes make materialized views and secondary indices to be not + // considered for graph: + // + // , {"materialized"} + // , {"secondary"} + }; + } + + /** + * Validates that a graph traversal can be made by using a Search prefix predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'full_name' property beginning with 'Paul'. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + public void search_by_prefix_search(String indexType) { + // Only one user with full_name starting with Paul. + GraphTraversal traversal = + g.V() + .has("user", "full_name_" + indexType, Search.prefix("Paul")) + .values("full_name_" + indexType); + assertThat(traversal.toList()).containsOnly("Paul Thomas Joe"); + } + + /** + * Validates that a graph traversal can be made by using a Search regex predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'full_name' property matching regex '.*Paul.*'. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + public void search_by_regex(String indexType) { + // Only two people with names containing pattern for Paul. + GraphTraversal traversal = + g.V() + .has("user", "full_name_" + indexType, Search.regex(".*Paul.*")) + .values("full_name_" + indexType); + assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "James Paul Joe"); + } + + /** + * Validates that a graph traversal can be made by using a Search fuzzy predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'alias' property matching 'awrio' with a fuzzy distance + * of 1. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + @DseRequirement(min = "5.1.0") + public void search_by_fuzzy(String indexType) { + // Alias matches 'awrio' fuzzy + GraphTraversal traversal = + g.V() + .has("user", "alias_" + indexType, Search.fuzzy("awrio", 1)) + .values("full_name_" + indexType); + // Should not match 'Paul Thomas Joe' since alias is 'mario', which is at distance 2 of 'awrio' + // (a -> m, w -> a) + // Should match 'George Bill Steve' since alias is 'wario' witch matches 'awrio' within a + // distance of 1 (transpose w with a). + assertThat(traversal.toList()).containsOnly("George Bill Steve"); + } + + /** + * Validates that a graph traversal can be made by using a Search token predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'description' property containing the token 'cold'. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + public void search_by_token(String indexType) { + // Description containing token 'cold' + GraphTraversal traversal = + g.V() + .has("user", "description_" + indexType, Search.token("cold")) + .values("full_name_" + indexType); + assertThat(traversal.toList()).containsOnly("Jill Alice", "George Bill Steve"); + } + + /** + * Validates that a graph traversal can be made by using a Search token prefix predicate on an + * indexed property of the given type. + * + *

      Finds all 'user' vertices having a 'description' containing the token prefix 'h'. + */ + @UseDataProvider("indexTypes") + @Test + public void search_by_token_prefix(String indexType) { + // Description containing a token starting with h + GraphTraversal traversal = + g.V() + .has("user", "description_" + indexType, Search.tokenPrefix("h")) + .values("full_name_" + indexType); + assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "James Paul Joe"); + } + + /** + * Validates that a graph traversal can be made by using a Search token regex predicate on an + * indexed property of the given type. + * + *

      Finds all 'user' vertices having a 'description' containing the token regex + * '(nice|hospital)'. + */ + @UseDataProvider("indexTypes") + @Test + public void search_by_token_regex(String indexType) { + // Description containing nice or hospital + GraphTraversal traversal = + g.V() + .has("user", "description_" + indexType, Search.tokenRegex("(nice|hospital)")) + .values("full_name_" + indexType); + assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "Jill Alice"); + } + + /** + * Validates that a graph traversal can be made by using a Search fuzzy predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'description' property matching 'lieks' with a fuzzy + * distance of 1. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + @DseRequirement(min = "5.1.0") + public void search_by_token_fuzzy(String indexType) { + // Description containing 'lives' fuzzy + GraphTraversal traversal = + g.V() + .has("user", "description_" + indexType, Search.tokenFuzzy("lieks", 1)) + .values("full_name_" + indexType); + // Should not match 'Paul Thomas Joe' since description contains 'Lives' which is at distance of + // 2 (e -> v, k -> e) + // Should match 'James Paul Joe' since description contains 'Likes' (transpose e for k) + assertThat(traversal.toList()).containsOnly("James Paul Joe"); + } + + /** + * Validates that a graph traversal can be made by using a Search phrase predicate on an indexed + * property of the given type. + * + *

      Finds all 'user' vertices having a 'description' property matching 'a cold' with a distance + * of 2. + * + * @test_category dse:graph + */ + @UseDataProvider("indexTypes") + @Test + @DseRequirement(min = "5.1.0") + public void search_by_phrase(String indexType) { + // Full name contains phrase "Paul Joe" + GraphTraversal traversal = + g.V() + .has("user", "description_" + indexType, Search.phrase("a cold", 2)) + .values("full_name_" + indexType); + // Should match 'George Bill Steve' since 'A cold dude' is at distance of 0 for 'a cold'. + // Should match 'Jill Alice' since 'Enjoys a very nice cold coca cola' is at distance of 2 for + // 'a cold'. + assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java new file mode 100644 index 00000000000..c859c2514ca --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -0,0 +1,163 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.ScriptGraphStatement.newInstance; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.time.Duration; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") +public class GraphTimeoutsIT { + + public static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + public static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Test + public void should_have_driver_wait_indefinitely_by_default_and_server_return_timeout_response() { + long desiredTimeout = 2500L; + + DriverExecutionProfile drivertest1 = + sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest1"); + + // We could have done with the server's default but it's 30 secs so the test would have taken at + // least + // that time. 
So we simulate a server timeout change. + sessionRule + .session() + .execute( + newInstance( + "graph.schema().config().option(\"graph.traversal_sources.drivertest1.evaluation_timeout\").set('" + + desiredTimeout + + " ms')") + .setExecutionProfile(drivertest1)); + + try { + // The driver should wait indefinitely, but the server should timeout first. + sessionRule + .session() + .execute( + newInstance("java.util.concurrent.TimeUnit.MILLISECONDS.sleep(35000L);1+1") + .setExecutionProfile(drivertest1)); + fail("The request should have timed out"); + } catch (InvalidQueryException e) { + assertThat(e.toString()) + .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); + } + } + + @Test + public void should_not_take_into_account_request_timeout_if_more_than_server_timeout() { + long desiredTimeout = 1000L; + int clientTimeout = 32000; + + DriverExecutionProfile drivertest2 = + sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest2") + .withDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ofMillis(clientTimeout)); + + sessionRule + .session() + .execute( + newInstance( + "graph.schema().config().option(\"graph.traversal_sources.drivertest2.evaluation_timeout\").set('" + + desiredTimeout + + " ms')") + .setExecutionProfile(drivertest2)); + + try { + // The driver should wait 32 secs, but the server should timeout first. 
+ sessionRule + .session() + .execute( + newInstance("java.util.concurrent.TimeUnit.MILLISECONDS.sleep(35000L);1+1") + .setExecutionProfile(drivertest2)); + fail("The request should have timed out"); + } catch (InvalidQueryException e) { + assertThat(e.toString()) + .contains("evaluation exceeded", "threshold of ", Long.toString(desiredTimeout), "ms"); + } + } + + @Test + public void should_take_into_account_request_timeout_if_less_than_server_timeout() { + long serverTimeout = 10000L; + int desiredTimeout = 1000; + + DriverExecutionProfile drivertest3 = + sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest3"); + + // We could have done with the server's default but it's 30 secs so the test would have taken at + // least + // that time. Also, we don't want to rely on server's default. So we simulate a server timeout + // change. + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance( + "graph.schema().config().option(\"graph.traversal_sources.drivertest3.evaluation_timeout\").set('" + + serverTimeout + + " ms')") + .setExecutionProfile(drivertest3)); + + try { + // The timeout on the request is lower than what's defined server side, so it should be taken + // into account. + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance( + "java.util.concurrent.TimeUnit.MILLISECONDS.sleep(35000L);1+1") + .setExecutionProfile( + drivertest3.withDuration( + DseDriverOption.GRAPH_TIMEOUT, Duration.ofMillis(desiredTimeout)))); + fail("The request should have timed out"); + } catch (Exception e) { + // Since server timeout == client timeout, locally concurrency is likely to happen. + // We cannot know for sure if it will be a Client timeout error, or a Server timeout, and + // during tests, both happened and not deterministically. 
+ if (e instanceof InvalidQueryException) { + assertThat(e.toString()) + .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); + } else { + assertThat(e).isInstanceOf(DriverTimeoutException.class); + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java new file mode 100644 index 00000000000..068fb464b2b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +public class SampleGraphScripts { + + public static final String MAKE_STRICT = + "schema.config().option('graph.schema_mode').set('production');\n"; + + public static final String MAKE_NOT_STRICT = + "schema.config().option('graph.schema_mode').set('development');\n"; + + public static final String ALLOW_SCANS = + "schema.config().option('graph.allow_scan').set('true');\n"; + + public static final String MODERN_SCHEMA = + "schema.propertyKey('name').Text().ifNotExists().create();\n" + + "schema.propertyKey('age').Int().ifNotExists().create();\n" + + "schema.propertyKey('lang').Text().ifNotExists().create();\n" + + "schema.propertyKey('weight').Float().ifNotExists().create();\n" + + "schema.vertexLabel('person').properties('name', 'age').ifNotExists().create();\n" + + "schema.vertexLabel('software').properties('name', 'lang').ifNotExists().create();\n" + + "schema.edgeLabel('created').properties('weight').connection('person', 'software').ifNotExists().create();\n" + + "schema.edgeLabel('knows').properties('weight').connection('person', 'person').ifNotExists().create();\n"; + + public static String 
MODERN_GRAPH = + MODERN_SCHEMA + + "marko = g.addV('person').property('name', 'marko').property('age', 29).next();\n" + + "vadas = g.addV('person').property('name', 'vadas').property('age', 27).next();\n" + + "josh = g.addV('person').property('name', 'josh').property('age', 32).next();\n" + + "peter = g.addV('person').property('name', 'peter').property('age', 35).next();\n" + + "lop = g.addV('software').property('name', 'lop').property('lang', 'java').next();\n" + + "ripple = g.addV('software').property('name', 'ripple').property('lang', 'java').next();\n" + + "g.addE('knows').from(marko).to(vadas).property('weight', 0.5f).next();\n" + + "g.addE('knows').from(marko).to(josh).property('weight', 1.0f).next();\n" + + "g.addE('created').from(marko).to(lop).property('weight', 0.4f).next();\n" + + "g.addE('created').from(josh).to(ripple).property('weight', 1.0f).next();\n" + + "g.addE('created').from(josh).to(lop).property('weight', 0.4f).next();\n" + + "g.addE('created').from(peter).to(lop).property('weight', 0.2f);"; +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java new file mode 100644 index 00000000000..559949b676b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java @@ -0,0 +1,18 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import org.apache.tinkerpop.gremlin.process.traversal.dsl.GremlinDsl; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +@GremlinDsl(traversalSource = "com.datastax.dse.driver.api.core.graph.SocialTraversalSourceDsl") +public interface SocialTraversalDsl extends GraphTraversal.Admin { + public default GraphTraversal knows(String personName) { + return out("knows").hasLabel("person").has("name", personName).in(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java new file mode 100644 index 00000000000..ba50213c4aa --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.DefaultGraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +public class SocialTraversalSourceDsl extends GraphTraversalSource { + + public SocialTraversalSourceDsl( + final Graph graph, final TraversalStrategies traversalStrategies) { + super(graph, traversalStrategies); + } + + public SocialTraversalSourceDsl(final Graph graph) { + super(graph); + } + + public GraphTraversal persons(String... names) { + GraphTraversalSource clone = this.clone(); + + // Manually add a "start" step for the traversal in this case the equivalent of V(). GraphStep + // is marked + // as a "start" step by passing "true" in the constructor. 
+ clone.getBytecode().addStep(GraphTraversal.Symbols.V); + GraphTraversal traversal = new DefaultGraphTraversal<>(clone); + traversal.asAdmin().addStep(new GraphStep(traversal.asAdmin(), Vertex.class, true)); + + traversal = traversal.hasLabel("person"); + if (names.length > 0) traversal = traversal.has("name", P.within(names)); + + return traversal; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java new file mode 100644 index 00000000000..6b30830ac42 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.oss.driver.assertions.Assertions; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +public class TinkerEdgeAssert extends TinkerElementAssert { + + public TinkerEdgeAssert(Edge actual) { + super(actual, TinkerEdgeAssert.class); + } + + public TinkerEdgeAssert hasInVLabel(String label) { + Assertions.assertThat(actual.inVertex().label()).isEqualTo(label); + return myself; + } + + public TinkerEdgeAssert hasOutVLabel(String label) { + Assertions.assertThat(actual.outVertex().label()).isEqualTo(label); + return myself; + } + + public TinkerEdgeAssert hasOutV(Vertex vertex) { + Assertions.assertThat(actual.outVertex()).isEqualTo(vertex); + return myself; + } + + public TinkerEdgeAssert hasInV(Vertex vertex) { + Assertions.assertThat(actual.inVertex()).isEqualTo(vertex); + return myself; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java new file mode 100644 index 00000000000..19e668c8dee --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.structure.Element; +import org.assertj.core.api.AbstractAssert; + +public abstract class TinkerElementAssert, A extends Element> + extends AbstractAssert { + + protected TinkerElementAssert(A actual, Class selfType) { + super(actual, selfType); + } + + public S hasId(Object id) { + assertThat(actual.id()).isEqualTo(id); + return myself; + } + + public S hasLabel(String label) { + assertThat(actual.label()).isEqualTo(label); + return myself; + } + + public S hasProperty(String propertyName) { + assertThat(actual.property(propertyName).isPresent()).isTrue(); + return myself; + } + + public S hasProperty(String propertyName, Object value) { + hasProperty(propertyName); + assertThat(actual.property(propertyName).value()).isEqualTo(value); + return myself; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java new file mode 100644 index 00000000000..0de34c14bbf --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; + +public class TinkerGraphAssertions extends com.datastax.oss.driver.assertions.Assertions { + + public static TinkerEdgeAssert assertThat(Edge edge) { + return new TinkerEdgeAssert(edge); + } + + public static TinkerVertexAssert assertThat(Vertex vertex) { + return new TinkerVertexAssert(vertex); + } + + public static TinkerVertexPropertyAssert assertThat(VertexProperty vertexProperty) { + return new TinkerVertexPropertyAssert(vertexProperty); + } + + public static TinkerPathAssert assertThat(Path path) { + return new TinkerPathAssert(path); + } + + public static TinkerTreeAssert assertThat(Tree tree) { + return new TinkerTreeAssert<>(tree); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java new file mode 100644 index 00000000000..9136b0cec8b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.assertj.core.api.AbstractAssert; +import org.assertj.core.api.AbstractObjectAssert; + +public class TinkerPathAssert extends AbstractAssert { + + public TinkerPathAssert(Path actual) { + super(actual, TinkerPathAssert.class); + } + + /** + * Ensures that the given Path matches one of the exact traversals we'd expect for a person whom + * Marko knows that has created software and what software that is. + * + *

      These paths should be: + * + *

        + *
      • marko -> knows -> josh -> created -> lop + *
      • marko -> knows -> josh -> created -> ripple + *
      + */ + public static void validatePathObjects(Path path) { + + // marko should be the origin point. + TinkerGraphAssertions.assertThat(path).vertexAt(0).hasLabel("person"); + + // there should be a 'knows' outgoing relationship between marko and josh. + TinkerGraphAssertions.assertThat(path) + .edgeAt(1) + .hasLabel("knows") + .hasOutVLabel("person") + .hasOutV((Vertex) path.objects().get(0)) + .hasInVLabel("person") + .hasInV((Vertex) path.objects().get(2)); + + // josh... + TinkerGraphAssertions.assertThat(path).vertexAt(2).hasLabel("person"); + + // there should be a 'created' relationship between josh and lop. + TinkerGraphAssertions.assertThat(path) + .edgeAt(3) + .hasLabel("created") + .hasOutVLabel("person") + .hasOutV((Vertex) path.objects().get(2)) + .hasInVLabel("software") + .hasInV((Vertex) path.objects().get(4)); + + // lop.. + TinkerGraphAssertions.assertThat(path).vertexAt(4).hasLabel("software"); + } + + public AbstractObjectAssert objectAt(int i) { + assertThat(actual.size()).isGreaterThanOrEqualTo(i); + return assertThat(actual.objects().get(i)); + } + + public TinkerVertexAssert vertexAt(int i) { + assertThat(actual.size()).isGreaterThanOrEqualTo(i); + Object o = actual.objects().get(i); + assertThat(o).isInstanceOf(Vertex.class); + return new TinkerVertexAssert((Vertex) o); + } + + public TinkerEdgeAssert edgeAt(int i) { + assertThat(actual.size()).isGreaterThanOrEqualTo(i); + Object o = actual.objects().get(i); + assertThat(o).isInstanceOf(Edge.class); + return new TinkerEdgeAssert((Edge) o); + } + + public TinkerPathAssert hasLabel(int i, String... 
labels) { + assertThat(actual.labels().size()).isGreaterThanOrEqualTo(i); + assertThat(actual.labels().get(i)).containsExactly(labels); + return myself; + } + + public TinkerPathAssert hasNoLabel(int i) { + assertThat(actual.labels().size()).isGreaterThanOrEqualTo(i); + assertThat(actual.labels().get(i)).isEmpty(); + return myself; + } + + public TinkerPathAssert doesNotHaveLabel(String label) { + assertThat(actual.hasLabel(label)).isFalse(); + return myself; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java new file mode 100644 index 00000000000..58cb7ff956f --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java @@ -0,0 +1,34 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.assertj.core.api.MapAssert; + +public class TinkerTreeAssert extends MapAssert> { + + public TinkerTreeAssert(Tree actual) { + super(actual); + } + + public TinkerTreeAssert hasTree(T key) { + assertThat(actual).containsKey(key); + return this; + } + + public TinkerTreeAssert isLeaf() { + assertThat(actual).hasSize(0); + return this; + } + + public TinkerTreeAssert tree(T key) { + hasTree(key); + return new TinkerTreeAssert<>(actual.get(key)); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java new file mode 100644 index 00000000000..981edb8d65a --- /dev/null +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java @@ -0,0 +1,35 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +public class TinkerVertexAssert extends TinkerElementAssert { + + public TinkerVertexAssert(Vertex actual) { + super(actual, TinkerVertexAssert.class); + } + + @Override + public TinkerVertexAssert hasProperty(String propertyName) { + assertThat(actual.properties(propertyName)).toIterable().isNotEmpty(); + return myself; + } + + @Override + public TinkerVertexAssert hasProperty(String propertyName, Object value) { + hasProperty(propertyName); + assertThat(actual.properties(propertyName)) + .toIterable() + .extracting(Property::value) + .contains(value); + return myself; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java new file mode 100644 index 00000000000..4cdc3a844d6 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java @@ -0,0 +1,35 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.apache.tinkerpop.gremlin.structure.Element; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; + +public class TinkerVertexPropertyAssert + extends TinkerElementAssert, VertexProperty> { + + public TinkerVertexPropertyAssert(VertexProperty actual) { + super(actual, TinkerVertexPropertyAssert.class); + } + + public TinkerVertexPropertyAssert hasKey(String key) { + assertThat(actual.key()).isEqualTo(key); + return this; + } + + public TinkerVertexPropertyAssert hasParent(Element parent) { + assertThat(actual.element()).isEqualTo(parent); + return this; + } + + public TinkerVertexPropertyAssert hasValue(T value) { + assertThat(actual.value()).isEqualTo(value); + return this; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java new file mode 100644 index 00000000000..6e8712406be --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +public class GraphDataTypeRemoteIT extends GraphDataTypeITBase { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder() + .withDseWorkloads("graph") + .withDseConfiguration( + "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") + .build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + } + + @Override + public DseSession session() { + return sessionRule.session(); + } + + private final GraphTraversalSource g = + 
DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + @Override + public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { + return g.addV(vertexLabel).property(propertyName, value).next(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java new file mode 100644 index 00000000000..a565b8dab43 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.ALLOW_SCANS; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.MAKE_STRICT; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +// INFO: 
meta props are going away in NGDG + +@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +public class GraphTraversalMetaPropertiesRemoteIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + /** Builds a simple schema that provides for a vertex with a property with sub properties. */ + public static String metaProps = + MAKE_STRICT + + ALLOW_SCANS + + "schema.propertyKey('sub_prop').Text().create()\n" + + "schema.propertyKey('sub_prop2').Text().create()\n" + + "schema.propertyKey('meta_prop').Text().properties('sub_prop', 'sub_prop2').create()\n" + + "schema.vertexLabel('meta_v').properties('meta_prop').create()"; + + /** + * Ensures that a traversal that yields a vertex with a property that has its own properties that + * is appropriately parsed and made accessible via {@link VertexProperty#property}. + * + * @test_category dse:graph + */ + @Test + public void should_parse_meta_properties() { + // given a schema that defines meta properties. + sessionRule.session().execute(ScriptGraphStatement.newInstance(metaProps)); + + // when adding a vertex with that meta property + Vertex v = + g.addV("meta_v") + .property("meta_prop", "hello", "sub_prop", "hi", "sub_prop2", "hi2") + .next(); + + // then the created vertex should have the meta prop present with its sub properties. 
+ assertThat(v).hasProperty("meta_prop"); + VertexProperty metaProp = v.property("meta_prop"); + assertThat(metaProp) + .hasValue("hello") + .hasProperty("sub_prop", "hi") + .hasProperty("sub_prop2", "hi2"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java new file mode 100644 index 00000000000..614e6034df5 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -0,0 +1,76 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.ALLOW_SCANS; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.MAKE_STRICT; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import java.util.Iterator; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.3", 
description = "DSE 5.0.3 required for remote TinkerPop support") +public class GraphTraversalMultiPropertiesRemoteIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + /** Builds a simple schema that provides for a vertex with a multi-cardinality property. */ + public static final String multiProps = + MAKE_STRICT + + ALLOW_SCANS + + "schema.propertyKey('multi_prop').Text().multiple().create()\n" + + "schema.vertexLabel('multi_v').properties('multi_prop').create()\n"; + + /** + * Ensures that a traversal that yields a vertex with a property name that is present multiple + * times that the properties are parsed and made accessible via {@link + * Vertex#properties(String...)}. + * + * @test_category dse:graph + */ + @Test + public void should_parse_multiple_cardinality_properties() { + // given a schema that defines multiple cardinality properties. + sessionRule.session().execute(ScriptGraphStatement.newInstance(multiProps)); + + // when adding a vertex with a multiple cardinality property + Vertex v = + g.addV("multi_v") + .property("multi_prop", "Hello") + .property("multi_prop", "Sweet") + .property("multi_prop", "World") + .next(); + + // then the created vertex should have the multi-cardinality property present with its values. 
+ assertThat(v).hasProperty("multi_prop"); + Iterator> multiProp = v.properties("multi_prop"); + assertThat(multiProp) + .toIterable() + .extractingResultOf("value") + .containsExactly("Hello", "Sweet", "World"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java new file mode 100644 index 00000000000..f5a3ecbd7a8 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java @@ -0,0 +1,494 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.core.graph.TinkerPathAssert; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import 
java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.process.traversal.Traversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.assertj.core.api.Assertions; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") +public class GraphTraversalRemoteIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + } + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + + /** + * Ensures that a previously returned {@link Vertex}'s {@link Vertex#id()} can be used as an input + * to {@link + * 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource#V(Object...)} to + * retrieve the {@link Vertex} and that the returned {@link Vertex} is the same. + * + * @test_category dse:graph + */ + @Test + public void should_use_vertex_id_as_parameter() { + // given an existing vertex + Vertex marko = g.V().hasLabel("person").has("name", "marko").next(); + assertThat(marko).hasProperty("name", "marko"); + + // then should be able to retrieve that same vertex by id. + assertThat(g.V(marko.id()).next()).isEqualTo(marko); + } + + /** + * Ensures that a previously returned {@link Edge}'s {@link Edge#id()} can be used as an input to + * {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource#E(Object...)} to + * retrieve the {@link Edge} and that the returned {@link Edge} is the same. + * + * @test_category dse:graph + */ + @Test + public void should_use_edge_is_as_parameter() { + // given an existing edge + Edge created = g.E().has("weight", 0.2f).next(); + + assertThat(created).hasProperty("weight", 0.2f).hasInVLabel("software").hasOutVLabel("person"); + + // should be able to retrieve incoming and outgoing vertices by edge id + Vertex in = g.E(created.id()).inV().next(); + Vertex out = g.E(created.id()).outV().next(); + + // should resolve to lop + assertThat(in).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); + + // should resolve to marko, josh and peter whom created lop. + assertThat(out).hasLabel("person").hasProperty("name", "peter"); + } + + /** + * A sanity check that a returned {@link Vertex}'s id is a {@link Map}. This test could break in + * the future if the format of a vertex ID changes from a Map to something else in DSE. + * + * @test_category dse:graph + */ + @Test + public void should_deserialize_vertex_id_as_map() { + // given an existing vertex + Vertex marko = g.V().hasLabel("person").has("name", "marko").next(); + + // then id should be a map with expected values. 
+ // Note: this is pretty dependent on DSE Graphs underlying id structure which may vary in the + // future. + @SuppressWarnings("unchecked") + Map id = (Map) marko.id(); + assertThat(id) + .hasSize(3) + .containsEntry("~label", "person") + .containsKey("community_id") + .containsKey("member_id"); + } + + /** + * Ensures that a traversal that returns a result of mixed types is interpreted as a {@link Map} + * with {@link Object} values. Also uses {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal#by(Traversal)} with an + * anonymous traversal to get inbound 'created' edges and folds them into a list. + * + *

      Executes a vertex traversal that binds label 'a' and 'b' to vertex properties and label 'c' + * to vertices that have edges from that vertex. + * + * @test_category dse:graph + */ + @Test + public void should_handle_result_object_of_mixed_types() { + // find all software vertices and select name, language, and find all vertices that created such + // software. + List> results = + g.V() + .hasLabel("software") + .as("a", "b", "c") + .select("a", "b", "c") + .by("name") + .by("lang") + .by(__.in("created").fold()) + .toList(); + + // ensure that lop and ripple and their data are the results return. + assertThat(results).extracting(m -> m.get("a")).containsOnly("lop", "ripple"); + + for (Map result : results) { + assertThat(result).containsOnlyKeys("a", "b", "c"); + // both software are written in java. + assertThat(result.get("b")).isEqualTo("java"); + // ensure the created vertices match the creators of the software. + @SuppressWarnings("unchecked") + List vertices = (List) result.get("c"); + if (result.get("a").equals("lop")) { + // lop, 'c' should contain marko, josh, peter. + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("marko", "josh", "peter"); + } else { + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("josh"); + } + } + } + + /** + * Ensures that a traversal that returns a sub graph can be retrieved. + * + *

      The subgraph is all members in a knows relationship, thus is all people who marko knows and + * the edges that connect them. + */ + @Test + public void should_handle_subgraph() { + // retrieve a subgraph on the knows relationship, this omits the created edges. + Graph graph = (Graph) g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph").next(); + + // there should only be 2 edges (since there are are only 2 knows relationships) and 3 vertices + assertThat(graph.edges()).toIterable().hasSize(2); + assertThat(graph.vertices()).toIterable().hasSize(3); + } + + /** + * Ensures a traversal that yields no results is properly retrieved and is empty. + * + * @test_category dse:graph + */ + @Test + public void should_return_zero_results() { + assertThat(g.V().hasLabel("notALabel").toList()).isEmpty(); + } + + /** + * Validates that a traversal returning a {@link Tree} structure is returned appropriately with + * the expected contents. + * + *

      Retrieves trees of people marko knows and the software they created. + * + * @test_category dse:graph + */ + @Test + public void should_parse_tree() { + // Get a tree structure showing the paths from mark to people he knows to software they've + // created. + @SuppressWarnings("unchecked") + Tree tree = + g.V().hasLabel("person").out("knows").out("created").tree().by("name").next(); + + // Marko knows josh who created lop and ripple. + assertThat(tree).tree("marko").tree("josh").tree("lop").isLeaf(); + + assertThat(tree).tree("marko").tree("josh").tree("ripple").isLeaf(); + } + + /** + * Validates that a traversal using lambda operations with anonymous traversals are applied + * appropriately and return the expected results. + * + *

      Traversal that filters 'person'-labeled vertices by name 'marko' and flatMaps outgoing + * vertices on the 'knows' relationship by their outgoing 'created' vertices and then maps by + * their 'name' property and folds them into one list. + * + *

      Note: This does not validate lambdas with functions as those can't be interpreted and + * sent remotely. + * + * @test_category dse:graph + */ + @Test + public void should_handle_lambdas() { + // Find all people marko knows and the software they created. + List software = + g.V() + .hasLabel("person") + .filter(__.has("name", "marko")) + .out("knows") + .flatMap(__.out("created")) + .map(__.values("name")) + .fold() + .next(); + + // Marko only knows josh and vadas, of which josh created lop and ripple. + assertThat(software).containsOnly("lop", "ripple"); + } + + /** + * Validates that {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal#tryNext()} functions + * appropriate by returning an {@link Optional} of which the presence of the underlying data + * depends on whether or not remaining data is present. + * + *

      This is more of a test of Tinkerpop than the protocol between the client and DSE graph. + * + * @test_category dse:graph + */ + @Test + public void should_handle_tryNext() { + GraphTraversal traversal = g.V().hasLabel("person").has("name", "marko"); + + // value present + Optional v0 = traversal.tryNext(); + assertThat(v0.isPresent()).isTrue(); + //noinspection OptionalGetWithoutIsPresent + assertThat(v0.get()).hasProperty("name", "marko"); + + // value absent as there was only 1 matching vertex. + Optional v1 = traversal.tryNext(); + assertThat(v1.isPresent()).isFalse(); + } + + /** + * Validates that {@link GraphTraversal#toStream()} appropriately creates a stream from the + * underlying iterator on the traversal, and then an attempt to call toStream again yields no + * results. + * + *

      This is more of a test of Tinkerpop than the protocol between the client and DSE graph. + * + * @test_category dse:graph + */ + @Test + public void should_handle_streaming() { + GraphTraversal traversal = g.V().hasLabel("person"); + // retrieve all person vertices to stream, and filter on client side all persons under age 30 + // and map to their name. + List under30 = + traversal + .toStream() + .filter(v -> v.property("age").value() < 30) + .map(v -> v.property("name").value()) + .collect(Collectors.toList()); + + assertThat(under30).containsOnly("marko", "vadas"); + + // attempt to get a stream again, which should be empty. + assertThat(traversal.toStream().collect(Collectors.toList())).isEmpty(); + } + + /** + * Validates that when traversing a path and labeling some of the elements during the traversal + * that the output elements are properly labeled. + * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_with_some_labels() { + // given a traversal where some objects have labels. + List paths = + g.V() + .hasLabel("person") + .has("name", "marko") + .as("a") + .outE("knows") + .inV() + .as("c", "d") + .outE("created") + .as("e", "f", "g") + .inV() + .path() + .toList(); + + // then the paths returned should be labeled for the + // appropriate objects, and not labeled otherwise. + for (Path path : paths) { + TinkerPathAssert.validatePathObjects(path); + assertThat(path) + .hasLabel(0, "a") + .hasNoLabel(1) + .hasLabel(2, "c", "d") + .hasLabel(3, "e", "f", "g") + .hasNoLabel(4); + } + } + + /** + * Validates that when traversing a path and labeling all of the elements during the traversal + * that the output elements are properly labeled. + * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_with_labels() { + // given a traversal where all objects have labels. 
+ List paths = + g.V() + .hasLabel("person") + .has("name", "marko") + .as("a") + .outE("knows") + .as("b") + .inV() + .as("c", "d") + .outE("created") + .as("e", "f", "g") + .inV() + .as("h") + .path() + .toList(); + + // then the paths returned should be labeled for all + // objects. + for (Path path : paths) { + TinkerPathAssert.validatePathObjects(path); + Assertions.assertThat(path.labels()).hasSize(5); + assertThat(path) + .hasLabel(0, "a") + .hasLabel(1, "b") + .hasLabel(2, "c", "d") + .hasLabel(3, "e", "f", "g") + .hasLabel(4, "h"); + } + } + + /** + * Validates that when traversing a path and labeling none of the elements during the traversal + * that all the labels are empty in the result. + * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_without_labels() { + // given a traversal where no objects have labels. + List paths = + g.V() + .hasLabel("person") + .has("name", "marko") + .outE("knows") + .inV() + .outE("created") + .inV() + .path() + .toList(); + + // then the paths returned should be labeled for + // all objects. 
+ for (Path path : paths) { + TinkerPathAssert.validatePathObjects(path); + for (int i = 0; i < 5; i++) assertThat(path).hasNoLabel(i); + } + } + + @Test + public void should_handle_asynchronous_execution() { + StringBuilder names = new StringBuilder(); + + CompletableFuture> future = g.V().hasLabel("person").promise(Traversal::toList); + try { + // dumb processing to make sure the completable future works correctly and correct results are + // returned + future + .thenAccept( + vertices -> vertices.forEach(vertex -> names.append((String) vertex.value("name")))) + .get(); + } catch (InterruptedException | ExecutionException e) { + fail("Shouldn't have thrown an exception waiting for the result to complete"); + } + + assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); + } + + /** + * Validates that if a traversal is made that encounters an error on the server side that the + * exception is set on the future. + * + * @test_category dse:graph + */ + @Test + @DseRequirement(min = "5.1.0") + public void should_fail_future_returned_from_promise_on_query_error() throws Exception { + CompletableFuture future = g.V("invalidid").peerPressure().promise(Traversal::next); + + try { + future.get(); + fail("Expected an ExecutionException"); + } catch (ExecutionException e) { + assertThat(e.getCause()).isInstanceOf(InvalidQueryException.class); + } + } + + /** + * A simple smoke test to ensure that a user can supply a custom {@link GraphTraversalSource} for + * use with DSLs. 
+ * + * @test_category dse:graph + */ + @Test + public void should_allow_use_of_dsl() throws Exception { + SocialTraversalSource gSocial = + EmptyGraph.instance() + .traversal(SocialTraversalSource.class) + .withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + List vertices = gSocial.persons("marko").knows("vadas").toList(); + assertThat(vertices.size()).isEqualTo(1); + assertThat(vertices.get(0)) + .hasProperty("name", "marko") + .hasProperty("age", 29) + .hasLabel("person"); + } + + /** + * Ensures that traversals with barriers (which return results bulked) contain the correct amount + * of end results. + * + *

      This will fail if ran against DSE < 5.0.9 or DSE < 5.1.2. + */ + @Test + public void should_return_correct_results_when_bulked() { + + List results = g.E().label().barrier().toList(); + Collections.sort(results); + + List expected = + Arrays.asList("knows", "created", "created", "knows", "created", "created"); + Collections.sort(expected); + + assertThat(results).isEqualTo(expected); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java new file mode 100644 index 00000000000..4a82285194d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required 
for fluent API support") +public class GraphDataTypeFluentIT extends GraphDataTypeITBase { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder() + .withDseWorkloads("graph") + .withDseConfiguration( + "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") + .build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + } + + @Override + public DseSession session() { + return sessionRule.session(); + } + + @Override + public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { + return sessionRule + .session() + .execute( + FluentGraphStatement.newInstance( + DseGraph.g.addV(vertexLabel).property(propertyName, value))) + .one() + .asVertex(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java new file mode 100644 index 00000000000..924b91cc289 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.4", description = "DSE 5.0.4 required for script API with GraphSON 2") +public class GraphDataTypeScriptIT extends GraphDataTypeITBase { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder() + .withDseWorkloads("graph") + .withDseConfiguration( + "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") + .build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + } + + @Override + public DseSession session() { + return sessionRule.session(); + } + + @Override + public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { + return sessionRule + .session() + .execute( + 
ScriptGraphStatement.builder("g.addV(labelP).property(nameP, valueP)") + .setQueryParam("labelP", vertexLabel) + .setQueryParam("nameP", propertyName) + .setQueryParam("valueP", value) + .build()) + .one() + .asVertex(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java new file mode 100644 index 00000000000..413168f4649 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addE; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addV; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0") +public class GraphTraversalBatchIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + sessionRule + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_NOT_STRICT)); + } + + @Test + public void should_allow_vertex_and_edge_insertions_in_batch() { + BatchGraphStatement batch = + BatchGraphStatement.builder() + .addTraversals( + ImmutableList.of( + addV("person").property("name", "batch1").property("age", 1), + addV("person").property("name", "batch2").property("age", 2))) + .build(); + + BatchGraphStatement batch2 = + BatchGraphStatement.builder() + .addTraversals(batch) + .addTraversal( + addE("knows") + .from(__.V().has("name", "batch1")) + .to(__.V().has("name", "batch2")) + .property("weight", 2.3f)) + .build(); + + assertThat(batch.size()).isEqualTo(2); + assertThat(batch2.size()).isEqualTo(3); + + sessionRule.session().execute(batch2); + + assertThat( + sessionRule + .session() + .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1"))) + .one() + .asVertex()) + .hasProperty("age", 1); + + assertThat( + sessionRule + .session() + .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch2"))) + .one() + .asVertex()) + .hasProperty("age", 
2); + + assertThat( + sessionRule + .session() + .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1").bothE())) + .one() + .asEdge()) + .hasProperty("weight", 2.3f) + .hasOutVLabel("person") + .hasInVLabel("person"); + } + + @Test + public void should_fail_if_no_bytecode_in_batch() { + BatchGraphStatement batch = + BatchGraphStatement.builder().addTraversals(ImmutableList.of()).build(); + assertThat(batch.size()).isEqualTo(0); + try { + sessionRule.session().execute(batch); + fail( + "Should have thrown InvalidQueryException because batch does not contain any traversals."); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains( + "Could not read the traversal from the request sent.", + "The batch statement sent does not contain any traversal."); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java new file mode 100644 index 00000000000..f6106364a23 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java @@ -0,0 +1,512 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static com.datastax.dse.driver.api.core.graph.FluentGraphStatement.newInstance; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static com.datastax.dse.driver.api.core.graph.TinkerPathAssert.validatePathObjects; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; 
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.apache.tinkerpop.gremlin.structure.Graph; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") +public class GraphTraversalIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + } + + /** + * Ensures that a previously returned {@link Vertex}'s {@link Vertex#id()} can be used as an input + * to {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource#V(Object...)} to + * retrieve the {@link Vertex} and that the returned {@link Vertex} is the same. 
+ * + * @test_category dse:graph + */ + @Test + public void should_use_vertex_id_as_parameter() { + GraphResultSet resultSet = + sessionRule.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + + List results = resultSet.all(); + + assertThat(results.size()).isEqualTo(1); + Vertex marko = results.get(0).asVertex(); + assertThat(marko).hasProperty("name", "marko"); + + resultSet = sessionRule.session().execute(newInstance(g.V(marko.id()))); + + results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + Vertex marko2 = results.get(0).asVertex(); + // Ensure that the returned vertex is the same as the first. + assertThat(marko2).isEqualTo(marko); + } + + /** + * Ensures that a previously returned {@link Edge}'s {@link Edge#id()} can be used as an input to + * {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource#E(Object...)} to + * retrieve the {@link Edge} and that the returned {@link Edge} is the same. + * + * @test_category dse:graph + */ + @Test + public void should_use_edge_id_as_parameter() { + GraphResultSet resultSet = + sessionRule.session().execute(newInstance(g.E().has("weight", 0.2f))); + + List results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + + Edge created = results.get(0).asEdge(); + assertThat(created).hasProperty("weight", 0.2f).hasInVLabel("software").hasOutVLabel("person"); + + resultSet = sessionRule.session().execute(newInstance(g.E(created.id()).inV())); + results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + Vertex lop = results.get(0).asVertex(); + + assertThat(lop).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); + } + + /** + * A sanity check that a returned {@link Vertex}'s id is a {@link Map}. This test could break in + * the future if the format of a vertex ID changes from a Map to something else in DSE. + * + *

      // TODO: this test will break in NGDG + * + * @test_category dse:graph + */ + @Test + public void should_deserialize_vertex_id_as_map() { + GraphResultSet resultSet = + sessionRule.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + + List results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + + Vertex marko = results.get(0).asVertex(); + assertThat(marko).hasProperty("name", "marko"); + + @SuppressWarnings("unchecked") + Map id = (Map) marko.id(); + assertThat(id) + .hasSize(3) + .containsEntry("~label", "person") + .containsKey("community_id") + .containsKey("member_id"); + } + + /** + * Ensures that a traversal that returns a result of mixed types is interpreted as a {@link Map} + * with {@link Object} values. Also uses {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal#by(org.apache.tinkerpop.gremlin.process.traversal.Traversal)} + * with an anonymous traversal to get inbound 'created' edges and folds them into a list. + * + *

      Executes a vertex traversal that binds label 'a' and 'b' to vertex properties and label 'c' + * to vertices that have edges from that vertex. + * + * @test_category dse:graph + */ + @Test + public void should_handle_result_object_of_mixed_types() { + // find all software vertices and select name, language, and find all vertices that created such + // software. + GraphResultSet rs = + sessionRule + .session() + .execute( + newInstance( + g.V() + .hasLabel("software") + .as("a", "b", "c") + .select("a", "b", "c") + .by("name") + .by("lang") + .by(__.in("created").fold()))); + + List results = rs.all(); + assertThat(results.size()).isEqualTo(2); + + // Ensure that we got 'lop' and 'ripple' for property a. + assertThat(results) + .extracting(m -> m.getByKey("a").as(Object.class)) + .containsOnly("lop", "ripple"); + + for (GraphNode result : results) { + // The row should represent a map with a, b, and c keys. + assertThat(ImmutableList.copyOf(result.keys())).containsOnlyOnce("a", "b", "c"); + // 'e' should not exist, thus it should be null. + assertThat(result.getByKey("e")).isNull(); + // both software are written in java. + assertThat(result.getByKey("b").isNull()).isFalse(); + assertThat(result.getByKey("b").asString()).isEqualTo("java"); + GraphNode c = result.getByKey("c"); + assertThat(c.isList()).isTrue(); + if (result.getByKey("a").asString().equals("lop")) { + // 'c' should contain marko, josh, peter. + // Ensure we have three vertices. + assertThat(c.size()).isEqualTo(3); + List vertices = + Lists.newArrayList( + c.getByIndex(0).asVertex(), c.getByIndex(1).asVertex(), c.getByIndex(2).asVertex()); + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("marko", "josh", "peter"); + } else { + // ripple, 'c' should contain josh. + // Ensure we have 1 vertex. 
+ assertThat(c.size()).isEqualTo(1); + Vertex vertex = c.getByIndex(0).asVertex(); + assertThat(vertex).hasProperty("name", "josh"); + } + } + } + + /** + * Ensures a traversal that yields no results is properly retrieved and is empty. + * + * @test_category dse:graph + */ + @Test + public void should_return_zero_results() { + GraphResultSet rs = sessionRule.session().execute(newInstance(g.V().hasLabel("notALabel"))); + assertThat(rs.all().size()).isZero(); + } + + /** + * Ensures a traversal that yields no results is properly retrieved and is empty, using GraphSON2 + * and the TinkerPop transform results function. + * + * @test_category dse:graph + */ + @Test + public void should_return_zero_results_graphson_2() { + GraphStatement simpleGraphStatement = + ScriptGraphStatement.newInstance("g.V().hasLabel('notALabel')"); + + GraphResultSet rs = sessionRule.session().execute(simpleGraphStatement); + assertThat(rs.one()).isNull(); + } + + /** + * Validates that a traversal using lambda operations with anonymous traversals are applied + * appropriately and return the expected results. + * + *

      Traversal that filters 'person'-labeled vertices by name 'marko' and flatMaps outgoing + * vertices on the 'knows' relationship by their outgoing 'created' vertices and then maps by + * their 'name' property and folds them into one list. + * + *

      Note: This does not validate lambdas with functions as those can't be interpreted and + * sent remotely. + * + * @test_category dse:graph + */ + @Test + public void should_handle_lambdas() { + // Find all people marko knows and the software they created. + GraphResultSet result = + sessionRule + .session() + .execute( + newInstance( + g.V() + .hasLabel("person") + .filter(__.has("name", "marko")) + .out("knows") + .flatMap(__.out("created")) + .map(__.values("name")) + .fold())); + + // Marko only knows josh and vadas, of which josh created lop and ripple. + List software = result.one().as(GenericType.listOf(String.class)); + assertThat(software).containsOnly("lop", "ripple"); + } + + /** + * Validates that when traversing a path and labeling some of the elements during the traversal + * that the output elements are properly labeled. + * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_with_some_labels() { + GraphResultSet rs = + sessionRule + .session() + .execute( + newInstance( + g.V() + .hasLabel("person") + .has("name", "marko") + .as("a") + .outE("knows") + .inV() + .as("c", "d") + .outE("created") + .as("e", "f", "g") + .inV() + .path())); + + List results = rs.all(); + assertThat(results.size()).isEqualTo(2); + for (GraphNode result : results) { + Path path = result.asPath(); + validatePathObjects(path); + assertThat(path.labels()).hasSize(5); + assertThat(path) + .hasLabel(0, "a") + .hasNoLabel(1) + .hasLabel(2, "c", "d") + .hasLabel(3, "e", "f", "g") + .hasNoLabel(4); + } + } + + /** + * Validates that when traversing a path and labeling all of the elements during the traversal + * that the output elements are properly labeled. 
+ * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_with_labels() { + GraphResultSet rs = + sessionRule + .session() + .execute( + newInstance( + g.V() + .hasLabel("person") + .has("name", "marko") + .as("a") + .outE("knows") + .as("b") + .inV() + .as("c", "d") + .outE("created") + .as("e", "f", "g") + .inV() + .as("h") + .path())); + List results = rs.all(); + assertThat(results.size()).isEqualTo(2); + for (GraphNode result : results) { + Path path = result.asPath(); + validatePathObjects(path); + assertThat(path.labels()).hasSize(5); + assertThat(path) + .hasLabel(0, "a") + .hasLabel(1, "b") + .hasLabel(2, "c", "d") + .hasLabel(3, "e", "f", "g") + .hasLabel(4, "h"); + } + } + + /** + * Validates that when traversing a path and labeling none of the elements during the traversal + * that all the labels are empty in the result. + * + * @test_category dse:graph + */ + @Test + public void should_resolve_path_without_labels() { + GraphResultSet rs = + sessionRule + .session() + .execute( + newInstance( + g.V() + .hasLabel("person") + .has("name", "marko") + .outE("knows") + .inV() + .outE("created") + .inV() + .path())); + List results = rs.all(); + assertThat(results.size()).isEqualTo(2); + for (GraphNode result : results) { + Path path = result.asPath(); + validatePathObjects(path); + assertThat(path.labels()).hasSize(5); + for (int i = 0; i < 5; i++) assertThat(path).hasNoLabel(i); + } + } + + /** + * Validates that a traversal returning a Tree structure is returned appropriately with the + * expected contents. + * + *

      Retrieves trees of people marko knows and the software they created. + * + * @test_category dse:graph + */ + @Test + public void should_parse_tree() { + // Get a tree structure showing the paths from mark to people he knows to software they've + // created. + GraphResultSet rs = + sessionRule + .session() + .execute( + newInstance( + g.V().hasLabel("person").out("knows").out("created").tree().by("name"))); + + List results = rs.all(); + assertThat(results.size()).isEqualTo(1); + + // [{key=marko, value=[{key=josh, value=[{key=ripple, value=[]}, {key=lop, value=[]}]}]}] + GraphNode result = results.get(0); + + @SuppressWarnings("unchecked") + Tree tree = result.as(Tree.class); + + assertThat(tree).tree("marko").tree("josh").tree("lop").isLeaf(); + + assertThat(tree).tree("marko").tree("josh").tree("ripple").isLeaf(); + } + + /** + * Ensures that a traversal that returns a sub graph can be retrieved. + * + *

      The subgraph is all members in a knows relationship, thus is all people who marko knows and + * the edges that connect them. + */ + @Test + public void should_handle_subgraph() { + GraphResultSet rs = + sessionRule + .session() + .execute(newInstance(g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph"))); + + List results = rs.all(); + assertThat(results.size()).isEqualTo(1); + + Graph graph = results.get(0).as(Graph.class); + + assertThat(graph.edges()).toIterable().hasSize(2); + assertThat(graph.vertices()).toIterable().hasSize(3); + } + + /** + * A simple smoke test to ensure that a user can supply a custom {@link GraphTraversalSource} for + * use with DSLs. + * + * @test_category dse:graph + */ + @Test + public void should_allow_use_of_dsl() throws Exception { + SocialTraversalSource gSocial = EmptyGraph.instance().traversal(SocialTraversalSource.class); + + GraphStatement gs = newInstance(gSocial.persons("marko").knows("vadas")); + + GraphResultSet rs = sessionRule.session().execute(gs); + List results = rs.all(); + + assertThat(results.size()).isEqualTo(1); + assertThat(results.get(0).asVertex()) + .hasProperty("name", "marko") + .hasProperty("age", 29) + .hasLabel("person"); + } + + /** + * Ensures that traversals with barriers (which return results bulked) contain the correct amount + * of end results. + * + *

      This will fail if ran against DSE < 5.0.9 or DSE < 5.1.2. + */ + @Test + public void should_return_correct_results_when_bulked() { + GraphResultSet rs = sessionRule.session().execute(newInstance(g.E().label().barrier())); + + List results = + rs.all().stream().map(GraphNode::asString).sorted().collect(Collectors.toList()); + + List expected = + Arrays.asList("knows", "created", "created", "knows", "created", "created"); + Collections.sort(expected); + + assertThat(results).isEqualTo(expected); + } + + @Test + public void should_handle_asynchronous_execution() { + StringBuilder names = new StringBuilder(); + + CompletionStage future = + sessionRule + .session() + .executeAsync(FluentGraphStatement.newInstance(g.V().hasLabel("person"))); + + try { + // dumb processing to make sure the completable future works correctly and correct results are + // returned + Iterable results = + future.thenApply(AsyncGraphResultSet::currentPage).toCompletableFuture().get(); + for (GraphNode gn : results) { + names.append(gn.asVertex().property("name").value()); + } + } catch (InterruptedException | ExecutionException e) { + fail("Shouldn't have thrown an exception waiting for the result to complete"); + } + + assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java new file mode 100644 index 00000000000..2ac58bedc2b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java @@ -0,0 +1,79 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +// INFO: meta props are going away in NGDG + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static com.datastax.dse.driver.api.core.graph.FluentGraphStatement.newInstance; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.ALLOW_SCANS; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.MAKE_STRICT; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +// INFO: meta props are going away in NGDG + +@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +public class GraphTraversalMetaPropertiesIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + /** Builds a simple schema that provides for a vertex with a property with sub properties. 
*/ + public static String metaProps = + MAKE_STRICT + + ALLOW_SCANS + + "schema.propertyKey('sub_prop').Text().create()\n" + + "schema.propertyKey('sub_prop2').Text().create()\n" + + "schema.propertyKey('meta_prop').Text().properties('sub_prop', 'sub_prop2').create()\n" + + "schema.vertexLabel('meta_v').properties('meta_prop').create()"; + + /** + * Ensures that a traversal that yields a vertex with a property that has its own properties that + * is appropriately parsed and made accessible via {@link VertexProperty#property(String)}. + * + * @test_category dse:graph + */ + @Test + public void should_parse_meta_properties() { + sessionRule.session().execute(ScriptGraphStatement.newInstance(metaProps)); + + GraphResultSet result = + sessionRule + .session() + .execute( + newInstance( + g.addV("meta_v") + .property("meta_prop", "hello", "sub_prop", "hi", "sub_prop2", "hi2"))); + + Vertex v = result.one().asVertex(); + assertThat(v).hasProperty("meta_prop"); + + VertexProperty metaProp = v.property("meta_prop"); + assertThat(metaProp) + .hasValue("hello") + .hasProperty("sub_prop", "hi") + .hasProperty("sub_prop2", "hi2"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java new file mode 100644 index 00000000000..e1930b41861 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -0,0 +1,79 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static com.datastax.dse.driver.api.core.graph.FluentGraphStatement.newInstance; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.ALLOW_SCANS; +import static com.datastax.dse.driver.api.core.graph.SampleGraphScripts.MAKE_STRICT; +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Iterator; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.VertexProperty; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +public class GraphTraversalMultiPropertiesIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static SessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + /** Builds a simple schema that provides for a vertex with a multi-cardinality property. 
*/ + public static final String multiProps = + MAKE_STRICT + + ALLOW_SCANS + + "schema.propertyKey('multi_prop').Text().multiple().create()\n" + + "schema.vertexLabel('multi_v').properties('multi_prop').create()\n"; + + /** + * Ensures that a traversal that yields a vertex with a property name that is present multiple + * times that the properties are parsed and made accessible via {@link + * Vertex#properties(String...)}. + * + * @test_category dse:graph + */ + @Test + public void should_parse_multiple_cardinality_properties() { + // given a schema that defines multiple cardinality properties. + sessionRule.session().execute(ScriptGraphStatement.newInstance(multiProps)); + + // when adding a vertex with a multiple cardinality property + GraphResultSet result = + sessionRule + .session() + .execute( + newInstance( + g.addV("multi_v") + .property("multi_prop", "Hello") + .property("multi_prop", "Sweet") + .property("multi_prop", "World"))); + + Vertex v = result.one().asVertex(); + assertThat(v).hasProperty("multi_prop"); + + Iterator> multiProp = v.properties("multi_prop"); + assertThat(multiProp) + .toIterable() + .extractingResultOf("value") + .containsExactly("Hello", "Sweet", "World"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java new file mode 100644 index 00000000000..39b8f3cec94 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.insights; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.insights.InsightsClient; +import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import io.netty.util.concurrent.DefaultEventExecutor; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.7.0", description = "DSE 6.7.0 required for Insights support") +public class InsightsClientIT { + private static final StackTraceElement[] EMPTY_STACK_TRACE = {}; + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder() + .withNodes(1) + .withJvmArgs( + "-Dinsights.service_options_enabled=true", + "-Dinsights.default_mode=ENABLED_WITH_LOCAL_STORAGE") + .build(); + + private static SessionRule sessionRule = new DseSessionRuleBuilder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Test + public void should_send_insights_startup_event_using_client() + throws ExecutionException, InterruptedException, TimeoutException { + // given + InsightsClient insightsClient = + InsightsClient.createInsightsClient( + new InsightsConfiguration(true, 300000L, new DefaultEventExecutor()), + (DseDriverContext) sessionRule.session().getContext(), + EMPTY_STACK_TRACE); + 
+ // when + insightsClient.sendStartupMessage().toCompletableFuture().get(1000, TimeUnit.SECONDS); + + // then no exception + } + + @Test + public void should_send_insights_status_event_using_client() + throws ExecutionException, InterruptedException, TimeoutException { + // given + InsightsClient insightsClient = + InsightsClient.createInsightsClient( + new InsightsConfiguration(true, 300000L, new DefaultEventExecutor()), + (DseDriverContext) sessionRule.session().getContext(), + EMPTY_STACK_TRACE); + + // when + insightsClient.sendStatusMessage().toCompletableFuture().get(1000, TimeUnit.SECONDS); + + // then no exception + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java new file mode 100644 index 00000000000..3a1e1917dfb --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Set; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@DseRequirement(min = "5.1") +public class MetadataIT { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = new DseSessionRuleBuilder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Test + public void should_expose_dse_node_properties() { + Node node = sessionRule.session().getMetadata().getNodes().values().iterator().next(); + + // Basic checks as we want something that will work with a large range of DSE versions: + assertThat(node.getExtras()) + .containsKeys( + DseNodeProperties.DSE_VERSION, + DseNodeProperties.DSE_WORKLOADS, + DseNodeProperties.SERVER_ID); + assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)).isInstanceOf(Version.class); + assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); + assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); + } +} diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java new file mode 100644 index 00000000000..101c2f82666 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Optional; +import org.junit.experimental.categories.Category; + +/* Abstract class to hold common methods for Metadata Schema tests. */ +@Category(ParallelizableTests.class) +public abstract class AbstractMetadataIT { + + /** + * Asserts the presence of a Keyspace and that it's name matches the keyspace associated with the + * Session Rule. + */ + protected void assertKeyspace(Optional keyspaceOpt) { + // assert the keyspace + assertThat(keyspaceOpt) + .hasValueSatisfying( + keyspace -> { + assertThat(keyspace).isInstanceOf(DseKeyspaceMetadata.class); + assertThat(keyspace.getName().asInternal()) + .isEqualTo(getSessionRule().keyspace().asInternal()); + }); + } + + /* Convenience method for executing a CQL statement using the test's Session Rule. 
*/ + public void execute(String cql) { + getSessionRule() + .session() + .execute( + SimpleStatement.builder(cql) + .setExecutionProfile(getSessionRule().slowProfile()) + .build()); + } + + /** + * Convenience method for retrieving the Keyspace metadata from this test's Session Rule. Also + * asserts the Keyspace exists and has the expected name. + */ + public DseKeyspaceMetadata getKeyspace() { + Optional keyspace = + getSessionRule().session().getMetadata().getKeyspace(getSessionRule().keyspace()); + assertKeyspace(keyspace); + return ((DseKeyspaceMetadata) keyspace.get()); + } + + /* Concrete ITs should return their ClassRule SessionRule. */ + protected abstract SessionRule getSessionRule(); +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java new file mode 100644 index 00000000000..22d5cc5d6f8 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Optional; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0") +public class DseAggregateMetadataIT extends AbstractMetadataIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Override + protected SessionRule getSessionRule() { + return DseAggregateMetadataIT.SESSION_RULE; + } + + @Test + public void should_parse_aggregate_without_deterministic() throws Exception { + String cqlFunction = + "CREATE FUNCTION nondetf(i int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return new java.util.Random().nextInt(i);';"; + String cqlAggregate = "CREATE AGGREGATE nondeta() SFUNC nondetf STYPE int INITCOND 0;"; + execute(cqlFunction); + execute(cqlAggregate); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional aggregateOpt = keyspace.getAggregate("nondeta"); + assertThat(aggregateOpt.map(DseAggregateMetadata.class::cast)) + .hasValueSatisfying( + aggregate -> { + assertThat(aggregate.isDeterministic()).isFalse(); + 
assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); + assertThat(aggregate.describe(false)) + .isEqualTo( + String.format( + "CREATE AGGREGATE \"%s\".\"nondeta\"() SFUNC \"nondetf\" STYPE int INITCOND 0;", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_aggregate_with_deterministic() throws Exception { + String cqlFunction = + "CREATE FUNCTION detf(i int, y int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return i+y;';"; + String cqlAggregate = + "CREATE AGGREGATE deta(int) SFUNC detf STYPE int INITCOND 0 DETERMINISTIC;"; + execute(cqlFunction); + execute(cqlAggregate); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional aggregateOpt = keyspace.getAggregate("deta", DataTypes.INT); + assertThat(aggregateOpt.map(DseAggregateMetadata.class::cast)) + .hasValueSatisfying( + aggregate -> { + assertThat(aggregate.isDeterministic()).isTrue(); + assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); + assertThat(aggregate.describe(false)) + .isEqualTo( + String.format( + "CREATE AGGREGATE \"%s\".\"deta\"(int) SFUNC \"detf\" STYPE int INITCOND 0 DETERMINISTIC;", + keyspace.getName().asInternal())); + }); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java new file mode 100644 index 00000000000..b87f311ce9d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java @@ -0,0 +1,195 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Optional; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0") +public class DseFunctionMetadataIT extends AbstractMetadataIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Override + public SessionRule getSessionRule() { + return DseFunctionMetadataIT.SESSION_RULE; + } + + @Test + public void should_parse_function_without_deterministic_or_monotonic() throws Exception { + String cqlFunction = + "CREATE FUNCTION nondetf(i int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return new java.util.Random().nextInt(i);';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = keyspace.getFunction("nondetf", DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + .hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isFalse(); + assertThat(function.isMonotonic()).isFalse(); + 
assertThat(function.getMonotonicArgumentNames()).isEmpty(); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return new java.util.Random().nextInt(i);"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"nondetf\"(\"i\" int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return new java.util.Random().nextInt(i);';", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_function_with_deterministic() throws Exception { + String cqlFunction = + "CREATE FUNCTION detf(i int, y int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC LANGUAGE java AS 'return i+y;';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = + keyspace.getFunction("detf", DataTypes.INT, DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + .hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isTrue(); + assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getMonotonicArgumentNames()).isEmpty(); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return i+y;"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"detf\"(\"i\" int,\"y\" int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC LANGUAGE java AS 'return i+y;';", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_function_with_monotonic() throws Exception { + String cqlFunction = + "CREATE FUNCTION monotonic(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int MONOTONIC LANGUAGE java AS 'return dividend / divisor;';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = + 
keyspace.getFunction("monotonic", DataTypes.INT, DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + .hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isFalse(); + assertThat(function.isMonotonic()).isTrue(); + assertThat(function.getMonotonicArgumentNames()) + .containsExactly( + CqlIdentifier.fromCql("dividend"), CqlIdentifier.fromCql("divisor")); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return dividend / divisor;"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"monotonic\"(\"dividend\" int,\"divisor\" int) CALLED ON NULL INPUT RETURNS int MONOTONIC LANGUAGE java AS 'return dividend / divisor;';", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_function_with_monotonic_on() throws Exception { + String cqlFunction = + "CREATE FUNCTION monotonic_on(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = + keyspace.getFunction("monotonic_on", DataTypes.INT, DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + .hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isFalse(); + assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getMonotonicArgumentNames()) + .containsExactly(CqlIdentifier.fromCql("dividend")); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return dividend / divisor;"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"monotonic_on\"(\"dividend\" int,\"divisor\" int) CALLED ON NULL INPUT 
RETURNS int MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_function_with_deterministic_and_monotonic() throws Exception { + String cqlFunction = + "CREATE FUNCTION det_and_monotonic(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC LANGUAGE java AS 'return dividend / divisor;';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = + keyspace.getFunction("det_and_monotonic", DataTypes.INT, DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + .hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isTrue(); + assertThat(function.isMonotonic()).isTrue(); + assertThat(function.getMonotonicArgumentNames()) + .containsExactly( + CqlIdentifier.fromCql("dividend"), CqlIdentifier.fromCql("divisor")); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return dividend / divisor;"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"det_and_monotonic\"(\"dividend\" int,\"divisor\" int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC LANGUAGE java AS 'return dividend / divisor;';", + keyspace.getName().asInternal())); + }); + } + + @Test + public void should_parse_function_with_deterministic_and_monotonic_on() throws Exception { + String cqlFunction = + "CREATE FUNCTION det_and_monotonic_on(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';"; + execute(cqlFunction); + DseKeyspaceMetadata keyspace = getKeyspace(); + Optional functionOpt = + keyspace.getFunction("det_and_monotonic_on", DataTypes.INT, DataTypes.INT); + assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) + 
.hasValueSatisfying( + function -> { + assertThat(function.isDeterministic()).isTrue(); + assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getMonotonicArgumentNames()) + .containsExactly(CqlIdentifier.fromCql("dividend")); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getBody()).isEqualTo("return dividend / divisor;"); + assertThat(function.describe(false)) + .isEqualTo( + String.format( + "CREATE FUNCTION \"%s\".\"det_and_monotonic_on\"(\"dividend\" int,\"divisor\" int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';", + keyspace.getName().asInternal())); + }); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java new file mode 100644 index 00000000000..49b36c75066 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.testinfra; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.core.session.SessionBuilder; + +public class DseSessionBuilderInstantiator { + public static SessionBuilder builder() { + return DseSession.builder(); + } + + public static ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseDriverConfigLoader.programmaticBuilder(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java new file mode 100644 index 00000000000..ebc3a6a809b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.testinfra.session; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class DseSessionRule extends SessionRule { + + private final String graphName; + + public DseSessionRule( + CassandraResourceRule cassandraResource, + boolean createKeyspace, + NodeStateListener nodeStateListener, + SchemaChangeListener schemaChangeListener, + DriverConfigLoader configLoader, + String graphName) { + super(cassandraResource, createKeyspace, nodeStateListener, schemaChangeListener, configLoader); + this.graphName = graphName; + } + + @Override + public Statement apply(Statement base, Description description) { + return super.apply(base, description); + } + + @Override + protected void before() { + super.before(); + if (graphName != null) { + session() + .execute( + ScriptGraphStatement.newInstance( + String.format("system.graph('%s').ifNotExists().create()", this.graphName)) + .setSystemQuery(true)); + } + } + + @Override + protected void after() { + if (graphName != null) { + session() + .execute( + ScriptGraphStatement.newInstance( + String.format("system.graph('%s').drop()", this.graphName)) + .setSystemQuery(true)); + } + super.after(); + } + + public String getGraphName() { + return graphName; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java new file mode 100644 index 00000000000..87d9de1e1cb --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java @@ -0,0 +1,87 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.testinfra.session; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRuleBuilder; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigValueFactory; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Supplier; + +public class DseSessionRuleBuilder extends SessionRuleBuilder { + + private static final AtomicInteger GRAPH_NAME_INDEX = new AtomicInteger(); + + private boolean createGraph; + + public DseSessionRuleBuilder(CassandraResourceRule cassandraResource) { + super(cassandraResource); + } + + /** + * Configures the rule to create a new graph instance. + * + *

      This assumes that the associated {@link CassandraResourceRule} is a DSE instance with the + * graph workload enabled. + * + *

      The name of the graph will be injected in the session's configuration, so that all graph + * statements are automatically routed to it. It's also exposed via {@link + * DseSessionRule#getGraphName()}. + */ + public DseSessionRuleBuilder withCreateGraph() { + this.createGraph = true; + return this; + } + + @Override + public DseSessionRule build() { + final String graphName; + final DriverConfigLoader actualLoader; + if (createGraph) { + graphName = "dsedrivertests_" + GRAPH_NAME_INDEX.getAndIncrement(); + + // Inject the generated graph name in the provided configuration, so that the test doesn't + // need to set it explicitly on every statement. + if (loader == null) { + // This would normally be handled in DseSessionBuilder, do it early because we need it now + loader = new DefaultDseDriverConfigLoader(); + } else { + // To keep this relatively simple we assume that if the config loader was provided in a + // test, it is the Typesafe-config based one. This is always true in our integration tests. 
+ assertThat(loader).isInstanceOf(DefaultDriverConfigLoader.class); + } + Supplier originalSupplier = ((DefaultDriverConfigLoader) loader).getConfigSupplier(); + Supplier actualSupplier = + () -> + originalSupplier + .get() + .withValue( + DseDriverOption.GRAPH_NAME.getPath(), + ConfigValueFactory.fromAnyRef(graphName)); + actualLoader = new DefaultDseDriverConfigLoader(actualSupplier); + } else { + graphName = null; + actualLoader = loader; + } + + return new DseSessionRule( + cassandraResource, + createKeyspace, + nodeStateListener, + schemaChangeListener, + actualLoader, + graphName); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java new file mode 100644 index 00000000000..1547ca68953 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.dse.driver.osgi.support.DseOsgiSimpleTests; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +/** + * Test that uses a policy from a separate bundle from the core driver to ensure that the driver is + * able to load that policy via Reflection. To support this, the driver uses + * DynamicImport-Package: *. 
+ */ +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +public class DseOsgiCustomLoadBalancingPolicyIT implements DseOsgiSimpleTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + return options( + driverDseBundle(), + driverDseQueryBuilderBundle(), + driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles()); + } + + @Override + public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseSessionBuilderInstantiator.configLoaderBuilder() + .withClass( + DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, SortingLoadBalancingPolicy.class); + } + + @Test + public void should_connect_and_query_with_custom_lbp() { + connectAndQuerySimple(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java new file mode 100644 index 00000000000..1569e83c3a2 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.esriBundles; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.osgi.support.DseOsgiGeoTypesTests; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +@DseRequirement(min = "5.0", description = "Requires geo types") +public class DseOsgiGeoTypesIT implements DseOsgiGeoTypesTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + return options( + driverDseBundle(), + driverDseQueryBuilderBundle(), + driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles(), + esriBundles()); + } + + 
@Test + public void should_connect_and_query_without_geo_types() { + connectAndQuerySimple(); + } + + @Test + public void should_connect_and_query_with_geo_types() { + connectAndQueryGeoTypes(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java new file mode 100644 index 00000000000..2c925e8808f --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.tinkerpopBundles; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.osgi.support.DseOsgiGraphTests; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import 
org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +@DseRequirement(min = "5.0", description = "Requires Graph") +public class DseOsgiGraphIT implements DseOsgiGraphTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withNodes(1).withDseWorkloads("graph").build(); + + @Configuration + public Option[] config() { + return options( + driverDseBundle(), + driverDseQueryBuilderBundle(), + driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles(), + tinkerpopBundles()); + } + + @Test + public void should_connect_and_query_without_graph() { + connectAndQuerySimple(); + } + + @Test + public void should_connect_and_query_with_graph() { + connectAndQueryGraph(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java new file mode 100644 index 00000000000..9f399c17e27 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static com.datastax.oss.driver.osgi.BundleOptions.lz4Bundle; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.dse.driver.osgi.support.DseOsgiSimpleTests; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +public class DseOsgiLz4IT implements DseOsgiSimpleTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + return options( + lz4Bundle(), + driverDseBundle(), + driverDseQueryBuilderBundle(), + 
driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles()); + } + + @Override + public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseSessionBuilderInstantiator.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "lz4"); + } + + @Test + public void should_connect_and_query_with_lz4_compression() { + connectAndQuerySimple(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java new file mode 100644 index 00000000000..72c2c29c480 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.reactiveBundles; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.osgi.support.DseOsgiReactiveTests; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import 
org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +public class DseOsgiReactiveIT implements DseOsgiReactiveTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + return options( + driverDseBundle(), + driverDseQueryBuilderBundle(), + driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles(), + reactiveBundles()); + } + + @Test + public void should_connect_and_query_without_reactive() { + connectAndQuerySimple(); + } + + @Test + public void should_connect_and_query_with_reactive() { + connectAndQueryReactive(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java new file mode 100644 index 00000000000..186f1e1b81e --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreShadedBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseShadedBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.reactiveBundles; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.tinkerpopBundles; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.osgi.support.DseOsgiGeoTypesTests; +import com.datastax.dse.driver.osgi.support.DseOsgiGraphTests; +import com.datastax.dse.driver.osgi.support.DseOsgiReactiveTests; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +@DseRequirement(min = "5.0", description = "Requires Graph and geo types") +public class DseOsgiShadedIT + implements DseOsgiReactiveTests, DseOsgiGraphTests, DseOsgiGeoTypesTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = + 
CustomCcmRule.builder().withNodes(1).withDseWorkloads("graph").build(); + + @Configuration + public Option[] config() { + return options( + driverDseShadedBundle(), + driverDseQueryBuilderBundle(), + driverCoreShadedBundle(), + driverQueryBuilderBundle(), + baseOptions(), + // do not include ESRI nor Jackson as they are shaded; include Rx and Tinkerpop because they + // are not shaded + reactiveBundles(), + tinkerpopBundles()); + } + + @Test + public void should_connect_and_query_shaded_simple() { + connectAndQuerySimple(); + } + + @Test + public void should_connect_and_query_shaded_with_geo_types() { + connectAndQueryGeoTypes(); + } + + @Test + public void should_connect_and_query_shaded_with_graph() { + connectAndQueryGraph(); + } + + @Test + public void should_connect_and_query_shaded_with_reactive() { + connectAndQueryReactive(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java new file mode 100644 index 00000000000..4583630c72c --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static com.datastax.oss.driver.osgi.BundleOptions.snappyBundle; +import static org.ops4j.pax.exam.CoreOptions.options; + +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.dse.driver.osgi.support.DseOsgiSimpleTests; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +public class DseOsgiSnappyIT implements DseOsgiSimpleTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + return options( + snappyBundle(), + driverDseBundle(), + driverDseQueryBuilderBundle(), + 
driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles()); + } + + @Override + public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseSessionBuilderInstantiator.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "snappy"); + } + + @Test + public void should_connect_and_query_with_snappy_compression() { + connectAndQuerySimple(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java new file mode 100644 index 00000000000..e89d7a9ce45 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java @@ -0,0 +1,128 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi; + +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.baseOptions; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverCoreBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverDseQueryBuilderBundle; +import static com.datastax.dse.driver.osgi.support.DseBundleOptions.driverQueryBuilderBundle; +import static com.datastax.oss.driver.osgi.BundleOptions.jacksonBundles; +import static org.assertj.core.api.Assertions.assertThat; +import static org.ops4j.pax.exam.CoreOptions.options; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.AppenderBase; +import com.datastax.dse.driver.osgi.support.DseOsgiSimpleTests; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import 
com.datastax.oss.driver.categories.IsolatedTests; +import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.stream.Collectors; +import org.junit.After; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.junit.PaxExam; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; +import org.ops4j.pax.exam.spi.reactors.PerMethod; +import org.slf4j.LoggerFactory; + +@RunWith(PaxExam.class) +@ExamReactorStrategy(PerMethod.class) +@Category(IsolatedTests.class) +public class DseOsgiVanillaIT implements DseOsgiSimpleTests { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); + + @Configuration + public Option[] config() { + // this configuration purposely excludes bundles whose resolution is optional: + // ESRI, Reactive Streams and Tinkerpop. This allows to validate that the driver can still + // work properly in an OSGi container as long as the missing packages are not accessed. 
+ return options( + driverDseBundle(), + driverDseQueryBuilderBundle(), + driverCoreBundle(), + driverQueryBuilderBundle(), + baseOptions(), + jacksonBundles()); + } + + @Before + public void addTestAppender() { + Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.dse.driver"); + Level oldLevel = logger.getLevel(); + logger.getLoggerContext().putObject("oldLevel", oldLevel); + logger.setLevel(Level.WARN); + TestAppender appender = new TestAppender(); + logger.addAppender(appender); + appender.start(); + } + + @After + public void removeTestAppender() { + Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.dse.driver"); + logger.detachAppender("test"); + Level oldLevel = (Level) logger.getLoggerContext().getObject("oldLevel"); + logger.setLevel(oldLevel); + } + + @Test + public void should_connect_and_query_simple() { + connectAndQuerySimple(); + assertLogMessagesPresent(); + } + + private void assertLogMessagesPresent() { + Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.dse.driver"); + TestAppender appender = (TestAppender) logger.getAppender("test"); + List warnLogs = + appender.events.stream() + .filter(event -> event.getLevel().toInt() >= Level.WARN.toInt()) + .map(ILoggingEvent::getFormattedMessage) + .collect(Collectors.toList()); + assertThat(warnLogs).hasSize(3); + assertThat(warnLogs) + .anySatisfy( + msg -> + assertThat(msg) + .contains( + "Could not register Geo codecs; ESRI API might be missing from classpath")) + .anySatisfy( + msg -> + assertThat(msg) + .contains( + "Could not register Reactive extensions; Reactive Streams API might be missing from classpath")) + .anySatisfy( + msg -> + assertThat(msg) + .contains( + "Could not register Graph extensions; Tinkerpop API might be missing from classpath")); + } + + private static class TestAppender extends AppenderBase { + + private final List events = new CopyOnWriteArrayList<>(); + + private TestAppender() { + name = "test"; + } + + @Override + protected void 
append(ILoggingEvent event) { + events.add(event); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java new file mode 100644 index 00000000000..fa471aa9d0f --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java @@ -0,0 +1,187 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi.support; + +import static com.datastax.oss.driver.osgi.BundleOptions.getVersion; + +import com.datastax.oss.driver.osgi.BundleOptions; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.options.CompositeOption; +import org.ops4j.pax.exam.options.MavenArtifactProvisionOption; +import org.ops4j.pax.exam.options.UrlProvisionOption; +import org.ops4j.pax.exam.options.WrappedUrlProvisionOption.OverwriteMode; +import org.ops4j.pax.exam.util.PathUtils; + +public class DseBundleOptions { + + public static UrlProvisionOption driverDseBundle() { + return CoreOptions.bundle( + "reference:file:" + + PathUtils.getBaseDir() + + "/../core/target/dse-java-driver-core-" + + getVersion("project.version") + + ".jar"); + } + + public static UrlProvisionOption driverDseShadedBundle() { + return CoreOptions.bundle( + "reference:file:" + + PathUtils.getBaseDir() + + "/../core-shaded/target/dse-java-driver-core-shaded-" + + getVersion("project.version") + + ".jar"); + } + + public static UrlProvisionOption driverDseQueryBuilderBundle() { + return CoreOptions.bundle( + "reference:file:" + + PathUtils.getBaseDir() + + "/../query-builder/target/dse-java-driver-query-builder-" + + getVersion("project.version") + + ".jar"); + } + + public static MavenArtifactProvisionOption driverCoreBundle() { + return 
CoreOptions.mavenBundle( + "com.datastax.oss", "java-driver-core", getVersion("oss-driver.version")); + } + + public static MavenArtifactProvisionOption driverCoreShadedBundle() { + return CoreOptions.mavenBundle( + "com.datastax.oss", "java-driver-core-shaded", getVersion("oss-driver.version")); + } + + public static MavenArtifactProvisionOption driverQueryBuilderBundle() { + return CoreOptions.mavenBundle( + "com.datastax.oss", "java-driver-query-builder", getVersion("oss-driver.version")); + } + + private static MavenArtifactProvisionOption driverTestInfraBundle() { + return CoreOptions.mavenBundle( + "com.datastax.oss", "java-driver-test-infra", getVersion("oss-driver.version")); + } + + public static CompositeOption baseOptions() { + // In theory, the options declared here should only include bundles that must be present + // in order for both the non-shaded and shaded driver versions to work properly. + // Bundles that should be present only for the non-shaded driver version should be declared + // elsewhere (e.g. ESRI, legacy "Codehaus" Jackson). Also, bundles that have optional resolution + // should be declared elsewhere (e.g. Tinkerpop, Reactive Streams). + // However we have two exceptions: Netty and modern "FasterXML" Jackson; both are shaded, but + // need to be present in all cases because the test bundles requires their presence (see + // #testBundles method). 
+ return () -> + CoreOptions.options( + CoreOptions.mavenBundle( + "com.datastax.oss", "java-driver-shaded-guava", getVersion("guava.version")), + CoreOptions.mavenBundle( + "io.dropwizard.metrics", "metrics-core", getVersion("metrics.version")), + CoreOptions.mavenBundle("org.slf4j", "slf4j-api", getVersion("slf4j.version")), + CoreOptions.mavenBundle( + "org.hdrhistogram", "HdrHistogram", getVersion("hdrhistogram.version")), + CoreOptions.mavenBundle("com.typesafe", "config", getVersion("config.version")), + CoreOptions.mavenBundle( + "com.datastax.oss", "native-protocol", getVersion("native-protocol.version")), + CoreOptions.mavenBundle( + "com.datastax.dse", + "dse-native-protocol", + getVersion("dse-native-protocol.version")), + BundleOptions.logbackBundles(), + CoreOptions.systemProperty("logback.configurationFile") + .value("file:" + PathUtils.getBaseDir() + "/src/test/resources/logback-test.xml"), + testBundles()); + } + + public static CompositeOption tinkerpopBundles() { + String version = System.getProperty("tinkerpop.version"); + return () -> + CoreOptions.options( + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle("org.apache.tinkerpop", "gremlin-core", version)) + .exports( + // avoid exporting 'org.apache.tinkerpop.gremlin.*' as other Tinkerpop jars have + // this root package as well + "org.apache.tinkerpop.gremlin.jsr223.*", + "org.apache.tinkerpop.gremlin.process.*", + "org.apache.tinkerpop.gremlin.structure.*", + "org.apache.tinkerpop.gremlin.util.*") + .bundleVersion(version) + .bundleSymbolicName("org.apache.tinkerpop.gremlin-core") + .overwriteManifest(OverwriteMode.FULL), + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle("org.apache.tinkerpop", "gremlin-driver", version)) + .exports("org.apache.tinkerpop.gremlin.driver.*") + .bundleVersion(version) + .bundleSymbolicName("org.apache.tinkerpop.gremlin-driver") + .overwriteManifest(OverwriteMode.FULL), + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle("org.apache.tinkerpop", 
"tinkergraph-gremlin", version)) + .exports("org.apache.tinkerpop.gremlin.tinkergraph.*") + .bundleVersion(version) + .bundleSymbolicName("org.apache.tinkerpop.tinkergraph-gremlin") + .overwriteManifest(OverwriteMode.FULL), + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle("org.apache.tinkerpop", "gremlin-shaded", version)) + .exports("org.apache.tinkerpop.shaded.*") + .bundleVersion(version) + .bundleSymbolicName("org.apache.tinkerpop.gremlin-shaded") + .overwriteManifest(OverwriteMode.FULL), + // Note: the versions below are hard-coded because they shouldn't change very often, + // but if the tests fail because of them, we should consider parameterizing them + CoreOptions.mavenBundle("commons-configuration", "commons-configuration", "1.10"), + CoreOptions.mavenBundle("commons-collections", "commons-collections", "3.2.2"), + CoreOptions.mavenBundle("org.apache.commons", "commons-lang3", "3.8.1"), + CoreOptions.mavenBundle("commons-lang", "commons-lang", "2.6"), + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle("org.javatuples", "javatuples", "1.2")) + .exports("org.javatuples.*") + .bundleVersion("1.2") + .bundleSymbolicName("org.javatuples") + .overwriteManifest(OverwriteMode.FULL)); + } + + private static CompositeOption testBundles() { + return () -> + CoreOptions.options( + driverTestInfraBundle(), + BundleOptions.simulacronBundles(), + BundleOptions + .nettyBundles(), // required by the test infra bundle, even for the shaded jar + BundleOptions + .jacksonBundles(), // required by the Simulacron bundle, even for the shaded jar + CoreOptions.mavenBundle( + "org.apache.commons", "commons-exec", System.getProperty("commons-exec.version")), + CoreOptions.mavenBundle( + "org.assertj", "assertj-core", System.getProperty("assertj.version")), + CoreOptions.junitBundles()); + } + + public static CompositeOption esriBundles() { + return () -> + CoreOptions.options( + CoreOptions.wrappedBundle( + CoreOptions.mavenBundle( + "com.esri.geometry", 
"esri-geometry-api", getVersion("esri.version"))) + .exports("com.esri.core.geometry.*") + .imports("org.json", "org.codehaus.jackson") + .bundleVersion(getVersion("esri.version")) + .bundleSymbolicName("com.esri.core.geometry") + .overwriteManifest(OverwriteMode.FULL), + CoreOptions.mavenBundle("org.json", "json", getVersion("json.version")), + CoreOptions.mavenBundle( + "org.codehaus.jackson", "jackson-core-asl", getVersion("legacy-jackson.version"))); + } + + public static CompositeOption reactiveBundles() { + return () -> + CoreOptions.options( + CoreOptions.mavenBundle( + "org.reactivestreams", "reactive-streams", getVersion("reactive-streams.version")), + CoreOptions.mavenBundle( + "io.reactivex.rxjava2", "rxjava", getVersion("rxjava.version"))); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java new file mode 100644 index 00000000000..816b9c6a1da --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi.support; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.type.DataTypes; + +public interface DseOsgiGeoTypesTests extends DseOsgiSimpleTests { + + /** + * Ensures a session can be established and a query using Geo Types can be made when running in an + * OSGi container. 
+ */ + default void connectAndQueryGeoTypes() { + + try (DseSession session = sessionBuilder().build()) { + + session.execute(String.format(CREATE_KEYSPACE, "test_osgi_geo")); + + // test that ESRI is available + session.execute( + // also exercise the DSE query builder + DseSchemaBuilder.createTable("test_osgi_geo", "t1") + .ifNotExists() + .withPartitionKey("pk", DataTypes.INT) + .withColumn("v", DataTypes.custom("PointType")) + .build()); + + Point point = Point.fromCoordinates(-1.0, -5); + + session.execute( + SimpleStatement.newInstance("INSERT INTO test_osgi_geo.t1 (pk, v) VALUES (0, ?)", point)); + + Row row = + session + .execute( + // test that the Query Builder is availabconnectAndQueryle + selectFrom("test_osgi_geo", "t1") + .column("v") + .where(column("pk").isEqualTo(literal(0))) + .build()) + .one(); + + assertThat(row).isNotNull(); + assertThat(row.get(0, Point.class)).isEqualTo(point); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java new file mode 100644 index 00000000000..034a63d33f5 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi.support; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import java.util.List; +import org.apache.tinkerpop.gremlin.structure.Property; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +public interface DseOsgiGraphTests extends DseOsgiSimpleTests { + + String CREATE_GRAPH = "system.graph('%s').ifNotExists().create()"; + + String GRAPH_SCHEMA = + "schema.propertyKey('name').Text().ifNotExists().create();" + + "schema.vertexLabel('person').properties('name').ifNotExists().create();"; + + String GRAPH_DATA = "g.addV('person').property('name', 'alice').next();"; + + String ALLOW_SCANS = "schema.config().option('graph.allow_scan').set('true');"; + + @Override + default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseSessionBuilderInstantiator.configLoaderBuilder() + .withString(DseDriverOption.GRAPH_NAME, "test_osgi_graph"); + } + + /** + * Ensures a session can be established and a query using DSE Graph can be made when running in an + * OSGi container. 
+ */ + default void connectAndQueryGraph() { + + try (DseSession session = sessionBuilder().build()) { + + // Test that Graph + Tinkerpop is available + session.execute( + ScriptGraphStatement.newInstance(String.format(CREATE_GRAPH, "test_osgi_graph")) + .setSystemQuery(true)); + session.execute(ScriptGraphStatement.newInstance(GRAPH_SCHEMA)); + session.execute(ScriptGraphStatement.newInstance(GRAPH_DATA)); + session.execute(ScriptGraphStatement.newInstance(ALLOW_SCANS)); + + GraphResultSet resultSet = + session.execute( + FluentGraphStatement.newInstance(g.V().hasLabel("person").has("name", "alice"))); + List results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + Vertex actual = results.get(0).asVertex(); + assertThat(actual.properties("name")) + .toIterable() + .extracting(Property::value) + .contains("alice"); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java new file mode 100644 index 00000000000..66223230e34 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi.support; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.type.DataTypes; +import io.reactivex.Flowable; + +public interface DseOsgiReactiveTests extends DseOsgiSimpleTests { + + /** + * Ensures a session can be established and a query using Reactive can be made when running in an + * OSGi container. + */ + default void connectAndQueryReactive() { + + try (DseSession session = sessionBuilder().build()) { + + Flowable.fromPublisher( + session.executeReactive(String.format(CREATE_KEYSPACE, "test_osgi_reactive"))) + .blockingSubscribe(); + + // test that ESRI is available + Flowable.fromPublisher( + session.executeReactive( + // also exercise the DSE query builder + DseSchemaBuilder.createTable("test_osgi_reactive", "t1") + .ifNotExists() + .withPartitionKey("pk", DataTypes.INT) + .withColumn("v", DataTypes.INT) + .build())) + .blockingSubscribe(); + + Flowable.fromPublisher( + session.executeReactive( + SimpleStatement.newInstance( + "INSERT INTO test_osgi_reactive.t1 (pk, v) VALUES (0, 1)"))) + .blockingSubscribe(); + + Row row = + Flowable.fromPublisher( + session.executeReactive( + // test that the Query Builder is available + selectFrom("test_osgi_reactive", "t1") + .column("v") + .where(column("pk").isEqualTo(literal(0))) + .build())) + .blockingFirst(); + + assertThat(row).isNotNull(); + 
assertThat(row.getInt(0)).isEqualTo(1); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java new file mode 100644 index 00000000000..d3a1424d460 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java @@ -0,0 +1,80 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.osgi.support; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.DseSessionBuilder; +import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; +import java.net.InetSocketAddress; + +public interface DseOsgiSimpleTests { + + String CREATE_KEYSPACE = + "CREATE KEYSPACE IF NOT EXISTS %s " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"; + + /** @return config loader builder to be used to create session. 
*/ + default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + return DseSessionBuilderInstantiator.configLoaderBuilder(); + } + + /** @return The session builder to use for the OSGi tests. */ + default DseSessionBuilder sessionBuilder() { + return DseSession.builder() + .addContactEndPoint(new DefaultEndPoint(new InetSocketAddress("127.0.0.1", 9042))) + // use the DSE driver's ClassLoader instead of the OSGI application thread's. + .withClassLoader(DseSession.class.getClassLoader()) + .withConfigLoader(configLoaderBuilder().build()); + } + + /** + * A very simple test that ensures a session can be established and a query made when running in + * an OSGi container. + */ + default void connectAndQuerySimple() { + + try (DseSession session = sessionBuilder().build()) { + + session.execute(String.format(CREATE_KEYSPACE, "test_osgi")); + + session.execute( + // Exercise the DSE query builder + DseSchemaBuilder.createTable("test_osgi", "t1") + .ifNotExists() + .withPartitionKey("pk", DataTypes.INT) + .withColumn("v", DataTypes.INT) + .build()); + + session.execute( + SimpleStatement.newInstance("INSERT INTO test_osgi.t1 (pk, v) VALUES (0, 1)")); + + Row row = + session + .execute( + // test that the Query Builder is available + selectFrom("test_osgi", "t1") + .column("v") + .where(column("pk").isEqualTo(literal(0))) + .build()) + .one(); + + assertThat(row).isNotNull(); + assertThat(row.getInt(0)).isEqualTo(1); + } + } +} diff --git a/integration-tests/src/test/resources/application.conf b/integration-tests/src/test/resources/application.conf index 921a93fa6dd..5e59d6c8f79 100644 --- a/integration-tests/src/test/resources/application.conf +++ b/integration-tests/src/test/resources/application.conf @@ -8,6 +8,9 @@ datastax-java-driver { # (see CcmBridge). 
local-datacenter = dc1 } + + # Raise a bit for graph queries + request.timeout = 10 seconds } advanced { connection { @@ -20,6 +23,9 @@ datastax-java-driver { trace.interval = 1 second warn-if-set-keyspace = false } + graph { + name = "demo" + } metrics { // Raise histogram bounds because the tests execute DDL queries with a higher timeout session.cql_requests.highest_latency = 30 seconds @@ -35,4 +41,4 @@ datastax-java-driver { } } } -} +} \ No newline at end of file diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index 6dfad81de3e..df47408313f 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -3,17 +3,8 @@ Copyright DataStax, Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + This software can be used solely with DataStax Enterprise. Please consult the license at + http://www.datastax.com/terms/datastax-dse-driver-license-terms --> @@ -25,7 +16,9 @@ - + + + diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java new file mode 100644 index 00000000000..08d9e808557 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java @@ -0,0 +1,23 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder; + +import com.datastax.oss.driver.api.querybuilder.QueryBuilder; + +/** + * A DSE extension of the Cassandra driver's {@linkplain QueryBuilder query builder}. + * + *

      Note that, at this time, this class acts a simple pass-through: there is no DSE-specific + * syntax for DML queries, therefore it just inherits all of {@link QueryBuilder}'s methods, without + * adding any of its own. + * + *

      However, it is a good idea to use it as the entry point to the DSL in your DSE application, to + * avoid changing all your imports if specialized methods get added here in the future. + */ +public class DseQueryBuilder extends QueryBuilder { + // nothing to do +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java new file mode 100644 index 00000000000..73cf5450dbf --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java @@ -0,0 +1,205 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder; + +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseAggregateStart; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionStart; +import com.datastax.dse.driver.internal.querybuilder.schema.DefaultCreateDseAggregate; +import com.datastax.dse.driver.internal.querybuilder.schema.DefaultCreateDseFunction; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import com.datastax.oss.driver.api.querybuilder.schema.CreateAggregateStart; +import com.datastax.oss.driver.api.querybuilder.schema.CreateFunctionStart; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * An extension of {@link com.datastax.oss.driver.api.querybuilder.SchemaBuilder} for building + * schema entities that have DSE specific functionality. + */ +public class DseSchemaBuilder extends SchemaBuilder { + + /** + * Starts a CREATE AGGREGATE query with the given aggregate name. This assumes the keyspace name + * is already qualified for the Session or Statement. 
+ */ + @NonNull + public static CreateDseAggregateStart createDseAggregate(@NonNull CqlIdentifier aggregateId) { + return new DefaultCreateDseAggregate(aggregateId); + } + + /** Starts a CREATE AGGREGATE query with the given aggregate name for the given keyspace name. */ + @NonNull + public static CreateDseAggregateStart createDseAggregate( + @Nullable CqlIdentifier keyspaceId, @NonNull CqlIdentifier aggregateId) { + return new DefaultCreateDseAggregate(keyspaceId, aggregateId); + } + + /** + * Shortcut for {@link #createDseAggregate(CqlIdentifier) + * createDseAggregate(CqlIdentifier.fromCql(aggregateName))}. + */ + @NonNull + public static CreateDseAggregateStart createDseAggregate(@NonNull String aggregateName) { + return new DefaultCreateDseAggregate(CqlIdentifier.fromCql(aggregateName)); + } + + /** + * Shortcut for {@link #createAggregate(CqlIdentifier, CqlIdentifier) + * createDseAggregate(CqlIdentifier.fromCql(keyspaceName), CqlIdentifier.fromCql(aggregateName))}. + */ + @NonNull + public static CreateDseAggregateStart createDseAggregate( + @Nullable String keyspaceName, @NonNull String aggregateName) { + return new DefaultCreateDseAggregate( + keyspaceName == null ? null : CqlIdentifier.fromCql(keyspaceName), + CqlIdentifier.fromCql(aggregateName)); + } + + /** + * Starts a CREATE AGGREGATE query with the given aggregate name. This assumes the keyspace name + * is already qualified for the Session or Statement. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code DETERMINISTIC} keyword, use {@link + * #createDseAggregate(CqlIdentifier)}. + */ + @NonNull + public static CreateAggregateStart createAggregate(@NonNull CqlIdentifier aggregateName) { + return SchemaBuilder.createAggregate(aggregateName); + } + + /** + * Starts a CREATE AGGREGATE query with the given aggregate name for the given keyspace name. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code DETERMINISTIC} keyword, use {@link + * #createDseAggregate(CqlIdentifier, CqlIdentifier)}. + */ + @NonNull + public static CreateAggregateStart createAggregate( + @Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier aggregateName) { + return SchemaBuilder.createAggregate(keyspace, aggregateName); + } + + /** + * Shortcut for {@link #createAggregate(CqlIdentifier) + * createAggregate(CqlIdentifier.fromCql(aggregateName)}. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code DETERMINISTIC} keyword, use {@link + * #createDseAggregate(String)}. + */ + @NonNull + public static CreateAggregateStart createAggregate(@NonNull String aggregateName) { + return SchemaBuilder.createAggregate(aggregateName); + } + + /** + * Shortcut for {@link #createAggregate(CqlIdentifier, CqlIdentifier) + * createAggregate(CqlIdentifier.fromCql(keyspace), CqlIdentifier.fromCql(aggregateName)}. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code DETERMINISTIC} keyword, use {@link + * #createDseAggregate(String, String)}. + */ + @NonNull + public static CreateAggregateStart createAggregate( + @Nullable String keyspace, @NonNull String aggregateName) { + return SchemaBuilder.createAggregate(keyspace, aggregateName); + } + + /** + * Starts a CREATE FUNCTION query with the given function name. This assumes the keyspace name is + * already qualified for the Session or Statement. + */ + @NonNull + public static CreateDseFunctionStart createDseFunction(@NonNull CqlIdentifier functionId) { + return new DefaultCreateDseFunction(functionId); + } + + /** Starts a CREATE FUNCTION query with the given function name for the given keyspace name. */ + @NonNull + public static CreateDseFunctionStart createDseFunction( + @Nullable CqlIdentifier keyspaceId, @NonNull CqlIdentifier functionId) { + return new DefaultCreateDseFunction(keyspaceId, functionId); + } + + /** + * Shortcut for {@link #createFunction(CqlIdentifier) + * createFunction(CqlIdentifier.fromCql(functionName)} + */ + @NonNull + public static CreateDseFunctionStart createDseFunction(@NonNull String functionName) { + return new DefaultCreateDseFunction(CqlIdentifier.fromCql(functionName)); + } + + /** + * Shortcut for {@link #createFunction(CqlIdentifier, CqlIdentifier) + * createFunction(CqlIdentifier.fromCql(keyspaceName), CqlIdentifier.fromCql(functionName)} + */ + @NonNull + public static CreateDseFunctionStart createDseFunction( + @Nullable String keyspaceName, @NonNull String functionName) { + return new DefaultCreateDseFunction( + keyspaceName == null ? null : CqlIdentifier.fromCql(keyspaceName), + CqlIdentifier.fromCql(functionName)); + } + + /** + * Starts a CREATE FUNCTION query with the given function name. This assumes the keyspace name is + * already qualified for the Session or Statement. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code MONOTONIC} or {@code DETERMINISTIC} keywords, use + * {@link #createDseFunction(CqlIdentifier)}. + */ + @NonNull + public static CreateFunctionStart createFunction(@NonNull CqlIdentifier functionName) { + return SchemaBuilder.createFunction(functionName); + } + + /** + * Starts a CREATE FUNCTION query with the given function name for the given keyspace name. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code MONOTONIC} or {@code DETERMINISTIC} keywords, use + * {@link #createDseFunction(CqlIdentifier,CqlIdentifier)}. + */ + @NonNull + public static CreateFunctionStart createFunction( + @Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier functionName) { + return SchemaBuilder.createFunction(keyspace, functionName); + } + + /** + * Shortcut for {@link #createFunction(CqlIdentifier, CqlIdentifier) + * createFunction(CqlIdentifier.fromCql(keyspace, functionName)} + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code MONOTONIC} or {@code DETERMINISTIC} keywords, use + * {@link #createDseFunction(String)}. + */ + @NonNull + public static CreateFunctionStart createFunction(@NonNull String functionName) { + return SchemaBuilder.createFunction(functionName); + } + + /** + * Shortcut for {@link #createFunction(CqlIdentifier) + * createFunction(CqlIdentifier.fromCql(keyspaceName),CqlIdentifier.fromCql(functionName)}. + * + *

      Note that this method only covers open-source Cassandra syntax. If you want to use + * DSE-specific features, such as the {@code MONOTONIC} or {@code DETERMINISTIC} keywords, use + * {@link #createDseFunction(String, String)}. + */ + @NonNull + public static CreateFunctionStart createFunction( + @Nullable String keyspace, @NonNull String functionName) { + return SchemaBuilder.createFunction(keyspace, functionName); + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java new file mode 100644 index 00000000000..01c03187789 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java @@ -0,0 +1,13 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ + +/** + * This package effectively mirrors the Cassandra OSS Query Builder package to allow DSE extended + * schema and query building for the DSE driver. In general, a class in this package should simply + * extend the equivalent class in the OSS driver and add extended functionality. + */ +package com.datastax.dse.driver.api.querybuilder; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java new file mode 100644 index 00000000000..d8d6151365e --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.BuildableQuery; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseAggregateEnd extends BuildableQuery { + + /** + * Adds INITCOND to the aggregate query. Defines the initial condition, values, of the first + * parameter in the SFUNC. + */ + @NonNull + CreateDseAggregateEnd withInitCond(@NonNull Term term); + + /** + * Adds FINALFUNC to the create aggregate query. This is used to specify what type is returned + * from the state function. + */ + @NonNull + CreateDseAggregateEnd withFinalFunc(@NonNull CqlIdentifier finalFunc); + + /** + * Shortcut for {@link #withFinalFunc(CqlIdentifier) + * withFinalFunc(CqlIdentifier.fromCql(finalFuncName))}. + */ + @NonNull + default CreateDseAggregateEnd withFinalFunc(@NonNull String finalFuncName) { + return withFinalFunc(CqlIdentifier.fromCql(finalFuncName)); + } + + /** + * Adds "DETERMINISTIC" to create aggregate specification. This is used to specify that this + * aggregate always returns the same output for a given input. Requires an initial condition and + * returns a single value. + */ + @NonNull + CreateDseAggregateEnd deterministic(); +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java new file mode 100644 index 00000000000..f507a86ca04 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseAggregateStart { + /** + * Adds IF NOT EXISTS to the create aggregate specification. This indicates that the aggregate + * should not be created if it already exists. + */ + @NonNull + CreateDseAggregateStart ifNotExists(); + + /** + * Adds OR REPLACE to the create aggregate specification. This indicates that the aggregate should + * replace an existing aggregate with the same name if it exists. + */ + @NonNull + CreateDseAggregateStart orReplace(); + + /** + * Adds a parameter definition in the CREATE AGGREGATE statement. + * + *

      Parameter keys are added in the order of their declaration. + * + *

      To create the data type, use the constants and static methods in {@link DataTypes}, or + * {@link SchemaBuilder#udt(CqlIdentifier, boolean)}. + */ + @NonNull + CreateDseAggregateStart withParameter(@NonNull DataType paramType); + + /** Adds SFUNC to the create aggregate specification. This is the state function for each row. */ + @NonNull + CreateDseAggregateStateFunc withSFunc(@NonNull CqlIdentifier sfuncName); + + /** Shortcut for {@link #withSFunc(CqlIdentifier) withSFunc(CqlIdentifier.fromCql(sfuncName))}. */ + @NonNull + default CreateDseAggregateStateFunc withSFunc(@NonNull String sfuncName) { + return withSFunc(CqlIdentifier.fromCql(sfuncName)); + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java new file mode 100644 index 00000000000..a90e97c8557 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java @@ -0,0 +1,26 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseAggregateStateFunc { + + /** + * Adds STYPE to the create aggregate query. This is used to specify what type is returned from + * the state function. + * + *

      To create the data type, use the constants and static methods in {@link DataTypes}, or + * {@link SchemaBuilder#udt(CqlIdentifier, boolean)}. + */ + @NonNull + CreateDseAggregateEnd withSType(@NonNull DataType dataType); +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java new file mode 100644 index 00000000000..e7538dedb87 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java @@ -0,0 +1,11 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.querybuilder.BuildableQuery; + +public interface CreateDseFunctionEnd extends BuildableQuery {} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java new file mode 100644 index 00000000000..8e2e71ad67a --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseFunctionStart { + + /** + * Adds IF NOT EXISTS to the create function specification. This indicates that the function + * should not be created if it already exists. + */ + @NonNull + CreateDseFunctionStart ifNotExists(); + + /** + * Adds OR REPLACE to the create function specification. This indicates that the function should + * replace an existing function with the same name if it exists. + */ + @NonNull + CreateDseFunctionStart orReplace(); + + /** + * Adds a parameter definition in the CREATE FUNCTION statement. + * + *

      Parameter keys are added in the order of their declaration. + * + *

      To create the data type, use the constants and static methods in {@link DataTypes}, or + * {@link SchemaBuilder#udt(CqlIdentifier, boolean)}. + */ + @NonNull + CreateDseFunctionStart withParameter( + @NonNull CqlIdentifier paramName, @NonNull DataType paramType); + + /** + * Shortcut for {@link #withParameter(CqlIdentifier, DataType) + * withParameter(CqlIdentifier.asCql(paramName), dataType)}. + */ + @NonNull + default CreateDseFunctionStart withParameter( + @NonNull String paramName, @NonNull DataType paramType) { + return withParameter(CqlIdentifier.fromCql(paramName), paramType); + } + + /** + * Adds RETURNS NULL ON NULL to the create function specification. This indicates that the body of + * the function should be skipped when null input is provided. + */ + @NonNull + CreateDseFunctionWithNullOption returnsNullOnNull(); + + /** + * Adds CALLED ON NULL to the create function specification. This indicates that the body of the + * function not be skipped when null input is provided. + */ + @NonNull + CreateDseFunctionWithNullOption calledOnNull(); +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java new file mode 100644 index 00000000000..47c9630f4bb --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseFunctionWithLanguage { + + /** + * Adds AS to the create function specification. This is used to specify the body of the function. 
+ * Note that it is expected that the provided body is properly quoted as this method does not make + * that decision for the user. For simple cases, one should wrap the input in single quotes, i.e. + * 'myBody'. If the body itself contains single quotes, one could use a + * postgres-style string literal, which is surrounded in two dollar signs, i.e. $$ myBody $$ + * . + */ + @NonNull + CreateDseFunctionEnd as(@NonNull String functionBody); + + /** + * Adds AS to the create function specification and quotes the function body. Assumes that if the + * input body contains at least one single quote, to quote the body with two dollar signs, i.e. + * $$ myBody $$, otherwise the body is quoted with single quotes, i.e. + * ' myBody '. If the function body is already quoted {@link #as(String)} should be used + * instead. + */ + @NonNull + default CreateDseFunctionEnd asQuoted(@NonNull String functionBody) { + if (functionBody.contains("'")) { + return as("$$ " + functionBody + " $$"); + } else { + return as('\'' + functionBody + '\''); + } + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java new file mode 100644 index 00000000000..f2fd33e1964 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java @@ -0,0 +1,25 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseFunctionWithNullOption { + /** + * Adds RETURNS to the create function specification. This is used to specify what type is + * returned from the function. + * + *

      To create the data type, use the constants and static methods in {@link DataTypes}, or + * {@link SchemaBuilder#udt(CqlIdentifier, boolean)}. + */ + @NonNull + CreateDseFunctionWithType returnsType(@NonNull DataType dataType); +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java new file mode 100644 index 00000000000..b226044ca3c --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.querybuilder.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import edu.umd.cs.findbugs.annotations.NonNull; + +public interface CreateDseFunctionWithType { + /** + * Adds LANGUAGE to the create function specification. This is used to specify what language is + * used in the function body. + */ + @NonNull + CreateDseFunctionWithLanguage withLanguage(@NonNull String language); + + /** + * Adds "LANGUAGE java" to create function specification. Shortcut for {@link + * #withLanguage(String) withLanguage("java")}. + */ + @NonNull + default CreateDseFunctionWithLanguage withJavaLanguage() { + return withLanguage("java"); + } + + /** + * Adds "LANGUAGE javascript" to create function specification. Shortcut for {@link + * #withLanguage(String) withLanguage("javascript")}. + */ + @NonNull + default CreateDseFunctionWithLanguage withJavaScriptLanguage() { + return withLanguage("javascript"); + } + + /** + * Adds "DETERMINISTIC" to create function specification. This is used to specify that this + * function always returns the same output for a given input. 
+ */ + @NonNull + CreateDseFunctionWithType deterministic(); + + /** + * Adds "MONOTONIC" to create function specification. This is used to specify that this function + * is either entirely non-increasing, or entirely non-decreasing. + */ + @NonNull + CreateDseFunctionWithType monotonic(); + + /** + * Adds "MONOTONIC ON" to create function specification. This is used to specify that this + * function has only a single column that is monotonic. If the function is fully monotonic, use + * {@link #monotonic()} instead. + */ + @NonNull + CreateDseFunctionWithType monotonicOn(@NonNull CqlIdentifier monotonicColumn); + + /** + * Shortcut for {@link #monotonicOn(CqlIdentifier) + * monotonicOn(CqlIdentifier.fromCql(monotonicColumn))}. + */ + @NonNull + default CreateDseFunctionWithType monotonicOn(@NonNull String monotonicColumn) { + return monotonicOn(CqlIdentifier.fromCql(monotonicColumn)); + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java new file mode 100644 index 00000000000..c3d4f5bda6c --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java @@ -0,0 +1,13 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ + +/** + * This package effectively mirrors the Cassandra OSS Schema interfaces to allow extended schema and + * query building for the DSE driver. NOTE: Changes made to the OSS driver will need to be mirrored + * here if the OSS driver changes affect an extended schema build strategy for the DSE driver. 
+ */ +package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java new file mode 100644 index 00000000000..6f321529b8f --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java @@ -0,0 +1,304 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.querybuilder.schema; + +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseAggregateEnd; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseAggregateStart; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseAggregateStateFunc; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import com.datastax.oss.driver.internal.querybuilder.CqlHelper; +import com.datastax.oss.driver.internal.querybuilder.ImmutableCollections; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import net.jcip.annotations.Immutable; + +/** + * Implements DSE extended interfaces for creating aggregates. This class provides the same + * functionality as the Cassandra OSS {@link + * com.datastax.oss.driver.internal.querybuilder.schema.DefaultCreateAggregate} implementation, with + * the additional DSE specific extended functionality (DETERMINISTIC keyword). 
+ */ +@Immutable +public class DefaultCreateDseAggregate + implements CreateDseAggregateEnd, CreateDseAggregateStart, CreateDseAggregateStateFunc { + + private final CqlIdentifier keyspace; + private final CqlIdentifier functionName; + private boolean orReplace; + private boolean ifNotExists; + private final ImmutableList parameters; + private final CqlIdentifier sFunc; + private final DataType sType; + private final CqlIdentifier finalFunc; + private final Term term; + private final boolean deterministic; + + public DefaultCreateDseAggregate(@NonNull CqlIdentifier functionName) { + this(null, functionName); + } + + public DefaultCreateDseAggregate( + @Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier functionName) { + this(keyspace, functionName, false, false, ImmutableList.of(), null, null, null, null, false); + } + + public DefaultCreateDseAggregate( + @Nullable CqlIdentifier keyspace, + @NonNull CqlIdentifier functionName, + boolean orReplace, + boolean ifNotExists, + @NonNull ImmutableList parameters, + @Nullable CqlIdentifier sFunc, + @Nullable DataType sType, + @Nullable CqlIdentifier finalFunc, + @Nullable Term term, + boolean deterministic) { + this.keyspace = keyspace; + this.functionName = functionName; + this.orReplace = orReplace; + this.ifNotExists = ifNotExists; + this.parameters = parameters; + this.sFunc = sFunc; + this.sType = sType; + this.finalFunc = finalFunc; + this.term = term; + this.deterministic = deterministic; + } + + @NonNull + @Override + public String asCql() { + StringBuilder builder = new StringBuilder(); + + builder.append("CREATE "); + if (orReplace) { + builder.append("OR REPLACE "); + } + builder.append("AGGREGATE "); + + if (ifNotExists) { + builder.append("IF NOT EXISTS "); + } + CqlHelper.qualify(keyspace, functionName, builder); + + builder.append(" ("); + boolean first = true; + for (DataType param : parameters) { + if (first) { + first = false; + } else { + builder.append(','); + } + builder.append(param.asCql(false, 
true)); + } + builder.append(')'); + if (sFunc != null) { + builder.append(" SFUNC "); + builder.append(sFunc.asCql(true)); + } + if (sType != null) { + builder.append(" STYPE "); + builder.append(sType.asCql(false, true)); + } + if (finalFunc != null) { + builder.append(" FINALFUNC "); + builder.append(finalFunc.asCql(true)); + } + if (term != null) { + builder.append(" INITCOND "); + term.appendTo(builder); + } + // deterministic + if (deterministic) { + builder.append(" DETERMINISTIC"); + } + return builder.toString(); + } + + @NonNull + @Override + public CreateDseAggregateEnd withInitCond(@NonNull Term term) { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateStart ifNotExists() { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + true, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateStart orReplace() { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + true, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateStart withParameter(@NonNull DataType paramType) { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + ifNotExists, + ImmutableCollections.append(parameters, paramType), + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateStateFunc withSFunc(@NonNull CqlIdentifier sFunc) { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateEnd withSType(@NonNull DataType sType) { + return new DefaultCreateDseAggregate( + 
keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateEnd withFinalFunc(@NonNull CqlIdentifier finalFunc) { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + deterministic); + } + + @NonNull + @Override + public CreateDseAggregateEnd deterministic() { + return new DefaultCreateDseAggregate( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + sFunc, + sType, + finalFunc, + term, + true); + } + + @Override + public String toString() { + return asCql(); + } + + @Nullable + public CqlIdentifier getKeyspace() { + return keyspace; + } + + @NonNull + public CqlIdentifier getFunctionName() { + return functionName; + } + + public boolean isOrReplace() { + return orReplace; + } + + public boolean isIfNotExists() { + return ifNotExists; + } + + @NonNull + public ImmutableList getParameters() { + return parameters; + } + + @Nullable + public CqlIdentifier getsFunc() { + return sFunc; + } + + @Nullable + public DataType getsType() { + return sType; + } + + @Nullable + public CqlIdentifier getFinalFunc() { + return finalFunc; + } + + @Nullable + public Term getTerm() { + return term; + } + + public boolean isDeterministic() { + return deterministic; + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java new file mode 100644 index 00000000000..dc6e216b1af --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java @@ -0,0 +1,433 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.querybuilder.schema; + +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionEnd; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionStart; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionWithLanguage; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionWithNullOption; +import com.datastax.dse.driver.api.querybuilder.schema.CreateDseFunctionWithType; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.querybuilder.CqlHelper; +import com.datastax.oss.driver.internal.querybuilder.ImmutableCollections; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import net.jcip.annotations.Immutable; + +/** + * Implements DSE extended interfaces for creating functions. This class provides the same + * functionality as the Cassandra OSS {@link + * com.datastax.oss.driver.internal.querybuilder.schema.DefaultCreateFunction} implementation, with + * the additional DSE specific extended functionality (DETERMINISTIC and MONOTONIC keywords). 
+ */ +@Immutable +public class DefaultCreateDseFunction + implements CreateDseFunctionEnd, + CreateDseFunctionStart, + CreateDseFunctionWithLanguage, + CreateDseFunctionWithNullOption, + CreateDseFunctionWithType { + + private final CqlIdentifier keyspace; + private final CqlIdentifier functionName; + private boolean orReplace; + private boolean ifNotExists; + private final ImmutableMap parameters; + private boolean returnsNullOnNull; + private final DataType returnType; + private final String language; + private final String functionBody; + private final boolean deterministic; + private final boolean globallyMonotonic; + private final CqlIdentifier monotonicOn; + + public DefaultCreateDseFunction(CqlIdentifier functionName) { + this(null, functionName); + } + + public DefaultCreateDseFunction(CqlIdentifier keyspace, CqlIdentifier functionName) { + this( + keyspace, + functionName, + false, + false, + ImmutableMap.of(), + false, + null, + null, + null, + false, + false, + null); + } + + public DefaultCreateDseFunction( + CqlIdentifier keyspace, + CqlIdentifier functionName, + boolean orReplace, + boolean ifNotExists, + ImmutableMap parameters, + boolean returnsNullOnNull, + DataType returns, + String language, + String functionBody, + boolean deterministic, + boolean globallyMonotonic, + CqlIdentifier monotonicOn) { + this.keyspace = keyspace; + this.functionName = functionName; + this.orReplace = orReplace; + this.ifNotExists = ifNotExists; + this.parameters = parameters; + this.returnsNullOnNull = returnsNullOnNull; + this.returnType = returns; + this.language = language; + this.functionBody = functionBody; + this.deterministic = deterministic; + this.globallyMonotonic = globallyMonotonic; + this.monotonicOn = monotonicOn; + } + + @NonNull + @Override + public String asCql() { + StringBuilder builder = new StringBuilder(); + + builder.append("CREATE "); + if (orReplace) { + builder.append("OR REPLACE "); + } + builder.append("FUNCTION "); + + if (ifNotExists) { + 
builder.append("IF NOT EXISTS "); + } + CqlHelper.qualify(keyspace, functionName, builder); + + builder.append(" ("); + + boolean first = true; + for (Map.Entry param : parameters.entrySet()) { + if (first) { + first = false; + } else { + builder.append(','); + } + builder + .append(param.getKey().asCql(true)) + .append(' ') + .append(param.getValue().asCql(false, true)); + } + builder.append(')'); + if (returnsNullOnNull) { + builder.append(" RETURNS NULL"); + } else { + builder.append(" CALLED"); + } + + builder.append(" ON NULL INPUT"); + + if (returnType == null) { + // return type has not been provided yet. + return builder.toString(); + } + + builder.append(" RETURNS "); + builder.append(returnType.asCql(false, true)); + + // deterministic + if (deterministic) { + builder.append(" DETERMINISTIC"); + } + + // monotonic + if (globallyMonotonic) { + builder.append(" MONOTONIC"); + } else if (monotonicOn != null) { + builder.append(" MONOTONIC ON ").append(monotonicOn.asCql(true)); + } + + if (language == null) { + // language has not been provided yet. + return builder.toString(); + } + + builder.append(" LANGUAGE "); + builder.append(language); + + if (functionBody == null) { + // body has not been provided yet. 
+ return builder.toString(); + } + + builder.append(" AS "); + builder.append(functionBody); + return builder.toString(); + } + + @Override + public String toString() { + return asCql(); + } + + @NonNull + @Override + public CreateDseFunctionEnd as(@NonNull String functionBody) { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionWithLanguage withLanguage(@NonNull String language) { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionStart ifNotExists() { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + true, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionStart orReplace() { + return new DefaultCreateDseFunction( + keyspace, + functionName, + true, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionStart withParameter( + @NonNull CqlIdentifier paramName, @NonNull DataType paramType) { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + ImmutableCollections.append(parameters, paramName, paramType), + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionWithNullOption returnsNullOnNull() { + return new DefaultCreateDseFunction( + keyspace, + 
functionName, + orReplace, + ifNotExists, + parameters, + true, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionWithNullOption calledOnNull() { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + false, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionWithType deterministic() { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + true, + globallyMonotonic, + monotonicOn); + } + + @NonNull + @Override + public CreateDseFunctionWithType monotonic() { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + true, + null); + } + + @NonNull + @Override + public CreateDseFunctionWithType monotonicOn(@NonNull CqlIdentifier monotonicColumn) { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + false, + monotonicColumn); + } + + @NonNull + @Override + public CreateDseFunctionWithType returnsType(@NonNull DataType returnType) { + return new DefaultCreateDseFunction( + keyspace, + functionName, + orReplace, + ifNotExists, + parameters, + returnsNullOnNull, + returnType, + language, + functionBody, + deterministic, + globallyMonotonic, + monotonicOn); + } + + @Nullable + public CqlIdentifier getKeyspace() { + return keyspace; + } + + @NonNull + public CqlIdentifier getFunction() { + return functionName; + } + + public boolean isOrReplace() { + return orReplace; + } + + public boolean isIfNotExists() { + return ifNotExists; + } + + 
@NonNull + public ImmutableMap getParameters() { + return parameters; + } + + public boolean isReturnsNullOnNull() { + return returnsNullOnNull; + } + + @Nullable + public DataType getReturnType() { + return returnType; + } + + @Nullable + public String getLanguage() { + return language; + } + + @Nullable + public String getFunctionBody() { + return functionBody; + } + + public boolean isDeterministic() { + return deterministic; + } + + public boolean isGloballyMonotonic() { + return globallyMonotonic; + } + + @Nullable + public CqlIdentifier getMonotonicOn() { + return monotonicOn; + } +} diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java new file mode 100644 index 00000000000..19588410f95 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java @@ -0,0 +1,13 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ + +/** + * This package effectively mirrors the Cassandra OSS default query and schema implementations to + * allow extended schema and query building for the DSE driver. In general, a class in this package + * will need to implement the DSE equivalent interfaces for any DSE specific extensions. 
+ */ +package com.datastax.dse.driver.internal.querybuilder.schema; diff --git a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java new file mode 100644 index 00000000000..ffb36ba7ebd --- /dev/null +++ b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java @@ -0,0 +1,173 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.querybuilder.schema; + +import static com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder.createDseAggregate; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.tuple; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.DataTypes; +import org.junit.Test; + +/** + * Tests for creating DSE extended aggregates. Most of these tests are copied from the OSS {@code + * com.datastax.oss.driver.internal.querybuilder.schema.CreateAggregateTest} class to ensure DSE + * extended behavior does not break OSS functionality, with additional tests to verify the DSE + * specific functionality (i.e. the DETERMINISTIC keyword). 
+ */ +public class CreateDseAggregateTest { + + @Test + public void should_create_aggreate_with_simple_param() { + + assertThat( + createDseAggregate("keyspace1", "agg1") + .withParameter(DataTypes.INT) + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE keyspace1.agg1 (int) SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_many_params() { + + assertThat( + createDseAggregate("keyspace1", "agg2") + .withParameter(DataTypes.INT) + .withParameter(DataTypes.TEXT) + .withParameter(DataTypes.BOOLEAN) + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE keyspace1.agg2 (int,text,boolean) SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_param_without_frozen() { + + assertThat( + createDseAggregate("keyspace1", "agg9") + .withParameter(DataTypes.tupleOf(DataTypes.TEXT)) + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE keyspace1.agg9 (tuple) SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_no_params() { + + assertThat( + createDseAggregate("keyspace1", "agg3") + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE keyspace1.agg3 () SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_no_keyspace() { + + assertThat( + createDseAggregate("agg4") + 
.withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE agg4 () SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_if_not_exists() { + + assertThat( + createDseAggregate("agg6") + .ifNotExists() + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE AGGREGATE IF NOT EXISTS agg6 () SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_create_aggregate_with_no_final_func() { + + assertThat( + createDseAggregate("cycling", "sum") + .withParameter(DataTypes.INT) + .withSFunc("dsum") + .withSType(DataTypes.INT) + .asCql()) + .isEqualTo("CREATE AGGREGATE cycling.sum (int) SFUNC dsum STYPE int"); + } + + @Test + public void should_create_or_replace() { + assertThat( + createDseAggregate("keyspace1", "agg7") + .orReplace() + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .asCql()) + .isEqualTo( + "CREATE OR REPLACE AGGREGATE keyspace1.agg7 () SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0)"); + } + + @Test + public void should_not_throw_on_toString_for_CreateAggregateStart() { + assertThat(createDseAggregate("agg1").toString()).isEqualTo("CREATE AGGREGATE agg1 ()"); + } + + @Test + public void should_not_throw_on_toString_for_CreateAggregateWithParam() { + assertThat(createDseAggregate("func1").withParameter(DataTypes.INT).toString()) + .isEqualTo("CREATE AGGREGATE func1 (int)"); + } + + @Test + public void should_not_throw_on_toString_for_NotExists_OrReplace() { + assertThat(createDseAggregate("func1").ifNotExists().orReplace().toString()) + .isEqualTo("CREATE OR REPLACE AGGREGATE IF NOT EXISTS func1 
()"); + } + + @Test + public void should_create_aggregate_with_deterministic() { + + assertThat( + createDseAggregate("keyspace1", "agg1") + .withParameter(DataTypes.INT) + .withSFunc("sfunction") + .withSType(DataTypes.ASCII) + .withFinalFunc("finalfunction") + .withInitCond(tuple(literal(0), literal(0))) + .deterministic() + .asCql()) + .isEqualTo( + "CREATE AGGREGATE keyspace1.agg1 (int) SFUNC sfunction STYPE ascii FINALFUNC finalfunction INITCOND (0,0) DETERMINISTIC"); + } +} diff --git a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java new file mode 100644 index 00000000000..a2eeae20eb7 --- /dev/null +++ b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java @@ -0,0 +1,443 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.querybuilder.schema; + +import static com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder.createDseFunction; +import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.udt; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.DataTypes; +import org.junit.Test; + +/** + * Tests for creating DSE extended functions. Most of these tests are copied from the OSS {@code + * com.datastax.oss.driver.internal.querybuilder.schema.CreateFunctionTest} class to ensure DSE + * extended behavior does not break OSS functionality, with additional tests to verify the DSE + * specific functionality (i.e. the DETERMINISTIC and MONOTONIC keywords). 
+ */ +public class CreateDseFunctionTest { + + @Test + public void should_not_throw_on_toString_for_CreateFunctionStart() { + String funcStr = createDseFunction("func1").toString(); + assertThat(funcStr).isEqualTo("CREATE FUNCTION func1 () CALLED ON NULL INPUT"); + } + + @Test + public void should_not_throw_on_toString_for_CreateFunctionWithType() { + assertThat( + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .toString()) + .isEqualTo("CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS int"); + } + + @Test + public void should_not_throw_on_toString_for_CreateFunctionWithLanguage() { + assertThat( + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .withJavaLanguage() + .toString()) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java"); + } + + @Test + public void should_create_function_with_simple_params() { + assertThat( + createDseFunction("keyspace1", "func1") + .withParameter("param1", DataTypes.INT) + .calledOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .asQuoted("return Integer.toString(param1);") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func1 (param1 int) CALLED ON NULL INPUT RETURNS text LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void should_create_function_with_param_and_return_type_not_frozen() { + assertThat( + createDseFunction("keyspace1", "func6") + .withParameter("param1", DataTypes.tupleOf(DataTypes.INT, DataTypes.INT)) + .returnsNullOnNull() + .returnsType(udt("person", true)) + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func6 (param1 tuple) RETURNS NULL ON NULL INPUT RETURNS person LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void 
should_honor_returns_null() { + assertThat( + createDseFunction("keyspace1", "func2") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .asQuoted("return Integer.toString(param1);") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func2 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void should_create_function_with_many_params() { + assertThat( + createDseFunction("keyspace1", "func3") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.TEXT) + .withParameter("param3", DataTypes.BOOLEAN) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .asQuoted("return Integer.toString(param1);") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func3 (param1 int,param2 text,param3 boolean) RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void should_create_function_with_no_params() { + + assertThat( + createDseFunction("keyspace1", "func4") + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withLanguage("java") + .asQuoted("return \"hello world\";") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func4 () RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return \"hello world\";'"); + } + + @Test + public void should_create_function_with_no_keyspace() { + assertThat( + createDseFunction("func5") + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .asQuoted("return \"hello world\";") + .asCql()) + .isEqualTo( + "CREATE FUNCTION func5 () RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return \"hello world\";'"); + } + + @Test + public void should_create_function_with_if_not_exists() { + assertThat( + createDseFunction("keyspace1", "func6") + .ifNotExists() + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + 
.asQuoted("return \"hello world\";") + .asCql()) + .isEqualTo( + "CREATE FUNCTION IF NOT EXISTS keyspace1.func6 () RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return \"hello world\";'"); + } + + @Test + public void should_create_or_replace() { + assertThat( + createDseFunction("keyspace1", "func6") + .orReplace() + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .asQuoted("return Integer.toString(param1);") + .asCql()) + .isEqualTo( + "CREATE OR REPLACE FUNCTION keyspace1.func6 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void should_not_quote_body_using_as() { + assertThat( + createDseFunction("keyspace1", "func6") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func6 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote() { + assertThat( + createDseFunction("keyspace1", "func6") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql()) + .isEqualTo( + "CREATE FUNCTION keyspace1.func6 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } + + @Test + public void should_not_throw_on_toString_for_create_function_with_deterministic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .deterministic() + .toString(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT 
RETURNS int DETERMINISTIC"); + } + + @Test + public void should_not_quote_body_using_as_with_deterministic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void + should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote_with_deterministic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } + + @Test + public void should_not_throw_on_toString_for_create_function_with_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .monotonic() + .toString(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS int MONOTONIC"); + } + + @Test + public void should_not_quote_body_using_as_with_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .monotonic() + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text MONOTONIC LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + 
@Test + public void + should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote_with_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .monotonic() + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text MONOTONIC LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } + + @Test + public void should_not_throw_on_toString_for_create_function_with_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .monotonicOn("param2") + .toString(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS int MONOTONIC ON param2"); + } + + @Test + public void should_not_quote_body_using_as_with_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .monotonicOn("param2") + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS text MONOTONIC ON param2 LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void + should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote_with_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .monotonicOn("param2") + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql(); + 
assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS text MONOTONIC ON param2 LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } + + @Test + public void should_not_throw_on_toString_for_create_function_with_deterministic_and_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .deterministic() + .monotonic() + .toString(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC"); + } + + @Test + public void should_not_quote_body_using_as_with_deterministic_and_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .monotonic() + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC MONOTONIC LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void + should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote_with_deterministic_and_monotonic() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .monotonic() + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC MONOTONIC LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } + + @Test + public void + should_not_throw_on_toString_for_create_function_with_deterministic_and_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) 
+ .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .deterministic() + .monotonicOn("param2") + .toString(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC ON param2"); + } + + @Test + public void should_not_quote_body_using_as_with_deterministic_and_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .monotonicOn("param2") + .withJavaLanguage() + .as("'return Integer.toString(param1);'") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC MONOTONIC ON param2 LANGUAGE java AS 'return Integer.toString(param1);'"); + } + + @Test + public void + should_quote_with_dollar_signs_on_asQuoted_if_body_contains_single_quote_with_deterministic_and_monotonic_on() { + final String funcStr = + createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .withParameter("param2", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.TEXT) + .deterministic() + .monotonicOn("param2") + .withJavaScriptLanguage() + .asQuoted("'hello ' + param1;") + .asCql(); + assertThat(funcStr) + .isEqualTo( + "CREATE FUNCTION func1 (param1 int,param2 int) RETURNS NULL ON NULL INPUT RETURNS text DETERMINISTIC MONOTONIC ON param2 LANGUAGE javascript AS $$ 'hello ' + param1; $$"); + } +} From 0bb8d0d380f13da9cb96bf44f75d4c99f924973a Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 15:17:32 -0700 Subject: [PATCH 197/979] Update license headers for DSE files --- .../com/datastax/dse/driver/DseSessionMetric.java | 13 +++++++++++-- .../dse/driver/api/core/DseProtocolVersion.java | 13 +++++++++++-- .../datastax/dse/driver/api/core/DseSession.java | 13 
+++++++++++-- .../dse/driver/api/core/DseSessionBuilder.java | 13 +++++++++++-- .../dse/driver/api/core/DseSessionBuilderBase.java | 13 +++++++++++-- .../driver/api/core/auth/BaseDseAuthenticator.java | 13 +++++++++++-- .../api/core/auth/DseGssApiAuthProviderBase.java | 13 +++++++++++-- .../core/auth/DsePlainTextAuthProviderBase.java | 13 +++++++++++-- .../driver/api/core/auth/ProxyAuthentication.java | 13 +++++++++++-- .../api/core/config/DseDriverConfigLoader.java | 13 +++++++++++-- .../driver/api/core/config/DseDriverOption.java | 13 +++++++++++-- .../cql/continuous/ContinuousAsyncResultSet.java | 13 +++++++++++-- .../core/cql/continuous/ContinuousResultSet.java | 13 +++++++++++-- .../api/core/cql/continuous/ContinuousSession.java | 13 +++++++++++-- .../reactive/ContinuousReactiveResultSet.java | 13 +++++++++++-- .../reactive/ContinuousReactiveSession.java | 13 +++++++++++-- .../core/cql/reactive/ReactiveQueryMetadata.java | 13 +++++++++++-- .../api/core/cql/reactive/ReactiveResultSet.java | 13 +++++++++++-- .../driver/api/core/cql/reactive/ReactiveRow.java | 13 +++++++++++-- .../api/core/cql/reactive/ReactiveSession.java | 13 +++++++++++-- .../driver/api/core/data/geometry/Geometry.java | 13 +++++++++++-- .../driver/api/core/data/geometry/LineString.java | 13 +++++++++++-- .../dse/driver/api/core/data/geometry/Point.java | 13 +++++++++++-- .../dse/driver/api/core/data/geometry/Polygon.java | 13 +++++++++++-- .../dse/driver/api/core/data/time/DateRange.java | 13 +++++++++++-- .../driver/api/core/data/time/DateRangeBound.java | 13 +++++++++++-- .../api/core/data/time/DateRangePrecision.java | 13 +++++++++++-- .../driver/api/core/graph/AsyncGraphResultSet.java | 13 +++++++++++-- .../driver/api/core/graph/BatchGraphStatement.java | 13 +++++++++++-- .../api/core/graph/BatchGraphStatementBuilder.java | 13 +++++++++++-- .../dse/driver/api/core/graph/DseGraph.java | 13 +++++++++++-- .../graph/DseGraphRemoteConnectionBuilder.java | 13 +++++++++++-- 
.../api/core/graph/FluentGraphStatement.java | 13 +++++++++++-- .../core/graph/FluentGraphStatementBuilder.java | 13 +++++++++++-- .../driver/api/core/graph/GraphExecutionInfo.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphNode.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphResultSet.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphSession.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphStatement.java | 13 +++++++++++-- .../api/core/graph/GraphStatementBuilderBase.java | 13 +++++++++++-- .../api/core/graph/ScriptGraphStatement.java | 13 +++++++++++-- .../core/graph/ScriptGraphStatementBuilder.java | 13 +++++++++++-- .../dse/driver/api/core/graph/predicates/Geo.java | 13 +++++++++++-- .../driver/api/core/graph/predicates/Search.java | 13 +++++++++++-- .../api/core/metadata/DseNodeProperties.java | 13 +++++++++++-- .../core/metadata/schema/DseAggregateMetadata.java | 13 +++++++++++-- .../core/metadata/schema/DseColumnMetadata.java | 13 +++++++++++-- .../core/metadata/schema/DseFunctionMetadata.java | 13 +++++++++++-- .../api/core/metadata/schema/DseIndexMetadata.java | 13 +++++++++++-- .../core/metadata/schema/DseKeyspaceMetadata.java | 13 +++++++++++-- .../core/metadata/schema/DseRelationMetadata.java | 13 +++++++++++-- .../api/core/metadata/schema/DseTableMetadata.java | 13 +++++++++++-- .../api/core/metadata/schema/DseViewMetadata.java | 13 +++++++++++-- .../core/servererrors/UnfitClientException.java | 13 +++++++++++-- .../api/core/session/DseProgrammaticArguments.java | 13 +++++++++++-- .../dse/driver/api/core/type/DseDataTypes.java | 13 +++++++++++-- .../driver/api/core/type/codec/DseTypeCodecs.java | 13 +++++++++++-- .../driver/internal/core/DseProtocolFeature.java | 13 +++++++++++-- .../internal/core/DseProtocolVersionRegistry.java | 13 +++++++++++-- .../core/InsightsClientLifecycleListener.java | 13 +++++++++++-- .../dse/driver/internal/core/auth/AuthUtils.java | 13 +++++++++++-- 
.../internal/core/auth/DseGssApiAuthProvider.java | 13 +++++++++++-- .../core/auth/DsePlainTextAuthProvider.java | 13 +++++++++++-- .../auth/DseProgrammaticPlainTextAuthProvider.java | 13 +++++++++++-- .../typesafe/DefaultDseDriverConfigLoader.java | 13 +++++++++++-- .../internal/core/context/DseDriverContext.java | 13 +++++++++++-- .../core/context/DseStartupOptionsBuilder.java | 13 +++++++++++-- .../driver/internal/core/cql/DseConversions.java | 13 +++++++++++-- .../ContinuousCqlRequestAsyncProcessor.java | 13 +++++++++++-- .../continuous/ContinuousCqlRequestHandler.java | 13 +++++++++++-- .../ContinuousCqlRequestSyncProcessor.java | 13 +++++++++++-- .../DefaultContinuousAsyncResultSet.java | 13 +++++++++++-- .../cql/continuous/DefaultContinuousResultSet.java | 13 +++++++++++-- .../ContinuousCqlRequestReactiveProcessor.java | 13 +++++++++++-- .../DefaultContinuousReactiveResultSet.java | 13 +++++++++++-- .../cql/reactive/CqlRequestReactiveProcessor.java | 13 +++++++++++-- .../cql/reactive/DefaultReactiveResultSet.java | 13 +++++++++++-- .../core/cql/reactive/DefaultReactiveRow.java | 13 +++++++++++-- .../core/cql/reactive/EmptySubscription.java | 13 +++++++++++-- .../core/cql/reactive/FailedPublisher.java | 13 +++++++++++-- .../core/cql/reactive/FailedReactiveResultSet.java | 13 +++++++++++-- .../core/cql/reactive/ReactiveOperators.java | 13 +++++++++++-- .../core/cql/reactive/ReactiveResultSetBase.java | 13 +++++++++++-- .../reactive/ReactiveResultSetSubscription.java | 13 +++++++++++-- .../core/cql/reactive/SimpleUnicastProcessor.java | 13 +++++++++++-- .../core/data/geometry/DefaultGeometry.java | 13 +++++++++++-- .../core/data/geometry/DefaultLineString.java | 13 +++++++++++-- .../internal/core/data/geometry/DefaultPoint.java | 13 +++++++++++-- .../core/data/geometry/DefaultPolygon.java | 13 +++++++++++-- .../internal/core/data/geometry/Distance.java | 13 +++++++++++-- .../data/geometry/DistanceSerializationProxy.java | 13 +++++++++++-- 
.../core/data/geometry/WkbSerializationProxy.java | 13 +++++++++++-- .../internal/core/data/geometry/WkbUtil.java | 13 +++++++++++-- .../core/graph/BytecodeGraphStatement.java | 13 +++++++++++-- .../core/graph/DefaultAsyncGraphResultSet.java | 13 +++++++++++-- .../core/graph/DefaultBatchGraphStatement.java | 13 +++++++++++-- .../graph/DefaultDseRemoteConnectionBuilder.java | 13 +++++++++++-- .../core/graph/DefaultFluentGraphStatement.java | 13 +++++++++++-- .../core/graph/DefaultGraphExecutionInfo.java | 13 +++++++++++-- .../core/graph/DefaultScriptGraphStatement.java | 13 +++++++++++-- .../core/graph/DseGraphRemoteConnection.java | 13 +++++++++++-- .../internal/core/graph/DseGraphTraversal.java | 13 +++++++++++-- .../driver/internal/core/graph/DsePredicate.java | 13 +++++++++++-- .../driver/internal/core/graph/EditDistance.java | 13 +++++++++++-- .../driver/internal/core/graph/GeoPredicate.java | 13 +++++++++++-- .../dse/driver/internal/core/graph/GeoUtils.java | 13 +++++++++++-- .../internal/core/graph/GraphConversions.java | 13 +++++++++++-- .../core/graph/GraphRequestAsyncProcessor.java | 13 +++++++++++-- .../internal/core/graph/GraphRequestHandler.java | 13 +++++++++++-- .../core/graph/GraphRequestSyncProcessor.java | 13 +++++++++++-- .../internal/core/graph/GraphResultSets.java | 13 +++++++++++-- .../internal/core/graph/GraphSON1SerdeTP.java | 13 +++++++++++-- .../internal/core/graph/GraphSON2SerdeTP.java | 13 +++++++++++-- .../internal/core/graph/GraphSON3SerdeTP.java | 13 +++++++++++-- .../driver/internal/core/graph/GraphSONUtils.java | 13 +++++++++++-- .../internal/core/graph/GraphStatementBase.java | 13 +++++++++++-- .../internal/core/graph/LegacyGraphNode.java | 13 +++++++++++-- .../internal/core/graph/ObjectGraphNode.java | 13 +++++++++++-- .../internal/core/graph/SearchPredicate.java | 13 +++++++++++-- .../driver/internal/core/graph/SearchUtils.java | 13 +++++++++++-- .../core/graph/SinglePageGraphResultSet.java | 13 +++++++++++-- 
.../internal/core/insights/AddressFormatter.java | 13 +++++++++++-- .../core/insights/ConfigAntiPatternsFinder.java | 13 +++++++++++-- .../internal/core/insights/DataCentersFinder.java | 13 +++++++++++-- .../core/insights/ExecutionProfilesInfoFinder.java | 13 +++++++++++-- .../internal/core/insights/InsightsClient.java | 13 +++++++++++-- .../core/insights/InsightsSupportVerifier.java | 13 +++++++++++-- .../driver/internal/core/insights/PackageUtil.java | 13 +++++++++++-- .../internal/core/insights/PlatformInfoFinder.java | 13 +++++++++++-- .../insights/ReconnectionPolicyInfoFinder.java | 13 +++++++++++-- .../configuration/InsightsConfiguration.java | 13 +++++++++++-- .../exceptions/InsightEventFormatException.java | 13 +++++++++++-- .../core/insights/schema/AuthProviderType.java | 13 +++++++++++-- .../internal/core/insights/schema/Insight.java | 13 +++++++++++-- .../core/insights/schema/InsightMetadata.java | 13 +++++++++++-- .../internal/core/insights/schema/InsightType.java | 13 +++++++++++-- .../core/insights/schema/InsightsPlatformInfo.java | 13 +++++++++++-- .../core/insights/schema/InsightsStartupData.java | 13 +++++++++++-- .../core/insights/schema/InsightsStatusData.java | 13 +++++++++++-- .../core/insights/schema/LoadBalancingInfo.java | 13 +++++++++++-- .../insights/schema/PoolSizeByHostDistance.java | 13 +++++++++++-- .../insights/schema/ReconnectionPolicyInfo.java | 13 +++++++++++-- .../driver/internal/core/insights/schema/SSL.java | 13 +++++++++++-- .../core/insights/schema/SessionStateForNode.java | 13 +++++++++++-- .../insights/schema/SpecificExecutionProfile.java | 13 +++++++++++-- .../insights/schema/SpeculativeExecutionInfo.java | 13 +++++++++++-- .../core/loadbalancing/DseLoadBalancingPolicy.java | 13 +++++++++++-- .../internal/core/metadata/DseTopologyMonitor.java | 13 +++++++++++-- .../schema/DefaultDseAggregateMetadata.java | 13 +++++++++++-- .../metadata/schema/DefaultDseColumnMetadata.java | 13 +++++++++++-- 
.../schema/DefaultDseFunctionMetadata.java | 13 +++++++++++-- .../metadata/schema/DefaultDseIndexMetadata.java | 13 +++++++++++-- .../schema/DefaultDseKeyspaceMetadata.java | 13 +++++++++++-- .../metadata/schema/DefaultDseTableMetadata.java | 13 +++++++++++-- .../metadata/schema/DefaultDseViewMetadata.java | 13 +++++++++++-- .../schema/parsing/DseAggregateParser.java | 13 +++++++++++-- .../metadata/schema/parsing/DseFunctionParser.java | 13 +++++++++++-- .../metadata/schema/parsing/DseSchemaParser.java | 13 +++++++++++-- .../schema/parsing/DseSchemaParserFactory.java | 13 +++++++++++-- .../metadata/schema/parsing/DseTableParser.java | 13 +++++++++++-- .../metadata/schema/parsing/DseViewParser.java | 13 +++++++++++-- .../schema/queries/DseSchemaQueriesFactory.java | 13 +++++++++++-- .../token/DseReplicationStrategyFactory.java | 13 +++++++++++-- .../core/metadata/token/EverywhereStrategy.java | 13 +++++++++++-- .../core/metrics/DseDropwizardMetricsFactory.java | 13 +++++++++++-- .../metrics/DseDropwizardSessionMetricUpdater.java | 13 +++++++++++-- .../driver/internal/core/search/DateRangeUtil.java | 13 +++++++++++-- .../internal/core/session/DefaultDseSession.java | 13 +++++++++++-- .../core/tracker/MultiplexingRequestTracker.java | 13 +++++++++++-- .../core/type/codec/geometry/GeometryCodec.java | 13 +++++++++++-- .../core/type/codec/geometry/LineStringCodec.java | 13 +++++++++++-- .../core/type/codec/geometry/PointCodec.java | 13 +++++++++++-- .../core/type/codec/geometry/PolygonCodec.java | 13 +++++++++++-- .../core/type/codec/time/DateRangeCodec.java | 13 +++++++++++-- .../util/concurrent/BoundedConcurrentQueue.java | 13 +++++++++++-- .../com/datastax/dse/driver/Driver.properties | 13 +++++++++++-- .../com/datastax/dse/driver/DriverRunListener.java | 13 +++++++++++-- .../datastax/dse/driver/DseTestDataProviders.java | 13 +++++++++++-- .../com/datastax/dse/driver/DseTestFixtures.java | 13 +++++++++++-- .../api/core/config/DseDriverConfigLoaderTest.java | 13 
+++++++++++-- .../api/core/data/time/DateRangePrecisionTest.java | 13 +++++++++++-- .../driver/api/core/data/time/DateRangeTest.java | 13 +++++++++++-- .../driver/api/core/graph/predicates/GeoTest.java | 13 +++++++++++-- .../api/core/graph/predicates/SearchTest.java | 13 +++++++++++-- .../core/DseProtocolVersionRegistryTest.java | 13 +++++++++++-- .../core/context/DseStartupOptionsBuilderTest.java | 13 +++++++++++-- ...ntinuousCqlRequestHandlerNodeTargetingTest.java | 13 +++++++++++-- .../ContinuousCqlRequestHandlerReprepareTest.java | 13 +++++++++++-- .../ContinuousCqlRequestHandlerRetryTest.java | 13 +++++++++++-- .../ContinuousCqlRequestHandlerTest.java | 13 +++++++++++-- .../ContinuousCqlRequestHandlerTestBase.java | 13 +++++++++++-- .../DefaultContinuousAsyncResultSetTest.java | 13 +++++++++++-- .../continuous/DefaultContinuousResultSetTest.java | 13 +++++++++++-- .../ContinuousCqlRequestReactiveProcessorTest.java | 13 +++++++++++-- .../reactive/CqlRequestReactiveProcessorTest.java | 13 +++++++++++-- .../reactive/DefaultReactiveResultSetTckTest.java | 13 +++++++++++-- .../core/cql/reactive/MockAsyncResultSet.java | 13 +++++++++++-- .../driver/internal/core/cql/reactive/MockRow.java | 13 +++++++++++-- .../ReactiveResultSetSubscriptionTest.java | 13 +++++++++++-- .../reactive/SimpleUnicastProcessorTckTest.java | 13 +++++++++++-- .../cql/reactive/SimpleUnicastProcessorTest.java | 13 +++++++++++-- .../internal/core/cql/reactive/TestSubscriber.java | 13 +++++++++++-- .../core/data/geometry/DefaultLineStringTest.java | 13 +++++++++++-- .../core/data/geometry/DefaultPointTest.java | 13 +++++++++++-- .../core/data/geometry/DefaultPolygonTest.java | 13 +++++++++++-- .../internal/core/data/geometry/DistanceTest.java | 13 +++++++++++-- .../core/data/geometry/SerializationUtils.java | 13 +++++++++++-- .../driver/internal/core/graph/GraphNodeTest.java | 13 +++++++++++-- .../core/graph/GraphRequestHandlerTest.java | 13 +++++++++++-- 
.../core/graph/GraphRequestHandlerTestHarness.java | 13 +++++++++++-- .../core/insights/AddressFormatterTest.java | 13 +++++++++++-- .../insights/ConfigAntiPatternsFinderTest.java | 13 +++++++++++-- .../core/insights/DataCentersFinderTest.java | 13 +++++++++++-- .../core/insights/ExecutionProfileMockUtil.java | 13 +++++++++++-- .../insights/ExecutionProfilesInfoFinderTest.java | 13 +++++++++++-- .../internal/core/insights/InsightsClientTest.java | 13 +++++++++++-- .../core/insights/InsightsSupportVerifierTest.java | 13 +++++++++++-- .../internal/core/insights/PackageUtilTest.java | 13 +++++++++++-- .../core/insights/PlatformInfoFinderTest.java | 14 +++++++++++--- .../insights/ReconnectionPolicyInfoFinderTest.java | 14 +++++++++++--- .../DseLoadBalancingPolicyEventsTest.java | 13 +++++++++++-- .../DseLoadBalancingPolicyInitTest.java | 13 +++++++++++-- .../DseLoadBalancingPolicyQueryPlanTest.java | 13 +++++++++++-- .../DseLoadBalancingPolicyRequestTrackerTest.java | 13 +++++++++++-- .../DseLoadBalancingPolicyTestBase.java | 13 +++++++++++-- .../type/codec/geometry/GeometryCodecTest.java | 13 +++++++++++-- .../type/codec/geometry/LineStringCodecTest.java | 13 +++++++++++-- .../core/type/codec/geometry/PointCodecTest.java | 13 +++++++++++-- .../core/type/codec/geometry/PolygonCodecTest.java | 13 +++++++++++-- .../core/type/codec/time/DateRangeCodecTest.java | 13 +++++++++++-- .../concurrent/BoundedConcurrentQueueTest.java | 13 +++++++++++-- .../resources/config/customApplication.properties | 13 +++++++++++-- .../resources/insights/malformed-pom.properties | 13 +++++++++++-- core/src/test/resources/insights/pom.properties | 13 +++++++++++-- core/src/test/resources/logback-test.xml | 13 +++++++++++-- .../auth/DseGssApiAuthProviderAlternateIT.java | 13 +++++++++++-- .../api/core/auth/DseGssApiAuthProviderIT.java | 13 +++++++++++-- .../api/core/auth/DsePlainTextAuthProviderIT.java | 13 +++++++++++-- .../api/core/auth/DseProxyAuthenticationIT.java | 13 +++++++++++-- 
.../dse/driver/api/core/auth/EmbeddedAds.java | 13 +++++++++++-- .../dse/driver/api/core/auth/EmbeddedAdsRule.java | 13 +++++++++++-- .../dse/driver/api/core/auth/KerberosUtils.java | 13 +++++++++++-- .../core/cql/continuous/ContinuousPagingIT.java | 13 +++++++++++-- .../cql/continuous/ContinuousPagingITBase.java | 13 +++++++++++-- .../reactive/ContinuousPagingReactiveIT.java | 13 +++++++++++-- .../cql/reactive/DefaultReactiveResultSetIT.java | 13 +++++++++++-- .../driver/api/core/data/geometry/GeometryIT.java | 13 +++++++++++-- .../api/core/data/geometry/LineStringIT.java | 13 +++++++++++-- .../dse/driver/api/core/data/geometry/PointIT.java | 13 +++++++++++-- .../driver/api/core/data/geometry/PolygonIT.java | 13 +++++++++++-- .../dse/driver/api/core/data/time/DateRangeIT.java | 13 +++++++++++-- .../api/core/graph/GraphAuthenticationIT.java | 13 +++++++++++-- .../driver/api/core/graph/GraphDataTypeITBase.java | 13 +++++++++++-- .../api/core/graph/GraphGeoSearchIndexIT.java | 13 +++++++++++-- .../api/core/graph/GraphTextSearchIndexIT.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphTimeoutsIT.java | 13 +++++++++++-- .../driver/api/core/graph/SampleGraphScripts.java | 13 +++++++++++-- .../driver/api/core/graph/SocialTraversalDsl.java | 13 +++++++++++-- .../api/core/graph/SocialTraversalSourceDsl.java | 13 +++++++++++-- .../driver/api/core/graph/TinkerEdgeAssert.java | 13 +++++++++++-- .../driver/api/core/graph/TinkerElementAssert.java | 13 +++++++++++-- .../api/core/graph/TinkerGraphAssertions.java | 13 +++++++++++-- .../driver/api/core/graph/TinkerPathAssert.java | 13 +++++++++++-- .../driver/api/core/graph/TinkerTreeAssert.java | 13 +++++++++++-- .../driver/api/core/graph/TinkerVertexAssert.java | 13 +++++++++++-- .../api/core/graph/TinkerVertexPropertyAssert.java | 13 +++++++++++-- .../core/graph/remote/GraphDataTypeRemoteIT.java | 13 +++++++++++-- .../GraphTraversalMetaPropertiesRemoteIT.java | 13 +++++++++++-- 
.../GraphTraversalMultiPropertiesRemoteIT.java | 13 +++++++++++-- .../core/graph/remote/GraphTraversalRemoteIT.java | 13 +++++++++++-- .../graph/statement/GraphDataTypeFluentIT.java | 13 +++++++++++-- .../graph/statement/GraphDataTypeScriptIT.java | 13 +++++++++++-- .../graph/statement/GraphTraversalBatchIT.java | 13 +++++++++++-- .../api/core/graph/statement/GraphTraversalIT.java | 13 +++++++++++-- .../statement/GraphTraversalMetaPropertiesIT.java | 13 +++++++++++-- .../statement/GraphTraversalMultiPropertiesIT.java | 13 +++++++++++-- .../driver/api/core/insights/InsightsClientIT.java | 13 +++++++++++-- .../dse/driver/api/core/metadata/MetadataIT.java | 13 +++++++++++-- .../core/metadata/schema/AbstractMetadataIT.java | 13 +++++++++++-- .../metadata/schema/DseAggregateMetadataIT.java | 13 +++++++++++-- .../metadata/schema/DseFunctionMetadataIT.java | 13 +++++++++++-- .../testinfra/DseSessionBuilderInstantiator.java | 13 +++++++++++-- .../api/testinfra/session/DseSessionRule.java | 13 +++++++++++-- .../testinfra/session/DseSessionRuleBuilder.java | 13 +++++++++++-- .../osgi/DseOsgiCustomLoadBalancingPolicyIT.java | 13 +++++++++++-- .../dse/driver/osgi/DseOsgiGeoTypesIT.java | 13 +++++++++++-- .../datastax/dse/driver/osgi/DseOsgiGraphIT.java | 13 +++++++++++-- .../com/datastax/dse/driver/osgi/DseOsgiLz4IT.java | 13 +++++++++++-- .../dse/driver/osgi/DseOsgiReactiveIT.java | 13 +++++++++++-- .../datastax/dse/driver/osgi/DseOsgiShadedIT.java | 13 +++++++++++-- .../datastax/dse/driver/osgi/DseOsgiSnappyIT.java | 13 +++++++++++-- .../datastax/dse/driver/osgi/DseOsgiVanillaIT.java | 13 +++++++++++-- .../dse/driver/osgi/support/DseBundleOptions.java | 13 +++++++++++-- .../driver/osgi/support/DseOsgiGeoTypesTests.java | 13 +++++++++++-- .../dse/driver/osgi/support/DseOsgiGraphTests.java | 13 +++++++++++-- .../driver/osgi/support/DseOsgiReactiveTests.java | 13 +++++++++++-- .../driver/osgi/support/DseOsgiSimpleTests.java | 13 +++++++++++-- 
.../src/test/resources/logback-test.xml | 13 +++++++++++-- .../driver/api/querybuilder/DseQueryBuilder.java | 13 +++++++++++-- .../driver/api/querybuilder/DseSchemaBuilder.java | 13 +++++++++++-- .../dse/driver/api/querybuilder/package-info.java | 14 +++++++++++--- .../querybuilder/schema/CreateDseAggregateEnd.java | 13 +++++++++++-- .../schema/CreateDseAggregateStart.java | 13 +++++++++++-- .../schema/CreateDseAggregateStateFunc.java | 13 +++++++++++-- .../querybuilder/schema/CreateDseFunctionEnd.java | 13 +++++++++++-- .../schema/CreateDseFunctionStart.java | 13 +++++++++++-- .../schema/CreateDseFunctionWithLanguage.java | 13 +++++++++++-- .../schema/CreateDseFunctionWithNullOption.java | 13 +++++++++++-- .../schema/CreateDseFunctionWithType.java | 13 +++++++++++-- .../api/querybuilder/schema/package-info.java | 14 +++++++++++--- .../schema/DefaultCreateDseAggregate.java | 13 +++++++++++-- .../schema/DefaultCreateDseFunction.java | 13 +++++++++++-- .../internal/querybuilder/schema/package-info.java | 14 +++++++++++--- .../schema/CreateDseAggregateTest.java | 13 +++++++++++-- .../querybuilder/schema/CreateDseFunctionTest.java | 13 +++++++++++-- 315 files changed, 3465 insertions(+), 635 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java b/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java index 3559f9c6690..8b9e9f2126b 100644 --- a/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java +++ b/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java index 6313f87917d..3d2a016f899 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseProtocolVersion.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java index 2226c97253e..14946aae384 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java index 088b440c362..0501604de52 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index 61c079a1a57..121a8e64ba5 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java index 482579895b0..dd595f20ee0 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/BaseDseAuthenticator.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java index 71c5f187727..09621279102 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java index 4c9ff5343f5..4cb45dfb66a 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java index f1d41016e35..a3756277e2a 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java index 4b5e791b584..1d9ba9a88f9 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.config; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index a404ec3b2f2..b8b46e1f699 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.config; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java index 0de14867950..a9588be16a9 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousAsyncResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java index f6b8c768a4b..d4473144c48 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java index 32863915819..dfcabeb8a4f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java index 233a02fc455..9eb68cf0399 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java index ff12e69a45e..4718cef627a 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java index 40147cd1ab3..5cd3445d6e3 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveQueryMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java index 4fde5ba0293..981cf309d4a 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java index 24692911c38..ff28ef2f575 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java index ddad7c89582..fa0644c13c6 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java index 536ee3faf44..2244b9cd758 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Geometry.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java index 2a766d08b81..84370ddafcd 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/LineString.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java index 4e02b1aaf5a..338de70625d 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Point.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java index de8e52bf04b..6f66f47042e 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/geometry/Polygon.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java index 0f4dc42a6ba..fd17047b695 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRange.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.time; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java index 0ce2f104330..5d7427110cb 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangeBound.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.data.time; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java index 3ab93a78bba..db133a6846b 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecision.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.time; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java index 2aa661de1df..06c5301dd98 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java index 9125562c1f1..1757212aa71 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java index 246d80db2e7..d83c9e89141 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatementBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java index 206121c85fb..cb96526d0f1 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java index 112663e1bb8..fdc483325a7 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java index b9f8aaea362..3c480ff826c 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java index a0220607adc..82d7f0132d9 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatementBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java index 21483be54f9..483ed0be782 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java index ceec6cc31b3..faa3f220259 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphNode.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java index 1c809725cda..f237f00ce16 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java index 201ae095b63..5f30f4a9f88 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java index 87f1d5f4664..a45b165ecd3 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java index 1bcdcf7710f..92be32e37d5 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java index f85ddf1b0cd..7731b21859b 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java index 33a5c8efc9e..24491b942b0 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java index 0aa1da6df75..eb80aa7d14f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Geo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java index 5b2872ca505..5e5a109f874 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/Search.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java index aa44807929a..11dde63c235 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java index d9fb7a799ea..02356fb6960 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java index 33a0dd87c3e..0b6ed9ed5d6 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseColumnMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java index 8393e421b1d..a1964b3a1b5 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java index 469cf3babe7..7bda2fae326 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseIndexMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java index 1460de0ba06..440c7649818 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java index 4dc8bb1fc50..97ce6df7386 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseRelationMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java index 4aa8ab9690a..31d2201bea8 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java index ff4bc7b7d98..3af7594f606 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseViewMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java b/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java index bdd121eebec..dee5d45061f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/servererrors/UnfitClientException.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.servererrors; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java index bbcb9882a11..7083cdcc6e4 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.session; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java b/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java index b2191023c66..d2c913bacd4 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/type/DseDataTypes.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.type; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java b/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java index 8b642783fac..13102b3e94b 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/type/codec/DseTypeCodecs.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.type.codec; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java index 911005eaca6..ce0f13eea41 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolFeature.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java index 7ba50ea9098..3f311262e4c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java index 3a9f2dfb42b..e4ddaf5cea4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java index f13d3632c91..15efc152837 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/AuthUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java index e0267d0ccd0..7c93f6c015c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java index 5521a519ce0..8e2dfd5b03b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java index 9dadcc2311a..cc70951c0b2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.auth; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java b/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java index ea7e46670c8..81033d0a6f1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/config/typesafe/DefaultDseDriverConfigLoader.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.config.typesafe; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index ae4a63f8912..30c2d1e625d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.context; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java index 54a651634e9..c26dd5bf2ad 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.context; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java index 0cc06b3be11..32fa823ca2e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java index ed2959c71dd..eea0b331e73 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 2e5b2ea5c2d..b266d316b5c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java index 592afedae1b..1333d50d720 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestSyncProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java index a804ac8dec7..10a5bfda2f2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java index b5c5d9a7e30..38fcd746f9e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java index eb38df3e6b6..6feff64f04a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java index 673cbe1777c..d1131e9bbe8 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/DefaultContinuousReactiveResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java index 0e2b03cdbfb..c2b1872e252 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java index 3e50e1ed116..f24d2638f37 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java index e6e02a9c244..b2708a83633 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java index b48a057d36a..00e543989c7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/EmptySubscription.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java index d069e41d227..fc433a725b3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedPublisher.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java index 9274b2f1f25..07712f457fb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/FailedReactiveResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java index 966004a97d3..7bc8bed4bfb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveOperators.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java index 514467d1d8c..2e68392a367 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java index fc690661845..160e71296be 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java index 3504c5c5688..f93b4405ea0 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java index 18e851f23ea..f4113f5fd62 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultGeometry.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java index d50db16eadb..b7ceee3d36b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineString.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java index a9d853dc5a8..53fa8dde4ca 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPoint.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java index 244e7a3675f..f3d17e7c720 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygon.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java index 99cca96c5b9..d6d235dd914 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/Distance.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java index 43ee6d55ff6..f264393aefd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceSerializationProxy.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java index 42e2aaf490b..11ab10366e5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbSerializationProxy.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java index c932b7eb879..56f86ba1a47 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/data/geometry/WkbUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java index 9a6f3c7bd0c..8cad9f6d85c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java index 34929de642f..63e7571f18c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java index 36da830cee0..08acb2815f2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultBatchGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java index 3d31f8e4140..3d607db5269 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java index d3aba9c9f58..e8e9dd9dd14 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultFluentGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java index c5e3dc492c9..69ec6cd8de4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java index 2ef1b4f45d5..4e704352355 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java index ad66396daae..b9525eb3596 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java index d4a3643e461..45f9b670b16 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java index 7d2ba1d65e9..ddc68227388 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DsePredicate.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java index 33e5f1d5c52..6570a6f8e8a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/EditDistance.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java index c72b4a28d6e..ce192ac0330 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoPredicate.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java index 04e7b6d0568..b2ee84b63a7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GeoUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 13a83732ca0..91facbf7469 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 96992f1abf5..05768b15d72 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 14e91877a17..23f1b41b869 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java index efd211a8edc..196baa1a42b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java index 04e42d94bec..63cdf327f35 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java index 77cb3ac4640..5377a4663e7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON1SerdeTP.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java index 9b385e47622..c394ba219e9 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON2SerdeTP.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java index 9c29dd31eeb..7d44ca195c9 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java index 12b74016b0d..6615760656b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java index 645fa8f2d20..624fec901ad 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java index f5f61d9072f..3dc8360662b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/LegacyGraphNode.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java index 92e1fc17f0c..320031feb8f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ObjectGraphNode.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java index 304a1e09a2a..a6991021a70 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchPredicate.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java index 36db6be0db3..1e64c5f2c7c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SearchUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java index 2d5599e5351..3f0f81a11bd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java index abeb30ce85f..9f1df8c7ddf 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/AddressFormatter.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java index 034b87dd1c6..c1e4b5a21e3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java index 0ba645817a5..ac63338a1cb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java index a28fea6cb78..b72c3e6f420 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 96c606eb869..b1be52cc78d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifier.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifier.java index dd452672c23..8d6b83088e8 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifier.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifier.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java index b1d3c10505b..74a6cc82ea5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PackageUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java index 5dd2f9072a6..c8929933fc9 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java index 11491ce1f41..a6dab75e70d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java index d130510e09d..900a64a0af3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/configuration/InsightsConfiguration.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.configuration; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java index bfb6a28b441..539734c0427 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/exceptions/InsightEventFormatException.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.exceptions; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/AuthProviderType.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/AuthProviderType.java index 93fc2f70ca8..df58f45c2fd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/AuthProviderType.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/AuthProviderType.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java index b4d79e16e2b..19759006178 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/Insight.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java index ca02eee3ef5..7b588270645 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java index a6ec490491f..7b8f3213c06 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightType.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java index ab217796fed..bc24f8dfb42 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsPlatformInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java index e9b8d72a57d..e57eb6f196a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStartupData.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java index 789baea6a3d..2ef967c4e85 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/InsightsStatusData.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java index ec05f3094e5..338bfed1dac 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/LoadBalancingInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java index ed99ad17b53..060e955d15b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/PoolSizeByHostDistance.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java index 428f88ac6b4..1a67c4b0633 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/ReconnectionPolicyInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java index 6bca417f138..96f0916a810 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SSL.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java index 5fe7f82e7a6..78e5b21dd87 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SessionStateForNode.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java index a911f1016ca..ed7b7a14096 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpecificExecutionProfile.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java index 39a4643d041..e2e30e6b982 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/schema/SpeculativeExecutionInfo.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java index 5f6edbc693c..189f38f6fc1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java index 38f7f8046e9..c8d9e1fc5de 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java index 884f2c10089..6902b2873e1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java index 28e9357f376..01c76f2292b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseColumnMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java index d215810f291..d741bf5935d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseIndexMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseIndexMetadata.java index 27b320cb0d4..3eeb1e14755 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseIndexMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseIndexMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java index af8c6e2c13d..50464c568a0 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java index 7149bb2d77c..91c3e6e7723 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java index cc0de9a9b43..31224bb66d3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java index 1544debf604..2bef719b1cf 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java index 77e3c507da1..53e8f10fdb5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java index ab134904a56..d87cc5e54de 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java index 5497c21c8a3..1428b8f5873 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java index 87fb97aaa4f..b803750f44f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseViewParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseViewParser.java index eb528561fb2..af3c0a246c7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseViewParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseViewParser.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java index 8f83173502d..5d8d6d54dbe 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.queries; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java index 1b3eb0781ff..5027ad9d7de 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.token; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java index 4242e4c718d..237ec7922f1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.token; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java index 147a8eb5966..ea6f5219ad8 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metrics; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java index fa8f6bcff56..5d957578ffd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metrics; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/search/DateRangeUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/search/DateRangeUtil.java index 601006d91a8..96d568d548b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/search/DateRangeUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/search/DateRangeUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.search; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java index 0e73c41069a..2ec1ca0ad6e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.session; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java index d93b53f71c9..80c36451a68 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.tracker; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java index 13f54743fed..819fd9d108b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java index 154120e921a..d04d8459d12 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java index b9327d1cfe2..1ccc7b0adc1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java index 6074fbb0b0a..823aaad7a4d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java index 133faec9ff4..094904edbc4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.time; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java b/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java index 036c0b16cc9..edb83d5f688 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueue.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.util.concurrent; diff --git a/core/src/main/resources/com/datastax/dse/driver/Driver.properties b/core/src/main/resources/com/datastax/dse/driver/Driver.properties index d5c744b4eee..23651bc0d4c 100644 --- a/core/src/main/resources/com/datastax/dse/driver/Driver.properties +++ b/core/src/main/resources/com/datastax/dse/driver/Driver.properties @@ -1,8 +1,17 @@ # # Copyright DataStax, Inc. # -# This software can be used solely with DataStax Enterprise. Please consult the license at -# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# driver.groupId=${project.groupId} diff --git a/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java b/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java index 474e277d83a..ad9f7a42c7a 100644 --- a/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java +++ b/core/src/test/java/com/datastax/dse/driver/DriverRunListener.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver; diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java index 37af095d5f7..3cb82defa1c 100644 --- a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java +++ b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver; diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java index 558082d9e21..38893df3739 100644 --- a/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java +++ b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java index fc2d9942a4b..0a47fbc2cc1 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.config; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java index 264dd4e4d68..07b8e468585 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangePrecisionTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.time; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java index 74a7f5101c4..e4e071a38f7 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.data.time; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java index 431e8920d88..2fa006f7082 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/GeoTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java index 3c8c4ee5a3f..8144bedb236 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/SearchTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java index bac353fcdd7..4d19f1903aa 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index 36bd546a3ad..46c01fc2502 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.context; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java index aacccb26ed7..3d560d964b0 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java index 754d9decded..989665a5efe 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java index 41e6ed93e1a..27aabce3e30 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java index 751b0316097..fca7af05da1 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java index 2679c7567ab..2336d247521 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTestBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java index ed2d56e1473..bcc4c1b0fb4 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSetTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java index 188cdb21be6..03cb9b58c7a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousResultSetTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java index 30f630b284c..ddd956ab84e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.continuous.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java index 9555e52a2ed..4655ae04607 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/CqlRequestReactiveProcessorTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java index fe12243cf0a..d1b97d20901 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveResultSetTckTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java index ea8ad0eb938..0e215f22c78 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockAsyncResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java index 00a973d5ba5..6e2e7196d6d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java index 0f5bed7b581..9a57f9e03fb 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java index b7238812913..b92f98c5b2e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTckTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java index 1f44fa5c6cc..211df9aa28c 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/SimpleUnicastProcessorTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java index eaa2d8a5dbf..607bf57aac5 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.cql.reactive; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java index d3137071471..b362394a528 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultLineStringTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java index 558d49173d7..526d8de7329 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPointTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java index e015ce5cc33..b5bc53da030 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DefaultPolygonTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java index 1279390b491..52b1e21ec6e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/DistanceTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java index 0bedb9e5821..db751d532bb 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/data/geometry/SerializationUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.data.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java index 2d34ba5006b..46d3d9499c8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index b658903dd79..b95f682a3c6 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index b423041588c..cd1ccba8862 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java index cbaf6f1995d..ec65f4e156d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/AddressFormatterTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java index 064d030d4d9..ef40856e2fc 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java index dfcf0cc04a6..c11b9144220 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinderTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java index 523aac1d6a3..10b319b1228 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java index 0d4d1bdc198..ef39e19367c 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index 1b99a29fec5..537b023550b 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java index ce00d27d960..ccd3f94212a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsSupportVerifierTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java index 9e7fdc72e0d..352ef690165 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PackageUtilTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.insights; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java index b41f2057f43..ed4ec3581f0 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java @@ -1,10 +1,18 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - package com.datastax.dse.driver.internal.core.insights; import static com.datastax.dse.driver.internal.core.insights.PlatformInfoFinder.UNVERIFIED_RUNTIME_VERSION; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java index 8bed7d9e288..5952e05f30e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ReconnectionPolicyInfoFinderTest.java @@ -1,10 +1,18 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - package com.datastax.dse.driver.internal.core.insights; import static org.assertj.core.api.Assertions.assertThat; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java index e8063c219fb..c1a7d96ae5b 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java index 0c5babc6d85..aab118c5d95 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java index c5d2b6ae1f6..34abc2b7af7 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java index 57073f72198..c9fba9a9301 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java index d2049379477..b7c9ec5f6b5 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.loadbalancing; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java index afb063182de..3dff7dc704a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/GeometryCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java index ed3bf66e8c8..5f8f0c11a64 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/LineStringCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java index 5230ea2f8d5..cec225df130 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PointCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java index e39097e5451..e4c65a37189 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/geometry/PolygonCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.geometry; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java index 7ff553628d4..affbb32dea2 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/type/codec/time/DateRangeCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.type.codec.time; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java index 79297fb9caa..0509994282f 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/util/concurrent/BoundedConcurrentQueueTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.util.concurrent; diff --git a/core/src/test/resources/config/customApplication.properties b/core/src/test/resources/config/customApplication.properties index 4956c960b66..26375fcc0f4 100644 --- a/core/src/test/resources/config/customApplication.properties +++ b/core/src/test/resources/config/customApplication.properties @@ -1,8 +1,17 @@ # # Copyright DataStax, Inc. # -# This software can be used solely with DataStax Enterprise. Please consult the license at -# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # datastax-java-driver.basic.request.consistency=ONE diff --git a/core/src/test/resources/insights/malformed-pom.properties b/core/src/test/resources/insights/malformed-pom.properties index db049766ebc..dcb78b4b5ac 100644 --- a/core/src/test/resources/insights/malformed-pom.properties +++ b/core/src/test/resources/insights/malformed-pom.properties @@ -1,8 +1,17 @@ # # Copyright DataStax, Inc. # -# This software can be used solely with DataStax Enterprise. Please consult the license at -# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # #Created by Apache Maven 3.5.0 diff --git a/core/src/test/resources/insights/pom.properties b/core/src/test/resources/insights/pom.properties index cb4f891bd9d..5f1c59124aa 100644 --- a/core/src/test/resources/insights/pom.properties +++ b/core/src/test/resources/insights/pom.properties @@ -1,8 +1,17 @@ # # Copyright DataStax, Inc. # -# This software can be used solely with DataStax Enterprise. Please consult the license at -# http://www.datastax.com/terms/datastax-dse-driver-license-terms +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # #Created by Apache Maven 3.5.0 diff --git a/core/src/test/resources/logback-test.xml b/core/src/test/resources/logback-test.xml index 52c8cca374d..39f172d2faf 100644 --- a/core/src/test/resources/logback-test.xml +++ b/core/src/test/resources/logback-test.xml @@ -3,8 +3,17 @@ Copyright DataStax, Inc. - This software can be used solely with DataStax Enterprise. 
Please consult the license at - http://www.datastax.com/terms/datastax-dse-driver-license-terms + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. --> diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java index 1270b0d26c0..5aad5e2d7df 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java index 9acb71ca26b..681ead95582 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java index 674e32b4391..588aae41e45 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java index e7fc9dd3f03..47f1d4f5b4f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java index 6f42c05a997..c3d7dce18cc 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAds.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java index eb8e18cf908..88ad0fdbc10 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java index 3ba295b6fda..8876d07424e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/KerberosUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.auth; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index 178b2915005..fab508fbd42 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java index eb79035116f..ddf2a30dd43 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java index 5bf2c3a2b86..cb52e246707 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.cql.continuous.reactive; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java index d9b32344c96..6e6adf50e2a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/DefaultReactiveResultSetIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java index b230b09d6e3..e992ec1a777 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/GeometryIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java index 0f4b7fac931..685ea03520c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java index 1aebb0c1704..0d756c8bc4a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java index 556b76628f2..67a8efc3c85 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.geometry; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java index f83ca1c168c..e22f856c9ea 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.data.time; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java index 96c56637e47..437c16ec2b1 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java index 3f7f1c942f9..8636c67cbdb 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java index d741a478918..3f5c8d72a25 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java index 0e64eb568e7..76c880ac9dc 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index c859c2514ca..c9459516114 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java index 068fb464b2b..530a5e38ddd 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java index 559949b676b..c14c9e80f67 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalDsl.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java index ba50213c4aa..d7651ac327e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java index 6b30830ac42..efd2927a4da 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerEdgeAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java index 19e668c8dee..ff0ef40a3a4 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerElementAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java index 0de34c14bbf..6555f41a772 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerGraphAssertions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java index 9136b0cec8b..fe1e7273485 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerPathAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java index 58cb7ff956f..d5f5484dc98 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerTreeAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java index 981edb8d65a..de0bfdbc863 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java index 4cdc3a844d6..a8dff9a72ec 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/TinkerVertexPropertyAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java index 6e8712406be..a0e9e965b65 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java index a565b8dab43..71fa0b89761 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java index 614e6034df5..cf9a6688c8e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java index f5a3ecbd7a8..a8d72d4bac3 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java index 4a82285194d..f835aa7eb84 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java index 924b91cc289..0e2df17f9b6 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java index 413168f4649..cca434bfbdf 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java index f6106364a23..a6737964a4f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java index 2ac58bedc2b..9d3abcdbe76 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java index e1930b41861..fc0234fc8d7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java index 39b8f3cec94..4eb90d6dbd0 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.insights; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java index 3a1e1917dfb..d9ae83c3531 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java index 101c2f82666..0d5c5165b72 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java index 22d5cc5d6f8..4cc7112fc9b 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java index b87f311ce9d..f05e8b5dc59 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java index 49b36c75066..c4f6f16d92a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.testinfra; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java index ebc3a6a809b..465dc747be1 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRule.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.testinfra.session; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java index 87d9de1e1cb..ec5d5c5a671 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/session/DseSessionRuleBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.testinfra.session; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java index 1547ca68953..04cb0087747 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiCustomLoadBalancingPolicyIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java index 1569e83c3a2..a2b72cec0ed 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGeoTypesIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java index 2c925e8808f..b6d648929e1 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiGraphIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java index 9f399c17e27..b5c21513f76 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiLz4IT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java index 72c2c29c480..bcd0403fec4 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiReactiveIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java index 186f1e1b81e..60d8eea985f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiShadedIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java index 4583630c72c..9367c703266 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiSnappyIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java index e89d7a9ce45..20d0478fc66 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/DseOsgiVanillaIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java index fa471aa9d0f..b1b87e75b6e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseBundleOptions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi.support; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java index 816b9c6a1da..30aafec03d4 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGeoTypesTests.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.osgi.support; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java index 034a63d33f5..e998595bb63 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiGraphTests.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi.support; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java index 66223230e34..c7284575cdd 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiReactiveTests.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.osgi.support; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java index d3a1424d460..c4a2ab6f447 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/osgi/support/DseOsgiSimpleTests.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.osgi.support; diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index df47408313f..77fa051841e 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -3,8 +3,17 @@ Copyright DataStax, Inc. - This software can be used solely with DataStax Enterprise. Please consult the license at - http://www.datastax.com/terms/datastax-dse-driver-license-terms + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. --> diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java index 08d9e808557..696819720be 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java index 73cf5450dbf..7c29a7c3033 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.querybuilder; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java index 01c03187789..e46a4825de0 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/package-info.java @@ -1,10 +1,18 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - /** * This package effectively mirrors the Cassandra OSS Query Builder package to allow DSE extended * schema and query building for the DSE driver. In general, a class in this package should simply diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java index d8d6151365e..cfe0200c3e5 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateEnd.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java index f507a86ca04..fb5747bf51a 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStart.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java index a90e97c8557..c5ee0a29d33 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseAggregateStateFunc.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java index e7538dedb87..762022c3cda 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionEnd.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java index 8e2e71ad67a..542cdc86be7 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionStart.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java index 47c9630f4bb..7b5ad966cef 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithLanguage.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java index f2fd33e1964..fc630f9c7aa 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithNullOption.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java index b226044ca3c..5bc38495f89 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseFunctionWithType.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java index c3d4f5bda6c..42ea36eab12 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/api/querybuilder/schema/package-info.java @@ -1,10 +1,18 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - /** * This package effectively mirrors the Cassandra OSS Schema interfaces to allow extended schema and * query building for the DSE driver. NOTE: Changes made to the OSS driver will need to be mirrored diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java index 6f321529b8f..82130f0b1b0 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseAggregate.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java index dc6e216b1af..8ceac2ab55c 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/DefaultCreateDseFunction.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.querybuilder.schema; diff --git a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java index 19588410f95..1515b4c440d 100644 --- a/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java +++ b/query-builder/src/main/java/com/datastax/dse/driver/internal/querybuilder/schema/package-info.java @@ -1,10 +1,18 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ - /** * This package effectively mirrors the Cassandra OSS default query and schema implementations to * allow extended schema and query building for the DSE driver. 
In general, a class in this package diff --git a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java index ffb36ba7ebd..62a4242a082 100644 --- a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java +++ b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseAggregateTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.querybuilder.schema; diff --git a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java index a2eeae20eb7..5695a1da9bc 100644 --- a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java +++ b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/schema/CreateDseFunctionTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.querybuilder.schema; From 1c55e2489f716d95a6768e953645ecdcf5a00404 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 15:40:36 -0700 Subject: [PATCH 198/979] Add DSE dependencies --- core/pom.xml | 44 +++++++++++++++++++++++++ pom.xml | 93 ++++++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 135 insertions(+), 2 deletions(-) diff --git a/core/pom.xml b/core/pom.xml index 4beb95b9b9d..49d967c69d8 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -80,6 +80,30 @@ org.hdrhistogram HdrHistogram + + com.esri.geometry + esri-geometry-api + + + org.apache.tinkerpop + gremlin-core + + + org.apache.tinkerpop + tinkergraph-gremlin + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.core + jackson-databind + + + org.reactivestreams + reactive-streams + com.github.stephenc.jcip jcip-annotations @@ -121,6 +145,26 @@ mockito-core test + + io.reactivex.rxjava2 + rxjava + test + + + org.reactivestreams + reactive-streams-tck + test + + + org.awaitility + awaitility + test + + + org.testng + testng + test + com.github.tomakehurst wiremock diff --git a/pom.xml b/pom.xml index e7eba6b772c..00c4e5c456a 100644 --- a/pom.xml +++ b/pom.xml @@ -48,6 +48,11 @@ 1.4.8 4.1.39.Final 1.7.26 + 1.2.1 + 3.3.3 + 1.0.2 + 2.10.0 + 2.10.0 1.1.7.3 1.6.0 @@ -63,8 
+68,11 @@ 2.5.0 2.0.1 1.1.4 - 2.10.0 - 2.10.0 + 2.2.2 + 20180130 + 1.9.12 + 3.1.6 + 2.0.0-M19 @@ -170,6 +178,31 @@ HdrHistogram ${hdrhistogram.version} + + com.esri.geometry + esri-geometry-api + ${esri.version} + + + org.apache.tinkerpop + gremlin-core + ${tinkerpop.version} + + + org.apache.tinkerpop + tinkergraph-gremlin + ${tinkerpop.version} + + + org.reactivestreams + reactive-streams + ${reactive-streams.version} + + + org.reactivestreams + reactive-streams-tck + ${reactive-streams.version} + com.github.stephenc.jcip jcip-annotations @@ -205,6 +238,11 @@ mockito-core 2.28.2 + + io.reactivex.rxjava2 + rxjava + ${rxjava.version} + com.datastax.oss.simulacron simulacron-native-server @@ -300,6 +338,57 @@ compile-testing 0.18 + + org.awaitility + awaitility + ${awaitility.version} + + + org.testng + testng + 6.14.3 + + + org.apache.directory.server + apacheds-core + ${apacheds.version} + + + org.slf4j + slf4j-log4j12 + + + + + org.apache.directory.server + apacheds-protocol-kerberos + ${apacheds.version} + + + org.apache.directory.server + apacheds-interceptor-kerberos + ${apacheds.version} + + + org.apache.directory.server + apacheds-protocol-ldap + ${apacheds.version} + + + org.apache.directory.server + apacheds-ldif-partition + ${apacheds.version} + + + org.apache.directory.server + apacheds-jdbm-partition + ${apacheds.version} + + + org.apache.directory.api + api-ldap-codec-standalone + 1.0.0-M26 + com.github.tomakehurst wiremock From 03840d1e179d3ac0f2625169be203d30653f60d1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 16:16:29 -0700 Subject: [PATCH 199/979] Add missing elements in POMs Dependencies, mentions of driver packages in POMs. The code compiles. 
--- integration-tests/pom.xml | 63 +++++++++++++++++++++++++++++++++++++++ pom.xml | 4 ++- 2 files changed, 66 insertions(+), 1 deletion(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 1dd5a102272..6d4fa9d4cda 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -105,6 +105,57 @@ lz4-java test + + io.reactivex.rxjava2 + rxjava + test + + + org.apache.directory.server + apacheds-core + test + + + org.apache.directory.server + apacheds-protocol-kerberos + test + + + org.apache.directory.server + apacheds-interceptor-kerberos + test + + + org.apache.directory.server + apacheds-protocol-ldap + test + + + org.apache.directory.server + apacheds-ldif-partition + test + + + org.apache.directory.server + apacheds-jdbm-partition + test + + + org.apache.directory.api + api-ldap-codec-standalone + 1.0.0-M26 + test + + + org.ops4j.pax.exam + pax-exam-junit4 + test + + + org.ops4j.pax.exam + pax-exam-container-native + test + org.ops4j.pax.exam pax-exam-junit4 @@ -171,6 +222,12 @@ ${simulacron.version} ${slf4j.version} ${snappy.version} + ${esri.version} + ${json.version} + ${legacy-jackson.version} + ${reactive-streams.version} + ${rxjava.version} + ${tinkerpop.version} @@ -196,6 +253,12 @@ ${simulacron.version} ${slf4j.version} ${snappy.version} + ${esri.version} + ${json.version} + ${legacy-jackson.version} + ${reactive-streams.version} + ${rxjava.version} + ${tinkerpop.version} diff --git a/pom.xml b/pom.xml index 00c4e5c456a..66b619eb712 100644 --- a/pom.xml +++ b/pom.xml @@ -688,7 +688,8 @@ limitations under the License.]]> false true all,-missing - com.datastax.oss.driver.internal + com.datastax.oss.driver.internal:com.datastax.dse.driver.internal + -preventleak com.datastax.oss.driver.internal + com.datastax.dse.driver.internal -preventleak com.datastax.oss.driver.shaded From 6d7965fa3dc0a5ad37bb91a09a58547356b1c11f Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 9 Oct 2019 18:23:45 -0700 Subject: [PATCH 200/979] 
Deprecate DseSession.DSE_DRIVER_COORDINATES --- .../dse/driver/api/core/DseSession.java | 14 ++++-------- .../context/DseStartupOptionsBuilder.java | 13 ----------- .../internal/core/graph/GraphSONUtils.java | 4 ++-- .../com/datastax/dse/driver/Driver.properties | 22 ------------------- 4 files changed, 6 insertions(+), 47 deletions(-) delete mode 100644 core/src/main/resources/com/datastax/dse/driver/Driver.properties diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java index 14946aae384..9dd24044f37 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java @@ -21,8 +21,6 @@ import com.datastax.dse.driver.api.core.graph.GraphSession; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.MavenCoordinates; -import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.internal.core.DefaultMavenCoordinates; import edu.umd.cs.findbugs.annotations.NonNull; /** A custom session with DSE-specific capabilities. */ @@ -34,15 +32,11 @@ public interface DseSession ContinuousReactiveSession { /** - * The Maven coordinates of the core DSE driver artifact. - * - *

      Note that this DSE driver depends on the DataStax Java driver for Apache Cassandra®. You - * can find the coordinates of the Cassandra driver at {@link Session#OSS_DRIVER_COORDINATES}. + * @deprecated the DSE driver is now part of the DataStax Java driver for Apache Cassandra®. + * This field is preserved for backward compatibility, but it returns the same value as {@link + * CqlSession#OSS_DRIVER_COORDINATES}. */ - @NonNull - MavenCoordinates DSE_DRIVER_COORDINATES = - DefaultMavenCoordinates.buildFromResourceAndPrint( - DseSession.class.getResource("/com/datastax/dse/driver/Driver.properties")); + @Deprecated @NonNull MavenCoordinates DSE_DRIVER_COORDINATES = CqlSession.OSS_DRIVER_COORDINATES; /** * Returns a builder to create a new instance. diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java index c26dd5bf2ad..5ee2a1d93f8 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java @@ -15,8 +15,6 @@ */ package com.datastax.dse.driver.internal.core.context; -import static com.datastax.dse.driver.api.core.DseSession.DSE_DRIVER_COORDINATES; - import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.uuid.Uuids; @@ -41,17 +39,6 @@ public DseStartupOptionsBuilder(InternalDriverContext context) { super(context); } - @Override - protected String getDriverVersion() { - // use the DSE Version instead - return DSE_DRIVER_COORDINATES.getVersion().toString(); - } - - @Override - protected String getDriverName() { - return DSE_DRIVER_COORDINATES.getName(); - } - /** * Sets the client ID to be sent in the Startup message options. 
* diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java index 6615760656b..0e2d832f0aa 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -15,8 +15,8 @@ */ package com.datastax.dse.driver.internal.core.graph; -import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.shaded.guava.common.base.Suppliers; import com.datastax.oss.driver.shaded.guava.common.base.Throwables; import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; @@ -53,7 +53,7 @@ public ObjectMapper load(@NonNull String graphSubProtocol) throws Exception { switch (graphSubProtocol) { case GRAPHSON_1_0: com.datastax.oss.driver.api.core.Version driverVersion = - DseSession.DSE_DRIVER_COORDINATES.getVersion(); + CqlSession.OSS_DRIVER_COORDINATES.getVersion(); Version driverJacksonVersion = new Version( driverVersion.getMajor(), diff --git a/core/src/main/resources/com/datastax/dse/driver/Driver.properties b/core/src/main/resources/com/datastax/dse/driver/Driver.properties deleted file mode 100644 index 23651bc0d4c..00000000000 --- a/core/src/main/resources/com/datastax/dse/driver/Driver.properties +++ /dev/null @@ -1,22 +0,0 @@ -# -# Copyright DataStax, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -driver.groupId=${project.groupId} -driver.artifactId=${project.artifactId} -driver.version=${project.version} -# Set the Driver name explicitly here as parent project properties don't resolve -# when bundled, so we can't use ${project.parent.name} -driver.name=DataStax Enterprise Java Driver From 4ded36ecda9b42c390704c0836da57ab87f68db4 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 10 Oct 2019 15:34:19 -0700 Subject: [PATCH 201/979] Promote DSE LBP as default implementation --- .../DseDcInferringLoadBalancingPolicy.java | 32 ++ .../loadbalancing/DseLoadBalancingPolicy.java | 487 +---------------- .../core/context/DefaultDriverContext.java | 43 +- .../DcInferringLoadBalancingPolicy.java | 4 +- .../DefaultLoadBalancingPolicy.java | 243 ++++++++- core/src/main/resources/dse-reference.conf | 12 - core/src/main/resources/reference.conf | 6 +- .../DseLoadBalancingPolicyEventsTest.java | 155 ------ .../DseLoadBalancingPolicyInitTest.java | 255 --------- .../DseLoadBalancingPolicyQueryPlanTest.java | 511 ------------------ .../DseLoadBalancingPolicyTestBase.java | 85 --- .../BasicLoadBalancingPolicyEventsTest.java | 2 +- ...nferringLoadBalancingPolicyEventsTest.java | 25 +- ...cInferringLoadBalancingPolicyInitTest.java | 12 +- ...rringLoadBalancingPolicyQueryPlanTest.java | 38 +- .../DefaultLoadBalancingPolicyEventsTest.java | 26 +- .../DefaultLoadBalancingPolicyInitTest.java | 10 + ...faultLoadBalancingPolicyQueryPlanTest.java | 336 +++++++++++- ...oadBalancingPolicyRequestTrackerTest.java} | 26 +- 19 files changed, 723 insertions(+), 1585 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseDcInferringLoadBalancingPolicy.java delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java delete mode 100644 
core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java rename core/src/test/java/com/datastax/{dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java => oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java} (88%) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseDcInferringLoadBalancingPolicy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseDcInferringLoadBalancingPolicy.java new file mode 100644 index 00000000000..9ff1851ac7d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseDcInferringLoadBalancingPolicy.java @@ -0,0 +1,32 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal.core.loadbalancing; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.loadbalancing.DcInferringLoadBalancingPolicy; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * @deprecated This class only exists for backward compatibility. 
It is equivalent to {@link + * DcInferringLoadBalancingPolicy}, which should now be used instead. + */ +@Deprecated +public class DseDcInferringLoadBalancingPolicy extends DcInferringLoadBalancingPolicy { + public DseDcInferringLoadBalancingPolicy( + @NonNull DriverContext context, @NonNull String profileName) { + super(context, profileName); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java index 189f38f6fc1..65ab95bdfeb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicy.java @@ -15,492 +15,17 @@ */ package com.datastax.dse.driver.internal.core.loadbalancing; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static java.util.concurrent.TimeUnit.MINUTES; - -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; -import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeState; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; 
-import com.datastax.oss.driver.internal.core.metadata.DefaultNode; -import com.datastax.oss.driver.internal.core.metadata.MetadataManager; -import com.datastax.oss.driver.internal.core.pool.ChannelPool; -import com.datastax.oss.driver.internal.core.session.DefaultSession; -import com.datastax.oss.driver.internal.core.util.ArrayUtils; -import com.datastax.oss.driver.internal.core.util.Reflection; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.nio.ByteBuffer; -import java.util.BitSet; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Queue; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.CopyOnWriteArraySet; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLongArray; -import java.util.function.IntUnaryOperator; -import java.util.function.Predicate; -import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** - * The DSE load balancing policy implementation. - * - *

      To activate this policy, modify the {@code basic.load-balancing-policy} section in the DSE - * driver configuration, for example: - * - *

      - * datastax-java-driver {
      - *   basic.load-balancing-policy {
      - *     class = com.datastax.dse.driver.internal.core.loadbalancing.DseLoadBalancingPolicy
      - *     local-datacenter = datacenter1
      - *   }
      - * }
      - * 
      - * - * See {@code reference.conf} (in the manual or OSS driver JAR) and {@code dse-reference.conf} (in - * the manual or DSE driver JAR) for more details. + * @deprecated This class only exists for backward compatibility. It is equivalent to {@link + * DefaultLoadBalancingPolicy}, which should now be used instead. */ -@ThreadSafe -public class DseLoadBalancingPolicy implements LoadBalancingPolicy, RequestTracker { - - private static final Logger LOG = LoggerFactory.getLogger(DseLoadBalancingPolicy.class); - - private static final Predicate INCLUDE_ALL_NODES = n -> true; - private static final IntUnaryOperator INCREMENT = i -> (i == Integer.MAX_VALUE) ? 0 : i + 1; - - private static final long NEWLY_UP_INTERVAL_NANOS = MINUTES.toNanos(1); - private static final int MAX_IN_FLIGHT_THRESHOLD = 10; - private static final long RESPONSE_COUNT_RESET_INTERVAL_NANOS = MILLISECONDS.toNanos(200); - - @NonNull private final String logPrefix; - @NonNull private final MetadataManager metadataManager; - @NonNull private final Predicate filter; - private final boolean isDefaultPolicy; - - @Nullable @VisibleForTesting volatile String localDc; - @NonNull private volatile DistanceReporter distanceReporter = (node, distance) -> {}; - - private final AtomicInteger roundRobinAmount = new AtomicInteger(); - @VisibleForTesting final CopyOnWriteArraySet localDcLiveNodes = new CopyOnWriteArraySet<>(); - @VisibleForTesting final Map responseTimes = new ConcurrentHashMap<>(); - @VisibleForTesting final Map upTimes = new ConcurrentHashMap<>(); - +@Deprecated +public class DseLoadBalancingPolicy extends DefaultLoadBalancingPolicy { public DseLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { - this.logPrefix = context.getSessionName() + "|" + profileName; - this.metadataManager = ((InternalDriverContext) context).getMetadataManager(); - this.isDefaultPolicy = profileName.equals(DriverExecutionProfile.DEFAULT_NAME); - this.localDc = 
getLocalDcFromConfig((InternalDriverContext) context, profileName); - Predicate filterFromConfig = getFilterFromConfig(context, profileName); - this.filter = - node -> { - String localDc = this.localDc; - if (localDc != null && !localDc.equals(node.getDatacenter())) { - LOG.debug( - "[{}] Ignoring {} because it doesn't belong to the local DC {}", - logPrefix, - node, - localDc); - return false; - } else if (!filterFromConfig.test(node)) { - LOG.debug( - "[{}] Ignoring {} because it doesn't match the user-provided predicate", - logPrefix, - node); - return false; - } else { - return true; - } - }; - ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); - } - - @Override - public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { - this.distanceReporter = distanceReporter; - - Set contactPoints = metadataManager.getContactPoints(); - if (localDc == null) { - if (metadataManager.wasImplicitContactPoint()) { - // No explicit contact points provided => the driver used the default (127.0.0.1:9042), and - // we allow inferring the local DC in this case - assert contactPoints.size() == 1; - Node contactPoint = contactPoints.iterator().next(); - localDc = contactPoint.getDatacenter(); - LOG.debug("[{}] Local DC set from contact point {}: {}", logPrefix, contactPoint, localDc); - } else { - throw new IllegalStateException( - "You provided explicit contact points, the local DC must be specified (see " - + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER.getPath() - + " in the config)"); - } - } else { - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (Node node : contactPoints) { - String datacenter = node.getDatacenter(); - if (!Objects.equals(localDc, datacenter)) { - builder.put(node, (datacenter == null) ? 
"" : datacenter); - } - } - ImmutableMap badContactPoints = builder.build(); - if (isDefaultPolicy && !badContactPoints.isEmpty()) { - LOG.warn( - "[{}] You specified {} as the local DC, but some contact points are from a different DC ({})", - logPrefix, - localDc, - badContactPoints); - } - } - - for (Node node : nodes.values()) { - if (filter.test(node)) { - distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (node.getState() != NodeState.DOWN) { - // This includes state == UNKNOWN. If the node turns out to be unreachable, this will be - // detected when we try to open a pool to it, it will get marked down and this will be - // signaled back to this policy - localDcLiveNodes.add(node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } - } - - @NonNull - @Override - public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { - // Take a snapshot since the set is concurrent: - Object[] currentNodes = localDcLiveNodes.toArray(); - - Set allReplicas = getReplicas(request, session); - int replicaCount = 0; // in currentNodes - - if (!allReplicas.isEmpty()) { - - // Move replicas to the beginning of the plan - for (int i = 0; i < currentNodes.length; i++) { - Node node = (Node) currentNodes[i]; - if (allReplicas.contains(node)) { - ArrayUtils.bubbleUp(currentNodes, i, replicaCount); - replicaCount++; - } - } - - if (replicaCount > 1) { - - shuffleHead(currentNodes, replicaCount); - - if (replicaCount > 2) { - - assert session != null; - - // Test replicas health - Node newestUpReplica = null; - BitSet unhealthyReplicas = null; // bit mask storing indices of unhealthy replicas - long mostRecentUpTimeNanos = -1; - long now = nanoTime(); - for (int i = 0; i < replicaCount; i++) { - Node node = (Node) currentNodes[i]; - Long upTimeNanos = upTimes.get(node); - if (upTimeNanos != null - && now - upTimeNanos - NEWLY_UP_INTERVAL_NANOS < 0 - && upTimeNanos - mostRecentUpTimeNanos > 0) { - newestUpReplica = node; - 
mostRecentUpTimeNanos = upTimeNanos; - } - if (newestUpReplica == null && isUnhealthy(node, session, now)) { - if (unhealthyReplicas == null) { - unhealthyReplicas = new BitSet(replicaCount); - } - unhealthyReplicas.set(i); - } - } - - // When: - // - there isn't any newly UP replica and - // - there is one or more unhealthy replicas and - // - there is a majority of healthy replicas - int unhealthyReplicasCount = - unhealthyReplicas == null ? 0 : unhealthyReplicas.cardinality(); - if (newestUpReplica == null - && unhealthyReplicasCount > 0 - && unhealthyReplicasCount < (replicaCount / 2.0)) { - - // Reorder the unhealthy replicas to the back of the list - // Start from the back of the replicas, then move backwards; - // stop once all unhealthy replicas are moved to the back. - int counter = 0; - for (int i = replicaCount - 1; i >= 0 && counter < unhealthyReplicasCount; i--) { - if (unhealthyReplicas.get(i)) { - ArrayUtils.bubbleDown(currentNodes, i, replicaCount - 1 - counter); - counter++; - } - } - } - - // When: - // - there is a newly UP replica and - // - the replica in first or second position is the most recent replica marked as UP and - // - dice roll 1d4 != 1 - else if ((newestUpReplica == currentNodes[0] || newestUpReplica == currentNodes[1]) - && diceRoll1d4() != 1) { - - // Send it to the back of the replicas - ArrayUtils.bubbleDown( - currentNodes, newestUpReplica == currentNodes[0] ? 
0 : 1, replicaCount - 1); - } - - // Reorder the first two replicas in the shuffled list based on the number of - // in-flight requests - if (getInFlight((Node) currentNodes[0], session) - > getInFlight((Node) currentNodes[1], session)) { - ArrayUtils.swap(currentNodes, 0, 1); - } - } - } - } - - LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount); - - // Round-robin the remaining nodes - ArrayUtils.rotate( - currentNodes, - replicaCount, - currentNodes.length - replicaCount, - roundRobinAmount.getAndUpdate(INCREMENT)); - - return new QueryPlan(currentNodes); - } - - @Override - public void onAdd(@NonNull Node node) { - if (filter.test(node)) { - LOG.debug("[{}] {} was added, setting distance to LOCAL", logPrefix, node); - // Setting to a non-ignored distance triggers the session to open a pool, which will in turn - // set the node UP when the first channel gets opened. - distanceReporter.setDistance(node, NodeDistance.LOCAL); - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } - - @Override - public void onUp(@NonNull Node node) { - if (filter.test(node)) { - // Normally this is already the case, but the filter could be dynamic and have ignored the - // node previously. 
- distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (localDcLiveNodes.add(node)) { - upTimes.put(node, nanoTime()); - LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } - } - - @Override - public void onDown(@NonNull Node node) { - if (localDcLiveNodes.remove(node)) { - upTimes.remove(node); - LOG.debug("[{}] {} went DOWN, removed from live set", logPrefix, node); - } - } - - @Override - public void onRemove(@NonNull Node node) { - if (localDcLiveNodes.remove(node)) { - upTimes.remove(node); - LOG.debug("[{}] {} was removed, removed from live set", logPrefix, node); - } - } - - @Override - public void onNodeSuccess( - @NonNull Request request, - long latencyNanos, - @NonNull DriverExecutionProfile executionProfile, - @NonNull Node node, - @NonNull String logPrefix) { - updateResponseTimes(node); - } - - @Override - public void onNodeError( - @NonNull Request request, - @NonNull Throwable error, - long latencyNanos, - @NonNull DriverExecutionProfile executionProfile, - @NonNull Node node, - @NonNull String logPrefix) { - updateResponseTimes(node); - } - - @Override - public void close() {} - - @VisibleForTesting - void shuffleHead(Object[] array, int n) { - ArrayUtils.shuffleHead(array, n); - } - - @VisibleForTesting - long nanoTime() { - return System.nanoTime(); - } - - @VisibleForTesting - int diceRoll1d4() { - return ThreadLocalRandom.current().nextInt(4); - } - - private Set getReplicas(@Nullable Request request, @Nullable Session session) { - if (request == null || session == null) { - return Collections.emptySet(); - } - - // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, - // so we only call each method when strictly necessary (which is why the code below looks a bit - // weird). 
- CqlIdentifier keyspace = request.getKeyspace(); - if (keyspace == null) { - keyspace = request.getRoutingKeyspace(); - } - if (keyspace == null && session.getKeyspace().isPresent()) { - keyspace = session.getKeyspace().get(); - } - if (keyspace == null) { - return Collections.emptySet(); - } - - Token token = request.getRoutingToken(); - ByteBuffer key = (token == null) ? request.getRoutingKey() : null; - if (token == null && key == null) { - return Collections.emptySet(); - } - - Optional maybeTokenMap = metadataManager.getMetadata().getTokenMap(); - if (maybeTokenMap.isPresent()) { - TokenMap tokenMap = maybeTokenMap.get(); - return (token != null) - ? tokenMap.getReplicas(keyspace, token) - : tokenMap.getReplicas(keyspace, key); - } else { - return Collections.emptySet(); - } - } - - private boolean isUnhealthy(@NonNull Node node, @NonNull Session session, long now) { - return isBusy(node, session) && isResponseRateInsufficient(node, now); - } - - private boolean isBusy(@NonNull Node node, @NonNull Session session) { - return getInFlight(node, session) >= MAX_IN_FLIGHT_THRESHOLD; - } - - @VisibleForTesting - boolean isResponseRateInsufficient(@NonNull Node node, long now) { - // response rate is considered insufficient when less than 2 responses were obtained in - // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS. - if (responseTimes.containsKey(node)) { - AtomicLongArray array = responseTimes.get(node); - if (array.length() == 2) { - long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS; - long leastRecent = array.get(0); - return leastRecent - threshold < 0; - } - } - return true; - } - - private void updateResponseTimes(@NonNull Node node) { - responseTimes.compute( - node, - (n, array) -> { - // The array stores at most two timestamps, since we don't need more; - // the first one is always the least recent one, and hence the one to inspect. 
- long now = nanoTime(); - if (array == null) { - array = new AtomicLongArray(1); - array.set(0, now); - } else if (array.length() == 1) { - long previous = array.get(0); - array = new AtomicLongArray(2); - array.set(0, previous); - array.set(1, now); - } else { - array.set(0, array.get(1)); - array.set(1, now); - } - return array; - }); - } - - private String getLocalDcFromConfig( - @NonNull InternalDriverContext context, @NonNull String profileName) { - // see if the local datacenter has been set programmatically - String localDataCenter = context.getLocalDatacenter(profileName); - if (localDataCenter != null) { - LOG.debug("[{}] Local DC set from builder: {}", logPrefix, localDataCenter); - return localDataCenter; - } else { - // it's not been set programmatically, try to get it from config - DriverExecutionProfile config = context.getConfig().getProfile(profileName); - localDataCenter = config.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); - if (localDataCenter != null) { - LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDataCenter); - } - return localDataCenter; - } - } - - private static int getInFlight(@NonNull Node node, @NonNull Session session) { - // The cast will always succeed because there's no way to replace the internal session impl - ChannelPool pool = ((DefaultSession) session).getPools().get(node); - // Note: getInFlight() includes orphaned ids, which is what we want as we need to account - // for requests that were cancelled or timed out (since the node is likely to still be - // processing them). - return (pool == null) ? 
0 : pool.getInFlight(); - } - - private static Predicate getFilterFromConfig( - @NonNull DriverContext context, @NonNull String profileName) { - Predicate filterFromBuilder = - ((InternalDriverContext) context).getNodeFilter(profileName); - if (filterFromBuilder != null) { - return filterFromBuilder; - } else { - @SuppressWarnings("unchecked") - Predicate filter = - Reflection.buildFromConfig( - (InternalDriverContext) context, - profileName, - DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, - Predicate.class) - .orElse(INCLUDE_ALL_NODES); - return filter; - } + super(context, profileName); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 4a20833d860..0e017d3a946 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.context; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; import com.datastax.oss.driver.api.core.auth.AuthProvider; @@ -70,6 +71,7 @@ import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.ssl.JdkSslHandlerFactory; import com.datastax.oss.driver.internal.core.ssl.SslHandlerFactory; +import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.internal.core.util.Reflection; @@ -288,7 +290,8 @@ protected Map buildLoadBalancingPolicies() { this, 
DefaultDriverOption.LOAD_BALANCING_POLICY, LoadBalancingPolicy.class, - "com.datastax.oss.driver.internal.core.loadbalancing"); + "com.datastax.oss.driver.internal.core.loadbalancing", + "com.datastax.dse.driver.internal.core.loadbalancing"); } protected Map buildRetryPolicies() { @@ -521,19 +524,31 @@ protected SchemaChangeListener buildSchemaChangeListener( } protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { - return (requestTrackerFromBuilder != null) - ? requestTrackerFromBuilder - : Reflection.buildFromConfig( - this, - DefaultDriverOption.REQUEST_TRACKER_CLASS, - RequestTracker.class, - "com.datastax.oss.driver.internal.core.tracker") - .orElseThrow( - () -> - new IllegalArgumentException( - String.format( - "Missing request tracker, check your configuration (%s)", - DefaultDriverOption.REQUEST_TRACKER_CLASS))); + RequestTracker requestTrackerFromConfig = + (requestTrackerFromBuilder != null) + ? requestTrackerFromBuilder + : Reflection.buildFromConfig( + this, + DefaultDriverOption.REQUEST_TRACKER_CLASS, + RequestTracker.class, + "com.datastax.oss.driver.internal.core.tracker") + .orElseThrow( + () -> + new IllegalArgumentException( + String.format( + "Missing request tracker, check your configuration (%s)", + DefaultDriverOption.REQUEST_TRACKER_CLASS))); + + // The default LBP needs to add its own tracker + if (requestTrackerFromConfig instanceof MultiplexingRequestTracker) { + return requestTrackerFromConfig; + } else { + MultiplexingRequestTracker multiplexingRequestTracker = new MultiplexingRequestTracker(); + if (!(requestTrackerFromConfig instanceof NoopRequestTracker)) { + multiplexingRequestTracker.register(requestTrackerFromConfig); + } + return multiplexingRequestTracker; + } } protected Optional buildAuthProvider(AuthProvider authProviderFromBuilder) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java index 279f8b73e5c..39b17aa5154 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicy.java @@ -59,14 +59,14 @@ * provided contact points, if and only if they are all located in the same datacenter. * * - *

      Query plan: see {@link BasicLoadBalancingPolicy} for details on the computation of + *

      Query plan: see {@link DefaultLoadBalancingPolicy} for details on the computation of * query plans. * *

      This class is not recommended for normal users who should always prefer {@link * DefaultLoadBalancingPolicy}. */ @ThreadSafe -public class DcInferringLoadBalancingPolicy extends BasicLoadBalancingPolicy { +public class DcInferringLoadBalancingPolicy extends DefaultLoadBalancingPolicy { public DcInferringLoadBalancingPolicy( @NonNull DriverContext context, @NonNull String profileName) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index c7cd25215e0..97c38c2b13a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -15,15 +15,36 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static java.util.concurrent.TimeUnit.MINUTES; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.internal.core.loadbalancing.helper.MandatoryLocalDcHelper; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.util.ArrayUtils; +import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import edu.umd.cs.findbugs.annotations.NonNull; 
+import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.BitSet; import java.util.Map; import java.util.Optional; +import java.util.Queue; +import java.util.Set; import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.atomic.AtomicLongArray; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The default load balancing policy implementation. @@ -57,19 +78,235 @@ * implementation will infer the local datacenter from the implicit contact point (localhost). * * - *

      Query plan: see {@link BasicLoadBalancingPolicy} for details on the computation of - * query plans. + *

      Query plan: This implementation prioritizes replica nodes over non-replica ones; if + * more than one replica is available, the replicas will be shuffled; if more than 2 replicas are + * available, they will be ordered from most healthy to least healthy ("Power of 2 choices" or busy + * node avoidance algorithm). Non-replica nodes will be included in a round-robin fashion. If the + * local datacenter is defined (see above), query plans will only include local nodes, never remote + * ones; if it is unspecified however, query plans may contain nodes from different datacenters. */ @ThreadSafe -public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy { +public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy implements RequestTracker { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultLoadBalancingPolicy.class); + + private static final long NEWLY_UP_INTERVAL_NANOS = MINUTES.toNanos(1); + private static final int MAX_IN_FLIGHT_THRESHOLD = 10; + private static final long RESPONSE_COUNT_RESET_INTERVAL_NANOS = MILLISECONDS.toNanos(200); + + protected final Map responseTimes = new ConcurrentHashMap<>(); + protected final Map upTimes = new ConcurrentHashMap<>(); public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { super(context, profileName); } + @Override + public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { + super.init(nodes, distanceReporter); + ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); + } + @NonNull @Override protected Optional discoverLocalDc(@NonNull Map nodes) { return new MandatoryLocalDcHelper(context, profile, logPrefix).discoverLocalDc(nodes); } + + @NonNull + @Override + public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { + // Take a snapshot since the set is concurrent: + Object[] currentNodes = liveNodes.toArray(); + + Set allReplicas = getReplicas(request, 
session); + int replicaCount = 0; // in currentNodes + + if (!allReplicas.isEmpty()) { + + // Move replicas to the beginning of the plan + for (int i = 0; i < currentNodes.length; i++) { + Node node = (Node) currentNodes[i]; + if (allReplicas.contains(node)) { + ArrayUtils.bubbleUp(currentNodes, i, replicaCount); + replicaCount++; + } + } + + if (replicaCount > 1) { + + shuffleHead(currentNodes, replicaCount); + + if (replicaCount > 2) { + + assert session != null; + + // Test replicas health + Node newestUpReplica = null; + BitSet unhealthyReplicas = null; // bit mask storing indices of unhealthy replicas + long mostRecentUpTimeNanos = -1; + long now = nanoTime(); + for (int i = 0; i < replicaCount; i++) { + Node node = (Node) currentNodes[i]; + Long upTimeNanos = upTimes.get(node); + if (upTimeNanos != null + && now - upTimeNanos - NEWLY_UP_INTERVAL_NANOS < 0 + && upTimeNanos - mostRecentUpTimeNanos > 0) { + newestUpReplica = node; + mostRecentUpTimeNanos = upTimeNanos; + } + if (newestUpReplica == null && isUnhealthy(node, session, now)) { + if (unhealthyReplicas == null) { + unhealthyReplicas = new BitSet(replicaCount); + } + unhealthyReplicas.set(i); + } + } + + // When: + // - there isn't any newly UP replica and + // - there is one or more unhealthy replicas and + // - there is a majority of healthy replicas + int unhealthyReplicasCount = + unhealthyReplicas == null ? 0 : unhealthyReplicas.cardinality(); + if (newestUpReplica == null + && unhealthyReplicasCount > 0 + && unhealthyReplicasCount < (replicaCount / 2.0)) { + + // Reorder the unhealthy replicas to the back of the list + // Start from the back of the replicas, then move backwards; + // stop once all unhealthy replicas are moved to the back. 
+ int counter = 0; + for (int i = replicaCount - 1; i >= 0 && counter < unhealthyReplicasCount; i--) { + if (unhealthyReplicas.get(i)) { + ArrayUtils.bubbleDown(currentNodes, i, replicaCount - 1 - counter); + counter++; + } + } + } + + // When: + // - there is a newly UP replica and + // - the replica in first or second position is the most recent replica marked as UP and + // - dice roll 1d4 != 1 + else if ((newestUpReplica == currentNodes[0] || newestUpReplica == currentNodes[1]) + && diceRoll1d4() != 1) { + + // Send it to the back of the replicas + ArrayUtils.bubbleDown( + currentNodes, newestUpReplica == currentNodes[0] ? 0 : 1, replicaCount - 1); + } + + // Reorder the first two replicas in the shuffled list based on the number of + // in-flight requests + if (getInFlight((Node) currentNodes[0], session) + > getInFlight((Node) currentNodes[1], session)) { + ArrayUtils.swap(currentNodes, 0, 1); + } + } + } + } + + LOG.trace("[{}] Prioritizing {} local replicas", logPrefix, replicaCount); + + // Round-robin the remaining nodes + ArrayUtils.rotate( + currentNodes, + replicaCount, + currentNodes.length - replicaCount, + roundRobinAmount.getAndUpdate(INCREMENT)); + + return new QueryPlan(currentNodes); + } + + @Override + public void onNodeSuccess( + @NonNull Request request, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + updateResponseTimes(node); + } + + @Override + public void onNodeError( + @NonNull Request request, + @NonNull Throwable error, + long latencyNanos, + @NonNull DriverExecutionProfile executionProfile, + @NonNull Node node, + @NonNull String logPrefix) { + updateResponseTimes(node); + } + + /** Exposed as a protected method so that it can be accessed by tests */ + @Override + protected void shuffleHead(Object[] currentNodes, int replicaCount) { + super.shuffleHead(currentNodes, replicaCount); + } + + /** Exposed as a protected method so that it can be accessed by 
tests */ + protected long nanoTime() { + return System.nanoTime(); + } + + /** Exposed as a protected method so that it can be accessed by tests */ + protected int diceRoll1d4() { + return ThreadLocalRandom.current().nextInt(4); + } + + protected boolean isUnhealthy(@NonNull Node node, @NonNull Session session, long now) { + return isBusy(node, session) && isResponseRateInsufficient(node, now); + } + + protected boolean isBusy(@NonNull Node node, @NonNull Session session) { + return getInFlight(node, session) >= MAX_IN_FLIGHT_THRESHOLD; + } + + protected boolean isResponseRateInsufficient(@NonNull Node node, long now) { + // response rate is considered insufficient when less than 2 responses were obtained in + // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS. + if (responseTimes.containsKey(node)) { + AtomicLongArray array = responseTimes.get(node); + if (array.length() == 2) { + long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS; + long leastRecent = array.get(0); + return leastRecent - threshold < 0; + } + } + return true; + } + + protected void updateResponseTimes(@NonNull Node node) { + responseTimes.compute( + node, + (n, array) -> { + // The array stores at most two timestamps, since we don't need more; + // the first one is always the least recent one, and hence the one to inspect. 
+ long now = nanoTime(); + if (array == null) { + array = new AtomicLongArray(1); + array.set(0, now); + } else if (array.length() == 1) { + long previous = array.get(0); + array = new AtomicLongArray(2); + array.set(0, previous); + array.set(1, now); + } else { + array.set(0, array.get(1)); + array.set(1, now); + } + return array; + }); + } + + protected int getInFlight(@NonNull Node node, @NonNull Session session) { + // The cast will always succeed because there's no way to replace the internal session impl + ChannelPool pool = ((DefaultSession) session).getPools().get(node); + // Note: getInFlight() includes orphaned ids, which is what we want as we need to account + // for requests that were cancelled or timed out (since the node is likely to still be + // processing them). + return (pool == null) ? 0 : pool.getInFlight(); + } } diff --git a/core/src/main/resources/dse-reference.conf b/core/src/main/resources/dse-reference.conf index 0fbac85b617..712b4d4b91c 100644 --- a/core/src/main/resources/dse-reference.conf +++ b/core/src/main/resources/dse-reference.conf @@ -38,18 +38,6 @@ datastax-java-driver { # Modifiable at runtime: no # Overridable in a profile: no // application.version = - - load-balancing-policy { - # The DSE driver ships with a specific load balancing policy implementation that is capable, - # among other things, of avoiding slow nodes and handling analytics queries: - # DseLoadBalancingPolicy. - # - # You can also specify any other custom implementation class, provided that it implements - # LoadBalancingPolicy and has a public constructor with two arguments: the DriverContext and a - # String representing the profile name. 
- class = DseLoadBalancingPolicy - - } } basic.graph { diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index b433ff375b6..bc541083253 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -150,8 +150,10 @@ datastax-java-driver { # - when the policies assign distances to nodes, the driver uses the closest assigned distance # for any given node. basic.load-balancing-policy { - # The class of the policy. If it is not qualified, the driver assumes that it resides in the - # package com.datastax.oss.driver.internal.core.loadbalancing. + # The class of the policy. If it is not qualified, the driver assumes that it resides in one of + # the following packages: + # - com.datastax.oss.driver.internal.core.loadbalancing. + # - com.datastax.dse.driver.internal.core.loadbalancing. # # The driver provides a single implementation out of the box: DefaultLoadBalancingPolicy. # diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java deleted file mode 100644 index c1a7d96ae5b..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyEventsTest.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.loadbalancing; - -import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.IGNORED; -import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.LOCAL; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.BDDMockito.given; -import static org.mockito.BDDMockito.then; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.reset; - -import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import java.util.UUID; -import org.junit.Test; - -public class DseLoadBalancingPolicyEventsTest extends DseLoadBalancingPolicyTestBase { - - @Test - public void should_remove_down_node_from_live_set() { - // Given - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onDown(node2); - - // Then - then(distanceReporter).should(never()).setDistance(eq(node2), any(NodeDistance.class)); - assertThat(policy.localDcLiveNodes).containsOnly(node1); - } - - @Test - public void should_remove_removed_node_from_live_set() { - // Given - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onRemove(node2); - - // Then - then(distanceReporter).should(never()).setDistance(eq(node2), any(NodeDistance.class)); - assertThat(policy.localDcLiveNodes).containsOnly(node1); - } - - @Test - public void should_set_added_node_to_local() { - // Given - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onAdd(node3); - - // Then - // Not added to the live set yet, we're waiting for the pool to open - then(distanceReporter).should().setDistance(node3, LOCAL); - 
assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - } - - @Test - public void should_ignore_added_node_when_filtered() { - // Given - given(filter.test(node3)).willReturn(false); - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onAdd(node3); - - // Then - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - } - - @Test - public void should_ignore_added_node_when_remote_dc() { - // Given - given(node3.getDatacenter()).willReturn("dc2"); - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onAdd(node3); - - // Then - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - } - - @Test - public void should_add_up_node_to_live_set() { - // Given - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onUp(node3); - - // Then - then(distanceReporter).should().setDistance(node3, LOCAL); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2, node3); - } - - @Test - public void should_ignore_up_node_when_filtered() { - // Given - given(filter.test(node3)).willReturn(false); - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onUp(node3); - - // Then - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - } - - @Test - public void should_ignore_up_node_when_remote_dc() { - // Given - given(node3.getDatacenter()).willReturn("dc2"); - DseLoadBalancingPolicy policy = createAndInitPolicy(); - - // When - policy.onUp(node3); - - // Then - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - } - - private DseLoadBalancingPolicy createAndInitPolicy() { - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); - DseLoadBalancingPolicy policy = 
new DseLoadBalancingPolicy(context, DEFAULT_NAME); - policy.init( - ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.localDcLiveNodes).containsOnly(node1, node2); - reset(distanceReporter); - return policy; - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java deleted file mode 100644 index aab118c5d95..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyInitTest.java +++ /dev/null @@ -1,255 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.loadbalancing; - -import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS; -import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; -import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.IGNORED; -import static com.datastax.oss.driver.api.core.loadbalancing.NodeDistance.LOCAL; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.catchThrowable; -import static org.assertj.core.api.Assertions.filter; -import static org.mockito.BDDMockito.given; -import static org.mockito.BDDMockito.then; -import static org.mockito.BDDMockito.verify; -import static org.mockito.BDDMockito.when; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.never; - -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.spi.ILoggingEvent; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeState; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import java.util.UUID; -import java.util.function.Predicate; -import org.junit.Test; - -public class DseLoadBalancingPolicyInitTest extends DseLoadBalancingPolicyTestBase { - - @Test - public void should_infer_local_dc_if_no_explicit_contact_points() { - // Given - given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn(null); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); - given(metadataManager.wasImplicitContactPoint()).willReturn(true); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - 
policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); - - // Then - assertThat(policy.localDc).isEqualTo("dc1"); - } - - @Test - public void should_require_local_dc_if_explicit_contact_points() { - // Given - given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn(null); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node2)); - given(metadataManager.wasImplicitContactPoint()).willReturn(false); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - Throwable error = - catchThrowable( - () -> policy.init(ImmutableMap.of(UUID.randomUUID(), node2), distanceReporter)); - - // Then - assertThat(error) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining( - "You provided explicit contact points, the local DC must be specified"); - } - - @Test - public void should_warn_if_contact_points_not_in_local_dc() { - // Given - given(node2.getDatacenter()).willReturn("dc2"); - given(node3.getDatacenter()).willReturn("dc3"); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2, node3)); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - then(appender).should(atLeast(1)).doAppend(loggingEventCaptor.capture()); - Iterable warnLogs = - filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); - assertThat(warnLogs).hasSize(1); - assertThat(warnLogs.iterator().next().getFormattedMessage()) - .contains( - "You specified dc1 as the local DC, but some contact points are from a different DC") - .contains("node2=dc2") - .contains("node3=dc3"); - } - - @Test - public void should_not_warn_if_contact_points_not_in_local_dc_and_profile_not_default() { - // Given - given(node2.getDatacenter()).willReturn("dc2"); - 
given(node3.getDatacenter()).willReturn("dc3"); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2, node3)); - given(config.getProfile("Non default")).willReturn(profile); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, "Non default"); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - then(appender).should(never()).doAppend(loggingEventCaptor.capture()); - Iterable warnLogs = - filter(loggingEventCaptor.getAllValues()).with("level", Level.WARN).get(); - assertThat(warnLogs).isEmpty(); - } - - @Test - public void should_include_nodes_from_local_dc() { - // Given - // make node3 not a contact point to cover all cases - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2)); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - given(node1.getState()).willReturn(NodeState.UP); - given(node2.getState()).willReturn(NodeState.DOWN); - given(node3.getState()).willReturn(NodeState.UNKNOWN); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - // Set distance for all nodes in the local DC - then(distanceReporter).should().setDistance(node1, LOCAL); - then(distanceReporter).should().setDistance(node2, LOCAL); - then(distanceReporter).should().setDistance(node3, LOCAL); - // But only include UP or UNKNOWN nodes in the live set - assertThat(policy.localDcLiveNodes).containsExactly(node1, node3); - } - - @Test - public void should_ignore_nodes_from_remote_dcs() { - // Given - given(node2.getDatacenter()).willReturn("dc2"); - given(node3.getDatacenter()).willReturn("dc3"); - // make node3 not a contact point to cover all cases - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1, node2)); - DseLoadBalancingPolicy policy = 
new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - then(distanceReporter).should().setDistance(node1, LOCAL); - then(distanceReporter).should().setDistance(node2, IGNORED); - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsExactly(node1); - } - - @Test - public void should_ignore_nodes_excluded_by_programmatic_filter() { - // Given - given(filter.test(node2)).willReturn(false); - given(filter.test(node3)).willReturn(false); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - then(distanceReporter).should().setDistance(node1, LOCAL); - then(distanceReporter).should().setDistance(node2, IGNORED); - then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsExactly(node1); - } - - @Test - public void should_ignore_nodes_excluded_by_configured_filter() { - // Given - given(context.getNodeFilter(DEFAULT_NAME)).willReturn(null); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); - given(profile.isDefined(LOAD_BALANCING_FILTER_CLASS)).willReturn(true); - given(profile.getString(LOAD_BALANCING_FILTER_CLASS)).willReturn(MyFilter.class.getName()); - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // When - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), - distanceReporter); - - // Then - then(distanceReporter).should().setDistance(node1, LOCAL); - then(distanceReporter).should().setDistance(node2, IGNORED); - 
then(distanceReporter).should().setDistance(node3, IGNORED); - assertThat(policy.localDcLiveNodes).containsExactly(node1); - } - - @Test - public void should_use_local_dc_if_provided_via_config() { - // Given - // the parent class sets the config option to "dc1" - - // When - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // Then - assertThat(policy.localDc).isEqualTo("dc1"); - } - - @Test - public void should_use_local_dc_if_provided_via_context() { - // Given - when(context.getLocalDatacenter(DEFAULT_NAME)).thenReturn("dc1"); - // note: programmatic takes priority, the config won't even be inspected so no need to stub the - // option to null - - // When - DseLoadBalancingPolicy policy = new DseLoadBalancingPolicy(context, DEFAULT_NAME); - - // Then - assertThat(policy.localDc).isEqualTo("dc1"); - verify(profile, never()).getString(LOAD_BALANCING_LOCAL_DATACENTER, null); - } - - public static class MyFilter implements Predicate { - @SuppressWarnings("unused") - public MyFilter(DriverContext context, String profileName) {} - - @Override - public boolean test(Node node) { - return node.toString().equals("node1"); - } - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java deleted file mode 100644 index 34abc2b7af7..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyQueryPlanTest.java +++ /dev/null @@ -1,511 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.loadbalancing; - -import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static java.util.Collections.emptySet; -import static java.util.stream.Collectors.toList; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.BDDMockito.given; -import static org.mockito.BDDMockito.then; -import static org.mockito.Mockito.atLeast; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; - -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.internal.core.pool.ChannelPool; -import com.datastax.oss.driver.internal.core.session.DefaultSession; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import com.datastax.oss.protocol.internal.util.Bytes; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.Optional; -import java.util.Queue; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicLongArray; -import java.util.stream.IntStream; -import org.junit.Before; -import 
org.junit.Test; -import org.mockito.Mock; - -public class DseLoadBalancingPolicyQueryPlanTest extends DseLoadBalancingPolicyTestBase { - - private static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); - private static final ByteBuffer ROUTING_KEY = Bytes.fromHexString("0xdeadbeef"); - - private static final long T0 = Long.MIN_VALUE; - private static final long T1 = 100; - private static final long T2 = 200; - private static final long T3 = 300; - - @Mock private Node node4; - @Mock private Node node5; - @Mock private ChannelPool pool1; - @Mock private ChannelPool pool2; - @Mock private ChannelPool pool3; - @Mock private ChannelPool pool4; - @Mock private ChannelPool pool5; - @Mock private DefaultSession session; - @Mock private Metadata metadata; - @Mock private TokenMap tokenMap; - @Mock private Token routingToken; - - private DseLoadBalancingPolicy policy; - private long nanoTime; - private int diceRoll; - - @Before - @Override - public void setUp() { - super.setUp(); - nanoTime = T1; - diceRoll = 4; - given(node4.getDatacenter()).willReturn("dc1"); - given(node5.getDatacenter()).willReturn("dc1"); - given(session.getPools()) - .willReturn( - ImmutableMap.of( - node1, pool1, - node2, pool2, - node3, pool3, - node4, pool4, - node5, pool5)); - given(context.getMetadataManager()).willReturn(metadataManager); - given(metadataManager.getMetadata()).willReturn(metadata); - given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); - given(metadata.getTokenMap()).willAnswer(invocation -> Optional.of(tokenMap)); - policy = - spy( - new DseLoadBalancingPolicy(context, DEFAULT_NAME) { - @Override - void shuffleHead(Object[] array, int n) {} - - @Override - long nanoTime() { - return nanoTime; - } - - @Override - int diceRoll1d4() { - return diceRoll; - } - }); - policy.init( - ImmutableMap.of( - UUID.randomUUID(), node1, - UUID.randomUUID(), node2, - UUID.randomUUID(), node3, - UUID.randomUUID(), node4, - UUID.randomUUID(), node5), - 
distanceReporter); - - // Note: tests in this class rely on the fact that the policy uses a CopyOnWriteArraySet which - // preserves insertion order, which is why we can use containsExactly() throughout this class. - assertThat(policy.localDcLiveNodes).containsExactly(node1, node2, node3, node4, node5); - } - - @Test - public void should_use_round_robin_when_no_request() { - // Given - request = null; - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(metadataManager).should(never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_no_session() { - // Given - session = null; - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(request).should(never()).getRoutingKey(); - then(request).should(never()).getRoutingToken(); - then(metadataManager).should(never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_request_has_no_routing_keyspace() { - // Given - given(request.getKeyspace()).willReturn(null); - given(request.getRoutingKeyspace()).willReturn(null); - given(session.getKeyspace()).willReturn(Optional.empty()); - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(request).should(never()).getRoutingKey(); - then(request).should(never()).getRoutingToken(); - then(metadataManager).should(never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_request_has_no_routing_key_or_token() { - // Given - given(request.getKeyspace()).willReturn(null); - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(null); - given(request.getRoutingToken()).willReturn(null); - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(metadataManager).should(never()).getMetadata(); - } - - @Test - public void should_use_round_robin_when_token_map_absent() { - // Given - 
given(request.getKeyspace()).willReturn(null); - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(metadata.getTokenMap()).willReturn(Optional.empty()); - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(metadata).should(atLeast(1)).getTokenMap(); - } - - @Test - public void - should_use_round_robin_when_token_map_returns_no_replicas_using_request_keyspace_and_routing_token() { - // Given - given(request.getKeyspace()).willReturn(null); - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingToken()).willReturn(routingToken); - given(tokenMap.getReplicas(KEYSPACE, routingToken)).willReturn(emptySet()); - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, routingToken); - } - - @Test - public void - should_use_round_robin_when_token_map_returns_no_replicas_using_session_keyspace_and_routing_key() { - // Given - given(request.getKeyspace()).willReturn(null); - given(request.getRoutingKeyspace()).willReturn(null); - given(session.getKeyspace()).willReturn(Optional.of(KEYSPACE)); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(emptySet()); - // When - List> plans = generateQueryPlans(); - // Then - thenAssertRoundRobinQueryPlans(plans); - then(tokenMap).should(atLeast(1)).getReplicas(KEYSPACE, ROUTING_KEY); - } - - @Test - public void should_prioritize_single_replica() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(ImmutableSet.of(node3)); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - Queue plan3 = 
policy.newQueryPlan(request, session); - Queue plan4 = policy.newQueryPlan(request, session); - - // Then - // node3 always first, round-robin on the rest - assertThat(plan1).containsExactly(node3, node1, node2, node4, node5); - assertThat(plan2).containsExactly(node3, node2, node4, node5, node1); - assertThat(plan3).containsExactly(node3, node4, node5, node1, node2); - assertThat(plan4).containsExactly(node3, node5, node1, node2, node4); - - then(policy).should(never()).shuffleHead(any(), anyInt()); - then(policy).should(never()).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void should_prioritize_and_shuffle_2_replicas() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(ImmutableSet.of(node3, node5)); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - Queue plan3 = policy.newQueryPlan(request, session); - - // Then - // node3 and node5 always first, round-robin on the rest - assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); - assertThat(plan2).containsExactly(node3, node5, node2, node4, node1); - assertThat(plan3).containsExactly(node3, node5, node4, node1, node2); - - then(policy).should(times(3)).shuffleHead(any(), anyInt()); - then(policy).should(never()).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_all_newly_up() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - policy.upTimes.put(node1, T1); - policy.upTimes.put(node3, T2); - policy.upTimes.put(node5, T3); // newest up replica - 
given(pool1.getInFlight()).willReturn(0); - given(pool3.getInFlight()).willReturn(0); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // newest up replica is 5, not in first or second position - assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); - assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void - should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_4() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - policy.upTimes.put(node1, T2); // newest up replica - policy.upTimes.put(node3, T1); - given(pool3.getInFlight()).willReturn(0); - given(pool5.getInFlight()).willReturn(0); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // newest up replica is node1 in first position and diceRoll = 4 -> bubbles down - assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); - assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(times(2)).diceRoll1d4(); - } - - @Test - public void - should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_1() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - 
given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - policy.upTimes.put(node1, T2); // newest up replica - policy.upTimes.put(node3, T1); - given(pool1.getInFlight()).willReturn(0); - given(pool3.getInFlight()).willReturn(0); - diceRoll = 1; - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // newest up replica is node1 in first position and diceRoll = 1 -> does not bubble down - assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); - assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(times(2)).diceRoll1d4(); - } - - @Test - public void should_prioritize_and_shuffle_3_or_more_replicas_when_first_unhealthy() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - given(pool1.getInFlight()).willReturn(100); // unhealthy - given(pool3.getInFlight()).willReturn(0); - given(pool5.getInFlight()).willReturn(0); - - policy.responseTimes.put(node1, new AtomicLongArray(new long[] {T0, T0})); // unhealthy - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // node1 is unhealthy = 1 -> bubbles down - assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); - assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - 
then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void - should_not_treat_node_as_unhealthy_if_has_in_flight_exceeded_but_response_times_normal() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - given(pool1.getInFlight()).willReturn(100); // unhealthy - given(pool3.getInFlight()).willReturn(0); - given(pool5.getInFlight()).willReturn(0); - - policy.responseTimes.put(node1, new AtomicLongArray(new long[] {T1, T1})); // healthy - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // node1 has more in-flight than node3 -> swap - assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); - assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void should_prioritize_and_shuffle_3_or_more_replicas_when_last_unhealthy() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - given(pool1.getInFlight()).willReturn(0); - given(pool3.getInFlight()).willReturn(0); - given(pool5.getInFlight()).willReturn(100); // unhealthy - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // node5 is unhealthy -> noop - assertThat(plan1).containsExactly(node1, node3, node5, node2, 
node4); - assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void should_prioritize_and_shuffle_3_or_more_replicas_when_majority_unhealthy() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - given(pool1.getInFlight()).willReturn(100); - given(pool3.getInFlight()).willReturn(100); - given(pool5.getInFlight()).willReturn(0); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // majority of nodes unhealthy -> noop - assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); - assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - @Test - public void should_reorder_first_two_replicas_when_first_has_more_in_flight_than_second() { - // Given - given(request.getRoutingKeyspace()).willReturn(KEYSPACE); - given(request.getRoutingKey()).willReturn(ROUTING_KEY); - given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) - .willReturn(ImmutableSet.of(node1, node3, node5)); - given(pool1.getInFlight()).willReturn(200); - given(pool3.getInFlight()).willReturn(100); - - // When - Queue plan1 = policy.newQueryPlan(request, session); - Queue plan2 = policy.newQueryPlan(request, session); - - // Then - // nodes 1, 3 and 5 always first, round-robin on the rest - // node1 has more in-flight than node3 -> swap - assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); - 
assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); - - then(policy).should(times(2)).shuffleHead(any(), anyInt()); - then(policy).should(times(2)).nanoTime(); - then(policy).should(never()).diceRoll1d4(); - } - - private List> generateQueryPlans() { - return IntStream.range(0, 10) - .mapToObj(i -> policy.newQueryPlan(request, session)) - .collect(toList()); - } - - private void thenAssertRoundRobinQueryPlans(List> plans) { - assertThat(plans.get(0)).containsExactly(node1, node2, node3, node4, node5); - assertThat(plans.get(1)).containsExactly(node2, node3, node4, node5, node1); - assertThat(plans.get(2)).containsExactly(node3, node4, node5, node1, node2); - assertThat(plans.get(3)).containsExactly(node4, node5, node1, node2, node3); - assertThat(plans.get(4)).containsExactly(node5, node1, node2, node3, node4); - assertThat(plans.get(5)).containsExactly(node1, node2, node3, node4, node5); - assertThat(plans.get(6)).containsExactly(node2, node3, node4, node5, node1); - assertThat(plans.get(7)).containsExactly(node3, node4, node5, node1, node2); - assertThat(plans.get(8)).containsExactly(node4, node5, node1, node2, node3); - assertThat(plans.get(9)).containsExactly(node5, node1, node2, node3, node4); - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java deleted file mode 100644 index b7c9ec5f6b5..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyTestBase.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.loadbalancing; - -import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; -import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.BDDMockito.given; - -import ch.qos.logback.classic.Logger; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.Appender; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; -import com.datastax.oss.driver.api.core.config.DriverConfig; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.DefaultNode; -import com.datastax.oss.driver.internal.core.metadata.MetadataManager; -import java.util.function.Predicate; -import org.junit.After; -import org.junit.Before; -import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.slf4j.LoggerFactory; - -@RunWith(MockitoJUnitRunner.class) -public abstract class DseLoadBalancingPolicyTestBase { - - @Mock DefaultNode node1; - @Mock DefaultNode node2; - @Mock 
DefaultNode node3; - @Mock InternalDriverContext context; - @Mock DriverConfig config; - @Mock DriverExecutionProfile profile; - @Mock Predicate filter; - @Mock LoadBalancingPolicy.DistanceReporter distanceReporter; - @Mock Appender appender; - @Mock Request request; - @Mock MetadataManager metadataManager; - final String logPrefix = "lbp-test-log-prefix"; - - @Captor ArgumentCaptor loggingEventCaptor; - - private Logger logger; - - @Before - public void setUp() { - logger = (Logger) LoggerFactory.getLogger(DseLoadBalancingPolicy.class); - logger.addAppender(appender); - given(node1.getDatacenter()).willReturn("dc1"); - given(node2.getDatacenter()).willReturn("dc1"); - given(node3.getDatacenter()).willReturn("dc1"); - given(filter.test(any(Node.class))).willReturn(true); - given(context.getNodeFilter(DEFAULT_NAME)).willReturn(filter); - given(context.getConfig()).willReturn(config); - given(config.getProfile(DEFAULT_NAME)).willReturn(profile); - given(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER, null)).willReturn("dc1"); - given(context.getMetadataManager()).willReturn(metadataManager); - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - } - - @After - public void tearDown() { - logger.detachAppender(appender); - } -} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java index 47b73c126a6..d7b04f528c5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java @@ -158,7 +158,7 @@ public void should_ignore_up_node_when_remote_dc() { @NonNull protected BasicLoadBalancingPolicy createAndInitPolicy() { BasicLoadBalancingPolicy policy = - new 
DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java index f46c6e8a64d..7535e8c8fce 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java @@ -15,26 +15,35 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; -import static org.assertj.core.api.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.reset; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.Before; -@RunWith(MockitoJUnitRunner.class) public class DcInferringLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { - @NonNull @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + @NonNull protected BasicLoadBalancingPolicy 
createAndInitPolicy() { DcInferringLoadBalancingPolicy policy = - new DcInferringLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + new DcInferringLoadBalancingPolicy(context, DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes()).containsOnly(node1, node2); + reset(distanceReporter); return policy; } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java index d13be12d08c..71d1ef154c8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.filter; +import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -24,6 +25,7 @@ import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; @@ -32,10 +34,18 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; +import org.junit.Before; import org.junit.Test; public class DcInferringLoadBalancingPolicyInitTest extends 
DefaultLoadBalancingPolicyTestBase { + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + @Test public void should_use_local_dc_if_provided_via_config() { // Given @@ -214,7 +224,7 @@ public void should_ignore_nodes_excluded_by_filter() { } @NonNull - protected BasicLoadBalancingPolicy createPolicy() { + protected DcInferringLoadBalancingPolicy createPolicy() { return new DcInferringLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java index 6d351df0265..1c2b8f09e67 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java @@ -15,24 +15,33 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; import static org.mockito.Mockito.spy; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.UUID; public class DcInferringLoadBalancingPolicyQueryPlanTest - extends BasicLoadBalancingPolicyQueryPlanTest { + extends DefaultLoadBalancingPolicyQueryPlanTest { @Override - protected DcInferringLoadBalancingPolicy createAndInitPolicy() { - // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was - // called (makes tests easier) - NonShufflingDcInferringLoadBalancingPolicy policy = + protected 
DefaultLoadBalancingPolicy createAndInitPolicy() { + DcInferringLoadBalancingPolicy policy = spy( - new NonShufflingDcInferringLoadBalancingPolicy( - context, DriverExecutionProfile.DEFAULT_NAME)); + new DcInferringLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] array, int n) {} + + @Override + protected long nanoTime() { + return nanoTime; + } + + @Override + protected int diceRoll1d4() { + return diceRoll; + } + }); policy.init( ImmutableMap.of( UUID.randomUUID(), node1, @@ -43,15 +52,4 @@ protected DcInferringLoadBalancingPolicy createAndInitPolicy() { distanceReporter); return policy; } - - static class NonShufflingDcInferringLoadBalancingPolicy extends DcInferringLoadBalancingPolicy { - NonShufflingDcInferringLoadBalancingPolicy(DriverContext context, String profileName) { - super(context, profileName); - } - - @Override - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - // nothing (keep in same order) - } - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java index efec5dbcf19..5c0f1b8c581 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java @@ -15,26 +15,34 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; -import static org.assertj.core.api.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.reset; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import 
com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import org.junit.Before; -@RunWith(MockitoJUnitRunner.class) public class DefaultLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { - @NonNull @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + @NonNull protected DefaultLoadBalancingPolicy createAndInitPolicy() { - DefaultLoadBalancingPolicy policy = - new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + DefaultLoadBalancingPolicy policy = new DefaultLoadBalancingPolicy(context, DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes()).containsOnly(node1, node2); + reset(distanceReporter); return policy; } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index a6bfd6590c8..2372e3de92d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.filter; +import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.never; import static 
org.mockito.Mockito.verify; @@ -24,6 +25,7 @@ import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; @@ -32,10 +34,18 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; +import org.junit.Before; import org.junit.Test; public class DefaultLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + @Test public void should_use_local_dc_if_provided_via_config() { // Given diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java index e8ed2f9aaad..b3e65fdd4f2 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java @@ -15,23 +15,334 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.then; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; +import 
static org.mockito.Mockito.times; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Optional; +import java.util.Queue; import java.util.UUID; +import java.util.concurrent.atomic.AtomicLongArray; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; public class DefaultLoadBalancingPolicyQueryPlanTest extends BasicLoadBalancingPolicyQueryPlanTest { + private static final long T0 = Long.MIN_VALUE; + private static final long T1 = 100; + private static final long T2 = 200; + private static final long T3 = 300; + + @Mock protected ChannelPool pool1; + @Mock protected ChannelPool pool2; + @Mock protected ChannelPool pool3; + @Mock protected ChannelPool pool4; + @Mock protected ChannelPool pool5; + + long nanoTime; + int diceRoll; + + private DefaultLoadBalancingPolicy dsePolicy; + + @Before + @Override + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + nanoTime = T1; + diceRoll = 4; + given(node4.getDatacenter()).willReturn("dc1"); + given(node5.getDatacenter()).willReturn("dc1"); + given(session.getPools()) + .willReturn( + ImmutableMap.of( + node1, pool1, + node2, pool2, + node3, pool3, + node4, pool4, + node5, pool5)); + given(context.getMetadataManager()).willReturn(metadataManager); + given(metadataManager.getMetadata()).willReturn(metadata); + given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); + given(metadata.getTokenMap()).willAnswer(invocation -> Optional.of(tokenMap)); + super.setup(); + dsePolicy = 
(DefaultLoadBalancingPolicy) policy; + // Note: tests in this class rely on the fact that the policy uses a CopyOnWriteArraySet which + // preserves insertion order, which is why we can use containsExactly() throughout this class. + assertThat(dsePolicy.getLiveNodes()).containsExactly(node1, node2, node3, node4, node5); + } + + @Test + public void should_prioritize_and_shuffle_2_replicas() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).willReturn(ImmutableSet.of(node3, node5)); + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + Queue plan3 = dsePolicy.newQueryPlan(request, session); + + // Then + // node3 and node5 always first, round-robin on the rest + assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node2, node4, node1); + assertThat(plan3).containsExactly(node3, node5, node4, node1, node2); + + then(dsePolicy).should(times(3)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(never()).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_all_newly_up() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + dsePolicy.upTimes.put(node1, T1); + dsePolicy.upTimes.put(node3, T2); + dsePolicy.upTimes.put(node5, T3); // newest up replica + given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, 
round-robin on the rest + // newest up replica is 5, not in first or second position + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void + should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_4() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + dsePolicy.upTimes.put(node1, T2); // newest up replica + dsePolicy.upTimes.put(node3, T1); + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // newest up replica is node1 in first position and diceRoll = 4 -> bubbles down + assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(times(2)).diceRoll1d4(); + } + + @Test + public void + should_prioritize_and_shuffle_3_or_more_replicas_when_all_healthy_and_some_newly_up_and_dice_roll_1() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + dsePolicy.upTimes.put(node1, T2); // newest up replica + dsePolicy.upTimes.put(node3, T1); + 
given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + diceRoll = 1; + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // newest up replica is node1 in first position and diceRoll = 1 -> does not bubble down + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(times(2)).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_first_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); // unhealthy + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + dsePolicy.responseTimes.put(node1, new AtomicLongArray(new long[] {T0, T0})); // unhealthy + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 is unhealthy = 1 -> bubbles down + assertThat(plan1).containsExactly(node3, node5, node1, node2, node4); + assertThat(plan2).containsExactly(node3, node5, node1, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void + should_not_treat_node_as_unhealthy_if_has_in_flight_exceeded_but_response_times_normal() { + // Given + 
given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); // unhealthy + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(0); + + dsePolicy.responseTimes.put(node1, new AtomicLongArray(new long[] {T1, T1})); // healthy + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 has more in-flight than node3 -> swap + assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); + assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_last_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(0); + given(pool3.getInFlight()).willReturn(0); + given(pool5.getInFlight()).willReturn(100); // unhealthy + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node5 is unhealthy -> noop + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + 
then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void should_prioritize_and_shuffle_3_or_more_replicas_when_majority_unhealthy() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(100); + given(pool3.getInFlight()).willReturn(100); + given(pool5.getInFlight()).willReturn(0); + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // majority of nodes unhealthy -> noop + assertThat(plan1).containsExactly(node1, node3, node5, node2, node4); + assertThat(plan2).containsExactly(node1, node3, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + + @Test + public void should_reorder_first_two_replicas_when_first_has_more_in_flight_than_second() { + // Given + given(request.getRoutingKeyspace()).willReturn(KEYSPACE); + given(request.getRoutingKey()).willReturn(ROUTING_KEY); + given(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .willReturn(ImmutableSet.of(node1, node3, node5)); + given(pool1.getInFlight()).willReturn(200); + given(pool3.getInFlight()).willReturn(100); + + // When + Queue plan1 = dsePolicy.newQueryPlan(request, session); + Queue plan2 = dsePolicy.newQueryPlan(request, session); + + // Then + // nodes 1, 3 and 5 always first, round-robin on the rest + // node1 has more in-flight than node3 -> swap + assertThat(plan1).containsExactly(node3, node1, node5, node2, node4); + assertThat(plan2).containsExactly(node3, node1, node5, node4, node2); + + then(dsePolicy).should(times(2)).shuffleHead(any(), anyInt()); + 
then(dsePolicy).should(times(2)).nanoTime(); + then(dsePolicy).should(never()).diceRoll1d4(); + } + @Override protected DefaultLoadBalancingPolicy createAndInitPolicy() { - // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was - // called (makes tests easier) - NonShufflingDefaultLoadBalancingPolicy policy = + DefaultLoadBalancingPolicy policy = spy( - new NonShufflingDefaultLoadBalancingPolicy( - context, DriverExecutionProfile.DEFAULT_NAME)); + new DefaultLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] array, int n) {} + + @Override + protected long nanoTime() { + return nanoTime; + } + + @Override + protected int diceRoll1d4() { + return diceRoll; + } + }); policy.init( ImmutableMap.of( UUID.randomUUID(), node1, @@ -42,15 +353,4 @@ protected DefaultLoadBalancingPolicy createAndInitPolicy() { distanceReporter); return policy; } - - static class NonShufflingDefaultLoadBalancingPolicy extends DefaultLoadBalancingPolicy { - NonShufflingDefaultLoadBalancingPolicy(DriverContext context, String profileName) { - super(context, profileName); - } - - @Override - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - // nothing (keep in same order) - } - } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java similarity index 88% rename from core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java index c9fba9a9301..76517fc2c0c 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/loadbalancing/DseLoadBalancingPolicyRequestTrackerTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java @@ -13,32 +13,42 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.dse.driver.internal.core.loadbalancing; +package com.datastax.oss.driver.internal.core.loadbalancing; +import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.BDDMockito.given; +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.util.UUID; import org.junit.Before; import org.junit.Test; +import org.mockito.Mock; -public class DseLoadBalancingPolicyRequestTrackerTest extends DseLoadBalancingPolicyTestBase { +public class DefaultLoadBalancingPolicyRequestTrackerTest + extends DefaultLoadBalancingPolicyTestBase { - private DseLoadBalancingPolicy policy; + @Mock Request request; + @Mock DriverExecutionProfile profile; + final String logPrefix = "lbp-test-log-prefix"; + + private DefaultLoadBalancingPolicy policy; private long nextNanoTime; @Before @Override - public void setUp() { - super.setUp(); + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); policy = - new DseLoadBalancingPolicy(context, DEFAULT_NAME) { + new DefaultLoadBalancingPolicy(context, DEFAULT_NAME) { @Override - long nanoTime() { + protected long nanoTime() { return nextNanoTime; } }; From 
454613c6b167b6c959ec4a0867f863588c4c8ef9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 10 Oct 2019 16:06:37 -0700 Subject: [PATCH 202/979] Merge dse-reference.conf into reference.conf Deprecate DseDriverConfigLoader. --- .../api/core/DseSessionBuilderBase.java | 4 +- .../core/config/DseDriverConfigLoader.java | 173 +------- .../core/context/DseDriverContext.java | 10 - core/src/main/resources/dse-reference.conf | 418 ------------------ core/src/main/resources/reference.conf | 375 +++++++++++++++- .../config/DseDriverConfigLoaderTest.java | 126 ------ .../api/core/graph/GraphAuthenticationIT.java | 4 +- .../DseSessionBuilderInstantiator.java | 4 +- 8 files changed, 386 insertions(+), 728 deletions(-) delete mode 100644 core/src/main/resources/dse-reference.conf delete mode 100644 core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index 121a8e64ba5..9a221511e9f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -15,7 +15,6 @@ */ package com.datastax.dse.driver.api.core; -import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.dse.driver.internal.core.auth.DseProgrammaticPlainTextAuthProvider; @@ -119,8 +118,7 @@ public SelfT withApplicationVersion(@Nullable String applicationVersion) { * *

      Note that this loader must produce a configuration that includes the DSE-specific options: * if you're using one of the built-in implementations provided by the driver, use the static - * factory methods from {@link DseDriverConfigLoader} (not the ones from {@link - * DriverConfigLoader}). + * factory methods from {@link DriverConfigLoader}. * *

      If you don't call this method, the builder will use the default implementation, based on the * Typesafe config library. More precisely, configuration properties are loaded and merged from diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java index 1d9ba9a88f9..cda2acdaa27 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoader.java @@ -17,190 +17,55 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; -import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.net.URL; /** - * Exposes factory methods to create config loaders from the DSE driver. - * - *

      Note that this class only exists to expose those methods in a way that is symmetric to its OSS - * counterpart {@link DriverConfigLoader}. It does not extend it, DSE-specific loaders are regular - * instances of the OSS type. + * @deprecated This class only exists for backward compatibility. All of its methods delegate to + * their counterparts on {@link DriverConfigLoader}, which you should call directly instead. */ +@Deprecated public class DseDriverConfigLoader { /** - * Builds an instance using the driver's default implementation (based on Typesafe config), except - * that application-specific options are loaded from a classpath resource with a custom name. - * - *

      More precisely, configuration properties are loaded and merged from the following - * (first-listed are higher priority): - * - *

        - *
      • system properties - *
      • {@code .conf} (all resources on classpath with this name) - *
      • {@code .json} (all resources on classpath with this name) - *
      • {@code .properties} (all resources on classpath with this name) - *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, - * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that - * defines default options for all DSE-specific mandatory options. - *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this - * will load the {@code reference.conf} included in the core OSS driver JAR, that defines - * default options for all mandatory options common to OSS and DSE. - *
      - * - * The resulting configuration is expected to contain a {@code datastax-java-driver} section. - * - *

      The returned loader will honor the reload interval defined by the option {@code - * basic.config-reload-interval}. + * @deprecated This method only exists for backward compatibility. It delegates to {@link + * DriverConfigLoader#fromClasspath(String)}, which you should call directly instead. */ + @Deprecated @NonNull public static DriverConfigLoader fromClasspath(@NonNull String resourceBaseName) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseResourcesAnySyntax(resourceBaseName)) - .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) - .withFallback(ConfigFactory.defaultReference()) - .resolve(); - return config.getConfig("datastax-java-driver"); - }); + return DriverConfigLoader.fromClasspath(resourceBaseName); } /** - * Builds an instance using the driver's default implementation (based on Typesafe config), except - * that application-specific options are loaded from the given file. - * - *

      More precisely, configuration properties are loaded and merged from the following - * (first-listed are higher priority): - * - *

        - *
      • system properties - *
      • the contents of {@code file} - *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, - * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that - * defines default options for all DSE-specific mandatory options. - *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this - * will load the {@code reference.conf} included in the core OSS driver JAR, that defines - * default options for all mandatory options common to OSS and DSE. - *
      - * - * The resulting configuration is expected to contain a {@code datastax-java-driver} section. - * - *

      The returned loader will honor the reload interval defined by the option {@code - * basic.config-reload-interval}. + * @deprecated This method only exists for backward compatibility. It delegates to {@link + * DriverConfigLoader#fromFile(File)}, which you should call directly instead. */ + @Deprecated @NonNull public static DriverConfigLoader fromFile(@NonNull File file) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseFileAnySyntax(file)) - .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) - .withFallback(ConfigFactory.defaultReference()) - .resolve(); - return config.getConfig("datastax-java-driver"); - }); + return DriverConfigLoader.fromFile(file); } /** - * Builds an instance using the driver's default implementation (based on Typesafe config), except - * that application-specific options are loaded from the given URL. - * - *

      More precisely, configuration properties are loaded and merged from the following - * (first-listed are higher priority): - * - *

        - *
      • system properties - *
      • the contents of {@code url} - *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, - * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that - * defines default options for all DSE-specific mandatory options. - *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this - * will load the {@code reference.conf} included in the core OSS driver JAR, that defines - * default options for all mandatory options common to OSS and DSE. - *
      - * - * The resulting configuration is expected to contain a {@code datastax-java-driver} section. - * - *

      The returned loader will honor the reload interval defined by the option {@code - * basic.config-reload-interval}. + * @deprecated This method only exists for backward compatibility. It delegates to {@link + * DriverConfigLoader#fromUrl(URL)}, which you should call directly instead. */ + @Deprecated @NonNull public static DriverConfigLoader fromUrl(@NonNull URL url) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseURL(url)) - .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) - .withFallback(ConfigFactory.defaultReference()) - .resolve(); - return config.getConfig("datastax-java-driver"); - }); + return DriverConfigLoader.fromUrl(url); } /** - * Starts a builder that allows configuration options to be overridden programmatically. - * - *

      Sample usage: - * - *

      {@code
      -   * DriverConfigLoader loader =
      -   *     DriverConfigLoader.programmaticBuilder()
      -   *         .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(5))
      -   *         .startProfile("slow")
      -   *         .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30))
      -   *         .endProfile()
      -   *         .build();
      -   * }
      - * - * The resulting loader still uses the driver's default implementation (based on Typesafe config), - * except that the programmatic configuration takes precedence. More precisely, configuration - * properties are loaded and merged from the following (first-listed are higher priority): - * - *
        - *
      • system properties - *
      • properties that were provided programmatically - *
      • {@code application.conf} (all resources on classpath with this name) - *
      • {@code application.json} (all resources on classpath with this name) - *
      • {@code application.properties} (all resources on classpath with this name) - *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, - * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that - * defines default options for all DSE-specific mandatory options. - *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this - * will load the {@code reference.conf} included in the core OSS driver JAR, that defines - * default options for all mandatory options common to OSS and DSE. - *
      - * - * Note that {@code application.*} is entirely optional, you may choose to only rely on the - * driver's built-in {@code reference.conf} and programmatic overrides. - * - *

      The resulting configuration is expected to contain a {@code datastax-java-driver} section. - * - *

      The loader will honor the reload interval defined by the option {@code - * basic.config-reload-interval}. - * - *

      Note that the returned builder is not thread-safe. + * @deprecated This method only exists for backward compatibility. It delegates to {@link + * DriverConfigLoader#programmaticBuilder()}, which you should call directly instead. */ + @Deprecated @NonNull public static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder() { - return new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> - ConfigFactory.defaultApplication() - .withFallback(ConfigFactory.parseResourcesAnySyntax("dse-reference")) - .withFallback(ConfigFactory.defaultReference()), - DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); + return DriverConfigLoader.programmaticBuilder(); } private DseDriverConfigLoader() { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 30c2d1e625d..1757db82188 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -15,8 +15,6 @@ */ package com.datastax.dse.driver.internal.core.context; -import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; -import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; import com.datastax.dse.driver.internal.core.DseProtocolVersionRegistry; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; @@ -105,14 +103,6 @@ public DseDriverContext( } this.listeners = Collections.singletonList(new InsightsClientLifecycleListener(this, stackTrace)); - - if (!getConfig().getDefaultProfile().isDefined(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE)) { - LOG.warn( - "[{}] It looks like your configuration is missing DSE-specific options. 
" - + "If you use a built-in config loader, make sure you create it with {}.", - getSessionName(), - DseDriverConfigLoader.class.getSimpleName()); - } } /** * @deprecated this constructor only exists for backward compatibility. Please use {@link diff --git a/core/src/main/resources/dse-reference.conf b/core/src/main/resources/dse-reference.conf deleted file mode 100644 index 712b4d4b91c..00000000000 --- a/core/src/main/resources/dse-reference.conf +++ /dev/null @@ -1,418 +0,0 @@ -# Reference configuration for the DataStax Enterprise Java driver. -# -# This configuration inherits from the open-source Apache Cassandra® driver configuration. In other -# words, any option not explicitly defined below gets its value from the `reference.conf` file -# shipped with the java-driver-core.jar that this DSE driver depends on. -# -# You can place your own `application.conf` file in the classpath to override any option. -# -# Note that the configuration loading mechanism is pluggable (see -# DseSessionBuilder.withConfigLoader). -# -# This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md. -datastax-java-driver { - - basic { - # The name of the application using the session. - # - # It will be sent in the STARTUP protocol message for each new connection established by the - # driver, and may be used by future DSE versions for monitoring purposes. - # - # This can also be defined programmatically with DseSessionBuilder.withApplicationName(). If you - # specify both, the programmatic value takes precedence and this option is ignored. - # - # Required: no - # Modifiable at runtime: no - # Overridable in a profile: no - // application.name = - - # The version of the application using the session. - # - # It will be sent in the STARTUP protocol message for each new connection established by the - # driver, and may be used by future DSE versions for monitoring purposes. 
- # - # This can also be defined programmatically with DseSessionBuilder.withApplicationVersion(). If - # you specify both, the programmatic value takes precedence and this option is ignored. - # - # Required: no - # Modifiable at runtime: no - # Overridable in a profile: no - // application.version = - } - - basic.graph { - # The name of the graph targeted by graph statements. - # - # This can also be overridden programmatically with GraphStatement.setGraphName(). If both are - # specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no. In particular, system queries -- such as creating or dropping a graph -- must be - # executed without a graph name (see also basic.graph.is-system-query). - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // name = your-graph-name - - # The traversal source to use for graph statements. - # - # This setting doesn't usually need to change, unless executing OLAP queries, which require the - # traversal source "a". - # - # This can also be overridden programmatically with GraphStatement.setTraversalSource(). If both - # are specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - traversal-source = "g" - - # Whether a script statement represents a system query. - # - # Script statements that access the `system` variable *must not* specify a graph name (otherwise - # `system` is not available). However, if your application executes a lot of non-system - # statements, it is convenient to configure basic.graph.name to avoid repeating it every time. - # This option allows you to ignore that global graph name, for example in a specific profile. - # - # This can also be overridden programmatically with ScriptGraphStatement.setSystemQuery(). 
If - # both are specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no (defaults to false) - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // is-system-query = false - - # The read consistency level to use for graph statements. - # - # DSE Graph is able to distinguish between read and write timeouts for the internal storage - # queries that will be produced by a traversal. Hence the consistency level for reads and writes - # can be set separately. - # - # This can also be overridden programmatically with GraphStatement.setReadConsistencyLevel(). If - # both are specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no (defaults to request.basic.consistency) - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // read-consistency-level = LOCAL_QUORUM - - # The write consistency level to use for graph statements. - # - # DSE Graph is able to distinguish between read and write timeouts for the internal storage - # queries that will be produced by a traversal. Hence the consistency level for reads and writes - # can be set separately. - # - # This can also be overridden programmatically with GraphStatement.setReadConsistencyLevel(). If - # both are specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no (defaults to request.basic.consistency) - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // write-consistency-level = LOCAL_ONE - - # How long the driver waits for a graph request to complete. This is a global limit on the - # duration of a session.execute() call, including any internal retries the driver might do. 
- # - # Graph statements behave a bit differently than regular CQL requests (hence this dedicated - # option instead of reusing basic.request.timeout): by default, the client timeout is not set, - # and the driver will just wait as long as needed until the server replies (which is itself - # governed by server-side timeout configuration). - # If you specify a client timeout with this option, then the driver will fail the request after - # the given time; note that the value is also sent along with the request, so that the server - # can also time out early and avoid wasting resources on a response that the client has already - # given up on. - # - # This can also be overridden programmatically with GraphStatement.setTimeout(). If both are - # specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no (defaults to request.basic.consistency) - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // timeout = 10 seconds - } - - # The component that handles authentication on each new connection. - # - # Required: no. If the 'class' child option is absent, no authentication will occur. - # Modifiable at runtime: no - # Overridable in a profile: no - advanced.auth-provider { - # The class of the provider. If it is not qualified, the driver assumes that it resides in one - # of the following packages: - # - com.datastax.oss.driver.internal.core.auth - # - com.datastax.dse.driver.internal.core.auth - # - # The DSE driver provides 3 implementations out of the box: - # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and - # `password` options below. Should be used only when authenticating against Apache - # Cassandra(R) clusters; not recommended when authenticating against DSE clusters. - # - DsePlainTextAuthProvider: provides SASL authentication using the PLAIN mechanism for DSE - # clusters secured with DseAuthenticator. 
It requires the `username` and `password` options - # below, and optionally, an `authorization-id`. - # - DseGssApiAuthProvider: provides GSSAPI authentication for DSE clusters secured with - # DseAuthenticator. Read the javadocs of this authenticator for detailed instructions. - # - # You can also specify a custom class that implements AuthProvider and has a public constructor - # with a DriverContext argument (to simplify this, the driver provides two abstract classes that - # can be extended: DsePlainTextAuthProviderBase and DseGssApiAuthProviderBase). - # - # Finally, you can configure a provider instance programmatically with - # DseSessionBuilder#withAuthProvider. In that case, it will take precedence over the - # configuration. - // class = DsePlainTextAuthProvider - # - # Sample configuration for plain-text authentication providers: - // username = cassandra - // password = cassandra - # - # Proxy authentication: allows to login as another user or role (valid for both - # DsePlainTextAuthProvider and DseGssApiAuthProvider): - // authorization-id = userOrRole - # - # The settings below are only applicable to DseGssApiAuthProvider: - # - # Service name. For example, if in your dse.yaml configuration file the - # "kerberos_options/service_principal" setting is "cassandra/my.host.com@MY.REALM.COM", then set - # this option to "cassandra": - //service = "cassandra" - # - # Login configuration. It is also possible to provide login configuration through a standard - # JAAS configuration file. The below configuration is just an example, see all possible options - # here: - # https://docs.oracle.com/javase/6/docs/jre/api/security/jaas/spec/com/sun/security/auth/module/Krb5LoginModule.html - // login-configuration { - // principal = "cassandra@DATASTAX.COM" - // useKeyTab = "true" - // refreshKrb5Config = "true" - // keyTab = "/path/to/keytab/file" - // } - # - # Internal SASL properties, if any, such as QOP. 
- // sasl-properties { - // javax.security.sasl.qop = "auth-conf" - // } - } - - advanced.graph { - # The sub-protocol the driver will use to communicate with DSE Graph, on top of the Cassandra - # native protocol. - # - # You should almost never have to change this: the driver sets it automatically, based on the - # information it has about the server. One exception is if you use the script API against a - # legacy DSE version (5.0.3 or older). In that case, you need to force the sub-protocol to - # "graphson-1.0". - # - # This can also be overridden programmatically with GraphStatement.setSubProtocol(). If both are - # specified, the programmatic value takes precedence, and this option is ignored. - # - # Required: no - # Modifiable at runtime: yes, the new value will be used for requests issued after the change. - # Overridable in a profile: yes - // sub-protocol = "graphson-2.0" - } - - advanced.metrics { - - # The session-level metrics (all disabled by default). - # - # This section only lists metrics specific to the DSE driver. Please refer to the OSS driver's - # reference.conf file to know which metrics are made available by the OSS driver. - # - # Required: yes - # Modifiable at runtime: no - # Overridable in a profile: no - session { - enabled = [ - - # The throughput and latency percentiles of continuous CQL requests (exposed as a Timer). - # - # This metric is a session-level metric and corresponds to the overall duration of the - # session.executeContinuously() call, including any retry. - # - # Note that this metric is analogous to the OSS driver's 'cql-requests' metrics, but for - # continuous paging requests only. Continuous paging requests do not update the - # 'cql-requests' metric, because they are usually much longer. 
Only the following metrics - # are updated during a continuous paging request: - # - # - At node level: all the usual metrics available for normal CQL requests, such as - # 'cql-messages' and error-related metrics; - # - At session level: only 'continuous-cql-requests' is updated (this metric). - // continuous-cql-requests, - - ] - - # Extra configuration (for the metrics that need it) - - # Required: if the 'continuous-cql-requests' metric is enabled - # Modifiable at runtime: no - # Overridable in a profile: no - continuous-cql-requests { - - # The largest latency that we expect to record for a continuous paging request. - # - # This is used to scale internal data structures. If a higher recording is encountered at - # runtime, it is discarded and a warning is logged. - highest-latency = 120 seconds - - # The number of significant decimal digits to which internal structures will maintain - # value resolution and separation (for example, 3 means that recordings up to 1 second - # will be recorded with a resolution of 1 millisecond or better). - # - # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a - # warning is logged. - significant-digits = 3 - - # The interval at which percentile data is refreshed. - # - # The driver records latency data in a "live" histogram, and serves results from a cached - # snapshot. Each time the snapshot gets older than the interval, the two are switched. - # Note that this switch happens upon fetching the metrics, so if you never fetch the - # recording interval might grow higher (that shouldn't be an issue in a production - # environment because you would typically have a metrics reporter that exports to a - # monitoring tool at a regular interval). - # - # In practice, this means that if you set this to 5 minutes, you're looking at data from a - # 5-minute interval in the past, that is at most 5 minutes old. 
If you fetch the metrics - # at a faster pace, you will observe the same data for 5 minutes until the interval - # expires. - # - # Note that this does not apply to the total count and rates (those are updated in real - # time). - refresh-interval = 5 minutes - } - } - - # The node-level metrics (all disabled by default). - # - # This section only lists metrics specific to the DSE driver. Please refer to the OSS driver's - # reference.conf file to knwow which metrics are made available by the OSS driver. - # - # Required: yes - # Modifiable at runtime: no - # Overridable in a profile: no - node { - enabled = [ - - ] - } - } - - # Options to control the execution of continuous paging requests. - advanced.continuous-paging { - - # The page size. - # - # The value specified here can be interpreted in number of rows - # or in number of bytes, depending on the unit defined with page-unit (see below). - # - # It controls how many rows (or how much data) will be retrieved simultaneously in a single - # network roundtrip (the goal being to avoid loading too many results in memory at the same - # time). If there are more results, additional requests will be used to retrieve them (either - # automatically if you iterate with the sync API, or explicitly with the async API's - # fetchNextPage method). - # - # The default is the same as the driver's normal request page size, - # i.e., 5000 (rows). - # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - page-size = ${datastax-java-driver.basic.request.page-size} - - # Whether the page-size option should be interpreted in number of rows or bytes. - # - # The default is false, i.e., the page size will be interpreted in number of rows. 
- # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - page-size-in-bytes = false - - # The maximum number of pages to return. - # - # The default is zero, which means retrieve all pages. - # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - max-pages = 0 - - # Returns the maximum number of pages per second. - # - # The default is zero, which means no limit. - # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - max-pages-per-second = 0 - - # The maximum number of pages that can be stored in the local queue. - # - # This value must be positive. The default is 4. - # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - max-enqueued-pages = 4 - - # Timeouts for continuous paging. - # - # Note that there is no global timeout for continuous paging as there is - # for regular queries, because continuous paging queries can take an arbitrarily - # long time to complete. - # - # Instead, timeouts are applied to each exchange between the driver and the coordinator. In - # other words, if the driver decides to retry, all timeouts are reset. - timeout { - - # How long to wait for the coordinator to send the first page. - # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - first-page = 2 seconds - - # How long to wait for the coordinator to send subsequent pages. 
- # - # Required: yes - # Modifiable at runtime: yes, the new value will be used for continuous requests issued after - # the change - # Overridable in a profile: yes - other-pages = 1 second - - } - } - - # Options to control Insights monitoring. - advanced.monitor-reporting { - # Whether to send monitoring events. - # - # The default is true. - # - # Required: no (defaults to true) - # Modifiable at runtime: no - # Overridable in a profile: no - enabled = true - } - - profiles { - # An example configuration profile for graph requests. - // my-graph-profile-example { - // graph { - // read-consistency-level = LOCAL_QUORUM - // write-consistency-level = LOCAL_ONE - // } - // } - - # An example pre-defined configuration profile for OLAP graph queries. - // graph-olap { - // graph { - // traversal-source = "a" // traversal source needs to be set to "a" for OLAP queries. - // } - // } - } -} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index bc541083253..72c4ac46161 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -205,6 +205,125 @@ datastax-java-driver { // secure-connect-bundle = /location/of/secure/connect/bundle } + # DataStax Insights monitoring. + basic.application { + # The name of the application using the session. + # + # It will be sent in the STARTUP protocol message for each new connection established by the + # driver. + # + # This can also be defined programmatically with DseSessionBuilder.withApplicationName(). If you + # specify both, the programmatic value takes precedence and this option is ignored. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // name = + + # The version of the application using the session. + # + # It will be sent in the STARTUP protocol message for each new connection established by the + # driver. 
+ # + # This can also be defined programmatically with DseSessionBuilder.withApplicationVersion(). If + # you specify both, the programmatic value takes precedence and this option is ignored. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // version = + } + + # Graph (DataStax Enterprise only) + basic.graph { + # The name of the graph targeted by graph statements. + # + # This can also be overridden programmatically with GraphStatement.setGraphName(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no. In particular, system queries -- such as creating or dropping a graph -- must be + # executed without a graph name (see also basic.graph.is-system-query). + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // name = your-graph-name + + # The traversal source to use for graph statements. + # + # This setting doesn't usually need to change, unless executing OLAP queries, which require the + # traversal source "a". + # + # This can also be overridden programmatically with GraphStatement.setTraversalSource(). If both + # are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + traversal-source = "g" + + # Whether a script statement represents a system query. + # + # Script statements that access the `system` variable *must not* specify a graph name (otherwise + # `system` is not available). However, if your application executes a lot of non-system + # statements, it is convenient to configure basic.graph.name to avoid repeating it every time. + # This option allows you to ignore that global graph name, for example in a specific profile. 
+ # + # This can also be overridden programmatically with ScriptGraphStatement.setSystemQuery(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to false) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // is-system-query = false + + # The read consistency level to use for graph statements. + # + # DSE Graph is able to distinguish between read and write timeouts for the internal storage + # queries that will be produced by a traversal. Hence the consistency level for reads and writes + # can be set separately. + # + # This can also be overridden programmatically with GraphStatement.setReadConsistencyLevel(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to request.basic.consistency) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // read-consistency-level = LOCAL_QUORUM + + # The write consistency level to use for graph statements. + # + # DSE Graph is able to distinguish between read and write timeouts for the internal storage + # queries that will be produced by a traversal. Hence the consistency level for reads and writes + # can be set separately. + # + # This can also be overridden programmatically with GraphStatement.setReadConsistencyLevel(). If + # both are specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to request.basic.consistency) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // write-consistency-level = LOCAL_ONE + + # How long the driver waits for a graph request to complete. 
This is a global limit on the + # duration of a session.execute() call, including any internal retries the driver might do. + # + # Graph statements behave a bit differently than regular CQL requests (hence this dedicated + # option instead of reusing basic.request.timeout): by default, the client timeout is not set, + # and the driver will just wait as long as needed until the server replies (which is itself + # governed by server-side timeout configuration). + # If you specify a client timeout with this option, then the driver will fail the request after + # the given time; note that the value is also sent along with the request, so that the server + # can also time out early and avoid wasting resources on a response that the client has already + # given up on. + # + # This can also be overridden programmatically with GraphStatement.setTimeout(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no (defaults to request.basic.consistency) + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // timeout = 10 seconds + } + # ADVANCED OPTIONS ------------------------------------------------------------------------------- @@ -402,22 +521,60 @@ datastax-java-driver { # Note that the contents of this section can be overridden programmatically with # SessionBuilder.withAuthProvider or SessionBuilder.withAuthCredentials. advanced.auth-provider { - # The class of the provider. If it is not qualified, the driver assumes that it resides in the - # package com.datastax.oss.driver.internal.core.auth. - # - # The driver provides a single implementation out of the box: PlainTextAuthProvider, that uses - # plain-text credentials. It requires the `username` and `password` options below. 
- # If storing clear text credentials in the configuration is not acceptable for you, consider - # providing them programmatically with SessionBuilder#withAuthCredentials, or writing your own - # provider implementation. + # The class of the provider. If it is not qualified, the driver assumes that it resides in one + # of the following packages: + # - com.datastax.oss.driver.internal.core.auth + # - com.datastax.dse.driver.internal.core.auth + # + # The DSE driver provides 3 implementations out of the box: + # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and + # `password` options below. Should be used only when authenticating against Apache + # Cassandra(R) clusters; not recommended when authenticating against DSE clusters. + # - DsePlainTextAuthProvider: provides SASL authentication using the PLAIN mechanism for DSE + # clusters secured with DseAuthenticator. It requires the `username` and `password` options + # below, and optionally, an `authorization-id`. + # - DseGssApiAuthProvider: provides GSSAPI authentication for DSE clusters secured with + # DseAuthenticator. Read the javadocs of this authenticator for detailed instructions. + # + # You can also specify a custom class that implements AuthProvider and has a public constructor + # with a DriverContext argument (to simplify this, the driver provides two abstract classes that + # can be extended: DsePlainTextAuthProviderBase and DseGssApiAuthProviderBase). + # + # Finally, you can configure a provider instance programmatically with + # DseSessionBuilder#withAuthProvider. In that case, it will take precedence over the + # configuration. + // class = DsePlainTextAuthProvider # - # You can also specify a custom class that implements AuthProvider and has a public - # constructor with a DriverContext argument. 
- // class = PlainTextAuthProvider - - # Sample configuration for the plain-text provider: + # Sample configuration for plain-text authentication providers: // username = cassandra // password = cassandra + # + # Proxy authentication: allows to login as another user or role (valid for both + # DsePlainTextAuthProvider and DseGssApiAuthProvider): + // authorization-id = userOrRole + # + # The settings below are only applicable to DseGssApiAuthProvider: + # + # Service name. For example, if in your dse.yaml configuration file the + # "kerberos_options/service_principal" setting is "cassandra/my.host.com@MY.REALM.COM", then set + # this option to "cassandra": + //service = "cassandra" + # + # Login configuration. It is also possible to provide login configuration through a standard + # JAAS configuration file. The below configuration is just an example, see all possible options + # here: + # https://docs.oracle.com/javase/6/docs/jre/api/security/jaas/spec/com/sun/security/auth/module/Krb5LoginModule.html + // login-configuration { + // principal = "cassandra@DATASTAX.COM" + // useKeyTab = "true" + // refreshKrb5Config = "true" + // keyTab = "/path/to/keytab/file" + // } + # + # Internal SASL properties, if any, such as QOP. + // sasl-properties { + // javax.security.sasl.qop = "auth-conf" + // } } # The SSL engine factory that will initialize an SSL engine for each new connection to a server. @@ -813,6 +970,129 @@ datastax-java-driver { log-warnings = true } + # Graph (DataStax Enterprise only) + advanced.graph { + # The sub-protocol the driver will use to communicate with DSE Graph, on top of the Cassandra + # native protocol. + # + # You should almost never have to change this: the driver sets it automatically, based on the + # information it has about the server. One exception is if you use the script API against a + # legacy DSE version (5.0.3 or older). In that case, you need to force the sub-protocol to + # "graphson-1.0". 
+ # + # This can also be overridden programmatically with GraphStatement.setSubProtocol(). If both are + # specified, the programmatic value takes precedence, and this option is ignored. + # + # Required: no + # Modifiable at runtime: yes, the new value will be used for requests issued after the change. + # Overridable in a profile: yes + // sub-protocol = "graphson-2.0" + } + + # Continuous paging (DataStax Enterprise only) + advanced.continuous-paging { + + # The page size. + # + # The value specified here can be interpreted in number of rows + # or in number of bytes, depending on the unit defined with page-unit (see below). + # + # It controls how many rows (or how much data) will be retrieved simultaneously in a single + # network roundtrip (the goal being to avoid loading too many results in memory at the same + # time). If there are more results, additional requests will be used to retrieve them (either + # automatically if you iterate with the sync API, or explicitly with the async API's + # fetchNextPage method). + # + # The default is the same as the driver's normal request page size, + # i.e., 5000 (rows). + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + page-size = ${datastax-java-driver.basic.request.page-size} + + # Whether the page-size option should be interpreted in number of rows or bytes. + # + # The default is false, i.e., the page size will be interpreted in number of rows. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + page-size-in-bytes = false + + # The maximum number of pages to return. + # + # The default is zero, which means retrieve all pages. 
+ # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages = 0 + + # Returns the maximum number of pages per second. + # + # The default is zero, which means no limit. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages-per-second = 0 + + # The maximum number of pages that can be stored in the local queue. + # + # This value must be positive. The default is 4. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-enqueued-pages = 4 + + # Timeouts for continuous paging. + # + # Note that there is no global timeout for continuous paging as there is + # for regular queries, because continuous paging queries can take an arbitrarily + # long time to complete. + # + # Instead, timeouts are applied to each exchange between the driver and the coordinator. In + # other words, if the driver decides to retry, all timeouts are reset. + timeout { + + # How long to wait for the coordinator to send the first page. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + first-page = 2 seconds + + # How long to wait for the coordinator to send subsequent pages. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + other-pages = 1 second + + } + } + + # DataStax Insights + advanced.monitor-reporting { + # Whether to send monitoring events. + # + # The default is true. 
+ # + # Required: no (defaults to true) + # Modifiable at runtime: no + # Overridable in a profile: no + enabled = true + } + advanced.metrics { # The session-level metrics (all disabled by default). # @@ -864,6 +1144,22 @@ datastax-java-driver { # The number of times a request was rejected with a RequestThrottlingException (exposed as # a Counter) // throttling.errors, + + # The throughput and latency percentiles of DSE continuous CQL requests (exposed as a + # Timer). + # + # This metric is a session-level metric and corresponds to the overall duration of the + # session.executeContinuously() call, including any retry. + # + # Note that this metric is analogous to the OSS driver's 'cql-requests' metrics, but for + # continuous paging requests only. Continuous paging requests do not update the + # 'cql-requests' metric, because they are usually much longer. Only the following metrics + # are updated during a continuous paging request: + # + # - At node level: all the usual metrics available for normal CQL requests, such as + # 'cql-messages' and error-related metrics; + # - At session level: only 'continuous-cql-requests' is updated (this metric). + // continuous-cql-requests, ] # Extra configuration (for the metrics that need it) @@ -916,6 +1212,44 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes } + + # Required: if the 'continuous-cql-requests' metric is enabled + # Modifiable at runtime: no + # Overridable in a profile: no + continuous-cql-requests { + + # The largest latency that we expect to record for a continuous paging request. + # + # This is used to scale internal data structures. If a higher recording is encountered at + # runtime, it is discarded and a warning is logged. 
+ highest-latency = 120 seconds + + # The number of significant decimal digits to which internal structures will maintain + # value resolution and separation (for example, 3 means that recordings up to 1 second + # will be recorded with a resolution of 1 millisecond or better). + # + # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a + # warning is logged. + significant-digits = 3 + + # The interval at which percentile data is refreshed. + # + # The driver records latency data in a "live" histogram, and serves results from a cached + # snapshot. Each time the snapshot gets older than the interval, the two are switched. + # Note that this switch happens upon fetching the metrics, so if you never fetch the + # recording interval might grow higher (that shouldn't be an issue in a production + # environment because you would typically have a metrics reporter that exports to a + # monitoring tool at a regular interval). + # + # In practice, this means that if you set this to 5 minutes, you're looking at data from a + # 5-minute interval in the past, that is at most 5 minutes old. If you fetch the metrics + # at a faster pace, you will observe the same data for 5 minutes until the interval + # expires. + # + # Note that this does not apply to the total count and rates (those are updated in real + # time). + refresh-interval = 5 minutes + } } # The node-level metrics (all disabled by default). # @@ -1507,5 +1841,20 @@ datastax-java-driver { # olap { # basic.request.timeout = 5 seconds # } + + # An example configuration profile for graph requests. + // my-graph-profile-example { + // graph { + // read-consistency-level = LOCAL_QUORUM + // write-consistency-level = LOCAL_ONE + // } + // } + + # An example pre-defined configuration profile for OLAP graph queries. + // graph-olap { + // graph { + // traversal-source = "a" // traversal source needs to be set to "a" for OLAP queries. 
+ // } + // } } } \ No newline at end of file diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java deleted file mode 100644 index 0a47fbc2cc1..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/api/core/config/DseDriverConfigLoaderTest.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.api.core.config; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfig; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import java.io.File; -import java.net.URL; -import java.time.Duration; -import org.junit.Test; - -public class DseDriverConfigLoaderTest { - - @Test - public void should_load_from_other_classpath_resource() { - DriverConfigLoader loader = DseDriverConfigLoader.fromClasspath("config/customApplication"); - DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); - // From customApplication.conf: - assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); - assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); - // From customApplication.json: - assertThat(config.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).isEqualTo(2000); - assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE)).isEqualTo(2000); - // From customApplication.properties: - assertThat(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.ONE.name()); - assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES)).isEqualTo(8); - // From reference.conf: - assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); - // From dse-reference.conf: - assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) - .isEqualTo(Duration.ofSeconds(2)); - } - - @Test - public void should_load_from_file() { - File file = new File("src/test/resources/config/customApplication.conf"); - 
assertThat(file).exists(); - DriverConfigLoader loader = DseDriverConfigLoader.fromFile(file); - DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); - // From customApplication.conf: - assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); - assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); - // From reference.conf: - assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); - // From dse-reference.conf: - assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) - .isEqualTo(Duration.ofSeconds(2)); - } - - @Test - public void should_load_from_url() throws Exception { - URL url = new File("src/test/resources/config/customApplication.conf").toURI().toURL(); - DriverConfigLoader loader = DseDriverConfigLoader.fromUrl(url); - DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); - // From customApplication.conf: - assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); - assertThat(config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); - // From reference.conf: - assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); - // From dse-reference.conf: - assertThat(config.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) - .isEqualTo(Duration.ofSeconds(2)); - } - - @Test - public void should_build_programmatically() { - DriverConfigLoader loader = - DseDriverConfigLoader.programmaticBuilder() - .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofMillis(500)) - .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 10) - .startProfile("slow") - .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) - .build(); - DriverConfig config = 
loader.getInitialConfig(); - - DriverExecutionProfile defaultProfile = config.getDefaultProfile(); - // From programmatic overrides: - assertThat(defaultProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); - assertThat(defaultProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); - // From reference.conf: - assertThat(defaultProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); - // From dse-reference.conf: - assertThat(defaultProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) - .isEqualTo(Duration.ofSeconds(2)); - - DriverExecutionProfile slowProfile = config.getProfile("slow"); - // From programmatic override: - assertThat(slowProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofSeconds(30)); - // Inherited from the default profile (where the option was overridden programmatically) - assertThat(slowProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES)).isEqualTo(10); - // Inherited from the default profile (where the option was pulled from reference.conf) - assertThat(slowProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) - .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); - // Inherited from the default profile (where the option was pulled from dse-reference.conf) - assertThat(slowProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE)) - .isEqualTo(Duration.ofSeconds(2)); - } -} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java index 437c16ec2b1..f4b18813b9e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java @@ -18,11 
+18,11 @@ import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import com.datastax.dse.driver.api.core.DseSession; -import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -56,7 +56,7 @@ public void should_execute_graph_query_on_authenticated_connection() { DseSession dseSession = SessionUtils.newSession( ccm, - DseDriverConfigLoader.programmaticBuilder() + DriverConfigLoader.programmaticBuilder() .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java index c4f6f16d92a..194f9e01d26 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/testinfra/DseSessionBuilderInstantiator.java @@ -16,7 +16,7 @@ package com.datastax.dse.driver.api.testinfra; import com.datastax.dse.driver.api.core.DseSession; -import com.datastax.dse.driver.api.core.config.DseDriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; 
import com.datastax.oss.driver.api.core.session.SessionBuilder; @@ -26,6 +26,6 @@ public class DseSessionBuilderInstantiator { } public static ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DseDriverConfigLoader.programmaticBuilder(); + return DriverConfigLoader.programmaticBuilder(); } } From 4995974b32f5cfe296ba387c8d414004446c3777 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 16 Oct 2019 20:30:57 -0700 Subject: [PATCH 203/979] Remove DseReplicationStrategyFactory --- .../core/context/DseDriverContext.java | 7 --- .../token/DseReplicationStrategyFactory.java | 43 ------------------- .../metadata/token/EverywhereStrategy.java | 41 ------------------ 3 files changed, 91 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 1757db82188..06a760fe4be 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -27,7 +27,6 @@ import com.datastax.dse.driver.internal.core.metadata.DseTopologyMonitor; import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParserFactory; import com.datastax.dse.driver.internal.core.metadata.schema.queries.DseSchemaQueriesFactory; -import com.datastax.dse.driver.internal.core.metadata.token.DseReplicationStrategyFactory; import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; @@ -53,7 +52,6 @@ 
import com.datastax.oss.driver.internal.core.metadata.TopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; -import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategyFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; import com.datastax.oss.driver.internal.core.session.RequestProcessor; @@ -223,11 +221,6 @@ protected TopologyMonitor buildTopologyMonitor() { return new DseTopologyMonitor(this); } - @Override - protected ReplicationStrategyFactory buildReplicationStrategyFactory() { - return new DseReplicationStrategyFactory(this); - } - @Override protected SchemaQueriesFactory buildSchemaQueriesFactory() { return new DseSchemaQueriesFactory(this); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java deleted file mode 100644 index 5027ad9d7de..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/DseReplicationStrategyFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.metadata.token; - -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.token.DefaultReplicationStrategyFactory; -import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategy; -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; -import java.util.Map; -import net.jcip.annotations.ThreadSafe; - -@ThreadSafe -public class DseReplicationStrategyFactory extends DefaultReplicationStrategyFactory { - public DseReplicationStrategyFactory(InternalDriverContext context) { - super(context); - } - - @Override - public ReplicationStrategy newInstance(Map replicationConfig) { - String strategyClass = replicationConfig.get("class"); - Preconditions.checkNotNull( - strategyClass, "Missing replication strategy class in " + replicationConfig); - switch (strategyClass) { - case "org.apache.cassandra.locator.EverywhereStrategy": - return new EverywhereStrategy(); - default: - return super.newInstance(replicationConfig); - } - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java deleted file mode 100644 index 237ec7922f1..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/token/EverywhereStrategy.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.metadata.token; - -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategy; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import net.jcip.annotations.ThreadSafe; - -@ThreadSafe -public class EverywhereStrategy implements ReplicationStrategy { - - @Override - public SetMultimap computeReplicasByToken( - Map tokenToPrimary, List ring) { - ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); - Collection nodes = tokenToPrimary.values(); - for (Token token : tokenToPrimary.keySet()) { - result = result.putAll(token, nodes); - } - return result.build(); - } -} From 4ca63d754e263e6992ad7dae7bfc8ce4de43befe Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 17 Oct 2019 11:35:22 -0700 Subject: [PATCH 204/979] Always use default DSE package for auth providers --- .../core/context/DseDriverContext.java | 29 ------------------- .../core/context/DefaultDriverContext.java | 3 +- core/src/main/resources/reference.conf | 6 ++-- 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 06a760fe4be..a2b5bf034df 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -32,10 +32,7 @@ import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; import 
com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; import com.datastax.dse.protocol.internal.ProtocolV4ClientCodecsForDse; -import com.datastax.oss.driver.api.core.auth.AuthProvider; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; @@ -57,7 +54,6 @@ import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.protocol.internal.FrameCodec; import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; @@ -67,7 +63,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.UUID; import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; @@ -262,28 +257,4 @@ protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBu public List getLifecycleListeners() { return listeners; } - - @Override - protected Map buildLoadBalancingPolicies() { - return Reflection.buildFromConfigProfiles( - this, - DefaultDriverOption.LOAD_BALANCING_POLICY, - LoadBalancingPolicy.class, - "com.datastax.oss.driver.internal.core.loadbalancing", - // Add the DSE default package - "com.datastax.dse.driver.internal.core.loadbalancing"); - } - - @Override - protected Optional buildAuthProvider(AuthProvider authProviderFromBuilder) { - return (authProviderFromBuilder != null) - ? 
Optional.of(authProviderFromBuilder) - : Reflection.buildFromConfig( - this, - DefaultDriverOption.AUTH_PROVIDER_CLASS, - AuthProvider.class, - "com.datastax.oss.driver.internal.core.auth", - // Add the DSE default package - "com.datastax.dse.driver.internal.core.auth"); - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 0e017d3a946..2acc3880c41 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -558,7 +558,8 @@ protected Optional buildAuthProvider(AuthProvider authProviderFrom this, DefaultDriverOption.AUTH_PROVIDER_CLASS, AuthProvider.class, - "com.datastax.oss.driver.internal.core.auth"); + "com.datastax.oss.driver.internal.core.auth", + "com.datastax.dse.driver.internal.core.auth"); } @NonNull diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 72c4ac46161..cd1b895cdd3 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -526,15 +526,17 @@ datastax-java-driver { # - com.datastax.oss.driver.internal.core.auth # - com.datastax.dse.driver.internal.core.auth # - # The DSE driver provides 3 implementations out of the box: + # The driver provides one implementation for Apache Cassandra(R): # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and # `password` options below. Should be used only when authenticating against Apache # Cassandra(R) clusters; not recommended when authenticating against DSE clusters. + # + # And two DSE-specific implementations: # - DsePlainTextAuthProvider: provides SASL authentication using the PLAIN mechanism for DSE # clusters secured with DseAuthenticator. 
It requires the `username` and `password` options # below, and optionally, an `authorization-id`. # - DseGssApiAuthProvider: provides GSSAPI authentication for DSE clusters secured with - # DseAuthenticator. Read the javadocs of this authenticator for detailed instructions. + # DseAuthenticator. See the example below and refer to the manual for detailed instructions. # # You can also specify a custom class that implements AuthProvider and has a public constructor # with a DriverContext argument (to simplify this, the driver provides two abstract classes that From 7a4405adc343c741a5887c5c229c9f3c3861b550 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 17 Oct 2019 15:27:33 -0700 Subject: [PATCH 205/979] Merge TopologyMonitor implementations --- .../core/context/DseDriverContext.java | 7 -- .../core/metadata/DseTopologyMonitor.java | 83 ------------------- .../core/metadata/DefaultTopologyMonitor.java | 44 +++++++++- .../core/metadata/NodeProperties.java | 29 ------- .../queries/DefaultSchemaQueriesFactory.java | 6 +- .../DefaultSchemaQueriesFactoryTest.java | 4 +- 6 files changed, 45 insertions(+), 128 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index a2b5bf034df..8114771a466 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -24,7 +24,6 @@ import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; import 
com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.metadata.DseTopologyMonitor; import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParserFactory; import com.datastax.dse.driver.internal.core.metadata.schema.queries.DseSchemaQueriesFactory; import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; @@ -46,7 +45,6 @@ import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; -import com.datastax.oss.driver.internal.core.metadata.TopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; @@ -211,11 +209,6 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } - @Override - protected TopologyMonitor buildTopologyMonitor() { - return new DseTopologyMonitor(this); - } - @Override protected SchemaQueriesFactory buildSchemaQueriesFactory() { return new DseSchemaQueriesFactory(this); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java deleted file mode 100644 index c8d9e1fc5de..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/DseTopologyMonitor.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.metadata; - -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.DefaultNodeInfo; -import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.net.InetSocketAddress; -import java.util.Set; -import net.jcip.annotations.ThreadSafe; - -@ThreadSafe -public class DseTopologyMonitor extends DefaultTopologyMonitor { - - public DseTopologyMonitor(InternalDriverContext context) { - super(context); - } - - @NonNull - @Override - protected DefaultNodeInfo.Builder nodeInfoBuilder( - @NonNull AdminRow row, - @Nullable InetSocketAddress broadcastRpcAddress, - @NonNull EndPoint localEndPoint) { - - // Fill default fields from standard columns: - DefaultNodeInfo.Builder builder = - super.nodeInfoBuilder(row, broadcastRpcAddress, localEndPoint); - - // Handle DSE-specific columns - String rawVersion = row.getString("dse_version"); - if (rawVersion != null) { - builder.withExtra(DseNodeProperties.DSE_VERSION, Version.parse(rawVersion)); - } - - ImmutableSet.Builder workloadsBuilder = 
ImmutableSet.builder(); - Boolean legacyGraph = row.getBoolean("graph"); // DSE 5.0 - if (legacyGraph != null && legacyGraph) { - workloadsBuilder.add("Graph"); - } - String legacyWorkload = row.getString("workload"); // DSE 5.0 (other than graph) - if (legacyWorkload != null) { - workloadsBuilder.add(legacyWorkload); - } - Set modernWorkloads = row.getSetOfString("workloads"); // DSE 5.1+ - if (modernWorkloads != null) { - workloadsBuilder.addAll(modernWorkloads); - } - builder.withExtra(DseNodeProperties.DSE_WORKLOADS, workloadsBuilder.build()); - - builder - .withExtra(DseNodeProperties.SERVER_ID, row.getString("server_id")) - .withExtra(DseNodeProperties.NATIVE_TRANSPORT_PORT, row.getInteger("native_transport_port")) - .withExtra( - DseNodeProperties.NATIVE_TRANSPORT_PORT_SSL, - row.getInteger("native_transport_port_ssl")) - .withExtra(DseNodeProperties.STORAGE_PORT, row.getInteger("storage_port")) - .withExtra(DseNodeProperties.STORAGE_PORT_SSL, row.getInteger("storage_port_ssl")) - .withExtra(DseNodeProperties.JMX_PORT, row.getInteger("jmx_port")); - - return builder; - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 7d87a33ffc7..21c993a4989 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.metadata; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -30,6 +31,7 @@ import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import 
com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.Error; import edu.umd.cs.findbugs.annotations.NonNull; @@ -45,6 +47,7 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -333,7 +336,7 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( listenAddress = new InetSocketAddress(listenInetAddress, listenPort); } - DefaultNodeInfo.Builder rv = + DefaultNodeInfo.Builder builder = DefaultNodeInfo.builder() .withEndPoint(endPoint) .withBroadcastRpcAddress(broadcastRpcAddress) @@ -347,9 +350,42 @@ protected DefaultNodeInfo.Builder nodeInfoBuilder( .withHostId(Objects.requireNonNull(row.getUuid("host_id"))) .withSchemaVersion(row.getUuid("schema_version")); - return row.contains("dse_version") - ? 
rv.withExtra(NodeProperties.DSE_VERSION, Version.parse(row.getString("dse_version"))) - : rv; + // Handle DSE-specific columns, if present + String rawVersion = row.getString("dse_version"); + if (rawVersion != null) { + builder.withExtra(DseNodeProperties.DSE_VERSION, Version.parse(rawVersion)); + } + + ImmutableSet.Builder workloadsBuilder = ImmutableSet.builder(); + Boolean legacyGraph = row.getBoolean("graph"); // DSE 5.0 + if (legacyGraph != null && legacyGraph) { + workloadsBuilder.add("Graph"); + } + String legacyWorkload = row.getString("workload"); // DSE 5.0 (other than graph) + if (legacyWorkload != null) { + workloadsBuilder.add(legacyWorkload); + } + Set modernWorkloads = row.getSetOfString("workloads"); // DSE 5.1+ + if (modernWorkloads != null) { + workloadsBuilder.addAll(modernWorkloads); + } + ImmutableSet workloads = workloadsBuilder.build(); + if (!workloads.isEmpty()) { + builder.withExtra(DseNodeProperties.DSE_WORKLOADS, workloads); + } + + // Note: withExtra discards null values + builder + .withExtra(DseNodeProperties.SERVER_ID, row.getString("server_id")) + .withExtra(DseNodeProperties.NATIVE_TRANSPORT_PORT, row.getInteger("native_transport_port")) + .withExtra( + DseNodeProperties.NATIVE_TRANSPORT_PORT_SSL, + row.getInteger("native_transport_port_ssl")) + .withExtra(DseNodeProperties.STORAGE_PORT, row.getInteger("storage_port")) + .withExtra(DseNodeProperties.STORAGE_PORT_SSL, row.getInteger("storage_port_ssl")) + .withExtra(DseNodeProperties.JMX_PORT, row.getInteger("jmx_port")); + + return builder; } /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java deleted file mode 100644 index b079b1df897..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeProperties.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright DataStax, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.metadata; - -import com.datastax.oss.driver.api.core.Version; - -/** The keys for the additional properties stored in {@link Node#getExtras()}. */ -public class NodeProperties { - - /** - * The DSE version that the node is running. - * - *

      The associated value in {@link Node#getExtras()} is a {@link Version}). - */ - public static final String DSE_VERSION = "DSE_VERSION"; -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java index 801d9931998..ccb760ee88f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java @@ -15,13 +15,13 @@ */ package com.datastax.oss.driver.internal.core.metadata.schema.queries; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.NodeProperties; import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; @@ -86,9 +86,9 @@ protected SchemaQueries newInstance( // A bit of custom logic for DSE 6.0.x. These versions report a Cassandra version of 4.0.0 // but don't have support for system_virtual_schema tables supported by that version. 
To // compensate we return the Cassandra 3 schema queries here for those versions - if (node.getExtras().containsKey(NodeProperties.DSE_VERSION)) { + if (node.getExtras().containsKey(DseNodeProperties.DSE_VERSION)) { - Object dseVersionObj = node.getExtras().get(NodeProperties.DSE_VERSION); + Object dseVersionObj = node.getExtras().get(DseNodeProperties.DSE_VERSION); assert (dseVersionObj instanceof Version); if (((Version) dseVersionObj).compareTo(Version.V6_7_0) < 0) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java index 1c221d448e1..61094e979c8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java @@ -19,13 +19,13 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.NodeProperties; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -110,7 +110,7 @@ public void should_return_correct_schema_queries_impl( when(mockNode.getExtras()) .thenReturn( ImmutableMap.of( - NodeProperties.DSE_VERSION, 
Version.parse(versionStr))); + DseNodeProperties.DSE_VERSION, Version.parse(versionStr))); }); DefaultSchemaQueriesFactory factory = buildFactory(); From 92b53b0df6215b0844985fb000072d0cca168f08 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 17 Oct 2019 16:05:55 -0700 Subject: [PATCH 206/979] Merge SchemaQueriesFactory implementations --- .../core/context/DseDriverContext.java | 7 -- .../queries/DseSchemaQueriesFactory.java | 72 ------------------- .../datastax/oss/driver/api/core/Version.java | 2 +- .../queries/DefaultSchemaQueriesFactory.java | 68 ++++++++++-------- 4 files changed, 39 insertions(+), 110 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 8114771a466..770a8837f63 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -25,7 +25,6 @@ import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParserFactory; -import com.datastax.dse.driver.internal.core.metadata.schema.queries.DseSchemaQueriesFactory; import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; @@ -46,7 +45,6 @@ import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import 
com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; import com.datastax.oss.driver.internal.core.session.RequestProcessor; @@ -209,11 +207,6 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } - @Override - protected SchemaQueriesFactory buildSchemaQueriesFactory() { - return new DseSchemaQueriesFactory(this); - } - @Override protected SchemaParserFactory buildSchemaParserFactory() { return new DseSchemaParserFactory(this); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java deleted file mode 100644 index 5d8d6d54dbe..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/DseSchemaQueriesFactory.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.metadata.schema.queries; - -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.channel.DriverChannel; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra21SchemaQueries; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra3SchemaQueries; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.Cassandra4SchemaQueries; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.DefaultSchemaQueriesFactory; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueries; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.concurrent.CompletableFuture; -import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@ThreadSafe -public class DseSchemaQueriesFactory extends DefaultSchemaQueriesFactory { - - private static final Logger LOG = LoggerFactory.getLogger(DefaultSchemaQueriesFactory.class); - - @NonNull private static final Version V5_0_0 = Version.parse("5.0.0"); - @NonNull private static final Version V6_7_0 = Version.parse("6.7.0"); - - public DseSchemaQueriesFactory(InternalDriverContext context) { - super(context); - } - - @Override - protected SchemaQueries newInstance( - Node node, DriverChannel channel, CompletableFuture refreshFuture) { - Object versionObj = node.getExtras().get(DseNodeProperties.DSE_VERSION); - Version version; - if (versionObj == null) { - LOG.warn("[{}] DSE version missing for {}, deferring to C* version", logPrefix, node); - return super.newInstance(node, 
channel, refreshFuture); - } - - version = ((Version) versionObj).nextStable(); - DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - LOG.debug("[{}] Sending schema queries to {} with DSE version {}", logPrefix, node, version); - // 4.8 is the oldest version supported, which uses C* 2.1 schema - if (version.compareTo(V5_0_0) < 0) { - return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); - } else if (version.compareTo(V6_7_0) < 0) { - // 5.0 - 6.7 uses C* 3.0 schema - return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); - } else { - // 6.7+ uses C* 4.0 schema - return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); - } - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index 8e69bcb6a1d..cb17fd60628 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -50,7 +50,7 @@ public class Version implements Comparable, Serializable { @NonNull public static final Version V2_2_0 = Objects.requireNonNull(parse("2.2.0")); @NonNull public static final Version V3_0_0 = Objects.requireNonNull(parse("3.0.0")); @NonNull public static final Version V4_0_0 = Objects.requireNonNull(parse("4.0.0")); - + @NonNull public static final Version V5_0_0 = Objects.requireNonNull(parse("5.0.0")); @NonNull public static final Version V6_7_0 = Objects.requireNonNull(parse("6.7.0")); private final int major; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java index ccb760ee88f..090dbb71b60 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java @@ -62,40 +62,48 @@ public SchemaQueries newInstance(CompletableFuture refreshFuture) { protected SchemaQueries newInstance( Node node, DriverChannel channel, CompletableFuture refreshFuture) { - Version version = node.getCassandraVersion(); - if (version == null) { - LOG.warn( - "[{}] Cassandra version missing for {}, defaulting to {}", - logPrefix, - node, - Version.V3_0_0); - version = Version.V3_0_0; - } else { - version = version.nextStable(); - } - DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - LOG.debug("[{}] Sending schema queries to {} with version {}", logPrefix, node, version); - if (version.compareTo(Version.V2_2_0) < 0) { - return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); - } else if (version.compareTo(Version.V3_0_0) < 0) { - return new Cassandra22SchemaQueries(channel, refreshFuture, config, logPrefix); - } else if (version.compareTo(Version.V4_0_0) < 0) { - return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); - } else { - // A bit of custom logic for DSE 6.0.x. These versions report a Cassandra version of 4.0.0 - // but don't have support for system_virtual_schema tables supported by that version. 
To - // compensate we return the Cassandra 3 schema queries here for those versions - if (node.getExtras().containsKey(DseNodeProperties.DSE_VERSION)) { + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - Object dseVersionObj = node.getExtras().get(DseNodeProperties.DSE_VERSION); - assert (dseVersionObj instanceof Version); - if (((Version) dseVersionObj).compareTo(Version.V6_7_0) < 0) { + Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (dseVersion != null) { + dseVersion = dseVersion.nextStable(); - return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); - } + LOG.debug( + "[{}] Sending schema queries to {} with DSE version {}", logPrefix, node, dseVersion); + // 4.8 is the oldest version supported, which uses C* 2.1 schema + if (dseVersion.compareTo(Version.V5_0_0) < 0) { + return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); + } else if (dseVersion.compareTo(Version.V6_7_0) < 0) { + // 5.0 - 6.7 uses C* 3.0 schema + return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); + } else { + // 6.7+ uses C* 4.0 schema + return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); + } + } else { + Version cassandraVersion = node.getCassandraVersion(); + if (cassandraVersion == null) { + LOG.warn( + "[{}] Cassandra version missing for {}, defaulting to {}", + logPrefix, + node, + Version.V3_0_0); + cassandraVersion = Version.V3_0_0; + } else { + cassandraVersion = cassandraVersion.nextStable(); + } + LOG.debug( + "[{}] Sending schema queries to {} with version {}", logPrefix, node, cassandraVersion); + if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { + return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); + } else if (cassandraVersion.compareTo(Version.V3_0_0) < 0) { + return new Cassandra22SchemaQueries(channel, refreshFuture, config, logPrefix); + } else if 
(cassandraVersion.compareTo(Version.V4_0_0) < 0) { + return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); + } else { + return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); } - return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); } } } From 15f6b0fce69b7cecf6aa12ade97201dd328f5eb0 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 17 Oct 2019 16:55:30 -0500 Subject: [PATCH 207/979] Apply changes from riptano/java-dse-driver 0a187ce..a2254a434 --- .../core/auth/DseGssApiAuthProviderBase.java | 11 +- .../core/graph/GraphStatementBuilderBase.java | 2 +- .../insights/ExecutionProfilesInfoFinder.java | 8 +- .../core/insights/InsightsClient.java | 85 ++++---- core/src/main/resources/reference.conf | 6 +- .../context/DseStartupOptionsBuilderTest.java | 5 +- .../graph/GraphStatementBuilderBaseTest.java | 54 +++++ .../insights/ExecutionProfileMockUtil.java | 7 + .../ExecutionProfilesInfoFinderTest.java | 14 ++ .../DseGssApiAuthProviderAlternateIT.java | 18 +- .../core/cql/reactive/ReactiveRetryIT.java | 202 ++++++++++++++++++ 11 files changed, 360 insertions(+), 52 deletions(-) create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRetryIT.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java index 09621279102..4fa9fed7f51 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java @@ -54,6 +54,12 @@ public abstract class DseGssApiAuthProviderBase implements AuthProvider { /** The name of the system property to use to specify the SASL service name. 
*/ public static final String SASL_SERVICE_NAME_PROPERTY = "dse.sasl.service"; + /** + * Legacy system property for SASL protocol name. Clients should migrate to + * SASL_SERVICE_NAME_PROPERTY above. + */ + private static final String LEGACY_SASL_PROTOCOL_PROPERTY = "dse.sasl.protocol"; + private static final Logger LOG = LoggerFactory.getLogger(DseGssApiAuthProviderBase.class); private final String logPrefix; @@ -267,7 +273,10 @@ protected GssApiAuthenticator( } String protocol = options.getSaslProtocol(); if (protocol == null) { - protocol = System.getProperty(SASL_SERVICE_NAME_PROPERTY, DEFAULT_SASL_SERVICE_NAME); + protocol = + System.getProperty( + SASL_SERVICE_NAME_PROPERTY, + System.getProperty(LEGACY_SASL_PROTOCOL_PROPERTY, DEFAULT_SASL_SERVICE_NAME)); } this.saslClient = Sasl.createSaslClient( diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java index 92be32e37d5..9892e673d23 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java @@ -37,7 +37,7 @@ public abstract class GraphStatementBuilderBase< protected Boolean isIdempotent; protected Duration timeout; protected Node node; - protected long timestamp; + protected long timestamp = Long.MIN_VALUE; protected DriverExecutionProfile executionProfile; protected String executionProfileName; private NullAllowingImmutableMap.Builder customPayloadBuilder; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java index b72c3e6f420..b31fec9c960 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java @@ -152,9 +152,11 @@ private void putIfExists( private LoadBalancingInfo getLoadBalancingInfo(DriverExecutionProfile driverExecutionProfile) { Map options = new LinkedHashMap<>(); - options.put( - "localDataCenter", - driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)); + if (driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) { + options.put( + "localDataCenter", + driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)); + } options.put( "filterFunction", driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS)); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index b1be52cc78d..9e3a2506359 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -51,6 +51,7 @@ import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.protocol.internal.request.Query; @@ -88,7 +89,7 @@ public class InsightsClient { private static final Map TAGS = ImmutableMap.of("language", "java"); private static final String STARTUP_VERSION_1_ID = "v1"; private static final String STATUS_VERSION_1_ID = "v1"; - private static final ObjectMapper objectMapper = new ObjectMapper(); + private static 
final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final int MAX_NUMBER_OF_STATUS_ERROR_LOGS = 5; static final String DEFAULT_JAVA_APPLICATION = "Default Java Application"; @@ -104,8 +105,9 @@ public class InsightsClient { private final ExecutionProfilesInfoFinder executionProfilesInfoFinder; private final ConfigAntiPatternsFinder configAntiPatternsFinder; private final DataCentersFinder dataCentersFinder; - private StackTraceElement[] initCallStackTrace; - private ScheduledFuture scheduleInsightsTask; + private final StackTraceElement[] initCallStackTrace; + + private volatile ScheduledFuture scheduleInsightsTask; public static InsightsClient createInsightsClient( InsightsConfiguration insightsConfiguration, @@ -147,24 +149,26 @@ public static InsightsClient createInsightsClient( } public CompletionStage sendStartupMessage() { - if (!shouldSendEvent()) { - return CompletableFuture.completedFuture(null); + try { + if (!shouldSendEvent()) { + return CompletableFuture.completedFuture(null); + } else { + String startupMessage = createStartupMessage(); + return sendJsonMessage(startupMessage) + .whenComplete( + (aVoid, throwable) -> { + if (throwable != null) { + LOGGER.debug( + "Error while sending startup message to Insights. Message was: " + + trimToFirst500characters(startupMessage), + throwable); + } + }); + } + } catch (Exception e) { + LOGGER.debug("Unexpected error while sending startup message to Insights.", e); + return CompletableFutures.failedFuture(e); } - final String startupMessage = createStartupMessage(); - CompletionStage result = sendJsonMessage(startupMessage); - - return result.whenComplete( - (aVoid, throwable) -> { - if (throwable != null) { - LOGGER.debug( - "Error while sending: " - + trimToFirst500characters(startupMessage) - + " to insights. 
Aborting sending all future: " - + STARTUP_MESSAGE_NAME - + " events", - throwable); - } - }); } private static String trimToFirst500characters(String startupMessage) { @@ -190,24 +194,27 @@ public void shutdown() { @VisibleForTesting public CompletionStage sendStatusMessage() { - if (!shouldSendEvent()) { - return CompletableFuture.completedFuture(null); - } - final String statusMessage = createStatusMessage(); - CompletionStage result = sendJsonMessage(statusMessage); - - return result.whenComplete( - (aVoid, throwable) -> { - if (throwable != null) { - if (numberOfStatusEventErrors.getAndIncrement() < MAX_NUMBER_OF_STATUS_ERROR_LOGS) { - LOGGER.debug( - "Error while sending: " - + trimToFirst500characters(statusMessage) - + " to insights.", - throwable); + try { + if (!shouldSendEvent()) { + return CompletableFuture.completedFuture(null); + } + String statusMessage = createStatusMessage(); + CompletionStage result = sendJsonMessage(statusMessage); + return result.whenComplete( + (aVoid, throwable) -> { + if (throwable != null) { + if (numberOfStatusEventErrors.getAndIncrement() < MAX_NUMBER_OF_STATUS_ERROR_LOGS) { + LOGGER.debug( + "Error while sending status message to Insights. 
Message was: " + + trimToFirst500characters(statusMessage), + throwable); + } } - } - }); + }); + } catch (Exception e) { + LOGGER.debug("Unexpected error while sending status message to Insights.", e); + return CompletableFutures.failedFuture(e); + } } private CompletionStage sendJsonMessage(String jsonMessage) { @@ -253,7 +260,7 @@ String createStartupMessage() { InsightsStartupData data = createStartupData(); try { - return objectMapper.writeValueAsString(new Insight<>(insightMetadata, data)); + return OBJECT_MAPPER.writeValueAsString(new Insight<>(insightMetadata, data)); } catch (JsonProcessingException e) { throw new InsightEventFormatException("Problem when creating: " + STARTUP_MESSAGE_NAME, e); } @@ -265,7 +272,7 @@ String createStatusMessage() { InsightsStatusData data = createStatusData(); try { - return objectMapper.writeValueAsString(new Insight<>(insightMetadata, data)); + return OBJECT_MAPPER.writeValueAsString(new Insight<>(insightMetadata, data)); } catch (JsonProcessingException e) { throw new InsightEventFormatException("Problem when creating: " + STATUS_MESSAGE_NAME, e); } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index cd1b895cdd3..137dd91aad3 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -559,7 +559,11 @@ datastax-java-driver { # # Service name. For example, if in your dse.yaml configuration file the # "kerberos_options/service_principal" setting is "cassandra/my.host.com@MY.REALM.COM", then set - # this option to "cassandra": + # this option to "cassandra". If this value is not explicitly set via configuration (in an + # application.conf or programmatically), the driver will attempt to set it via a System + # property. The property should be "dse.sasl.service". 
For backwards compatibility with 1.x + # versions of the driver, if "dse.sasl.service" is not set as a System property, the driver will + # attempt to use "dse.sasl.protocol" as a fallback (which is the property for the 1.x driver). //service = "cassandra" # # Login configuration. It is also possible to provide login configuration through a standard diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index 46c01fc2502..08a34bc8e3d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -54,6 +54,7 @@ public void before() { initMocks(this); when(configLoader.getInitialConfig()).thenReturn(driverConfig); when(driverConfig.getDefaultProfile()).thenReturn(defaultProfile); + when(defaultProfile.isDefined(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE)).thenReturn(true); } private void buildContext(UUID clientId, String applicationName, String applicationVersion) { @@ -180,10 +181,6 @@ public void should_use_configuration_when_no_programmatic_values_provided() { @Test public void should_ignore_configuration_when_programmatic_values_provided() { - when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) - .thenReturn("Config_App_Name"); - when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) - .thenReturn("Config_App_Version"); when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) .thenReturn("none"); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java new file mode 100644 index 00000000000..a4e7be3ecb9 --- /dev/null +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java @@ -0,0 +1,54 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphStatementBuilderBase; +import org.junit.Test; + +public class GraphStatementBuilderBaseTest { + + private static class MockGraphStatementBuilder + extends GraphStatementBuilderBase { + + @Override + public FluentGraphStatement build() { + + FluentGraphStatement rv = mock(FluentGraphStatement.class); + when(rv.getTimestamp()).thenReturn(this.timestamp); + return rv; + } + } + + @Test + public void should_use_timestamp_if_set() { + + MockGraphStatementBuilder builder = new MockGraphStatementBuilder(); + builder.setTimestamp(1); + assertThat(builder.build().getTimestamp()).isEqualTo(1); + } + + @Test + public void should_use_correct_default_timestamp_if_not_set() { + + MockGraphStatementBuilder builder = new MockGraphStatementBuilder(); + assertThat(builder.build().getTimestamp()).isEqualTo(Long.MIN_VALUE); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java index 10b319b1228..de8a4693d5e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java @@ -51,6 +51,7 @@ static DriverExecutionProfile mockDefaultExecutionProfile() { when(profile.getDuration(REQUEST_TIMEOUT)).thenReturn(Duration.ofMillis(100)); when(profile.getString(LOAD_BALANCING_POLICY_CLASS)).thenReturn("LoadBalancingPolicyImpl"); when(profile.isDefined(LOAD_BALANCING_FILTER_CLASS)).thenReturn(true); + when(profile.isDefined(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(true); when(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(DEFAULT_LOCAL_DC); when(profile.isDefined(SPECULATIVE_EXECUTION_MAX)).thenReturn(true); when(profile.getInt(SPECULATIVE_EXECUTION_MAX)).thenReturn(SPECEX_MAX_DEFAULT); @@ -83,6 +84,12 @@ static DriverExecutionProfile mockNonDefaultLoadBalancingExecutionProfile() { return profile; } + static DriverExecutionProfile mockUndefinedLocalDcExecutionProfile() { + DriverExecutionProfile profile = mockNonDefaultLoadBalancingExecutionProfile(); + when(profile.isDefined(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(false); + return profile; + } + static DriverExecutionProfile mockNonDefaultSpeculativeExecutionInfo() { DriverExecutionProfile profile = mockDefaultExecutionProfile(); when(profile.getString(SPECULATIVE_EXECUTION_POLICY_CLASS)) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java index ef39e19367c..5bf10cd3466 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java @@ -25,6 +25,7 @@ import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultRequestTimeoutExecutionProfile; import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultSerialConsistency; import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultSpeculativeExecutionInfo; +import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockUndefinedLocalDcExecutionProfile; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_LOAD_BALANCING_PACKAGE; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_SPECULATIVE_EXECUTION_PACKAGE; import static org.assertj.core.api.Assertions.assertThat; @@ -143,6 +144,19 @@ public static Object[][] executionProfileProvider() { null, null) }, + { + mockUndefinedLocalDcExecutionProfile(), + new SpecificExecutionProfile( + null, + new LoadBalancingInfo( + "NonDefaultLoadBalancing", + ImmutableMap.of("filterFunction", true), + DEFAULT_LOAD_BALANCING_PACKAGE), + null, + null, + null, + null) + }, { mockNonDefaultSpeculativeExecutionInfo(), new SpecificExecutionProfile( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java index 5aad5e2d7df..3e3641ca2d3 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java @@ -25,17 +25,29 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import org.junit.ClassRule; import org.junit.Test; +import org.junit.runner.RunWith; @DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +@RunWith(DataProviderRunner.class) public class DseGssApiAuthProviderAlternateIT { @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(true); + @DataProvider + public static Object[][] saslSystemProperties() { + return new Object[][] {{"dse.sasl.service"}, {"dse.sasl.protocol"}}; + } + @Test + @UseDataProvider("saslSystemProperties") public void - should_authenticate_using_kerberos_with_keytab_and_alternate_service_principal_using_system_property() { - System.setProperty("dse.sasl.service", "alternate"); + should_authenticate_using_kerberos_with_keytab_and_alternate_service_principal_using_system_property( + String saslSystemProperty) { + System.setProperty(saslSystemProperty, "alternate"); try (DseSession session = SessionUtils.newSession( ads.getCcm(), @@ -59,7 +71,7 @@ public class DseGssApiAuthProviderAlternateIT { Row row = session.execute("select * from system.local").one(); assertThat(row).isNotNull(); } finally { - System.clearProperty("dse.sasl.service"); + System.clearProperty(saslSystemProperty); } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRetryIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRetryIT.java new file mode 100644 index 00000000000..d8337323a53 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRetryIT.java @@ -0,0 +1,202 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.api.core.cql.reactive; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.unavailable; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.internal.verification.VerificationModeFactory.times; + +import com.codahale.metrics.Metric; +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.session.Request; +import 
com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.testinfra.loadbalancing.NodeComparator; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.core.retry.PerProfileRetryPolicyIT.NoRetryPolicy; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.codec.ConsistencyLevel; +import com.datastax.oss.simulacron.server.BoundCluster; +import com.google.common.collect.Iterables; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.reactivex.Flowable; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Queue; +import java.util.TreeSet; +import java.util.UUID; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +/** Small test to validate the application-level retry behavior explained in the manual. 
*/ +@Category(ParallelizableTests.class) +public class ReactiveRetryIT { + + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); + + private static final DseSessionRule SESSION_RULE = + new DseSessionRuleBuilder(SIMULACRON_RULE) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, true) + .withClass( + DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, + CyclingLoadBalancingPolicy.class) + .withClass(DefaultDriverOption.RETRY_POLICY_CLASS, NoRetryPolicy.class) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Collections.singletonList("errors.request.unavailables")) + .build()) + .build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); + + private static final String QUERY_STRING = "select * from foo"; + + private List nodes; + + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + @Before + public void createNodesList() { + nodes = new ArrayList<>(SESSION_RULE.session().getMetadata().getNodes().values()); + nodes.sort(NodeComparator.INSTANCE); + } + + @Test + public void should_retry_at_application_level() { + // Given + DseSession session = spy(SESSION_RULE.session()); + BoundCluster cluster = SIMULACRON_RULE.cluster(); + cluster.node(0).prime(when(QUERY_STRING).then(unavailable(ConsistencyLevel.ONE, 1, 0))); + cluster.node(1).prime(when(QUERY_STRING).then(unavailable(ConsistencyLevel.ONE, 1, 0))); + cluster.node(2).prime(when(QUERY_STRING).then(rows().row("col1", "Yay!"))); + + // When + ReactiveRow row = + Flowable.defer(() -> session.executeReactive(QUERY_STRING)) + .retry( + (retry, error) -> { + assertThat(error).isInstanceOf(UnavailableException.class); + UnavailableException ue = (UnavailableException) error; + Node coordinator = ue.getCoordinator(); + if (retry == 1) { + 
assertCoordinator(0, coordinator); + return true; + } else if (retry == 2) { + assertCoordinator(1, coordinator); + return true; + } else { + fail("Unexpected retry attempt"); + return false; + } + }) + .blockingLast(); + + // Then + assertThat(row.getString(0)).isEqualTo("Yay!"); + verify(session, times(3)).executeReactive(QUERY_STRING); + assertUnavailableMetric(0, 1L); + assertUnavailableMetric(1, 1L); + assertUnavailableMetric(2, 0L); + } + + private void assertCoordinator(int expectedNodeIndex, Node actual) { + Node expected = nodes.get(expectedNodeIndex); + assertThat(actual).isSameAs(expected); + } + + private void assertUnavailableMetric(int nodeIndex, long expectedUnavailableCount) { + Metrics metrics = SESSION_RULE.session().getMetrics().orElseThrow(AssertionError::new); + Node node = nodes.get(nodeIndex); + Optional expectedMetric = metrics.getNodeMetric(node, DefaultNodeMetric.UNAVAILABLES); + assertThat(expectedMetric) + .isPresent() + .hasValueSatisfying( + metric -> assertThat(metric).extracting("count").isEqualTo(expectedUnavailableCount)); + } + + public static class CyclingLoadBalancingPolicy implements LoadBalancingPolicy { + + private final TreeSet nodes = new TreeSet<>(NodeComparator.INSTANCE); + private volatile Iterator iterator = Iterables.cycle(nodes).iterator(); + + @SuppressWarnings("unused") + public CyclingLoadBalancingPolicy(DriverContext context, String profileName) { + // constructor needed for loading via config. 
+ } + + @Override + public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { + this.nodes.addAll(nodes.values()); + this.nodes.forEach(n -> distanceReporter.setDistance(n, NodeDistance.LOCAL)); + iterator = Iterables.cycle(this.nodes).iterator(); + } + + @NonNull + @Override + public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { + return new ArrayDeque<>(Collections.singleton(iterator.next())); + } + + @Override + public void onAdd(@NonNull Node node) {} + + @Override + public void onUp(@NonNull Node node) {} + + @Override + public void onDown(@NonNull Node node) {} + + @Override + public void onRemove(@NonNull Node node) {} + + @Override + public void close() {} + } +} From adc4aeb62d9c133fd596967305141344bbfd5441 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 1 Nov 2019 10:59:49 -0700 Subject: [PATCH 208/979] Merge schema parser factories Propagate the control node in SchemaRows in order to have a single factory that can select the correct parser. If the control node is a DSE node, then schema metadata objects can be cast to their DSE counterparts. 
--- .../core/context/DseDriverContext.java | 7 --- .../parsing/DseSchemaParserFactory.java | 37 ------------ .../parsing/DefaultSchemaParserFactory.java | 5 +- .../queries/Cassandra21SchemaQueries.java | 4 +- .../queries/Cassandra22SchemaQueries.java | 4 +- .../queries/Cassandra3SchemaQueries.java | 4 +- .../queries/Cassandra4SchemaQueries.java | 4 +- .../queries/CassandraSchemaQueries.java | 9 +-- .../schema/queries/CassandraSchemaRows.java | 57 ++++++++++++++++--- .../queries/DefaultSchemaQueriesFactory.java | 14 ++--- .../metadata/schema/queries/SchemaRows.java | 6 ++ .../schema/parsing/SchemaParserTest.java | 2 +- .../schema/parsing/SchemaParserTestBase.java | 12 ++++ .../schema/parsing/TableParserTest.java | 12 ++-- .../schema/parsing/ViewParserTest.java | 2 +- .../queries/Cassandra21SchemaQueriesTest.java | 8 ++- .../queries/Cassandra22SchemaQueriesTest.java | 8 ++- .../queries/Cassandra3SchemaQueriesTest.java | 12 ++-- 18 files changed, 121 insertions(+), 86 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 770a8837f63..2f334fed4c3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -24,7 +24,6 @@ import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParserFactory; import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; import 
com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; @@ -44,7 +43,6 @@ import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; -import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; import com.datastax.oss.driver.internal.core.session.RequestProcessor; @@ -207,11 +205,6 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } - @Override - protected SchemaParserFactory buildSchemaParserFactory() { - return new DseSchemaParserFactory(this); - } - @Override protected MetricsFactory buildMetricsFactory() { return new DseDropwizardMetricsFactory(this); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java deleted file mode 100644 index 1428b8f5873..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParserFactory.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.metadata.schema.parsing; - -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParser; -import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; -import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; -import net.jcip.annotations.ThreadSafe; - -@ThreadSafe -public class DseSchemaParserFactory implements SchemaParserFactory { - - private final InternalDriverContext context; - - public DseSchemaParserFactory(InternalDriverContext context) { - this.context = context; - } - - @Override - public SchemaParser newInstance(SchemaRows rows) { - return new DseSchemaParser(rows, context); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DefaultSchemaParserFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DefaultSchemaParserFactory.java index 9a4a5bf148a..29c6356979d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DefaultSchemaParserFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DefaultSchemaParserFactory.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.metadata.schema.parsing.DseSchemaParser; 
import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; import net.jcip.annotations.ThreadSafe; @@ -30,6 +32,7 @@ public DefaultSchemaParserFactory(InternalDriverContext context) { @Override public SchemaParser newInstance(SchemaRows rows) { - return new CassandraSchemaParser(rows, context); + boolean isDse = rows.getNode().getExtras().containsKey(DseNodeProperties.DSE_VERSION); + return isDse ? new DseSchemaParser(rows, context) : new CassandraSchemaParser(rows, context); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java index 556c9c58b6b..e17e5e4eeda 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -26,10 +27,11 @@ public class Cassandra21SchemaQueries extends CassandraSchemaQueries { public Cassandra21SchemaQueries( DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, false, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java index 130599b86e2..3e3076477f5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -26,10 +27,11 @@ public class Cassandra22SchemaQueries extends CassandraSchemaQueries { public Cassandra22SchemaQueries( DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, false, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java index c2c97873624..90a0907417f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -26,10 +27,11 @@ public class 
Cassandra3SchemaQueries extends CassandraSchemaQueries { public Cassandra3SchemaQueries( DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, true, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java index 641a97119b9..d0d989fa567 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -26,10 +27,11 @@ public class Cassandra4SchemaQueries extends Cassandra3SchemaQueries { public Cassandra4SchemaQueries( DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java index f0ec6211e15..be3241979f6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; @@ -44,7 +45,7 @@ public abstract class CassandraSchemaQueries implements SchemaQueries { private final DriverChannel channel; private final EventExecutor adminExecutor; - private final boolean isCassandraV3; + private final Node node; private final String logPrefix; private final Duration timeout; private final int pageSize; @@ -62,13 +63,13 @@ public abstract class CassandraSchemaQueries implements SchemaQueries { protected CassandraSchemaQueries( DriverChannel channel, - boolean isCassandraV3, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { this.channel = channel; this.adminExecutor = channel.eventLoop(); - this.isCassandraV3 = isCassandraV3; + this.node = node; this.refreshFuture = refreshFuture; this.logPrefix = logPrefix; this.timeout = config.getDuration(DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT); @@ -129,7 +130,7 @@ public CompletionStage execute() { private void executeOnAdminExecutor() { assert adminExecutor.inEventLoop(); - schemaRowsBuilder = new CassandraSchemaRows.Builder(isCassandraV3, refreshFuture, logPrefix); + schemaRowsBuilder = new CassandraSchemaRows.Builder(node, refreshFuture, logPrefix); query(selectKeyspacesQuery() + whereClause, schemaRowsBuilder::withKeyspaces); query(selectTypesQuery() + whereClause, schemaRowsBuilder::withTypes); diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java index 49a49764021..cf5fbe9d7f6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java @@ -15,8 +15,11 @@ */ package com.datastax.oss.driver.internal.core.metadata.schema.queries; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameParser; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeCqlNameParser; @@ -26,6 +29,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -37,6 +41,7 @@ @Immutable public class CassandraSchemaRows implements SchemaRows { + private final Node node; private final DataTypeParser dataTypeParser; private final CompletableFuture refreshFuture; private final List keyspaces; @@ -52,8 +57,9 @@ public class CassandraSchemaRows implements SchemaRows { private final Map> indexes; private CassandraSchemaRows( - boolean isCassandraV3, + Node node, CompletableFuture refreshFuture, + DataTypeParser dataTypeParser, List keyspaces, List virtualKeyspaces, Multimap 
tables, @@ -65,8 +71,8 @@ private CassandraSchemaRows( Multimap types, Multimap functions, Multimap aggregates) { - this.dataTypeParser = - isCassandraV3 ? new DataTypeCqlNameParser() : new DataTypeClassNameParser(); + this.node = node; + this.dataTypeParser = dataTypeParser; this.refreshFuture = refreshFuture; this.keyspaces = keyspaces; this.virtualKeyspaces = virtualKeyspaces; @@ -81,6 +87,12 @@ private CassandraSchemaRows( this.aggregates = aggregates; } + @NonNull + @Override + public Node getNode() { + return node; + } + @Override public DataTypeParser dataTypeParser() { return dataTypeParser; @@ -149,8 +161,9 @@ public Map> indexes() { public static class Builder { private static final Logger LOG = LoggerFactory.getLogger(Builder.class); - private final boolean isCassandraV3; + private final Node node; private final CompletableFuture refreshFuture; + private final DataTypeParser dataTypeParser; private final String tableNameColumn; private final String logPrefix; private final ImmutableList.Builder keyspacesBuilder = ImmutableList.builder(); @@ -174,12 +187,37 @@ public static class Builder { private final Map> indexesBuilders = new LinkedHashMap<>(); - public Builder( - boolean isCassandraV3, CompletableFuture refreshFuture, String logPrefix) { - this.isCassandraV3 = isCassandraV3; + public Builder(Node node, CompletableFuture refreshFuture, String logPrefix) { + this.node = node; this.refreshFuture = refreshFuture; this.logPrefix = logPrefix; - this.tableNameColumn = isCassandraV3 ? "table_name" : "columnfamily_name"; + if (isCassandraV3OrAbove(node)) { + this.tableNameColumn = "table_name"; + this.dataTypeParser = new DataTypeCqlNameParser(); + } else { + this.tableNameColumn = "columnfamily_name"; + this.dataTypeParser = new DataTypeClassNameParser(); + } + } + + private static boolean isCassandraV3OrAbove(Node node) { + // We already did those checks in DefaultSchemaQueriesFactory. 
+ // We could pass along booleans (isCassandraV3, isDse...), but passing the whole Node is + // better for maintainability, in case we need to do more checks in downstream components in + // the future. + Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (dseVersion != null) { + dseVersion = dseVersion.nextStable(); + return dseVersion.compareTo(Version.V5_0_0) >= 0; + } else { + Version cassandraVersion = node.getCassandraVersion(); + if (cassandraVersion == null) { + cassandraVersion = Version.V3_0_0; + } else { + cassandraVersion = cassandraVersion.nextStable(); + } + return cassandraVersion.compareTo(Version.V3_0_0) >= 0; + } } public Builder withKeyspaces(Iterable rows) { @@ -284,8 +322,9 @@ private void putByKeyspaceAndTable( public CassandraSchemaRows build() { return new CassandraSchemaRows( - isCassandraV3, + node, refreshFuture, + dataTypeParser, keyspacesBuilder.build(), virtualKeyspacesBuilder.build(), tablesBuilder.build(), diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java index 090dbb71b60..89ec59f3408 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java @@ -73,13 +73,13 @@ protected SchemaQueries newInstance( "[{}] Sending schema queries to {} with DSE version {}", logPrefix, node, dseVersion); // 4.8 is the oldest version supported, which uses C* 2.1 schema if (dseVersion.compareTo(Version.V5_0_0) < 0) { - return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra21SchemaQueries(channel, node, refreshFuture, config, logPrefix); } else if (dseVersion.compareTo(Version.V6_7_0) < 0) { // 5.0 
- 6.7 uses C* 3.0 schema - return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra3SchemaQueries(channel, node, refreshFuture, config, logPrefix); } else { // 6.7+ uses C* 4.0 schema - return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra4SchemaQueries(channel, node, refreshFuture, config, logPrefix); } } else { Version cassandraVersion = node.getCassandraVersion(); @@ -96,13 +96,13 @@ protected SchemaQueries newInstance( LOG.debug( "[{}] Sending schema queries to {} with version {}", logPrefix, node, cassandraVersion); if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { - return new Cassandra21SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra21SchemaQueries(channel, node, refreshFuture, config, logPrefix); } else if (cassandraVersion.compareTo(Version.V3_0_0) < 0) { - return new Cassandra22SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra22SchemaQueries(channel, node, refreshFuture, config, logPrefix); } else if (cassandraVersion.compareTo(Version.V4_0_0) < 0) { - return new Cassandra3SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra3SchemaQueries(channel, node, refreshFuture, config, logPrefix); } else { - return new Cassandra4SchemaQueries(channel, refreshFuture, config, logPrefix); + return new Cassandra4SchemaQueries(channel, node, refreshFuture, config, logPrefix); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java index b8242517241..c96976dcb8f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java @@ -17,9 +17,11 @@ import 
com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeParser; import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -32,6 +34,10 @@ */ public interface SchemaRows { + /** The node that was used to retrieve the schema information. */ + @NonNull + Node getNode(); + List keyspaces(); List virtualKeyspaces(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java index 7109910705f..4c770e57046 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java @@ -137,7 +137,7 @@ public void should_parse_multiple_keyspaces() { } private MetadataRefresh parse(Consumer builderConfig) { - CassandraSchemaRows.Builder builder = new CassandraSchemaRows.Builder(true, null, "test"); + CassandraSchemaRows.Builder builder = new CassandraSchemaRows.Builder(NODE_3_0, null, "test"); builderConfig.accept(builder); SchemaRows rows = builder.build(); return new CassandraSchemaParser(rows, context).parse(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java index 9adce5643d9..009a2db614f 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java @@ -20,12 +20,15 @@ import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultMetadata; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.nio.ByteBuffer; +import java.util.Collections; import java.util.List; import org.junit.runner.RunWith; import org.mockito.Mock; @@ -34,6 +37,8 @@ @RunWith(MockitoJUnitRunner.Silent.class) public abstract class SchemaParserTestBase { + protected static final Node NODE_2_2 = mockNode(Version.V2_2_0); + protected static final Node NODE_3_0 = mockNode(Version.V3_0_0); protected static final CqlIdentifier KEYSPACE_ID = CqlIdentifier.fromInternal("ks"); @Mock protected DefaultMetadata currentMetadata; @Mock protected InternalDriverContext context; @@ -291,4 +296,11 @@ protected static AdminRow mockLegacyKeyspaceRow(String keyspaceName) { return row; } + + private static Node mockNode(Version version) { + Node node = mock(Node.class); + when(node.getExtras()).thenReturn(Collections.emptyMap()); + when(node.getCassandraVersion()).thenReturn(version); + return node; + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index 3fab5fc11b1..3b081d33cbf 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -19,6 +19,7 @@ import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.IndexKind; @@ -184,21 +185,18 @@ private void checkTable(TableMetadata table) { } private SchemaRows legacyRows(AdminRow tableRow, Iterable columnRows) { - return rows(tableRow, columnRows, null, false); + return rows(tableRow, columnRows, null, NODE_2_2); } private SchemaRows modernRows( AdminRow tableRow, Iterable columnRows, Iterable indexesRows) { - return rows(tableRow, columnRows, indexesRows, true); + return rows(tableRow, columnRows, indexesRows, NODE_3_0); } private SchemaRows rows( - AdminRow tableRow, - Iterable columnRows, - Iterable indexesRows, - boolean isCassandraV3) { + AdminRow tableRow, Iterable columnRows, Iterable indexesRows, Node node) { CassandraSchemaRows.Builder builder = - new CassandraSchemaRows.Builder(isCassandraV3, null, "test") + new CassandraSchemaRows.Builder(node, null, "test") .withTables(ImmutableList.of(tableRow)) .withColumns(columnRows); if (indexesRows != null) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java index 6ba458bebfb..4e92d9253bc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java @@ -86,7 +86,7 @@ public void 
should_parse_view() { } private SchemaRows rows(AdminRow viewRow, Iterable columnRows) { - return new CassandraSchemaRows.Builder(true, null, "test") + return new CassandraSchemaRows.Builder(NODE_3_0, null, "test") .withViews(ImmutableList.of(viewRow)) .withColumns(columnRows) .build(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java index 9fbfa0e7349..8f6e87bd714 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Collections; @@ -42,7 +43,7 @@ public void should_query() { .thenReturn(Collections.emptyList()); SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); @@ -74,6 +75,8 @@ public void should_query() { assertThatStage(result) .isSuccess( rows -> { + assertThat(rows.getNode()).isEqualTo(node); + // Keyspace assertThat(rows.keyspaces()).hasSize(2); assertThat(rows.keyspaces().get(0).getString("keyspace_name")).isEqualTo("ks1"); @@ -117,10 +120,11 @@ static class SchemaQueriesWithMockedChannel extends Cassandra21SchemaQueries { SchemaQueriesWithMockedChannel( 
DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java index 7fd37d2541a..46ba1448dbc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Collections; @@ -42,7 +43,7 @@ public void should_query() { .thenReturn(Collections.emptyList()); SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); @@ -84,6 +85,8 @@ public void should_query() { assertThatStage(result) .isSuccess( rows -> { + assertThat(rows.getNode()).isEqualTo(node); + // Keyspace assertThat(rows.keyspaces()).hasSize(2); assertThat(rows.keyspaces().get(0).getString("keyspace_name")).isEqualTo("ks1"); @@ -138,10 +141,11 @@ static class SchemaQueriesWithMockedChannel extends Cassandra22SchemaQueries { SchemaQueriesWithMockedChannel( DriverChannel channel, + Node node, CompletableFuture 
refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index 0e708238647..88b45481747 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -62,7 +63,7 @@ public void should_query_with_keyspace_filter() { private void should_query_with_where_clause(String whereClause) { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); // Keyspace @@ -113,6 +114,8 @@ private void should_query_with_where_clause(String whereClause) { assertThatStage(result) .isSuccess( rows -> { + assertThat(rows.getNode()).isEqualTo(node); + // Keyspace assertThat(rows.keyspaces()).hasSize(2); assertThat(rows.keyspaces().get(0).getString("keyspace_name")).isEqualTo("ks1"); @@ -178,7 +181,7 @@ private void should_query_with_where_clause(String whereClause) { @Test public void 
should_query_with_paging() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); // Keyspace @@ -242,7 +245,7 @@ public void should_query_with_paging() { @Test public void should_ignore_malformed_rows() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); // Keyspace @@ -347,10 +350,11 @@ static class SchemaQueriesWithMockedChannel extends Cassandra3SchemaQueries { SchemaQueriesWithMockedChannel( DriverChannel channel, + Node node, CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, refreshFuture, config, logPrefix); + super(channel, node, refreshFuture, config, logPrefix); } @Override From ee5ad885f3915cc854073cf4a5717597f73dee94 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 4 Nov 2019 13:11:21 -0800 Subject: [PATCH 209/979] Merge protocol version registries - always register all protocol versions in the frame codec - merge ProtocolVersionRegistry implementations into a single one: negotiation will always start at DSE_V2, and downgrade to the OSS versions if the server is not DSE. 
- remove a couple of unused methods on ProtocolVersionRegistry --- .../core/context/DseDriverContext.java | 27 --- .../oss/driver/api/core/ProtocolVersion.java | 3 + .../CassandraProtocolVersionRegistry.java | 199 ------------------ .../core/DefaultProtocolVersionRegistry.java} | 189 +++++++++++------ .../core/ProtocolVersionRegistry.java | 10 - .../core/context/DefaultDriverContext.java | 19 +- ...tocolVersionRegistryHighestCommonTest.java | 107 ---------- .../CassandraProtocolVersionRegistryTest.java | 121 ----------- .../DefaultProtocolVersionRegistryTest.java} | 52 +++-- .../core/channel/ProtocolInitHandlerTest.java | 4 +- .../internal/core/cql/StatementSizeTest.java | 4 +- 11 files changed, 176 insertions(+), 559 deletions(-) delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java rename core/src/main/java/com/datastax/{dse/driver/internal/core/DseProtocolVersionRegistry.java => oss/driver/internal/core/DefaultProtocolVersionRegistry.java} (50%) delete mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryHighestCommonTest.java delete mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryTest.java rename core/src/test/java/com/datastax/{dse/driver/internal/core/DseProtocolVersionRegistryTest.java => oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java} (71%) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 2f334fed4c3..68823249647 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -16,7 +16,6 @@ package com.datastax.dse.driver.internal.core.context; import 
com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.internal.core.DseProtocolVersionRegistry; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; @@ -26,9 +25,6 @@ import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; -import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; -import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; -import com.datastax.dse.protocol.internal.ProtocolV4ClientCodecsForDse; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; @@ -36,7 +32,6 @@ import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.LifecycleListener; import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; @@ -44,15 +39,10 @@ import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; import com.datastax.oss.driver.internal.core.session.RequestProcessor; import 
com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.protocol.internal.FrameCodec; -import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; -import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; import edu.umd.cs.findbugs.annotations.NonNull; -import io.netty.buffer.ByteBuf; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -126,23 +116,6 @@ public DseDriverContext( .build()); } - @Override - protected ProtocolVersionRegistry buildProtocolVersionRegistry() { - return new DseProtocolVersionRegistry(getSessionName()); - } - - @Override - protected FrameCodec buildFrameCodec() { - return new FrameCodec<>( - new ByteBufPrimitiveCodec(getNettyOptions().allocator()), - getCompressor(), - new ProtocolV3ClientCodecs(), - new ProtocolV4ClientCodecsForDse(), - new ProtocolV5ClientCodecs(), - new DseProtocolV1ClientCodecs(), - new DseProtocolV2ClientCodecs()); - } - @Override protected RequestProcessorRegistry buildRequestProcessorRegistry() { String logPrefix = getSessionName(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java index cbc061432d3..a633bcf892f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.core; +import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.oss.driver.api.core.detach.Detachable; import edu.umd.cs.findbugs.annotations.NonNull; @@ -30,6 +31,8 @@ public interface ProtocolVersion { ProtocolVersion V3 = DefaultProtocolVersion.V3; ProtocolVersion V4 = DefaultProtocolVersion.V4; ProtocolVersion V5 = DefaultProtocolVersion.V5; + ProtocolVersion DSE_V1 = DseProtocolVersion.DSE_V1; + ProtocolVersion 
DSE_V2 = DseProtocolVersion.DSE_V2; /** The default version used for {@link Detachable detached} objects. */ // Implementation note: we can't use the ProtocolVersionRegistry here, this has to be a diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java deleted file mode 100644 index d84b13cb72d..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistry.java +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.core; - -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import java.util.Collection; -import java.util.Map; -import java.util.NavigableMap; -import java.util.Optional; -import java.util.SortedSet; -import java.util.TreeMap; -import java.util.TreeSet; -import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Built-in implementation of the protocol version registry, that supports the protocol versions of - * Apache Cassandra. - * - *

      This can be overridden with a custom implementation by subclassing {@link - * DefaultDriverContext}. - * - * @see DefaultProtocolVersion - */ -@ThreadSafe -public class CassandraProtocolVersionRegistry implements ProtocolVersionRegistry { - - private static final Logger LOG = LoggerFactory.getLogger(CassandraProtocolVersionRegistry.class); - private static final ImmutableList values = - ImmutableList.builder().add(DefaultProtocolVersion.values()).build(); - - private final String logPrefix; - private final NavigableMap versionsByCode; - - public CassandraProtocolVersionRegistry(String logPrefix) { - this(logPrefix, DefaultProtocolVersion.values()); - } - - protected CassandraProtocolVersionRegistry(String logPrefix, ProtocolVersion[]... versionRanges) { - this.logPrefix = logPrefix; - this.versionsByCode = byCode(versionRanges); - } - - @Override - public ProtocolVersion fromCode(int code) { - ProtocolVersion protocolVersion = versionsByCode.get(code); - if (protocolVersion == null) { - throw new IllegalArgumentException("Unknown protocol version code: " + code); - } - return protocolVersion; - } - - @Override - public ProtocolVersion fromName(String name) { - for (ProtocolVersion version : versionsByCode.values()) { - if (version.name().equals(name)) { - return version; - } - } - throw new IllegalArgumentException("Unknown protocol version name: " + name); - } - - @Override - public ProtocolVersion highestNonBeta() { - ProtocolVersion highest = versionsByCode.lastEntry().getValue(); - if (!highest.isBeta()) { - return highest; - } else { - return downgrade(highest) - .orElseThrow(() -> new AssertionError("There should be at least one non-beta version")); - } - } - - @Override - public Optional downgrade(ProtocolVersion version) { - Map.Entry previousEntry = - versionsByCode.lowerEntry(version.getCode()); - if (previousEntry == null) { - return Optional.empty(); - } else { - ProtocolVersion previousVersion = previousEntry.getValue(); - // Beta versions are 
skipped during negotiation - return previousVersion.isBeta() ? downgrade(previousVersion) : Optional.of(previousVersion); - } - } - - @Override - public ProtocolVersion highestCommon(Collection nodes) { - if (nodes == null || nodes.isEmpty()) { - throw new IllegalArgumentException("Expected at least one node"); - } - - SortedSet candidates = new TreeSet<>(); - - for (DefaultProtocolVersion version : DefaultProtocolVersion.values()) { - // Beta versions always need to be forced, and we only call this method if the version - // wasn't forced - if (!version.isBeta()) { - candidates.add(version); - } - } - - // The C*<=>protocol mapping is hardcoded in the code below, I don't see a need to be more - // sophisticated right now. - for (Node node : nodes) { - Version version = node.getCassandraVersion(); - if (version == null) { - LOG.warn( - "[{}] Node {} reports null Cassandra version, " - + "ignoring it from optimal protocol version computation", - logPrefix, - node.getEndPoint()); - continue; - } - version = version.nextStable(); - if (version.compareTo(Version.V2_1_0) < 0) { - throw new UnsupportedProtocolVersionException( - node.getEndPoint(), - String.format( - "Node %s reports Cassandra version %s, " - + "but the driver only supports 2.1.0 and above", - node.getEndPoint(), version), - ImmutableList.of(DefaultProtocolVersion.V3, DefaultProtocolVersion.V4)); - } - - LOG.debug( - "[{}] Node {} reports Cassandra version {}", logPrefix, node.getEndPoint(), version); - if (version.compareTo(Version.V2_2_0) < 0 && candidates.remove(DefaultProtocolVersion.V4)) { - LOG.debug("[{}] Excluding protocol V4", logPrefix); - } - } - - if (candidates.isEmpty()) { - // Note: with the current algorithm, this never happens - throw new UnsupportedProtocolVersionException( - null, - String.format( - "Could not determine a common protocol version, " - + "enable DEBUG logs for '%s' for more details", - LOG.getName()), - ImmutableList.of(DefaultProtocolVersion.V3, 
DefaultProtocolVersion.V4)); - } else { - return candidates.last(); - } - } - - @Override - public boolean supports(ProtocolVersion version, ProtocolFeature feature) { - if (DefaultProtocolFeature.UNSET_BOUND_VALUES.equals(feature)) { - return version.getCode() >= 4; - } else if (DefaultProtocolFeature.PER_REQUEST_KEYSPACE.equals(feature)) { - return version.getCode() >= 5; - } else { - throw new IllegalArgumentException("Unhandled protocol feature: " + feature); - } - } - - @Override - public ImmutableList getValues() { - return values; - } - - private NavigableMap byCode(ProtocolVersion[][] versionRanges) { - NavigableMap map = new TreeMap<>(); - for (ProtocolVersion[] versionRange : versionRanges) { - for (ProtocolVersion version : versionRange) { - ProtocolVersion previous = map.put(version.getCode(), version); - Preconditions.checkArgument( - previous == null, - "Duplicate version code: %s in %s and %s", - version.getCode(), - previous, - version); - } - } - return map; - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java similarity index 50% rename from core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java index 3f311262e4c..edf24cef73b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java @@ -13,63 +13,122 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.dse.driver.internal.core; +package com.datastax.oss.driver.internal.core; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; -import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; -import com.datastax.oss.driver.internal.core.ProtocolFeature; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Objects; +import java.util.Optional; import java.util.Set; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +/** + * Built-in implementation of the protocol version registry, supports all Cassandra and DSE + * versions. 
+ */ @ThreadSafe -public class DseProtocolVersionRegistry extends CassandraProtocolVersionRegistry { +public class DefaultProtocolVersionRegistry implements ProtocolVersionRegistry { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultProtocolVersionRegistry.class); + private static final List allVersions = + ImmutableList.builder() + .add(DefaultProtocolVersion.values()) + .add(DseProtocolVersion.values()) + .build(); + + @VisibleForTesting + static final Version DSE_4_7_0 = Objects.requireNonNull(Version.parse("4.7.0")); - private static final Logger LOG = LoggerFactory.getLogger(DseProtocolVersionRegistry.class); - @VisibleForTesting static final Version DSE_4_7_0 = Version.parse("4.7.0"); - @VisibleForTesting static final Version DSE_5_0_0 = Version.parse("5.0.0"); - @VisibleForTesting static final Version DSE_5_1_0 = Version.parse("5.1.0"); - @VisibleForTesting static final Version DSE_6_0_0 = Version.parse("6.0.0"); + @VisibleForTesting + static final Version DSE_5_0_0 = Objects.requireNonNull(Version.parse("5.0.0")); + + @VisibleForTesting + static final Version DSE_5_1_0 = Objects.requireNonNull(Version.parse("5.1.0")); + + @VisibleForTesting + static final Version DSE_6_0_0 = Objects.requireNonNull(Version.parse("6.0.0")); private final String logPrefix; - public DseProtocolVersionRegistry(String logPrefix) { - super(logPrefix, DefaultProtocolVersion.values(), DseProtocolVersion.values()); + public DefaultProtocolVersionRegistry(String logPrefix) { this.logPrefix = logPrefix; } + @Override + public ProtocolVersion fromName(String name) { + try { + return DefaultProtocolVersion.valueOf(name); + } catch (IllegalArgumentException noOssVersion) { + try { + return DseProtocolVersion.valueOf(name); + } catch (IllegalArgumentException noDseVersion) { + throw new IllegalArgumentException("Unknown protocol version name: " + name); + } + } + } + + @Override + public ProtocolVersion highestNonBeta() { + ProtocolVersion highest = 
allVersions.get(allVersions.size() - 1); + if (!highest.isBeta()) { + return highest; + } else { + return downgrade(highest) + .orElseThrow(() -> new AssertionError("There should be at least one non-beta version")); + } + } + + @Override + public Optional downgrade(ProtocolVersion version) { + int index = allVersions.indexOf(version); + if (index < 0) { + // This method is called with a value obtained from fromName, so this should never happen + throw new AssertionError(version + " is not a known version"); + } else if (index == 0) { + return Optional.empty(); + } else { + ProtocolVersion previousVersion = allVersions.get(index - 1); + // Beta versions are skipped during negotiation + return previousVersion.isBeta() ? downgrade(previousVersion) : Optional.of(previousVersion); + } + } + @Override public ProtocolVersion highestCommon(Collection nodes) { if (nodes == null || nodes.isEmpty()) { throw new IllegalArgumentException("Expected at least one node"); } - // Sadly we can't trust the Cassandra version reported by DSE to infer the maximum OSS protocol - // supported. For example DSE 6 reports release_version 4.0-SNAPSHOT, but only supports OSS - // protocol v4 (while Cassandra 4 will support v5). So there's no way to reuse the OSS algorithm - // from the parent class, simply redo everything: - - Set candidates = new HashSet<>(); - candidates.addAll(allNonBeta(DefaultProtocolVersion.values())); - candidates.addAll(allNonBeta(DseProtocolVersion.values())); + // Start with all non-beta versions (beta versions are always forced, and we don't call this + // method if the version was forced). 
+ Set candidates = new LinkedHashSet<>(); + for (ProtocolVersion version : allVersions) { + if (!version.isBeta()) { + candidates.add(version); + } + } + // Keep an unfiltered copy in case we need to throw an exception below + ImmutableList initialCandidates = ImmutableList.copyOf(candidates); + // For each node, remove the versions it doesn't support for (Node node : nodes) { - List toEliminate = Collections.emptyList(); + // We can't trust the Cassandra version reported by DSE to infer the maximum OSS protocol + // supported. For example DSE 6 reports release_version 4.0-SNAPSHOT, but only supports OSS + // protocol v4 (while Cassandra 4 will support v5). So we treat DSE separately. Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); if (dseVersion != null) { LOG.debug("[{}] Node {} reports DSE version {}", logPrefix, node.getEndPoint(), dseVersion); @@ -81,20 +140,21 @@ public ProtocolVersion highestCommon(Collection nodes) { "Node %s reports DSE version %s, " + "but the driver only supports 4.7.0 and above", node.getEndPoint(), dseVersion), - triedVersionsForHighestCommon()); + initialCandidates); } else if (dseVersion.compareTo(DSE_5_0_0) < 0) { - // DSE 4.7 or 4.8 (Cassandra 2.1): OSS protocol v3 - toEliminate = - ImmutableList.of( - DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + // DSE 4.7.x, 4.8.x + removeHigherThan(DefaultProtocolVersion.V3, null, candidates); } else if (dseVersion.compareTo(DSE_5_1_0) < 0) { - // DSE 5.0 (Cassandra 3): OSS protocol v4 - toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + // DSE 5.0 + removeHigherThan(DefaultProtocolVersion.V4, null, candidates); } else if (dseVersion.compareTo(DSE_6_0_0) < 0) { - // DSE 5.1: DSE protocol v1 or OSS protocol v4 - toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V2); - } // else DSE 6: DSE protocol v2 or OSS protocol v4 - } else { + // DSE 5.1 + 
removeHigherThan(DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, candidates); + } else { + // DSE 6 + removeHigherThan(DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V2, candidates); + } + } else { // not DSE Version cassandraVersion = node.getCassandraVersion(); if (cassandraVersion == null) { LOG.warn( @@ -105,6 +165,11 @@ public ProtocolVersion highestCommon(Collection nodes) { continue; } cassandraVersion = cassandraVersion.nextStable(); + LOG.debug( + "[{}] Node {} reports Cassandra version {}", + logPrefix, + node.getEndPoint(), + cassandraVersion); if (cassandraVersion.compareTo(Version.V2_1_0) < 0) { throw new UnsupportedProtocolVersionException( node.getEndPoint(), @@ -113,30 +178,17 @@ public ProtocolVersion highestCommon(Collection nodes) { + "but the driver only supports 2.1.0 and above", node.getEndPoint(), cassandraVersion), ImmutableList.of(DefaultProtocolVersion.V3, DefaultProtocolVersion.V4)); - } - - LOG.debug( - "[{}] Node {} reports Cassandra version {}", - logPrefix, - node.getEndPoint(), - cassandraVersion); - - if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { - toEliminate = - ImmutableList.of( - DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); + } else if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { + // 2.1.0 + removeHigherThan(DefaultProtocolVersion.V3, null, candidates); } else { - toEliminate = ImmutableList.of(DseProtocolVersion.DSE_V1, DseProtocolVersion.DSE_V2); - } - } - - for (ProtocolVersion version : toEliminate) { - if (candidates.remove(version)) { - LOG.debug("[{}] Excluding protocol {}", logPrefix, version); + // 2.2, 3.x + removeHigherThan(DefaultProtocolVersion.V4, null, candidates); } } } + // If we have versions left, return the highest one ProtocolVersion max = null; for (ProtocolVersion candidate : candidates) { if (max == null || max.getCode() < candidate.getCode()) { @@ -150,28 +202,29 @@ public ProtocolVersion highestCommon(Collection nodes) { "Could not determine a 
common protocol version, " + "enable DEBUG logs for '%s' for more details", LOG.getName()), - triedVersionsForHighestCommon()); + initialCandidates); } else { return max; } } - // Simply all non-beta versions, since this is the set we start from before filtering - private static ImmutableList triedVersionsForHighestCommon() { - return ImmutableList.builder() - .addAll(allNonBeta(DefaultProtocolVersion.values())) - .addAll(allNonBeta(DseProtocolVersion.values())) - .build(); - } - - private static & ProtocolVersion> Collection allNonBeta(T[] versions) { - ImmutableList.Builder result = ImmutableList.builder(); - for (T version : versions) { - if (!version.isBeta()) { - result.add(version); + // Removes all versions strictly higher than the given versions from candidates. A null + // maxDseVersion means "remove all DSE versions". + private void removeHigherThan( + DefaultProtocolVersion maxOssVersion, + DseProtocolVersion maxDseVersion, + Set candidates) { + for (DefaultProtocolVersion ossVersion : DefaultProtocolVersion.values()) { + if (ossVersion.compareTo(maxOssVersion) > 0 && candidates.remove(ossVersion)) { + LOG.debug("[{}] Excluding protocol {}", logPrefix, ossVersion); + } + } + for (DseProtocolVersion dseVersion : DseProtocolVersion.values()) { + if ((maxDseVersion == null || dseVersion.compareTo(maxDseVersion) > 0) + && candidates.remove(dseVersion)) { + LOG.debug("[{}] Excluding protocol {}", logPrefix, dseVersion); } } - return result.build(); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ProtocolVersionRegistry.java index 2f3c3b9a972..1cb7ecfe242 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ProtocolVersionRegistry.java @@ -26,13 +26,6 @@ /** Defines which native protocol versions are supported by a driver instance. 
*/ public interface ProtocolVersionRegistry { - /** - * Look up a version by its {@link ProtocolVersion#getCode()} code}. - * - * @throws IllegalArgumentException if there is no known version with this code. - */ - ProtocolVersion fromCode(int code); - /** * Look up a version by its {@link ProtocolVersion#name() name}. This is used when a version was * forced in the configuration. @@ -69,7 +62,4 @@ public interface ProtocolVersionRegistry { /** Whether a given version supports a given feature. */ boolean supports(ProtocolVersion version, ProtocolFeature feature); - - /** @return all the values known to this driver instance. */ - Iterable getValues(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 2acc3880c41..0d009ccdb0a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -16,6 +16,9 @@ package com.datastax.oss.driver.internal.core.context; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; +import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; +import com.datastax.dse.protocol.internal.ProtocolV4ClientCodecsForDse; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; import com.datastax.oss.driver.api.core.auth.AuthProvider; @@ -38,9 +41,9 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; import 
com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; +import com.datastax.oss.driver.internal.core.DefaultProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.channel.DefaultWriteCoalescer; @@ -79,6 +82,8 @@ import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; import com.datastax.oss.protocol.internal.Compressor; import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; +import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; @@ -386,12 +391,18 @@ protected Compressor buildCompressor() { } protected FrameCodec buildFrameCodec() { - return FrameCodec.defaultClient( - new ByteBufPrimitiveCodec(getNettyOptions().allocator()), getCompressor()); + return new FrameCodec<>( + new ByteBufPrimitiveCodec(getNettyOptions().allocator()), + getCompressor(), + new ProtocolV3ClientCodecs(), + new ProtocolV4ClientCodecsForDse(), + new ProtocolV5ClientCodecs(), + new DseProtocolV1ClientCodecs(), + new DseProtocolV2ClientCodecs()); } protected ProtocolVersionRegistry buildProtocolVersionRegistry() { - return new CassandraProtocolVersionRegistry(getSessionName()); + return new DefaultProtocolVersionRegistry(getSessionName()); } protected ConsistencyLevelRegistry buildConsistencyLevelRegistry() { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryHighestCommonTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryHighestCommonTest.java deleted file mode 100644 index 19146e6c286..00000000000 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryHighestCommonTest.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core; - -import static com.datastax.oss.driver.Assertions.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; -import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import java.util.Collection; -import java.util.Collections; -import org.junit.Test; - -/** - * Covers {@link CassandraProtocolVersionRegistry#highestCommon(Collection)} separately, because it - * relies explicitly on {@link DefaultProtocolVersion} as the version implementation. 
- */ -public class CassandraProtocolVersionRegistryHighestCommonTest { - - private CassandraProtocolVersionRegistry registry = new CassandraProtocolVersionRegistry("test"); - - @Test - public void should_pick_v3_when_at_least_one_node_is_2_1() { - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode("2.2.1"), mockNode("2.1.0"), mockNode("3.1.9")))) - .isEqualTo(DefaultProtocolVersion.V3); - } - - @Test - public void should_pick_v4_when_all_nodes_are_2_2_or_more() { - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode("2.2.0"), mockNode("2.2.1"), mockNode("3.1.9")))) - .isEqualTo(DefaultProtocolVersion.V4); - } - - @Test - public void should_treat_rcs_as_next_stable_versions() { - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode("2.2.1"), mockNode("2.1.0-rc1"), mockNode("3.1.9")))) - .isEqualTo(DefaultProtocolVersion.V3); - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode("2.2.0-rc2"), mockNode("2.2.1"), mockNode("3.1.9")))) - .isEqualTo(DefaultProtocolVersion.V4); - } - - @Test - public void should_skip_nodes_that_report_null_version() { - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode(null), mockNode("2.1.0"), mockNode("3.1.9")))) - .isEqualTo(DefaultProtocolVersion.V3); - - // Edge case: if all do, go with the latest version - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode(null), mockNode(null), mockNode(null)))) - .isEqualTo(DefaultProtocolVersion.V4); - } - - @Test - public void should_use_v4_for_future_cassandra_versions() { - // That might change in the future when some C* versions drop v4 support - assertThat( - registry.highestCommon( - ImmutableList.of(mockNode("3.0.0"), mockNode("12.1.5"), mockNode("98.7.22")))) - .isEqualTo(DefaultProtocolVersion.V4); - } - - @Test(expected = IllegalArgumentException.class) - public void should_fail_if_no_nodes() { - registry.highestCommon(Collections.emptyList()); - } - - private Node mockNode(String cassandraVersion) { - 
Node node = mock(Node.class); - if (cassandraVersion != null) { - when(node.getCassandraVersion()).thenReturn(Version.parse(cassandraVersion)); - } - return node; - } - - @Test(expected = UnsupportedProtocolVersionException.class) - public void should_fail_if_pre_2_1_node() { - registry.highestCommon(ImmutableList.of(mockNode("3.0.0"), mockNode("2.0.9"))); - } -} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryTest.java deleted file mode 100644 index 0835e1c83ab..00000000000 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/CassandraProtocolVersionRegistryTest.java +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core; - -import static com.datastax.oss.driver.Assertions.assertThat; - -import com.datastax.oss.driver.api.core.ProtocolVersion; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Optional; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -/** - * Covers the method that are agnostic to the actual {@link ProtocolVersion} implementation (using a - * mock implementation). 
- */ -public class CassandraProtocolVersionRegistryTest { - - private static ProtocolVersion V3 = new MockProtocolVersion(3, false); - private static ProtocolVersion V4 = new MockProtocolVersion(4, false); - private static ProtocolVersion V5 = new MockProtocolVersion(5, false); - private static ProtocolVersion V5_BETA = new MockProtocolVersion(5, true); - private static ProtocolVersion V10 = new MockProtocolVersion(10, false); - private static ProtocolVersion V11 = new MockProtocolVersion(11, false); - - @Rule public ExpectedException expectedException = ExpectedException.none(); - - @Test - public void should_fail_if_duplicate_version_code() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Duplicate version code: 5 in V5 and V5_BETA"); - new CassandraProtocolVersionRegistry("test", new ProtocolVersion[] {V5, V5_BETA}); - } - - @Test - public void should_find_version_by_name() { - ProtocolVersionRegistry versions = - new CassandraProtocolVersionRegistry("test", new ProtocolVersion[] {V3, V4}); - assertThat(versions.fromName("V3")).isEqualTo(V3); - assertThat(versions.fromName("V4")).isEqualTo(V4); - } - - @Test - public void should_downgrade_if_lower_version_available() { - ProtocolVersionRegistry versions = - new CassandraProtocolVersionRegistry("test", new ProtocolVersion[] {V3, V4}); - Optional downgraded = versions.downgrade(V4); - downgraded.map(version -> assertThat(version).isEqualTo(V3)).orElseThrow(AssertionError::new); - } - - @Test - public void should_not_downgrade_if_no_lower_version() { - ProtocolVersionRegistry versions = - new CassandraProtocolVersionRegistry("test", new ProtocolVersion[] {V3, V4}); - Optional downgraded = versions.downgrade(V3); - assertThat(downgraded.isPresent()).isFalse(); - } - - @Test - public void should_downgrade_across_version_range() { - ProtocolVersionRegistry versions = - new CassandraProtocolVersionRegistry( - "test", new ProtocolVersion[] {V3, V4}, new ProtocolVersion[] 
{V10, V11}); - Optional downgraded = versions.downgrade(V10); - downgraded.map(version -> assertThat(version).isEqualTo(V4)).orElseThrow(AssertionError::new); - } - - @Test - public void should_downgrade_skipping_beta_version() { - ProtocolVersionRegistry versions = - new CassandraProtocolVersionRegistry( - "test", new ProtocolVersion[] {V4, V5_BETA}, new ProtocolVersion[] {V10, V11}); - Optional downgraded = versions.downgrade(V10); - downgraded.map(version -> assertThat(version).isEqualTo(V4)).orElseThrow(AssertionError::new); - } - - private static class MockProtocolVersion implements ProtocolVersion { - private final int code; - private final boolean beta; - - MockProtocolVersion(int code, boolean beta) { - this.code = code; - this.beta = beta; - } - - @Override - public int getCode() { - return code; - } - - @NonNull - @Override - public String name() { - return "V" + code; - } - - @Override - public boolean isBeta() { - return beta; - } - - @Override - public String toString() { - return name() + (beta ? "_BETA" : ""); - } - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java similarity index 71% rename from core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java index 4d19f1903aa..9e9fefb5f53 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/DseProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java @@ -13,18 +13,21 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.dse.driver.internal.core; +package com.datastax.oss.driver.internal.core; +import static com.datastax.oss.driver.api.core.ProtocolVersion.V3; +import static com.datastax.oss.driver.api.core.ProtocolVersion.V4; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.Optional; import org.junit.Test; import org.mockito.Mockito; @@ -32,26 +35,31 @@ * Note: some tests in this class depend on the set of supported protocol versions, they will need * to be updated as new versions are added or become non-beta. 
*/ -public class DseProtocolVersionRegistryTest { +public class DefaultProtocolVersionRegistryTest { - private DseProtocolVersionRegistry registry = new DseProtocolVersionRegistry("test"); + private DefaultProtocolVersionRegistry registry = new DefaultProtocolVersionRegistry("test"); @Test - public void should_find_version_by_code() { - assertThat(registry.fromCode(4)).isEqualTo(DefaultProtocolVersion.V4); - assertThat(registry.fromCode(65)).isEqualTo(DseProtocolVersion.DSE_V1); + public void should_find_version_by_name() { + assertThat(registry.fromName("V4")).isEqualTo(ProtocolVersion.V4); + assertThat(registry.fromName("DSE_V1")).isEqualTo(DseProtocolVersion.DSE_V1); } @Test - public void should_find_version_by_name() { - assertThat(registry.fromName("V4")).isEqualTo(DefaultProtocolVersion.V4); - assertThat(registry.fromName("DSE_V1")).isEqualTo(DseProtocolVersion.DSE_V1); + public void should_downgrade_if_lower_version_available() { + Optional downgraded = registry.downgrade(V4); + downgraded.map(version -> assertThat(version).isEqualTo(V3)).orElseThrow(AssertionError::new); + } + + @Test + public void should_not_downgrade_if_no_lower_version() { + Optional downgraded = registry.downgrade(V3); + assertThat(downgraded.isPresent()).isFalse(); } @Test public void should_downgrade_from_dse_to_oss() { - assertThat(registry.downgrade(DseProtocolVersion.DSE_V1).get()) - .isEqualTo(DefaultProtocolVersion.V4); + assertThat(registry.downgrade(DseProtocolVersion.DSE_V1).get()).isEqualTo(ProtocolVersion.V4); } @Test @@ -71,7 +79,7 @@ public void should_pick_oss_v4_as_highest_common_when_all_nodes_are_dse_5_or_mor assertThat( registry.highestCommon( ImmutableList.of(mockDseNode("5.0"), mockDseNode("5.1"), mockDseNode("6.1")))) - .isEqualTo(DefaultProtocolVersion.V4); + .isEqualTo(ProtocolVersion.V4); } @Test @@ -79,7 +87,7 @@ public void should_pick_oss_v3_as_highest_common_when_all_nodes_are_dse_4_7_or_m assertThat( registry.highestCommon( 
ImmutableList.of(mockDseNode("4.7"), mockDseNode("5.1"), mockDseNode("6.1")))) - .isEqualTo(DefaultProtocolVersion.V3); + .isEqualTo(ProtocolVersion.V3); } @Test(expected = UnsupportedProtocolVersionException.class) @@ -88,6 +96,12 @@ public void should_fail_to_pick_highest_common_when_one_node_is_dse_4_6() { ImmutableList.of(mockDseNode("4.6"), mockDseNode("5.1"), mockDseNode("6.1"))); } + @Test(expected = UnsupportedProtocolVersionException.class) + public void should_fail_to_pick_highest_common_when_one_node_is_2_0() { + registry.highestCommon( + ImmutableList.of(mockCassandraNode("3.0.0"), mockCassandraNode("2.0.9"))); + } + @Test public void should_pick_oss_v3_as_highest_common_when_one_node_is_cassandra_2_1() { assertThat( @@ -97,7 +111,7 @@ public void should_pick_oss_v3_as_highest_common_when_one_node_is_cassandra_2_1( mockDseNode("6.1"), // oss v4 mockCassandraNode("2.1") // oss v3 ))) - .isEqualTo(DefaultProtocolVersion.V3); + .isEqualTo(ProtocolVersion.V3); } private Node mockCassandraNode(String rawVersion) { @@ -115,13 +129,13 @@ private Node mockDseNode(String rawDseVersion) { .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, dseVersion)); Version cassandraVersion; - if (dseVersion.compareTo(DseProtocolVersionRegistry.DSE_6_0_0) >= 0) { + if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_6_0_0) >= 0) { cassandraVersion = Version.parse("4.0"); - } else if (dseVersion.compareTo(DseProtocolVersionRegistry.DSE_5_1_0) >= 0) { + } else if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_5_1_0) >= 0) { cassandraVersion = Version.parse("3.11"); - } else if (dseVersion.compareTo(Version.parse("5.0")) >= 0) { + } else if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_5_0_0) >= 0) { cassandraVersion = Version.parse("3.0"); - } else if (dseVersion.compareTo(DseProtocolVersionRegistry.DSE_4_7_0) >= 0) { + } else if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_4_7_0) >= 0) { cassandraVersion = Version.parse("2.1"); } 
else { cassandraVersion = Version.parse("2.0"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java index 5df99bb4200..03974b3911a 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java @@ -29,7 +29,7 @@ import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.DefaultProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.TestResponses; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -73,7 +73,7 @@ public class ProtocolInitHandlerTest extends ChannelHandlerTestBase { @Mock private DriverExecutionProfile defaultProfile; private ProtocolVersionRegistry protocolVersionRegistry = - new CassandraProtocolVersionRegistry("test"); + new DefaultProtocolVersionRegistry("test"); private HeartbeatHandler heartbeatHandler; @Before diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java index 726704844bf..e1f80bfd61d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java @@ -32,7 +32,7 @@ import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.time.TimestampGenerator; import 
com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; -import com.datastax.oss.driver.internal.core.CassandraProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.DefaultProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -84,7 +84,7 @@ public void setup() { when(driverContext.getProtocolVersion()).thenReturn(DefaultProtocolVersion.V5); when(driverContext.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); when(driverContext.getProtocolVersionRegistry()) - .thenReturn(new CassandraProtocolVersionRegistry(null)); + .thenReturn(new DefaultProtocolVersionRegistry(null)); when(config.getDefaultProfile()).thenReturn(defaultProfile); when(driverContext.getConfig()).thenReturn(config); when(driverContext.getTimestampGenerator()).thenReturn(timestampGenerator); From 3e7d809c5716aa6a9fdb80539d326c0ff513d5cf Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 5 Nov 2019 15:28:42 -0800 Subject: [PATCH 210/979] Enable Insights listener on CqlSession --- .../api/core/DseSessionBuilderBase.java | 49 --------- .../core/InsightsClientLifecycleListener.java | 6 +- .../core/context/DseDriverContext.java | 35 ------ .../context/DseStartupOptionsBuilder.java | 104 ------------------ .../insights/ConfigAntiPatternsFinder.java | 6 +- .../core/insights/DataCentersFinder.java | 4 +- .../insights/ExecutionProfilesInfoFinder.java | 5 +- .../core/insights/InsightsClient.java | 27 ++--- .../core/session/ProgrammaticArguments.java | 54 ++++++++- .../api/core/session/SessionBuilder.java | 57 ++++++++++ .../core/context/DefaultDriverContext.java | 56 +++++++++- .../core/context/InternalDriverContext.java | 5 +- .../core/context/LifecycleListener.java | 6 +- .../core/context/StartupOptionsBuilder.java | 71 +++++++++++- .../context/DseStartupOptionsBuilderTest.java | 41 +++---- 
.../core/insights/InsightsClientTest.java | 16 +-- 16 files changed, 285 insertions(+), 257 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index 9a221511e9f..7a8cc9ecc36 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -30,13 +30,11 @@ import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.util.Loggers; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; import java.util.Map; -import java.util.UUID; import java.util.function.Predicate; import net.jcip.annotations.NotThreadSafe; import org.slf4j.Logger; @@ -66,53 +64,6 @@ protected DseSessionBuilderBase() { } } - /** - * A unique identifier for the created session. - * - *

      It will be sent in the {@code STARTUP} protocol message for each new connection established - * by the driver, and may be used by future DSE versions for monitoring purposes. - * - *

      If you don't call this method, the driver will generate an identifier with {@link - * Uuids#random()}. - */ - @NonNull - public SelfT withClientId(@Nullable UUID clientId) { - this.dseProgrammaticArgumentsBuilder.withStartupClientId(clientId); - return self; - } - - /** - * The name of the application using the created session. - * - *

      It will be sent in the {@code STARTUP} protocol message for each new connection established - * by the driver, and may be used by future DSE versions for monitoring purposes. - * - *

      This can also be defined in the driver configuration with the option {@code - * basic.application.name}; if you specify both, this method takes precedence and the - * configuration option will be ignored. - */ - @NonNull - public SelfT withApplicationName(@Nullable String applicationName) { - this.dseProgrammaticArgumentsBuilder.withStartupApplicationName(applicationName); - return self; - } - - /** - * The version of the application using the created session. - * - *

      It will be sent in the {@code STARTUP} protocol message for each new connection established - * by the driver, and may be used by future DSE versions for monitoring purposes. - * - *

      This can also be defined in the driver configuration with the option {@code - * basic.application.version}; if you specify both, this method takes precedence and the - * configuration option will be ignored. - */ - @NonNull - public SelfT withApplicationVersion(@Nullable String applicationVersion) { - this.dseProgrammaticArgumentsBuilder.withStartupApplicationVersion(applicationVersion); - return self; - } - /** * Sets the configuration loader to use. * diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java index e4ddaf5cea4..ddf4cbfee79 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/InsightsClientLifecycleListener.java @@ -17,20 +17,20 @@ import static com.datastax.dse.driver.api.core.config.DseDriverOption.MONITOR_REPORTING_ENABLED; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.insights.InsightsClient; import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.LifecycleListener; public class InsightsClientLifecycleListener implements LifecycleListener { private static final boolean DEFAULT_INSIGHTS_ENABLED = true; private static final long STATUS_EVENT_DELAY_MILLIS = 300000L; - private final DseDriverContext context; + private final InternalDriverContext context; private final StackTraceElement[] initCallStackTrace; private volatile InsightsClient insightsClient; public InsightsClientLifecycleListener( - DseDriverContext context, StackTraceElement[] initCallStackTrace) { + InternalDriverContext context, StackTraceElement[] initCallStackTrace) { this.context = 
context; this.initCallStackTrace = initCallStackTrace; } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 68823249647..582badd1bd7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -16,7 +16,6 @@ package com.datastax.dse.driver.internal.core.context; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; @@ -33,7 +32,6 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.context.LifecycleListener; import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; @@ -42,9 +40,7 @@ import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.util.Loggers; -import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.UUID; @@ -59,27 +55,11 @@ public class DseDriverContext extends DefaultDriverContext { 
private static final Logger LOG = LoggerFactory.getLogger(DseDriverContext.class); - private final UUID startupClientId; - private final String startupApplicationName; - private final String startupApplicationVersion; - private final List listeners; - public DseDriverContext( DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments, DseProgrammaticArguments dseProgrammaticArguments) { super(configLoader, programmaticArguments); - this.startupClientId = dseProgrammaticArguments.getStartupClientId(); - this.startupApplicationName = dseProgrammaticArguments.getStartupApplicationName(); - this.startupApplicationVersion = dseProgrammaticArguments.getStartupApplicationVersion(); - StackTraceElement[] stackTrace = {}; - try { - stackTrace = Thread.currentThread().getStackTrace(); - } catch (Exception ex) { - // ignore and use empty - } - this.listeners = - Collections.singletonList(new InsightsClientLifecycleListener(this, stackTrace)); } /** * @deprecated this constructor only exists for backward compatibility. 
Please use {@link @@ -183,15 +163,6 @@ protected MetricsFactory buildMetricsFactory() { return new DseDropwizardMetricsFactory(this); } - @Override - protected Map buildStartupOptions() { - return new DseStartupOptionsBuilder(this) - .withClientId(startupClientId) - .withApplicationName(startupApplicationName) - .withApplicationVersion(startupApplicationVersion) - .build(); - } - @Override protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { RequestTracker requestTrackerFromConfig = super.buildRequestTracker(requestTrackerFromBuilder); @@ -203,10 +174,4 @@ protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBu return multiplexingRequestTracker; } } - - @NonNull - @Override - public List getLifecycleListeners() { - return listeners; - } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java deleted file mode 100644 index 5ee2a1d93f8..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilder.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.context; - -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.uuid.Uuids; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; -import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.Map; -import java.util.UUID; - -public class DseStartupOptionsBuilder extends StartupOptionsBuilder { - - public static final String APPLICATION_NAME_KEY = "APPLICATION_NAME"; - public static final String APPLICATION_VERSION_KEY = "APPLICATION_VERSION"; - public static final String CLIENT_ID_KEY = "CLIENT_ID"; - - private UUID clientId; - private String applicationName; - private String applicationVersion; - - public DseStartupOptionsBuilder(InternalDriverContext context) { - super(context); - } - - /** - * Sets the client ID to be sent in the Startup message options. - * - *

      If this method is not invoked, or the id passed in is null, a random {@link UUID} will be - * generated and used by default. - */ - public DseStartupOptionsBuilder withClientId(@Nullable UUID clientId) { - this.clientId = clientId; - return this; - } - - /** - * Sets the client application name to be sent in the Startup message options. - * - *

      If this method is not invoked, or the name passed in is null, no application name option - * will be sent in the startup message options. - */ - public DseStartupOptionsBuilder withApplicationName(@Nullable String applicationName) { - this.applicationName = applicationName; - return this; - } - - /** - * Sets the client application version to be sent in the Startup message options. - * - *

      If this method is not invoked, or the name passed in is null, no application version option - * will be sent in the startup message options. - */ - public DseStartupOptionsBuilder withApplicationVersion(@Nullable String applicationVersion) { - this.applicationVersion = applicationVersion; - return this; - } - - @Override - public Map build() { - - DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - - // Fall back to generation / config if no programmatic values provided: - if (clientId == null) { - clientId = Uuids.random(); - } - if (applicationName == null) { - applicationName = config.getString(DseDriverOption.APPLICATION_NAME, null); - } - if (applicationVersion == null) { - applicationVersion = config.getString(DseDriverOption.APPLICATION_VERSION, null); - } - - NullAllowingImmutableMap.Builder builder = - NullAllowingImmutableMap.builder().putAll(super.build()); - - builder.put(CLIENT_ID_KEY, clientId.toString()); - if (applicationName != null) { - builder.put(APPLICATION_NAME_KEY, applicationName); - } - if (applicationVersion != null) { - builder.put(APPLICATION_VERSION_KEY, applicationVersion); - } - - return builder.build(); - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java index c1e4b5a21e3..88f467270bb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinder.java @@ -18,19 +18,19 @@ import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import 
com.datastax.oss.driver.internal.core.context.InternalDriverContext; import java.util.HashMap; import java.util.Map; class ConfigAntiPatternsFinder { - Map findAntiPatterns(DseDriverContext driverContext) { + Map findAntiPatterns(InternalDriverContext driverContext) { Map antiPatterns = new HashMap<>(); findSslAntiPattern(driverContext, antiPatterns); return antiPatterns; } private void findSslAntiPattern( - DseDriverContext driverContext, Map antiPatterns) { + InternalDriverContext driverContext, Map antiPatterns) { boolean isSslDefined = driverContext.getConfig().getDefaultProfile().isDefined(SSL_ENGINE_FACTORY_CLASS); boolean certValidation = diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java index ac63338a1cb..c335a5639ff 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/DataCentersFinder.java @@ -17,10 +17,10 @@ import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import java.util.Collection; import java.util.HashSet; @@ -28,7 +28,7 @@ class DataCentersFinder { - Set getDataCenters(DseDriverContext driverContext) { + Set getDataCenters(InternalDriverContext driverContext) { return getDataCenters( driverContext.getMetadataManager().getMetadata().getNodes().values(), driverContext.getConfig().getDefaultProfile()); diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java index b31fec9c960..a255b5b0de0 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java @@ -17,13 +17,13 @@ import static com.datastax.dse.driver.api.core.config.DseDriverOption.GRAPH_TRAVERSAL_SOURCE; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.insights.PackageUtil.ClassSettingDetails; import com.datastax.dse.driver.internal.core.insights.schema.LoadBalancingInfo; import com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; @@ -31,7 +31,8 @@ import java.util.stream.Collectors; class ExecutionProfilesInfoFinder { - Map getExecutionProfilesInfo(DseDriverContext driverContext) { + Map getExecutionProfilesInfo( + InternalDriverContext driverContext) { SpecificExecutionProfile defaultProfile = mapToSpecificProfile(driverContext.getConfig().getDefaultProfile()); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 9e3a2506359..9ef176ffc6d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -15,9 +15,6 @@ */ package com.datastax.dse.driver.internal.core.insights; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_NAME_KEY; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_VERSION_KEY; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.CLIENT_ID_KEY; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.AUTH_PROVIDER_CLASS; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; @@ -25,8 +22,6 @@ import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.PROTOCOL_COMPRESSION; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; -import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_NAME_KEY; -import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_VERSION_KEY; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.DseSessionBuilder; @@ -49,6 +44,8 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.pool.ChannelPool; import 
com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -98,7 +95,7 @@ public class InsightsClient { private final InsightsConfiguration insightsConfiguration; private final AtomicInteger numberOfStatusEventErrors = new AtomicInteger(); - private final DseDriverContext driverContext; + private final InternalDriverContext driverContext; private final Supplier timestampSupplier; private final PlatformInfoFinder platformInfoFinder; private final ReconnectionPolicyInfoFinder reconnectionPolicyInfoInfoFinder; @@ -111,11 +108,11 @@ public class InsightsClient { public static InsightsClient createInsightsClient( InsightsConfiguration insightsConfiguration, - DseDriverContext dseDriverContext, + InternalDriverContext driverContext, StackTraceElement[] initCallStackTrace) { DataCentersFinder dataCentersFinder = new DataCentersFinder(); return new InsightsClient( - dseDriverContext, + driverContext, () -> new Date().getTime(), insightsConfiguration, new PlatformInfoFinder(), @@ -127,7 +124,7 @@ public static InsightsClient createInsightsClient( } InsightsClient( - DseDriverContext driverContext, + InternalDriverContext driverContext, Supplier timestampSupplier, InsightsConfiguration insightsConfiguration, PlatformInfoFinder platformInfoFinder, @@ -376,23 +373,23 @@ static Map> getResolvedContactPoints(Set } private String getDriverVersion(Map startupOptions) { - return startupOptions.get(DRIVER_VERSION_KEY); + return startupOptions.get(StartupOptionsBuilder.DRIVER_VERSION_KEY); } private String getDriverName(Map startupOptions) { - return startupOptions.get(DRIVER_NAME_KEY); + return startupOptions.get(StartupOptionsBuilder.DRIVER_NAME_KEY); } private String getClientId(Map startupOptions) { - return startupOptions.get(CLIENT_ID_KEY); + return startupOptions.get(StartupOptionsBuilder.CLIENT_ID_KEY); } private boolean isApplicationNameGenerated(Map startupOptions) { - return startupOptions.get(APPLICATION_NAME_KEY) == null; + return 
startupOptions.get(StartupOptionsBuilder.APPLICATION_NAME_KEY) == null; } private String getApplicationVersion(Map startupOptions) { - String applicationVersion = startupOptions.get(APPLICATION_VERSION_KEY); + String applicationVersion = startupOptions.get(StartupOptionsBuilder.APPLICATION_VERSION_KEY); if (applicationVersion == null) { return ""; } @@ -400,7 +397,7 @@ private String getApplicationVersion(Map startupOptions) { } private String getApplicationName(Map startupOptions) { - String applicationName = startupOptions.get(APPLICATION_NAME_KEY); + String applicationName = startupOptions.get(StartupOptionsBuilder.APPLICATION_NAME_KEY); if (applicationName == null || applicationName.isEmpty()) { return getClusterCreateCaller(initCallStackTrace); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 03ab781ca42..19c91ec4140 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -29,6 +29,7 @@ import java.net.InetSocketAddress; import java.util.List; import java.util.Map; +import java.util.UUID; import java.util.function.Predicate; /** @@ -54,6 +55,9 @@ public static Builder builder() { private final AuthProvider authProvider; private final SslEngineFactory sslEngineFactory; private final InetSocketAddress cloudProxyAddress; + private final UUID startupClientId; + private final String startupApplicationName; + private final String startupApplicationVersion; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -65,7 +69,11 @@ private ProgrammaticArguments( @Nullable ClassLoader classLoader, @Nullable AuthProvider authProvider, @Nullable SslEngineFactory sslEngineFactory, - @Nullable InetSocketAddress cloudProxyAddress) { + @Nullable InetSocketAddress cloudProxyAddress, + 
@Nullable UUID startupClientId, + @Nullable String startupApplicationName, + @Nullable String startupApplicationVersion) { + this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; this.schemaChangeListener = schemaChangeListener; @@ -76,6 +84,9 @@ private ProgrammaticArguments( this.authProvider = authProvider; this.sslEngineFactory = sslEngineFactory; this.cloudProxyAddress = cloudProxyAddress; + this.startupClientId = startupClientId; + this.startupApplicationName = startupApplicationName; + this.startupApplicationVersion = startupApplicationVersion; } @NonNull @@ -128,6 +139,21 @@ public InetSocketAddress getCloudProxyAddress() { return cloudProxyAddress; } + @Nullable + public UUID getStartupClientId() { + return startupClientId; + } + + @Nullable + public String getStartupApplicationName() { + return startupApplicationName; + } + + @Nullable + public String getStartupApplicationVersion() { + return startupApplicationVersion; + } + public static class Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -141,6 +167,9 @@ public static class Builder { private AuthProvider authProvider; private SslEngineFactory sslEngineFactory; private InetSocketAddress cloudProxyAddress; + private UUID startupClientId; + private String startupApplicationName; + private String startupApplicationVersion; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... 
typeCodecs) { @@ -220,6 +249,24 @@ public Builder withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) return this; } + @NonNull + public Builder withStartupClientId(@Nullable UUID startupClientId) { + this.startupClientId = startupClientId; + return this; + } + + @NonNull + public Builder withStartupApplicationName(@Nullable String startupApplicationName) { + this.startupApplicationName = startupApplicationName; + return this; + } + + @NonNull + public Builder withStartupApplicationVersion(@Nullable String startupApplicationVersion) { + this.startupApplicationVersion = startupApplicationVersion; + return this; + } + @NonNull public ProgrammaticArguments build() { return new ProgrammaticArguments( @@ -232,7 +279,10 @@ public ProgrammaticArguments build() { classLoader, authProvider, sslEngineFactory, - cloudProxyAddress); + cloudProxyAddress, + startupClientId, + startupApplicationName, + startupApplicationVersion); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index a5776bdf1b5..226ccbe657b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -32,6 +32,7 @@ import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.ContactPoints; import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; import com.datastax.oss.driver.internal.core.config.cloud.CloudConfig; @@ -57,6 +58,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.UUID; import java.util.concurrent.Callable; import 
java.util.concurrent.CompletionStage; import java.util.function.Predicate; @@ -479,6 +481,61 @@ public SelfT withCloudProxyAddress(@Nullable InetSocketAddress cloudProxyAddress return self; } + /** + * A unique identifier for the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message, under the key {@code CLIENT_ID}, + * for each new connection established by the driver. Currently, this information is used by + * Insights monitoring (if the target cluster does not support Insights, the entry will be ignored + * by the server). + * + *

      If you don't call this method, the driver will generate an identifier with {@link + * Uuids#random()}. + */ + @NonNull + public SelfT withClientId(@Nullable UUID clientId) { + this.programmaticArgumentsBuilder.withStartupClientId(clientId); + return self; + } + + /** + * The name of the application using the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message, under the key {@code + * APPLICATION_NAME}, for each new connection established by the driver. Currently, this + * information is used by Insights monitoring (if the target cluster does not support Insights, + * the entry will be ignored by the server). + * + *

      This can also be defined in the driver configuration with the option {@code + * basic.application.name}; if you specify both, this method takes precedence and the + * configuration option will be ignored. If neither is specified, the entry is not included in the + * message. + */ + @NonNull + public SelfT withApplicationName(@Nullable String applicationName) { + this.programmaticArgumentsBuilder.withStartupApplicationName(applicationName); + return self; + } + + /** + * The version of the application using the created session. + * + *

      It will be sent in the {@code STARTUP} protocol message, under the key {@code + * APPLICATION_VERSION}, for each new connection established by the driver. Currently, this + * information is used by Insights monitoring (if the target cluster does not support Insights, + * the entry will be ignored by the server). + * + *

      This can also be defined in the driver configuration with the option {@code + * basic.application.version}; if you specify both, this method takes precedence and the + * configuration option will be ignored. If neither is specified, the entry is not included in the + * message. + */ + @NonNull + public SelfT withApplicationVersion(@Nullable String applicationVersion) { + this.programmaticArgumentsBuilder.withStartupApplicationVersion(applicationVersion); + return self; + } + /** * Creates the session with the options set by this builder. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 0d009ccdb0a..f1170313974 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.core.context; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; @@ -77,6 +79,7 @@ import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; +import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; @@ -88,12 +91,16 @@ import 
edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; import java.net.InetSocketAddress; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Default implementation of the driver context. @@ -115,6 +122,7 @@ @ThreadSafe public class DefaultDriverContext implements InternalDriverContext { + private static final Logger LOG = LoggerFactory.getLogger(InternalDriverContext.class); private static final AtomicInteger SESSION_NAME_COUNTER = new AtomicInteger(); protected final CycleDetector cycleDetector = @@ -193,6 +201,8 @@ public class DefaultDriverContext implements InternalDriverContext { private final LazyReference schemaChangeListenerRef; private final LazyReference requestTrackerRef; private final LazyReference> authProviderRef; + private final LazyReference> lifecycleListenersRef = + new LazyReference<>("lifecycleListeners", this::buildLifecycleListeners, cycleDetector); private final DriverConfig config; private final DriverConfigLoader configLoader; @@ -208,6 +218,12 @@ public class DefaultDriverContext implements InternalDriverContext { private final InetSocketAddress cloudProxyAddress; private final LazyReference requestLogFormatterRef = new LazyReference<>("requestLogFormatter", this::buildRequestLogFormatter, cycleDetector); + private final UUID startupClientId; + private final String startupApplicationName; + private final String startupApplicationVersion; + // A stack trace captured in the constructor. Used to extract information about the client + // application. 
+ private final StackTraceElement[] initStackTrace; public DefaultDriverContext( DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { @@ -252,6 +268,17 @@ public DefaultDriverContext( this.nodeFiltersFromBuilder = programmaticArguments.getNodeFilters(); this.classLoader = programmaticArguments.getClassLoader(); this.cloudProxyAddress = programmaticArguments.getCloudProxyAddress(); + this.startupClientId = programmaticArguments.getStartupClientId(); + this.startupApplicationName = programmaticArguments.getStartupApplicationName(); + this.startupApplicationVersion = programmaticArguments.getStartupApplicationVersion(); + StackTraceElement[] stackTrace; + try { + stackTrace = Thread.currentThread().getStackTrace(); + } catch (Exception ex) { + // ignore and use empty + stackTrace = new StackTraceElement[] {}; + } + this.initStackTrace = stackTrace; } /** @@ -287,7 +314,11 @@ public DefaultDriverContext( * @see #getStartupOptions() */ protected Map buildStartupOptions() { - return new StartupOptionsBuilder(this).build(); + return new StartupOptionsBuilder(this) + .withClientId(startupClientId) + .withApplicationName(startupApplicationName) + .withApplicationVersion(startupApplicationVersion) + .build(); } protected Map buildLoadBalancingPolicies() { @@ -573,6 +604,23 @@ protected Optional buildAuthProvider(AuthProvider authProviderFrom "com.datastax.dse.driver.internal.core.auth"); } + protected List buildLifecycleListeners() { + try { + Class.forName("com.fasterxml.jackson.core.JsonParser"); + Class.forName("com.fasterxml.jackson.databind.ObjectMapper"); + return Collections.singletonList(new InsightsClientLifecycleListener(this, initStackTrace)); + } catch (ClassNotFoundException | LinkageError error) { + if (config.getDefaultProfile().getBoolean(DseDriverOption.MONITOR_REPORTING_ENABLED)) { + Loggers.warnWithException( + LOG, + "Could not initialize Insights monitoring; " + + "Jackson libraries might be missing from classpath", + error); + 
} + return Collections.emptyList(); + } + } + @NonNull @Override public String getSessionName() { @@ -840,4 +888,10 @@ protected RequestLogFormatter buildRequestLogFormatter() { public RequestLogFormatter getRequestLogFormatter() { return requestLogFormatterRef.get(); } + + @NonNull + @Override + public List getLifecycleListeners() { + return lifecycleListenersRef.get(); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index afc5dbce92e..fa9caabefce 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -152,8 +152,9 @@ public interface InternalDriverContext extends DriverContext { /** * A list of additional components to notify of session lifecycle events. * - *

      The default implementation returns an empty list. Custom driver extensions might override - * this method to add their own components. + *

      For historical reasons, this method has a default implementation that returns an empty list. + * The built-in {@link DefaultDriverContext} overrides it to plug in the Insights monitoring + * listener. Custom driver extensions might override this method to add their own components. * *

      Note that the driver assumes that the returned list is constant; there is no way to add * listeners dynamically. diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/LifecycleListener.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/LifecycleListener.java index 31fcacfdcf1..4f7e6f84527 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/LifecycleListener.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/LifecycleListener.java @@ -17,11 +17,7 @@ import com.datastax.oss.driver.api.core.session.SessionBuilder; -/** - * A component that gets notified of certain events in the session's lifecycle. - * - *

      This is intended for third-party extensions, no built-in components implement this. - */ +/** A component that gets notified of certain events in the session's lifecycle. */ public interface LifecycleListener extends AutoCloseable { /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java index 49718b7df97..a29e6bc7661 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java @@ -15,10 +15,15 @@ */ package com.datastax.oss.driver.internal.core.context; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.protocol.internal.request.Startup; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; +import java.util.UUID; import net.jcip.annotations.Immutable; @Immutable @@ -26,13 +31,52 @@ public class StartupOptionsBuilder { public static final String DRIVER_NAME_KEY = "DRIVER_NAME"; public static final String DRIVER_VERSION_KEY = "DRIVER_VERSION"; + public static final String APPLICATION_NAME_KEY = "APPLICATION_NAME"; + public static final String APPLICATION_VERSION_KEY = "APPLICATION_VERSION"; + public static final String CLIENT_ID_KEY = "CLIENT_ID"; protected final InternalDriverContext context; + private UUID clientId; + private String applicationName; + private String applicationVersion; public StartupOptionsBuilder(InternalDriverContext context) { this.context = context; } + /** + * Sets the client ID to be sent in the Startup message options. + * + *

      If this method is not invoked, or the id passed in is null, a random {@link UUID} will be + * generated and used by default. + */ + public StartupOptionsBuilder withClientId(@Nullable UUID clientId) { + this.clientId = clientId; + return this; + } + + /** + * Sets the client application name to be sent in the Startup message options. + * + *

      If this method is not invoked, or the name passed in is null, no application name option + * will be sent in the startup message options. + */ + public StartupOptionsBuilder withApplicationName(@Nullable String applicationName) { + this.applicationName = applicationName; + return this; + } + + /** + * Sets the client application version to be sent in the Startup message options. + * + *

      If this method is not invoked, or the name passed in is null, no application version option + * will be sent in the startup message options. + */ + public StartupOptionsBuilder withApplicationVersion(@Nullable String applicationVersion) { + this.applicationVersion = applicationVersion; + return this; + } + /** * Builds a map of options to send in a Startup message. * @@ -46,16 +90,35 @@ public StartupOptionsBuilder(InternalDriverContext context) { * @return Map of Startup Options. */ public Map build() { + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + NullAllowingImmutableMap.Builder builder = NullAllowingImmutableMap.builder(3); // add compression (if configured) and driver name and version String compressionAlgorithm = context.getCompressor().algorithm(); if (compressionAlgorithm != null && !compressionAlgorithm.trim().isEmpty()) { builder.put(Startup.COMPRESSION_KEY, compressionAlgorithm.trim()); } - return builder - .put(DRIVER_NAME_KEY, getDriverName()) - .put(DRIVER_VERSION_KEY, getDriverVersion()) - .build(); + builder.put(DRIVER_NAME_KEY, getDriverName()).put(DRIVER_VERSION_KEY, getDriverVersion()); + + // Add Insights entries, falling back to generation / config if no programmatic values provided: + if (clientId == null) { + clientId = Uuids.random(); + } + builder.put(CLIENT_ID_KEY, clientId.toString()); + if (applicationName == null) { + applicationName = config.getString(DseDriverOption.APPLICATION_NAME, null); + } + if (applicationName != null) { + builder.put(APPLICATION_NAME_KEY, applicationName); + } + if (applicationVersion == null) { + applicationVersion = config.getString(DseDriverOption.APPLICATION_VERSION, null); + } + if (applicationVersion != null) { + builder.put(APPLICATION_VERSION_KEY, applicationVersion); + } + + return builder.build(); } /** diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index 08a34bc8e3d..8743bd219da 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -29,7 +29,9 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.protocol.internal.request.Startup; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -72,12 +74,11 @@ private void buildContext(UUID clientId, String applicationName, String applicat private void assertDefaultStartupOptions(Startup startup) { assertThat(startup.options).containsEntry(Startup.CQL_VERSION_KEY, "3.0.0"); assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.DRIVER_NAME_KEY, DSE_DRIVER_COORDINATES.getName()); - assertThat(startup.options).containsKey(DseStartupOptionsBuilder.DRIVER_VERSION_KEY); - Version version = - Version.parse(startup.options.get(DseStartupOptionsBuilder.DRIVER_VERSION_KEY)); - assertThat(version).isEqualTo(DSE_DRIVER_COORDINATES.getVersion()); - assertThat(startup.options).containsKey(DseStartupOptionsBuilder.CLIENT_ID_KEY); + .containsEntry(StartupOptionsBuilder.DRIVER_NAME_KEY, DSE_DRIVER_COORDINATES.getName()); + assertThat(startup.options).containsKey(StartupOptionsBuilder.DRIVER_VERSION_KEY); + Version version = Version.parse(startup.options.get(StartupOptionsBuilder.DRIVER_VERSION_KEY)); + assertThat(version).isEqualTo(Session.OSS_DRIVER_COORDINATES.getVersion()); + 
assertThat(startup.options).containsKey(StartupOptionsBuilder.CLIENT_ID_KEY); } @Test @@ -99,8 +100,8 @@ public void should_build_startup_options_with_compression(String compression) { Startup startup = new Startup(driverContext.getStartupOptions()); // assert the compression option is present assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, compression); - assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_NAME_KEY); - assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY); + assertThat(startup.options).doesNotContainKey(StartupOptionsBuilder.APPLICATION_NAME_KEY); + assertThat(startup.options).doesNotContainKey(StartupOptionsBuilder.APPLICATION_VERSION_KEY); assertDefaultStartupOptions(startup); } @@ -122,10 +123,10 @@ public void should_build_startup_options_with_client_id() { Startup startup = new Startup(driverContext.getStartupOptions()); // assert the client id is present assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()); + .containsEntry(StartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()); assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); - assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_NAME_KEY); - assertThat(startup.options).doesNotContainKey(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY); + assertThat(startup.options).doesNotContainKey(StartupOptionsBuilder.APPLICATION_NAME_KEY); + assertThat(startup.options).doesNotContainKey(StartupOptionsBuilder.APPLICATION_VERSION_KEY); assertDefaultStartupOptions(startup); } @@ -137,9 +138,9 @@ public void should_build_startup_options_with_application_version_and_name() { Startup startup = new Startup(driverContext.getStartupOptions()); // assert the app name and version are present assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name"); + 
.containsEntry(StartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name"); assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + .containsEntry(StartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); assertDefaultStartupOptions(startup); } @@ -155,9 +156,9 @@ public void should_build_startup_options_with_all_options() { buildContext(customClientId, "Custom_App_Name", "Custom_App_Version"); Startup startup = new Startup(driverContext.getStartupOptions()); assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()) - .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") - .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + .containsEntry(StartupOptionsBuilder.CLIENT_ID_KEY, customClientId.toString()) + .containsEntry(StartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") + .containsEntry(StartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); assertThat(startup.options).containsEntry(Startup.COMPRESSION_KEY, "snappy"); assertDefaultStartupOptions(startup); } @@ -175,8 +176,8 @@ public void should_use_configuration_when_no_programmatic_values_provided() { Startup startup = new Startup(driverContext.getStartupOptions()); assertThat(startup.options) - .containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Config_App_Name") - .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Config_App_Version"); + .containsEntry(StartupOptionsBuilder.APPLICATION_NAME_KEY, "Config_App_Name") + .containsEntry(StartupOptionsBuilder.APPLICATION_VERSION_KEY, "Config_App_Version"); } @Test @@ -188,7 +189,7 @@ public void should_ignore_configuration_when_programmatic_values_provided() { Startup startup = new Startup(driverContext.getStartupOptions()); assertThat(startup.options) - 
.containsEntry(DseStartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") - .containsEntry(DseStartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); + .containsEntry(StartupOptionsBuilder.APPLICATION_NAME_KEY, "Custom_App_Name") + .containsEntry(StartupOptionsBuilder.APPLICATION_VERSION_KEY, "Custom_App_Version"); } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index 537b023550b..af00d203e07 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -16,16 +16,11 @@ package com.datastax.dse.driver.internal.core.insights; import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V2; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_NAME_KEY; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.APPLICATION_VERSION_KEY; -import static com.datastax.dse.driver.internal.core.context.DseStartupOptionsBuilder.CLIENT_ID_KEY; import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockDefaultExecutionProfile; import static com.datastax.dse.driver.internal.core.insights.ExecutionProfileMockUtil.mockNonDefaultRequestTimeoutExecutionProfile; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_AUTH_PROVIDER_PACKAGE; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_LOAD_BALANCING_PACKAGE; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_SPECULATIVE_EXECUTION_PACKAGE; -import static com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_NAME_KEY; -import static 
com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder.DRIVER_VERSION_KEY; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import static org.awaitility.Duration.ONE_SECOND; @@ -57,6 +52,7 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; @@ -433,11 +429,11 @@ private DseDriverContext mockDseDriverContext() throws UnknownHostException { mockNonDefaultRequestTimeoutExecutionProfile(); Map startupOptions = new HashMap<>(); - startupOptions.put(CLIENT_ID_KEY, "client-id"); - startupOptions.put(APPLICATION_VERSION_KEY, "1.0.0"); - startupOptions.put(APPLICATION_NAME_KEY, "app-name"); - startupOptions.put(DRIVER_VERSION_KEY, "2.x"); - startupOptions.put(DRIVER_NAME_KEY, "DataStax Enterprise Java Driver"); + startupOptions.put(StartupOptionsBuilder.CLIENT_ID_KEY, "client-id"); + startupOptions.put(StartupOptionsBuilder.APPLICATION_VERSION_KEY, "1.0.0"); + startupOptions.put(StartupOptionsBuilder.APPLICATION_NAME_KEY, "app-name"); + startupOptions.put(StartupOptionsBuilder.DRIVER_VERSION_KEY, "2.x"); + startupOptions.put(StartupOptionsBuilder.DRIVER_NAME_KEY, "DataStax Enterprise Java Driver"); when(dseDriverContext.getStartupOptions()).thenReturn(startupOptions); when(dseDriverContext.getProtocolVersion()).thenReturn(DSE_V2); From 2bc7b1b164681217bf496f9e6d638d32ca4bab6f Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 6 Nov 2019 09:46:20 -0800 Subject: [PATCH 211/979] Register DSE codecs in the default registry --- .../api/core/DseSessionBuilderBase.java | 20 ------------------- 
.../core/context/DefaultDriverContext.java | 10 ++++++++++ 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index 7a8cc9ecc36..ff2a3629e05 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -16,7 +16,6 @@ package com.datastax.dse.driver.api.core; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.dse.driver.internal.core.auth.DseProgrammaticPlainTextAuthProvider; import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; import com.datastax.dse.driver.internal.core.context.DseDriverContext; @@ -30,40 +29,21 @@ import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.internal.core.util.Loggers; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; import java.util.Map; import java.util.function.Predicate; import net.jcip.annotations.NotThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @NotThreadSafe public abstract class DseSessionBuilderBase< SelfT extends DseSessionBuilderBase, SessionT> extends SessionBuilder { - private static final Logger LOG = LoggerFactory.getLogger(DseSessionBuilderBase.class); - protected DseProgrammaticArguments.Builder dseProgrammaticArgumentsBuilder = DseProgrammaticArguments.builder(); - protected DseSessionBuilderBase() { - try { - Class.forName("com.esri.core.geometry.ogc.OGCGeometry"); - programmaticArgumentsBuilder.addTypeCodecs( - 
DseTypeCodecs.LINE_STRING, - DseTypeCodecs.POINT, - DseTypeCodecs.POLYGON, - DseTypeCodecs.DATE_RANGE); - } catch (ClassNotFoundException | LinkageError error) { - Loggers.warnWithException( - LOG, "Could not register Geo codecs; ESRI API might be missing from classpath", error); - } - } - /** * Sets the configuration loader to use. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index f1170313974..ac139f415c0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.context; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; @@ -490,6 +491,15 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { protected CodecRegistry buildCodecRegistry(String logPrefix, List> codecs) { MutableCodecRegistry registry = new DefaultCodecRegistry(logPrefix); registry.register(codecs); + + registry.register(DseTypeCodecs.DATE_RANGE); + try { + Class.forName("com.esri.core.geometry.ogc.OGCGeometry"); + registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, "Could not register Geo codecs; ESRI API might be missing from classpath", error); + } return registry; } From 363e8885bc56b4017fda971ca343358111fbfda6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 6 Nov 2019 11:02:19 -0800 Subject: 
[PATCH 212/979] Handle DSE metrics in default factory --- .../core/context/DseDriverContext.java | 7 -- .../metrics/DseDropwizardMetricsFactory.java | 82 ------------------- .../DseDropwizardSessionMetricUpdater.java | 41 ---------- .../metrics/DropwizardMetricsFactory.java | 10 ++- .../DropwizardSessionMetricUpdater.java | 8 ++ 5 files changed, 16 insertions(+), 132 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 582badd1bd7..177ffde5043 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -22,7 +22,6 @@ import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.metrics.DseDropwizardMetricsFactory; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Node; @@ -36,7 +35,6 @@ import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.session.RequestProcessor; import 
com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.util.Loggers; @@ -158,11 +156,6 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } - @Override - protected MetricsFactory buildMetricsFactory() { - return new DseDropwizardMetricsFactory(this); - } - @Override protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { RequestTracker requestTrackerFromConfig = super.buildRequestTracker(requestTrackerFromBuilder); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java deleted file mode 100644 index ea6f5219ad8..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardMetricsFactory.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.metrics; - -import com.datastax.dse.driver.DseSessionMetric; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.metrics.Metrics; -import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory; -import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater; -import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -@ThreadSafe -public class DseDropwizardMetricsFactory extends DropwizardMetricsFactory { - - private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); - - private final SessionMetricUpdater dseSessionUpdater; - private final String logPrefix; - - public DseDropwizardMetricsFactory(InternalDriverContext context) { - super(context); - logPrefix = context.getSessionName(); - DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - Set enabledSessionMetrics = - parseSessionMetricPaths(config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)); - dseSessionUpdater = - getMetrics() - .map(Metrics::getRegistry) - .map( - registry -> - (SessionMetricUpdater) - new DseDropwizardSessionMetricUpdater( - enabledSessionMetrics, registry, context)) - .orElse(NoopSessionMetricUpdater.INSTANCE); - } - - @Override - public SessionMetricUpdater getSessionUpdater() { - return dseSessionUpdater; - } - - @Override - protected Set parseSessionMetricPaths(List paths) { - Set 
metrics = new HashSet<>(); - for (String path : paths) { - try { - metrics.add(DseSessionMetric.fromPath(path)); - } catch (IllegalArgumentException e) { - try { - metrics.add(DefaultSessionMetric.fromPath(path)); - } catch (IllegalArgumentException e1) { - LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path); - } - } - } - return Collections.unmodifiableSet(metrics); - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java deleted file mode 100644 index 5d957578ffd..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metrics/DseDropwizardSessionMetricUpdater.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.metrics; - -import com.codahale.metrics.MetricRegistry; -import com.datastax.dse.driver.DseSessionMetric; -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metrics.DropwizardSessionMetricUpdater; -import java.util.Set; -import net.jcip.annotations.ThreadSafe; - -@ThreadSafe -public class DseDropwizardSessionMetricUpdater extends DropwizardSessionMetricUpdater { - - public DseDropwizardSessionMetricUpdater( - Set enabledMetrics, MetricRegistry registry, InternalDriverContext context) { - super(enabledMetrics, registry, context); - - initializeHdrTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, - context.getConfig().getDefaultProfile(), - DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, - DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, - DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 76e9cb8965a..e2f72e387ab 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metrics; import com.codahale.metrics.MetricRegistry; +import com.datastax.dse.driver.DseSessionMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; @@ -28,6 +29,7 @@ import 
edu.umd.cs.findbugs.annotations.Nullable; import java.util.Collections; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; @@ -89,12 +91,16 @@ public NodeMetricUpdater newNodeUpdater(Node node) { } protected Set parseSessionMetricPaths(List paths) { - EnumSet result = EnumSet.noneOf(DefaultSessionMetric.class); + Set result = new HashSet<>(); for (String path : paths) { try { result.add(DefaultSessionMetric.fromPath(path)); } catch (IllegalArgumentException e) { - LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path); + try { + result.add(DseSessionMetric.fromPath(path)); + } catch (IllegalArgumentException e1) { + LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path); + } } } return Collections.unmodifiableSet(result); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java index 3a81bcad221..0b9b90c661e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java @@ -17,6 +17,8 @@ import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; @@ -101,6 +103,12 @@ public DropwizardSessionMetricUpdater( DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS, DefaultDriverOption.METRICS_SESSION_THROTTLING_INTERVAL); initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); + initializeHdrTimer( + 
DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + context.getConfig().getDefaultProfile(), + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL); } @Override From 20eec0dbc4e1f2959765fe9f70e92d87f685d333 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 6 Nov 2019 16:59:20 -0800 Subject: [PATCH 213/979] Merge plain-text auth providers --- .../api/core/DseSessionBuilderBase.java | 45 ----- .../auth/DsePlainTextAuthProviderBase.java | 184 +----------------- .../core/auth/DsePlainTextAuthProvider.java | 60 +----- .../DseProgrammaticPlainTextAuthProvider.java | 30 +-- .../core/auth/PlainTextAuthProviderBase.java | 131 +++++++++---- .../api/core/session/SessionBuilder.java | 26 +++ .../core/auth/PlainTextAuthProvider.java | 20 +- .../ProgrammaticPlainTextAuthProvider.java | 19 +- core/src/main/resources/reference.conf | 19 +- .../core/auth/DsePlainTextAuthProviderIT.java | 8 +- .../core/auth/DseProxyAuthenticationIT.java | 14 +- .../api/core/graph/GraphAuthenticationIT.java | 4 +- 12 files changed, 202 insertions(+), 358 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index ff2a3629e05..6f218c40e41 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -16,10 +16,8 @@ package com.datastax.dse.driver.api.core; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.internal.core.auth.DseProgrammaticPlainTextAuthProvider; import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; import com.datastax.dse.driver.internal.core.context.DseDriverContext; -import 
com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; @@ -83,49 +81,6 @@ public SelfT withConfigLoader(@Nullable DriverConfigLoader configLoader) { return super.withConfigLoader(configLoader); } - /** - * Configures the session to use DSE plaintext authentication with the given username and - * password. - * - *

      This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider - * implementation. Therefore calling it overrides the configuration (that is, the {@code - * advanced.auth-provider.class} option will be ignored). - * - *

      Note that this approach holds the credentials in clear text in memory, which makes them - * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for - * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code - * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link - * #withAuthProvider(AuthProvider)} or via the configuration ({@code - * advanced.auth-provider.class}). - */ - @NonNull - @Override - public SelfT withAuthCredentials(@NonNull String username, @NonNull String password) { - return withAuthCredentials(username, password, ""); - } - - /** - * Configures the session to use DSE plaintext authentication with the given username and - * password, and perform proxy authentication with the given authorization id. - * - *

      This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider - * implementation. Therefore calling it overrides the configuration (that is, the {@code - * advanced.auth-provider.class} option will be ignored). - * - *

      Note that this approach holds the credentials in clear text in memory, which makes them - * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for - * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code - * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link - * #withAuthProvider(AuthProvider)} or via the configuration ({@code - * advanced.auth-provider.class}). - */ - @NonNull - public SelfT withAuthCredentials( - @NonNull String username, @NonNull String password, @NonNull String authorizationId) { - return withAuthProvider( - new DseProgrammaticPlainTextAuthProvider(username, password, authorizationId)); - } - @Override protected DriverContext buildContext( DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java index 4cb45dfb66a..39ce47136d3 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderBase.java @@ -15,190 +15,20 @@ */ package com.datastax.dse.driver.api.core.auth; -import com.datastax.oss.driver.api.core.auth.AuthProvider; -import com.datastax.oss.driver.api.core.auth.AuthenticationException; -import com.datastax.oss.driver.api.core.auth.Authenticator; -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import 
java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.Objects; import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** - * Common infrastructure for DSE plain text auth providers. - * - *

      This can be reused to write an implementation that retrieves the credentials from another - * source than the configuration. + * @deprecated The driver's default plain text providers now support both Apache Cassandra and DSE. + * This type was preserved for backward compatibility, but implementors should now extend {@link + * PlainTextAuthProviderBase} instead. */ @ThreadSafe -public abstract class DsePlainTextAuthProviderBase implements AuthProvider { - - private static final Logger LOG = LoggerFactory.getLogger(DsePlainTextAuthProviderBase.class); - - private final String logPrefix; +@Deprecated +public abstract class DsePlainTextAuthProviderBase extends PlainTextAuthProviderBase { - /** - * @param logPrefix a string that will get prepended to the logs (this is used for discrimination - * when you have multiple driver instances executing in the same JVM). Config-based - * implementations fill this with {@link Session#getName()}. - */ protected DsePlainTextAuthProviderBase(@NonNull String logPrefix) { - this.logPrefix = Objects.requireNonNull(logPrefix); - } - - /** - * Retrieves the credentials from the underlying source. - * - *

      This is invoked every time the driver opens a new connection. - */ - @NonNull - protected abstract Credentials getCredentials( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator); - - @NonNull - @Override - public Authenticator newAuthenticator( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) - throws AuthenticationException { - return new PlainTextAuthenticator( - getCredentials(endPoint, serverAuthenticator), endPoint, serverAuthenticator); - } - - @Override - public void onMissingChallenge(@NonNull EndPoint endPoint) { - LOG.warn( - "[{}] {} did not send an authentication challenge; " - + "This is suspicious because the driver expects authentication", - logPrefix, - endPoint); - } - - @Override - public void close() { - // nothing to do - } - - public static class Credentials { - - private final char[] authenticationId; - private final char[] password; - private final char[] authorizationId; - - public Credentials( - @NonNull char[] authenticationId, - @NonNull char[] password, - @NonNull char[] authorizationId) { - this.authenticationId = Objects.requireNonNull(authenticationId); - this.password = Objects.requireNonNull(password); - this.authorizationId = Objects.requireNonNull(authorizationId); - } - - @NonNull - public char[] getAuthenticationId() { - return authenticationId; - } - - @NonNull - public char[] getPassword() { - return password; - } - - @NonNull - public char[] getAuthorizationId() { - return authorizationId; - } - - /** Clears the credentials from memory when they're no longer needed. */ - protected void clear() { - // Note: this is a bit irrelevant with the built-in provider, because the config already - // caches the credentials in memory. But it might be useful for a custom implementation that - // retrieves the credentials from a different source. 
- Arrays.fill(getAuthenticationId(), (char) 0); - Arrays.fill(getPassword(), (char) 0); - Arrays.fill(getAuthorizationId(), (char) 0); - } - } - - protected static class PlainTextAuthenticator extends BaseDseAuthenticator { - - private static final ByteBuffer MECHANISM = - ByteBuffer.wrap("PLAIN".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); - - private static final ByteBuffer SERVER_INITIAL_CHALLENGE = - ByteBuffer.wrap("PLAIN-START".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); - - private final ByteBuffer encodedCredentials; - private final EndPoint endPoint; - - protected PlainTextAuthenticator( - Credentials credentials, EndPoint endPoint, String serverAuthenticator) { - super(serverAuthenticator); - - Objects.requireNonNull(credentials); - - ByteBuffer authenticationId = toUtf8Bytes(credentials.getAuthenticationId()); - ByteBuffer password = toUtf8Bytes(credentials.getPassword()); - ByteBuffer authorizationId = toUtf8Bytes(credentials.getAuthorizationId()); - - this.encodedCredentials = - ByteBuffer.allocate( - authorizationId.remaining() - + authenticationId.remaining() - + password.remaining() - + 2); - encodedCredentials.put(authorizationId); - encodedCredentials.put((byte) 0); - encodedCredentials.put(authenticationId); - encodedCredentials.put((byte) 0); - encodedCredentials.put(password); - encodedCredentials.flip(); - - clear(authorizationId); - clear(authenticationId); - clear(password); - - this.endPoint = endPoint; - } - - private static ByteBuffer toUtf8Bytes(char[] charArray) { - CharBuffer charBuffer = CharBuffer.wrap(charArray); - return Charsets.UTF_8.encode(charBuffer); - } - - private static void clear(ByteBuffer buffer) { - buffer.rewind(); - while (buffer.remaining() > 0) { - buffer.put((byte) 0); - } - } - - @NonNull - @Override - public ByteBuffer getMechanism() { - return MECHANISM; - } - - @NonNull - @Override - public ByteBuffer getInitialServerChallenge() { - return SERVER_INITIAL_CHALLENGE; - } - - @Nullable - 
@Override - public ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer challenge) { - if (SERVER_INITIAL_CHALLENGE.equals(challenge)) { - return encodedCredentials; - } - throw new AuthenticationException(endPoint, "Incorrect challenge from server"); - } + super(logPrefix); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java index 8e2dfd5b03b..e812dda211a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DsePlainTextAuthProvider.java @@ -15,66 +15,20 @@ */ package com.datastax.dse.driver.internal.core.auth; -import com.datastax.dse.driver.api.core.auth.DsePlainTextAuthProviderBase; -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import edu.umd.cs.findbugs.annotations.NonNull; +import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import net.jcip.annotations.ThreadSafe; /** - * An authentication provider that supports SASL authentication using the PLAIN mechanism to connect - * to DSE clusters secured with DseAuthenticator. - * - *

      To activate this provider, an {@code auth-provider} section must be included in the driver - * configuration, for example: - * - *

      - * dse-java-driver {
      - *   auth-provider {
      - *     class = com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider
      - *     username = user0
      - *     password = mypassword
      - *     authorization-id = user1
      - *   }
      - * }
      - * 
      - * - * See the {@code dse-reference.conf} file included with the driver for more information. + * @deprecated The driver's default plain text providers now support both Apache Cassandra and DSE. + * This type was preserved for backward compatibility, but {@link PlainTextAuthProvider} should + * be used instead. */ @ThreadSafe -public class DsePlainTextAuthProvider extends DsePlainTextAuthProviderBase { - - private final DriverExecutionProfile config; +@Deprecated +public class DsePlainTextAuthProvider extends PlainTextAuthProvider { public DsePlainTextAuthProvider(DriverContext context) { - super(context.getSessionName()); - this.config = context.getConfig().getDefaultProfile(); - } - - @NonNull - @Override - protected Credentials getCredentials( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - String authorizationId; - if (config.isDefined(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID)) { - authorizationId = config.getString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID); - } else { - authorizationId = ""; - } - // It's not valid to use the DsePlainTextAuthProvider without a username or password, error out - // early here - AuthUtils.validateConfigPresent( - config, - DsePlainTextAuthProvider.class.getName(), - endPoint, - DefaultDriverOption.AUTH_PROVIDER_USER_NAME, - DefaultDriverOption.AUTH_PROVIDER_PASSWORD); - return new Credentials( - config.getString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME).toCharArray(), - config.getString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD).toCharArray(), - authorizationId.toCharArray()); + super(context); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java index cc70951c0b2..a8feb1cd2ba 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java @@ -15,28 +15,18 @@ */ package com.datastax.dse.driver.internal.core.auth; -import com.datastax.dse.driver.api.core.auth.DsePlainTextAuthProviderBase; -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import edu.umd.cs.findbugs.annotations.NonNull; +import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; -public class DseProgrammaticPlainTextAuthProvider extends DsePlainTextAuthProviderBase { - private final String authenticationId; - private final String password; - private final String authorizationId; +/** + * @deprecated The driver's default plain text providers now support both Apache Cassandra and DSE. + * This type was preserved for backward compatibility, but {@link + * ProgrammaticPlainTextAuthProvider} should be used instead. + */ +@Deprecated +public class DseProgrammaticPlainTextAuthProvider extends ProgrammaticPlainTextAuthProvider { public DseProgrammaticPlainTextAuthProvider( - String authenticationId, String password, String authorizationId) { - super(""); - this.authenticationId = authenticationId; - this.password = password; - this.authorizationId = authorizationId; - } - - @NonNull - @Override - protected Credentials getCredentials( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - return new Credentials( - authenticationId.toCharArray(), password.toCharArray(), authorizationId.toCharArray()); + String username, String password, String authorizationId) { + super(username, password, authorizationId); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index b44e6f8765c..89162f00cd2 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.core.auth; +import com.datastax.dse.driver.api.core.auth.BaseDseAuthenticator; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; @@ -22,6 +23,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Objects; import net.jcip.annotations.ThreadSafe; @@ -62,8 +64,10 @@ protected abstract Credentials getCredentials( @NonNull @Override public Authenticator newAuthenticator( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - return new PlainTextAuthenticator(getCredentials(endPoint, serverAuthenticator)); + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) + throws AuthenticationException { + return new PlainTextAuthenticator( + getCredentials(endPoint, serverAuthenticator), endPoint, serverAuthenticator); } @Override @@ -84,10 +88,25 @@ public static class Credentials { private final char[] username; private final char[] password; - - public Credentials(@NonNull char[] username, @NonNull char[] password) { + private final char[] authorizationId; + + /** + * Builds an instance for username/password authentication, and proxy authentication with the + * given authorizationId. + * + *

      This feature is only available with Datastax Enterprise. If the target server is Apache + * Cassandra, the authorizationId will be ignored. + */ + public Credentials( + @NonNull char[] username, @NonNull char[] password, @NonNull char[] authorizationId) { this.username = Objects.requireNonNull(username); this.password = Objects.requireNonNull(password); + this.authorizationId = Objects.requireNonNull(authorizationId); + } + + /** Builds an instance for simple username/password authentication. */ + public Credentials(@NonNull char[] username, @NonNull char[] password) { + this(username, password, new char[0]); } @NonNull @@ -95,11 +114,26 @@ public char[] getUsername() { return username; } + /** + * @deprecated this method only exists for backward compatibility. It is a synonym for {@link + * #getUsername()}, which should be used instead. + */ + @Deprecated + @NonNull + public char[] getAuthenticationId() { + return username; + } + @NonNull public char[] getPassword() { return password; } + @NonNull + public char[] getAuthorizationId() { + return authorizationId; + } + /** Clears the credentials from memory when they're no longer needed. */ protected void clear() { // Note: this is a bit irrelevant with the built-in provider, because the config already @@ -107,58 +141,81 @@ protected void clear() { // retrieves the credentials from a different source. Arrays.fill(getUsername(), (char) 0); Arrays.fill(getPassword(), (char) 0); + Arrays.fill(getAuthorizationId(), (char) 0); } } - protected static class PlainTextAuthenticator implements SyncAuthenticator { + // Implementation note: BaseDseAuthenticator is backward compatible with Cassandra authenticators. + // This will work with both Cassandra (as long as no authorizationId is set) and DSE. 
+ protected static class PlainTextAuthenticator extends BaseDseAuthenticator { + + private static final ByteBuffer MECHANISM = + ByteBuffer.wrap("PLAIN".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); + + private static final ByteBuffer SERVER_INITIAL_CHALLENGE = + ByteBuffer.wrap("PLAIN-START".getBytes(StandardCharsets.UTF_8)).asReadOnlyBuffer(); - private final ByteBuffer initialToken; + private final ByteBuffer encodedCredentials; + private final EndPoint endPoint; + + protected PlainTextAuthenticator( + Credentials credentials, EndPoint endPoint, String serverAuthenticator) { + super(serverAuthenticator); - protected PlainTextAuthenticator(@NonNull Credentials credentials) { Objects.requireNonNull(credentials); - ByteBuffer usernameBytes = toUtf8Bytes(credentials.getUsername()); - ByteBuffer passwordBytes = toUtf8Bytes(credentials.getPassword()); - credentials.clear(); - - this.initialToken = - ByteBuffer.allocate(usernameBytes.remaining() + passwordBytes.remaining() + 2); - initialToken.put((byte) 0); - initialToken.put(usernameBytes); - initialToken.put((byte) 0); - initialToken.put(passwordBytes); - initialToken.flip(); - - // Clear temporary buffers - usernameBytes.rewind(); - while (usernameBytes.remaining() > 0) { - usernameBytes.put((byte) 0); - } - passwordBytes.rewind(); - while (passwordBytes.remaining() > 0) { - passwordBytes.put((byte) 0); - } + + ByteBuffer authorizationId = toUtf8Bytes(credentials.getAuthorizationId()); + ByteBuffer username = toUtf8Bytes(credentials.getUsername()); + ByteBuffer password = toUtf8Bytes(credentials.getPassword()); + + this.encodedCredentials = + ByteBuffer.allocate( + authorizationId.remaining() + username.remaining() + password.remaining() + 2); + encodedCredentials.put(authorizationId); + encodedCredentials.put((byte) 0); + encodedCredentials.put(username); + encodedCredentials.put((byte) 0); + encodedCredentials.put(password); + encodedCredentials.flip(); + + clear(authorizationId); + clear(username); + 
clear(password); + + this.endPoint = endPoint; } - private ByteBuffer toUtf8Bytes(char[] charArray) { + private static ByteBuffer toUtf8Bytes(char[] charArray) { CharBuffer charBuffer = CharBuffer.wrap(charArray); return Charsets.UTF_8.encode(charBuffer); } + private static void clear(ByteBuffer buffer) { + buffer.rewind(); + while (buffer.remaining() > 0) { + buffer.put((byte) 0); + } + } + + @NonNull @Override - @Nullable - public ByteBuffer initialResponseSync() { - return initialToken; + public ByteBuffer getMechanism() { + return MECHANISM; } + @NonNull @Override - @Nullable - public ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer token) { - return null; + public ByteBuffer getInitialServerChallenge() { + return SERVER_INITIAL_CHALLENGE; } + @Nullable @Override - public void onAuthenticationSuccessSync(@Nullable ByteBuffer token) { - // no-op, the server should send nothing anyway + public ByteBuffer evaluateChallengeSync(@Nullable ByteBuffer challenge) { + if (SERVER_INITIAL_CHALLENGE.equals(challenge)) { + return encodedCredentials; + } + throw new AuthenticationException(endPoint, "Incorrect challenge from server"); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 226ccbe657b..c91d22d58a9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -268,6 +268,32 @@ public SelfT withAuthCredentials(@NonNull String username, @NonNull String passw return withAuthProvider(new ProgrammaticPlainTextAuthProvider(username, password)); } + /** + * Configures the session to use DSE plaintext authentication with the given username and + * password, and perform proxy authentication with the given authorization id. + * + *

      This feature is only available in Datastax Enterprise. If connecting to Apache Cassandra, + * the authorization id will be ignored; it is recommended to use {@link + * #withAuthCredentials(String, String)} instead. + * + *

      This methods calls {@link #withAuthProvider(AuthProvider)} to register a special provider + * implementation. Therefore calling it overrides the configuration (that is, the {@code + * advanced.auth-provider.class} option will be ignored). + * + *

      Note that this approach holds the credentials in clear text in memory, which makes them + * vulnerable to an attacker who is able to perform memory dumps. If this is not acceptable for + * you, consider writing your own {@link AuthProvider} implementation (the internal class {@code + * PlainTextAuthProviderBase} is a good starting point), and providing it either with {@link + * #withAuthProvider(AuthProvider)} or via the configuration ({@code + * advanced.auth-provider.class}). + */ + @NonNull + public SelfT withAuthCredentials( + @NonNull String username, @NonNull String password, @NonNull String authorizationId) { + return withAuthProvider( + new ProgrammaticPlainTextAuthProvider(username, password, authorizationId)); + } + /** * Registers an SSL engine factory for the session. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java index 2d664063933..de951b881f1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.core.auth; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.auth.AuthUtils; import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -36,6 +38,10 @@ * class = com.datastax.driver.api.core.auth.PlainTextAuthProvider * username = cassandra * password = cassandra + * + * // If connecting to Datastax Enterprise, this additional option allows proxy authentication + * // (login as another user or role) + * authorization-id = userOrRole * } * } * @@ -56,8 +62,20 @@ public 
PlainTextAuthProvider(DriverContext context) { @Override protected Credentials getCredentials( @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + // It's not valid to use the PlainTextAuthProvider without a username or password, error out + // early here + AuthUtils.validateConfigPresent( + config, + PlainTextAuthProvider.class.getName(), + endPoint, + DefaultDriverOption.AUTH_PROVIDER_USER_NAME, + DefaultDriverOption.AUTH_PROVIDER_PASSWORD); + + String authorizationId = config.getString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, ""); + assert authorizationId != null; // per the default above return new Credentials( config.getString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME).toCharArray(), - config.getString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD).toCharArray()); + config.getString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD).toCharArray(), + authorizationId.toCharArray()); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java index 0b395240f53..ba0bc4b41db 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -26,24 +26,41 @@ * pulling them from the configuration. * * @see SessionBuilder#withAuthCredentials(String, String) + * @see SessionBuilder#withAuthCredentials(String, String, String) */ @ThreadSafe public class ProgrammaticPlainTextAuthProvider extends PlainTextAuthProviderBase { private final String username; private final String password; + private final String authorizationId; + /** Builds an instance for simple username/password authentication. 
*/ public ProgrammaticPlainTextAuthProvider(String username, String password) { + this(username, password, ""); + } + + /** + * Builds an instance for username/password authentication, and proxy authentication with the + * given authorizationId. + * + *

      This feature is only available with Datastax Enterprise. If the target server is Apache + * Cassandra, the authorizationId will be ignored. + */ + public ProgrammaticPlainTextAuthProvider( + String username, String password, String authorizationId) { // This will typically be built before the session so we don't know the log prefix yet. Pass an // empty string, it's only used in one log message. super(""); this.username = username; this.password = password; + this.authorizationId = authorizationId; } @NonNull @Override protected Credentials getCredentials( @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - return new Credentials(username.toCharArray(), password.toCharArray()); + return new Credentials( + username.toCharArray(), password.toCharArray(), authorizationId.toCharArray()); } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 137dd91aad3..4784ede5c5b 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -526,33 +526,30 @@ datastax-java-driver { # - com.datastax.oss.driver.internal.core.auth # - com.datastax.dse.driver.internal.core.auth # - # The driver provides one implementation for Apache Cassandra(R): + # The driver provides two implementations: # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and - # `password` options below. Should be used only when authenticating against Apache - # Cassandra(R) clusters; not recommended when authenticating against DSE clusters. - # - # And two DSE-specific implementations: - # - DsePlainTextAuthProvider: provides SASL authentication using the PLAIN mechanism for DSE - # clusters secured with DseAuthenticator. It requires the `username` and `password` options - # below, and optionally, an `authorization-id`. + # `password` options below. When connecting to Datastax Enterprise, an optional + # `authorization-id` can also be specified. 
+ # For backward compatibility with previous driver versions, you can also use the class name + # "DsePlainTextAuthProvider" for this provider. # - DseGssApiAuthProvider: provides GSSAPI authentication for DSE clusters secured with # DseAuthenticator. See the example below and refer to the manual for detailed instructions. # # You can also specify a custom class that implements AuthProvider and has a public constructor # with a DriverContext argument (to simplify this, the driver provides two abstract classes that - # can be extended: DsePlainTextAuthProviderBase and DseGssApiAuthProviderBase). + # can be extended: PlainTextAuthProviderBase and DseGssApiAuthProviderBase). # # Finally, you can configure a provider instance programmatically with # DseSessionBuilder#withAuthProvider. In that case, it will take precedence over the # configuration. - // class = DsePlainTextAuthProvider + // class = PlainTextAuthProvider # # Sample configuration for plain-text authentication providers: // username = cassandra // password = cassandra # # Proxy authentication: allows to login as another user or role (valid for both - # DsePlainTextAuthProvider and DseGssApiAuthProvider): + # PlainTextAuthProvider and DseGssApiAuthProvider): // authorization-id = userOrRole # # The settings below are only applicable to DseGssApiAuthProvider: diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java index 588aae41e45..45588247824 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java @@ -20,7 +20,6 @@ import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import 
com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; @@ -29,6 +28,7 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import java.util.concurrent.TimeUnit; import org.junit.BeforeClass; @@ -65,7 +65,7 @@ public void should_connect_dse_plaintext_auth() { .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { session.execute("select * from system.local"); } @@ -92,7 +92,7 @@ public void should_not_connect_with_invalid_credentials() { .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "NotARealPassword") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException e) { @@ -111,7 +111,7 @@ public void should_not_connect_without_credentials() { ccm, SessionUtils.configLoaderBuilder() .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + 
.withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { fail("Expected AllNodesFailedException"); } catch (AllNodesFailedException e) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java index 47f1d4f5b4f..d3059115cfa 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -21,7 +21,6 @@ import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider; -import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -30,6 +29,7 @@ import com.datastax.oss.driver.api.core.servererrors.UnauthorizedException; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -77,7 +77,7 @@ public void setupRoles() { } /** * Validates that a connection may be successfully made as user 'alice' using the credentials of a - * user 'ben' using {@link DsePlainTextAuthProvider} assuming ben has PROXY.LOGIN authorization on + * user 'ben' using {@link PlainTextAuthProvider} assuming ben has PROXY.LOGIN authorization on * alice. 
*/ @Test @@ -89,7 +89,7 @@ public void should_allow_plain_text_authorized_user_to_login_as() { .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "alice") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "ben") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "ben") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); ResultSet set = session.execute(select); @@ -137,7 +137,7 @@ public void should_not_allow_plain_text_unauthorized_user_to_login_as() { .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "alice") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "steve") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "steve") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); session.execute(select); @@ -167,7 +167,7 @@ public void should_not_allow_kerberos_unauthorized_user_to_login_as() throws Exc } /** * Validates that a query may be successfully made as user 'alice' using a {@link DseSession} that - * is authenticated to user 'steve' using {@link DsePlainTextAuthProvider} assuming steve has + * is authenticated to user 'steve' using {@link PlainTextAuthProvider} assuming steve has * PROXY.EXECUTE authorization on alice. 
*/ @Test @@ -178,7 +178,7 @@ public void should_allow_plain_text_authorized_user_to_execute_as() { SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "steve") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "steve") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); @@ -214,7 +214,7 @@ public void should_not_allow_plain_text_unauthorized_user_to_execute_as() { SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "ben") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "ben") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build())) { SimpleStatement select = SimpleStatement.builder("select * from aliceks.alicetable").build(); SimpleStatement statementAsAlice = ProxyAuthentication.executeAs("alice", select); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java index f4b18813b9e..5cb95a1b4c7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java @@ -19,13 +19,13 @@ import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.internal.core.auth.DsePlainTextAuthProvider; import com.datastax.oss.driver.api.core.Version; import 
com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import java.util.concurrent.TimeUnit; import org.junit.BeforeClass; @@ -60,7 +60,7 @@ public void should_execute_graph_query_on_authenticated_connection() { .withString(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, "") .withString(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, "cassandra") .withString(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, "cassandra") - .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, DsePlainTextAuthProvider.class) + .withClass(DefaultDriverOption.AUTH_PROVIDER_CLASS, PlainTextAuthProvider.class) .build()); GraphNode gn = From 0527ceb49f9c37d9b089add12d997a3994c17e6d Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 10 Oct 2019 08:59:51 -0700 Subject: [PATCH 214/979] Move all DSE functionality to CqlSession --- .../dse/driver/api/core/DseSession.java | 21 ++- .../driver/api/core/DseSessionBuilder.java | 7 +- .../api/core/DseSessionBuilderBase.java | 120 ++---------------- .../cql/continuous/ContinuousSession.java | 10 ++ .../reactive/ContinuousReactiveSession.java | 11 ++ .../api/core/cql/reactive/package-info.java | 22 ++++ .../dse/driver/api/core/graph/DseGraph.java | 6 +- .../DseGraphRemoteConnectionBuilder.java | 6 +- .../driver/api/core/graph/GraphSession.java | 12 +- .../driver/api/core/graph/GraphStatement.java | 6 +- .../session/DseProgrammaticArguments.java | 10 +- .../core/context/DseDriverContext.java | 101 +-------------- .../DefaultDseRemoteConnectionBuilder.java | 10 +- .../core/graph/DseGraphRemoteConnection.java | 10 +- 
.../core/insights/InsightsClient.java | 6 +- .../core/session/DefaultDseSession.java | 7 +- .../oss/driver/api/core/CqlSession.java | 14 +- .../oss/driver/api/core/session/Session.java | 12 +- .../core/context/DefaultDriverContext.java | 72 ++++++++++- .../session/RequestProcessorRegistry.java | 20 --- .../context/DseStartupOptionsBuilderTest.java | 13 +- .../ConfigAntiPatternsFinderTest.java | 15 +-- .../ExecutionProfilesInfoFinderTest.java | 18 +-- .../core/insights/InsightsClientTest.java | 56 ++++---- .../api/core/insights/InsightsClientIT.java | 6 +- 25 files changed, 251 insertions(+), 340 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/package-info.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java index 9dd24044f37..d66487eb41c 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSession.java @@ -15,25 +15,20 @@ */ package com.datastax.dse.driver.api.core; -import com.datastax.dse.driver.api.core.cql.continuous.ContinuousSession; -import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveSession; -import com.datastax.dse.driver.api.core.cql.reactive.ReactiveSession; -import com.datastax.dse.driver.api.core.graph.GraphSession; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.MavenCoordinates; import edu.umd.cs.findbugs.annotations.NonNull; -/** A custom session with DSE-specific capabilities. */ -public interface DseSession - extends CqlSession, - ContinuousSession, - GraphSession, - ReactiveSession, - ContinuousReactiveSession { +/** + * @deprecated All DSE functionality is now available directly on {@link CqlSession}. This type is + * preserved for backward compatibility, but you should now use {@link CqlSession} instead. 
+ */ +@Deprecated +public interface DseSession extends CqlSession { /** - * @deprecated the DSE driver is now part of the DataStax Java driver for Apache Cassandra®. - * This field is preserved for backward compatibility, but it returns the same value as {@link + * @deprecated the DSE driver is no longer published as a separate artifact. This field is + * preserved for backward compatibility, but it returns the same value as {@link * CqlSession#OSS_DRIVER_COORDINATES}. */ @Deprecated @NonNull MavenCoordinates DSE_DRIVER_COORDINATES = CqlSession.OSS_DRIVER_COORDINATES; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java index 0501604de52..56cfb31d4ce 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java @@ -21,11 +21,12 @@ import net.jcip.annotations.NotThreadSafe; /** - * Helper class to build a {@link DseSession} instance. - * - *

      This class is mutable and not thread-safe. + * @deprecated DSE functionality is now exposed directly on {@link CqlSession}. This class is + * preserved for backward compatibility, but {@link CqlSession#builder()} should be used + * instead. */ @NotThreadSafe +@Deprecated public class DseSessionBuilder extends DseSessionBuilderBase { @NonNull diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java index 6f218c40e41..26a6c04747f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java @@ -15,121 +15,17 @@ */ package com.datastax.dse.driver.api.core; -import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.session.SessionBuilder; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; import net.jcip.annotations.NotThreadSafe; +/** + * @deprecated DSE functionality is now exposed directly on {@link 
CqlSession}. This class is + * preserved for backward compatibility, but {@link CqlSession#builder()} should be used + * instead. + */ @NotThreadSafe +@Deprecated public abstract class DseSessionBuilderBase< SelfT extends DseSessionBuilderBase, SessionT> - extends SessionBuilder { - - protected DseProgrammaticArguments.Builder dseProgrammaticArgumentsBuilder = - DseProgrammaticArguments.builder(); - - /** - * Sets the configuration loader to use. - * - *

      Note that this loader must produce a configuration that includes the DSE-specific options: - * if you're using one of the built-in implementations provided by the driver, use the static - * factory methods from {@link DriverConfigLoader}. - * - *

      If you don't call this method, the builder will use the default implementation, based on the - * Typesafe config library. More precisely, configuration properties are loaded and merged from - * the following (first-listed are higher priority): - * - *

        - *
      • system properties - *
      • {@code application.conf} (all resources on classpath with this name) - *
      • {@code application.json} (all resources on classpath with this name) - *
      • {@code application.properties} (all resources on classpath with this name) - *
      • {@code dse-reference.conf} (all resources on classpath with this name). In particular, - * this will load the {@code dse-reference.conf} included in the core DSE driver JAR, that - * defines default options for all DSE-specific mandatory options. - *
      • {@code reference.conf} (all resources on classpath with this name). In particular, this - * will load the {@code reference.conf} included in the core driver JAR, that defines - * default options for all mandatory options. - *
      - * - * The resulting configuration is expected to contain a {@code datastax-java-driver} section. - * - *

      This default loader will honor the reload interval defined by the option {@code - * basic.config-reload-interval}. - * - * @see Typesafe config's - * standard loading behavior - */ - @NonNull - @Override - public SelfT withConfigLoader(@Nullable DriverConfigLoader configLoader) { - // overridden only to customize the javadocs - return super.withConfigLoader(configLoader); - } - - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { - - // Preserve backward compatibility with the deprecated method: - @SuppressWarnings("deprecation") - DriverContext legacyApiContext = - buildContext( - configLoader, - programmaticArguments.getTypeCodecs(), - programmaticArguments.getNodeStateListener(), - programmaticArguments.getSchemaChangeListener(), - programmaticArguments.getRequestTracker(), - programmaticArguments.getLocalDatacenters(), - programmaticArguments.getNodeFilters(), - programmaticArguments.getClassLoader()); - if (legacyApiContext != null) { - return legacyApiContext; - } - - return new DseDriverContext( - configLoader, programmaticArguments, dseProgrammaticArgumentsBuilder.build()); - } - - @Deprecated - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, - List> typeCodecs, - NodeStateListener nodeStateListener, - SchemaChangeListener schemaChangeListener, - RequestTracker requestTracker, - Map localDatacenters, - Map> nodeFilters, - ClassLoader classLoader) { - return super.buildContext( - configLoader, - typeCodecs, - nodeStateListener, - schemaChangeListener, - requestTracker, - localDatacenters, - nodeFilters, - classLoader); - } - - @NonNull - @Override - protected DriverConfigLoader defaultConfigLoader() { - return new DefaultDseDriverConfigLoader(); - } -} + extends SessionBuilder {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java 
b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java index dfcabeb8a4f..2b1a91ebdd3 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java @@ -78,6 +78,11 @@ public interface ContinuousSession extends Session { * using the {@linkplain #executeContinuouslyAsync(Statement) asynchronous variant} of this method * instead. * + *

      See {@link ContinuousSession} for more explanations about continuous paging. + * + *

      This feature is only available with Datastax Enterprise. Executing continuous queries + * against an Apache Cassandra© cluster will result in a runtime error. + * * @param statement the query to execute. * @return a synchronous iterable on the results. */ @@ -96,6 +101,11 @@ default ContinuousResultSet executeContinuously(@NonNull Statement statement) * speed, but it will give up if the client does not consume any pages in a period of time equal * to the read request timeout. * + *

      See {@link ContinuousSession} for more explanations about continuous paging. + * + *

      This feature is only available with Datastax Enterprise. Executing continuous queries + * against an Apache Cassandra© cluster will result in a runtime error. + * * @param statement the query to execute. * @return a future to the first asynchronous result. */ diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java index 4718cef627a..9661f9bf5a1 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java @@ -15,6 +15,7 @@ */ package com.datastax.dse.driver.api.core.cql.continuous.reactive; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousSession; import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; import com.datastax.oss.driver.api.core.cql.SimpleStatement; @@ -48,6 +49,11 @@ public interface ContinuousReactiveSession extends Session { * Returns a {@link Publisher} that, once subscribed to, executes the given query continuously and * emits all the results. * + *

      See {@link ContinuousSession} for more explanations about continuous paging. + * + *

      This feature is only available with Datastax Enterprise. Executing continuous queries + * against an Apache Cassandra® cluster will result in a runtime error. + * * @param query the query to execute. * @return The {@link Publisher} that will publish the returned results. */ @@ -60,6 +66,11 @@ default ContinuousReactiveResultSet executeContinuouslyReactive(@NonNull String * Returns a {@link Publisher} that, once subscribed to, executes the given query continuously and * emits all the results. * + *

      See {@link ContinuousSession} for more explanations about continuous paging. + * + *

      This feature is only available with Datastax Enterprise. Executing continuous queries + * against an Apache Cassandra® cluster will result in a runtime error. + * * @param statement the statement to execute. * @return The {@link Publisher} that will publish the returned results. */ diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/package-info.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/package-info.java new file mode 100644 index 00000000000..3a138abd833 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/package-info.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Types related to CQL execution using reactive-style programming. + * + *

      Note that this is located in a {@code dse} package for historical reasons; reactive queries + * can now be used with open-source Cassandra as well. + */ +package com.datastax.dse.driver.api.core.cql.reactive; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java index cb96526d0f1..44d04e6b05f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java @@ -15,8 +15,8 @@ */ package com.datastax.dse.driver.api.core.graph; -import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.internal.core.graph.DefaultDseRemoteConnectionBuilder; +import com.datastax.oss.driver.api.core.CqlSession; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -48,9 +48,9 @@ public class DseGraph { /** * Returns a builder helper class to help create {@link * org.apache.tinkerpop.gremlin.process.remote.RemoteConnection} implementations that seamlessly - * connect to DSE Graph using the {@link DseSession} in parameter. + * connect to DSE Graph using the {@link CqlSession} in parameter. 
*/ - public static DseGraphRemoteConnectionBuilder remoteConnectionBuilder(DseSession dseSession) { + public static DseGraphRemoteConnectionBuilder remoteConnectionBuilder(CqlSession dseSession) { return new DefaultDseRemoteConnectionBuilder(dseSession); } diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java index fdc483325a7..fe2dca85819 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.api.core.graph; -import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; @@ -23,7 +23,7 @@ * A builder helper to create a {@link RemoteConnection} that will be used to build * implicitly-executing fluent traversals. * - *

      To create an instance of this, use the {@link DseGraph#remoteConnectionBuilder(DseSession)} + *

      To create an instance of this, use the {@link DseGraph#remoteConnectionBuilder(CqlSession)} * method: * *

      {@code
      @@ -32,7 +32,7 @@
        * List vertices = g.V().hasLabel("person").toList();
        * }
      * - * @see DseSession + * @see CqlSession */ public interface DseGraphRemoteConnectionBuilder { diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java index 5f30f4a9f88..0bd263e8f64 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.api.core.graph; -import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.session.Session; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; @@ -25,9 +25,9 @@ * A session that has the ability to execute DSE Graph requests. * *

      Generally this interface won't be referenced directly in an application; instead, you should - * use {@link DseSession}, which is a combination of this interface and many others for a more + * use {@link CqlSession}, which is a combination of this interface and many others for a more * integrated usage of DataStax Enterprise's multi-model database via a single entry point. However, - * it is still possible to cast a {@code DseSession} to a {@code GraphSession} to only expose the + * it is still possible to cast a {@code CqlSession} to a {@code GraphSession} to only expose the * DSE Graph execution methods. */ public interface GraphSession extends Session { @@ -49,6 +49,9 @@ public interface GraphSession extends Session { * configuration and schema. * * + *

      This feature is only available with Datastax Enterprise. Executing graph queries against an + * Apache Cassandra® cluster will result in a runtime error. + * * @see GraphResultSet */ @NonNull @@ -62,6 +65,9 @@ default GraphResultSet execute(@NonNull GraphStatement graphStatement) { * Executes a graph statement asynchronously (the call returns as soon as the statement was sent, * generally before the result is available). * + *

      This feature is only available with Datastax Enterprise. Executing graph queries against an + * Apache Cassandra® cluster will result in a runtime error. + * * @see #execute(GraphStatement) * @see AsyncGraphResultSet */ diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java index a45b165ecd3..b7f4d13ac2f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatement.java @@ -15,9 +15,9 @@ */ package com.datastax.dse.driver.api.core.graph; -import com.datastax.dse.driver.api.core.DseSession; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.NoNodeAvailableException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; @@ -48,7 +48,7 @@ public interface GraphStatement> extends Req * *

      Most users won't use this explicitly. It is needed for the generic execute method ({@link * Session#execute(Request, GenericType)}), but graph statements will generally be run with one of - * the DSE driver's built-in helper methods (such as {@link DseSession#execute(GraphStatement)}). + * the DSE driver's built-in helper methods (such as {@link CqlSession#execute(GraphStatement)}). */ GenericType SYNC = GenericType.of(GraphResultSet.class); @@ -58,7 +58,7 @@ public interface GraphStatement> extends Req *

      Most users won't use this explicitly. It is needed for the generic execute method ({@link * Session#execute(Request, GenericType)}), but graph statements will generally be run with one of * the DSE driver's built-in helper methods (such as {@link - * DseSession#executeAsync(GraphStatement)}). + * CqlSession#executeAsync(GraphStatement)}). */ GenericType> ASYNC = new GenericType>() {}; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java index 7083cdcc6e4..2e727e583c0 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java @@ -15,17 +15,17 @@ */ package com.datastax.dse.driver.api.core.session; -import com.datastax.dse.driver.api.core.DseSessionBuilder; +import com.datastax.oss.driver.api.core.CqlSession; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.UUID; /** - * The DSE-specific arguments that can be set programmatically when building a session. - * - *

      This is mostly for internal use, you only need to deal with this directly if you write custom - * {@link DseSessionBuilder} subclasses. + * @deprecated All DSE functionality is now available directly on {@link CqlSession}. This type is + * preserved for backward compatibility, but clients should now build {@link CqlSession} + * instances instead of DSE sessions. */ +@Deprecated public class DseProgrammaticArguments { @NonNull diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java index 177ffde5043..d95d03f8b99 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java @@ -16,13 +16,7 @@ package com.datastax.dse.driver.internal.core.context; import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; -import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; -import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; -import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; -import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; @@ -31,28 +25,21 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import 
com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; -import com.datastax.oss.driver.internal.core.session.RequestProcessor; -import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; -import com.datastax.oss.driver.internal.core.util.Loggers; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -/** Extends the default driver context to plug-in DSE-specific implementations. */ +/** + * @deprecated All DSE functionality is now available directly on {@link CqlSession}. This type is + * preserved for backward compatibility, but clients should now build {@link CqlSession} + * instances instead of DSE sessions. 
+ */ @ThreadSafe +@Deprecated public class DseDriverContext extends DefaultDriverContext { - private static final Logger LOG = LoggerFactory.getLogger(DseDriverContext.class); - public DseDriverContext( DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments, @@ -93,78 +80,4 @@ public DseDriverContext( .withStartupApplicationVersion(applicationVersion) .build()); } - - @Override - protected RequestProcessorRegistry buildRequestProcessorRegistry() { - String logPrefix = getSessionName(); - - List> processors = new ArrayList<>(); - - // regular requests (sync and async) - CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); - CqlRequestSyncProcessor cqlRequestSyncProcessor = - new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); - processors.add(cqlRequestAsyncProcessor); - processors.add(cqlRequestSyncProcessor); - - // prepare requests (sync and async) - CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); - CqlPrepareSyncProcessor cqlPrepareSyncProcessor = - new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); - processors.add(cqlPrepareAsyncProcessor); - processors.add(cqlPrepareSyncProcessor); - - // continuous requests (sync and async) - ContinuousCqlRequestAsyncProcessor continuousCqlRequestAsyncProcessor = - new ContinuousCqlRequestAsyncProcessor(); - ContinuousCqlRequestSyncProcessor continuousCqlRequestSyncProcessor = - new ContinuousCqlRequestSyncProcessor(continuousCqlRequestAsyncProcessor); - processors.add(continuousCqlRequestAsyncProcessor); - processors.add(continuousCqlRequestSyncProcessor); - - // graph requests (sync and async) - try { - Class.forName("org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal"); - GraphRequestAsyncProcessor graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(); - GraphRequestSyncProcessor graphRequestSyncProcessor = - new GraphRequestSyncProcessor(graphRequestAsyncProcessor); - 
processors.add(graphRequestAsyncProcessor); - processors.add(graphRequestSyncProcessor); - } catch (ClassNotFoundException | LinkageError error) { - Loggers.warnWithException( - LOG, - "Could not register Graph extensions; Tinkerpop API might be missing from classpath", - error); - } - - // reactive requests (regular and continuous) - try { - Class.forName("org.reactivestreams.Publisher"); - CqlRequestReactiveProcessor cqlRequestReactiveProcessor = - new CqlRequestReactiveProcessor(cqlRequestAsyncProcessor); - ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = - new ContinuousCqlRequestReactiveProcessor(continuousCqlRequestAsyncProcessor); - processors.add(cqlRequestReactiveProcessor); - processors.add(continuousCqlRequestReactiveProcessor); - } catch (ClassNotFoundException | LinkageError error) { - Loggers.warnWithException( - LOG, - "Could not register Reactive extensions; Reactive Streams API might be missing from classpath", - error); - } - - return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); - } - - @Override - protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { - RequestTracker requestTrackerFromConfig = super.buildRequestTracker(requestTrackerFromBuilder); - if (requestTrackerFromConfig instanceof MultiplexingRequestTracker) { - return requestTrackerFromConfig; - } else { - MultiplexingRequestTracker multiplexingRequestTracker = new MultiplexingRequestTracker(); - multiplexingRequestTracker.register(requestTrackerFromConfig); - return multiplexingRequestTracker; - } - } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java index 3d607db5269..9aa984dbc56 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultDseRemoteConnectionBuilder.java @@ -15,8 +15,8 @@ */ package com.datastax.dse.driver.internal.core.graph; -import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.graph.DseGraphRemoteConnectionBuilder; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import net.jcip.annotations.NotThreadSafe; import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; @@ -24,17 +24,17 @@ @NotThreadSafe public class DefaultDseRemoteConnectionBuilder implements DseGraphRemoteConnectionBuilder { - private final DseSession dseSession; + private final CqlSession session; private DriverExecutionProfile executionProfile; private String executionProfileName; - public DefaultDseRemoteConnectionBuilder(DseSession dseSession) { - this.dseSession = dseSession; + public DefaultDseRemoteConnectionBuilder(CqlSession session) { + this.session = session; } @Override public RemoteConnection build() { - return new DseGraphRemoteConnection(dseSession, executionProfile, executionProfileName); + return new DseGraphRemoteConnection(session, executionProfile, executionProfileName); } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java index b9525eb3596..6a2e2e67beb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.internal.core.graph; -import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import java.util.Iterator; import 
java.util.concurrent.CompletableFuture; @@ -31,13 +31,13 @@ @Immutable public class DseGraphRemoteConnection implements RemoteConnection { - private final DseSession dseSession; + private final CqlSession session; private final DriverExecutionProfile executionProfile; private final String executionProfileName; public DseGraphRemoteConnection( - DseSession dseSession, DriverExecutionProfile executionProfile, String executionProfileName) { - this.dseSession = dseSession; + CqlSession session, DriverExecutionProfile executionProfile, String executionProfileName) { + this.session = session; this.executionProfile = executionProfile; this.executionProfileName = executionProfileName; } @@ -62,7 +62,7 @@ public RemoteTraversal submit(Bytecode bytecode) throws RemoteConnecti @Override public CompletableFuture> submitAsync(Bytecode bytecode) throws RemoteConnectionException { - return dseSession + return session .executeAsync(new BytecodeGraphStatement(bytecode, executionProfile, executionProfileName)) .toCompletableFuture() .thenApply(DseGraphTraversal::new); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 9ef176ffc6d..047e30d8d51 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -24,8 +24,6 @@ import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.SSL_HOSTNAME_VALIDATION; import com.datastax.dse.driver.api.core.DseProtocolVersion; -import com.datastax.dse.driver.api.core.DseSessionBuilder; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.insights.PackageUtil.ClassSettingDetails; import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; import 
com.datastax.dse.driver.internal.core.insights.exceptions.InsightEventFormatException; @@ -44,6 +42,7 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.driver.internal.core.control.ControlConnection; @@ -418,8 +417,7 @@ static String getClusterCreateCaller(StackTraceElement[] stackTrace) { } private static boolean isClusterStackTrace(StackTraceElement stackTraceElement) { - return stackTraceElement.getClassName().equals(DseDriverContext.class.getName()) - || stackTraceElement.getClassName().equals(DseSessionBuilder.class.getName()) + return stackTraceElement.getClassName().equals(DefaultDriverContext.class.getName()) || stackTraceElement.getClassName().equals(SessionBuilder.class.getName()); } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java index 2ec1ca0ad6e..bd269020ff6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/session/DefaultDseSession.java @@ -16,15 +16,18 @@ package com.datastax.dse.driver.internal.core.session; import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.session.SessionWrapper; import net.jcip.annotations.ThreadSafe; /** - * Implementation note: metadata methods perform 
unchecked casts, relying on the fact that the - * metadata manager returns the appropriate runtime type. + * @deprecated DSE functionality is now exposed directly on {@link CqlSession}. This class is + * preserved for backward compatibility, but {@link DefaultSession} should be used instead. */ @ThreadSafe +@Deprecated public class DefaultDseSession extends SessionWrapper implements DseSession { public DefaultDseSession(Session delegate) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java index 8af93ca6ddd..49a65b4e812 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java @@ -15,6 +15,10 @@ */ package com.datastax.oss.driver.api.core; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousSession; +import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveSession; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveSession; +import com.datastax.dse.driver.api.core.graph.GraphSession; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.PrepareRequest; import com.datastax.oss.driver.api.core.cql.PreparedStatement; @@ -28,8 +32,14 @@ import java.util.Objects; import java.util.concurrent.CompletionStage; -/** A specialized session with convenience methods to execute CQL statements. */ -public interface CqlSession extends Session { +/** + * The default session type built by the driver. + * + *

      It provides friendlier execution methods for the request types most commonly used with Apache + * Cassandra® and Datastax Enterprise. + */ +public interface CqlSession + extends Session, ReactiveSession, ContinuousSession, GraphSession, ContinuousReactiveSession { /** * Returns a builder to create a new instance. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java index 498cea42935..b57545cbc3d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java @@ -38,15 +38,13 @@ /** * A nexus to send requests to a Cassandra cluster. * - *

      This is a high-level abstraction capable of handling arbitrary request and result types. For - * CQL statements, {@link CqlSession} provides convenience methods with more familiar signatures (by - * default, all the instances returned by the driver also implement {@link CqlSession}). + *

      This is a high-level abstraction capable of handling arbitrary request and result types. The + * driver's built-in {@link CqlSession} is a more convenient subtype for most client applications. * *

      The driver's request execution logic is pluggable (see {@code RequestProcessor} in the - * internal API). This is intended for future extensions, for example a reactive API for CQL - * statements, or graph requests in the Datastax Enterprise driver. Hence the generic {@link - * #execute(Request, GenericType)} method in this interface, that makes no assumptions about the - * request or result type. + * internal API) to allow custom extensions. Hence the generic {@link #execute(Request, + * GenericType)} method in this interface, that makes no assumptions about the request or result + * type. * * @see CqlSession#builder() */ diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index ac139f415c0..aa057fd49d4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -18,6 +18,12 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; import 
com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; @@ -52,6 +58,10 @@ import com.datastax.oss.driver.internal.core.channel.DefaultWriteCoalescer; import com.datastax.oss.driver.internal.core.channel.WriteCoalescer; import com.datastax.oss.driver.internal.core.control.ControlConnection; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.internal.core.metadata.CloudTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; @@ -74,6 +84,7 @@ import com.datastax.oss.driver.internal.core.servererrors.DefaultWriteTypeRegistry; import com.datastax.oss.driver.internal.core.servererrors.WriteTypeRegistry; import com.datastax.oss.driver.internal.core.session.PoolManager; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.ssl.JdkSslHandlerFactory; import com.datastax.oss.driver.internal.core.ssl.SslHandlerFactory; @@ -92,6 +103,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -485,7 +497,65 @@ protected ControlConnection buildControlConnection() { } protected RequestProcessorRegistry buildRequestProcessorRegistry() { - return RequestProcessorRegistry.defaultCqlProcessors(getSessionName()); + String logPrefix = getSessionName(); + + List> processors = new ArrayList<>(); + + // regular requests (sync and async) + CqlRequestAsyncProcessor 
cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); + CqlRequestSyncProcessor cqlRequestSyncProcessor = + new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); + processors.add(cqlRequestAsyncProcessor); + processors.add(cqlRequestSyncProcessor); + + // prepare requests (sync and async) + CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); + CqlPrepareSyncProcessor cqlPrepareSyncProcessor = + new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareSyncProcessor); + + // continuous requests (sync and async) + ContinuousCqlRequestAsyncProcessor continuousCqlRequestAsyncProcessor = + new ContinuousCqlRequestAsyncProcessor(); + ContinuousCqlRequestSyncProcessor continuousCqlRequestSyncProcessor = + new ContinuousCqlRequestSyncProcessor(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestSyncProcessor); + + // graph requests (sync and async) + try { + Class.forName("org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal"); + GraphRequestAsyncProcessor graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(); + GraphRequestSyncProcessor graphRequestSyncProcessor = + new GraphRequestSyncProcessor(graphRequestAsyncProcessor); + processors.add(graphRequestAsyncProcessor); + processors.add(graphRequestSyncProcessor); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, + "Could not register Graph extensions; Tinkerpop API might be missing from classpath", + error); + } + + // reactive requests (regular and continuous) + try { + Class.forName("org.reactivestreams.Publisher"); + CqlRequestReactiveProcessor cqlRequestReactiveProcessor = + new CqlRequestReactiveProcessor(cqlRequestAsyncProcessor); + ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = + new 
ContinuousCqlRequestReactiveProcessor(continuousCqlRequestAsyncProcessor); + processors.add(cqlRequestReactiveProcessor); + processors.add(continuousCqlRequestReactiveProcessor); + } catch (ClassNotFoundException | LinkageError error) { + Loggers.warnWithException( + LOG, + "Could not register Reactive extensions; " + + "Reactive Streams API might be missing from classpath", + error); + } + + return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } protected CodecRegistry buildCodecRegistry(String logPrefix, List> codecs) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/RequestProcessorRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/RequestProcessorRegistry.java index aca57fda97f..30810164854 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/RequestProcessorRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/RequestProcessorRegistry.java @@ -17,10 +17,6 @@ import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; @@ -31,22 +27,6 @@ public class RequestProcessorRegistry { private static final Logger LOG = LoggerFactory.getLogger(RequestProcessorRegistry.class); - public static RequestProcessorRegistry defaultCqlProcessors(String logPrefix) { - CqlRequestAsyncProcessor requestAsyncProcessor = new CqlRequestAsyncProcessor(); - CqlRequestSyncProcessor requestSyncProcessor = - new 
CqlRequestSyncProcessor(requestAsyncProcessor); - CqlPrepareAsyncProcessor prepareAsyncProcessor = new CqlPrepareAsyncProcessor(); - CqlPrepareSyncProcessor prepareSyncProcessor = - new CqlPrepareSyncProcessor(prepareAsyncProcessor); - - return new RequestProcessorRegistry( - logPrefix, - requestAsyncProcessor, - requestSyncProcessor, - prepareAsyncProcessor, - prepareSyncProcessor); - } - private final String logPrefix; // Effectively immutable: the contents are never modified after construction private final RequestProcessor[] processors; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index 8743bd219da..0d255488c3d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -15,14 +15,12 @@ */ package com.datastax.dse.driver.internal.core.context; -import static com.datastax.dse.driver.api.core.DseSession.DSE_DRIVER_COORDINATES; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; @@ -31,6 +29,7 @@ import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; 
import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.protocol.internal.request.Startup; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -44,7 +43,7 @@ @RunWith(DataProviderRunner.class) public class DseStartupOptionsBuilderTest { - private DseDriverContext driverContext; + private DefaultDriverContext driverContext; // Mocks for instantiating the DSE driver context @Mock private DriverConfigLoader configLoader; @@ -61,10 +60,9 @@ public void before() { private void buildContext(UUID clientId, String applicationName, String applicationVersion) { this.driverContext = - new DseDriverContext( + new DefaultDriverContext( configLoader, - ProgrammaticArguments.builder().build(), - DseProgrammaticArguments.builder() + ProgrammaticArguments.builder() .withStartupClientId(clientId) .withStartupApplicationName(applicationName) .withStartupApplicationVersion(applicationVersion) @@ -74,7 +72,8 @@ private void buildContext(UUID clientId, String applicationName, String applicat private void assertDefaultStartupOptions(Startup startup) { assertThat(startup.options).containsEntry(Startup.CQL_VERSION_KEY, "3.0.0"); assertThat(startup.options) - .containsEntry(StartupOptionsBuilder.DRIVER_NAME_KEY, DSE_DRIVER_COORDINATES.getName()); + .containsEntry( + StartupOptionsBuilder.DRIVER_NAME_KEY, Session.OSS_DRIVER_COORDINATES.getName()); assertThat(startup.options).containsKey(StartupOptionsBuilder.DRIVER_VERSION_KEY); Version version = Version.parse(startup.options.get(StartupOptionsBuilder.DRIVER_VERSION_KEY)); assertThat(version).isEqualTo(Session.OSS_DRIVER_COORDINATES.getVersion()); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java index ef40856e2fc..22ffa73c0c7 100644 --- 
a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ConfigAntiPatternsFinderTest.java @@ -21,9 +21,9 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -48,27 +48,26 @@ public void should_find_ssl_anti_pattern( boolean hostnameValidation, Map expected) { // given - DseDriverContext dseDriverContext = + InternalDriverContext context = mockDefaultProfile(sslEngineFactoryClassDefined, hostnameValidation); // when - Map antiPatterns = - new ConfigAntiPatternsFinder().findAntiPatterns(dseDriverContext); + Map antiPatterns = new ConfigAntiPatternsFinder().findAntiPatterns(context); // then assertThat(antiPatterns).isEqualTo(expected); } - private DseDriverContext mockDefaultProfile( + private InternalDriverContext mockDefaultProfile( boolean sslEngineFactoryClassDefined, boolean hostnameValidation) { - DseDriverContext dseDriverContext = mock(DseDriverContext.class); + InternalDriverContext context = mock(InternalDriverContext.class); DriverConfig driverConfig = mock(DriverConfig.class); - when(dseDriverContext.getConfig()).thenReturn(driverConfig); + when(context.getConfig()).thenReturn(driverConfig); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); when(profile.isDefined(SSL_ENGINE_FACTORY_CLASS)).thenReturn(sslEngineFactoryClassDefined); when(profile.getBoolean(SSL_HOSTNAME_VALIDATION, false)).thenReturn(hostnameValidation); 
when(driverConfig.getDefaultProfile()).thenReturn(profile); - return dseDriverContext; + return context; } @DataProvider diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java index 5bf10cd3466..d119ea5b655 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinderTest.java @@ -32,12 +32,12 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.insights.schema.LoadBalancingInfo; import com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; @@ -59,12 +59,12 @@ public void should_include_info_about_default_profile() { Map profiles = ImmutableMap.of("default", defaultExecutionProfile); - DseDriverContext dseDriverContext = + InternalDriverContext context = mockDriverContextWithProfiles(defaultExecutionProfile, profiles); // when Map executionProfilesInfo = - new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(dseDriverContext); + new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(context); // then assertThat(executionProfilesInfo) @@ -96,11 +96,11 @@ public void 
should_include_info_about_default_profile_and_only_difference_for_sp Map profiles = ImmutableMap.of( "default", defaultExecutionProfile, "non-default", nonDefaultExecutionProfile); - DseDriverContext dseDriverContext = + InternalDriverContext context = mockDriverContextWithProfiles(defaultExecutionProfile, profiles); // when Map executionProfilesInfo = - new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(dseDriverContext); + new ExecutionProfilesInfoFinder().getExecutionProfilesInfo(context); // then assertThat(executionProfilesInfo) @@ -219,15 +219,15 @@ public void should_include_empty_execution_profile_if_has_all_nulls() assertThat(result).isEqualTo("{\"p\":{}}"); } - private DseDriverContext mockDriverContextWithProfiles( + private InternalDriverContext mockDriverContextWithProfiles( DriverExecutionProfile defaultExecutionProfile, Map profiles) { - DseDriverContext dseDriverContext = mock(DseDriverContext.class); + InternalDriverContext context = mock(InternalDriverContext.class); DriverConfig driverConfig = mock(DriverConfig.class); Mockito.>when(driverConfig.getProfiles()) .thenReturn(profiles); when(driverConfig.getDefaultProfile()).thenReturn(defaultExecutionProfile); - when(dseDriverContext.getConfig()).thenReturn(driverConfig); - return dseDriverContext; + when(context.getConfig()).thenReturn(driverConfig); + return context; } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index af00d203e07..9fdaba2e991 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -28,7 +28,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import 
com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; import com.datastax.dse.driver.internal.core.insights.schema.AuthProviderType; import com.datastax.dse.driver.internal.core.insights.schema.Insight; @@ -52,6 +51,7 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.StartupOptionsBuilder; import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; @@ -94,7 +94,7 @@ public class InsightsClientTest { @Test public void should_construct_json_event_startup_message() throws IOException { // given - DseDriverContext dseDriverContext = mockDseDriverContext(); + DefaultDriverContext DefaultDriverContext = mockDefaultDriverContext(); PlatformInfoFinder platformInfoFinder = mock(PlatformInfoFinder.class); OS os = new OS("linux", "1.2", "x64"); CPUS cpus = new CPUS(8, "intel i7"); @@ -106,14 +106,14 @@ public void should_construct_json_event_startup_message() throws IOException { when(platformInfoFinder.getInsightsPlatformInfo()).thenReturn(insightsPlatformInfo); ConfigAntiPatternsFinder configAntiPatternsFinder = mock(ConfigAntiPatternsFinder.class); - when(configAntiPatternsFinder.findAntiPatterns(any(DseDriverContext.class))) + when(configAntiPatternsFinder.findAntiPatterns(any(DefaultDriverContext.class))) .thenReturn( ImmutableMap.of( "contactPointsMultipleDCs", "Contact points contain hosts from multiple data centers")); DataCentersFinder dataCentersFinder = mock(DataCentersFinder.class); - when(dataCentersFinder.getDataCenters(any(DseDriverContext.class))) + when(dataCentersFinder.getDataCenters(any(DefaultDriverContext.class))) .thenReturn(Sets.newHashSet("dc1", "dc2")); ReconnectionPolicyInfoFinder 
reconnectionPolicyInfoFinder = mock(ReconnectionPolicyInfoFinder.class); @@ -124,7 +124,7 @@ public void should_construct_json_event_startup_message() throws IOException { InsightsClient insightsClient = new InsightsClient( - dseDriverContext, + DefaultDriverContext, MOCK_TIME_SUPPLIER, INSIGHTS_CONFIGURATION, platformInfoFinder, @@ -235,7 +235,7 @@ public void should_construct_json_event_status_message() throws IOException { // given InsightsClient insightsClient = new InsightsClient( - mockDseDriverContext(), + mockDefaultDriverContext(), MOCK_TIME_SUPPLIER, INSIGHTS_CONFIGURATION, null, @@ -295,9 +295,9 @@ public static Object[][] stackTraceProvider() { StackTraceElement[] onlyInitCall = new StackTraceElement[] { new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), }; @@ -314,18 +314,18 @@ public static Object[][] stackTraceProvider() { new StackTraceElement[] { new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), }; StackTraceElement[] stackTraceWithOneInitCallAndCaller = new StackTraceElement[] { new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), new StackTraceElement( "com.example.ActualCallerNameApp", "main", "ActualCallerNameApp.java", 1) @@ -335,9 +335,9 @@ public static Object[][] stackTraceProvider() { new StackTraceElement[] { new StackTraceElement("java.lang.Thread", 
"getStackTrace", "Thread.java", 1559), new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), new StackTraceElement( "com.datastax.oss.driver.api.core.session.SessionBuilder", @@ -350,9 +350,9 @@ public static Object[][] stackTraceProvider() { StackTraceElement[] stackTraceWithChainOfInitCalls = new StackTraceElement[] { new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), new StackTraceElement( "com.datastax.dse.driver.api.core.DseSessionBuilder", @@ -379,9 +379,9 @@ public static Object[][] stackTraceProvider() { new StackTraceElement[] { new StackTraceElement("java.lang.Thread", "getStackTrace", "Thread.java", 1559), new StackTraceElement( - "com.datastax.dse.driver.internal.core.context.DseDriverContext", + "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", - "DseDriverContext.java", + "DefaultDriverContext.java", 94), new StackTraceElement( "com.datastax.dse.driver.api.core.DseSessionBuilder", @@ -419,11 +419,11 @@ public static Object[][] stackTraceProvider() { }; } - private DseDriverContext mockDseDriverContext() throws UnknownHostException { - DseDriverContext dseDriverContext = mock(DseDriverContext.class); - mockConnectionPools(dseDriverContext); + private DefaultDriverContext mockDefaultDriverContext() throws UnknownHostException { + DefaultDriverContext DefaultDriverContext = mock(DefaultDriverContext.class); + mockConnectionPools(DefaultDriverContext); MetadataManager manager = mock(MetadataManager.class); - when(dseDriverContext.getMetadataManager()).thenReturn(manager); + when(DefaultDriverContext.getMetadataManager()).thenReturn(manager); DriverExecutionProfile 
defaultExecutionProfile = mockDefaultExecutionProfile(); DriverExecutionProfile nonDefaultExecutionProfile = mockNonDefaultRequestTimeoutExecutionProfile(); @@ -435,8 +435,8 @@ private DseDriverContext mockDseDriverContext() throws UnknownHostException { startupOptions.put(StartupOptionsBuilder.DRIVER_VERSION_KEY, "2.x"); startupOptions.put(StartupOptionsBuilder.DRIVER_NAME_KEY, "DataStax Enterprise Java Driver"); - when(dseDriverContext.getStartupOptions()).thenReturn(startupOptions); - when(dseDriverContext.getProtocolVersion()).thenReturn(DSE_V2); + when(DefaultDriverContext.getStartupOptions()).thenReturn(startupOptions); + when(DefaultDriverContext.getProtocolVersion()).thenReturn(DSE_V2); DefaultNode contactPoint = mock(DefaultNode.class); EndPoint contactEndPoint = mock(EndPoint.class); when(contactEndPoint.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 9999)); @@ -444,7 +444,7 @@ private DseDriverContext mockDseDriverContext() throws UnknownHostException { when(manager.getContactPoints()).thenReturn(ImmutableSet.of(contactPoint)); DriverConfig driverConfig = mock(DriverConfig.class); - when(dseDriverContext.getConfig()).thenReturn(driverConfig); + when(DefaultDriverContext.getConfig()).thenReturn(driverConfig); Map profiles = ImmutableMap.of( "default", defaultExecutionProfile, "non-default", nonDefaultExecutionProfile); @@ -460,11 +460,11 @@ private DseDriverContext mockDseDriverContext() throws UnknownHostException { when(channel.getEndPoint()).thenReturn(controlConnectionEndpoint); when(channel.localAddress()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); when(controlConnection.channel()).thenReturn(channel); - when(dseDriverContext.getControlConnection()).thenReturn(controlConnection); - return dseDriverContext; + when(DefaultDriverContext.getControlConnection()).thenReturn(controlConnection); + return DefaultDriverContext; } - private void mockConnectionPools(DseDriverContext driverContext) { + private void 
mockConnectionPools(DefaultDriverContext driverContext) { Node node1 = mock(Node.class); EndPoint endPoint1 = mock(EndPoint.class); when(endPoint1.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java index 4eb90d6dbd0..c7a131eb5af 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java @@ -17,12 +17,12 @@ import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.insights.InsightsClient; import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import io.netty.util.concurrent.DefaultEventExecutor; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -55,7 +55,7 @@ public void should_send_insights_startup_event_using_client() InsightsClient insightsClient = InsightsClient.createInsightsClient( new InsightsConfiguration(true, 300000L, new DefaultEventExecutor()), - (DseDriverContext) sessionRule.session().getContext(), + (InternalDriverContext) sessionRule.session().getContext(), EMPTY_STACK_TRACE); // when @@ -71,7 +71,7 @@ public void should_send_insights_status_event_using_client() InsightsClient insightsClient = InsightsClient.createInsightsClient( new InsightsConfiguration(true, 
300000L, new DefaultEventExecutor()), - (DseDriverContext) sessionRule.session().getContext(), + (InternalDriverContext) sessionRule.session().getContext(), EMPTY_STACK_TRACE); // when From 24adef39ab90e71b021d5307574f9dd8126cced0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 14 Nov 2019 14:41:33 -0800 Subject: [PATCH 215/979] Extract legacy CQL methods to separate interfaces --- .../oss/driver/api/core/CqlSession.java | 230 ++---------------- .../driver/api/core/cql/AsyncCqlSession.java | 100 ++++++++ .../driver/api/core/cql/SyncCqlSession.java | 153 ++++++++++++ 3 files changed, 277 insertions(+), 206 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java index 49a65b4e812..17be89b7a52 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java @@ -19,27 +19,38 @@ import com.datastax.dse.driver.api.core.cql.continuous.reactive.ContinuousReactiveSession; import com.datastax.dse.driver.api.core.cql.reactive.ReactiveSession; import com.datastax.dse.driver.api.core.graph.GraphSession; -import com.datastax.oss.driver.api.core.cql.AsyncResultSet; -import com.datastax.oss.driver.api.core.cql.PrepareRequest; -import com.datastax.oss.driver.api.core.cql.PreparedStatement; -import com.datastax.oss.driver.api.core.cql.ResultSet; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.cql.AsyncCqlSession; +import com.datastax.oss.driver.api.core.cql.SyncCqlSession; import com.datastax.oss.driver.api.core.session.Session; 
-import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Objects; -import java.util.concurrent.CompletionStage; /** * The default session type built by the driver. * - *

      It provides friendlier execution methods for the request types most commonly used with Apache - * Cassandra® and Datastax Enterprise. + *

      It provides user-friendly execution methods for: + * + *

        + *
      • CQL requests: synchronous, asynchronous or reactive mode; + *
      • requests specific to Datastax Enterprise: graph and continuous paging. + *
      + * + * Client applications can use this interface even if they don't need all the features. In + * particular, it can be used with a regular Apache Cassandra ® cluster, as long as you don't + * call any of the DSE-specific execute methods. If you're in that situation, you might also want to + * exclude certain dependencies from your classpath (see the "Integration" page in the user manual). + * + *

      Note that the name "CQL session" is no longer really accurate since this interface can now + * execute other request types; but it was preserved for backward compatibility with previous driver + * versions. */ public interface CqlSession - extends Session, ReactiveSession, ContinuousSession, GraphSession, ContinuousReactiveSession { + extends Session, + SyncCqlSession, + AsyncCqlSession, + ReactiveSession, + ContinuousSession, + GraphSession, + ContinuousReactiveSession { /** * Returns a builder to create a new instance. @@ -50,197 +61,4 @@ public interface CqlSession static CqlSessionBuilder builder() { return new CqlSessionBuilder(); } - - /** - * Executes a CQL statement synchronously (the calling thread blocks until the result becomes - * available). - */ - @NonNull - default ResultSet execute(@NonNull Statement statement) { - return Objects.requireNonNull( - execute(statement, Statement.SYNC), "The CQL processor should never return a null result"); - } - - /** - * Executes a CQL statement synchronously (the calling thread blocks until the result becomes - * available). - */ - @NonNull - default ResultSet execute(@NonNull String query) { - return execute(SimpleStatement.newInstance(query)); - } - - /** - * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, - * generally before the result is available). - */ - @NonNull - default CompletionStage executeAsync(@NonNull Statement statement) { - return Objects.requireNonNull( - execute(statement, Statement.ASYNC), "The CQL processor should never return a null result"); - } - - /** - * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, - * generally before the result is available). 
- */ - @NonNull - default CompletionStage executeAsync(@NonNull String query) { - return executeAsync(SimpleStatement.newInstance(query)); - } - - /** - * Prepares a CQL statement synchronously (the calling thread blocks until the statement is - * prepared). - * - *

      Note that the bound statements created from the resulting prepared statement will inherit - * some of the attributes of the provided simple statement. That is, given: - * - *

      {@code
      -   * SimpleStatement simpleStatement = SimpleStatement.newInstance("...");
      -   * PreparedStatement preparedStatement = session.prepare(simpleStatement);
      -   * BoundStatement boundStatement = preparedStatement.bind();
      -   * }
      - * - * Then: - * - *
        - *
      • the following methods return the same value as their counterpart on {@code - * simpleStatement}: - *
          - *
        • {@link Request#getExecutionProfileName() boundStatement.getExecutionProfileName()} - *
        • {@link Request#getExecutionProfile() boundStatement.getExecutionProfile()} - *
        • {@link Statement#getPagingState() boundStatement.getPagingState()} - *
        • {@link Request#getRoutingKey() boundStatement.getRoutingKey()} - *
        • {@link Request#getRoutingToken() boundStatement.getRoutingToken()} - *
        • {@link Request#getCustomPayload() boundStatement.getCustomPayload()} - *
        • {@link Request#isIdempotent() boundStatement.isIdempotent()} - *
        • {@link Request#getTimeout() boundStatement.getTimeout()} - *
        • {@link Statement#getPagingState() boundStatement.getPagingState()} - *
        • {@link Statement#getPageSize() boundStatement.getPageSize()} - *
        • {@link Statement#getConsistencyLevel() boundStatement.getConsistencyLevel()} - *
        • {@link Statement#getSerialConsistencyLevel() - * boundStatement.getSerialConsistencyLevel()} - *
        • {@link Statement#isTracing() boundStatement.isTracing()} - *
        - *
      • {@link Request#getRoutingKeyspace() boundStatement.getRoutingKeyspace()} is set from - * either {@link Request#getKeyspace() simpleStatement.getKeyspace()} (if it's not {@code - * null}), or {@code simpleStatement.getRoutingKeyspace()}; - *
      • on the other hand, the following attributes are not propagated: - *
          - *
        • {@link Statement#getQueryTimestamp() boundStatement.getQueryTimestamp()} will be - * set to {@link Long#MIN_VALUE}, meaning that the value will be assigned by the - * session's timestamp generator. - *
        • {@link Statement#getNode() boundStatement.getNode()} will always be {@code null}. - *
        - *
      - * - * If you want to customize this behavior, you can write your own implementation of {@link - * PrepareRequest} and pass it to {@link #prepare(PrepareRequest)}. - * - *

      The result of this method is cached: if you call it twice with the same {@link - * SimpleStatement}, you will get the same {@link PreparedStatement} instance. We still recommend - * keeping a reference to it (for example by caching it as a field in a DAO); if that's not - * possible (e.g. if query strings are generated dynamically), it's OK to call this method every - * time: there will just be a small performance overhead to check the internal cache. Note that - * caching is based on: - * - *

        - *
      • the query string exactly as you provided it: the driver does not perform any kind of - * trimming or sanitizing. - *
      • all other execution parameters: for example, preparing two statements with identical - * query strings but different {@linkplain SimpleStatement#getConsistencyLevel() consistency - * levels} will yield distinct prepared statements. - *
      - */ - @NonNull - default PreparedStatement prepare(@NonNull SimpleStatement statement) { - return Objects.requireNonNull( - execute(new DefaultPrepareRequest(statement), PrepareRequest.SYNC), - "The CQL prepare processor should never return a null result"); - } - - /** - * Prepares a CQL statement synchronously (the calling thread blocks until the statement is - * prepared). - * - *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more - * explanations). - */ - @NonNull - default PreparedStatement prepare(@NonNull String query) { - return Objects.requireNonNull( - execute(new DefaultPrepareRequest(query), PrepareRequest.SYNC), - "The CQL prepare processor should never return a null result"); - } - - /** - * Prepares a CQL statement synchronously (the calling thread blocks until the statement is - * prepared). - * - *

      This variant is exposed in case you use an ad hoc {@link PrepareRequest} implementation to - * customize how attributes are propagated when you prepare a {@link SimpleStatement} (see {@link - * #prepare(SimpleStatement)} for more explanations). Otherwise, you should rarely have to deal - * with {@link PrepareRequest} directly. - * - *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more - * explanations). - */ - @NonNull - default PreparedStatement prepare(@NonNull PrepareRequest request) { - return Objects.requireNonNull( - execute(request, PrepareRequest.SYNC), - "The CQL prepare processor should never return a null result"); - } - - /** - * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was - * sent, generally before the statement is prepared). - * - *

      Note that the bound statements created from the resulting prepared statement will inherit - * some of the attributes of {@code query}; see {@link #prepare(SimpleStatement)} for more - * details. - * - *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more - * explanations). - */ - @NonNull - default CompletionStage prepareAsync(@NonNull SimpleStatement statement) { - return Objects.requireNonNull( - execute(new DefaultPrepareRequest(statement), PrepareRequest.ASYNC), - "The CQL prepare processor should never return a null result"); - } - - /** - * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was - * sent, generally before the statement is prepared). - * - *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more - * explanations). - */ - @NonNull - default CompletionStage prepareAsync(@NonNull String query) { - return Objects.requireNonNull( - execute(new DefaultPrepareRequest(query), PrepareRequest.ASYNC), - "The CQL prepare processor should never return a null result"); - } - - /** - * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was - * sent, generally before the statement is prepared). - * - *

      This variant is exposed in case you use an ad hoc {@link PrepareRequest} implementation to - * customize how attributes are propagated when you prepare a {@link SimpleStatement} (see {@link - * #prepare(SimpleStatement)} for more explanations). Otherwise, you should rarely have to deal - * with {@link PrepareRequest} directly. - * - *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more - * explanations). - */ - @NonNull - default CompletionStage prepareAsync(PrepareRequest request) { - return Objects.requireNonNull( - execute(request, PrepareRequest.ASYNC), - "The CQL prepare processor should never return a null result"); - } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java new file mode 100644 index 00000000000..2e430414a72 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java @@ -0,0 +1,100 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.cql; + +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.concurrent.CompletionStage; + +/** + * A session that offers user-friendly methods to execute CQL requests asynchronously. + * + * @since 4.4.0 + */ +public interface AsyncCqlSession extends Session { + + /** + * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, + * generally before the result is available). 
+ */ + @NonNull + default CompletionStage executeAsync(@NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, Statement.ASYNC), "The CQL processor should never return a null result"); + } + + /** + * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, + * generally before the result is available). + */ + @NonNull + default CompletionStage executeAsync(@NonNull String query) { + return executeAsync(SimpleStatement.newInstance(query)); + } + + /** + * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was + * sent, generally before the statement is prepared). + * + *

      Note that the bound statements created from the resulting prepared statement will inherit + * some of the attributes of {@code query}; see {@link SyncCqlSession#prepare(SimpleStatement)} + * for more details. + * + *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for + * more explanations). + */ + @NonNull + default CompletionStage prepareAsync(@NonNull SimpleStatement statement) { + return Objects.requireNonNull( + execute(new DefaultPrepareRequest(statement), PrepareRequest.ASYNC), + "The CQL prepare processor should never return a null result"); + } + + /** + * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was + * sent, generally before the statement is prepared). + * + *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for + * more explanations). + */ + @NonNull + default CompletionStage prepareAsync(@NonNull String query) { + return Objects.requireNonNull( + execute(new DefaultPrepareRequest(query), PrepareRequest.ASYNC), + "The CQL prepare processor should never return a null result"); + } + + /** + * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was + * sent, generally before the statement is prepared). + * + *

      This variant is exposed in case you use an ad hoc {@link PrepareRequest} implementation to + * customize how attributes are propagated when you prepare a {@link SimpleStatement} (see {@link + * SyncCqlSession#prepare(SimpleStatement)} for more explanations). Otherwise, you should rarely + * have to deal with {@link PrepareRequest} directly. + * + *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for + * more explanations). + */ + @NonNull + default CompletionStage prepareAsync(PrepareRequest request) { + return Objects.requireNonNull( + execute(request, PrepareRequest.ASYNC), + "The CQL prepare processor should never return a null result"); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java new file mode 100644 index 00000000000..f9c0deff0f5 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java @@ -0,0 +1,153 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.cql; + +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +/** + * A session that offers user-friendly methods to execute CQL requests synchronously. + * + * @since 4.4.0 + */ +public interface SyncCqlSession extends Session { + + /** + * Executes a CQL statement synchronously (the calling thread blocks until the result becomes + * available). 
+ */ + @NonNull + default ResultSet execute(@NonNull Statement statement) { + return Objects.requireNonNull( + execute(statement, Statement.SYNC), "The CQL processor should never return a null result"); + } + + /** + * Executes a CQL statement synchronously (the calling thread blocks until the result becomes + * available). + */ + @NonNull + default ResultSet execute(@NonNull String query) { + return execute(SimpleStatement.newInstance(query)); + } + + /** + * Prepares a CQL statement synchronously (the calling thread blocks until the statement is + * prepared). + * + *

      Note that the bound statements created from the resulting prepared statement will inherit + * some of the attributes of the provided simple statement. That is, given: + * + *

      {@code
      +   * SimpleStatement simpleStatement = SimpleStatement.newInstance("...");
      +   * PreparedStatement preparedStatement = session.prepare(simpleStatement);
      +   * BoundStatement boundStatement = preparedStatement.bind();
      +   * }
      + * + * Then: + * + *
        + *
      • the following methods return the same value as their counterpart on {@code + * simpleStatement}: + *
          + *
        • {@link Request#getExecutionProfileName() boundStatement.getExecutionProfileName()} + *
        • {@link Request#getExecutionProfile() boundStatement.getExecutionProfile()} + *
        • {@link Statement#getPagingState() boundStatement.getPagingState()} + *
        • {@link Request#getRoutingKey() boundStatement.getRoutingKey()} + *
        • {@link Request#getRoutingToken() boundStatement.getRoutingToken()} + *
        • {@link Request#getCustomPayload() boundStatement.getCustomPayload()} + *
        • {@link Request#isIdempotent() boundStatement.isIdempotent()} + *
        • {@link Request#getTimeout() boundStatement.getTimeout()} + *
        • {@link Statement#getPagingState() boundStatement.getPagingState()} + *
        • {@link Statement#getPageSize() boundStatement.getPageSize()} + *
        • {@link Statement#getConsistencyLevel() boundStatement.getConsistencyLevel()} + *
        • {@link Statement#getSerialConsistencyLevel() + * boundStatement.getSerialConsistencyLevel()} + *
        • {@link Statement#isTracing() boundStatement.isTracing()} + *
        + *
      • {@link Request#getRoutingKeyspace() boundStatement.getRoutingKeyspace()} is set from + * either {@link Request#getKeyspace() simpleStatement.getKeyspace()} (if it's not {@code + * null}), or {@code simpleStatement.getRoutingKeyspace()}; + *
      • on the other hand, the following attributes are not propagated: + *
          + *
        • {@link Statement#getQueryTimestamp() boundStatement.getQueryTimestamp()} will be + * set to {@link Long#MIN_VALUE}, meaning that the value will be assigned by the + * session's timestamp generator. + *
        • {@link Statement#getNode() boundStatement.getNode()} will always be {@code null}. + *
        + *
      + * + * If you want to customize this behavior, you can write your own implementation of {@link + * PrepareRequest} and pass it to {@link #prepare(PrepareRequest)}. + * + *

      The result of this method is cached: if you call it twice with the same {@link + * SimpleStatement}, you will get the same {@link PreparedStatement} instance. We still recommend + * keeping a reference to it (for example by caching it as a field in a DAO); if that's not + * possible (e.g. if query strings are generated dynamically), it's OK to call this method every + * time: there will just be a small performance overhead to check the internal cache. Note that + * caching is based on: + * + *

        + *
      • the query string exactly as you provided it: the driver does not perform any kind of + * trimming or sanitizing. + *
      • all other execution parameters: for example, preparing two statements with identical + * query strings but different {@linkplain SimpleStatement#getConsistencyLevel() consistency + * levels} will yield distinct prepared statements. + *
      + */ + @NonNull + default PreparedStatement prepare(@NonNull SimpleStatement statement) { + return Objects.requireNonNull( + execute(new DefaultPrepareRequest(statement), PrepareRequest.SYNC), + "The CQL prepare processor should never return a null result"); + } + + /** + * Prepares a CQL statement synchronously (the calling thread blocks until the statement is + * prepared). + * + *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more + * explanations). + */ + @NonNull + default PreparedStatement prepare(@NonNull String query) { + return Objects.requireNonNull( + execute(new DefaultPrepareRequest(query), PrepareRequest.SYNC), + "The CQL prepare processor should never return a null result"); + } + + /** + * Prepares a CQL statement synchronously (the calling thread blocks until the statement is + * prepared). + * + *

      This variant is exposed in case you use an ad hoc {@link PrepareRequest} implementation to + * customize how attributes are propagated when you prepare a {@link SimpleStatement} (see {@link + * #prepare(SimpleStatement)} for more explanations). Otherwise, you should rarely have to deal + * with {@link PrepareRequest} directly. + * + *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more + * explanations). + */ + @NonNull + default PreparedStatement prepare(@NonNull PrepareRequest request) { + return Objects.requireNonNull( + execute(request, PrepareRequest.SYNC), + "The CQL prepare processor should never return a null result"); + } +} From aa511253cbdfae872820ae96ad98affba5afa3c2 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 7 Nov 2019 11:14:04 -0800 Subject: [PATCH 216/979] Remove non-user-facing DSE session types --- .../driver/api/core/DseSessionBuilder.java | 3 +- .../api/core/DseSessionBuilderBase.java | 31 ------ .../session/DseProgrammaticArguments.java | 94 ------------------- .../core/context/DseDriverContext.java | 83 ---------------- 4 files changed, 2 insertions(+), 209 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java delete mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java index 56cfb31d4ce..263ebbf6a98 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java @@ -17,6 +17,7 @@ import com.datastax.dse.driver.internal.core.session.DefaultDseSession; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.session.SessionBuilder; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.NotThreadSafe; @@ -27,7 +28,7 @@ */ @NotThreadSafe @Deprecated -public class DseSessionBuilder extends DseSessionBuilderBase { +public class DseSessionBuilder extends SessionBuilder { @NonNull @Override diff --git 
a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java deleted file mode 100644 index 26a6c04747f..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilderBase.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.api.core; - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.session.SessionBuilder; -import net.jcip.annotations.NotThreadSafe; - -/** - * @deprecated DSE functionality is now exposed directly on {@link CqlSession}. This class is - * preserved for backward compatibility, but {@link CqlSession#builder()} should be used - * instead. - */ -@NotThreadSafe -@Deprecated -public abstract class DseSessionBuilderBase< - SelfT extends DseSessionBuilderBase, SessionT> - extends SessionBuilder {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java b/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java deleted file mode 100644 index 2e727e583c0..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/api/core/session/DseProgrammaticArguments.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright DataStax, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.api.core.session; - -import com.datastax.oss.driver.api.core.CqlSession; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.UUID; - -/** - * @deprecated All DSE functionality is now available directly on {@link CqlSession}. This type is - * preserved for backward compatibility, but clients should now build {@link CqlSession} - * instances instead of DSE sessions. 
- */ -@Deprecated -public class DseProgrammaticArguments { - - @NonNull - public static Builder builder() { - return new Builder(); - } - - private final UUID startupClientId; - private final String startupApplicationName; - private final String startupApplicationVersion; - - private DseProgrammaticArguments( - @Nullable UUID startupClientId, - @Nullable String startupApplicationName, - @Nullable String startupApplicationVersion) { - this.startupClientId = startupClientId; - this.startupApplicationName = startupApplicationName; - this.startupApplicationVersion = startupApplicationVersion; - } - - @Nullable - public UUID getStartupClientId() { - return startupClientId; - } - - @Nullable - public String getStartupApplicationName() { - return startupApplicationName; - } - - @Nullable - public String getStartupApplicationVersion() { - return startupApplicationVersion; - } - - public static class Builder { - - private UUID startupClientId; - private String startupApplicationName; - private String startupApplicationVersion; - - @NonNull - public Builder withStartupClientId(@Nullable UUID startupClientId) { - this.startupClientId = startupClientId; - return this; - } - - @NonNull - public Builder withStartupApplicationName(@Nullable String startupApplicationName) { - this.startupApplicationName = startupApplicationName; - return this; - } - - @NonNull - public Builder withStartupApplicationVersion(@Nullable String startupApplicationVersion) { - this.startupApplicationVersion = startupApplicationVersion; - return this; - } - - @NonNull - public DseProgrammaticArguments build() { - return new DseProgrammaticArguments( - startupClientId, startupApplicationName, startupApplicationVersion); - } - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java b/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java deleted file mode 100644 index d95d03f8b99..00000000000 --- 
a/core/src/main/java/com/datastax/dse/driver/internal/core/context/DseDriverContext.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.context; - -import com.datastax.dse.driver.api.core.session.DseProgrammaticArguments; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.function.Predicate; -import net.jcip.annotations.ThreadSafe; - -/** - * @deprecated All DSE functionality is now available directly on {@link CqlSession}. This type is - * preserved for backward compatibility, but clients should now build {@link CqlSession} - * instances instead of DSE sessions. 
- */ -@ThreadSafe -@Deprecated -public class DseDriverContext extends DefaultDriverContext { - - public DseDriverContext( - DriverConfigLoader configLoader, - ProgrammaticArguments programmaticArguments, - DseProgrammaticArguments dseProgrammaticArguments) { - super(configLoader, programmaticArguments); - } - /** - * @deprecated this constructor only exists for backward compatibility. Please use {@link - * #DseDriverContext(DriverConfigLoader, ProgrammaticArguments, DseProgrammaticArguments)} - * instead. - */ - public DseDriverContext( - DriverConfigLoader configLoader, - List> typeCodecs, - NodeStateListener nodeStateListener, - SchemaChangeListener schemaChangeListener, - RequestTracker requestTracker, - Map localDatacenters, - Map> nodeFilters, - ClassLoader classLoader, - UUID clientId, - String applicationName, - String applicationVersion) { - this( - configLoader, - ProgrammaticArguments.builder() - .addTypeCodecs(typeCodecs.toArray(new TypeCodec[0])) - .withNodeStateListener(nodeStateListener) - .withSchemaChangeListener(schemaChangeListener) - .withRequestTracker(requestTracker) - .withLocalDatacenters(localDatacenters) - .withNodeFilters(nodeFilters) - .withClassLoader(classLoader) - .build(), - DseProgrammaticArguments.builder() - .withStartupClientId(clientId) - .withStartupApplicationName(applicationName) - .withStartupApplicationVersion(applicationVersion) - .build()); - } -} From 29af6e13b6bd8bda18b6404d4d46a290b15fee90 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 16 Oct 2019 20:17:26 -0700 Subject: [PATCH 217/979] Add reactive support to the OSS mapper Adapted from the DSE mapper in riptano/java-dse-driver@191f065ff. 
--- .../queries/Cassandra3SchemaQueriesTest.java | 2 +- .../oss/driver/mapper/DeleteReactiveIT.java | 181 +++++++++++++ .../oss/driver/mapper/InsertReactiveIT.java | 136 ++++++++++ .../oss/driver/mapper/QueryReactiveIT.java | 156 +++++++++++ .../oss/driver/mapper/SelectReactiveIT.java | 110 ++++++++ .../oss/driver/mapper/UpdateReactiveIT.java | 167 ++++++++++++ .../dao/DaoDeleteMethodGenerator.java | 4 +- .../dao/DaoInsertMethodGenerator.java | 4 +- .../mapper/processor/dao/DaoReturnType.java | 2 + .../dao/DaoSelectMethodGenerator.java | 4 +- .../dao/DaoUpdateMethodGenerator.java | 4 +- .../dao/DefaultDaoReturnTypeKind.java | 185 ++++++++++--- .../dao/DefaultDaoReturnTypeParser.java | 4 + .../dao/DaoDeleteMethodGeneratorTest.java | 2 +- .../dao/DaoImplementationGeneratorTest.java | 166 ++++++++++++ .../dao/DaoInsertMethodGeneratorTest.java | 4 +- .../dao/DaoQueryMethodGeneratorTest.java | 2 +- .../dao/DaoSelectMethodGeneratorTest.java | 4 +- .../dao/DaoUpdateMethodGeneratorTest.java | 3 +- mapper-runtime/pom.xml | 54 ++++ .../reactive/MappedReactiveResultSet.java | 45 ++++ .../DefaultMappedReactiveResultSet.java | 245 ++++++++++++++++++ .../FailedMappedReactiveResultSet.java | 53 ++++ .../oss/driver/internal/mapper/DaoBase.java | 13 + .../MappedReactiveResultSetTckTest.java | 86 ++++++ .../mapper/reactive/MockAsyncResultSet.java | 97 +++++++ .../driver/api/mapper/reactive/MockRow.java | 124 +++++++++ .../api/mapper/reactive/TestSubscriber.java | 73 ++++++ 28 files changed, 1881 insertions(+), 49 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertReactiveIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReactiveIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java create mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java create mode 100644 mapper-runtime/src/main/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.java create mode 100644 mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/DefaultMappedReactiveResultSet.java create mode 100644 mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/FailedMappedReactiveResultSet.java create mode 100644 mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSetTckTest.java create mode 100644 mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockAsyncResultSet.java create mode 100644 mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java create mode 100644 mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index 88b45481747..766ac9d572e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -329,7 +329,7 @@ public void should_ignore_malformed_rows() { @Test public void should_abort_if_query_fails() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); CompletionStage result = queries.execute(); Exception mockQueryError = new Exception("mock query error"); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java new file mode 100644 index 00000000000..f9af1f0074d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java @@ -0,0 +1,181 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import 
com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import io.reactivex.Flowable; +import java.util.UUID; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class DeleteReactiveIT extends InventoryITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static DseProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + for (String query : createStatements(ccmRule)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + + DseInventoryMapper inventoryMapper = + new DeleteReactiveIT_DseInventoryMapperBuilder(session).build(); + dao = inventoryMapper.productDao(sessionRule.keyspace()); + } + + @Before + public void insertFixtures() { + Flowable.fromPublisher(dao.saveReactive(FLAMETHROWER)).blockingSubscribe(); + } + + @Test + public void should_delete_entity_reactive() { + UUID id = FLAMETHROWER.getId(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).blockingSingle()).isNotNull(); + + ReactiveResultSet rs = dao.deleteEntityReactive(FLAMETHROWER); + ReactiveRow row = Flowable.fromPublisher(rs).singleElement().blockingGet(); + + assertThat(row).isNull(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).singleElement().blockingGet()) + .isNull(); + } + + @Test + public void should_delete_by_id_reactive() { + UUID id = FLAMETHROWER.getId(); + 
assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).blockingSingle()).isNotNull(); + + ReactiveResultSet rs = dao.deleteByIdReactive(id); + ReactiveRow row = Flowable.fromPublisher(rs).singleElement().blockingGet(); + + assertThat(row).isNull(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).singleElement().blockingGet()) + .isNull(); + + // Non-existing id should be silently ignored + rs = dao.deleteByIdReactive(id); + row = Flowable.fromPublisher(rs).singleElement().blockingGet(); + + assertThat(row).isNull(); + } + + @Test + public void should_delete_if_exists_reactive() { + UUID id = FLAMETHROWER.getId(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).blockingSingle()).isNotNull(); + { + ReactiveResultSet rs = dao.deleteIfExistsReactive(FLAMETHROWER); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isTrue(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isTrue(); + } + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).singleElement().blockingGet()) + .isNull(); + { + ReactiveResultSet rs = dao.deleteIfExistsReactive(FLAMETHROWER); + ReactiveRow row = Flowable.fromPublisher(rs).singleElement().blockingGet(); + assertThat(row.wasApplied()).isFalse(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isFalse(); + } + } + + @Test + public void should_delete_with_condition_reactive() { + UUID id = FLAMETHROWER.getId(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).blockingSingle()).isNotNull(); + { + ReactiveResultSet rs = dao.deleteIfDescriptionMatchesReactive(id, "foo"); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isFalse(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isFalse(); + assertThat(row.getString("description")).isEqualTo(FLAMETHROWER.getDescription()); + } + { + ReactiveResultSet rs = + 
dao.deleteIfDescriptionMatchesReactive(id, FLAMETHROWER.getDescription()); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isTrue(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isTrue(); + } + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).singleElement().blockingGet()) + .isNull(); + } + + @Mapper + public interface DseInventoryMapper { + @DaoFactory + DseProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + public interface DseProductDao { + + @Delete + ReactiveResultSet deleteEntityReactive(Product product); + + @Delete(entityClass = Product.class) + ReactiveResultSet deleteByIdReactive(UUID productId); + + @Delete(ifExists = true) + ReactiveResultSet deleteIfExistsReactive(Product product); + + @Delete(entityClass = Product.class, customIfClause = "description = :expectedDescription") + ReactiveResultSet deleteIfDescriptionMatchesReactive( + UUID productId, String expectedDescription); + + @Select + MappedReactiveResultSet findByIdReactive(UUID productId); + + @Insert + ReactiveResultSet saveReactive(Product product); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertReactiveIT.java new file mode 100644 index 00000000000..e7a6b0175fc --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InsertReactiveIT.java @@ -0,0 +1,136 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import io.reactivex.Flowable; +import java.util.UUID; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class 
InsertReactiveIT extends InventoryITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static DseProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + for (String query : createStatements(ccmRule)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + + DseInventoryMapper dseInventoryMapper = + new InsertReactiveIT_DseInventoryMapperBuilder(session).build(); + dao = dseInventoryMapper.productDao(sessionRule.keyspace()); + } + + @Before + public void clearProductData() { + CqlSession session = sessionRule.session(); + session.execute( + SimpleStatement.builder("TRUNCATE product") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + + @Test + public void should_insert_entity_returning_reactive_result_set() { + assertThat( + Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())) + .singleElement() + .blockingGet()) + .isNull(); + assertThat(Flowable.fromPublisher(dao.saveReactive(FLAMETHROWER)).singleElement().blockingGet()) + .isNull(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())).blockingSingle()) + .isEqualTo(FLAMETHROWER); + } + + @Test + public void should_insert_entity_if_not_exists_reactive() { + UUID id = FLAMETHROWER.getId(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).singleElement().blockingGet()) + .isNull(); + { + ReactiveResultSet rs = dao.saveIfNotExistsReactive(FLAMETHROWER); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isTrue(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isTrue(); + } + assertThat(Flowable.fromPublisher(dao.findByIdReactive(id)).blockingSingle()) + 
.isNotNull() + .isEqualTo(FLAMETHROWER); + { + ReactiveResultSet rs = dao.saveIfNotExistsReactive(FLAMETHROWER); + ReactiveRow row = Flowable.fromPublisher(rs).singleElement().blockingGet(); + assertThat(row.wasApplied()).isFalse(); + assertThat(Flowable.fromPublisher(rs.wasApplied()).blockingSingle()).isFalse(); + } + } + + @Mapper + public interface DseInventoryMapper { + + @DaoFactory + DseProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + public interface DseProductDao { + + @Insert + ReactiveResultSet saveReactive(Product product); + + @Insert(ifNotExists = true) + ReactiveResultSet saveIfNotExistsReactive(Product product); + + @Select + MappedReactiveResultSet findByIdReactive(UUID productId); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReactiveIT.java new file mode 100644 index 00000000000..8368e309a5b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReactiveIT.java @@ -0,0 +1,156 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Query; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import io.reactivex.Flowable; +import java.util.List; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class QueryReactiveIT { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static DseTestDao dao; 
+ + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + session.execute( + SimpleStatement.builder( + "CREATE TABLE test_entity(id int, rank int, value int, PRIMARY KEY(id, rank))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + + TestMapper testMapper = new QueryReactiveIT_TestMapperBuilder(session).build(); + dao = testMapper.productDao(sessionRule.keyspace()); + } + + @Before + public void insertData() { + for (int i = 0; i < 10; i++) { + dao.insert(new TestEntity(1, i, i)); + } + } + + @Test + public void should_query_reactive() { + ReactiveResultSet rs = dao.findByIdReactive(1); + assertThat(Flowable.fromPublisher(rs).count().blockingGet()).isEqualTo(10); + } + + @Test + public void should_query_reactive_mapped() { + MappedReactiveResultSet rs = dao.findByIdReactiveMapped(1); + List results = Flowable.fromPublisher(rs).toList().blockingGet(); + assertThat(results).hasSize(10); + assertThat(results).extracting("rank").containsExactly(0, 1, 2, 3, 4, 5, 6, 7, 8, 9); + } + + @Mapper + public interface TestMapper { + + @DaoFactory + DseTestDao productDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + public interface DseTestDao { + + @Insert + void insert(TestEntity entity); + + @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") + MappedReactiveResultSet findByIdReactiveMapped(int id); + + @Query("SELECT * FROM ${keyspaceId}.test_entity WHERE id = :id") + ReactiveResultSet findByIdReactive(int id); + } + + @Entity + public static class TestEntity { + @PartitionKey private int id; + + @ClusteringColumn private int rank; + + private Integer value; + + public TestEntity() {} + + public TestEntity(int id, int rank, Integer value) { + this.id = id; + this.rank = rank; + this.value = value; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public int getRank() { + return rank; + } + + 
public void setRank(int rank) { + this.rank = rank; + } + + public Integer getValue() { + return value; + } + + public void setValue(Integer value) { + this.value = value; + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java new file mode 100644 index 00000000000..09e107f98f3 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java @@ -0,0 +1,110 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import io.reactivex.Flowable; +import java.util.UUID; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class SelectReactiveIT extends InventoryITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static DseProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + for (String 
query : createStatements(ccmRule)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + + DseInventoryMapper inventoryMapper = + new SelectReactiveIT_DseInventoryMapperBuilder(session).build(); + dao = inventoryMapper.productDao(sessionRule.keyspace()); + } + + @Before + public void insertData() { + Flowable.fromPublisher(dao.saveReactive(FLAMETHROWER)).blockingSubscribe(); + Flowable.fromPublisher(dao.saveReactive(MP3_DOWNLOAD)).blockingSubscribe(); + } + + @Test + public void should_select_by_primary_key_reactive() { + assertThat(Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())).blockingSingle()) + .isEqualTo(FLAMETHROWER); + Flowable.fromPublisher(dao.deleteReactive(FLAMETHROWER)).blockingSubscribe(); + assertThat( + Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())) + .singleElement() + .blockingGet()) + .isNull(); + } + + @Mapper + public interface DseInventoryMapper { + + @DaoFactory + DseProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + public interface DseProductDao { + + @Select + MappedReactiveResultSet findByIdReactive(UUID productId); + + @Delete + ReactiveResultSet deleteReactive(Product product); + + @Insert + ReactiveResultSet saveReactive(Product product); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java new file mode 100644 index 00000000000..6eb2f83793c --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import io.reactivex.Flowable; +import io.reactivex.Single; +import java.util.UUID; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import 
org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@CassandraRequirement( + min = "3.6", + description = "Uses UDT fields in IF conditions (CASSANDRA-7423)") +public class UpdateReactiveIT extends InventoryITBase { + + private static CcmRule ccmRule = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static DseProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + + for (String query : createStatements(ccmRule)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + + DseInventoryMapper dseInventoryMapper = + new UpdateReactiveIT_DseInventoryMapperBuilder(session).build(); + dao = dseInventoryMapper.productDao(sessionRule.keyspace()); + } + + @Before + public void clearProductData() { + CqlSession session = sessionRule.session(); + session.execute( + SimpleStatement.builder("TRUNCATE product") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + + @Test + public void should_update_entity_if_exists_reactive() { + Flowable.fromPublisher(dao.updateReactive(FLAMETHROWER)).blockingSubscribe(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())).blockingSingle()) + .isNotNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + ReactiveResultSet rs = dao.updateIfExistsReactive(otherProduct); + assertThat(Flowable.fromPublisher(rs).count().blockingGet()).isOne(); + assertThat( + Single.fromPublisher(rs.getColumnDefinitions()).blockingGet().contains("description")) + .isFalse(); + assertThat(Single.fromPublisher(rs.wasApplied()).blockingGet()).isTrue(); + } + + @Test + public void should_update_entity_if_condition_is_met_reactive() { + 
Flowable.fromPublisher( + dao.updateReactive( + new Product( + FLAMETHROWER.getId(), "Description for length 10", new Dimensions(10, 1, 1)))) + .blockingSubscribe(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())).blockingSingle()) + .isNotNull(); + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + ReactiveResultSet rs = dao.updateIfLengthReactive(otherProduct, 10); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isTrue(); + assertThat(row.getColumnDefinitions().contains("dimensions")).isFalse(); + assertThat(Single.fromPublisher(rs.getColumnDefinitions()).blockingGet().contains("dimensions")) + .isFalse(); + assertThat(Single.fromPublisher(rs.wasApplied()).blockingGet()).isTrue(); + } + + @Test + public void should_not_update_entity_if_condition_is_not_met_reactive() { + Flowable.fromPublisher( + dao.updateReactive( + new Product( + FLAMETHROWER.getId(), "Description for length 10", new Dimensions(10, 1, 1)))) + .blockingSubscribe(); + assertThat(Flowable.fromPublisher(dao.findByIdReactive(FLAMETHROWER.getId())).blockingSingle()) + .isNotNull() + .extracting("description") + .isEqualTo("Description for length 10"); + ReactiveResultSet rs = + dao.updateIfLengthReactive( + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)), 20); + ReactiveRow row = Flowable.fromPublisher(rs).blockingSingle(); + assertThat(row.wasApplied()).isFalse(); + assertThat(row.getColumnDefinitions().contains("dimensions")).isTrue(); + assertThat(Single.fromPublisher(rs.getColumnDefinitions()).blockingGet().contains("dimensions")) + .isTrue(); + assertThat(Single.fromPublisher(rs.wasApplied()).blockingGet()).isFalse(); + } + + @Mapper + public interface DseInventoryMapper { + + @DaoFactory + DseProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + 
public interface DseProductDao { + + @Update + ReactiveResultSet updateReactive(Product product); + + @Update(ifExists = true) + ReactiveResultSet updateIfExistsReactive(Product product); + + @Update(customIfClause = "dimensions.length = :length") + ReactiveResultSet updateIfLengthReactive(Product product, int length); + + @Select + MappedReactiveResultSet findByIdReactive(UUID productId); + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 1fe33b4d1aa..2772d1f1f34 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.REACTIVE_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.VOID; @@ -69,7 +70,8 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_BOOLEAN, RESULT_SET, BOUND_STATEMENT, - FUTURE_OF_ASYNC_RESULT_SET); + FUTURE_OF_ASYNC_RESULT_SET, + REACTIVE_RESULT_SET); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 3c6d93c077d..69e2400b7e2 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -24,6 +24,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_OPTIONAL_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.OPTIONAL_ENTITY; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.REACTIVE_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.VOID; @@ -73,7 +74,8 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_BOOLEAN, RESULT_SET, BOUND_STATEMENT, - FUTURE_OF_ASYNC_RESULT_SET); + FUTURE_OF_ASYNC_RESULT_SET, + REACTIVE_RESULT_SET); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java index 116c4812353..b76bbf1676d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnType.java @@ -38,6 +38,8 @@ public class DaoReturnType { new DaoReturnType(DefaultDaoReturnTypeKind.FUTURE_OF_ROW); public static final DaoReturnType FUTURE_OF_ASYNC_RESULT_SET = new DaoReturnType(DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET); 
+ public static final DaoReturnType REACTIVE_RESULT_SET = + new DaoReturnType(DefaultDaoReturnTypeKind.REACTIVE_RESULT_SET); public static final DaoReturnType UNSUPPORTED = new DaoReturnType(DefaultDaoReturnTypeKind.UNSUPPORTED); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index d958c2460c2..5125719e89d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -19,6 +19,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_PAGING_ITERABLE; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_OPTIONAL_ENTITY; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.MAPPED_REACTIVE_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.OPTIONAL_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.PAGING_ITERABLE; @@ -66,7 +67,8 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, - FUTURE_OF_ASYNC_PAGING_ITERABLE); + FUTURE_OF_ASYNC_PAGING_ITERABLE, + MAPPED_REACTIVE_RESULT_SET); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 53f0e5098d1..02a81be61c9 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.REACTIVE_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.VOID; @@ -68,7 +69,8 @@ protected Set getSupportedReturnTypes() { BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, - FUTURE_OF_BOOLEAN); + FUTURE_OF_BOOLEAN, + REACTIVE_RESULT_SET); } @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index 522edf0ac0a..59a7454200c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -15,152 +15,263 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import com.datastax.dse.driver.internal.core.cql.reactive.FailedReactiveResultSet; +import com.datastax.dse.driver.internal.mapper.reactive.FailedMappedReactiveResultSet; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import 
com.squareup.javapoet.CodeBlock; public enum DefaultDaoReturnTypeKind implements DaoReturnTypeKind { - VOID(false) { + VOID { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { // Note that the execute* methods in the generated code are defined in DaoBase methodBuilder.addStatement("execute(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - BOOLEAN(false) { + BOOLEAN { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAndMapWasAppliedToBoolean(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - LONG(false) { + LONG { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAndMapFirstColumnToLong(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - ROW(false) { + ROW { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAndExtractFirstRow(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - ENTITY(false) { + ENTITY { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return executeAndMapToSingleEntity(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - OPTIONAL_ENTITY(false) { + OPTIONAL_ENTITY { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return 
executeAndMapToOptionalEntity(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - RESULT_SET(false) { + RESULT_SET { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return execute(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - BOUND_STATEMENT(false) { + BOUND_STATEMENT { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return boundStatement"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - PAGING_ITERABLE(false) { + PAGING_ITERABLE { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return executeAndMapToEntityIterable(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return innerBlock; + } }, - FUTURE_OF_VOID(true) { + FUTURE_OF_VOID { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAsyncAndMapToVoid(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_BOOLEAN(true) { + FUTURE_OF_BOOLEAN { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAsyncAndMapWasAppliedToBoolean(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_LONG(true) { + FUTURE_OF_LONG { @Override public void 
addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAsyncAndMapFirstColumnToLong(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_ROW(true) { + FUTURE_OF_ROW { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAsyncAndExtractFirstRow(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_ENTITY(true) { + FUTURE_OF_ENTITY { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return executeAsyncAndMapToSingleEntity(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_OPTIONAL_ENTITY(true) { + FUTURE_OF_OPTIONAL_ENTITY { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return executeAsyncAndMapToOptionalEntity(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_ASYNC_RESULT_SET(true) { + FUTURE_OF_ASYNC_RESULT_SET { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement("return executeAsync(boundStatement)"); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, - FUTURE_OF_ASYNC_PAGING_ITERABLE(true) { + FUTURE_OF_ASYNC_PAGING_ITERABLE { @Override public 
void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { methodBuilder.addStatement( "return executeAsyncAndMapToEntityIterable(boundStatement, $L)", helperFieldName); } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } }, + REACTIVE_RESULT_SET { + @Override + public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + methodBuilder.addStatement("return executeReactive(boundStatement)"); + } - UNSUPPORTED(false) { + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_REACTIVE_RESULT_SET); + } + }, + MAPPED_REACTIVE_RESULT_SET { @Override public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { - throw new AssertionError("Should never get here"); + methodBuilder.addStatement( + "return executeReactiveAndMap(boundStatement, $L)", helperFieldName); + } + + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + return wrapWithErrorHandling(innerBlock, FAILED_MAPPED_REACTIVE_RESULT_SET); } }, - ; - private final boolean isAsync; + UNSUPPORTED() { + @Override + public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + throw new AssertionError("Should never get here"); + } - DefaultDaoReturnTypeKind(boolean isAsync) { - this.isAsync = isAsync; - } + @Override + public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + throw new AssertionError("Should never get here"); + } + }, + ; @Override public String getDescription() { return name(); } - @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { - if (isAsync) { - return CodeBlock.builder() - .beginControlFlow("try") - .add(innerBlock) - .nextControlFlow("catch ($T t)", Throwable.class) - .addStatement("return $T.failedFuture(t)", CompletableFutures.class) - .endControlFlow() - 
.build(); - } else { - return innerBlock; - } + static CodeBlock wrapWithErrorHandling(CodeBlock innerBlock, CodeBlock catchBlock) { + return CodeBlock.builder() + .beginControlFlow("try") + .add(innerBlock) + .nextControlFlow("catch ($T t)", Throwable.class) + .addStatement(catchBlock) + .endControlFlow() + .build(); } + + private static final CodeBlock FAILED_FUTURE = + CodeBlock.of("return $T.failedFuture(t)", CompletableFutures.class); + private static final CodeBlock FAILED_REACTIVE_RESULT_SET = + CodeBlock.of("return new $T(t)", FailedReactiveResultSet.class); + private static final CodeBlock FAILED_MAPPED_REACTIVE_RESULT_SET = + CodeBlock.of("return new $T(t)", FailedMappedReactiveResultSet.class); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index fb865253aff..f059d3139fd 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; @@ -57,6 +59,7 @@ public class DefaultDaoReturnTypeParser implements DaoReturnTypeParser { .put(Row.class, DaoReturnType.ROW) .put(ResultSet.class, DaoReturnType.RESULT_SET) .put(BoundStatement.class, DaoReturnType.BOUND_STATEMENT) + .put(ReactiveResultSet.class, DaoReturnType.REACTIVE_RESULT_SET) .build(); /** @@ -69,6 +72,7 @@ 
public class DefaultDaoReturnTypeParser implements DaoReturnTypeParser { .put(CompletionStage.class, DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY) .put(CompletableFuture.class, DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY) .put(PagingIterable.class, DefaultDaoReturnTypeKind.PAGING_ITERABLE) + .put(MappedReactiveResultSet.class, DefaultDaoReturnTypeKind.MAPPED_REACTIVE_RESULT_SET) .build(); /** The return types that correspond to a future of a non-generic Java class. */ diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java index c915c8a5666..7b5bda222b1 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java @@ -105,7 +105,7 @@ public static Object[][] invalidSignatures() { }, { "Delete methods must return one of [VOID, FUTURE_OF_VOID, BOOLEAN, FUTURE_OF_BOOLEAN, " - + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET]", + + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("delete") .addAnnotation(Delete.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGeneratorTest.java index 324a8d17a8e..e7d854805f0 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGeneratorTest.java @@ -17,18 +17,27 
@@ import static com.google.testing.compile.CompilationSubject.assertThat; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Query; +import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; import com.google.testing.compile.Compilation; +import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterSpec; +import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeSpec; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.util.Collections; +import java.util.UUID; import javax.lang.model.element.Modifier; import javax.tools.StandardLocation; import org.junit.Test; @@ -37,6 +46,15 @@ @RunWith(DataProviderRunner.class) public class DaoImplementationGeneratorTest extends DaoMethodGeneratorTest { + private static final ClassName REACTIVE_RESULT_CLASS_NAME = + ClassName.get(ReactiveResultSet.class); + + private static final ClassName MAPPED_REACTIVE_RESULT_CLASS_NAME = + ClassName.get(MappedReactiveResultSet.class); + + private static final ParameterizedTypeName ENTITY_MAPPED_REACTIVE_RESULT_SET = + ParameterizedTypeName.get(MAPPED_REACTIVE_RESULT_CLASS_NAME, ENTITY_CLASS_NAME); + @Test public void should_fail_if_method_is_not_annotated() { should_fail_with_expected_error( @@ -113,6 +131,154 @@ public void should_compile_with_logging_disabled(Iterable options) { 
assertGeneratedFileDoesNotContain(compilation, "LOG.debug"); } + @Test + public void should_generate_findById_method_returning_MappedReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("findById") + .addAnnotation(Select.class) + .addParameter(ParameterSpec.builder(UUID.class, "pk").build()) + .returns(ENTITY_MAPPED_REACTIVE_RESULT_SET) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains( + compilation, "public MappedReactiveResultSet findById(UUID pk)"); + assertGeneratedFileContains( + compilation, "return executeReactiveAndMap(boundStatement, productHelper);"); + } + + @Test + public void should_generate_insert_method_returning_ReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("insertIfNotExists") + .addAnnotation(Insert.class) + .addParameter(ParameterSpec.builder(ENTITY_CLASS_NAME, "product").build()) + .returns(REACTIVE_RESULT_CLASS_NAME) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains( + compilation, "public ReactiveResultSet insertIfNotExists(Product product)"); + assertGeneratedFileContains(compilation, "return executeReactive(boundStatement);"); + } + + @Test + public void should_generate_update_method_returning_ReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + 
TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("updateIfExists") + .addAnnotation(Update.class) + .addParameter(ParameterSpec.builder(ENTITY_CLASS_NAME, "product").build()) + .returns(REACTIVE_RESULT_CLASS_NAME) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains( + compilation, "public ReactiveResultSet updateIfExists(Product product)"); + assertGeneratedFileContains(compilation, "return executeReactive(boundStatement);"); + } + + @Test + public void should_generate_delete_method_returning_ReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("delete") + .addAnnotation(Delete.class) + .addParameter(ParameterSpec.builder(ENTITY_CLASS_NAME, "product").build()) + .returns(REACTIVE_RESULT_CLASS_NAME) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains(compilation, "public ReactiveResultSet delete(Product product)"); + assertGeneratedFileContains(compilation, "return executeReactive(boundStatement);"); + } + + @Test + public void should_generate_query_method_returning_ReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("queryReactive") + .addAnnotation( + AnnotationSpec.builder(Query.class) + .addMember("value", "$S", "SELECT * FROM whatever") + .build()) + 
.returns(REACTIVE_RESULT_CLASS_NAME) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains(compilation, "public ReactiveResultSet queryReactive()"); + assertGeneratedFileContains(compilation, "return executeReactive(boundStatement);"); + } + + @Test + public void should_generate_query_method_returning_MappedReactiveResultSet() { + Compilation compilation = + compileWithMapperProcessor( + "test", + Collections.emptyList(), + ENTITY_SPEC, + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod( + MethodSpec.methodBuilder("queryReactiveMapped") + .addAnnotation( + AnnotationSpec.builder(Query.class) + .addMember("value", "$S", "SELECT * FROM whatever") + .build()) + .returns(ENTITY_MAPPED_REACTIVE_RESULT_SET) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .build()) + .build()); + assertThat(compilation).succeededWithoutWarnings(); + assertGeneratedFileContains( + compilation, "public MappedReactiveResultSet queryReactiveMapped()"); + assertGeneratedFileContains( + compilation, "return executeReactiveAndMap(boundStatement, productHelper);"); + } + protected void assertGeneratedFileDoesNotContain(Compilation compilation, String string) { assertThat(compilation) .generatedFile( diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java index 7f1bf078e46..cddc9b0d23d 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGeneratorTest.java @@ -57,8 +57,8 @@ public static Object[][] invalidSignatures() { }, { 
"Insert methods must return one of [VOID, FUTURE_OF_VOID, ENTITY, FUTURE_OF_ENTITY, " - + "OPTIONAL_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, BOUND_STATEMENT, " - + "FUTURE_OF_ASYNC_RESULT_SET]", + + "OPTIONAL_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, BOOLEAN, FUTURE_OF_BOOLEAN, RESULT_SET, " + + "BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("insert") .addAnnotation(Insert.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java index ab2c0ce2458..882d8fd26e6 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java @@ -45,7 +45,7 @@ public static Object[][] invalidSignatures() { + "ENTITY, OPTIONAL_ENTITY, RESULT_SET, BOUND_STATEMENT, PAGING_ITERABLE, FUTURE_OF_VOID, " + "FUTURE_OF_BOOLEAN, FUTURE_OF_LONG, FUTURE_OF_ROW, FUTURE_OF_ENTITY, " + "FUTURE_OF_OPTIONAL_ENTITY, FUTURE_OF_ASYNC_RESULT_SET, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, REACTIVE_RESULT_SET, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation( AnnotationSpec.builder(Query.class) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java index 0cabc78d435..01e2f6aa9dd 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java +++ 
b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java @@ -42,7 +42,7 @@ public static Object[][] invalidSignatures() { { "Invalid return type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) @@ -52,7 +52,7 @@ public static Object[][] invalidSignatures() { { "Invalid return type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java index 013d17a0403..0c6f571eb44 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGeneratorTest.java @@ -67,7 +67,8 @@ public static Object[][] invalidSignatures() { }, { "Invalid return type: Update methods must return one of [VOID, FUTURE_OF_VOID, " - + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, FUTURE_OF_BOOLEAN]", + + "RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, " + + "FUTURE_OF_BOOLEAN, REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("update") .addAnnotation(UPDATE_ANNOTATION) 
.addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 79d9b5e99e8..2a7eb90f844 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -43,9 +43,63 @@ com.github.spotbugs spotbugs-annotations + + junit + junit + test + + + org.testng + testng + test + + + org.reactivestreams + reactive-streams-tck + test + + + io.reactivex.rxjava2 + rxjava + test + + + org.mockito + mockito-core + test + + + maven-surefire-plugin + + 1 + + + + junit + false + + + suitename + Reactive Streams TCK + + + + + + org.apache.maven.surefire + surefire-junit47 + ${surefire.version} + + + org.apache.maven.surefire + surefire-testng + ${surefire.version} + + + org.apache.felix maven-bundle-plugin diff --git a/mapper-runtime/src/main/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.java b/mapper-runtime/src/main/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.java new file mode 100644 index 00000000000..5f16f262032 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.api.mapper.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveQueryMetadata; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import org.reactivestreams.Publisher; + +/** + * A {@link Publisher} of mapped entities returned by DAO methods. In other words, this interface is + * the equivalent of {@link ReactiveResultSet} for mapped entities. + * + *

      By default, all implementations returned by the driver are cold, unicast, single-subscriber + * only publishers. In other words, they do not support multiple subscriptions; consider + * caching the results produced by such publishers if you need to consume them by more than one + * downstream subscriber. + * + *

      Also, note that mapped reactive result sets may emit items to their subscribers on an internal + * driver IO thread. Subscriber implementors are encouraged to abide by Reactive Streams + * Specification rule 2.2 and avoid performing heavy computations or blocking calls inside + * {@link org.reactivestreams.Subscriber#onNext(Object) onNext} calls, as doing so could slow down + * the driver and impact performance. Instead, they should asynchronously dispatch received signals + * to their processing logic. + * + *

      This type is located in a {@code dse} package for historical reasons; reactive result sets + * work with both Cassandra and DSE. + * + * @see ReactiveResultSet + */ +public interface MappedReactiveResultSet + extends Publisher, ReactiveQueryMetadata {} diff --git a/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/DefaultMappedReactiveResultSet.java b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/DefaultMappedReactiveResultSet.java new file mode 100644 index 00000000000..71452d7a746 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/DefaultMappedReactiveResultSet.java @@ -0,0 +1,245 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.mapper.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.function.Function; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DefaultMappedReactiveResultSet implements MappedReactiveResultSet { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultMappedReactiveResultSet.class); + + private static final Subscription EMPTY_SUBSCRIPTION = + new Subscription() { + @Override + public void request(long n) {} + + @Override + public void cancel() {} + }; + + @NonNull private final ReactiveResultSet source; + + @NonNull private final Function mapper; + + public DefaultMappedReactiveResultSet( + @NonNull ReactiveResultSet source, @NonNull Function mapper) { + this.source = source; + this.mapper = mapper; + } + + @Override + @NonNull + public Publisher getColumnDefinitions() { + return source.getColumnDefinitions(); + } + + @Override + @NonNull + public Publisher getExecutionInfos() { + return source.getExecutionInfos(); + } + + @Override + @NonNull + public Publisher wasApplied() { + return source.wasApplied(); + } + + @Override + public void subscribe(@NonNull Subscriber subscriber) { + // As per rule 1.9, we need to throw an NPE if subscriber is null + Objects.requireNonNull(subscriber, "Subscriber cannot be null"); + // As per rule 1.11, this publisher supports multiple subscribers in a unicast configuration, + // as long as the source publisher does too. 
+ MappedReactiveResultSetSubscriber s = new MappedReactiveResultSetSubscriber(subscriber); + try { + source.subscribe(s); + } catch (Throwable t) { + // As per rule 1.9: subscribe MUST return normally. The only legal way to signal failure (or + // reject the Subscriber) is by calling onError (after calling onSubscribe). + s.cancel(); + IllegalStateException error = + new IllegalStateException( + "Publisher violated $1.9 by throwing an exception from subscribe.", t); + LOG.error(error.getMessage(), error.getCause()); + // This may violate 1.9 since we cannot know if subscriber.onSubscribe was called or not. + subscriber.onSubscribe(EMPTY_SUBSCRIPTION); + subscriber.onError(error); + } + // As per 1.9, this method must return normally (i.e. not throw) + } + + private class MappedReactiveResultSetSubscriber implements Subscriber, Subscription { + + private volatile Subscriber downstreamSubscriber; + private volatile Subscription upstreamSubscription; + private volatile boolean terminated; + + MappedReactiveResultSetSubscriber(@NonNull Subscriber subscriber) { + this.downstreamSubscriber = subscriber; + } + + @Override + public void onSubscribe(@NonNull Subscription subscription) { + // As per rule 2.13, we need to throw NPE if the subscription is null + Objects.requireNonNull(subscription, "Subscription cannot be null"); + // As per rule 2.12, Subscriber.onSubscribe MUST be called at most once for a given subscriber + if (upstreamSubscription != null) { + try { + // Cancel the additional subscription + subscription.cancel(); + } catch (Throwable t) { + // As per rule 3.15, Subscription.cancel is not allowed to throw an exception; the only + // thing we can do is log. 
+ LOG.error("Subscription violated $3.15 by throwing an exception from cancel.", t); + } + } else if (!terminated) { + upstreamSubscription = subscription; + try { + downstreamSubscriber.onSubscribe(this); + } catch (Throwable t) { + // As per rule 2.13: In the case that this rule is violated, + // any associated Subscription to the Subscriber MUST be considered as + // cancelled... + cancel(); + // ...and the caller MUST raise this error condition in a fashion that is "adequate for + // the runtime environment" (we choose to log). + LOG.error("Subscriber violated $2.13 by throwing an exception from onSubscribe.", t); + } + } + } + + @Override + public void onNext(@NonNull ReactiveRow row) { + LOG.trace("Received onNext: {}", row); + if (upstreamSubscription == null) { + LOG.error("Publisher violated $1.09 by signalling onNext prior to onSubscribe."); + } else if (!terminated) { + Objects.requireNonNull(row, "Publisher violated $2.13 by emitting a null element"); + EntityT entity; + try { + entity = mapper.apply(row); + } catch (Throwable t) { + onError(t); + return; + } + Objects.requireNonNull(entity, "Publisher violated $2.13 by generating a null entity"); + try { + downstreamSubscriber.onNext(entity); + } catch (Throwable t) { + LOG.error("Subscriber violated $2.13 by throwing an exception from onNext.", t); + cancel(); + } + } + } + + @Override + public void onComplete() { + LOG.trace("Received onComplete"); + if (upstreamSubscription == null) { + LOG.error("Publisher violated $1.09 by signalling onComplete prior to onSubscribe."); + } else if (!terminated) { + try { + downstreamSubscriber.onComplete(); + } catch (Throwable t) { + LOG.error("Subscriber violated $2.13 by throwing an exception from onComplete.", t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + } + + @Override + public void onError(@NonNull Throwable error) { + LOG.trace("Received onError", error); + if (upstreamSubscription == null) { + 
LOG.error("Publisher violated $1.09 by signalling onError prior to onSubscribe."); + } else if (!terminated) { + Objects.requireNonNull(error, "Publisher violated $2.13 by signalling a null error"); + try { + downstreamSubscriber.onError(error); + } catch (Throwable t) { + t.addSuppressed(error); + LOG.error("Subscriber violated $2.13 by throwing an exception from onError.", t); + } + // We need to consider this Subscription as cancelled as per rule 1.6 + cancel(); + } + } + + @Override + public void request(long n) { + LOG.trace("Received request: {}", n); + // As per 3.6: after the Subscription is cancelled, additional calls to request() MUST be + // NOPs. + // Implementation note: triggering onError() from below may break 1.3 because this method is + // called by the subscriber thread, and it can race with the producer thread. But these + // situations are already abnormal, so there is no point in trying to prevent the race + // condition with locks. + if (!terminated) { + if (n <= 0) { + // Validate request as per rule 3.9: While the subscription is not cancelled, + // Subscription.request(long n) MUST signal onError with a + // java.lang.IllegalArgumentException if the argument is <= 0. + // The cause message SHOULD explain that non-positive request signals are illegal. + onError( + new IllegalArgumentException( + "Subscriber violated $3.9 by requesting a non-positive number of elements.")); + } else { + try { + upstreamSubscription.request(n); + } catch (Throwable t) { + // As per rule 3.16, Subscription.request is not allowed to throw + IllegalStateException error = + new IllegalStateException( + "Subscription violated $3.16 by throwing an exception from request.", t); + onError(error); + } + } + } + } + + @Override + public void cancel() { + // As per 3.5: Subscription.cancel() MUST respect the responsiveness of its caller by + // returning in a timely manner, MUST be idempotent and MUST be thread-safe. 
+ if (!terminated) { + terminated = true; + LOG.trace("Cancelling"); + // propagate cancellation, if we got a chance to subscribe to the upstream source + if (upstreamSubscription != null) { + upstreamSubscription.cancel(); + } + // As per 3.13, Subscription.cancel() MUST request the Publisher to + // eventually drop any references to the corresponding subscriber. + downstreamSubscriber = null; + upstreamSubscription = null; + } + } + } +} diff --git a/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/FailedMappedReactiveResultSet.java b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/FailedMappedReactiveResultSet.java new file mode 100644 index 00000000000..9590d597678 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/FailedMappedReactiveResultSet.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.mapper.reactive; + +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.dse.driver.internal.core.cql.reactive.FailedPublisher; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.reactivestreams.Publisher; + +/** + * A mapped reactive result set that immediately signals the error passed at instantiation to all + * its subscribers. + */ +public class FailedMappedReactiveResultSet extends FailedPublisher + implements MappedReactiveResultSet { + + public FailedMappedReactiveResultSet(Throwable error) { + super(error); + } + + @NonNull + @Override + public Publisher getColumnDefinitions() { + return new FailedPublisher<>(error); + } + + @NonNull + @Override + public Publisher getExecutionInfos() { + return new FailedPublisher<>(error); + } + + @NonNull + @Override + public Publisher wasApplied() { + return new FailedPublisher<>(error); + } +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index e5f817ab43d..e51282e0705 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -15,6 +15,9 @@ */ package com.datastax.oss.driver.internal.mapper; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.dse.driver.internal.mapper.reactive.DefaultMappedReactiveResultSet; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; @@ -276,6 +279,16 @@ CompletableFuture> 
executeAsyncAndMapToEntity return executeAsync(statement).thenApply(rs -> rs.map(entityHelper::get)); } + protected ReactiveResultSet executeReactive(Statement statement) { + return context.getSession().executeReactive(statement); + } + + protected MappedReactiveResultSet executeReactiveAndMap( + Statement statement, EntityHelper entityHelper) { + ReactiveResultSet source = executeReactive(statement); + return new DefaultMappedReactiveResultSet<>(source, entityHelper::get); + } + protected static void throwIfProtocolVersionV3(MapperContext context) { if (context.getSession().getContext().getProtocolVersion().getCode() <= ProtocolConstants.Version.V3) { diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSetTckTest.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSetTckTest.java new file mode 100644 index 00000000000..cc62393ade3 --- /dev/null +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSetTckTest.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.api.mapper.reactive; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.cql.reactive.DefaultReactiveResultSet; +import com.datastax.dse.driver.internal.mapper.reactive.DefaultMappedReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import io.reactivex.Flowable; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import org.reactivestreams.Publisher; +import org.reactivestreams.tck.PublisherVerification; +import org.reactivestreams.tck.TestEnvironment; + +public class MappedReactiveResultSetTckTest extends PublisherVerification { + + public MappedReactiveResultSetTckTest() { + super(new TestEnvironment()); + } + + @Override + public Publisher createPublisher(long elements) { + // The TCK usually requests between 0 and 20 items, or Long.MAX_VALUE. + // Past 3 elements it never checks how many elements have been effectively produced, + // so we can safely cap at, say, 20. + int effective = (int) Math.min(elements, 20L); + return new DefaultMappedReactiveResultSet<>( + new DefaultReactiveResultSet(() -> createResults(effective)), row -> row.getInt(0)); + } + + @Override + public Publisher createFailedPublisher() { + DefaultReactiveResultSet publisher = new DefaultReactiveResultSet(() -> createResults(1)); + // Since our publisher does not support multiple + // subscriptions, we use that to create a failed publisher. 
+ publisher.subscribe(new TestSubscriber<>()); + return new DefaultMappedReactiveResultSet<>(publisher, row -> row.getInt(0)); + } + + private static CompletableFuture createResults(int elements) { + CompletableFuture previous = null; + if (elements > 0) { + // create pages of 5 elements each to exercise pagination + List pages = + Flowable.range(0, elements).buffer(5).map(List::size).toList().blockingGet(); + Collections.reverse(pages); + for (Integer size : pages) { + List rows = + Flowable.range(0, size) + .map( + i -> { + Row row = mock(Row.class); + when(row.getInt(0)).thenReturn(i); + return row; + }) + .toList() + .blockingGet(); + CompletableFuture future = new CompletableFuture<>(); + future.complete(new MockAsyncResultSet(rows, previous)); + previous = future; + } + } else { + previous = new CompletableFuture<>(); + previous.complete(new MockAsyncResultSet(0, null)); + } + return previous; + } +} diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockAsyncResultSet.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockAsyncResultSet.java new file mode 100644 index 00000000000..058b706ca59 --- /dev/null +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockAsyncResultSet.java @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.api.mapper.reactive; + +import static org.mockito.Mockito.mock; + +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.Row; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +public class MockAsyncResultSet implements AsyncResultSet { + + private final List rows; + private final Iterator iterator; + private final CompletionStage nextPage; + private final ExecutionInfo executionInfo = mock(ExecutionInfo.class); + private final ColumnDefinitions columnDefinitions = mock(ColumnDefinitions.class); + private int remaining; + + public MockAsyncResultSet(int size, CompletionStage nextPage) { + this(IntStream.range(0, size).boxed().map(MockRow::new).collect(Collectors.toList()), nextPage); + } + + public MockAsyncResultSet(List rows, CompletionStage nextPage) { + this.rows = rows; + iterator = rows.iterator(); + remaining = rows.size(); + this.nextPage = nextPage; + } + + @Override + public Row one() { + Row next = iterator.next(); + remaining--; + return next; + } + + @Override + public int remaining() { + return remaining; + } + + @NonNull + @Override + public List currentPage() { + return new ArrayList<>(rows); + } + + @Override + public boolean hasMorePages() { + return nextPage != null; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws IllegalStateException { + return nextPage; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return columnDefinitions; + } + + @NonNull + @Override + public ExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @Override + public boolean wasApplied() { + 
return true; + } +} diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java new file mode 100644 index 00000000000..d223989cdd4 --- /dev/null +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java @@ -0,0 +1,124 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.api.mapper.reactive; + +import static org.mockito.Mockito.mock; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; + +class MockRow implements Row { + + private int index; + + MockRow(int index) { + this.index = index; + } + + @Override + public int size() { + return 0; + } + + @NonNull + @Override + public CodecRegistry 
codecRegistry() { + return mock(CodecRegistry.class); + } + + @NonNull + @Override + public ProtocolVersion protocolVersion() { + return DefaultProtocolVersion.V4; + } + + @NonNull + @Override + public ColumnDefinitions getColumnDefinitions() { + return EmptyColumnDefinitions.INSTANCE; + } + + @Override + public int firstIndexOf(@NonNull String name) { + return 0; + } + + @Override + public int firstIndexOf(@NonNull CqlIdentifier id) { + return 0; + } + + @NonNull + @Override + public DataType getType(int i) { + return DataTypes.INT; + } + + @NonNull + @Override + public DataType getType(@NonNull String name) { + return DataTypes.INT; + } + + @NonNull + @Override + public DataType getType(@NonNull CqlIdentifier id) { + return DataTypes.INT; + } + + @Override + public ByteBuffer getBytesUnsafe(int i) { + return null; + } + + @Override + public boolean isDetached() { + return false; + } + + @Override + public void attach(@NonNull AttachmentPoint attachmentPoint) {} + + // equals and hashCode required for TCK tests that check that two subscribers + // receive the exact same set of items. + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof MockRow)) { + return false; + } + MockRow mockRow = (MockRow) o; + return index == mockRow.index; + } + + @Override + public int hashCode() { + return index; + } +} diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java new file mode 100644 index 00000000000..0eaaf508fe9 --- /dev/null +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.api.mapper.reactive; + +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +public class TestSubscriber implements Subscriber { + + private final List elements = new ArrayList<>(); + private final CountDownLatch latch = new CountDownLatch(1); + private Subscription subscription; + private Throwable error; + + @Override + public void onSubscribe(Subscription s) { + if (subscription != null) { + throw new AssertionError("already subscribed"); + } + subscription = s; + s.request(Long.MAX_VALUE); + } + + @Override + public void onNext(T t) { + elements.add(t); + } + + @Override + public void onError(Throwable t) { + error = t; + latch.countDown(); + } + + @Override + public void onComplete() { + latch.countDown(); + } + + @Nullable + public Throwable getError() { + return error; + } + + @NonNull + public List getElements() { + return elements; + } + + public void awaitTermination() { + Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES); + } +} From 2d439848ec3147a3b9956802d69c03e7d0f10ac6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 10 Oct 2019 15:04:50 -0700 Subject: [PATCH 218/979] Run Reactive TCK with TestNG --- core/pom.xml | 22 
++++++++++++++++++++++ pom.xml | 1 + 2 files changed, 23 insertions(+) diff --git a/core/pom.xml b/core/pom.xml index 49d967c69d8..987b066a18e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -208,13 +208,35 @@ maven-surefire-plugin + 1 listener com.datastax.oss.driver.DriverRunListener + + + junit + false + + + suitename + Reactive Streams TCK + + + + org.apache.maven.surefire + surefire-junit47 + ${surefire.version} + + + org.apache.maven.surefire + surefire-testng + ${surefire.version} + + org.apache.felix diff --git a/pom.xml b/pom.xml index 66b619eb712..89f386adc9d 100644 --- a/pom.xml +++ b/pom.xml @@ -73,6 +73,7 @@ 1.9.12 3.1.6 2.0.0-M19 + 2.19.1 From fb15a8285f9996dbe848a396d2f24074a6ed48b5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 7 Nov 2019 14:41:55 -0800 Subject: [PATCH 219/979] Fix failing unit tests --- .../core/insights/InsightsClientTest.java | 41 ++++++++----------- .../queries/Cassandra21SchemaQueriesTest.java | 2 + .../queries/Cassandra22SchemaQueriesTest.java | 2 + .../queries/Cassandra3SchemaQueriesTest.java | 2 + 4 files changed, 24 insertions(+), 23 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index 9fdaba2e991..91e70437536 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -94,7 +94,7 @@ public class InsightsClientTest { @Test public void should_construct_json_event_startup_message() throws IOException { // given - DefaultDriverContext DefaultDriverContext = mockDefaultDriverContext(); + DefaultDriverContext context = mockDefaultDriverContext(); PlatformInfoFinder platformInfoFinder = mock(PlatformInfoFinder.class); OS os = new OS("linux", "1.2", "x64"); CPUS cpus = new CPUS(8, "intel i7"); @@ -124,7 +124,7 @@ public void 
should_construct_json_event_startup_message() throws IOException { InsightsClient insightsClient = new InsightsClient( - DefaultDriverContext, + context, MOCK_TIME_SUPPLIER, INSIGHTS_CONFIGURATION, platformInfoFinder, @@ -317,7 +317,7 @@ public static Object[][] stackTraceProvider() { "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", "DefaultDriverContext.java", - 94), + 243), }; StackTraceElement[] stackTraceWithOneInitCallAndCaller = new StackTraceElement[] { @@ -326,7 +326,7 @@ public static Object[][] stackTraceProvider() { "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", "DefaultDriverContext.java", - 94), + 243), new StackTraceElement( "com.example.ActualCallerNameApp", "main", "ActualCallerNameApp.java", 1) }; @@ -338,7 +338,7 @@ public static Object[][] stackTraceProvider() { "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", "DefaultDriverContext.java", - 94), + 243), new StackTraceElement( "com.datastax.oss.driver.api.core.session.SessionBuilder", "buildDefaultSessionAsync", @@ -353,12 +353,7 @@ public static Object[][] stackTraceProvider() { "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", "DefaultDriverContext.java", - 94), - new StackTraceElement( - "com.datastax.dse.driver.api.core.DseSessionBuilder", - "buildContext", - "DseSessionBuilder.java", - 100), + 243), new StackTraceElement( "com.datastax.oss.driver.api.core.session.SessionBuilder", "buildDefaultSessionAsync", @@ -382,12 +377,12 @@ public static Object[][] stackTraceProvider() { "com.datastax.oss.driver.internal.core.context.DefaultDriverContext", "", "DefaultDriverContext.java", - 94), + 243), new StackTraceElement( - "com.datastax.dse.driver.api.core.DseSessionBuilder", + "com.datastax.oss.driver.api.core.session.SessionBuilder", "buildContext", - "DseSessionBuilder.java", - 100), + "SessionBuilder.java", + 687), new StackTraceElement( "com.datastax.oss.driver.api.core.session.SessionBuilder", 
"buildDefaultSessionAsync", @@ -420,10 +415,10 @@ public static Object[][] stackTraceProvider() { } private DefaultDriverContext mockDefaultDriverContext() throws UnknownHostException { - DefaultDriverContext DefaultDriverContext = mock(DefaultDriverContext.class); - mockConnectionPools(DefaultDriverContext); + DefaultDriverContext context = mock(DefaultDriverContext.class); + mockConnectionPools(context); MetadataManager manager = mock(MetadataManager.class); - when(DefaultDriverContext.getMetadataManager()).thenReturn(manager); + when(context.getMetadataManager()).thenReturn(manager); DriverExecutionProfile defaultExecutionProfile = mockDefaultExecutionProfile(); DriverExecutionProfile nonDefaultExecutionProfile = mockNonDefaultRequestTimeoutExecutionProfile(); @@ -435,8 +430,8 @@ private DefaultDriverContext mockDefaultDriverContext() throws UnknownHostExcept startupOptions.put(StartupOptionsBuilder.DRIVER_VERSION_KEY, "2.x"); startupOptions.put(StartupOptionsBuilder.DRIVER_NAME_KEY, "DataStax Enterprise Java Driver"); - when(DefaultDriverContext.getStartupOptions()).thenReturn(startupOptions); - when(DefaultDriverContext.getProtocolVersion()).thenReturn(DSE_V2); + when(context.getStartupOptions()).thenReturn(startupOptions); + when(context.getProtocolVersion()).thenReturn(DSE_V2); DefaultNode contactPoint = mock(DefaultNode.class); EndPoint contactEndPoint = mock(EndPoint.class); when(contactEndPoint.resolve()).thenReturn(new InetSocketAddress("127.0.0.1", 9999)); @@ -444,7 +439,7 @@ private DefaultDriverContext mockDefaultDriverContext() throws UnknownHostExcept when(manager.getContactPoints()).thenReturn(ImmutableSet.of(contactPoint)); DriverConfig driverConfig = mock(DriverConfig.class); - when(DefaultDriverContext.getConfig()).thenReturn(driverConfig); + when(context.getConfig()).thenReturn(driverConfig); Map profiles = ImmutableMap.of( "default", defaultExecutionProfile, "non-default", nonDefaultExecutionProfile); @@ -460,8 +455,8 @@ private 
DefaultDriverContext mockDefaultDriverContext() throws UnknownHostExcept when(channel.getEndPoint()).thenReturn(controlConnectionEndpoint); when(channel.localAddress()).thenReturn(new InetSocketAddress("127.0.0.1", 10)); when(controlConnection.channel()).thenReturn(channel); - when(DefaultDriverContext.getControlConnection()).thenReturn(controlConnection); - return DefaultDriverContext; + when(context.getControlConnection()).thenReturn(controlConnection); + return context; } private void mockConnectionPools(DefaultDriverContext driverContext) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java index 8f6e87bd714..b1703defa16 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java @@ -19,6 +19,7 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; @@ -41,6 +42,7 @@ public void should_query() { when(config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList())) .thenReturn(Collections.emptyList()); + when(node.getCassandraVersion()).thenReturn(Version.V2_1_0); SchemaQueriesWithMockedChannel queries = new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java index 46ba1448dbc..85321f8e27b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java @@ -19,6 +19,7 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Metadata; @@ -41,6 +42,7 @@ public void should_query() { when(config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList())) .thenReturn(Collections.emptyList()); + when(node.getCassandraVersion()).thenReturn(Version.V2_2_0); SchemaQueriesWithMockedChannel queries = new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index 766ac9d572e..e29e12179a2 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -19,6 +19,7 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import 
com.datastax.oss.driver.api.core.metadata.Metadata; @@ -45,6 +46,7 @@ public void setup() { when(config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList())) .thenReturn(Collections.emptyList()); + when(node.getCassandraVersion()).thenReturn(Version.V3_0_0); } @Test From 540443a95fe237b2d47c1023869a573126994de1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 11 Nov 2019 10:46:27 -0800 Subject: [PATCH 220/979] Fix shaded JAR and OSGI descriptors --- core-shaded/pom.xml | 52 +++++++++++++++++++----- core/pom.xml | 6 +-- core/src/test/resources/logback-test.xml | 20 ++++----- pom.xml | 9 ++-- query-builder/pom.xml | 2 +- 5 files changed, 61 insertions(+), 28 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 7b86e3f1032..ce95e4313ab 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -81,6 +81,18 @@ org.hdrhistogram HdrHistogram + + org.apache.tinkerpop + gremlin-core + + + org.apache.tinkerpop + tinkergraph-gremlin + + + org.reactivestreams + reactive-streams + com.github.stephenc.jcip jcip-annotations @@ -116,11 +128,13 @@ com.datastax.oss:java-driver-core io.netty:* + com.esri.geometry:* + org.json:* + org.codehaus.jackson:* com.fasterxml.jackson.core:* @@ -129,6 +143,18 @@ io.netty com.datastax.oss.driver.shaded.netty + + com.esri + com.datastax.oss.driver.shaded.esri + + + org.json + com.datastax.oss.driver.shaded.json + + + org.codehaus.jackson + com.datastax.oss.driver.shaded.codehaus.jackson + com.fasterxml.jackson com.datastax.oss.driver.shaded.fasterxml.jackson @@ -136,7 +162,13 @@ + these one must be done here because their pattern is too wide --> + + org.codehaus.jackson:* + + META-INF/** + + com.fasterxml.jackson.core:* @@ -170,7 +202,7 @@ Exclude leftovers from the shading phase (this could also be done with a resource transformer by the shade plugin itself, but this way is more flexible). 
--> - META-INF/maven/com.datastax.oss/java-driver-core/**, META-INF/maven/io.netty/**, + META-INF/maven/com.datastax.oss/java-driver-core/**, META-INF/maven/io.netty/**, META-INF/maven/com.esri.geometry/**, META-INF/maven/org.json/** @@ -204,7 +236,7 @@ ${project.build.directory}/shaded-sources - com.datastax.oss.driver.internal:com.datastax.oss.driver.shaded + com.datastax.oss.driver.internal, com.datastax.dse.driver.internal, com.datastax.oss.driver.shaded !com.datastax.oss.driver.shaded.netty.*, + 1) Don't import packages shaded in the driver bundle. Note that shaded-guava lives + in its own bundle, so we must explicitly *not* mention it here. + -->!com.datastax.oss.driver.shaded.netty.*, !com.datastax.oss.driver.shaded.esri.*, !com.datastax.oss.driver.shaded.json.*, !com.datastax.oss.driver.shaded.codehaus.jackson.*, !com.datastax.oss.driver.shaded.fasterxml.jackson.*, jnr.*;resolution:=optional, + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, - com.datastax.oss.driver.api.core.*, com.datastax.oss.driver.internal.core.*, com.datastax.oss.driver.shaded.netty.*, + com.datastax.oss.driver.api.core.*, com.datastax.oss.driver.internal.core.*, com.datastax.dse.driver.api.core.*, com.datastax.dse.driver.internal.core.*, com.datastax.oss.driver.shaded.netty.*, com.datastax.oss.driver.shaded.esri.*, com.datastax.oss.driver.shaded.json.*, com.datastax.oss.driver.shaded.codehaus.jackson.*, com.datastax.oss.driver.shaded.fasterxml.jackson.*, true diff --git a/core/pom.xml b/core/pom.xml index 987b066a18e..424e193c353 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -262,14 +262,14 @@ -->!net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, jnr.*;resolution:=optional, * + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, * - 
com.datastax.oss.driver.*.core.* + com.datastax.oss.driver.*.core.*, com.datastax.dse.driver.*.core.* diff --git a/core/src/test/resources/logback-test.xml b/core/src/test/resources/logback-test.xml index 39f172d2faf..90dc593ef3b 100644 --- a/core/src/test/resources/logback-test.xml +++ b/core/src/test/resources/logback-test.xml @@ -17,14 +17,14 @@ --> - - - %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n - - - - - - - + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + diff --git a/pom.xml b/pom.xml index 89f386adc9d..750b86e2d07 100644 --- a/pom.xml +++ b/pom.xml @@ -69,8 +69,10 @@ 2.0.1 1.1.4 2.2.2 - 20180130 - 1.9.12 + 20180130 + + 1.9.12 + 3.1.6 2.0.0-M19 2.19.1 @@ -689,8 +691,7 @@ limitations under the License.]]> false true all,-missing - com.datastax.oss.driver.internal:com.datastax.dse.driver.internal - + com.datastax.oss.driver.internal:com.datastax.dse.driver.internal 1.9.12 - 3.1.6 2.0.0-M19 2.19.1 @@ -511,6 +510,14 @@ false \d+\.\d+\.\d+ + + + + + java.class.externalClassExposedInAPI + + + diff --git a/query-builder/revapi.json b/query-builder/revapi.json index 0cf4e85f90d..9d0163b487e 100644 --- a/query-builder/revapi.json +++ b/query-builder/revapi.json @@ -7,12 +7,12 @@ "packages": { "regex": true, "exclude": [ - "com\\.datastax\\.oss\\.protocol\\.internal(\\..+)?", - "com\\.datastax\\.oss\\.driver\\.internal(\\..+)?", + "com\\.datastax\\.(oss|dse)\\.protocol\\.internal(\\..+)?", + "com\\.datastax\\.(oss|dse)\\.driver\\.internal(\\..+)?", "com\\.datastax\\.oss\\.driver\\.shaded(\\..+)?", "org\\.assertj(\\..+)?", // Don't re-check sibling modules that this module depends on - "com\\.datastax\\.oss\\.driver\\.api\\.core(\\..+)?" + "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.core(\\..+)?" 
] } } diff --git a/test-infra/revapi.json b/test-infra/revapi.json index e1a98fb2b05..1cdc6a8ec9f 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -7,13 +7,13 @@ "packages": { "regex": true, "exclude": [ - "com\\.datastax\\.oss\\.protocol\\.internal(\\..+)?", - "com\\.datastax\\.oss\\.driver\\.internal(\\..+)?", + "com\\.datastax\\.(oss|dse)\\.protocol\\.internal(\\..+)?", + "com\\.datastax\\.(oss|dse)\\.driver\\.internal(\\..+)?", "com\\.datastax\\.oss\\.driver\\.shaded(\\..+)?", "com\\.datastax\\.oss\\.simulacron(\\..+)?", "org\\.assertj(\\..+)?", // Don't re-check sibling modules that this module depends on - "com\\.datastax\\.oss\\.driver\\.api\\.core(\\..+)?" + "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.core(\\..+)?" ] } } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java index ba377663d80..eed2f596f15 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java @@ -96,6 +96,21 @@ public SessionRule( this.graphName = graphName; } + public SessionRule( + CassandraResourceRule cassandraResource, + boolean createKeyspace, + NodeStateListener nodeStateListener, + SchemaChangeListener schemaChangeListener, + DriverConfigLoader configLoader) { + this( + cassandraResource, + createKeyspace, + nodeStateListener, + schemaChangeListener, + configLoader, + null); + } + @Override protected void before() { session = From c70472436bb7da5858b8114077dad417863b457b Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 13 Nov 2019 17:05:02 -0800 Subject: [PATCH 224/979] Port DSE-specific manual sections --- README.md | 4 - manual/core/authentication/README.md | 78 +++- manual/core/dse/.nav | 2 + manual/core/dse/README.md | 11 + manual/core/dse/geotypes/README.md | 173 ++++++++ 
manual/core/dse/graph/.nav | 5 + manual/core/dse/graph/README.md | 74 ++++ manual/core/dse/graph/fluent/.nav | 2 + manual/core/dse/graph/fluent/README.md | 118 ++++++ .../core/dse/graph/fluent/explicit/README.md | 113 ++++++ .../core/dse/graph/fluent/implicit/README.md | 52 +++ manual/core/dse/graph/options/README.md | 153 +++++++ manual/core/dse/graph/results/README.md | 144 +++++++ manual/core/dse/graph/script/README.md | 106 +++++ manual/core/integration/README.md | 59 ++- manual/core/metadata/node/README.md | 13 +- manual/core/metadata/schema/README.md | 35 ++ manual/core/reactive/README.md | 383 ++++++++++++++++++ manual/query_builder/README.md | 34 +- 19 files changed, 1538 insertions(+), 21 deletions(-) create mode 100644 manual/core/dse/.nav create mode 100644 manual/core/dse/README.md create mode 100644 manual/core/dse/geotypes/README.md create mode 100644 manual/core/dse/graph/.nav create mode 100644 manual/core/dse/graph/README.md create mode 100644 manual/core/dse/graph/fluent/.nav create mode 100644 manual/core/dse/graph/fluent/README.md create mode 100644 manual/core/dse/graph/fluent/explicit/README.md create mode 100644 manual/core/dse/graph/fluent/implicit/README.md create mode 100644 manual/core/dse/graph/options/README.md create mode 100644 manual/core/dse/graph/results/README.md create mode 100644 manual/core/dse/graph/script/README.md create mode 100644 manual/core/reactive/README.md diff --git a/README.md b/README.md index 88e9132e300..a28c316274e 100644 --- a/README.md +++ b/README.md @@ -55,10 +55,6 @@ higher. It requires Java 8 or higher. -If using DataStax Enterprise, the [DataStax Enterprise Java -driver](http://docs.datastax.com/en/developer/java-driver-dse/latest) provides more features and -better compatibility. - Disclaimer: Some DataStax/DataStax Enterprise products might partially work on big-endian systems, but DataStax does not officially support these systems. 
diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index ecc57a2612e..8c1d41a2c0b 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -3,7 +3,7 @@ ### Quick overview * `advanced.auth-provider` in the configuration. -* disabled by default. Also available: plain-text credentials, or write your own. +* disabled by default. Also available: plain-text credentials, GSSAPI (DSE only), or write your own. * can also be defined programmatically: [CqlSession.builder().withAuthCredentials][SessionBuilder.withAuthCredentials] or [CqlSession.builder().withAuthProvider][SessionBuilder.withAuthProvider]. @@ -19,6 +19,22 @@ This can be done in two ways: Define an `auth-provider` section in the [configuration](../configuration/): +``` +datastax-java-driver { + advanced.auth-provider { + class = ... + } +} +``` + +The auth provider must be configured before opening a session, it cannot be changed at runtime. + +#### Plain text + +`PlainTextAuthProvider` supports simple username/password authentication (intended to work with the +server-side `PasswordAuthenticator`). The credentials can be changed at runtime, they will be used +for new connection attempts once the configuration gets reloaded. + ``` datastax-java-driver { advanced.auth-provider { @@ -29,11 +45,45 @@ datastax-java-driver { } ``` -Authentication must be configured before opening a session, it cannot be changed at runtime. +When connecting to DSE, an optional `authorization-id` can also be specified. It will be used for +proxy authentication (logging in as another user or role). If you try to use this feature with an +authenticator that doesn't support it, the authorization id will be ignored. 
+ +``` +datastax-java-driver { + advanced.auth-provider { + class = PlainTextAuthProvider + username = user + password = pass + authorization-id = otherUserOrRole + } +} +``` + +Note that, for backward compatibility with previous driver versions, you can also use the class name +`DsePlainTextAuthProvider` to enable this provider. -`PlainTextAuthProvider` is provided out of the box, for simple username/password authentication -(intended to work with the server-side `PasswordAuthenticator`). The credentials can be changed at -runtime, they will be used for new connection attempts once the configuration gets reloaded. +#### GSSAPI (DSE only) + +`DseGssApiAuthProvider` supports GSSAPI authentication against a DSE cluster secured with Kerberos: + +``` +dse-java-driver { + advanced.auth-provider { + class = DseGssApiAuthProvider + login-configuration { + principal = "user principal here ex cassandra@DATASTAX.COM" + useKeyTab = "true" + refreshKrb5Config = "true" + keyTab = "Path to keytab file here" + } + } + } +``` + +See the comments in [reference.conf] for more details. + +#### Custom You can also write your own provider; it must implement [AuthProvider] and declare a public constructor with a [DriverContext] argument. @@ -58,7 +108,7 @@ CqlSession session = .build(); ``` -For convenience, there is a shortcut that takes the credentials directly. This is equivalent to +For convenience, there are shortcuts that take the credentials directly. This is equivalent to using `PlainTextAuthProvider` in the configuration: ```java @@ -66,18 +116,28 @@ CqlSession session = CqlSession.builder() .withAuthCredentials("user", "pass") .build(); + +// With proxy authentication (DSE only) +CqlSession session = + CqlSession.builder() + .withAuthCredentials("user", "pass", "otherUserOrRole") + .build(); ``` One downside of `withAuthCredentials` is that the credentials are stored in clear text in memory; this means they are vulnerable to an attacker who is able to perform memory dumps. 
If this is not -acceptable for you, consider writing your own [AuthProvider] implementation -([PlainTextAuthProviderBase] is a good starting point). +acceptable for you, consider writing your own [AuthProvider] implementation; +[PlainTextAuthProviderBase] is a good starting point. +Similarly, the driver provides [DseGssApiAuthProviderBase] as a starting point to write your own +GSSAPI auth provider. [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer [AuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/AuthProvider.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html [PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html [SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- \ No newline at end of file +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/dse/.nav b/manual/core/dse/.nav new file mode 100644 index 00000000000..c53a353fd33 --- /dev/null +++ b/manual/core/dse/.nav @@ -0,0 +1,2 @@ +graph +geotypes \ No newline at end of file diff --git a/manual/core/dse/README.md 
b/manual/core/dse/README.md new file mode 100644 index 00000000000..e0d41ef38c7 --- /dev/null +++ b/manual/core/dse/README.md @@ -0,0 +1,11 @@ +## DSE-specific features + +Some driver features only work with Datastax Enterprise: + +* [Graph](graph/); +* [Geospatial types](geotypes/); +* Proxy and GSSAPI authentication (covered in the [Authentication](../authentication/) page). + +Note that, if you don't use these features, you might be able to exclude certain dependencies in +order to limit the number of JARs in your classpath. See the +[Integration](../integration/#driver-dependencies) page. \ No newline at end of file diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md new file mode 100644 index 00000000000..8153de337a2 --- /dev/null +++ b/manual/core/dse/geotypes/README.md @@ -0,0 +1,173 @@ +## Geospatial types + +The driver comes with client-side representations of the DSE geospatial data types: [Point], +[LineString] and [Polygon]. + +Note: geospatial types require the [ESRI] library version 1.2 to be present on the classpath. The +DSE driver has a non-optional dependency on that library, but if your application does not use +geotypes at all, it is possible to exclude it to minimize the number of runtime dependencies. If the +library cannot be found at runtime, geospatial types won't be available and a warning will be +logged, but the driver will otherwise operate normally (this is also valid for OSGi deployments). 
+ +### Usage in requests + +Geospatial types can be retrieved from query results like any other value; use the "typed" getter +that takes the class as a second argument: + +```java +// Schema: CREATE TABLE poi(id int PRIMARY KEY, location 'PointType', description text); + +CqlSession session = CqlSession.builder().build() + +Row row = session.execute("SELECT location FROM poi WHERE id = 1").one(); +Point location = row.get(0, Point.class); +``` + +The corresponding setter can be used for insertions: + +```java +PreparedStatement pst = + session.prepare("INSERT INTO poi (id, location, description) VALUES (?, ?, ?)"); +session.execute( + pst.boundStatementBuilder() + .setInt("id", 2) + .set("location", Point.fromCoordinates(2.2945, 48.8584), Point.class) + .setString("description", "Eiffel Tower") + .build()); +``` + +This also works with the vararg syntax where target CQL types are inferred: + +```java +session.execute(pst.bind(2, Point.fromCoordinates(2.2945, 48.8584), "Eiffel Tower")); +``` + +### Client-side API + +The driver provides methods to create instances or inspect existing ones. + +[Point] is a trivial pair of coordinates: + +```java +Point point = Point.fromCoordinates(2.2945, 48.8584); +System.out.println(point.X()); +System.out.println(point.Y()); +``` + +[LineString] is a series of 2 or more points: + +```java +LineString lineString = + LineString.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)); + +for (Point point : lineString.getPoints()) { + System.out.println(point); +} +``` + +[Polygon] is a planar surface in a two-dimensional XY-plane. 
You can build a simple polygon from a +list of points: + +```java +Polygon polygon = + Polygon.fromPoints( + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(20, 40), + Point.fromCoordinates(40, 40)); +``` + +In addition to its exterior boundary, a polygon can have an arbitrary number of interior rings, +possibly nested (the first level defines "lakes" in the shape, the next level "islands" in those +lakes, etc). To create such complex polygons, use the builder: + +```java +Polygon polygon = + Polygon.builder() + .addRing( + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)) + .addRing( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(1, 2), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(2, 1)) + .addRing( + Point.fromCoordinates(3, 1), + Point.fromCoordinates(3, 2), + Point.fromCoordinates(4, 2), + Point.fromCoordinates(4, 1)) + .build(); +``` + +You can then retrieve all the points with the following methods: + +```java +List exteriorRing = polygon.getExteriorRing(); + +for (List interiorRing : polygon.getInteriorRings()) { + ... +} +``` + +Note that all rings (exterior or interior) are defined with the same builder method: you can provide +them in any order, the implementation will figure out which is the exterior one. In addition, points +are always ordered counterclockwise for the exterior ring, clockwise for the first interior level, +counterclockwise for the second level, etc. 
Again, this is done automatically, so you don't need to +sort them beforehand; however, be prepared to get a different order when you read them back: + +```java +Polygon polygon = + Polygon.fromPoints( + // Clockwise: + Point.fromCoordinates(0, 0), + Point.fromCoordinates(0, 3), + Point.fromCoordinates(5, 3), + Point.fromCoordinates(5, 0)); + +System.out.println(polygon); +// Counterclockwise: +// POLYGON ((0 0, 5 0, 5 3, 0 3, 0 0)) +``` + +All geospatial types interoperate with three standard formats: + +* [Well-known text]\: + + ```java + Point point = Point.fromWellKnownText("POINT (0 1)"); + System.out.println(point.asWellKnownText()); + ``` + +* [Well-known binary]\: + + ```java + import com.datastax.oss.protocol.internal.util.Bytes; + + Point point = + Point.fromWellKnownBinary( + Bytes.fromHexString("0x01010000000000000000000000000000000000f03f")); + System.out.println(Bytes.toHexString(point.asWellKnownBinary())); + ``` + +* [GeoJSON]\: + + ```java + Point point = Point.fromGeoJson("{\"type\":\"Point\",\"coordinates\":[0.0,1.0]}"); + System.out.println(point.asGeoJson()); + ``` + +[ESRI]: https://github.com/Esri/geometry-api-java + +[LineString]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/Polygon.html + +[Well-known text]: https://en.wikipedia.org/wiki/Well-known_text +[Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary +[GeoJSON]: https://tools.ietf.org/html/rfc7946 diff --git a/manual/core/dse/graph/.nav b/manual/core/dse/graph/.nav new file mode 100644 index 00000000000..d7f30c149fc --- /dev/null +++ b/manual/core/dse/graph/.nav @@ -0,0 +1,5 @@ +script +fluent +fluent +options +results \ No newline at end of file diff --git 
a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md new file mode 100644 index 00000000000..c78f60f6a67 --- /dev/null +++ b/manual/core/dse/graph/README.md @@ -0,0 +1,74 @@ +## Graph + +The driver provides full support for DSE graph, the distributed graph database available in DataStax +Enterprise. The [CqlSession] interface extends [GraphSession], which adds specialized methods to +execute requests expressed in the [Gremlin] graph traversal language. + +*This manual only covers driver usage; for more information about server-side configuration and data +modeling, refer to the [DSE developer guide].* + +Note: graph capabilities require the [Apache TinkerPop™] library to be present on the classpath. The +driver has a non-optional dependency on that library, but if your application does not use graph at +all, it is possible to exclude it to minimize the number of runtime dependencies. If the library +cannot be found at runtime, graph queries won't be available and a warning will be logged, but the +driver will otherwise operate normally (this is also valid for OSGi deployments). + +### Overview + +There are 3 ways to execute graph requests: + +1. Passing a Gremlin script directly in a plain Java string. We'll refer to this as the + [script API](script/): + + ```java + CqlSession session = CqlSession.builder().build(); + + String script = "g.V().has('name', name)"; + ScriptGraphStatement statement = + ScriptGraphStatement.builder(script) + .withQueryParam("name", "marko") + .build(); + + GraphResultSet result = session.execute(statement); + for (GraphNode node : result) { + System.out.println(node.asVertex()); + } + ``` + +2. 
Building a traversal with the [TinkerPop fluent API](fluent/), and [executing it + explicitly](fluent/explicit/) with the session: + + ```java + import static com.datastax.dse.driver.api.core.graph.DseGraph.g; + + GraphTraversal traversal = g.V().has("name", "marko"); + FluentGraphStatement statement = FluentGraphStatement.newInstance(traversal); + + GraphResultSet result = session.execute(statement); + for (GraphNode node : result) { + System.out.println(node.asVertex()); + } + ``` + +3. Building a connected traversal with the fluent API, and [executing it + implicitly](fluent/implicit/) by invoking a terminal step: + + ```java + GraphTraversalSource g = DseGraph.g + .withRemote(DseGraph.remoteConnectionBuilder(session).build()); + + List vertices = g.V().has("name", "marko").toList(); + ``` + +All executions modes rely on the same set of [configuration options](options/). + +The script and explicit fluent API return driver-specific [result sets](results/). The implicit +fluent API returns Apache TinkerPop™ types directly. 
+ +[Apache TinkerPop™]: http://tinkerpop.apache.org/ + +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphSession.html + +[DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html +[Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/.nav b/manual/core/dse/graph/fluent/.nav new file mode 100644 index 00000000000..4be448834af --- /dev/null +++ b/manual/core/dse/graph/fluent/.nav @@ -0,0 +1,2 @@ +explicit +implicit \ No newline at end of file diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md new file mode 100644 index 00000000000..4edf44fe45d --- /dev/null +++ b/manual/core/dse/graph/fluent/README.md @@ -0,0 +1,118 @@ +## Fluent API + +The driver depends on [Apache TinkerPop™], a graph computing framework that provides a fluent API to +build Gremlin traversals. This allows you to write your graph requests directly in Java, like you +would in a Gremlin-groovy script: + +```java +// How this is initialized will depend on the execution model, see details below +GraphTraversalSource g = ... + +GraphTraversal traversal = g.V().has("name", "marko"); +``` + +### Execution models + +There are two ways to execute fluent traversals: + +* [explicitly](explicit/) by wrapping a traversal into a statement and passing it to + `session.execute`; +* [implicitly](implicit/) by building the traversal from a connected source, and calling a + terminal step. + +### Common topics + +The following apply regardless of the execution model: + +#### Limitations + +At the time of writing (DSE 6.0 / driver 4.0), some types of queries cannot be executed through the +fluent API: + +* system queries (e.g. 
creating / dropping a graph); +* configuration; +* DSE graph schema queries. + +You'll have to use the [script API](../script) for those use cases. + +#### Performance considerations + +Before sending a fluent graph statement over the network, the driver serializes the Gremlin +traversal into a byte array. **Traversal serialization happens on the client thread, even in +asynchronous mode**. In other words, it is done on: + +* the thread that calls `session.execute` or `session.executeAsync` for explicit execution; +* the thread that calls the terminal step for implicit execution. + +In practice, this shouldn't be an issue, but we've seen it become problematic in some corner cases +of our performance benchmarks: if a single thread issues a lot of `session.executeAsync` calls in a +tight loop, traversal serialization can dominate CPU usage on that thread, and become a bottleneck +for request throughput. + +If you believe that you're running into that scenario, start by profiling your application to +confirm that the client thread maxes out its CPU core; to solve the problem, distribute your +`session.executeAsync` calls onto more threads. + +#### Domain specific languages + +Gremlin can be extended with domain specific languages to make traversals more natural to write. For +example, considering the following query: + +```java +g.V().hasLabel("person").has("name", "marko"). + out("knows").hasLabel("person").has("name", "josh"); +``` + +A "social" DSL could be written to simplify it as: + +```java +socialG.persons("marko").knows("josh"); +``` + +TinkerPop provides an annotation processor to generate a DSL from an annotated interface. This is +covered in detail in the [TinkerPop documentation][TinkerPop DSL]. 
+ +Once your custom traversal source is generated, here's how to use it: + +```java +// Non-connected source for explicit execution: +SocialTraversalSource socialG = DseGraph.g.getGraph().traversal(SocialTraversalSource.class); + +// Connected source for implicit execution: +SocialTraversalSource socialG = + DseGraph.g + .withRemote(DseGraph.remoteConnectionBuilder(session).build()) + .getGraph() + .traversal(SocialTraversalSource.class); +``` + +#### Search and geospatial predicates + +All the DSE predicates are available on the driver side: + +* for [search][DSE search], use the [Search] class: + + ```java + GraphTraversal traversal = + g.V().has("recipe", "instructions", Search.token("Saute")).values("name"); + ``` + +* for [geospatial queries][DSE geo], use the [Geo] class: + + ```java + GraphTraversal traversal = + g.V() + .has( + "location", + "point", + Geo.inside(Geo.point(2.352222, 48.856614), 4.2, Geo.Unit.DEGREES)) + .values("name"); + ``` + +[Search]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/predicates/Geo.html + +[Apache TinkerPop™]: http://tinkerpop.apache.org/ +[TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl +[DSE search]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/using/useSearchIndexes.html +[DSE geo]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/using/queryGeospatial.html diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md new file mode 100644 index 00000000000..bfda2f3a805 --- /dev/null +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -0,0 +1,113 @@ +## Explicit execution + +Fluent traversals can be wrapped into a [FluentGraphStatement] and passed to the session: + +```java +// A "dummy", non-connected traversal source that is not meant to be 
iterated directly, but instead +// serves as the basis to build fluent statements: +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; + +GraphTraversal traversal = g.V().has("name", "marko"); +FluentGraphStatement statement = FluentGraphStatement.newInstance(traversal); + +GraphResultSet result = session.execute(statement); +for (GraphNode node : result) { + System.out.println(node.asVertex()); +} +``` + +### Creating fluent statements + +#### Factory method + +As shown above, [FluentGraphStatement.newInstance] creates a statement from a traversal directly. + +The default implementation returned by the driver is **immutable**; if you call additional methods +on the statement -- for example to set [options](../../options/) -- each method call will create a +new copy: + +```java +FluentGraphStatement statement = FluentGraphStatement.newInstance(traversal); +FluentGraphStatement statement2 = statement.setTimeout(Duration.ofSeconds(10)); + +assert statement2 != statement; +``` + +Immutability is good because it makes statements inherently **thread-safe**: you can share them in +your application and access them concurrently without any risk. + +On the other hand, it means a lot of intermediary copies if you often call methods on your +statements. Modern VMs are normally good at dealing with such short-lived objects, but if you're +worried about the performance impact, consider using a builder instead. + +Note: contrary to driver statements, Tinkerpop's `GraphTraversal` is mutable and therefore not +thread-safe. This is fine if you just wrap a traversal into a statement and never modify it +afterwards, but be careful not to share traversals and modify them concurrently. 
+ +#### Builder + +Instead of creating a statement directly, you can pass your traversal to +[FluentGraphStatement.builder], chain method calls to set options, and finally call `build()`: + +```java +FluentGraphStatement statement1 = + FluentGraphStatement.builder(traversal) + .withTimeout(Duration.ofSeconds(10)) + .withIdempotence(true) + .build(); +``` + +The builder implementation is **mutable**: every method call returns the same object, only one +builder instance will be created no matter how many methods you call on it. As a consequence, the +builder object is **not thread-safe**. + +You can also initialize a builder from an existing statement: it will inherit all of its options. + +```java +FluentGraphStatement statement2 = + FluentGraphStatement.builder(statement1).withTimeout(Duration.ofSeconds(20)).build(); + +assert statement2.getTraversal().equals(statement1.getTraversal()); +assert statement2.getTimeout().equals(Duration.ofSeconds(20)); // overridden by the builder +assert statement2.isIdempotent(); // because statement1 was +``` + +### Batching traversals + +[BatchGraphStatement] allows you to execute multiple mutating traversals in the same transaction. +Like other types of statements, it is immutable and thread-safe, and can be created either with a +[factory method][BatchGraphStatement.newInstance] or a [builder][BatchGraphStatement.builder]: + +```java +GraphTraversal traversal1 = g.addV("person").property("name", "batch1").property("age", 1); +GraphTraversal traversal2 = g.addV("person").property("name", "batch2").property("age", 2); + +// Each method call creates a copy: +BatchGraphStatement batch1 = BatchGraphStatement.newInstance() + .addTraversal(traversal1) + .addTraversal(traversal2); + +// Uses a single, mutable builder instance: +BatchGraphStatement batch2 = BatchGraphStatement.builder() + .addTraversal(traversal1) + .addTraversal(traversal2) + .build(); +``` + +Traversal batches are only available with DSE 6.0 or above. 
+ +### Prepared statements + +At the time of writing (DSE 6.0), prepared graph statements are not supported yet; they will be +added in a future version. + +----- + +See also the [parent page](../) for topics common to all fluent traversals. + +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/fluent/implicit/README.md b/manual/core/dse/graph/fluent/implicit/README.md new file mode 100644 index 00000000000..a282b79d983 --- /dev/null +++ b/manual/core/dse/graph/fluent/implicit/README.md @@ -0,0 +1,52 @@ +## Implicit execution + +Instead of passing traversals to the driver, you can create a *remote traversal source* connected to +the DSE cluster: + +```java +CqlSession session = CqlSession.builder().build(); + +GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session).build()); +``` + +Then build traversals from that source. 
Whenever you reach a [terminal step] \(such as `next()`, +`toList()`...), the DSE driver will be invoked under the covers: + +```java +List vertices = g.V().has("name", "marko").toList(); +``` + +This lets you use the traversal as if it were working against a local graph; all the communication +with DSE is done transparently. Note however that the returned objects (vertices, edges...) are +completely *detached*: even though they contain the complete data, modifications made to them will +not be reflected on the server side. + +Traversal sources with different configurations can easily be created through execution profiles in +the [configuration](../../../../configuration/): + +``` +datastax-java-driver { + profiles { + graph-oltp { + basic.graph.traversal-source = a + basic.graph.timeout = 30 seconds + } + } +} +``` + +Pass the profile name to the remote connection builder: + +```java +GraphTraversalSource a = DseGraph.g.withRemote( + DseGraph.remoteConnectionBuilder(session) + .withExecutionProfileName("graph-oltp") + .build()); +``` + +----- + +See also the [parent page](../) for topics common to all fluent traversals. + +[terminal step]: http://tinkerpop.apache.org/docs/current/reference/#terminal-steps diff --git a/manual/core/dse/graph/options/README.md b/manual/core/dse/graph/options/README.md new file mode 100644 index 00000000000..82957905018 --- /dev/null +++ b/manual/core/dse/graph/options/README.md @@ -0,0 +1,153 @@ +## Graph options + +There are various [configuration](../../../configuration/) options that control the execution of +graph statements. They can also be overridden programmatically on individual statements. 
+ +### Setting options + +Given the following configuration: + +``` +datastax-java-driver { + + basic.graph.timeout = 3 seconds + + profiles { + graph-oltp { + basic.graph.timeout = 30 seconds + } + } +} +``` + +This statement inherits the timeout from the default profile: + +```java +ScriptGraphStatement statement = ScriptGraphStatement.newInstance("g.V().next()"); +assert statement.getTimeout().equals(Duration.ofSeconds(3)); +``` + +This statement inherits the timeout from a named profile: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("g.V().next()").setExecutionProfileName("graph-oltp"); +assert statement.getTimeout().equals(Duration.ofSeconds(30)); +``` + +This statement overrides the timeout programmatically; that takes precedence over the configuration: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("g.V().next()").setTimeout(Duration.ofSeconds(5)); +``` + +Programmatic overrides are also available in statement builders: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.builder("g.V().next()").withTimeout(Duration.ofSeconds(5)).build(); +``` + +Whether you use the configuration or programmatic API depends on the use case; in general, we +recommend trying execution profiles first, if you can identify static categories of statements that +share the same options. Resort to the API for specific options that only apply to a single +statement, or if the value is only known at runtime. + +### Available options + +#### Graph name + +The `basic.graph.name` option defines the name of the graph you're querying. + +This doesn't have to be set all the time. In fact, some queries explicitly require no graph name, +for example those that access the `system` query. 
If you try to execute them with a graph name set, +you'll get an error: + +```java +// Don't do this: executing a system query with the graph name set +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("system.graph('demo').ifNotExists().create()") + .setGraphName("test"); +session.execute(statement); +// InvalidQueryException: No such property: system for class: Script2 +``` + +If you set the graph name globally in the configuration, you'll need to unset it for system queries. +To do that, set it to `null`, or use the more explicit equivalent `is-system-query`: + +``` +datastax-java-driver { + basic.graph.name = my_graph + + profiles { + graph-system { + # Don't inherit the graph name here + basic.graph.is-system-query = true + } + } +} +``` + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("system.graph('demo').ifNotExists().create()") + .setExecutionProfileName("graph-system"); + +// Programmatic alternative: +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("system.graph('demo').ifNotExists().create()") + .setSystemQuery(true); +``` + +#### Traversal source + +`basic.graph.traversal-source` defines the underlying engine used to create traversals. + +Set this to `g` for regular OLTP queries, or `a` for OLAP queries. + +#### Consistency level + +Graph statements use the same option as CQL: `basic.request.consistency`. + +However, DSE graph also provides a finer level of tuning: a single traversal may produce multiple +internal storage queries, some of which are reads, and others writes. The read and write consistency +levels can be configured independently with `basic.graph.read-consistency` and +`basic.graph.write-consistency`. + +If any of these is set, it overrides the consistency level for that type of query; otherwise, the +global option is used. + +#### Timeout + +Graph statements have a dedicated timeout option: `basic.graph.timeout`. 
This is because the timeout +behaves a bit differently with DSE graph: by default, it is unset and the driver will wait until the +server replies (there are server-side timeouts that limit how long the request will take). + +If a timeout is defined on the client, the driver will fail the request after that time, without +waiting for a reply. But the timeout is also sent alongside the initial request, and the server will +adjust its own timeout to ensure that it doesn't keep working for a result that the client is no +longer waiting for. + +#### Graph protocol version + +DSE graph relies on the Cassandra native protocol, but it extends it with a sub-protocol that has +its own versioning scheme. + +`advanced.graph.sub-protocol` controls the graph protocol version to use for each statement. It is +unset by default, and you should almost never have to change it: the driver sets it automatically +based on the information it knows about the server. + +There is one exception: if you use the [script API](../script/) against a legacy DSE version (5.0.3 +or older), the driver infers the wrong protocol version. This manifests as a `ClassCastException` +when you try to deserialize complex result objects, such as vertices: + +```java +GraphResultSet result = + session.execute(ScriptGraphStatement.newInstance("g.V().next()")); +result.one().asVertex(); +// ClassCastException: java.util.LinkedHashMap cannot be cast to org.apache.tinkerpop.gremlin.structure.Vertex +``` + +If you run into that situation, force the sub-protocol to `graphson-1.0` for script statements +(that's not necessary for fluent statements). 
\ No newline at end of file diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md new file mode 100644 index 00000000000..064a2946a53 --- /dev/null +++ b/manual/core/dse/graph/results/README.md @@ -0,0 +1,144 @@ +## Handling graph results + +[Script queries](../script/) and [explicit fluent traversals](../fluent/explicit/) return graph +result sets, which are essentially iterables of [GraphNode]. + +### Synchronous / asynchronous result + +Like their CQL counterparts, graph result sets come in two forms, depending on the way the query +was executed. + +* `session.execute` returns a [GraphResultSet]. It can be iterated directly, and will return the + whole result set, triggering background fetches if the query is paged: + + ```java + for (GraphNode n : resultSet) { + System.out.println(n); + } + ``` + +* `session.executeAsync` returns an [AsyncGraphResultSet]. It only holds the current page of + results, accessible via the `currentPage()` method. If the query is paged, the next pages must be + fetched explicitly using the `hasMorePages()` and `fetchNextPage()` methods. See [Asynchronous + paging](../../../paging/#asynchronous-paging) for more details about how to work with async + types. + +*Note: at the time of writing (DSE 6.0), graph queries are never paged. Results are always returned +as a single page. However, paging is on the roadmap for a future DSE version; the driver APIs +reflect that, to avoid breaking changes when the feature is introduced.* + +Both types have a `one()` method, to use when you know there is exactly one node, or are only +interested in the first one: + +```java +GraphNode n = resultSet.one(); +``` + +### Working with graph nodes + +[GraphNode] wraps the responses returned by the server. 
Use the `asXxx()` methods to coerce a node +to a specific type: + +```java +FluentGraphStatement statement = FluentGraphStatement.newInstance(g.V().count()); +GraphNode n = session.execute(statement).one(); +System.out.printf("The graph has %s vertices%n", n.asInt()); +``` + +If the result is an array or "object" (in the JSON sense: a collection of named fields), you can +iterate its children: + +```java +if (n.isList()) { + for (int i = 0; i < n.size(); i++) { + GraphNode child = n.getByIndex(i); + System.out.printf("Element at position %d: %s%n", i, child); + } + + // Alternatively, convert to a list: + List l = n.asList(); +} + +if (n.isMap()) { + for (Object key : n.keys()) { + System.out.printf("Element at key %s: %s%n", key, n.getByKey(key)); + } + + // Alternatively, convert to a map: + Map m = n.asMap(); +} +``` + +#### Graph structural types + +If the traversal returns graph elements (like vertices and edges), the results can be converted to +the corresponding TinkerPop types: + +```java +GraphNode n = session.execute(FluentGraphStatement.newInstance( + g.V().hasLabel("test_vertex") +)).one(); +Vertex vertex = n.asVertex(); + +n = session.execute(FluentGraphStatement.newInstance( + g.V().hasLabel("test_vertex").outE() +)).one(); +Edge edge = n.asEdge(); + +n = session.execute(FluentGraphStatement.newInstance( + g.V().hasLabel("test_vertex") + .outE() + .inV() + .path() +)).one(); +Path path = n.asPath(); + +n = session.execute(FluentGraphStatement.newInstance( + g.V().hasLabel("test_vertex") + .properties("name") +)).one(); +// .properties() returns a list of properties, so we get the first one and transform it as a +// VertexProperty +VertexProperty vertexProperty = n.getByIndex(0).asVertexProperty(); +``` + +#### Data type compatibility matrix + +Dse graph exposes several [data types][DSE data types] when defining a schema for a graph. They +translate into specific Java classes when the data is returned from the server. 
+ +Here is an exhaustive compatibility matrix (for DSE 6.0): + +| DSE graph | Java driver | +|------------|---------------------| +| bigint | Long | +| blob | byte[] | +| boolean | Boolean | +| date | java.time.LocalDate | +| decimal | BigDecimal | +| double | Double | +| duration | java.time.Duration | +| float | Float | +| inet | InetAddress | +| int | Integer | +| linestring | LineString | +| point | Point | +| polygon | Polygon | +| smallint | Short | +| text | String | +| time | java.time.LocalTime | +| timestamp | java.time.Instant | +| uuid | UUID | +| varint | BigInteger | + +If a type doesn't have a corresponding `asXxx()` method, use the variant that takes a type token: + +```java +UUID uuid = graphNode.as(UUID.class); +``` + +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html + +[DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md new file mode 100644 index 00000000000..48d85246628 --- /dev/null +++ b/manual/core/dse/graph/script/README.md @@ -0,0 +1,106 @@ +## Script API + +The script API handles Gremlin-groovy requests provided as plain Java strings. 
To execute a script, +wrap it into a [ScriptGraphStatement] and pass it to the session: + +```java +CqlSession session = CqlSession.builder().build(); + +String groovyScript = "system.graph('demo').ifNotExists().create()"; +ScriptGraphStatement statement = ScriptGraphStatement.newInstance(groovyScript); +session.execute(statement); +``` + +### Creating script statements + +#### Factory method + +As demonstrated above, the simplest way to create a script statement is to pass the Gremlin-groovy +string to [ScriptGraphStatement.newInstance]. + +The default implementation returned by the driver is **immutable**; if you call additional methods +on the statement -- for example to set [options](../options/) -- each method call will create a new +copy: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("system.graph('demo').ifNotExists().create()"); +ScriptGraphStatement statement2 = statement.setTimeout(Duration.ofSeconds(10)); + +assert statement2 != statement; +``` + +Immutability is good because it makes statements inherently **thread-safe**: you can share them in +your application and access them concurrently without any risk. + +On the other hand, it means a lot of intermediary copies if you often call methods on your +statements. Modern VMs are normally good at dealing with such short-lived objects, but if you're +worried about the performance impact, consider using a builder instead. 
+ +#### Builder + +Instead of creating a statement directly, you can pass your Gremlin-groovy string to +[ScriptGraphStatement.builder], chain method calls to set options, and finally call `build()`: + +```java +ScriptGraphStatement statement1 = + ScriptGraphStatement.builder("system.graph('demo').ifNotExists().create()") + .withTimeout(Duration.ofSeconds(10)) + .withIdempotence(true) + .build(); +``` + +The builder implementation is **mutable**: every method call returns the same object, only one +builder instance will be created no matter how many methods you call on it. As a consequence, the +builder object is **not thread-safe**. + +You can also initialize a builder from an existing statement: it will inherit all of its options. + +```java +ScriptGraphStatement statement2 = + ScriptGraphStatement.builder(statement1).withTimeout(Duration.ofSeconds(20)).build(); + +assert statement2.getScript().equals(statement1.getScript()); +assert statement2.getTimeout().equals(Duration.ofSeconds(20)); // overridden by the builder +assert statement2.isIdempotent(); // because statement1 was +``` + +### Parameters + +Gremlin-groovy scripts accept parameters, which are always named. Note that, unlike in CQL, +placeholders are not prefixed with ":". + +To manage parameters on an existing statement, use `setQueryParam` / `removeQueryParam`: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.newInstance("g.addV(label, vertexLabel)") + .setQueryParam("vertexLabel", "test_vertex_2"); +``` + +On the builder, use `withQueryParam` / `withoutQueryParams`: + +```java +ScriptGraphStatement statement = + ScriptGraphStatement.builder("g.addV(label, vertexLabel)") + .withQueryParam("vertexLabel", "test_vertex_2") + .build(); +``` + +Alternatively, `withQueryParams` takes multiple parameters as a map. + +### Use cases for the script API + +Building requests as Java strings can be unwieldy, especially for long scripts. 
Besides, the script +API is a bit less performant on the server side. Therefore we recommend the +[Fluent API](../fluent/) instead for graph traversals. + +Note however that some types of queries can only be performed through the script API: + +* system queries (e.g. creating / dropping a graph); +* configuration; +* DSE graph schema queries. + +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index e6dfe56713c..fa94ff80899 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -360,12 +360,12 @@ If all of these metrics are disabled, you can remove the dependency: #### Jackson -[Jackson](https://github.com/FasterXML/jackson) is used to parse configuration files when connecting -to DataStax Apache Cassandra® as a Service. +[Jackson](https://github.com/FasterXML/jackson) is used: -If you don't use that feature (that is, if you neither call -`SessionBuilder.withCloudSecureConnectBundle()` nor set the `basic.cloud.secure-connect-bundle` -configuration option), you can safely exclude the dependency: +* when connecting to [Datastax Apollo](../../cloud/); +* when Insights monitoring is enabled. 
+ +If you don't use either of those features, you can safely exclude the dependency: ```xml @@ -373,6 +373,10 @@ configuration option), you can safely exclude the dependency: java-driver-core ${driver.version} + + com.fasterxml.jackson.core + jackson-core + com.fasterxml.jackson.core jackson-databind @@ -381,6 +385,51 @@ configuration option), you can safely exclude the dependency: ``` +#### Esri + +Our [geospatial types](../dse/geotypes/) implementation is based on the [Esri Geometry +API](https://github.com/Esri/geometry-api-java). + +If you don't use geospatial types anywhere in your application, you can exclude the dependency: + +```xml + + com.datastax.oss + java-driver-core + ${driver.version} + + + com.esri.geometry + esri-geometry-api + + + +``` + +#### TinkerPop + +[Apache TinkerPop™](http://tinkerpop.apache.org/) is used in our [graph API](../dse/graph/). + +If you don't use DSE graph at all, you can exclude the dependencies: + +```xml + + com.datastax.oss + java-driver-core + ${driver.version} + + + org.apache.tinkerpop + gremlin-core + + + org.apache.tinkerpop + tinkergraph-gremlin + + + +``` + #### Documenting annotations The driver team uses annotations to document certain aspects of the code: diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 50ae9c41ebd..357cc26f479 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -50,6 +50,15 @@ nodes. The exact definition of `LOCAL` and `REMOTE` is left to the interpretatio but in general it represents the proximity to the client, and `LOCAL` nodes will be prioritized as coordinators. They also influence pooling options. +[Node#getExtras()] contains additional free-form properties. This is intended for future evolution +or custom driver extensions. 
In particular, if the driver is connected to Datastax Enterprise, the +map will contain additional information under the keys defined in [DseNodeProperties]: + +```java +Object rawDseVersion = node.getExtras().get(DseNodeProperties.DSE_VERSION); +Version dseVersion = (rawDseVersion == null) ? null : (Version) rawDseVersion; +``` + If you need to follow node state changes, you don't need to poll the metadata manually; instead, you can register a listener to get notified when changes occur: @@ -109,9 +118,11 @@ the source code. [Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- [Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- [Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- [Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- [Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- [NodeState]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeState.html [NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html [NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- \ No newline at end of file 
+[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 753c21ba538..48ed89dfe56 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -35,6 +35,38 @@ immutable; if you need to get the latest schema, be sure to call reference). +### DSE + +All schema metadata interfaces accessible through `Metadata.getKeyspaces()` have a DSE-specific +subtype in the package [com.datastax.dse.driver.api.core.metadata.schema]. The objects returned by +the DSE driver implement those types, so you can safely cast: + +```java +for (KeyspaceMetadata keyspace : session.getMetadata().getKeyspaces().values()) { + DseKeyspaceMetadata dseKeyspace = (DseKeyspaceMetadata) keyspace; +} +``` + +If you're calling a method that returns an optional and want to keep the result wrapped, use this +pattern: + +```java +Optional f = + session + .getMetadata() + .getKeyspace("ks") + .flatMap(ks -> ks.getFunction("f")) + .map(DseFunctionMetadata.class::cast); +``` + +For future extensibility, there is a `DseXxxMetadata` subtype for every OSS type. But currently (DSE +6.7), the only types that really add extra information are: + +* [DseFunctionMetadata]: add support for the `DETERMINISTIC` and `MONOTONIC` keywords; +* [DseAggregateMetadata]: add support for the `MONOTONIC` keyword. + +All other types (keyspaces, tables, etc.) are identical to their OSS counterparts. 
+ ### Notifications If you need to follow schema changes, you don't need to poll the metadata manually; instead, @@ -235,5 +267,8 @@ take a look at the [Performance](../../performance/#schema-updates) page for a f [Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- [SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- [ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md new file mode 100644 index 00000000000..fca2d936d93 --- /dev/null +++ b/manual/core/reactive/README.md @@ -0,0 +1,383 @@ +## Reactive Style Programming + +The driver provides built-in support for reactive queries. The [CqlSession] interface extends +[ReactiveSession], which adds specialized methods to execute requests expressed in [reactive +streams]. + +Notes: + +* reactive capabilities require the [Reactive Streams API] to be present on the classpath. 
The + driver has a dependency on that library, but if your application does not use reactive queries at + all, it is possible to exclude it to minimize the number of runtime dependencies. If the library + cannot be found at runtime, reactive queries won't be available and a warning will be logged, but + the driver will otherwise operate normally (this is also valid for OSGi deployments). +* for historical reasons, reactive-related driver types reside in a package prefixed with `dse`; + however, reactive queries also work with regular Cassandra. + +### Overview + +`ReactiveSession` exposes two public methods: + +```java +ReactiveResultSet executeReactive(String query); +ReactiveResultSet executeReactive(Statement statement); +``` + +Both methods return a [ReactiveResultSet], which is the reactive streams version of a regular +[ResultSet]. In other words, a `ReactiveResultSet` is a [Publisher] for query results. + +When subscribing to and consuming from a `ReactiveResultSet`, there are two important caveats to +bear in mind: + +1. By default, all `ReactiveResultSet` implementations returned by the driver are cold, unicast, + single-subscription-only publishers. In other words, they do not support multiple subscribers; + consider caching the results produced by such publishers if you need to consume them by more than + one downstream subscriber. We provide a few examples of caching further in this document. +2. Also, note that reactive result sets may emit items to their subscribers on an internal driver IO + thread. Subscriber implementors are encouraged to abide by [Reactive Streams Specification rule + 2.2] and avoid performing heavy computations or blocking calls inside `onNext` calls, as doing so + could slow down the driver and impact performance. Instead, they should asynchronously dispatch + received signals to their processing logic. 
+ +### Basic usage + +The examples in this page make use of [Reactor], a popular reactive library, but they should be +easily adaptable to any other library implementing the concepts of reactive streams. + +#### Reading in reactive style + +The following example reads from a table and prints all the returned rows to the console. In case of +error, a `DriverException` is thrown and its stack trace is printed to standard error: + +```java +try (DseSession session = ...) { + Flux.from(session.executeReactive("SELECT ...")) + .doOnNext(System.out::println) + .blockLast(); +} catch (DriverException e) { + e.printStackTrace(); +} +``` + +#### Writing in reactive style + +The following example inserts rows into a table after printing the queries to the console, stopping +at the first error, if any. Again, in case of error, a `DriverException` is thrown: + +```java +try (DseSession session = ...) { + Flux.just("INSERT ...", "INSERT ...", "INSERT ...", ...) + .doOnNext(System.out::println) + .flatMap(session::executeReactive) + .blockLast(); +} catch (DriverException e) { + e.printStackTrace(); +} +``` + +Note that when a statement is executed reactively, the actual request is only triggered when the +`ReactiveResultSet` is subscribed to; in other words, when the `executeReactive` method returns, +_nothing has been executed yet_. This is why the write example above uses a `flatMap` operator, +which takes care of subscribing to each `ReactiveResultSet` returned by successive calls to +`session.executeReactive`.
A common pitfall is to use an operator that silently ignores the returned +`ReactiveResultSet`; for example, the code below seems correct, but will not execute any query: + +```java +// DON'T DO THIS +Flux.just("INSERT INTO ...") + // The returned ReactiveResultSet is not subscribed to + .doOnNext(session::executeReactive) + .blockLast(); +``` + +Since a write query does not return any rows, it may appear difficult to count the number of rows +written to the database. Fortunately, most reactive libraries have operators that are useful in these +scenarios. The following example demonstrates how to achieve this goal with Reactor: + +```java +Flux> stmts = ...; +long count = + stmts + .flatMap( + stmt -> + Flux.from(session.executeReactive(stmt)) + // dummy cast, since result sets are always empty for write queries + .cast(Integer.class) + // flow will always be empty, so '1' will be emitted for each query + .defaultIfEmpty(1)) + .count() + .block(); +System.out.printf("Executed %d write statements%n", count); +``` + +### Accessing query metadata + +`ReactiveResultSet` exposes useful information about request execution and query metadata: + +```java +Publisher getColumnDefinitions(); +Publisher getExecutionInfos(); +Publisher wasApplied(); +``` + +Refer to the javadocs of [getColumnDefinitions], +[getExecutionInfos] and +[wasApplied] for more information on these methods. + +To inspect the contents of the above publishers, simply subscribe to them. Note that these publishers cannot complete before the query itself completes; if the query fails, then these publishers will fail with the same error.
+ +The following example executes a query, then prints all the available metadata to the console: + +```java +ReactiveResultSet rs = session.executeReactive("SELECT ..."); +// execute the query first +Flux.from(rs).blockLast(); +// then retrieve query metadata +System.out.println("Column definitions: "); +Mono.from(rs.getColumnDefinitions()).doOnNext(System.out::println).block(); +System.out.println("Execution infos: "); +Flux.from(rs.getExecutionInfos()).doOnNext(System.out::println).blockLast(); +System.out.println("Was applied: "); +Mono.from(rs.wasApplied()).doOnNext(System.out::println).block(); +``` + +Note that it is also possible to inspect query metadata at row level. Each row returned by a reactive query execution implements [`ReactiveRow`][ReactiveRow], the reactive equivalent of a [`Row`][Row]. + +`ReactiveRow` exposes the same kind of query metadata and execution info found in `ReactiveResultSet`, but for each individual row: + +```java +ColumnDefinitions getColumnDefinitions(); +ExecutionInfo getExecutionInfo(); +boolean wasApplied(); +``` + +Refer to the javadocs of [`getColumnDefinitions`][ReactiveRow.getColumnDefinitions], +[`getExecutionInfo`][ReactiveRow.getExecutionInfo] and [`wasApplied`][ReactiveRow.wasApplied] for +more information on these methods. 
+ +The following example executes a query and, for each row returned, prints the coordinator that +served that row, then retrieves all the coordinators that were contacted to fulfill the query and +prints them to the console: + +```java +Iterable coordinators = Flux.from(session.executeReactive("SELECT ...")) + .doOnNext( + row -> + System.out.printf( + "Row %s was obtained from coordinator %s%n", + row, + row.getExecutionInfo().getCoordinator())) + .map(ReactiveRow::getExecutionInfo) + // dedup by coordinator (note: this is dangerous on a large result set) + .groupBy(ExecutionInfo::getCoordinator) + .map(GroupedFlux::key) + .toIterable(); +System.out.println("Contacted coordinators: " + coordinators); +``` + +### Advanced topics + +#### Applying backpressure + +One of the key features of reactive programming is backpressure. + +Unfortunately, the Cassandra native protocol does not offer proper support for exchanging +backpressure information between client and server over the network. Cassandra is able, since +version 3.10, to [throttle clients](https://issues.apache.org/jira/browse/CASSANDRA-9318) but at the +time of writing, there is no proper [client-facing backpressure +mechanism](https://issues.apache.org/jira/browse/CASSANDRA-11380) available. + +When reading from Cassandra, this shouldn't however be a problem for most applications. Indeed, in a +read scenario, Cassandra acts as a producer, and the driver is a consumer; in such a setup, if a +downstream subscriber is not able to cope with the throughput, the driver would progressively adjust +the rate at which it requests more pages from the server, thus effectively regulating the server +throughput to match the subscriber's. The only caveat is if the subscriber is really too slow, which +could eventually trigger a query timeout, be it on the client side (`DriverTimeoutException`), or on +the server side (`ReadTimeoutException`). 
+ +When writing to Cassandra, the lack of backpressure communication between client and server is more +problematic. Indeed in a write scenario, the driver acts as a producer, and Cassandra is a consumer; +in such a setup, if an upstream producer generates too much data, the driver would blindly send the +write statements to the server as quickly as possible, eventually causing the cluster to become +overloaded or even crash. This usually manifests itself with errors like `WriteTimeoutException`, or +`OverloadedException`. + +It is strongly advised for users to limit the concurrency at which write statements are executed in +write-intensive scenarios. A simple way to achieve this is to use the `flatMap` operator, which, in +most reactive libraries, has an overloaded form that takes a parameter that controls the desired +amount of concurrency. The following example executes a flow of statements with a maximum +concurrency of 10, leveraging the `concurrency` parameter of Reactor's `flatMap` operator: + +```java +Flux> stmts = ...; +stmts.flatMap(session::executeReactive, 10).blockLast(); +``` + +In the example above, the `flatMap` operator will subscribe to at most 10 `ReactiveResultSet` +instances simultaneously, effectively limiting the number of concurrent in-flight requests to 10. +This is usually enough to prevent data from being written too fast. More sophisticated operators are +capable of rate-limiting or throttling the execution of a flow; for example, Reactor offers a +`delayElements` operator that rate-limits the throughput of its upstream publisher. Consult the +documentation of your reactive library for more information. + +As a last resort, it is also possible to limit concurrency at driver level, for example using the +driver's built-in [request throttling] mechanism, although this is usually not required in reactive +applications. See "[Managing concurrency in asynchronous query execution]" in the Developer Guide +for a few examples. 
+ +#### Caching query results + +As stated above, a `ReactiveResultSet` can only be subscribed once. This is an intentional design +decision, because otherwise users could inadvertently trigger a spurious execution of the same query +again when subscribing for the second time to the same `ReactiveResultSet`. + +Let's suppose that we want to compute both the average and the sum of all values from a table +column. The most naive approach would be to create two flows and subscribe to both: + + ```java +// DON'T DO THIS +ReactiveResultSet rs = session.executeReactive("SELECT n FROM ..."); +double avg = Flux.from(rs) + .map(row -> row.getLong(0)) + .reduce(0d, (a, b) -> (a + b / 2.0)) + .block(); +// will fail with IllegalStateException +long sum = Flux.from(rs) + .map(row -> row.getLong(0)) + .reduce(0L, (a, b) -> a + b) + .block(); + ``` + +Unfortunately, the second `Flux` above will terminate immediately with an `onError` signal +encapsulating an `IllegalStateException`, since `rs` was already subscribed to. + +To circumvent this limitation, while still avoiding querying the table twice, the easiest technique +consists in using the `cache` operator that most reactive libraries offer: + +```java +Flux rs = Flux.from(session.executeReactive("SELECT n FROM ...")) + .map(row -> row.getLong(0)) + .cache(); +double avg = rs + .reduce(0d, (a, b) -> (a + b / 2.0)) + .block(); +long sum = rs + .reduce(0L, (a, b) -> a + b) + .block(); +``` + +The above example works just fine. + +The `cache` operator will subscribe at most once to the `ReactiveResultSet`, cache the results, and +serve the cached results to downstream subscribers. This is obviously only possible if your result +set is small and can fit entirely in memory. + +If caching is not an option, most reactive libraries also offer operators that multicast their +upstream subscription to many subscribers on the fly.
+ +The above example could be rewritten with a different approach as follows: + +```java +Flux rs = Flux.from(session.executeReactive("SELECT n FROM ...")) + .map(row -> row.getLong(0)) + .publish() // multicast upstream to all downstream subscribers + .autoConnect(2); // wait until two subscribers subscribe +long sum = rs + .reduce(0L, (a, b) -> a + b) + .block(); +double avg = rs + .reduce(0d, (a, b) -> (a + b / 2.0)) + .block(); +``` + +In the above example, the `publish` operator multicasts every `onNext` signal to all of its +subscribers; and the `autoConnect(2)` operator instructs `publish` to wait until it gets 2 +subscriptions before subscribing to its upstream source (and triggering the actual query execution). + +This approach should be the preferred one for large result sets since it does not involve caching +results in memory. + +#### Resuming from and retrying after failed queries + +When executing a flow of statements, any failed query execution would trigger an `onError` signal +and terminate the subscription immediately, potentially preventing subsequent queries from being +executed at all. + +If this behavior is not desired, it is possible to mimic the behavior of a fail-safe system. This +usually involves the usage of operators such as `onErrorReturn` or `onErrorResume`. Consult your +reactive library documentation to find out which operators allow you to intercept failures. 
+ +The following example executes a flow of statements; for each failed execution, the stack trace is +printed to standard error and, thanks to the `onErrorResume` operator, the error is completely +ignored and the flow execution resumes normally: + +```java +Flux> stmts = ...; +stmts.flatMap( + statement -> + Flux.from(session.executeReactive(statement)) + .doOnError(Throwable::printStackTrace) + .onErrorResume(error -> Mono.empty())) + .blockLast(); +``` + +The following example expands on the previous one: for each failed execution, at most 3 retries are +attempted if the error was an `UnavailableException`, then, if the query wasn't successful after +retrying, a message is logged. Finally, all the errors are collected and the total number of failed +queries is printed to the console: + +```java +Flux> statements = ...; +long failed = statements.flatMap( + stmt -> + Flux.defer(() -> session.executeReactive(stmt)) + // retry at most 3 times on Unavailable + .retry(3, UnavailableException.class::isInstance) + // handle errors + .doOnError( + error -> { + System.err.println("Statement failed: " + stmt); + error.printStackTrace(); + }) + // Collect errors and discard all returned rows + .ignoreElements() + .cast(Long.class) + .onErrorReturn(1L)) + .sum() + .block(); +System.out.println("Total failed queries: " + failed); +``` + +The example above uses `Flux.defer()` to wrap the call to `session.executeReactive()`. This is +required because, as mentioned above, the driver always creates single-subscription-only publishers. +Such publishers are not compatible with operators like `retry` because these operators sometimes +subscribe more than once to the upstream publisher, thus causing the driver to throw an exception.
+Hopefully it's easy to solve this issue, and that's exactly what the `defer` operator is designed +for: each subscription to the `defer` operator triggers a distinct call to +`session.executeReactive()`, thus causing the session to re-execute the query and return a brand-new +publisher at every retry. + +Note that the driver already has a [built-in retry mechanism] that can transparently retry failed +queries; the above example should be seen as a demonstration of application-level retries, when a +more fine-grained control of what should be retried, and how, is required. + +[reactive streams]: https://en.wikipedia.org/wiki/Reactive_Streams +[Reactive Streams API]: https://github.com/reactive-streams/reactive-streams-jvm +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html +[Publisher]: https://www.reactive-streams.org/reactive-streams-1.0.2-javadoc/org/reactivestreams/Publisher.html +[Reactive Streams Specification rule 2.2]: https://github.com/reactive-streams/reactive-streams-jvm#2.2 +[Reactor]: https://projectreactor.io/ +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: 
https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[built-in retry mechanism]: https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/retries/ +[request throttling]: https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/throttling/ +[Managing concurrency in asynchronous query execution]: https://docs.datastax.com/en/devapp/doc/devapp/driverManagingConcurrency.html] \ No newline at end of file diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 429acce5e28..07e192ba0f0 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -38,11 +38,15 @@ try (CqlSession session = CqlSession.builder().build()) { #### Fluent API -All the starting methods are centralized in the [QueryBuilder] class. To get started, add the -following import: +All the starting methods are centralized in the [QueryBuilder] and [SchemaBuilder] classes. 
To get +started, add one of the following imports: ```java +// For DML queries, such as SELECT import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; + +// For DDL queries, such as CREATE TABLE +import static com.datastax.oss.driver.api.querybuilder.SchemaBuilder.*; ``` Choose the method matching your desired statement, for example `selectFrom`. Then use your IDE's @@ -66,6 +70,30 @@ SimpleStatement statement = select.build(); SimpleStatementBuilder builder = select.builder(); ``` +#### Datastax Enterprise + +The driver provides two additional entry points for DSE-specific queries: [DseQueryBuilder] and +[DseSchemaBuilder]. They extend their respective non-DSE counterparts, so anything that is available +on the default query builder can also be done with the DSE query builder. + +We recommend that you use those classes if you are targeting Datastax Enterprise; they will be +enriched in the future if DSE adds custom CQL syntax. + +Currently, the only difference is the support for the `DETERMINISTIC` and `MONOTONIC` keywords when +generating `CREATE FUNCTION` or `CREATE AGGREGATE` statements: + +```java +import static com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder.createDseFunction; + +createDseFunction("func1") + .withParameter("param1", DataTypes.INT) + .returnsNullOnNull() + .returnsType(DataTypes.INT) + .deterministic() + .monotonic(); +// CREATE FUNCTION func1 (param1 int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC +``` + #### Immutability All types in the fluent API are immutable. 
This means that every step creates a new object: @@ -187,3 +215,5 @@ For a complete tour of the API, browse the child pages in this manual: [QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html [SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html [CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html From 843450fff004ffe4f5664c3e149a53a01cb64f3b Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 14 Nov 2019 14:47:35 -0800 Subject: [PATCH 225/979] Add upgrade instructions --- upgrade_guide/README.md | 63 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index e3d315f9180..3d566d95832 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,68 @@ ## Upgrade guide +### 4.4.0 + +Datastax Enteprise support is now available directly in the main driver. There is no longer a +separate DSE driver. + +#### For Apache Cassandra® users + +The great news is that [reactive execution](../manual/core/reactive/) is now available for everyone. +See the `CqlSession.executeReactive` methods. + +Apart from that, the only visible change is that DSE-specific features are now exposed in the API: + +* new execution methods: `CqlSession.executeGraph`, `CqlSession.executeContinuously*`. They all + have default implementations so this doesn't break binary compatibility. You can just ignore them. +* new driver dependencies: Tinkerpop, ESRI. 
You can exclude them manually if you want to keep your + classpath lean, the rest of the driver will still work. See the + [Integration](../manual/core/integration/#driver-dependencies) page. + +#### For Datastax Enterprise users + +Adjust your Maven coordinates to use the unified artifact: + +```xml + + + com.datastax.dse + dse-java-driver-core + 2.3.0 + + + + + com.datastax.oss + java-driver-core + 4.4.0 + + + +``` + +The new driver is a drop-in replacement for the DSE driver. Note however that we've deprecated a few +DSE-specific types in favor of their OSS equivalents. They still work, so you don't need to make the +changes right away; but you will get deprecation warnings: + +* `DseSession`: use `CqlSession` instead, it can now do everything that a DSE session does. This + also applies to the builder: + + ```java + // Replace: + DseSession session = DseSession.builder().build() + + // By: + CqlSession session = CqlSession.builder().build() + ``` +* `DseDriverConfigLoader`: the driver no longer needs DSE-specific config loaders. All the factory + methods in this class now redirect to `DriverConfigLoader`. On that note, `dse-reference.conf` + does not exist anymore, all the driver defaults are now in + [reference.conf](../manual/core/configuration/reference/). +* plain-text authentication: there is now a single implementation that works with both Cassandra and + DSE. If you used `DseProgrammaticPlainTextAuthProvider`, replace it by + `PlainTextProgrammaticAuthProvider`. Similarly, if you wrote a custom implementation by + subclassing `DsePlainTextAuthProviderBase`, extend `PlainTextAuthProviderBase` instead. 
+ ### 4.1.0 #### Object mapper From f9f14841fafdaa0118d17751ace09b328965c7ce Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 14 Nov 2019 15:25:37 -0800 Subject: [PATCH 226/979] Fix distribution to include all modules --- distribution/src/assembly/binary-tarball.xml | 60 +++++++++++++++++++- 1 file changed, 59 insertions(+), 1 deletion(-) diff --git a/distribution/src/assembly/binary-tarball.xml b/distribution/src/assembly/binary-tarball.xml index b19b2000216..78e6a0141ba 100644 --- a/distribution/src/assembly/binary-tarball.xml +++ b/distribution/src/assembly/binary-tarball.xml @@ -41,6 +41,8 @@ moduleSet targets core only --> com.datastax.oss:java-driver-query-builder + com.datastax.oss:java-driver-mapper-runtime + com.datastax.oss:java-driver-mapper-processor true @@ -60,10 +62,64 @@ com.datastax.oss:java-driver-core + com.datastax.oss:java-driver-mapper-runtime + com.datastax.oss:java-driver-mapper-processor com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations - + com.github.spotbugs:spotbugs-annotations + + true + + + + + + + + true + + com.datastax.oss:java-driver-mapper-runtime + + + lib/mapper-runtime + false + + + + com.datastax.oss:java-driver-core + com.datastax.oss:java-driver-query-builder + com.datastax.oss:java-driver-mapper-processor + + com.datastax.oss:java-driver-shaded-guava + com.github.stephenc.jcip:jcip-annotations + com.github.spotbugs:spotbugs-annotations + + true + + + + + + + + true + + com.datastax.oss:java-driver-mapper-processor + + + lib/mapper-processor + false + + + + com.datastax.oss:java-driver-core + com.datastax.oss:java-driver-query-builder + com.datastax.oss:java-driver-mapper-runtime + + com.datastax.oss:java-driver-shaded-guava + com.github.stephenc.jcip:jcip-annotations + com.github.spotbugs:spotbugs-annotations true @@ -76,6 +132,8 @@ com.datastax.oss:java-driver-core com.datastax.oss:java-driver-query-builder + com.datastax.oss:java-driver-mapper-runtime + 
com.datastax.oss:java-driver-mapper-processor false From 01a223db42088ffc38cdf995658d448f6c45bc20 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 25 Nov 2019 08:56:37 -0800 Subject: [PATCH 227/979] Fix package of DseSessionMetric This is a breaking change compared to DSE driver 2.3, but this was a legitimate mistake and it's likely that the enum is referenced directly in 3rd-party code. --- .../dse/driver/{ => api/core/metrics}/DseSessionMetric.java | 2 +- .../core/cql/continuous/ContinuousCqlRequestHandler.java | 2 +- .../driver/internal/core/metrics/DropwizardMetricsFactory.java | 2 +- .../internal/core/metrics/DropwizardSessionMetricUpdater.java | 2 +- .../dse/driver/api/core/cql/continuous/ContinuousPagingIT.java | 2 +- .../driver/api/core/cql/continuous/ContinuousPagingITBase.java | 2 +- .../cql/continuous/reactive/ContinuousPagingReactiveIT.java | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) rename core/src/main/java/com/datastax/dse/driver/{ => api/core/metrics}/DseSessionMetric.java (97%) diff --git a/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java similarity index 97% rename from core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java rename to core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java index 8b9e9f2126b..10cde726c5b 100644 --- a/core/src/main/java/com/datastax/dse/driver/DseSessionMetric.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.dse.driver; +package com.datastax.dse.driver.api.core.metrics; import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index b266d316b5c..8b9977df614 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -15,10 +15,10 @@ */ package com.datastax.dse.driver.internal.core.cql.continuous; -import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.dse.driver.internal.core.DseProtocolFeature; import com.datastax.dse.driver.internal.core.cql.DseConversions; import com.datastax.dse.protocol.internal.request.Revise; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index e2f72e387ab..e3a27165594 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -16,7 +16,7 @@ package com.datastax.oss.driver.internal.core.metrics; import com.codahale.metrics.MetricRegistry; -import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import 
com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java index 0b9b90c661e..17ba8db1613 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java @@ -17,8 +17,8 @@ import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; -import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index e87a3f546bc..92bfaa42e06 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -18,8 +18,8 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Fail.fail; -import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.CqlSession; import 
com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.Version; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java index 38ffb2f4957..a4c937d9311 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java @@ -18,8 +18,8 @@ import static org.assertj.core.api.Assertions.assertThat; import com.codahale.metrics.Timer; -import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.BatchStatement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java index 5ebc857f3b0..927a3dfc286 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java @@ -17,9 +17,9 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import 
com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; From 091918e0e879acd6b38a8dd2e41c2a16ac182a82 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 23 Nov 2019 18:39:37 +0100 Subject: [PATCH 228/979] Apply changes from riptano/java-dse-driver a2254a4..33f88f9 --- changelog/README.md | 2 + .../cql/continuous/ContinuousSession.java | 22 ++++----- .../ContinuousCqlRequestHandler.java | 46 ------------------- distribution/src/assembly/binary-tarball.xml | 2 - 4 files changed, 11 insertions(+), 61 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 44d614bbed6..a9efc9dd03f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.4.0 (in progress) +- [documentation] JAVA-2446: Revisit continuous paging javadocs +- [improvement] JAVA-2550: Remove warnings in ContinuousCqlRequestHandler when coordinator is not replica - [improvement] JAVA-2569: Make driver compatible with Netty < 4.1.34 again - [improvement] JAVA-2541: Improve error messages during connection initialization - [improvement] JAVA-2530: Expose shortcuts for name-based UUIDs diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java index 2b1a91ebdd3..f98b0a5d1fa 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java @@ -37,7 +37,8 @@ * Analytics and Apache Spark™, or by any similar analytics tool that needs to read large * portions of a table in one single operation, as quick and reliably as possible. * - *

      Continuous paging requires the following three conditions to be met on the client side: + *

      Continuous paging provides the best performance improvement against regular paging when the + * following conditions are met: * *

        *
      1. The statement must target a single partition or a token range owned by one single replica; @@ -51,20 +52,15 @@ * DefaultConsistencyLevel#LOCAL_ONE LOCAL_ONE}). *
      * - *

      It's the caller's responsibility to make sure that the above conditions are met. If this is - * not the case, continuous paging will silently degrade into a normal read operation, and the - * coordinator will retrieve pages one by one from replicas. - * - *

      Note that when the continuous paging optimization kicks in (range read at {@code ONE} - * performed directly on a replica), the snitch is bypassed and the coordinator will always chose - * itself as a replica. Therefore, other functionality such as probabilistic read repair and - * speculative retry is also not available when contacting a replica at {@code ONE}. - * - *

      Continuous paging is disabled by default and needs to be activated server-side. See Enabling - * continuous paging in the DSE docs to learn how to enable it. + *

      If the above conditions are met, the coordinator will be able to optimize the read path and + * serve results from local data, thus significantly improving response times; if however these + * conditions cannot be met, continuous paging would still work, but response times wouldn't be + * significantly better than those of regular paging anymore. * * @see Continuous + * paging options in cassandra.yaml configuration file + * @see DSE * Continuous Paging Tuning and Support Guide */ diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 8b9977df614..33aea927616 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -24,7 +24,6 @@ import com.datastax.dse.protocol.internal.request.Revise; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.AllNodesFailedException; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.RequestThrottlingException; @@ -37,8 +36,6 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.TokenMap; -import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.retry.RetryDecision; @@ -90,11 +87,9 @@ import java.time.Duration; import 
java.util.AbstractMap; import java.util.ArrayDeque; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Queue; -import java.util.Set; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -121,7 +116,6 @@ public class ContinuousCqlRequestHandler private final InternalDriverContext context; private final DriverExecutionProfile executionProfile; private final Queue queryPlan; - private final Set replicas; private final RetryPolicy retryPolicy; private final RequestThrottler throttler; private final int maxEnqueuedPages; @@ -238,7 +232,6 @@ public ContinuousCqlRequestHandler( protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); this.numPagesRequested = protocolBackpressureAvailable ? maxEnqueuedPages : 0; this.message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); - this.replicas = getReplicas(); this.throttler = context.getRequestThrottler(); this.throttler.register(this); this.sessionMetricUpdater = session.getMetricUpdater(); @@ -309,19 +302,6 @@ private void sendRequest(@Nullable Node node) { lock.unlock(); } } else { - if (replicas.isEmpty()) { - LOG.warn( - "[{}] Could not determine if the node is a replica, " - + "continuous paging may not be available: {}", - logPrefix, - node); - } else if (!replicas.contains(node)) { - LOG.warn( - "[{}] Contacting a node that is likely not a replica, " - + "continuous paging may not be available: {}", - logPrefix, - node); - } this.node = node; streamId = -1; messageStartTimeNanos = System.nanoTime(); @@ -1162,32 +1142,6 @@ private void updateErrorMetrics( // UTILITY METHODS - @NonNull - private Set getReplicas() { - if (session.getMetadata().getTokenMap().isPresent()) { - CqlIdentifier keyspace = statement.getKeyspace(); - if (keyspace == null) { - keyspace = statement.getRoutingKeyspace(); - if (keyspace == null) { - keyspace = 
session.getKeyspace().orElse(null); - } - } - if (keyspace != null) { - TokenMap tokenMap = session.getMetadata().getTokenMap().get(); - Token routingToken = statement.getRoutingToken(); - if (routingToken != null) { - return tokenMap.getReplicas(keyspace, routingToken); - } else { - ByteBuffer routingKey = statement.getRoutingKey(); - if (routingKey != null) { - return tokenMap.getReplicas(keyspace, routingKey); - } - } - } - } - return Collections.emptySet(); - } - @NonNull private DefaultExecutionInfo createExecutionInfo( @NonNull Result result, @Nullable Frame response) { diff --git a/distribution/src/assembly/binary-tarball.xml b/distribution/src/assembly/binary-tarball.xml index 78e6a0141ba..7265569f504 100644 --- a/distribution/src/assembly/binary-tarball.xml +++ b/distribution/src/assembly/binary-tarball.xml @@ -74,7 +74,6 @@ - true @@ -100,7 +99,6 @@ - true From 97132a4ab21e09868f83a14b4e6a17c06bc3402a Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 19 Dec 2019 09:57:39 -0600 Subject: [PATCH 229/979] Remove duplicate dependency entries in pom files --- core/pom.xml | 8 -------- integration-tests/pom.xml | 5 ----- 2 files changed, 13 deletions(-) diff --git a/core/pom.xml b/core/pom.xml index 424e193c353..4581f2313e6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -112,14 +112,6 @@ com.github.spotbugs spotbugs-annotations - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - ch.qos.logback logback-classic diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 08b8a9819cf..8365f3b92e8 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -156,11 +156,6 @@ pax-exam-container-native test - - org.ops4j.pax.exam - pax-exam-junit4 - test - org.ops4j.pax.exam pax-exam-link-mvn From 7d9996e3220109f84ff0e64c210973b9701f76f1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 2 Jan 2020 19:11:37 -0300 Subject: [PATCH 230/979] Fix wrong test method name --- 
.../test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java index 79800c6b7cb..fd4d206e6c0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java @@ -58,7 +58,7 @@ public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { } @Test - public void should_connect_and_query_with_lz4_compression() { + public void should_connect_and_query_with_snappy_compression() { connectAndQuerySimple(); } } From 5c06be53adc22d9a9afe8b1c78136f642815137d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 7 Jan 2020 13:34:42 -0300 Subject: [PATCH 231/979] Remove deprecation warnings --- .../com/datastax/dse/driver/api/core/DseSessionBuilder.java | 3 +-- .../dse/driver/internal/core/session/DefaultDseSession.java | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java index 263ebbf6a98..a8f2840895d 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/DseSessionBuilder.java @@ -15,7 +15,6 @@ */ package com.datastax.dse.driver.api.core; -import com.datastax.dse.driver.internal.core.session.DefaultDseSession; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.session.SessionBuilder; import edu.umd.cs.findbugs.annotations.NonNull; @@ -33,6 +32,6 @@ public class DseSessionBuilder extends SessionBuilder Date: Tue, 7 Jan 2020 13:56:04 -0300 Subject: [PATCH 232/979] Mention unified driver in changelog section for 4.4.0 --- changelog/README.md 
| 4 ++++ 1 file changed, 4 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index a9efc9dd03f..cd7ae125f12 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,10 @@ ### 4.4.0 (in progress) +This version brings in all functionality that was formerly only in the DataStax Enterprise driver, +such as the built-in support for reactive programming. Going forward, all new features will be +implemented in this single driver. + - [documentation] JAVA-2446: Revisit continuous paging javadocs - [improvement] JAVA-2550: Remove warnings in ContinuousCqlRequestHandler when coordinator is not replica - [improvement] JAVA-2569: Make driver compatible with Netty < 4.1.34 again From a85f5dac662b12bb747a809d730a3596d590817d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 7 Jan 2020 13:54:04 -0300 Subject: [PATCH 233/979] Mention DataStax Apollo in main README --- README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a28c316274e..65406ff5688 100644 --- a/README.md +++ b/README.md @@ -8,12 +8,13 @@ documentation for latest version through [DataStax Docs] or via the release tags [4.4.0](https://github.com/datastax/java-driver/tree/4.4.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and -[DataStax Enterprise] \(4.7+), using exclusively Cassandra's binary protocol and Cassandra Query -Language v3. +[DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol +and Cassandra Query Language (CQL) v3. 
[DataStax Docs]: http://docs.datastax.com/en/developer/java-driver/ [Apache Cassandra®]: http://cassandra.apache.org/ -[DataStax Enterprise]: http://www.datastax.com/products/datastax-enterprise +[DataStax Enterprise]: https://www.datastax.com/products/datastax-enterprise +[DataStax Apollo]: https://www.datastax.com/constellation/datastax-apollo ## Getting the driver @@ -50,8 +51,8 @@ builder](manual/query_builder/), [mapper](manual/mapper)). ## Compatibility -The driver is compatible with Apache Cassandra® 2.1 and higher, and DataStax Enterprise 4.7 and -higher. +The driver is compatible with Apache Cassandra® 2.1 and higher, DataStax Enterprise 4.7 and +higher, and DataStax Apollo. It requires Java 8 or higher. From 860eac18230981479b15c03ab00410395354e156 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 7 Jan 2020 14:07:42 -0300 Subject: [PATCH 234/979] Fix minor typo --- upgrade_guide/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 3d566d95832..33c018f352e 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -2,7 +2,7 @@ ### 4.4.0 -Datastax Enteprise support is now available directly in the main driver. There is no longer a +Datastax Enterprise support is now available directly in the main driver. There is no longer a separate DSE driver. 
#### For Apache Cassandra® users From 95563db36b41a5a4b78ee4bc5e68a25d3fc03c6e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 7 Jan 2020 14:08:10 -0300 Subject: [PATCH 235/979] Mention Reactive Streams among driver dependencies --- upgrade_guide/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 33c018f352e..6f1c28d7138 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -14,8 +14,8 @@ Apart from that, the only visible change is that DSE-specific features are now e * new execution methods: `CqlSession.executeGraph`, `CqlSession.executeContinuously*`. They all have default implementations so this doesn't break binary compatibility. You can just ignore them. -* new driver dependencies: Tinkerpop, ESRI. You can exclude them manually if you want to keep your - classpath lean, the rest of the driver will still work. See the +* new driver dependencies: Tinkerpop, ESRI, Reactive Streams. You can exclude them manually if you + want to keep your classpath lean, the rest of the driver will still work. See the [Integration](../manual/core/integration/#driver-dependencies) page. 
#### For Datastax Enterprise users From 6d325159284edf905de518e5a10e4dd2cff83a11 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 10:35:29 -0300 Subject: [PATCH 236/979] JAVA-2551: Improve support for DETERMINISTIC and MONOTONIC functions (#27) --- changelog/README.md | 1 + .../metadata/schema/DseAggregateMetadata.java | 20 ++-- .../metadata/schema/DseFunctionMetadata.java | 98 ++++++++++++++----- .../schema/DefaultDseAggregateMetadata.java | 34 ++++--- .../schema/DefaultDseFunctionMetadata.java | 72 +++++++++----- .../schema/parsing/DseAggregateParser.java | 4 +- .../schema/parsing/DseFunctionParser.java | 6 +- .../schema/DseAggregateMetadataIT.java | 27 ++++- .../schema/DseFunctionMetadataIT.java | 71 ++++++++++---- 9 files changed, 237 insertions(+), 96 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index cd7ae125f12..93618f2856c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
+- [improvement] JAVA-2551: Improve support for DETERMINISTIC and MONOTONIC functions - [documentation] JAVA-2446: Revisit continuous paging javadocs - [improvement] JAVA-2550: Remove warnings in ContinuousCqlRequestHandler when coordinator is not replica - [improvement] JAVA-2569: Make driver compatible with Netty < 4.1.34 again diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java index 02356fb6960..0555a61bb1b 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.java @@ -24,20 +24,28 @@ /** * Specialized aggregate metadata for DSE. * - *

      It adds support for the DSE-specific {@link #isDeterministic() DETERMINISTIC} keyword. + *

      It adds support for the DSE-specific {@link #getDeterministic() DETERMINISTIC} keyword. */ public interface DseAggregateMetadata extends AggregateMetadata { + /** @deprecated Use {@link #getDeterministic()} instead. */ + @Deprecated + boolean isDeterministic(); + /** * Indicates if this aggregate is deterministic. A deterministic aggregate means that given a * particular input, the aggregate will always produce the same output. * - *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless - * of the actual function characteristics. + *

      This method returns {@linkplain Optional#empty() empty} if this information was not found in + * the system tables, regardless of the actual aggregate characteristics; this is the case for all + * versions of DSE older than 6.0.0. * - * @return Whether or not this aggregate is deterministic. + * @return Whether or not this aggregate is deterministic; or {@linkplain Optional#empty() empty} + * if such information is not available in the system tables. */ - boolean isDeterministic(); + default Optional getDeterministic() { + return Optional.of(isDeterministic()); + } @NonNull @Override @@ -79,7 +87,7 @@ default String describe(boolean pretty) { builder.newLine().append("INITCOND ").append(formatInitCond.get()); } // add DETERMINISTIC if present - if (isDeterministic()) { + if (getDeterministic().orElse(false)) { builder.newLine().append("DETERMINISTIC"); } return builder.append(";").build(); diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java index a1964b3a1b5..32f897863ef 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.java @@ -21,56 +21,99 @@ import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; +import java.util.Optional; /** * Specialized function metadata for DSE. * - *

      It adds support for the DSE-specific {@link #isDeterministic() DETERMINISTIC} and {@link - * #isMonotonic() MONOTONIC} keywords. + *

      It adds support for the DSE-specific {@link #getDeterministic() DETERMINISTIC} and {@link + * #getMonotonicity() MONOTONIC} keywords. */ public interface DseFunctionMetadata extends FunctionMetadata { + /** The monotonicity of a function. */ + enum Monotonicity { + + /** + * Indicates that the function is fully monotonic on all of its arguments. This means that it is + * either entirely non-increasing or non-decreasing. Full monotonicity is required to use the + * function in a GROUP BY clause. + */ + FULLY_MONOTONIC, + + /** + * Indicates that the function is partially monotonic, meaning that partial application over + * some of the its arguments is monotonic. Currently (DSE 6.0.0), CQL only allows partial + * monotonicity on exactly one argument. This may change in a future CQL version. + */ + PARTIALLY_MONOTONIC, + + /** Indicates that the function is not monotonic. */ + NOT_MONOTONIC, + } + + /** @deprecated Use {@link #getDeterministic()} instead. */ + @Deprecated + boolean isDeterministic(); + /** * Indicates if this function is deterministic. A deterministic function means that given a * particular input, the function will always produce the same output. * - *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless - * of the actual function characteristics. + *

      This method returns {@linkplain Optional#empty() empty} if this information was not found in + * the system tables, regardless of the actual function characteristics; this is the case for all + * versions of DSE older than 6.0.0. * - * @return Whether or not this function is deterministic. + * @return Whether or not this function is deterministic; or {@linkplain Optional#empty() empty} + * if such information is not available in the system tables. */ - boolean isDeterministic(); + default Optional getDeterministic() { + return Optional.of(isDeterministic()); + } + + /** @deprecated use {@link #getMonotonicity()} instead. */ + @Deprecated + boolean isMonotonic(); /** - * Indicates whether or not this function is monotonic on all of its arguments. This means that it - * is either entirely non-increasing or non-decreasing. + * Returns this function's {@link Monotonicity}. * *

      A function can be either: * *

        - *
      • monotonic on all of its arguments. In that case, this method returns {@code true}, and - * {@link #getMonotonicArgumentNames()} returns all the arguments; + *
      • fully monotonic. In that case, this method returns {@link Monotonicity#FULLY_MONOTONIC}, + * and {@link #getMonotonicArgumentNames()} returns all the arguments; *
      • partially monotonic, meaning that partial application over some of the arguments is * monotonic. Currently (DSE 6.0.0), CQL only allows partial monotonicity on exactly one * argument. This may change in a future CQL version. In that case, this method returns - * {@code false}, and {@link #getMonotonicArgumentNames()} returns a singleton list; - *
      • not monotonic. In that case, this method return {@code false} and {@link - * #getMonotonicArgumentNames()} returns an empty list. + * {@link Monotonicity#PARTIALLY_MONOTONIC}, and {@link #getMonotonicArgumentNames()} + * returns a singleton list; + *
      • not monotonic. In that case, this method return {@link Monotonicity#NOT_MONOTONIC} and + * {@link #getMonotonicArgumentNames()} returns an empty list. *
      * - *

      Monotonicity is required to use the function in a GROUP BY clause. + *

      Full monotonicity is required to use the function in a GROUP BY clause. * - *

      NOTE: For versions of DSE older than 6.0.0, this method will always return false, regardless - * of the actual function characteristics. + *

      This method returns {@linkplain Optional#empty() empty} if this information was not found in + * the system tables, regardless of the actual function characteristics; this is the case for all + * versions of DSE older than 6.0.0. * - * @return whether or not this function is monotonic on all of its arguments. + * @return this function's {@link Monotonicity}; or {@linkplain Optional#empty() empty} if such + * information is not available in the system tables. */ - boolean isMonotonic(); + default Optional getMonotonicity() { + return Optional.of( + isMonotonic() + ? Monotonicity.FULLY_MONOTONIC + : getMonotonicArgumentNames().isEmpty() + ? Monotonicity.NOT_MONOTONIC + : Monotonicity.PARTIALLY_MONOTONIC); + } /** * Returns a list of argument names that are monotonic. * - *

      See {@link #isMonotonic()} for explanations on monotonicity, and the possible values + *

      See {@link #getMonotonicity()} for explanations on monotonicity, and the possible values * returned by this method. * *

      NOTE: For versions of DSE older than 6.0.0, this method will always return an empty list, @@ -112,13 +155,20 @@ default String describe(boolean pretty) { .append(getReturnType().asCql(false, true)) .newLine(); // handle deterministic and monotonic - if (isDeterministic()) { + if (getDeterministic().orElse(false)) { builder.append("DETERMINISTIC").newLine(); } - if (isMonotonic()) { - builder.append("MONOTONIC").newLine(); - } else if (!getMonotonicArgumentNames().isEmpty()) { - builder.append("MONOTONIC ON ").append(getMonotonicArgumentNames().get(0)).newLine(); + if (getMonotonicity().isPresent()) { + switch (getMonotonicity().get()) { + case FULLY_MONOTONIC: + builder.append("MONOTONIC").newLine(); + break; + case PARTIALLY_MONOTONIC: + builder.append("MONOTONIC ON ").append(getMonotonicArgumentNames().get(0)).newLine(); + break; + default: + break; + } } builder .append("LANGUAGE ") diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java index 6902b2873e1..c71bf49e5e8 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseAggregateMetadata.java @@ -24,13 +24,14 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Objects; +import java.util.Optional; import net.jcip.annotations.Immutable; @Immutable public class DefaultDseAggregateMetadata extends DefaultAggregateMetadata implements DseAggregateMetadata { - private final boolean deterministic; + @Nullable private final Boolean deterministic; public DefaultDseAggregateMetadata( @NonNull CqlIdentifier keyspace, @@ -41,7 +42,7 @@ public DefaultDseAggregateMetadata( @NonNull FunctionSignature stateFuncSignature, @NonNull DataType 
stateType, @NonNull TypeCodec stateTypeCodec, - boolean deterministic) { + @Nullable Boolean deterministic) { super( keyspace, signature, @@ -55,8 +56,15 @@ public DefaultDseAggregateMetadata( } @Override + @Deprecated public boolean isDeterministic() { - return this.deterministic; + return deterministic != null && deterministic; + } + + @Override + @Nullable + public Optional getDeterministic() { + return Optional.ofNullable(deterministic); } @Override @@ -73,7 +81,7 @@ public boolean equals(Object other) { && Objects.equals(this.getReturnType(), that.getReturnType()) && Objects.equals(this.getStateFuncSignature(), that.getStateFuncSignature()) && Objects.equals(this.getStateType(), that.getStateType()) - && this.deterministic == that.isDeterministic(); + && Objects.equals(this.deterministic, that.getDeterministic().orElse(null)); } else { return false; } @@ -94,15 +102,13 @@ public int hashCode() { @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Aggregate Name: ") - .append(getSignature().getName().asCql(false)) - .append(", Keyspace: ") - .append(getKeyspace().asCql(false)) - .append(", Return Type: ") - .append(getReturnType().asCql(false, false)) - .append(", Deterministic: ") - .append(deterministic); - return sb.toString(); + return "Aggregate Name: " + + getSignature().getName().asCql(false) + + ", Keyspace: " + + getKeyspace().asCql(false) + + ", Return Type: " + + getReturnType().asCql(false, false) + + ", Deterministic: " + + deterministic; } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java index d741bf5935d..8a8bc6a58ee 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseFunctionMetadata.java @@ -20,17 +20,20 @@ import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.internal.core.metadata.schema.DefaultFunctionMetadata; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; import java.util.Objects; +import java.util.Optional; import net.jcip.annotations.Immutable; @Immutable public class DefaultDseFunctionMetadata extends DefaultFunctionMetadata implements DseFunctionMetadata { - private final boolean deterministic; - private final boolean monotonic; + @Nullable private final Boolean deterministic; + @Nullable private final Monotonicity monotonicity; @NonNull private final List monotonicArgumentNames; public DefaultDseFunctionMetadata( @@ -41,24 +44,43 @@ public DefaultDseFunctionMetadata( boolean calledOnNullInput, @NonNull String language, @NonNull DataType returnType, - boolean deterministic, - boolean monotonic, + @Nullable Boolean deterministic, + @Nullable Boolean monotonic, @NonNull List monotonicArgumentNames) { super(keyspace, signature, parameterNames, body, calledOnNullInput, language, returnType); // set DSE extension attributes this.deterministic = deterministic; - this.monotonic = monotonic; - this.monotonicArgumentNames = monotonicArgumentNames; + this.monotonicity = + monotonic == null + ? null + : monotonic + ? Monotonicity.FULLY_MONOTONIC + : monotonicArgumentNames.isEmpty() + ? 
Monotonicity.NOT_MONOTONIC + : Monotonicity.PARTIALLY_MONOTONIC; + this.monotonicArgumentNames = ImmutableList.copyOf(monotonicArgumentNames); } @Override + @Deprecated public boolean isDeterministic() { - return this.deterministic; + return deterministic != null && deterministic; } @Override + public Optional getDeterministic() { + return Optional.ofNullable(deterministic); + } + + @Override + @Deprecated public boolean isMonotonic() { - return this.monotonic; + return monotonicity == Monotonicity.FULLY_MONOTONIC; + } + + @Override + public Optional getMonotonicity() { + return Optional.ofNullable(monotonicity); } @NonNull @@ -80,8 +102,8 @@ public boolean equals(Object other) { && this.isCalledOnNullInput() == that.isCalledOnNullInput() && Objects.equals(this.getLanguage(), that.getLanguage()) && Objects.equals(this.getReturnType(), that.getReturnType()) - && this.deterministic == that.isDeterministic() - && this.monotonic == that.isMonotonic() + && Objects.equals(this.deterministic, that.getDeterministic().orElse(null)) + && this.monotonicity == that.getMonotonicity().orElse(null) && Objects.equals(this.monotonicArgumentNames, that.getMonotonicArgumentNames()); } else { return false; @@ -98,22 +120,26 @@ public int hashCode() { isCalledOnNullInput(), getLanguage(), getReturnType(), - isDeterministic(), - isMonotonic(), - getMonotonicArgumentNames()); + deterministic, + monotonicity, + monotonicArgumentNames); } @Override public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("Function Name: ").append(this.getSignature().getName().asCql(false)); - sb.append(", Keyspace: ").append(this.getKeyspace()); - sb.append(", Language: ").append(this.getLanguage()); - sb.append(", Protocol Code: ").append(this.getReturnType().getProtocolCode()); - sb.append(", Deterministic: ").append(this.isDeterministic()); - sb.append(", Monotonic: ").append(this.isMonotonic()); - sb.append(", Monotonic On: ") - .append(this.monotonicArgumentNames.isEmpty() ? 
"" : this.monotonicArgumentNames.get(0)); - return sb.toString(); + return "Function Name: " + + this.getSignature().getName().asCql(false) + + ", Keyspace: " + + this.getKeyspace().asCql(false) + + ", Language: " + + this.getLanguage() + + ", Return Type: " + + getReturnType().asCql(false, false) + + ", Deterministic: " + + this.deterministic + + ", Monotonicity: " + + this.monotonicity + + ", Monotonic On: " + + (this.monotonicArgumentNames.isEmpty() ? "" : this.monotonicArgumentNames.get(0)); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java index 2bef719b1cf..d894e1b1008 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseAggregateParser.java @@ -44,8 +44,8 @@ public DseAggregateMetadata parseAggregate( Map userDefinedTypes) { AggregateMetadata aggregate = aggregateParser.parseAggregate(row, keyspaceId, userDefinedTypes); // parse the DSE extended columns - final boolean deterministic = - row.contains("deterministic") ? row.getBoolean("deterministic") : false; + final Boolean deterministic = + row.contains("deterministic") ? 
row.getBoolean("deterministic") : null; return new DefaultDseAggregateMetadata( aggregate.getKeyspace(), diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java index 53e8f10fdb5..696297a041e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseFunctionParser.java @@ -45,9 +45,9 @@ public DseFunctionMetadata parseFunction( Map userDefinedTypes) { FunctionMetadata function = functionParser.parseFunction(row, keyspaceId, userDefinedTypes); // parse the DSE extended columns - final boolean deterministic = - row.contains("deterministic") ? row.getBoolean("deterministic") : false; - final boolean monotonic = row.contains("monotonic") ? row.getBoolean("monotonic") : false; + final Boolean deterministic = + row.contains("deterministic") ? row.getBoolean("deterministic") : null; + final Boolean monotonic = row.contains("monotonic") ? 
row.getBoolean("monotonic") : null; // stream the list of strings into a list of CqlIdentifiers final List monotonicOn = row.contains("monotonic_on") diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java index 5172e5cf445..a7f1a4fd25a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java @@ -16,20 +16,23 @@ package com.datastax.dse.driver.api.core.metadata.schema; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assumptions.assumeThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Objects; import java.util.Optional; import org.junit.ClassRule; import org.junit.Test; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.0") +@DseRequirement(min = "5.0", description = "DSE 5.0+ required function/aggregate support") public class DseAggregateMetadataIT extends AbstractMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); @@ -39,13 +42,15 @@ public class DseAggregateMetadataIT extends AbstractMetadataIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + private static final Version DSE_6_0_0 = Objects.requireNonNull(Version.parse("6.0.0")); + @Override protected SessionRule 
getSessionRule() { return DseAggregateMetadataIT.SESSION_RULE; } @Test - public void should_parse_aggregate_without_deterministic() throws Exception { + public void should_parse_aggregate_without_deterministic() { String cqlFunction = "CREATE FUNCTION nondetf(i int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return new java.util.Random().nextInt(i);';"; String cqlAggregate = "CREATE AGGREGATE nondeta() SFUNC nondetf STYPE int INITCOND 0;"; @@ -56,7 +61,11 @@ public void should_parse_aggregate_without_deterministic() throws Exception { assertThat(aggregateOpt.map(DseAggregateMetadata.class::cast)) .hasValueSatisfying( aggregate -> { - assertThat(aggregate.isDeterministic()).isFalse(); + if (isDse6OrHigher()) { + assertThat(aggregate.getDeterministic()).contains(false); + } else { + assertThat(aggregate.getDeterministic()).isEmpty(); + } assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); assertThat(aggregate.describe(false)) .isEqualTo( @@ -67,7 +76,8 @@ public void should_parse_aggregate_without_deterministic() throws Exception { } @Test - public void should_parse_aggregate_with_deterministic() throws Exception { + public void should_parse_aggregate_with_deterministic() { + assumeThat(isDse6OrHigher()).describedAs("DSE 6.0+ required for DETERMINISTIC").isTrue(); String cqlFunction = "CREATE FUNCTION detf(i int, y int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return i+y;';"; String cqlAggregate = @@ -79,7 +89,7 @@ public void should_parse_aggregate_with_deterministic() throws Exception { assertThat(aggregateOpt.map(DseAggregateMetadata.class::cast)) .hasValueSatisfying( aggregate -> { - assertThat(aggregate.isDeterministic()).isTrue(); + assertThat(aggregate.getDeterministic()).contains(true); assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); assertThat(aggregate.describe(false)) .isEqualTo( @@ -88,4 +98,11 @@ public void should_parse_aggregate_with_deterministic() throws Exception { 
keyspace.getName().asInternal())); }); } + + private static boolean isDse6OrHigher() { + assumeThat(CCM_RULE.getDseVersion()) + .describedAs("DSE required for DseFunctionMetadata tests") + .isPresent(); + return CCM_RULE.getDseVersion().get().compareTo(DSE_6_0_0) >= 0; + } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java index 8803033fc09..66ed45ce9e0 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java @@ -16,21 +16,25 @@ package com.datastax.dse.driver.api.core.metadata.schema; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assumptions.assumeThat; +import com.datastax.dse.driver.api.core.metadata.schema.DseFunctionMetadata.Monotonicity; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import java.util.Objects; import java.util.Optional; import org.junit.ClassRule; import org.junit.Test; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.0") +@DseRequirement(min = "5.0", description = "DSE 5.0+ required function/aggregate support") public class DseFunctionMetadataIT extends AbstractMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); @@ -40,13 +44,15 @@ public class 
DseFunctionMetadataIT extends AbstractMetadataIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + private static final Version DSE_6_0_0 = Objects.requireNonNull(Version.parse("6.0.0")); + @Override public SessionRule getSessionRule() { return DseFunctionMetadataIT.SESSION_RULE; } @Test - public void should_parse_function_without_deterministic_or_monotonic() throws Exception { + public void should_parse_function_without_deterministic_or_monotonic() { String cqlFunction = "CREATE FUNCTION nondetf(i int) RETURNS NULL ON NULL INPUT RETURNS int LANGUAGE java AS 'return new java.util.Random().nextInt(i);';"; execute(cqlFunction); @@ -55,8 +61,13 @@ public void should_parse_function_without_deterministic_or_monotonic() throws Ex assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isFalse(); - assertThat(function.isMonotonic()).isFalse(); + if (isDse6OrHigher()) { + assertThat(function.getDeterministic()).contains(false); + assertThat(function.getMonotonicity()).contains(Monotonicity.NOT_MONOTONIC); + } else { + assertThat(function.getDeterministic()).isEmpty(); + assertThat(function.getMonotonicity()).isEmpty(); + } assertThat(function.getMonotonicArgumentNames()).isEmpty(); assertThat(function.getLanguage()).isEqualTo("java"); assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); @@ -70,7 +81,10 @@ public void should_parse_function_without_deterministic_or_monotonic() throws Ex } @Test - public void should_parse_function_with_deterministic() throws Exception { + public void should_parse_function_with_deterministic() { + assumeThat(isDse6OrHigher()) + .describedAs("DSE 6.0+ required for DETERMINISTIC / MONOTONIC") + .isTrue(); String cqlFunction = "CREATE FUNCTION detf(i int, y int) RETURNS NULL ON NULL INPUT RETURNS int DETERMINISTIC LANGUAGE java AS 'return i+y;';"; execute(cqlFunction); @@ -80,8 +94,8 @@ public void 
should_parse_function_with_deterministic() throws Exception { assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isTrue(); - assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getDeterministic()).contains(true); + assertThat(function.getMonotonicity()).contains(Monotonicity.NOT_MONOTONIC); assertThat(function.getMonotonicArgumentNames()).isEmpty(); assertThat(function.getLanguage()).isEqualTo("java"); assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); @@ -95,7 +109,10 @@ public void should_parse_function_with_deterministic() throws Exception { } @Test - public void should_parse_function_with_monotonic() throws Exception { + public void should_parse_function_with_monotonic() { + assumeThat(isDse6OrHigher()) + .describedAs("DSE 6.0+ required for DETERMINISTIC / MONOTONIC") + .isTrue(); String cqlFunction = "CREATE FUNCTION monotonic(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int MONOTONIC LANGUAGE java AS 'return dividend / divisor;';"; execute(cqlFunction); @@ -105,8 +122,8 @@ public void should_parse_function_with_monotonic() throws Exception { assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isFalse(); - assertThat(function.isMonotonic()).isTrue(); + assertThat(function.getDeterministic()).contains(false); + assertThat(function.getMonotonicity()).contains(Monotonicity.FULLY_MONOTONIC); assertThat(function.getMonotonicArgumentNames()) .containsExactly( CqlIdentifier.fromCql("dividend"), CqlIdentifier.fromCql("divisor")); @@ -122,7 +139,10 @@ public void should_parse_function_with_monotonic() throws Exception { } @Test - public void should_parse_function_with_monotonic_on() throws Exception { + public void should_parse_function_with_monotonic_on() { + assumeThat(isDse6OrHigher()) + .describedAs("DSE 6.0+ required for DETERMINISTIC / MONOTONIC") + 
.isTrue(); String cqlFunction = "CREATE FUNCTION monotonic_on(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';"; execute(cqlFunction); @@ -132,8 +152,8 @@ public void should_parse_function_with_monotonic_on() throws Exception { assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isFalse(); - assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getDeterministic()).contains(false); + assertThat(function.getMonotonicity()).contains(Monotonicity.PARTIALLY_MONOTONIC); assertThat(function.getMonotonicArgumentNames()) .containsExactly(CqlIdentifier.fromCql("dividend")); assertThat(function.getLanguage()).isEqualTo("java"); @@ -148,7 +168,10 @@ public void should_parse_function_with_monotonic_on() throws Exception { } @Test - public void should_parse_function_with_deterministic_and_monotonic() throws Exception { + public void should_parse_function_with_deterministic_and_monotonic() { + assumeThat(isDse6OrHigher()) + .describedAs("DSE 6.0+ required for DETERMINISTIC / MONOTONIC") + .isTrue(); String cqlFunction = "CREATE FUNCTION det_and_monotonic(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC LANGUAGE java AS 'return dividend / divisor;';"; execute(cqlFunction); @@ -158,8 +181,8 @@ public void should_parse_function_with_deterministic_and_monotonic() throws Exce assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isTrue(); - assertThat(function.isMonotonic()).isTrue(); + assertThat(function.getDeterministic()).contains(true); + assertThat(function.getMonotonicity()).contains(Monotonicity.FULLY_MONOTONIC); assertThat(function.getMonotonicArgumentNames()) .containsExactly( CqlIdentifier.fromCql("dividend"), CqlIdentifier.fromCql("divisor")); @@ -175,7 +198,10 @@ public void 
should_parse_function_with_deterministic_and_monotonic() throws Exce } @Test - public void should_parse_function_with_deterministic_and_monotonic_on() throws Exception { + public void should_parse_function_with_deterministic_and_monotonic_on() { + assumeThat(isDse6OrHigher()) + .describedAs("DSE 6.0+ required for DETERMINISTIC / MONOTONIC") + .isTrue(); String cqlFunction = "CREATE FUNCTION det_and_monotonic_on(dividend int, divisor int) CALLED ON NULL INPUT RETURNS int DETERMINISTIC MONOTONIC ON \"dividend\" LANGUAGE java AS 'return dividend / divisor;';"; execute(cqlFunction); @@ -185,8 +211,8 @@ public void should_parse_function_with_deterministic_and_monotonic_on() throws E assertThat(functionOpt.map(DseFunctionMetadata.class::cast)) .hasValueSatisfying( function -> { - assertThat(function.isDeterministic()).isTrue(); - assertThat(function.isMonotonic()).isFalse(); + assertThat(function.getDeterministic()).contains(true); + assertThat(function.getMonotonicity()).contains(Monotonicity.PARTIALLY_MONOTONIC); assertThat(function.getMonotonicArgumentNames()) .containsExactly(CqlIdentifier.fromCql("dividend")); assertThat(function.getLanguage()).isEqualTo("java"); @@ -199,4 +225,11 @@ public void should_parse_function_with_deterministic_and_monotonic_on() throws E keyspace.getName().asInternal())); }); } + + private static boolean isDse6OrHigher() { + assumeThat(CCM_RULE.getDseVersion()) + .describedAs("DSE required for DseFunctionMetadata tests") + .isPresent(); + return CCM_RULE.getDseVersion().get().compareTo(DSE_6_0_0) >= 0; + } } From 77c301d91486e75bb9e5f572082f5358528395e5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 6 Jan 2020 11:57:48 -0300 Subject: [PATCH 237/979] JAVA-2603: Upgrade Maven javadoc plugin to 3.1.1 This commit allows to get rid of spurious error messages when building the distribution submodule: [ERROR] no module descriptor for XYZ However it requires a change to configurations: package names must be specified with a trailing .* 
for subpackages to be excluded as well. --- core-shaded/pom.xml | 2 +- pom.xml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index ce95e4313ab..fd0751487e9 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -236,7 +236,7 @@ ${project.build.directory}/shaded-sources - com.datastax.oss.driver.internal, com.datastax.dse.driver.internal, com.datastax.oss.driver.shaded + com.datastax.oss.driver.internal.*,com.datastax.dse.driver.internal.*,com.datastax.oss.driver.shaded.* + + + + +]]> + + --allow-script-in-comments + + From f160bc6dc038110200f2e852925ff49dc9c3dc52 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Wed, 8 Jan 2020 20:39:46 +0100 Subject: [PATCH 239/979] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check (#26) --- changelog/README.md | 1 + .../core/insights/InsightsClient.java | 3 -- .../core/insights/InsightsClientTest.java | 48 +++++++++++++++++++ 3 files changed, 49 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index fe930261acc..bef205b1663 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
+- [improvement] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check - [improvement] JAVA-2601: Inject Google Tag Manager scripts in generated API documentation - [improvement] JAVA-2551: Improve support for DETERMINISTIC and MONOTONIC functions - [documentation] JAVA-2446: Revisit continuous paging javadocs diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 047e30d8d51..700899ef085 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -191,9 +191,6 @@ public void shutdown() { @VisibleForTesting public CompletionStage sendStatusMessage() { try { - if (!shouldSendEvent()) { - return CompletableFuture.completedFuture(null); - } String statusMessage = createStatusMessage(); CompletionStage result = sendJsonMessage(statusMessage); return result.whenComplete( diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index 91e70437536..e9601a1f26d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -26,8 +26,11 @@ import static org.awaitility.Duration.ONE_SECOND; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; import 
com.datastax.dse.driver.internal.core.insights.schema.AuthProviderType; import com.datastax.dse.driver.internal.core.insights.schema.Insight; @@ -46,9 +49,11 @@ import com.datastax.dse.driver.internal.core.insights.schema.SessionStateForNode; import com.datastax.dse.driver.internal.core.insights.schema.SpecificExecutionProfile; import com.datastax.dse.driver.internal.core.insights.schema.SpeculativeExecutionInfo; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; @@ -75,7 +80,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.UUID; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; @@ -290,6 +297,39 @@ public void should_get_caller_of_create_cluster(StackTraceElement[] stackTrace, assertThat(result).isEqualTo(expected); } + @Test + @SuppressWarnings("ResultOfMethodCallIgnored") + public void should_execute_should_send_event_check_only_once() + throws UnknownHostException, InterruptedException { + // given + InsightsConfiguration insightsConfiguration = mock(InsightsConfiguration.class); + when(insightsConfiguration.isMonitorReportingEnabled()).thenReturn(true); + when(insightsConfiguration.getStatusEventDelayMillis()).thenReturn(10L); + when(insightsConfiguration.getExecutor()).thenReturn(new DefaultEventLoop()); + + InsightsClient insightsClient = + new InsightsClient( + mockDefaultDriverContext(), + MOCK_TIME_SUPPLIER, + insightsConfiguration, + null, + null, + 
null, + null, + null, + EMPTY_STACK_TRACE); + + // when + insightsClient.scheduleStatusMessageSend(); + // emulate periodic calls to sendStatusMessage + insightsClient.sendStatusMessage(); + insightsClient.sendStatusMessage(); + insightsClient.sendStatusMessage(); + + // then + verify(insightsConfiguration, times(1)).isMonitorReportingEnabled(); + } + @DataProvider public static Object[][] stackTraceProvider() { StackTraceElement[] onlyInitCall = @@ -419,6 +459,14 @@ private DefaultDriverContext mockDefaultDriverContext() throws UnknownHostExcept mockConnectionPools(context); MetadataManager manager = mock(MetadataManager.class); when(context.getMetadataManager()).thenReturn(manager); + Metadata metadata = mock(Metadata.class); + when(manager.getMetadata()).thenReturn(metadata); + Node node = mock(Node.class); + when(node.getExtras()) + .thenReturn( + ImmutableMap.of( + DseNodeProperties.DSE_VERSION, Objects.requireNonNull(Version.parse("6.0.5")))); + when(metadata.getNodes()).thenReturn(ImmutableMap.of(UUID.randomUUID(), node)); DriverExecutionProfile defaultExecutionProfile = mockDefaultExecutionProfile(); DriverExecutionProfile nonDefaultExecutionProfile = mockNonDefaultRequestTimeoutExecutionProfile(); From 17f91338f3407efe0341d438142a2782938f6e41 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Tue, 7 Jan 2020 14:32:59 -0600 Subject: [PATCH 240/979] JAVA-2395: Update Jenkins build.yaml to test against various JDKs and DSE clusters --- build.yaml | 72 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 65 insertions(+), 7 deletions(-) diff --git a/build.yaml b/build.yaml index d9608044a0a..01a78f10698 100644 --- a/build.yaml +++ b/build.yaml @@ -1,19 +1,77 @@ -java: - - openjdk8 +jabba: + - oracle_jdk8 + - openjdk_jdk11 + - openjdk_jdk12 + - openjdk_jdk13 os: - ubuntu/bionic64/java-driver cassandra: - '2.1' - - '2.2' - '3.0' - '3.11' + - 'dse-4.8' + - 'dse-5.0' + - 'dse-5.1' + - 'dse-6.0' + - 'dse-6.7' + - 'dse-6.8' +schedules: + commit: + 
schedule: per_commit + matrix: + exclude: + # Just run against JDK8 and latest DSE (6.7 for now, change to 6.8 when released) + - jabba: oracle_jdk8 + cassandra: ['2.1', '3.0', '3.11', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] + # Exclude all other JDKs + - jabba: openjdk_jdk11 + - jabba: openjdk_jdk12 + - jabba: openjdk_jdk13 + nightly: + schedule: nightly + matrix: + exclude: + # No excludes for JDK8 + # Exclude JDK11 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk11 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + # Exclude JDK12 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk12 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + # Exclude JDK13 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk13 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + adhoc: + schedule: adhoc + matrix: + exclude: + # No excludes for JDK8 + # Exclude JDK11 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk11 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + # Exclude JDK12 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk12 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + # Exclude JDK13 for all but the latest Cassandra and DSE 6.7+ + - jabba: openjdk_jdk13 + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] build: - - type: maven - version: 3.2.5 - goals: verify --batch-mode - properties: | + - properties: | ccm.version=$CCM_CASSANDRA_VERSION + ccm.dse=$CCM_IS_DSE proxy.path=$HOME/proxy + maven.javadoc.skip=true + - script: | + # Jabba default should be a JDK8 for now + jabba use default + export MAVEN_HOME=/home/jenkins/.mvn/apache-maven-3.3.9 + export PATH=$MAVEN_HOME/bin:$PATH + # Build with the default JDK + mvn -B -V install -DskipTests + # Use the matrix JDK for testing + 
jabba use $JABBA_JDK_NAME + # Run tests against matrix JDK + mvn -B -V verify --batch-mode --show-version - xunit: - "**/target/surefire-reports/TEST-*.xml" - "**/target/failsafe-reports/TEST-*.xml" From 83e931b453f09354051e6a987b9af9138fbf18c8 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Wed, 8 Jan 2020 21:37:39 +0100 Subject: [PATCH 241/979] JAVA-2609: Add docs for proxy authentication to unified driver (#36) --- changelog/README.md | 1 + manual/core/authentication/README.md | 71 +++++++++++++++++++++++++++- 2 files changed, 71 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index bef205b1663..51bac55e7b0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. +- [documentation] JAVA-2609: Add docs for proxy authentication to unified driver - [improvement] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check - [improvement] JAVA-2601: Inject Google Tag Manager scripts in generated API documentation - [improvement] JAVA-2551: Improve support for DETERMINISTIC and MONOTONIC functions diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 8c1d41a2c0b..c630be9dbd2 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -130,7 +130,75 @@ acceptable for you, consider writing your own [AuthProvider] implementation; [PlainTextAuthProviderBase] is a good starting point. Similarly, the driver provides [DseGssApiAuthProviderBase] as a starting point to write your own -GSSAPI auth provider. +GSSAPI auth provider. 
+ +### Proxy authentication + +DSE allows a user to connect as another user or role: + +``` +-- Allow bob to connect as alice: +GRANT PROXY.LOGIN ON ROLE 'alice' TO 'bob' +``` + +Once connected, all authorization checks will be performed against the proxy role (alice in this +example). + +To use proxy authentication with the driver, you need to provide the **authorization-id**, in other +words the name of the role you want to connect as. + +Example for plain text authentication: + +``` +dse-java-driver { + advanced.auth-provider { + class = PlainTextAuthProvider + username = bob + password = bob's password + authorization-id = alice + } + } +``` + +With the GSSAPI (Kerberos) provider: + +``` +dse-java-driver { + advanced.auth-provider { + class = DseGssApiAuthProvider + authorization-id = alice + login-configuration { + principal = "user principal here ex bob@DATASTAX.COM" + useKeyTab = "true" + refreshKrb5Config = "true" + keyTab = "Path to keytab file here" + } + } + } +``` + +### Proxy execution + +Proxy execution is similar to proxy authentication, but it applies to a single query, not the whole +session. + +``` +-- Allow bob to execute queries as alice: +GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'bob' +``` + +For this scenario, you would **not** add the `authorization-id = alice` to your configuration. +Instead, use [ProxyAuthentication.executeAs] to wrap your query with the correct authorization for +the execution: + +```java +import com.datastax.dse.driver.api.core.auth.ProxyAuthentication; + +SimpleStatement statement = SimpleStatement.newInstance("some query"); +// executeAs returns a new instance, you need to re-assign +statement = ProxyAuthentication.executeAs("alice", statement); +session.execute(statement); +``` [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer @@ -138,6 +206,7 @@ GSSAPI auth provider. 
[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html [PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html [DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- [SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- [SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file From c474e1ac470c012417d04bc0a4edafa27cb15ce6 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Wed, 8 Jan 2020 22:03:20 +0100 Subject: [PATCH 242/979] JAVA-2542: Improve the javadocs of methods in CqlSession (#34) --- changelog/README.md | 1 + .../driver/api/core/graph/GraphSession.java | 4 ++ .../oss/driver/api/core/CqlSession.java | 2 + .../driver/api/core/cql/AsyncCqlSession.java | 15 ++++++++ .../driver/api/core/cql/SyncCqlSession.java | 38 +++++++++++++++++++ 5 files changed, 60 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 51bac55e7b0..e442f2cee03 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
+- [documentation] JAVA-2542: JAVA-2542: Improve the javadocs of methods in CqlSession - [documentation] JAVA-2609: Add docs for proxy authentication to unified driver - [improvement] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check - [improvement] JAVA-2601: Inject Google Tag Manager scripts in generated API documentation diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java index 0bd263e8f64..2c022ff4d49 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java @@ -53,6 +53,8 @@ public interface GraphSession extends Session { * Apache Cassandra® cluster will result in a runtime error. * * @see GraphResultSet + * @param graphStatement the graph query to execute (that can be any {@code GraphStatement}). + * @return the result of the graph query. That result will never be null but can be empty. */ @NonNull default GraphResultSet execute(@NonNull GraphStatement graphStatement) { @@ -70,6 +72,8 @@ default GraphResultSet execute(@NonNull GraphStatement graphStatement) { * * @see #execute(GraphStatement) * @see AsyncGraphResultSet + * @param graphStatement the graph query to execute (that can be any {@code GraphStatement}). + * @return the {@code CompletionStage} on the result of the graph query. */ @NonNull default CompletionStage executeAsync( diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java index 17be89b7a52..50f4db697f9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java @@ -56,6 +56,8 @@ public interface CqlSession * Returns a builder to create a new instance. * *

      Note that this builder is mutable and not thread-safe. + * + * @return {@code CqlSessionBuilder} to create a new instance. */ @NonNull static CqlSessionBuilder builder() { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java index 2e430414a72..dcd52f251cb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java @@ -31,6 +31,9 @@ public interface AsyncCqlSession extends Session { /** * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, * generally before the result is available). + * + * @param statement the CQL query to execute (that can be any {@code Statement}). + * @return a {@code CompletionStage} that, once complete, will produce the async result set. */ @NonNull default CompletionStage executeAsync(@NonNull Statement statement) { @@ -41,6 +44,9 @@ default CompletionStage executeAsync(@NonNull Statement state /** * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, * generally before the result is available). + * + * @param query the CQL query to execute. + * @return a {@code CompletionStage} that, once complete, will produce the async result set. */ @NonNull default CompletionStage executeAsync(@NonNull String query) { @@ -57,6 +63,9 @@ default CompletionStage executeAsync(@NonNull String query) { * *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for * more explanations). + * + * @param statement the CQL query to prepare (that can be any {@code SimpleStatement}). + * @return a {@code CompletionStage} that, once complete, will produce the prepared statement. */ @NonNull default CompletionStage prepareAsync(@NonNull SimpleStatement statement) { @@ -71,6 +80,9 @@ default CompletionStage prepareAsync(@NonNull SimpleStatement * *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for * more explanations). + * + * @param query the CQL query string to prepare. + * @return a {@code CompletionStage} that, once complete, will produce the prepared statement. */ @NonNull default CompletionStage prepareAsync(@NonNull String query) { @@ -90,6 +102,9 @@ default CompletionStage prepareAsync(@NonNull String query) { * *

      The result of this method is cached (see {@link SyncCqlSession#prepare(SimpleStatement)} for * more explanations). + * + * @param request the {@code PrepareRequest} to prepare. + * @return a {@code CompletionStage} that, once complete, will produce the prepared statement. */ @NonNull default CompletionStage prepareAsync(PrepareRequest request) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java index f9c0deff0f5..3280f3947ef 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java @@ -15,6 +15,10 @@ */ package com.datastax.oss.driver.api.core.cql; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.servererrors.QueryExecutionException; +import com.datastax.oss.driver.api.core.servererrors.QueryValidationException; +import com.datastax.oss.driver.api.core.servererrors.SyntaxError; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; @@ -31,6 +35,17 @@ public interface SyncCqlSession extends Session { /** * Executes a CQL statement synchronously (the calling thread blocks until the result becomes * available). + * + * @param statement the CQL query to execute (that can be any {@link Statement}). + * @return the result of the query. That result will never be null but can be empty (and will be + * for any non SELECT query). + * @throws AllNodesFailedException if no host in the cluster can be contacted successfully to + * execute this query. + * @throws QueryExecutionException if the query triggered an execution exception, i.e. an + * exception thrown by Cassandra when it cannot execute the query with the requested + * consistency level successfully. 
+ * @throws QueryValidationException if the query is invalid (syntax error, unauthorized or any + * other validation problem). */ @NonNull default ResultSet execute(@NonNull Statement statement) { @@ -41,6 +56,17 @@ default ResultSet execute(@NonNull Statement statement) { /** * Executes a CQL statement synchronously (the calling thread blocks until the result becomes * available). + * + * @param query the CQL query to execute. + * @return the result of the query. That result will never be null but can be empty (and will be + * for any non SELECT query). + * @throws AllNodesFailedException if no host in the cluster can be contacted successfully to + * execute this query. + * @throws QueryExecutionException if the query triggered an execution exception, i.e. an + * exception thrown by Cassandra when it cannot execute the query with the requested + * consistency level successfully. + * @throws QueryValidationException if the query if invalid (syntax error, unauthorized or any + * other validation problem). */ @NonNull default ResultSet execute(@NonNull String query) { @@ -110,6 +136,10 @@ default ResultSet execute(@NonNull String query) { * query strings but different {@linkplain SimpleStatement#getConsistencyLevel() consistency * levels} will yield distinct prepared statements. * + * + * @param statement the CQL query to execute (that can be any {@link SimpleStatement}). + * @return the prepared statement corresponding to {@code statement}. + * @throws SyntaxError if the syntax of the query to prepare is not correct. */ @NonNull default PreparedStatement prepare(@NonNull SimpleStatement statement) { @@ -124,6 +154,10 @@ default PreparedStatement prepare(@NonNull SimpleStatement statement) { * *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more * explanations). + * + * @param query the CQL string query to execute. + * @return the prepared statement corresponding to {@code query}. + * @throws SyntaxError if the syntax of the query to prepare is not correct. */ @NonNull default PreparedStatement prepare(@NonNull String query) { @@ -143,6 +177,10 @@ default PreparedStatement prepare(@NonNull String query) { * *

      The result of this method is cached (see {@link #prepare(SimpleStatement)} for more * explanations). + * + * @param request the {@code PrepareRequest} to execute. + * @return the prepared statement corresponding to {@code request}. + * @throws SyntaxError if the syntax of the query to prepare is not correct. */ @NonNull default PreparedStatement prepare(@NonNull PrepareRequest request) { From bb3848b282600781347b2b8d642d565d24138545 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 8 Jan 2020 18:55:42 -0300 Subject: [PATCH 243/979] JAVA-2548: Fix Reactive Streams TCK tests with Java 11 (#31) --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 925f683dd52..63ae18d74e5 100644 --- a/pom.xml +++ b/pom.xml @@ -74,7 +74,7 @@ 1.9.12 3.1.6 2.0.0-M19 - 2.19.1 + 2.22.2 false ${skipTests} @@ -429,11 +429,11 @@ maven-surefire-plugin - 2.19.1 + ${surefire.version} maven-failsafe-plugin - 2.19.1 + ${surefire.version} maven-shade-plugin From 94df1369fa311da8c3991251ec758a36d360ac6f Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Thu, 9 Jan 2020 13:37:47 +0100 Subject: [PATCH 244/979] JAVA-2582: Don't propagate a future into SchemaQueriesFactory (#23) --- changelog/README.md | 2 ++ .../core/metadata/MetadataManager.java | 4 +--- .../queries/Cassandra21SchemaQueries.java | 10 ++------ .../queries/Cassandra22SchemaQueries.java | 10 ++------ .../queries/Cassandra3SchemaQueries.java | 10 ++------ .../queries/Cassandra4SchemaQueries.java | 10 ++------ .../queries/CassandraSchemaQueries.java | 13 ++--------- .../schema/queries/CassandraSchemaRows.java | 15 +----------- .../queries/DefaultSchemaQueriesFactory.java | 23 ++++++++----------- .../schema/queries/SchemaQueriesFactory.java | 5 +--- .../metadata/schema/queries/SchemaRows.java | 8 ------- .../schema/parsing/SchemaParserTest.java | 2 +- .../schema/parsing/TableParserTest.java | 2 +- .../schema/parsing/ViewParserTest.java | 2 +- 
.../queries/Cassandra21SchemaQueriesTest.java | 12 +++------- .../queries/Cassandra22SchemaQueriesTest.java | 2 +- .../queries/Cassandra3SchemaQueriesTest.java | 18 +++++---------- .../DefaultSchemaQueriesFactoryTest.java | 4 +--- 18 files changed, 39 insertions(+), 113 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index e442f2cee03..19d1a46b70f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,8 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. + +- [improvement] JAVA-2582: Don't propagate a future into SchemaQueriesFactory - [documentation] JAVA-2542: JAVA-2542: Improve the javadocs of methods in CqlSession - [documentation] JAVA-2609: Add docs for proxy authentication to unified driver - [improvement] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 7b36ae6fe4e..39586b7bfeb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -432,9 +432,7 @@ private void startSchemaRequest(CompletableFuture refreshFu refreshFuture.completeExceptionally(agreementError); } else { schemaQueriesFactory - .newInstance( - // TODO remove this unused parameter (see JAVA-2582) - null) + .newInstance() .execute() .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) .whenComplete( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java 
index e17e5e4eeda..dc9588ba50f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueries.java @@ -16,22 +16,16 @@ package com.datastax.oss.driver.internal.core.metadata.schema.queries; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class Cassandra21SchemaQueries extends CassandraSchemaQueries { public Cassandra21SchemaQueries( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java index 3e3076477f5..5dbdc0c8efe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueries.java @@ -16,22 +16,16 @@ package com.datastax.oss.driver.internal.core.metadata.schema.queries; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import 
com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class Cassandra22SchemaQueries extends CassandraSchemaQueries { public Cassandra22SchemaQueries( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java index 90a0907417f..e268af4c43e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueries.java @@ -16,22 +16,16 @@ package com.datastax.oss.driver.internal.core.metadata.schema.queries; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class Cassandra3SchemaQueries extends CassandraSchemaQueries { public Cassandra3SchemaQueries( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, 
config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java index d0d989fa567..79e3aa0eb42 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra4SchemaQueries.java @@ -16,22 +16,16 @@ package com.datastax.oss.driver.internal.core.metadata.schema.queries; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Optional; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class Cassandra4SchemaQueries extends Cassandra3SchemaQueries { public Cassandra4SchemaQueries( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java index be3241979f6..fea51be283b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java @@ -17,7 +17,6 @@ import 
com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; @@ -52,9 +51,6 @@ public abstract class CassandraSchemaQueries implements SchemaQueries { private final String whereClause; // The future we return from execute, completes when all the queries are done. private final CompletableFuture schemaRowsFuture = new CompletableFuture<>(); - // A future that completes later, when the whole refresh is done. We just store it here to pass it - // down to the next step. - public final CompletableFuture refreshFuture; private final long startTimeNs = System.nanoTime(); // All non-final fields are accessed exclusively on adminExecutor @@ -62,15 +58,10 @@ public abstract class CassandraSchemaQueries implements SchemaQueries { private int pendingQueries; protected CassandraSchemaQueries( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { this.channel = channel; this.adminExecutor = channel.eventLoop(); this.node = node; - this.refreshFuture = refreshFuture; this.logPrefix = logPrefix; this.timeout = config.getDuration(DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT); this.pageSize = config.getInt(DefaultDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE); @@ -130,7 +121,7 @@ public CompletionStage execute() { private void executeOnAdminExecutor() { assert adminExecutor.inEventLoop(); - schemaRowsBuilder = new CassandraSchemaRows.Builder(node, refreshFuture, logPrefix); + schemaRowsBuilder = new CassandraSchemaRows.Builder(node, logPrefix); query(selectKeyspacesQuery() + whereClause, 
schemaRowsBuilder::withKeyspaces); query(selectTypesQuery() + whereClause, schemaRowsBuilder::withTypes); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java index cf5fbe9d7f6..7e452bdafb3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java @@ -18,7 +18,6 @@ import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameParser; @@ -33,7 +32,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.Immutable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +41,6 @@ public class CassandraSchemaRows implements SchemaRows { private final Node node; private final DataTypeParser dataTypeParser; - private final CompletableFuture refreshFuture; private final List keyspaces; private final List virtualKeyspaces; private final Multimap tables; @@ -58,7 +55,6 @@ public class CassandraSchemaRows implements SchemaRows { private CassandraSchemaRows( Node node, - CompletableFuture refreshFuture, DataTypeParser dataTypeParser, List keyspaces, List virtualKeyspaces, @@ -73,7 +69,6 @@ private CassandraSchemaRows( Multimap aggregates) { this.node = node; this.dataTypeParser = dataTypeParser; - this.refreshFuture = refreshFuture; this.keyspaces = 
keyspaces; this.virtualKeyspaces = virtualKeyspaces; this.tables = tables; @@ -98,11 +93,6 @@ public DataTypeParser dataTypeParser() { return dataTypeParser; } - @Override - public CompletableFuture refreshFuture() { - return refreshFuture; - } - @Override public List keyspaces() { return keyspaces; @@ -162,7 +152,6 @@ public static class Builder { private static final Logger LOG = LoggerFactory.getLogger(Builder.class); private final Node node; - private final CompletableFuture refreshFuture; private final DataTypeParser dataTypeParser; private final String tableNameColumn; private final String logPrefix; @@ -187,9 +176,8 @@ public static class Builder { private final Map> indexesBuilders = new LinkedHashMap<>(); - public Builder(Node node, CompletableFuture refreshFuture, String logPrefix) { + public Builder(Node node, String logPrefix) { this.node = node; - this.refreshFuture = refreshFuture; this.logPrefix = logPrefix; if (isCassandraV3OrAbove(node)) { this.tableNameColumn = "table_name"; @@ -323,7 +311,6 @@ private void putByKeyspaceAndTable( public CassandraSchemaRows build() { return new CassandraSchemaRows( node, - refreshFuture, dataTypeParser, keyspacesBuilder.build(), virtualKeyspacesBuilder.build(), diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java index 89ec59f3408..7100908f1ea 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactory.java @@ -18,11 +18,9 @@ import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import 
com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import java.util.concurrent.CompletableFuture; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,7 +39,7 @@ public DefaultSchemaQueriesFactory(InternalDriverContext context) { } @Override - public SchemaQueries newInstance(CompletableFuture refreshFuture) { + public SchemaQueries newInstance() { DriverChannel channel = context.getControlConnection().channel(); if (channel == null || channel.closeFuture().isDone()) { throw new IllegalStateException("Control channel not available, aborting schema refresh"); @@ -57,11 +55,10 @@ public SchemaQueries newInstance(CompletableFuture refreshFuture) { "Could not find control node metadata " + channel.getEndPoint() + ", aborting schema refresh")); - return newInstance(node, channel, refreshFuture); + return newInstance(node, channel); } - protected SchemaQueries newInstance( - Node node, DriverChannel channel, CompletableFuture refreshFuture) { + protected SchemaQueries newInstance(Node node, DriverChannel channel) { DriverExecutionProfile config = context.getConfig().getDefaultProfile(); @@ -73,13 +70,13 @@ protected SchemaQueries newInstance( "[{}] Sending schema queries to {} with DSE version {}", logPrefix, node, dseVersion); // 4.8 is the oldest version supported, which uses C* 2.1 schema if (dseVersion.compareTo(Version.V5_0_0) < 0) { - return new Cassandra21SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra21SchemaQueries(channel, node, config, logPrefix); } else if (dseVersion.compareTo(Version.V6_7_0) < 0) { // 5.0 - 6.7 uses C* 3.0 schema - return new Cassandra3SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra3SchemaQueries(channel, node, 
config, logPrefix); } else { // 6.7+ uses C* 4.0 schema - return new Cassandra4SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra4SchemaQueries(channel, node, config, logPrefix); } } else { Version cassandraVersion = node.getCassandraVersion(); @@ -96,13 +93,13 @@ protected SchemaQueries newInstance( LOG.debug( "[{}] Sending schema queries to {} with version {}", logPrefix, node, cassandraVersion); if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { - return new Cassandra21SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra21SchemaQueries(channel, node, config, logPrefix); } else if (cassandraVersion.compareTo(Version.V3_0_0) < 0) { - return new Cassandra22SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra22SchemaQueries(channel, node, config, logPrefix); } else if (cassandraVersion.compareTo(Version.V4_0_0) < 0) { - return new Cassandra3SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra3SchemaQueries(channel, node, config, logPrefix); } else { - return new Cassandra4SchemaQueries(channel, node, refreshFuture, config, logPrefix); + return new Cassandra4SchemaQueries(channel, node, config, logPrefix); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaQueriesFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaQueriesFactory.java index 94f1ae24d78..5b85053767c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaQueriesFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaQueriesFactory.java @@ -15,9 +15,6 @@ */ package com.datastax.oss.driver.internal.core.metadata.schema.queries; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import java.util.concurrent.CompletableFuture; - public interface 
SchemaQueriesFactory { - SchemaQueries newInstance(CompletableFuture refreshFuture); + SchemaQueries newInstance(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java index c96976dcb8f..74cd505a80d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/SchemaRows.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.metadata.schema.queries; import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeParser; @@ -24,7 +23,6 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.List; import java.util.Map; -import java.util.concurrent.CompletableFuture; /** * The system rows returned by the queries for a schema refresh, categorized by keyspace/table where @@ -61,10 +59,4 @@ public interface SchemaRows { Map> indexes(); DataTypeParser dataTypeParser(); - - /** - * The future to complete when the schema refresh is complete (here just to be propagated further - * down the chain). 
- */ - CompletableFuture refreshFuture(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java index 4c770e57046..037234b0632 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java @@ -137,7 +137,7 @@ public void should_parse_multiple_keyspaces() { } private MetadataRefresh parse(Consumer builderConfig) { - CassandraSchemaRows.Builder builder = new CassandraSchemaRows.Builder(NODE_3_0, null, "test"); + CassandraSchemaRows.Builder builder = new CassandraSchemaRows.Builder(NODE_3_0, "test"); builderConfig.accept(builder); SchemaRows rows = builder.build(); return new CassandraSchemaParser(rows, context).parse(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index 3b081d33cbf..52428214816 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -196,7 +196,7 @@ private SchemaRows modernRows( private SchemaRows rows( AdminRow tableRow, Iterable columnRows, Iterable indexesRows, Node node) { CassandraSchemaRows.Builder builder = - new CassandraSchemaRows.Builder(node, null, "test") + new CassandraSchemaRows.Builder(node, "test") .withTables(ImmutableList.of(tableRow)) .withColumns(columnRows); if (indexesRows != null) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java index 4e92d9253bc..4fe83cf34b6 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java @@ -86,7 +86,7 @@ public void should_parse_view() { } private SchemaRows rows(AdminRow viewRow, Iterable columnRows) { - return new CassandraSchemaRows.Builder(NODE_3_0, null, "test") + return new CassandraSchemaRows.Builder(NODE_3_0, "test") .withViews(ImmutableList.of(viewRow)) .withColumns(columnRows) .build(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java index b1703defa16..5625be4dddc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra21SchemaQueriesTest.java @@ -22,13 +22,11 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import java.util.Collections; import java.util.Queue; -import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.LinkedBlockingDeque; import org.junit.Test; @@ -45,7 +43,7 @@ public void should_query() { when(node.getCassandraVersion()).thenReturn(Version.V2_1_0); 
SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, config, "test"); CompletionStage result = queries.execute(); @@ -121,12 +119,8 @@ static class SchemaQueriesWithMockedChannel extends Cassandra21SchemaQueries { final Queue calls = new LinkedBlockingDeque<>(); SchemaQueriesWithMockedChannel( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java index 85321f8e27b..a867bfcccdb 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra22SchemaQueriesTest.java @@ -147,7 +147,7 @@ static class SchemaQueriesWithMockedChannel extends Cassandra22SchemaQueries { CompletableFuture refreshFuture, DriverExecutionProfile config, String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index e29e12179a2..e41fd78d3ed 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -22,14 +22,12 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.Queue; -import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.LinkedBlockingDeque; import org.junit.Before; @@ -65,7 +63,7 @@ public void should_query_with_keyspace_filter() { private void should_query_with_where_clause(String whereClause) { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, config, "test"); CompletionStage result = queries.execute(); // Keyspace @@ -183,7 +181,7 @@ private void should_query_with_where_clause(String whereClause) { @Test public void should_query_with_paging() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, config, "test"); CompletionStage result = queries.execute(); // Keyspace @@ -247,7 +245,7 @@ public void should_query_with_paging() { @Test public void should_ignore_malformed_rows() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, config, "test"); CompletionStage result = queries.execute(); 
// Keyspace @@ -331,7 +329,7 @@ public void should_ignore_malformed_rows() { @Test public void should_abort_if_query_fails() { SchemaQueriesWithMockedChannel queries = - new SchemaQueriesWithMockedChannel(driverChannel, node, null, config, "test"); + new SchemaQueriesWithMockedChannel(driverChannel, node, config, "test"); CompletionStage result = queries.execute(); Exception mockQueryError = new Exception("mock query error"); @@ -351,12 +349,8 @@ static class SchemaQueriesWithMockedChannel extends Cassandra3SchemaQueries { final Queue calls = new LinkedBlockingDeque<>(); SchemaQueriesWithMockedChannel( - DriverChannel channel, - Node node, - CompletableFuture refreshFuture, - DriverExecutionProfile config, - String logPrefix) { - super(channel, node, refreshFuture, config, logPrefix); + DriverChannel channel, Node node, DriverExecutionProfile config, String logPrefix) { + super(channel, node, config, logPrefix); } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java index 61094e979c8..0f220955179 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/DefaultSchemaQueriesFactoryTest.java @@ -32,7 +32,6 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.util.Optional; -import java.util.concurrent.CompletableFuture; import org.junit.Test; import org.junit.runner.RunWith; @@ -116,8 +115,7 @@ public void should_return_correct_schema_queries_impl( DefaultSchemaQueriesFactory factory = buildFactory(); @SuppressWarnings("unchecked") - SchemaQueries queries = - factory.newInstance(mockNode, mock(DriverChannel.class), 
mock(CompletableFuture.class)); + SchemaQueries queries = factory.newInstance(mockNode, mock(DriverChannel.class)); assertThat(queries.getClass()).isEqualTo(expected.getClz()); } From a3051b4ce683e696339acf04c3dc426a2047c49e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 9 Jan 2020 09:52:53 -0300 Subject: [PATCH 245/979] Fix wrong config prefix in examples of DSE authentication --- manual/core/authentication/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index c630be9dbd2..e9b8d905f1b 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -68,7 +68,7 @@ Note that, for backward compatibility with previous driver versions, you can als `DseGssApiAuthProvider` supports GSSAPI authentication against a DSE cluster secured with Kerberos: ``` -dse-java-driver { +datastax-java-driver { advanced.auth-provider { class = DseGssApiAuthProvider login-configuration { @@ -150,7 +150,7 @@ words the name of the role you want to connect as. Example for plain text authentication: ``` -dse-java-driver { +datastax-java-driver { advanced.auth-provider { class = PlainTextAuthProvider username = bob @@ -163,7 +163,7 @@ dse-java-driver { With the GSSAPI (Kerberos) provider: ``` -dse-java-driver { +datastax-java-driver { advanced.auth-provider { class = DseGssApiAuthProvider authorization-id = alice From 6fa234d3f912ad723890ed4a2b0d308d1c5410ed Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 10 Jan 2020 14:12:45 +0100 Subject: [PATCH 246/979] JAVA-2612: Fix ProtocolVersionMixedClusterIT (#38) The control connection does not reconnect anymore when the protocol version is negotiated and downgraded, see JAVA-2473. This test was still expecting a reconnection, and thus was failing since. 
--- .../core/ProtocolVersionMixedClusterIT.java | 23 ++++--------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java index a2b1851b2d3..3f548a52b57 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java @@ -34,7 +34,6 @@ import com.datastax.oss.simulacron.server.BoundCluster; import com.datastax.oss.simulacron.server.BoundNode; import com.datastax.oss.simulacron.server.BoundTopic; -import java.net.InetSocketAddress; import java.util.stream.Stream; import org.junit.Rule; import org.junit.Test; @@ -66,19 +65,12 @@ public void should_downgrade_if_peer_does_not_support_negotiated_version() { .build()) { InternalDriverContext context = (InternalDriverContext) session.getContext(); + // General version should have been downgraded to V3 assertThat(context.getProtocolVersion()).isEqualTo(DefaultProtocolVersion.V3); + // But control connection should still be using protocol V4 since node0 supports V4 + assertThat(context.getControlConnection().channel().protocolVersion()).isEqualTo(DefaultProtocolVersion.V4); - // Find out which node became the control node after the reconnection (not necessarily node 0) - InetSocketAddress controlAddress = - (InetSocketAddress) context.getControlConnection().channel().getEndPoint().resolve(); - BoundNode currentControlNode = null; - for (BoundNode node : simulacron.getNodes()) { - if (node.inetSocketAddress().equals(controlAddress)) { - currentControlNode = node; - } - } - assertThat(currentControlNode).isNotNull(); - assertThat(queries(simulacron)).hasSize(8); + assertThat(queries(simulacron)).hasSize(4); assertThat(protocolQueries(contactPoint, 4)) .containsExactly( @@ -87,13 
+79,6 @@ public void should_downgrade_if_peer_does_not_support_negotiated_version() { "SELECT * FROM system.local", "SELECT * FROM system.peers_v2", "SELECT * FROM system.peers"); - assertThat(protocolQueries(currentControlNode, 3)) - .containsExactly( - // Reconnection with protocol v3 - "SELECT cluster_name FROM system.local", - "SELECT * FROM system.local", - "SELECT * FROM system.peers_v2", - "SELECT * FROM system.peers"); } } From d245dbd8f05351cd809acdcaf1b351f9b10125e5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 10 Jan 2020 14:44:50 -0300 Subject: [PATCH 247/979] JAVA-2598: Do not use context class loader when attempting to load classes (#25) --- changelog/README.md | 2 +- .../api/core/session/SessionBuilder.java | 36 +++++++++++++++--- .../core/context/InternalDriverContext.java | 4 +- .../driver/internal/core/util/Reflection.java | 33 +++++++++-------- .../driver/osgi/support/OsgiSimpleTests.java | 2 - manual/osgi/README.md | 37 +++++++++++++------ upgrade_guide/README.md | 28 ++++++++++++++ 7 files changed, 106 insertions(+), 36 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 19d1a46b70f..70560db2368 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,7 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
- +- [bug] JAVA-2598: Do not use context class loader when attempting to load classes - [improvement] JAVA-2582: Don't propagate a future into SchemaQueriesFactory - [documentation] JAVA-2542: JAVA-2542: Improve the javadocs of methods in CqlSession - [documentation] JAVA-2609: Add docs for proxy authentication to unified driver diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index c91d22d58a9..1ad6ab2f864 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -404,11 +404,37 @@ public SelfT withKeyspace(@Nullable String keyspaceName) { /** * The {@link ClassLoader} to use to reflectively load class names defined in configuration. * - *

      This is typically only needed when using OSGi or other in environments where there are - * complex class loading requirements. - * - *

      If null, the driver attempts to use {@link Thread#getContextClassLoader()} of the current - * thread or the same {@link ClassLoader} that loaded the core driver classes. + *

      If null, the driver attempts to use the same {@link ClassLoader} that loaded the core driver + * classes, which is generally the right thing to do. + * + *

      Defining a different class loader is typically only needed in web or OSGi environments where + * there are complex class loading requirements. + * + *

      For example, if the driver jar is loaded by the web server's system class loader (that is, + * the driver jar was placed in the "/lib" folder of the web server), but the application tries to + * load a custom load balancing policy declared in the web app's "WEB-INF/lib" folder, the system + * class loader will not be able to load such class. Instead, you must use the web app's class + * loader, that you can obtain by calling {@link Thread#getContextClassLoader()}: + * + *

      {@code
      +   * CqlSession.builder()
      +   *   .addContactEndPoint(...)
      +   *   .withClassLoader(Thread.currentThread().getContextClassLoader())
      +   *   .build();
      +   * }
      + * + * Indeed, in most web environments, {@code Thread.currentThread().getContextClassLoader()} will + * return the web app's class loader, which is a child of the web server's system class loader. + * This class loader is thus capable of loading both the implemented interface and the + * implementing class, in spite of them being declared in different places. + * + *

      For OSGi deployments, it is usually not necessary to use this method. Even if the + * implemented interface and the implementing class are located in different bundles, the right + * class loader to use should be the default one (the driver bundle's class loader). In + * particular, it is not advised to rely on {@code Thread.currentThread().getContextClassLoader()} + * in OSGi environments, so you should never pass that class loader to this method. See Using + * a custom ClassLoader in our OSGi online docs for more information. */ @NonNull public SelfT withClassLoader(@Nullable ClassLoader classLoader) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index fa9caabefce..b596149db0d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -135,8 +135,8 @@ public interface InternalDriverContext extends DriverContext { /** * The {@link ClassLoader} to use to reflectively load class names defined in configuration. If - * null, the driver attempts to use {@link Thread#getContextClassLoader()} of the current thread - * or {@link com.datastax.oss.driver.internal.core.util.Reflection}'s {@link ClassLoader}. + * null, the driver attempts to use the same {@link ClassLoader} that loaded the core driver + * classes. 
*/ @Nullable ClassLoader getClassLoader(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java index d57e23c3982..933c4b4c226 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java @@ -24,6 +24,8 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.MultimapBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.Collection; @@ -37,27 +39,28 @@ public class Reflection { private static final Logger LOG = LoggerFactory.getLogger(Reflection.class); /** - * Loads a class by name. + * Loads a class by name using the given {@link ClassLoader}. * - *

      This methods tries first with the current thread's context class loader (the intent is that - * if the driver is in a low-level loader of an application server -- e.g. bootstrap or system -- - * it can still find classes in the application's class loader). If it is null, it defaults to the - * class loader that loaded the class calling this method. + *

      If the class loader is null, the class will be loaded using the class loader that loaded the + * driver. * - * @return null if the class does not exist. + * @return null if the class does not exist or could not be loaded. */ - public static Class loadClass(ClassLoader classLoader, String className) { + @Nullable + public static Class loadClass(@Nullable ClassLoader classLoader, @NonNull String className) { try { - // If input classLoader is null, use current thread's ClassLoader, if that is null, use - // default (calling class') ClassLoader. - ClassLoader cl = - classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader(); - if (cl != null) { - return Class.forName(className, true, cl); + Class clazz; + if (classLoader == null) { + LOG.trace("Attempting to load {} with driver's class loader", className); + clazz = Class.forName(className); } else { - return Class.forName(className); + LOG.trace("Attempting to load {} with {}", className, classLoader); + clazz = Class.forName(className, true, classLoader); } - } catch (ClassNotFoundException e) { + LOG.trace("Successfully loaded {}", className); + return clazz; + } catch (ClassNotFoundException | LinkageError | SecurityException e) { + LOG.debug(String.format("Could not load %s: %s", className, e), e); return null; } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java index 148abd6b08d..9108e0e4c8a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java @@ -46,8 +46,6 @@ default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { default CqlSessionBuilder sessionBuilder() { return CqlSession.builder() .addContactEndPoint(new DefaultEndPoint(new InetSocketAddress("127.0.0.1", 9042))) - // use the 
DSE driver's ClassLoader instead of the OSGI application thread's. - .withClassLoader(CqlSession.class.getClassLoader()) .withConfigLoader(configLoaderBuilder().build()); } diff --git a/manual/osgi/README.md b/manual/osgi/README.md index bfe9841de2c..b1d59684f4c 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -24,27 +24,29 @@ an explicit version of dependency in your project different than that of the dri In several places of the [driver configuration] it is possible to specify the class name of something to be instantiated by the driver such as the reconnection policy. This is accomplished -using reflection, which uses a `ClassLoader`. By default, the driver uses `Thread.currentThread -.getContextClassLoader()` if available, otherwise it uses its own `ClassLoader`. This is typically -adequate except in environments like application containers or OSGi frameworks where class loading -logic is much more deliberate and libraries are isolated from each other. +using reflection, which uses a `ClassLoader`. By default, the driver uses its own bundle's +`ClassLoader` to instantiate classes by reflection. This is typically adequate as long as the driver +bundle has access to the bundle where the implementing class resides. -If the chosen `ClassLoader` is not able to ascertain whether a loaded class is the same instance -as its expected parent type, you may encounter an error such as: +However if the default `ClassLoader` cannot load the implementing class, you may encounter an error +like this: + + java.lang.ClassNotFoundException: com.datastax.oss.MyCustomReconnectionPolicy + +Similarly, it also happens that the default `ClassLoader` is able to load the implementing class but +is not able to ascertain whether that class implements the expected parent type. 
In these cases you +may encounter an error such as: java.lang.IllegalArgumentException: Expected class ExponentialReconnectionPolicy (specified by advanced.reconnection-policy.class) to be a subtype of com.datastax.oss.driver.api.core.connection.ReconnectionPolicy This is occurring because there is a disparity in the `ClassLoader`s used between the driver code -and the `ClassLoader` used to reflectively load the class (in this case, +and the `ClassLoader` used to reflectively load the class (in this case, `ExponentialReconnectionPolicy`). -You may also encounter `ClassNotFoundException` if the `ClassLoader` does not have access to the -class being loaded. - To overcome these issues, you may specify a `ClassLoader` instance when constructing a `Session` -by using [withClassLoader()]. In a lot of cases, it may be adequate to pass in the `ClassLoader` +by using [withClassLoader()]. In a lot of cases, it may be adequate to pass in the `ClassLoader` from a `Class` that is part of the core driver, i.e.: ```java @@ -53,6 +55,19 @@ CqlSession session = CqlSession.builder() .build(); ``` +Alternatively, if you have access to the `BundleContext` (for example, if you are creating the +session in an `Activator` class) you can also obtain the bundle's `ClassLoader` the following way: + +```java +BundleContext bundleContext = ...; +Bundle bundle = bundleContext.getBundle(); +BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); +ClassLoader classLoader = bundleWiring.getClassLoader(); +CqlSession session = CqlSession.builder() + .withClassLoader(classLoader) + .build(); +``` + ## What does the "Error loading libc" DEBUG message mean? 
The driver is able to perform native system calls through [JNR] in some cases, for example to diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 6f1c28d7138..eb178656248 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -63,6 +63,34 @@ changes right away; but you will get deprecation warnings: `PlainTextProgrammaticAuthProvider`. Similarly, if you wrote a custom implementation by subclassing `DsePlainTextAuthProviderBase`, extend `PlainTextAuthProviderBase` instead. +#### Class Loader + +The default class loader used by the driver when instantiating classes by reflection changed. +Unless specified by the user, the driver will now use the same class loader that was used to load +the driver classes themselves, in order to ensure that implemented interfaces and implementing +classes are fully compatible. + +This should ensure a more streamlined experience for OSGi users, who do not need anymore to define +a specific class loader to use. + +However if you are developing a web application and your setup corresponds to the following +scenario, then you will now be required to explicitly define another class loader to use: if in your +application the driver jar is loaded by the web server's system class loader (for example, +because the driver jar was placed in the "/lib" folder of the web server), then the default class +loader will be the server's system class loader. Then if the application tries to load, say, a +custom load balancing policy declared in the web app's "WEB-INF/lib" folder, then the default class +loader will not be able to locate that class. Instead, you must use the web app's class loader, that +you can obtain in most web environments by calling `Thread.currentThread().getContextClassLoader()`: + + CqlSession.builder() + .addContactEndPoint(...) + .withClassLoader(Thread.currentThread().getContextClassLoader()) + .build(); + +See the javadocs of [SessionBuilder.withClassLoader] for more information. 
+ +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- + ### 4.1.0 #### Object mapper From 48fb1f6b799c426d8081645b786aa1bfd352d1fd Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 10 Jan 2020 14:49:12 -0300 Subject: [PATCH 248/979] Fix incorrect changelog entry --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 70560db2368..de62f4c8000 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -10,7 +10,7 @@ implemented in this single driver. - [bug] JAVA-2598: Do not use context class loader when attempting to load classes - [improvement] JAVA-2582: Don't propagate a future into SchemaQueriesFactory -- [documentation] JAVA-2542: JAVA-2542: Improve the javadocs of methods in CqlSession +- [documentation] JAVA-2542: Improve the javadocs of methods in CqlSession - [documentation] JAVA-2609: Add docs for proxy authentication to unified driver - [improvement] JAVA-2554: Improve efficiency of InsightsClient by improving supportsInsights check - [improvement] JAVA-2601: Inject Google Tag Manager scripts in generated API documentation From d456e2a656b91f5a651d74fdcf322744dfb8ea85 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 10 Jan 2020 14:53:13 -0300 Subject: [PATCH 249/979] JAVA-2556: Make ExecutionInfo compatible with any Request type (follow-up for DSE-specific features) (#35) --- .../api/core/graph/AsyncGraphResultSet.java | 15 +- .../api/core/graph/GraphExecutionInfo.java | 9 +- .../driver/api/core/graph/GraphResultSet.java | 11 +- .../graph/DefaultAsyncGraphResultSet.java | 15 +- .../core/graph/DefaultGraphExecutionInfo.java | 93 --------- .../graph/GraphExecutionInfoConverter.java | 178 ++++++++++++++++++ .../core/graph/GraphRequestHandler.java | 19 +- .../core/graph/SinglePageGraphResultSet.java | 11 +- 
.../GraphExecutionInfoConverterTest.java | 122 ++++++++++++ 9 files changed, 364 insertions(+), 109 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultGraphExecutionInfo.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverter.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverterTest.java diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java index 06c5301dd98..f1f871c67dd 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.java @@ -15,6 +15,8 @@ */ package com.datastax.dse.driver.api.core.graph; +import com.datastax.dse.driver.internal.core.graph.GraphExecutionInfoConverter; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Iterator; @@ -35,7 +37,18 @@ public interface AsyncGraphResultSet { /** The execution information for this page of results. */ @NonNull - GraphExecutionInfo getExecutionInfo(); + default ExecutionInfo getRequestExecutionInfo() { + return GraphExecutionInfoConverter.convert(getExecutionInfo()); + } + + /** + * The execution information for this page of results. + * + * @deprecated Use {@link #getRequestExecutionInfo()} instead. + */ + @Deprecated + @NonNull + com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo(); /** How many rows are left before the current page is exhausted. 
*/ int remaining(); diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java index 483ed0be782..f15275356f2 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphExecutionInfo.java @@ -22,7 +22,14 @@ import java.util.List; import java.util.Map; -/** Information about the execution of a graph statement. */ +/** + * Information about the execution of a graph statement. + * + * @deprecated This interface is not used by any driver component anymore; the driver now exposes + * instances of {@link com.datastax.oss.driver.api.core.cql.ExecutionInfo} for all Graph + * queries. + */ +@Deprecated public interface GraphExecutionInfo { /** The statement that was executed. */ diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java index f237f00ce16..561cd21682b 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphResultSet.java @@ -15,6 +15,8 @@ */ package com.datastax.dse.driver.api.core.graph; +import com.datastax.dse.driver.internal.core.graph.GraphExecutionInfoConverter; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -78,5 +80,12 @@ default List all() { * The execution information for the query that have been performed to assemble this result set. 
*/ @NonNull - GraphExecutionInfo getExecutionInfo(); + default ExecutionInfo getRequestExecutionInfo() { + return GraphExecutionInfoConverter.convert(getExecutionInfo()); + } + + /** @deprecated Use {@link #getRequestExecutionInfo()} instead. */ + @Deprecated + @NonNull + com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo(); } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java index 63e7571f18c..a4768054357 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java @@ -16,8 +16,8 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.util.CountingIterator; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Queue; @@ -27,11 +27,11 @@ @NotThreadSafe // wraps a mutable queue public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { - private final GraphExecutionInfo executionInfo; + private final ExecutionInfo executionInfo; private final CountingIterator iterator; private final Iterable currentPage; - public DefaultAsyncGraphResultSet(GraphExecutionInfo executionInfo, Queue data) { + public DefaultAsyncGraphResultSet(ExecutionInfo executionInfo, Queue data) { this.executionInfo = executionInfo; this.iterator = new GraphResultIterator(data); this.currentPage = () -> iterator; @@ -39,10 +39,17 @@ public DefaultAsyncGraphResultSet(GraphExecutionInfo executionInfo, Queue statement; - private final Node coordinator; - private 
final int speculativeExecutionCount; - private final int successfulExecutionIndex; - private final List> errors; - private final List warnings; - private final Map customPayload; - - public DefaultGraphExecutionInfo( - GraphStatement statement, - Node coordinator, - int speculativeExecutionCount, - int successfulExecutionIndex, - List> errors, - Frame frame) { - this.statement = statement; - this.coordinator = coordinator; - this.speculativeExecutionCount = speculativeExecutionCount; - this.successfulExecutionIndex = successfulExecutionIndex; - this.errors = errors; - - // Note: the collections returned by the protocol layer are already unmodifiable - this.warnings = (frame == null) ? Collections.emptyList() : frame.warnings; - this.customPayload = (frame == null) ? Collections.emptyMap() : frame.customPayload; - } - - @Override - public GraphStatement getStatement() { - return statement; - } - - @Override - public Node getCoordinator() { - return coordinator; - } - - @Override - public int getSpeculativeExecutionCount() { - return speculativeExecutionCount; - } - - @Override - public int getSuccessfulExecutionIndex() { - return successfulExecutionIndex; - } - - @Override - public List> getErrors() { - // Assume this method will be called 0 or 1 time, so we create the unmodifiable wrapper on - // demand. - return (errors == null) ? Collections.emptyList() : Collections.unmodifiableList(errors); - } - - @Override - public List getWarnings() { - return warnings; - } - - @Override - public Map getIncomingPayload() { - return customPayload; - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverter.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverter.java new file mode 100644 index 00000000000..344b84ad050 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverter.java @@ -0,0 +1,178 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.QueryTrace; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.UUID; +import java.util.concurrent.CompletionStage; + +/** + * Handles conversions from / to GraphExecutionInfo and ExecutionInfo since GraphExecutionInfo has + * been deprecated by JAVA-2556. + */ +public class GraphExecutionInfoConverter { + + /** + * Called exclusively from default methods in API interfaces {@link + * com.datastax.dse.driver.api.core.graph.GraphResultSet} and {@link + * com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet}. Graph result set implementations + * do not use this method but rather the other one below. 
+ */ + @SuppressWarnings("deprecation") + public static ExecutionInfo convert( + com.datastax.dse.driver.api.core.graph.GraphExecutionInfo graphExecutionInfo) { + return new ExecutionInfo() { + + @NonNull + @Override + public Request getRequest() { + return graphExecutionInfo.getStatement(); + } + + @NonNull + @Override + public Statement getStatement() { + throw new ClassCastException("GraphStatement cannot be cast to Statement"); + } + + @Nullable + @Override + public Node getCoordinator() { + return graphExecutionInfo.getCoordinator(); + } + + @Override + public int getSpeculativeExecutionCount() { + return graphExecutionInfo.getSpeculativeExecutionCount(); + } + + @Override + public int getSuccessfulExecutionIndex() { + return graphExecutionInfo.getSuccessfulExecutionIndex(); + } + + @NonNull + @Override + public List> getErrors() { + return graphExecutionInfo.getErrors(); + } + + @Nullable + @Override + public ByteBuffer getPagingState() { + return null; + } + + @NonNull + @Override + public List getWarnings() { + return graphExecutionInfo.getWarnings(); + } + + @NonNull + @Override + public Map getIncomingPayload() { + return graphExecutionInfo.getIncomingPayload(); + } + + @Override + public boolean isSchemaInAgreement() { + return true; + } + + @Nullable + @Override + public UUID getTracingId() { + return null; + } + + @NonNull + @Override + public CompletionStage getQueryTraceAsync() { + return CompletableFutures.failedFuture( + new IllegalStateException("Tracing was disabled for this request")); + } + + @Override + public int getResponseSizeInBytes() { + return -1; + } + + @Override + public int getCompressedResponseSizeInBytes() { + return -1; + } + }; + } + + /** + * Called from graph result set implementations, to convert the original {@link ExecutionInfo} + * produced by request handlers into the (deprecated) type GraphExecutionInfo. 
+ */ + @SuppressWarnings("deprecation") + public static com.datastax.dse.driver.api.core.graph.GraphExecutionInfo convert( + ExecutionInfo executionInfo) { + return new com.datastax.dse.driver.api.core.graph.GraphExecutionInfo() { + + @Override + public GraphStatement getStatement() { + return (GraphStatement) executionInfo.getRequest(); + } + + @Override + public Node getCoordinator() { + return executionInfo.getCoordinator(); + } + + @Override + public int getSpeculativeExecutionCount() { + return executionInfo.getSpeculativeExecutionCount(); + } + + @Override + public int getSuccessfulExecutionIndex() { + return executionInfo.getSuccessfulExecutionIndex(); + } + + @Override + public List> getErrors() { + return executionInfo.getErrors(); + } + + @Override + public List getWarnings() { + return executionInfo.getWarnings(); + } + + @Override + public Map getIncomingPayload() { + return executionInfo.getIncomingPayload(); + } + }; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 23f1b41b869..a73180d8160 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -19,7 +19,6 @@ import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; @@ -29,6 +28,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; 
import com.datastax.oss.driver.api.core.connection.FrameTooLongException; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; @@ -48,6 +48,7 @@ import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.session.DefaultSession; @@ -114,7 +115,7 @@ public class GraphRequestHandler implements Throttled { /** * How many speculative executions have started (excluding the initial execution), whether they * have completed or not. We track this in order to fill {@link - * GraphExecutionInfo#getSpeculativeExecutionCount()}. + * ExecutionInfo#getSpeculativeExecutionCount()}. 
*/ private final AtomicInteger startedSpeculativeExecutionsCount; @@ -490,7 +491,7 @@ public void onResponse(Frame responseFrame) { private void setFinalResult( Result resultMessage, Frame responseFrame, PerRequestCallback callback) { try { - GraphExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); + ExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); Queue graphNodes = new ArrayDeque<>(); for (List row : ((Rows) resultMessage).getData()) { @@ -519,15 +520,19 @@ private void setFinalResult( } } - private GraphExecutionInfo buildExecutionInfo( - PerRequestCallback callback, Frame responseFrame) { - return new DefaultGraphExecutionInfo( + private ExecutionInfo buildExecutionInfo(PerRequestCallback callback, Frame responseFrame) { + return new DefaultExecutionInfo( graphStatement, callback.node, startedSpeculativeExecutionsCount.get(), callback.execution, errors, - responseFrame); + null, + responseFrame, + true, + session, + context, + executionProfile); } private void processErrorResponse(Error errorMessage) { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java index 3f0f81a11bd..c945821ff92 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java @@ -16,9 +16,9 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Iterator; import 
net.jcip.annotations.NotThreadSafe; @@ -35,7 +35,14 @@ public SinglePageGraphResultSet(AsyncGraphResultSet onlyPage) { @NonNull @Override - public GraphExecutionInfo getExecutionInfo() { + public ExecutionInfo getRequestExecutionInfo() { + return onlyPage.getRequestExecutionInfo(); + } + + @NonNull + @Override + @Deprecated + public com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo() { return onlyPage.getExecutionInfo(); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverterTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverterTest.java new file mode 100644 index 00000000000..467972523e5 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphExecutionInfoConverterTest.java @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.ServerError; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import java.util.AbstractMap.SimpleEntry; +import java.util.Collections; +import java.util.List; +import java.util.Map.Entry; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.Strict.class) +@SuppressWarnings("deprecation") +public class GraphExecutionInfoConverterTest { + + @Mock GraphStatement request; + @Mock Node node; + + private List> errors; + private List warnings; + private ImmutableMap payload; + + @Before + public void setUp() { + errors = + Collections.singletonList( + new SimpleEntry<>(node, new ServerError(node, "this is a server error"))); + warnings = Collections.singletonList("this is a warning"); + payload = ImmutableMap.of("key", Bytes.fromHexString("0xcafebabe")); + } + + @Test + public void should_convert_to_graph_execution_info() { + + // given + ExecutionInfo executionInfo = mock(ExecutionInfo.class); + when(executionInfo.getRequest()).thenReturn(request); + when(executionInfo.getCoordinator()).thenReturn(node); + when(executionInfo.getSpeculativeExecutionCount()).thenReturn(42); + when(executionInfo.getSuccessfulExecutionIndex()).thenReturn(10); + when(executionInfo.getErrors()).thenReturn(errors); + 
when(executionInfo.getWarnings()).thenReturn(warnings); + when(executionInfo.getIncomingPayload()).thenReturn(payload); + + // when + com.datastax.dse.driver.api.core.graph.GraphExecutionInfo graphExecutionInfo = + GraphExecutionInfoConverter.convert(executionInfo); + + // then + assertThat(graphExecutionInfo.getStatement()).isSameAs(request); + assertThat(graphExecutionInfo.getCoordinator()).isSameAs(node); + assertThat(graphExecutionInfo.getSpeculativeExecutionCount()).isEqualTo(42); + assertThat(graphExecutionInfo.getSuccessfulExecutionIndex()).isEqualTo(10); + assertThat(graphExecutionInfo.getErrors()).isEqualTo(errors); + assertThat(graphExecutionInfo.getWarnings()).isEqualTo(warnings); + assertThat(graphExecutionInfo.getIncomingPayload()).isEqualTo(payload); + } + + @Test + public void should_convert_from_graph_execution_info() { + + // given + com.datastax.dse.driver.api.core.graph.GraphExecutionInfo graphExecutionInfo = + mock(com.datastax.dse.driver.api.core.graph.GraphExecutionInfo.class); + when(graphExecutionInfo.getStatement()).thenAnswer(args -> request); + when(graphExecutionInfo.getCoordinator()).thenReturn(node); + when(graphExecutionInfo.getSpeculativeExecutionCount()).thenReturn(42); + when(graphExecutionInfo.getSuccessfulExecutionIndex()).thenReturn(10); + when(graphExecutionInfo.getErrors()).thenReturn(errors); + when(graphExecutionInfo.getWarnings()).thenReturn(warnings); + when(graphExecutionInfo.getIncomingPayload()).thenReturn(payload); + + // when + ExecutionInfo executionInfo = GraphExecutionInfoConverter.convert(graphExecutionInfo); + + // then + assertThat(executionInfo.getRequest()).isSameAs(request); + assertThatThrownBy(executionInfo::getStatement).isInstanceOf(ClassCastException.class); + assertThat(executionInfo.getCoordinator()).isSameAs(node); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(42); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(10); + 
assertThat(executionInfo.getErrors()).isEqualTo(errors); + assertThat(executionInfo.getWarnings()).isEqualTo(warnings); + assertThat(executionInfo.getIncomingPayload()).isEqualTo(payload); + assertThat(executionInfo.getPagingState()).isNull(); + assertThat(executionInfo.isSchemaInAgreement()).isTrue(); + assertThat(executionInfo.getQueryTraceAsync()).isCompletedExceptionally(); + assertThatThrownBy(executionInfo::getQueryTrace) + .isInstanceOf(IllegalStateException.class) + .hasMessage("Tracing was disabled for this request"); + assertThat(executionInfo.getResponseSizeInBytes()).isEqualTo(-1L); + assertThat(executionInfo.getCompressedResponseSizeInBytes()).isEqualTo(-1L); + } +} From 09c1f6aef85f6b83ea03dbe3537684c8d38f183c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 10 Jan 2020 14:55:39 -0300 Subject: [PATCH 250/979] JAVA-2529: Standardize optional/excludable dependency checks (#24) --- changelog/README.md | 1 + .../core/context/DefaultDriverContext.java | 49 +++++------- .../oss/driver/internal/core/os/Native.java | 12 +-- .../internal/core/protocol/Lz4Compressor.java | 11 +-- .../core/protocol/SnappyCompressor.java | 10 +-- .../internal/core/util/DependencyCheck.java | 74 +++++++++++++++++++ .../context/MockedDriverContextFactory.java | 1 - .../oss/driver/osgi/OsgiVanillaIT.java | 2 +- .../mapper/processor/DecoratedMessager.java | 8 +- .../processor/dao/DaoMethodGenerator.java | 8 +- 10 files changed, 116 insertions(+), 60 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java diff --git a/changelog/README.md b/changelog/README.md index de62f4c8000..a04360b0bd2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
+- [improvement] JAVA-2529: Standardize optional/excludable dependency checks - [bug] JAVA-2598: Do not use context class loader when attempting to load classes - [improvement] JAVA-2582: Don't propagate a future into SchemaQueriesFactory - [documentation] JAVA-2542: Improve the javadocs of methods in CqlSession diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index aa057fd49d4..994763980d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -91,7 +91,7 @@ import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; -import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; @@ -524,51 +524,44 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { processors.add(continuousCqlRequestSyncProcessor); // graph requests (sync and async) - try { - Class.forName("org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal"); + if (DependencyCheck.TINKERPOP.isPresent()) { GraphRequestAsyncProcessor graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(); GraphRequestSyncProcessor graphRequestSyncProcessor = new GraphRequestSyncProcessor(graphRequestAsyncProcessor); processors.add(graphRequestAsyncProcessor); processors.add(graphRequestSyncProcessor); - } catch (ClassNotFoundException | LinkageError 
error) { - Loggers.warnWithException( - LOG, - "Could not register Graph extensions; Tinkerpop API might be missing from classpath", - error); + } else { + LOG.info( + "Could not register Graph extensions; " + + "this is normal if Tinkerpop was explicitly excluded from classpath"); } // reactive requests (regular and continuous) - try { - Class.forName("org.reactivestreams.Publisher"); + if (DependencyCheck.REACTIVE_STREAMS.isPresent()) { CqlRequestReactiveProcessor cqlRequestReactiveProcessor = new CqlRequestReactiveProcessor(cqlRequestAsyncProcessor); ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = new ContinuousCqlRequestReactiveProcessor(continuousCqlRequestAsyncProcessor); processors.add(cqlRequestReactiveProcessor); processors.add(continuousCqlRequestReactiveProcessor); - } catch (ClassNotFoundException | LinkageError error) { - Loggers.warnWithException( - LOG, + } else { + LOG.info( "Could not register Reactive extensions; " - + "Reactive Streams API might be missing from classpath", - error); + + "this is normal if Reactive Streams was explicitly excluded from classpath"); } - return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } protected CodecRegistry buildCodecRegistry(String logPrefix, List> codecs) { MutableCodecRegistry registry = new DefaultCodecRegistry(logPrefix); registry.register(codecs); - registry.register(DseTypeCodecs.DATE_RANGE); - try { - Class.forName("com.esri.core.geometry.ogc.OGCGeometry"); + if (DependencyCheck.ESRI.isPresent()) { registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); - } catch (ClassNotFoundException | LinkageError error) { - Loggers.warnWithException( - LOG, "Could not register Geo codecs; ESRI API might be missing from classpath", error); + } else { + LOG.info( + "Could not register Geo codecs; " + + "this is normal if ESRI was explicitly excluded from classpath"); } return registry; } @@ -685,17 +678,13 @@ 
protected Optional buildAuthProvider(AuthProvider authProviderFrom } protected List buildLifecycleListeners() { - try { - Class.forName("com.fasterxml.jackson.core.JsonParser"); - Class.forName("com.fasterxml.jackson.databind.ObjectMapper"); + if (DependencyCheck.JACKSON.isPresent()) { return Collections.singletonList(new InsightsClientLifecycleListener(this, initStackTrace)); - } catch (ClassNotFoundException | LinkageError error) { + } else { if (config.getDefaultProfile().getBoolean(DseDriverOption.MONITOR_REPORTING_ENABLED)) { - Loggers.warnWithException( - LOG, + LOG.info( "Could not initialize Insights monitoring; " - + "Jackson libraries might be missing from classpath", - error); + + "this is normal if Jackson was explicitly excluded from classpath"); } return Collections.emptyList(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java index 02de90d3cf4..7b85a5d9434 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.os; +import com.datastax.oss.driver.internal.core.util.Reflection; import java.lang.reflect.Method; import jnr.ffi.LibraryLoader; import jnr.ffi.Platform; @@ -188,13 +189,14 @@ private static class PlatformLoader { private static final Platform PLATFORM; static { - Platform platform; + Platform platform = null; try { - Class platformClass = Class.forName("jnr.ffi.Platform"); - Method getNativePlatform = platformClass.getMethod("getNativePlatform"); - platform = (Platform) getNativePlatform.invoke(null); + Class platformClass = Reflection.loadClass(null, "jnr.ffi.Platform"); + if (platformClass != null) { + Method getNativePlatform = platformClass.getMethod("getNativePlatform"); + platform = (Platform) getNativePlatform.invoke(null); + } } catch (Throwable t) { - 
platform = null; LOG.debug("Error loading jnr.ffi.Platform class, this class will not be available.", t); } PLATFORM = platform; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java index e1bce12fc11..84de9f36aae 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java @@ -16,6 +16,8 @@ package com.datastax.oss.driver.internal.core.protocol; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; import io.netty.buffer.ByteBuf; import java.nio.ByteBuffer; import net.jcip.annotations.ThreadSafe; @@ -34,17 +36,16 @@ public class Lz4Compressor extends ByteBufCompressor { private final LZ4FastDecompressor decompressor; public Lz4Compressor(DriverContext context) { - try { + if (DependencyCheck.LZ4.isPresent()) { LZ4Factory lz4Factory = LZ4Factory.fastestInstance(); LOG.info("[{}] Using {}", context.getSessionName(), lz4Factory.toString()); this.compressor = lz4Factory.fastCompressor(); this.decompressor = lz4Factory.fastDecompressor(); - } catch (NoClassDefFoundError e) { + } else { throw new IllegalStateException( - "Error initializing compressor, make sure that the LZ4 library is in the classpath " + "Could not find the LZ4 library on the classpath " + "(the driver declares it as an optional dependency, " - + "so you need to declare it explicitly)", - e); + + "so you need to declare it explicitly)"); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java index 3b50220ef95..9461a1a0a41 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.protocol; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; import io.netty.buffer.ByteBuf; import java.io.IOException; import java.nio.ByteBuffer; @@ -26,14 +27,11 @@ public class SnappyCompressor extends ByteBufCompressor { public SnappyCompressor(@SuppressWarnings("unused") DriverContext context) { - try { - Snappy.getNativeLibraryVersion(); - } catch (NoClassDefFoundError e) { + if (!DependencyCheck.SNAPPY.isPresent()) { throw new IllegalStateException( - "Error initializing compressor, make sure that the Snappy library is in the classpath " + "Could not find the Snappy library on the classpath " + "(the driver declares it as an optional dependency, " - + "so you need to declare it explicitly)", - e); + + "so you need to declare it explicitly)"); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java new file mode 100644 index 00000000000..1f3b6ae4480 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java @@ -0,0 +1,74 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; + +/** + * A set of driver optional dependencies and a common mechanism to test the presence of such + * dependencies on the application's classpath. + */ +public enum DependencyCheck { + SNAPPY("org.xerial.snappy.Snappy"), + LZ4("net.jpountz.lz4.LZ4Compressor"), + ESRI("com.esri.core.geometry.ogc.OGCGeometry"), + TINKERPOP( + // gremlin-core + "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", + // tinkergraph-gremlin + "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0"), + REACTIVE_STREAMS("org.reactivestreams.Publisher"), + JACKSON( + // jackson-core + "com.fasterxml.jackson.core.JsonParser", + // jackson-databind + "com.fasterxml.jackson.databind.ObjectMapper"), + ; + + /** + * The fully-qualified name of classes that must exist for the dependency to work properly; we use + * them to test the presence of the whole dependency on the classpath, including its transitive + * dependencies if applicable. This assumes that if these classes are present, then the entire + * library is present and functional, and vice versa. + * + *

      Note: some of the libraries declared here may be shaded; in these cases the shade plugin + * will replace the package names listed above with names starting with {@code + * com.datastax.oss.driver.shaded.*}, but the presence check would still work as expected. + */ + @SuppressWarnings("ImmutableEnumChecker") + private final ImmutableSet fqcns; + + DependencyCheck(String... fqcns) { + this.fqcns = ImmutableSet.copyOf(fqcns); + } + + /** + * Checks if the dependency is present on the application's classpath and is loadable. + * + * @return true if the dependency is present and loadable, false otherwise. + */ + public boolean isPresent() { + for (String fqcn : fqcns) { + // Always use the driver class loader, assuming that the driver classes and + // the dependency classes are either being loaded by the same class loader, + // or – as in OSGi deployments – by two distinct, but compatible class loaders. + if (Reflection.loadClass(null, fqcn) == null) { + return false; + } + } + return true; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index 776b227fe8c..91f315461b9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -61,7 +61,6 @@ public static DefaultDriverContext defaultDriverContext( .withRequestTracker(mock(RequestTracker.class)) .withLocalDatacenters(Maps.newHashMap()) .withNodeFilters(Maps.newHashMap()) - .withClassLoader(mock(ClassLoader.class)) .build(); return new DefaultDriverContext(configLoader, args); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java index 39a2fcb3935..a9f445fb968 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java @@ -94,7 +94,7 @@ private void assertLogMessagesPresent() { TestAppender appender = (TestAppender) logger.getAppender("test"); List warnLogs = appender.events.stream() - .filter(event -> event.getLevel().toInt() >= Level.WARN.toInt()) + .filter(event -> event.getLevel().toInt() == Level.INFO.toInt()) .map(ILoggingEvent::getFormattedMessage) .collect(Collectors.toList()); assertThat(warnLogs).hasSize(3); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java index 36c49e681c1..fb3c6d7ddb7 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.mapper.processor; +import com.datastax.oss.driver.internal.core.util.Reflection; import edu.umd.cs.findbugs.annotations.NonNull; import javax.annotation.processing.Messager; import javax.lang.model.element.Element; @@ -164,12 +165,7 @@ void print(Diagnostic.Kind level, String template, Object... 
arguments) { } private boolean isSourceFile(TypeElement element) { - try { - Class.forName(element.getQualifiedName().toString()); - return false; - } catch (ClassNotFoundException e) { - return true; - } + return Reflection.loadClass(null, element.getQualifiedName().toString()) == null; } } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 9e43425742d..6608c4702b4 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.annotations.StatementAttributes; import com.datastax.oss.driver.api.querybuilder.QueryBuilder; +import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.squareup.javapoet.CodeBlock; @@ -231,11 +232,6 @@ protected void warnIfCqlNamePresent(List parameters) protected boolean isFromClassFile() { TypeElement enclosingElement = (TypeElement) methodElement.getEnclosingElement(); - try { - Class.forName(enclosingElement.getQualifiedName().toString()); - return true; - } catch (ClassNotFoundException e) { - return false; - } + return Reflection.loadClass(null, enclosingElement.getQualifiedName().toString()) != null; } } From 1e818d866a68d66ff1d498fd5a5cba60f59d7b09 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 10 Jan 2020 15:15:58 -0300 Subject: [PATCH 251/979] Remove unused import --- .../oss/driver/internal/core/protocol/Lz4Compressor.java | 1 - 1 file changed, 1 deletion(-) diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java index 84de9f36aae..3dd47d17320 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.protocol; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.util.DependencyCheck; import io.netty.buffer.ByteBuf; import java.nio.ByteBuffer; From a695330f2435648ae4c08aa361e2577bb9540d5c Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 10 Jan 2020 12:13:47 -0800 Subject: [PATCH 252/979] Fix formatting issue --- .../oss/driver/core/ProtocolVersionMixedClusterIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java index 3f548a52b57..6acdeb4b042 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java @@ -68,7 +68,8 @@ public void should_downgrade_if_peer_does_not_support_negotiated_version() { // General version should have been downgraded to V3 assertThat(context.getProtocolVersion()).isEqualTo(DefaultProtocolVersion.V3); // But control connection should still be using protocol V4 since node0 supports V4 - assertThat(context.getControlConnection().channel().protocolVersion()).isEqualTo(DefaultProtocolVersion.V4); + assertThat(context.getControlConnection().channel().protocolVersion()) + 
.isEqualTo(DefaultProtocolVersion.V4); assertThat(queries(simulacron)).hasSize(4); From f6c270ebc1b7c60e6c4b15cd089668f76cdd4b87 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 10 Jan 2020 12:13:02 -0800 Subject: [PATCH 253/979] Introduce an option to disable slow replica avoidance on the default LBP --- .../driver/api/core/config/DefaultDriverOption.java | 7 +++++++ .../loadbalancing/DefaultLoadBalancingPolicy.java | 11 ++++++++++- core/src/main/resources/reference.conf | 11 +++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 18d708fce55..3de2da35bce 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -784,6 +784,13 @@ public enum DefaultDriverOption implements DriverOption { NETTY_DAEMON("advanced.netty.daemon"), CLOUD_SECURE_CONNECT_BUNDLE("basic.cloud.secure-connect-bundle"), + + /** + * Whether the slow replica avoidance should be enabled in the default LBP. + * + *

      Value-type: boolean + */ + LOAD_BALANCING_POLICY_SLOW_AVOIDANCE("basic.load-balancing-policy.slow-replica-avoidance"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 97c38c2b13a..bc609c2ece3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -96,15 +96,20 @@ public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy impleme protected final Map responseTimes = new ConcurrentHashMap<>(); protected final Map upTimes = new ConcurrentHashMap<>(); + private final boolean avoidSlowReplicas; public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { super(context, profileName); + this.avoidSlowReplicas = + profile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true); } @Override public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { super.init(nodes, distanceReporter); - ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); + if (avoidSlowReplicas) { + ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); + } } @NonNull @@ -116,6 +121,10 @@ protected Optional discoverLocalDc(@NonNull Map nodes) { @NonNull @Override public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { + if (!avoidSlowReplicas) { + return super.newQueryPlan(request, session); + } + // Take a snapshot since the set is concurrent: Object[] currentNodes = liveNodes.toArray(); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 4784ede5c5b..9056bda0e2e 100644 --- a/core/src/main/resources/reference.conf +++ 
b/core/src/main/resources/reference.conf @@ -189,6 +189,17 @@ datastax-java-driver { # topology or state change: if it returns false, the node will be set at distance IGNORED # (meaning the driver won't ever connect to it), and never included in any query plan. // filter.class= + + # Whether to enable the slow replica avoidance mechanism in DefaultLoadBalancingPolicy. + # + # The default policy always moves replicas first in the query plan (if routing information can + # be determined for the current request). However: + # - if this option is true, it also applies a custom algorithm that takes the responsiveness and + # uptime of each replica into account to order them among each other; + # - if this option is false, replicas are simply shuffled. + # + # If this option is not defined, the driver defaults to true. + slow-replica-avoidance = true } basic.cloud { # The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a From d4f45752f03c782bd209410b485dd36b0ae56888 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 10 Jan 2020 15:36:00 -0800 Subject: [PATCH 254/979] Add missing documentation for cloud option --- .../oss/driver/api/core/config/DefaultDriverOption.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 3de2da35bce..913983e2962 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -783,6 +783,12 @@ public enum DefaultDriverOption implements DriverOption { */ NETTY_DAEMON("advanced.netty.daemon"), + /** + * The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + * service. + * + *

      Value-type: {@link String} + */ CLOUD_SECURE_CONNECT_BUNDLE("basic.cloud.secure-connect-bundle"), /** From 00109b63d4b8a48f8073b5f7b1a4eea4edf68f76 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 10 Jan 2020 16:11:15 -0800 Subject: [PATCH 255/979] Apply changes from riptano/java-dse-driver 33f88f9..014cf1473 --- .../core/auth/DseGssApiAuthProviderIT.java | 21 ++++++++++------- .../core/auth/DsePlainTextAuthProviderIT.java | 21 ++++++++++------- .../core/auth/DseProxyAuthenticationIT.java | 15 ++++++++---- .../metadata/schema/AbstractMetadataIT.java | 23 ++++++------------- 4 files changed, 43 insertions(+), 37 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java index 725be0e3fa9..1778161e175 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.List; import org.junit.Assume; import org.junit.ClassRule; import org.junit.Test; @@ -78,10 +79,7 @@ public void should_not_authenticate_if_no_ticket_in_cache() { try (CqlSession session = ads.newTicketSession()) { fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException e) { - assertThat(e.getErrors().size()).isEqualTo(1); - for (Throwable t : e.getErrors().values()) { - assertThat(t).isInstanceOf(AuthenticationException.class); - } + verifyException(e); } } @@ -99,10 +97,17 @@ public void should_not_authenticate_if_keytab_does_not_map_to_valid_principal() ads.newKeyTabSession(ads.getUnknownPrincipal(), ads.getUnknownKeytab().getAbsolutePath())) { 
fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException e) { - assertThat(e.getErrors().size()).isEqualTo(1); - for (Throwable t : e.getErrors().values()) { - assertThat(t).isInstanceOf(AuthenticationException.class); - } + verifyException(e); } } + + private void verifyException(AllNodesFailedException anfe) { + assertThat(anfe.getAllErrors()).hasSize(1); + List errors = anfe.getAllErrors().values().iterator().next(); + assertThat(errors).hasSize(1); + Throwable firstError = errors.get(0); + assertThat(firstError) + .isInstanceOf(AuthenticationException.class) + .hasMessageContaining("Authentication error on node /127.0.0.1:9042"); + } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java index 670566600e4..08629a1f17e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import java.util.List; import java.util.concurrent.TimeUnit; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -95,10 +96,7 @@ public void should_not_connect_with_invalid_credentials() { .build())) { fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException e) { - assertThat(e.getErrors().size()).isEqualTo(1); - for (Throwable t : e.getErrors().values()) { - assertThat(t).isInstanceOf(AuthenticationException.class); - } + verifyException(e); } } @@ -114,10 +112,17 @@ public void should_not_connect_without_credentials() { .build())) { fail("Expected AllNodesFailedException"); 
} catch (AllNodesFailedException e) { - assertThat(e.getErrors().size()).isEqualTo(1); - for (Throwable t : e.getErrors().values()) { - assertThat(t).isInstanceOf(AuthenticationException.class); - } + verifyException(e); } } + + private void verifyException(AllNodesFailedException anfe) { + assertThat(anfe.getAllErrors()).hasSize(1); + List errors = anfe.getAllErrors().values().iterator().next(); + assertThat(errors).hasSize(1); + Throwable firstError = errors.get(0); + assertThat(firstError) + .isInstanceOf(AuthenticationException.class) + .hasMessageContaining("Authentication error on node /127.0.0.1:9042"); + } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java index c0bf5b7d36a..726be74a691 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; +import java.util.List; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -244,11 +245,15 @@ public void should_not_allow_kerberos_unauthorized_user_to_execute_as() { } private void verifyException(AllNodesFailedException anfe) { - Throwable firstError = anfe.getErrors().values().iterator().next(); - assertThat(firstError).isInstanceOf(AuthenticationException.class); - assertThat(firstError.getMessage()) - .contains( - "Authentication error on node /127.0.0.1:9042: server replied 'Failed to login. 
Please re-try.'"); + assertThat(anfe.getAllErrors()).hasSize(1); + List errors = anfe.getAllErrors().values().iterator().next(); + assertThat(errors).hasSize(1); + Throwable firstError = errors.get(0); + assertThat(firstError) + .isInstanceOf(AuthenticationException.class) + .hasMessageContaining( + "Authentication error on node /127.0.0.1:9042: " + + "server replied with 'Failed to login. Please re-try.' to AuthResponse request"); } private void verifyException(UnauthorizedException ue, String user) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java index 21830d76d3b..e77304412b8 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/AbstractMetadataIT.java @@ -29,21 +29,6 @@ @Category(ParallelizableTests.class) public abstract class AbstractMetadataIT { - /** - * Asserts the presence of a Keyspace and that it's name matches the keyspace associated with the - * Session Rule. - */ - protected void assertKeyspace(Optional keyspaceOpt) { - // assert the keyspace - assertThat(keyspaceOpt) - .hasValueSatisfying( - keyspace -> { - assertThat(keyspace).isInstanceOf(DseKeyspaceMetadata.class); - assertThat(keyspace.getName().asInternal()) - .isEqualTo(getSessionRule().keyspace().asInternal()); - }); - } - /* Convenience method for executing a CQL statement using the test's Session Rule. 
*/ public void execute(String cql) { getSessionRule() @@ -61,7 +46,13 @@ public void execute(String cql) { public DseKeyspaceMetadata getKeyspace() { Optional keyspace = getSessionRule().session().getMetadata().getKeyspace(getSessionRule().keyspace()); - assertKeyspace(keyspace); + assertThat(keyspace) + .isPresent() + .hasValueSatisfying( + ks -> { + assertThat(ks).isInstanceOf(DseKeyspaceMetadata.class); + assertThat(ks.getName()).isEqualTo(getSessionRule().keyspace()); + }); return ((DseKeyspaceMetadata) keyspace.get()); } From d3631922b0625da855f363334180613ccb536bf8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 13 Jan 2020 16:03:44 -0800 Subject: [PATCH 256/979] JAVA-1975: Document the importance of using specific TinkerPop version --- changelog/README.md | 1 + manual/core/dse/graph/README.md | 5 +++++ manual/core/integration/README.md | 5 +++++ 3 files changed, 11 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index a04360b0bd2..6a038bb2548 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. 
+- [documentation] JAVA-1975: Document the importance of using specific TinkerPop version - [improvement] JAVA-2529: Standardize optional/excludable dependency checks - [bug] JAVA-2598: Do not use context class loader when attempting to load classes - [improvement] JAVA-2582: Don't propagate a future into SchemaQueriesFactory diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index c78f60f6a67..3d8191ffc1e 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -13,6 +13,11 @@ all, it is possible to exclude it to minimize the number of runtime dependencies cannot be found at runtime, graph queries won't be available and a warning will be logged, but the driver will otherwise operate normally (this is also valid for OSGi deployments). +If you do use graph, it is important to keep the precise TinkerPop version that the driver depends +on: unlike the driver, TinkerPop does not follow semantic versioning, so even a patch version change +(e.g. 3.3.0 vs 3.3.3) could introduce incompatibilities. So do not declare an explicit dependency in +your application, let the driver pull it transitively. + ### Overview There are 3 ways to execute graph requests: diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index fa94ff80899..aee714ffaf3 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -430,6 +430,11 @@ If you don't use DSE graph at all, you can exclude the dependencies: ``` +If you do use graph, it is important to keep the precise TinkerPop version that the driver depends +on: unlike the driver, TinkerPop does not follow semantic versioning, so even a patch version change +(e.g. 3.3.0 vs 3.3.3) could introduce incompatibilities. So do not declare an explicit dependency in +your application, let the driver pull it transitively. 
+ #### Documenting annotations The driver team uses annotations to document certain aspects of the code: From fac21320bd0f3eba179eab7aeb03f63c03c1c9fb Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 13 Jan 2020 15:48:13 -0800 Subject: [PATCH 257/979] JAVA-2607: Improve visibility of driver dependencies section --- changelog/README.md | 1 + manual/core/compression/README.md | 3 ++- manual/core/dse/geotypes/README.md | 10 ++++++---- manual/core/dse/graph/README.md | 8 +++++--- manual/core/integration/README.md | 3 ++- manual/osgi/README.md | 10 ++++++---- upgrade_guide/README.md | 6 +++--- 7 files changed, 25 insertions(+), 16 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 6a038bb2548..045963379c0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -8,6 +8,7 @@ This version brings in all functionality that was formerly only in the DataStax such as the built-in support for reactive programming. Going forward, all new features will be implemented in this single driver. +- [documentation] JAVA-2607: Improve visibility of driver dependencies section - [documentation] JAVA-1975: Document the importance of using specific TinkerPop version - [improvement] JAVA-2529: Standardize optional/excludable dependency checks - [bug] JAVA-2598: Do not use context class loader when attempting to load classes diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index d71cb130fa7..32d18a8ac2f 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -36,7 +36,8 @@ better performance and compression ratios over Snappy. Both implementations rely on third-party libraries, declared by the driver as *optional* dependencies; if you enable compression, you need to explicitly depend on the corresponding library -to pull it into your project. +to pull it into your project (see the [Integration>Driver +dependencies](../integration/#driver-dependencies) section for more details). 
### LZ4 diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 8153de337a2..353497a8824 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -3,11 +3,13 @@ The driver comes with client-side representations of the DSE geospatial data types: [Point], [LineString] and [Polygon]. -Note: geospatial types require the [ESRI] library version 1.2 to be present on the classpath. The +Note: geospatial types require the [ESRI] library version 1.2 to be present on the classpath. The DSE driver has a non-optional dependency on that library, but if your application does not use -geotypes at all, it is possible to exclude it to minimize the number of runtime dependencies. If the -library cannot be found at runtime, geospatial types won't be available and a warning will be -logged, but the driver will otherwise operate normally (this is also valid for OSGi deployments). +geotypes at all, it is possible to exclude it to minimize the number of runtime dependencies (see +the [Integration>Driver dependencies](../../integration/#driver-dependencies) section for +more details). If the library cannot be found at runtime, geospatial types won't be available and a +warning will be logged, but the driver will otherwise operate normally (this is also valid for OSGi +deployments). ### Usage in requests diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 3d8191ffc1e..2d6fe8291a9 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -9,9 +9,11 @@ modeling, refer to the [DSE developer guide].* Note: graph capabilities require the [Apache TinkerPop™] library to be present on the classpath. The driver has a non-optional dependency on that library, but if your application does not use graph at -all, it is possible to exclude it to minimize the number of runtime dependencies. 
If the library -cannot be found at runtime, graph queries won't be available and a warning will be logged, but the -driver will otherwise operate normally (this is also valid for OSGi deployments). +all, it is possible to exclude it to minimize the number of runtime dependencies (see the +[Integration>Driver dependencies](../../integration/#driver-dependencies) section for more +details). If the library cannot be found at runtime, graph queries won't be available and a warning +will be logged, but the driver will otherwise operate normally (this is also valid for OSGi +deployments). If you do use graph, it is important to keep the precise TinkerPop version that the driver depends on: unlike the driver, TinkerPop does not follow semantic versioning, so even a patch version change diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index aee714ffaf3..07ec00aa498 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -3,7 +3,8 @@ ### Quick overview * sample project structures for Maven and Gradle. -* explanations about driver dependencies and when they can be manually excluded. +* explanations about [driver dependencies](#driver-dependencies) and when they can be manually + excluded. ----- diff --git a/manual/osgi/README.md b/manual/osgi/README.md index b1d59684f4c..50358e140c0 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -8,10 +8,12 @@ valid OSGi bundles: - `java-driver-core-shaded` Note: some of the driver dependencies are not valid OSGi bundles. Most of them are optional, and the -driver can work properly without them; in such cases, the corresponding packages are declared with -optional resolution in `Import-Package` directives. However, if you need to access such packages in -an OSGi container you MUST wrap the corresponding jar in a valid OSGi bundle and make it available -for provisioning to the OSGi runtime. 
+driver can work properly without them (see the +[Integration>Driver dependencies](../core/integration/#driver-dependencies) section for more +details); in such cases, the corresponding packages are declared with optional resolution in +`Import-Package` directives. However, if you need to access such packages in an OSGi container you +MUST wrap the corresponding jar in a valid OSGi bundle and make it available for provisioning to the +OSGi runtime. ## Using the shaded jar diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index eb178656248..15b2d07a1c2 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -14,9 +14,9 @@ Apart from that, the only visible change is that DSE-specific features are now e * new execution methods: `CqlSession.executeGraph`, `CqlSession.executeContinuously*`. They all have default implementations so this doesn't break binary compatibility. You can just ignore them. -* new driver dependencies: Tinkerpop, ESRI, Reactive Streams. You can exclude them manually if you - want to keep your classpath lean, the rest of the driver will still work. See the - [Integration](../manual/core/integration/#driver-dependencies) page. +* new driver dependencies: Tinkerpop, ESRI, Reactive Streams. If you want to keep your classpath + lean, you can exclude some dependencies when you don't use the corresponding DSE features; see the + [Integration>Driver dependencies](../manual/core/integration/#driver-dependencies) section. 
#### For Datastax Enterprise users From 08a3b65e08b889ca18b93c783ee9025e52eb70e4 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 09:15:01 -0800 Subject: [PATCH 258/979] Mention LBP in upgrade guide --- upgrade_guide/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 15b2d07a1c2..27e26067790 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -62,6 +62,9 @@ changes right away; but you will get deprecation warnings: DSE. If you used `DseProgrammaticPlainTextAuthProvider`, replace it by `PlainTextProgrammaticAuthProvider`. Similarly, if you wrote a custom implementation by subclassing `DsePlainTextAuthProviderBase`, extend `PlainTextAuthProviderBase` instead. +* `DseLoadBalancingPolicy`: DSE-specific features (the slow replica avoidance mechanism) have been + merged into `DefaultLoadBalancingPolicy`. `DseLoadBalancingPolicy` still exists for backward + compatibility, but it is now identical to the default policy. #### Class Loader From fdad137c26eb21aa2b89d145f56bc47e0d50a95b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 09:44:26 -0800 Subject: [PATCH 259/979] Include 3.x entries in the changelog --- changelog/README.md | 1645 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1645 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 045963379c0..8c1b0811e4a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -368,3 +368,1648 @@ implemented in this single driver. - [new feature] JAVA-1501: Reprepare on the fly when we get an UNPREPARED response - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements + + +## 3.8.0 + +- [new feature] JAVA-2356: Support for DataStax Cloud API. +- [improvement] JAVA-2483: Allow to provide secure bundle via URL. +- [improvement] JAVA-2499: Allow to read the secure bundle from an InputStream. 
+- [improvement] JAVA-2457: Detect CaaS and change default consistency. +- [improvement] JAVA-2485: Add errors for Cloud misconfiguration. +- [documentation] JAVA-2504: Migrate Cloud "getting started" page to driver manual. +- [improvement] JAVA-2516: Enable hostname validation with Cloud +- [bug] JAVA-2515: NEW_NODE and REMOVED_NODE events should trigger ADDED and REMOVED. + + +### 3.7.2 + +- [bug] JAVA-2249: Stop stripping trailing zeros in ByteOrderedTokens. +- [bug] JAVA-1492: Don't immediately reuse busy connections for another request. +- [bug] JAVA-2198: Handle UDTs with names that clash with collection types. +- [bug] JAVA-2204: Avoid memory leak when client holds onto a stale TableMetadata instance. + + +### 3.7.1 + +- [bug] JAVA-2174: Metadata.needsQuote should accept empty strings. +- [bug] JAVA-2193: Fix flaky tests in WarningsTest. + + +### 3.7.0 + +- [improvement] JAVA-2025: Include exception message in Abstract\*Codec.accepts(null). +- [improvement] JAVA-1980: Use covariant return types in RemoteEndpointAwareJdkSSLOptions.Builder methods. +- [documentation] JAVA-2062: Document frozen collection preference with Mapper. +- [bug] JAVA-2071: Fix NPE in ArrayBackedRow.toString(). +- [bug] JAVA-2070: Call onRemove instead of onDown when rack and/or DC information changes for a host. +- [improvement] JAVA-1256: Log parameters of BuiltStatement in QueryLogger. +- [documentation] JAVA-2074: Document preference for LZ4 over Snappy. +- [bug] JAVA-1612: Include netty-common jar in binary tarball. +- [improvement] JAVA-2003: Simplify CBUtil internal API to improve performance. +- [improvement] JAVA-2002: Reimplement TypeCodec.accepts to improve performance. +- [documentation] JAVA-2041: Deprecate cross-DC failover in DCAwareRoundRobinPolicy. +- [documentation] JAVA-1159: Document workaround for using tuple with udt field in Mapper. +- [documentation] JAVA-1964: Complete remaining "Coming Soon" sections in docs. 
+- [improvement] JAVA-1950: Log server side warnings returned from a query. +- [improvement] JAVA-2123: Allow to use QueryBuilder for building queries against Materialized Views. +- [bug] JAVA-2082: Avoid race condition during cluster close and schema refresh. + + +### 3.6.0 + +- [improvement] JAVA-1394: Add request-queue-depth metric. +- [improvement] JAVA-1857: Add Statement.setHost. +- [bug] JAVA-1920: Use nanosecond precision in LocalTimeCodec#format(). +- [bug] JAVA-1794: Driver tries to create a connection array of size -1. +- [new feature] JAVA-1899: Support virtual tables. +- [bug] JAVA-1908: TableMetadata.asCQLQuery does not add table option 'memtable_flush_period_in_ms' in the generated query. +- [bug] JAVA-1924: StatementWrapper setters should return the wrapping statement. +- [new feature] JAVA-1532: Add Codec support for Java 8's LocalDateTime and ZoneId. +- [improvement] JAVA-1786: Use Google code formatter. +- [bug] JAVA-1871: Change LOCAL\_SERIAL.isDCLocal() to return true. +- [documentation] JAVA-1902: Clarify unavailable & request error in DefaultRetryPolicy javadoc. +- [new feature] JAVA-1903: Add WhiteListPolicy.ofHosts. +- [bug] JAVA-1928: Fix GuavaCompatibility for Guava 26. +- [bug] JAVA-1935: Add null check in QueryConsistencyException.getHost. +- [improvement] JAVA-1771: Send driver name and version in STARTUP message. +- [improvement] JAVA-1388: Add dynamic port discovery for system.peers\_v2. +- [documentation] JAVA-1810: Note which setters are not propagated to PreparedStatement. +- [bug] JAVA-1944: Surface Read and WriteFailureException to RetryPolicy. +- [bug] JAVA-1211: Fix NPE in cluster close when cluster init fails. +- [bug] JAVA-1220: Fail fast on cluster init if previous init failed. +- [bug] JAVA-1929: Preempt session execute queries if session was closed. + +Merged from 3.5.x: + +- [bug] JAVA-1872: Retain table's views when processing table update. 
+ + +### 3.5.0 + +- [improvement] JAVA-1448: TokenAwarePolicy should respect child policy ordering. +- [bug] JAVA-1751: Include defaultTimestamp length in encodedSize for protocol version >= 3. +- [bug] JAVA-1770: Fix message size when using Custom Payload. +- [documentation] JAVA-1760: Add metrics documentation. +- [improvement] JAVA-1765: Update dependencies to latest patch versions. +- [improvement] JAVA-1752: Deprecate DowngradingConsistencyRetryPolicy. +- [improvement] JAVA-1735: Log driver version on first use. +- [documentation] JAVA-1380: Add FAQ entry for errors arising from incompatibilities. +- [improvement] JAVA-1748: Support IS NOT NULL and != in query builder. +- [documentation] JAVA-1740: Mention C*2.2/3.0 incompatibilities in paging state manual. +- [improvement] JAVA-1725: Add a getNodeCount method to CCMAccess for easier automation. +- [new feature] JAVA-708: Add means to measure request sizes. +- [documentation] JAVA-1788: Add example for enabling host name verification to SSL docs. +- [improvement] JAVA-1791: Revert "JAVA-1677: Warn if auth is configured on the client but not the server." +- [bug] JAVA-1789: Account for flags in Prepare encodedSize. +- [bug] JAVA-1797: Use jnr-ffi version required by jnr-posix. + + +### 3.4.0 + +- [improvement] JAVA-1671: Remove unnecessary test on prepared statement metadata. +- [bug] JAVA-1694: Upgrade to jackson-databind 2.7.9.2 to address CVE-2015-15095. +- [documentation] JAVA-1685: Clarify recommendation on preparing SELECT *. +- [improvement] JAVA-1679: Improve error message on batch log write timeout. +- [improvement] JAVA-1672: Remove schema agreement check when repreparing on up. +- [improvement] JAVA-1677: Warn if auth is configured on the client but not the server. +- [new feature] JAVA-1651: Add NO_COMPACT startup option. +- [improvement] JAVA-1683: Add metrics to track writes to nodes. +- [new feature] JAVA-1229: Allow specifying the keyspace for individual queries. 
+- [improvement] JAVA-1682: Provide a way to record latencies for cancelled speculative executions. +- [improvement] JAVA-1717: Add metrics to latency-aware policy. +- [improvement] JAVA-1675: Remove dates from copyright headers. + +Merged from 3.3.x: + +- [bug] JAVA-1555: Include VIEW and CDC in WriteType. +- [bug] JAVA-1599: exportAsString improvements (sort, format, clustering order) +- [improvement] JAVA-1587: Deterministic ordering of columns used in Mapper#saveQuery +- [improvement] JAVA-1500: Add a metric to report number of in-flight requests. +- [bug] JAVA-1438: QueryBuilder check for empty orderings. +- [improvement] JAVA-1490: Allow zero delay for speculative executions. +- [documentation] JAVA-1607: Add FAQ entry for netty-transport-native-epoll. +- [bug] JAVA-1630: Fix Metadata.addIfAbsent. +- [improvement] JAVA-1619: Update QueryBuilder methods to support Iterable input. +- [improvement] JAVA-1527: Expose host_id and schema_version on Host metadata. +- [new feature] JAVA-1377: Add support for TWCS in SchemaBuilder. +- [improvement] JAVA-1631: Publish a sources jar for driver-core-tests. +- [improvement] JAVA-1632: Add a withIpPrefix(String) method to CCMBridge.Builder. +- [bug] JAVA-1639: VersionNumber does not fullfill equals/hashcode contract. +- [bug] JAVA-1613: Fix broken shaded Netty detection in NettyUtil. +- [bug] JAVA-1666: Fix keyspace export when a UDT has case-sensitive field names. +- [improvement] JAVA-1196: Include hash of result set metadata in prepared statement id. +- [improvement] JAVA-1670: Support user-provided JMX ports for CCMBridge. +- [improvement] JAVA-1661: Avoid String.toLowerCase if possible in Metadata. +- [improvement] JAVA-1659: Expose low-level flusher tuning options. +- [improvement] JAVA-1660: Support netty-transport-native-epoll in OSGi container. + + +### 3.3.2 + +- [bug] JAVA-1666: Fix keyspace export when a UDT has case-sensitive field names. 
+- [improvement] JAVA-1196: Include hash of result set metadata in prepared statement id. +- [improvement] JAVA-1670: Support user-provided JMX ports for CCMBridge. +- [improvement] JAVA-1661: Avoid String.toLowerCase if possible in Metadata. +- [improvement] JAVA-1659: Expose low-level flusher tuning options. +- [improvement] JAVA-1660: Support netty-transport-native-epoll in OSGi container. + + +### 3.3.1 + +- [bug] JAVA-1555: Include VIEW and CDC in WriteType. +- [bug] JAVA-1599: exportAsString improvements (sort, format, clustering order) +- [improvement] JAVA-1587: Deterministic ordering of columns used in Mapper#saveQuery +- [improvement] JAVA-1500: Add a metric to report number of in-flight requests. +- [bug] JAVA-1438: QueryBuilder check for empty orderings. +- [improvement] JAVA-1490: Allow zero delay for speculative executions. +- [documentation] JAVA-1607: Add FAQ entry for netty-transport-native-epoll. +- [bug] JAVA-1630: Fix Metadata.addIfAbsent. +- [improvement] JAVA-1619: Update QueryBuilder methods to support Iterable input. +- [improvement] JAVA-1527: Expose host_id and schema_version on Host metadata. +- [new feature] JAVA-1377: Add support for TWCS in SchemaBuilder. +- [improvement] JAVA-1631: Publish a sources jar for driver-core-tests. +- [improvement] JAVA-1632: Add a withIpPrefix(String) method to CCMBridge.Builder. +- [bug] JAVA-1639: VersionNumber does not fullfill equals/hashcode contract. +- [bug] JAVA-1613: Fix broken shaded Netty detection in NettyUtil. + + +### 3.3.0 + +- [bug] JAVA-1469: Update LoggingRetryPolicy to deal with SLF4J-353. +- [improvement] JAVA-1203: Upgrade Metrics to allow usage in OSGi. +- [bug] JAVA-1407: KeyspaceMetadata exportAsString should export user types in topological sort order. +- [bug] JAVA-1455: Mapper support using unset for null values. +- [bug] JAVA-1464: Allow custom codecs with non public constructors in @Param. +- [bug] JAVA-1470: Querying multiple pages overrides WrappedStatement. 
+- [improvement] JAVA-1428: Upgrade logback and jackson dependencies. +- [documentation] JAVA-1463: Revisit speculative execution docs. +- [documentation] JAVA-1466: Revisit timestamp docs. +- [documentation] JAVA-1445: Clarify how nodes are penalized in LatencyAwarePolicy docs. +- [improvement] JAVA-1446: Support 'DEFAULT UNSET' in Query Builder JSON Insert. +- [improvement] JAVA-1443: Add groupBy method to Select statement. +- [improvement] JAVA-1458: Check thread in mapper sync methods. +- [improvement] JAVA-1488: Upgrade Netty to 4.0.47.Final. +- [improvement] JAVA-1460: Add speculative execution number to ExecutionInfo +- [improvement] JAVA-1431: Improve error handling during pool initialization. + + +### 3.2.0 + +- [new feature] JAVA-1347: Add support for duration type. +- [new feature] JAVA-1248: Implement "beta" flag for native protocol v5. +- [new feature] JAVA-1362: Send query options flags as [int] for Protocol V5+. +- [new feature] JAVA-1364: Enable creation of SSLHandler with remote address information. +- [improvement] JAVA-1367: Make protocol negotiation more resilient. +- [bug] JAVA-1397: Handle duration as native datatype in protocol v5+. +- [improvement] JAVA-1308: CodecRegistry performance improvements. +- [improvement] JAVA-1287: Add CDC to TableOptionsMetadata and Schema Builder. +- [improvement] JAVA-1392: Reduce lock contention in RPTokenFactory. +- [improvement] JAVA-1328: Provide compatibility with Guava 20. +- [improvement] JAVA-1247: Disable idempotence warnings. +- [improvement] JAVA-1286: Support setting and retrieving udt fields in QueryBuilder. +- [bug] JAVA-1415: Correctly report if a UDT column is frozen. +- [bug] JAVA-1418: Make Guava version detection more reliable. +- [new feature] JAVA-1174: Add ifNotExists option to mapper. +- [improvement] JAVA-1414: Optimize Metadata.escapeId and Metadata.handleId. +- [improvement] JAVA-1310: Make mapper's ignored properties configurable. 
+- [improvement] JAVA-1316: Add strategy for resolving properties into CQL names. +- [bug] JAVA-1424: Handle new WRITE_FAILURE and READ_FAILURE format in v5 protocol. + +Merged from 3.1.x branch: + +- [bug] JAVA-1371: Reintroduce connection pool timeout. +- [bug] JAVA-1313: Copy SerialConsistencyLevel to PreparedStatement. +- [documentation] JAVA-1334: Clarify documentation of method `addContactPoints`. +- [improvement] JAVA-1357: Document that getReplicas only returns replicas of the last token in range. +- [bug] JAVA-1404: Fix min token handling in TokenRange.contains. +- [bug] JAVA-1429: Prevent heartbeats until connection is fully initialized. + + +### 3.1.4 + +Merged from 3.0.x branch: + +- [bug] JAVA-1371: Reintroduce connection pool timeout. +- [bug] JAVA-1313: Copy SerialConsistencyLevel to PreparedStatement. +- [documentation] JAVA-1334: Clarify documentation of method `addContactPoints`. +- [improvement] JAVA-1357: Document that getReplicas only returns replicas of the last token in range. + + +### 3.1.3 + +Merged from 3.0.x branch: + +- [bug] JAVA-1330: Add un/register for SchemaChangeListener in DelegatingCluster +- [bug] JAVA-1351: Include Custom Payload in Request.copy. +- [bug] JAVA-1346: Reset heartbeat only on client reads (not writes). +- [improvement] JAVA-866: Support tuple notation in QueryBuilder.eq/in. + + +### 3.1.2 + +- [bug] JAVA-1321: Wrong OSGi dependency version for Guava. + +Merged from 3.0.x branch: + +- [bug] JAVA-1312: QueryBuilder modifies selected columns when manually selected. +- [improvement] JAVA-1303: Add missing BoundStatement.setRoutingKey(ByteBuffer...) +- [improvement] JAVA-262: Make internal executors customizable + + +### 3.1.1 + +- [bug] JAVA-1284: ClockFactory should check system property before attempting to load Native class. +- [bug] JAVA-1255: Allow nested UDTs to be used in Mapper. 
+- [bug] JAVA-1279: Mapper should exclude Groovy's "metaClass" property when looking for mapped properties + +Merged from 3.0.x branch: + +- [improvement] JAVA-1246: Driver swallows the real exception in a few cases +- [improvement] JAVA-1261: Throw error when attempting to page in I/O thread. +- [bug] JAVA-1258: Regression: Mapper cannot map a materialized view after JAVA-1126. +- [bug] JAVA-1101: Batch and BatchStatement should consider inner statements to determine query idempotence +- [improvement] JAVA-1262: Use ParseUtils for quoting & unquoting. +- [improvement] JAVA-1275: Use Netty's default thread factory +- [bug] JAVA-1285: QueryBuilder routing key auto-discovery should handle case-sensitive column names. +- [bug] JAVA-1283: Don't cache failed query preparations in the mapper. +- [improvement] JAVA-1277: Expose AbstractSession.checkNotInEventLoop. +- [bug] JAVA-1272: BuiltStatement not able to print its query string if it contains mapped UDTs. +- [bug] JAVA-1292: 'Adjusted frame length' error breaks driver's ability to read data. +- [improvement] JAVA-1293: Make DecoderForStreamIdSize.MAX_FRAME_LENGTH configurable. +- [improvement] JAVA-1053: Add a metric for authentication errors +- [improvement] JAVA-1263: Eliminate unnecessary memory copies in FrameCompressor implementations. +- [improvement] JAVA-893: Make connection pool non-blocking + + +### 3.1.0 + +- [new feature] JAVA-1153: Add PER PARTITION LIMIT to Select QueryBuilder. +- [improvement] JAVA-743: Add JSON support to QueryBuilder. +- [improvement] JAVA-1233: Update HdrHistogram to 2.1.9. +- [improvement] JAVA-1233: Update Snappy to 1.1.2.6. +- [bug] JAVA-1161: Preserve full time zone info in ZonedDateTimeCodec and DateTimeCodec. +- [new feature] JAVA-1157: Allow asynchronous paging of Mapper Result. +- [improvement] JAVA-1212: Don't retry non-idempotent statements by default. +- [improvement] JAVA-1192: Make EventDebouncer settings updatable at runtime. 
+- [new feature] JAVA-541: Add polymorphism support to object mapper. +- [new feature] JAVA-636: Allow @Column annotations on getters/setters as well as fields. +- [new feature] JAVA-984: Allow non-void setters in object mapping. +- [new feature] JAVA-1055: Add ErrorAware load balancing policy. + +Merged from 3.0.x branch: + +- [bug] JAVA-1179: Request objects should be copied when executed. +- [improvement] JAVA-1182: Throw error when synchronous call made on I/O thread. +- [bug] JAVA-1184: Unwrap StatementWrappers when extracting column definitions. +- [bug] JAVA-1132: Executing bound statement with no variables results in exception with protocol v1. +- [improvement] JAVA-1040: SimpleStatement parameters support in QueryLogger. +- [improvement] JAVA-1151: Fail fast if HdrHistogram is not in the classpath. +- [improvement] JAVA-1154: Allow individual Statement to cancel the read timeout. +- [bug] JAVA-1074: Fix documentation around default timestamp generator. +- [improvement] JAVA-1109: Document SSLOptions changes in upgrade guide. +- [improvement] JAVA-1065: Add method to create token from partition key values. +- [improvement] JAVA-1136: Enable JDK signature check in module driver-extras. +- [improvement] JAVA-866: Support tuple notation in QueryBuilder.eq/in. +- [bug] JAVA-1140: Use same connection to check for schema agreement after a DDL query. +- [improvement] JAVA-1113: Support Cassandra 3.4 LIKE operator in QueryBuilder. +- [improvement] JAVA-1086: Support Cassandra 3.2 CAST function in QueryBuilder. +- [bug] JAVA-1095: Check protocol version for custom payload before sending the query. +- [improvement] JAVA-1133: Add OSGi headers to cassandra-driver-extras. +- [bug] JAVA-1137: Incorrect string returned by DataType.asFunctionParameterString() for collections and tuples. +- [bug] JAVA-1046: (Dynamic)CompositeTypes need to be parsed as string literal, not blob. +- [improvement] JAVA-1164: Clarify documentation on Host.listenAddress and broadcastAddress. 
+- [improvement] JAVA-1171: Add Host method to determine if DSE Graph is enabled. +- [improvement] JAVA-1069: Bootstrap driver-examples module. +- [documentation] JAVA-1150: Add example and FAQ entry about ByteBuffer/BLOB. +- [improvement] JAVA-1011: Expose PoolingOptions default values. +- [improvement] JAVA-630: Don't process DOWN events for nodes that have active connections. +- [improvement] JAVA-851: Improve UUIDs javadoc with regard to user-provided timestamps. +- [improvement] JAVA-979: Update javadoc for RegularStatement toString() and getQueryString() to indicate that consistency level and other parameters are not maintained in the query string. +- [bug] JAVA-1068: Unwrap StatementWrappers when hashing the paging state. +- [improvement] JAVA-1021: Improve error message when connect() is called with an invalid keyspace name. +- [improvement] JAVA-879: Mapper.map() accepts mapper-generated and user queries. +- [bug] JAVA-1100: Exception when connecting with shaded java driver in OSGI +- [bug] JAVA-1064: getTable create statement doesn't properly handle quotes in primary key. +- [bug] JAVA-1089: Set LWT made from BuiltStatements to non-idempotent. +- [improvement] JAVA-923: Position idempotent flag on object mapper queries. +- [bug] JAVA-1070: The Mapper should not prepare queries synchronously. +- [new feature] JAVA-982: Introduce new method ConsistencyLevel.isSerial(). +- [bug] JAVA-764: Retry with the normal consistency level (not the serial one) when a write times out on the Paxos phase. +- [improvement] JAVA-852: Ignore peers with null entries during discovery. +- [bug] JAVA-1005: DowngradingConsistencyRetryPolicy does not work with EACH_QUORUM when 1 DC is down. +- [bug] JAVA-1002: Avoid deadlock when re-preparing a statement on other hosts. +- [bug] JAVA-1072: Ensure defunct connections are properly evicted from the pool. +- [bug] JAVA-1152: Fix NPE at ControlConnection.refreshNodeListAndTokenMap(). 
+ +Merged from 2.1 branch: + +- [improvement] JAVA-1038: Fetch node info by rpc_address if its broadcast_address is not in system.peers. +- [improvement] JAVA-888: Add cluster-wide percentile tracker. +- [improvement] JAVA-963: Automatically register PercentileTracker from components that use it. +- [new feature] JAVA-1019: SchemaBuilder support for CREATE/ALTER/DROP KEYSPACE. +- [bug] JAVA-727: Allow monotonic timestamp generators to drift in the future + use microsecond precision when possible. +- [improvement] JAVA-444: Add Java process information to UUIDs.makeNode() hash. + + +### 3.0.7 + +- [bug] JAVA-1371: Reintroduce connection pool timeout. +- [bug] JAVA-1313: Copy SerialConsistencyLevel to PreparedStatement. +- [documentation] JAVA-1334: Clarify documentation of method `addContactPoints`. +- [improvement] JAVA-1357: Document that getReplicas only returns replicas of the last token in range. + + +### 3.0.6 + +- [bug] JAVA-1330: Add un/register for SchemaChangeListener in DelegatingCluster +- [bug] JAVA-1351: Include Custom Payload in Request.copy. +- [bug] JAVA-1346: Reset heartbeat only on client reads (not writes). +- [improvement] JAVA-866: Support tuple notation in QueryBuilder.eq/in. + + +### 3.0.5 + +- [bug] JAVA-1312: QueryBuilder modifies selected columns when manually selected. +- [improvement] JAVA-1303: Add missing BoundStatement.setRoutingKey(ByteBuffer...) +- [improvement] JAVA-262: Make internal executors customizable +- [bug] JAVA-1320: prevent unnecessary task creation on empty pool + + +### 3.0.4 + +- [improvement] JAVA-1246: Driver swallows the real exception in a few cases +- [improvement] JAVA-1261: Throw error when attempting to page in I/O thread. +- [bug] JAVA-1258: Regression: Mapper cannot map a materialized view after JAVA-1126. +- [bug] JAVA-1101: Batch and BatchStatement should consider inner statements to determine query idempotence +- [improvement] JAVA-1262: Use ParseUtils for quoting & unquoting. 
+- [improvement] JAVA-1275: Use Netty's default thread factory +- [bug] JAVA-1285: QueryBuilder routing key auto-discovery should handle case-sensitive column names. +- [bug] JAVA-1283: Don't cache failed query preparations in the mapper. +- [improvement] JAVA-1277: Expose AbstractSession.checkNotInEventLoop. +- [bug] JAVA-1272: BuiltStatement not able to print its query string if it contains mapped UDTs. +- [bug] JAVA-1292: 'Adjusted frame length' error breaks driver's ability to read data. +- [improvement] JAVA-1293: Make DecoderForStreamIdSize.MAX_FRAME_LENGTH configurable. +- [improvement] JAVA-1053: Add a metric for authentication errors +- [improvement] JAVA-1263: Eliminate unnecessary memory copies in FrameCompressor implementations. +- [improvement] JAVA-893: Make connection pool non-blocking + + +### 3.0.3 + +- [improvement] JAVA-1147: Upgrade Netty to 4.0.37. +- [bug] JAVA-1213: Allow updates and inserts to BLOB column using read-only ByteBuffer. +- [bug] JAVA-1209: ProtocolOptions.getProtocolVersion() should return null instead of throwing NPE if Cluster has not + been init'd. +- [improvement] JAVA-1204: Update documentation to indicate tcnative version requirement. +- [bug] JAVA-1186: Fix duplicated hosts in DCAwarePolicy warn message. +- [bug] JAVA-1187: Fix warning message when local CL used with RoundRobinPolicy. +- [improvement] JAVA-1175: Warn if DCAwarePolicy configuration is inconsistent. +- [bug] JAVA-1139: ConnectionException.getMessage() throws NPE if address is null. +- [bug] JAVA-1202: Handle null rpc_address when checking schema agreement. +- [improvement] JAVA-1198: Document that BoundStatement is not thread-safe. +- [improvement] JAVA-1200: Upgrade LZ4 to 1.3.0. +- [bug] JAVA-1232: Fix NPE in IdempotenceAwareRetryPolicy.isIdempotent. +- [improvement] JAVA-1227: Document "SELECT *" issue with prepared statement. +- [bug] JAVA-1160: Fix NPE in VersionNumber.getPreReleaseLabels(). +- [improvement] JAVA-1126: Handle schema changes in Mapper. 
+- [bug] JAVA-1193: Refresh token and replica metadata synchronously when schema is altered. +- [bug] JAVA-1120: Skip schema refresh debouncer when checking for agreement as a result of schema change made by client. +- [improvement] JAVA-1242: Fix driver-core dependency in driver-stress +- [improvement] JAVA-1235: Move the query to the end of "re-preparing .." log message as a key value. + + +### 3.0.2 + +Merged from 2.1 branch: + +- [bug] JAVA-1179: Request objects should be copied when executed. +- [improvement] JAVA-1182: Throw error when synchronous call made on I/O thread. +- [bug] JAVA-1184: Unwrap StatementWrappers when extracting column definitions. + + +### 3.0.1 + +- [bug] JAVA-1132: Executing bound statement with no variables results in exception with protocol v1. +- [improvement] JAVA-1040: SimpleStatement parameters support in QueryLogger. +- [improvement] JAVA-1151: Fail fast if HdrHistogram is not in the classpath. +- [improvement] JAVA-1154: Allow individual Statement to cancel the read timeout. +- [bug] JAVA-1074: Fix documentation around default timestamp generator. +- [improvement] JAVA-1109: Document SSLOptions changes in upgrade guide. +- [improvement] JAVA-1065: Add method to create token from partition key values. +- [improvement] JAVA-1136: Enable JDK signature check in module driver-extras. +- [improvement] JAVA-866: Support tuple notation in QueryBuilder.eq/in. +- [bug] JAVA-1140: Use same connection to check for schema agreement after a DDL query. +- [improvement] JAVA-1113: Support Cassandra 3.4 LIKE operator in QueryBuilder. +- [improvement] JAVA-1086: Support Cassandra 3.2 CAST function in QueryBuilder. +- [bug] JAVA-1095: Check protocol version for custom payload before sending the query. +- [improvement] JAVA-1133: Add OSGi headers to cassandra-driver-extras. +- [bug] JAVA-1137: Incorrect string returned by DataType.asFunctionParameterString() for collections and tuples. 
+- [bug] JAVA-1046: (Dynamic)CompositeTypes need to be parsed as string literal, not blob. +- [improvement] JAVA-1164: Clarify documentation on Host.listenAddress and broadcastAddress. +- [improvement] JAVA-1171: Add Host method to determine if DSE Graph is enabled. +- [improvement] JAVA-1069: Bootstrap driver-examples module. +- [documentation] JAVA-1150: Add example and FAQ entry about ByteBuffer/BLOB. + +Merged from 2.1 branch: + +- [improvement] JAVA-1011: Expose PoolingOptions default values. +- [improvement] JAVA-630: Don't process DOWN events for nodes that have active connections. +- [improvement] JAVA-851: Improve UUIDs javadoc with regard to user-provided timestamps. +- [improvement] JAVA-979: Update javadoc for RegularStatement toString() and getQueryString() to indicate that consistency level and other parameters are not maintained in the query string. +- [bug] JAVA-1068: Unwrap StatementWrappers when hashing the paging state. +- [improvement] JAVA-1021: Improve error message when connect() is called with an invalid keyspace name. +- [improvement] JAVA-879: Mapper.map() accepts mapper-generated and user queries. +- [bug] JAVA-1100: Exception when connecting with shaded java driver in OSGI +- [bug] JAVA-1064: getTable create statement doesn't properly handle quotes in primary key. +- [bug] JAVA-1089: Set LWT made from BuiltStatements to non-idempotent. +- [improvement] JAVA-923: Position idempotent flag on object mapper queries. +- [bug] JAVA-1070: The Mapper should not prepare queries synchronously. +- [new feature] JAVA-982: Introduce new method ConsistencyLevel.isSerial(). +- [bug] JAVA-764: Retry with the normal consistency level (not the serial one) when a write times out on the Paxos phase. +- [improvement] JAVA-852: Ignore peers with null entries during discovery. +- [bug] JAVA-1005: DowngradingConsistencyRetryPolicy does not work with EACH_QUORUM when 1 DC is down. +- [bug] JAVA-1002: Avoid deadlock when re-preparing a statement on other hosts. 
+- [bug] JAVA-1072: Ensure defunct connections are properly evicted from the pool. +- [bug] JAVA-1152: Fix NPE at ControlConnection.refreshNodeListAndTokenMap(). + + +### 3.0.0 + +- [bug] JAVA-1034: fix metadata parser for collections of custom types. +- [improvement] JAVA-1035: Expose host broadcast_address and listen_address if available. +- [new feature] JAVA-1037: Allow named parameters in simple statements. +- [improvement] JAVA-1033: Allow per-statement read timeout. +- [improvement] JAVA-1042: Include DSE version and workload in Host data. + +Merged from 2.1 branch: + +- [improvement] JAVA-1030: Log token to replica map computation times. +- [bug] JAVA-1039: Minor bugs in Event Debouncer. + + +### 3.0.0-rc1 + +- [bug] JAVA-890: fix mapper for case-sensitive UDT. + + +### 3.0.0-beta1 + +- [bug] JAVA-993: Support for "custom" types after CASSANDRA-10365. +- [bug] JAVA-999: Handle unset parameters in QueryLogger. +- [bug] JAVA-998: SchemaChangeListener not invoked for Functions or Aggregates having UDT arguments. +- [bug] JAVA-1009: use CL ONE to compute query plan when reconnecting + control connection. +- [improvement] JAVA-1003: Change default consistency level to LOCAL_ONE (amends JAVA-926). +- [improvement] JAVA-863: Idempotence propagation in prepared statements. +- [improvement] JAVA-996: Make CodecRegistry available to ProtocolDecoder. +- [bug] JAVA-819: Driver shouldn't retry on client timeout if statement is not idempotent. +- [improvement] JAVA-1007: Make SimpleStatement and QueryBuilder "detached" again. + +Merged from 2.1 branch: + +- [improvement] JAVA-989: Include keyspace name when invalid replication found when generating token map. +- [improvement] JAVA-664: Reduce heap consumption for TokenMap. +- [bug] JAVA-994: Don't call on(Up|Down|Add|Remove) methods if Cluster is closed/closing. + + +### 3.0.0-alpha5 + +- [improvement] JAVA-958: Make TableOrView.Order visible. +- [improvement] JAVA-968: Update metrics to the latest version. 
+- [improvement] JAVA-965: Improve error handling for when a non-type 1 UUID is given to bind() on a timeuuid column. +- [improvement] JAVA-885: Pass the authenticator name from the server to the auth provider. +- [improvement] JAVA-961: Raise an exception when an older version of guava (<16.01) is found. +- [bug] JAVA-972: TypeCodec.parse() implementations should be case insensitive when checking for keyword NULL. +- [bug] JAVA-971: Make type codecs invariant. +- [bug] JAVA-986: Update documentation links to reference 3.0. +- [improvement] JAVA-841: Refactor SSLOptions API. +- [improvement] JAVA-948: Don't limit cipher suites by default. +- [improvement] JAVA-917: Document SSL configuration. +- [improvement] JAVA-936: Adapt schema metadata parsing logic to new storage format of CQL types in C* 3.0. +- [new feature] JAVA-846: Provide custom codecs library as an extra module. +- [new feature] JAVA-742: Codec Support for JSON. +- [new feature] JAVA-606: Codec support for Java 8. +- [new feature] JAVA-565: Codec support for Java arrays. +- [new feature] JAVA-605: Codec support for Java enums. +- [bug] JAVA-884: Fix UDT mapper to process fields in the correct order. + +Merged from 2.1 branch: + +- [bug] JAVA-854: avoid early return in Cluster.init when a node doesn't support the protocol version. +- [bug] JAVA-978: Fix quoting issue that caused Mapper.getTableMetadata() to return null. +- [improvement] JAVA-920: Downgrade "error creating pool" message to WARN. +- [bug] JAVA-954: Don't trigger reconnection before initialization complete. +- [improvement] JAVA-914: Avoid rejected tasks at shutdown. +- [improvement] JAVA-921: Add SimpleStatement.getValuesCount(). +- [bug] JAVA-901: Move call to connection.release() out of cancelHandler. +- [bug] JAVA-960: Avoid race in control connection shutdown. +- [bug] JAVA-656: Fix NPE in ControlConnection.updateLocationInfo. +- [bug] JAVA-966: Count uninitialized connections in conviction policy. 
+- [improvement] JAVA-917: Document SSL configuration. +- [improvement] JAVA-652: Add DCAwareRoundRobinPolicy builder. +- [improvement] JAVA-808: Add generic filtering policy that can be used to exclude specific DCs. +- [bug] JAVA-988: Metadata.handleId should handle escaped double quotes. +- [bug] JAVA-983: QueryBuilder cannot handle collections containing function calls. + + +### 3.0.0-alpha4 + +- [improvement] JAVA-926: Change default consistency level to LOCAL_QUORUM. +- [bug] JAVA-942: Fix implementation of UserType.hashCode(). +- [improvement] JAVA-877: Don't delay UP/ADDED notifications if protocol version = V4. +- [improvement] JAVA-938: Parse 'extensions' column in table metadata. +- [bug] JAVA-900: Fix Configuration builder to allow disabled metrics. +- [new feature] JAVA-902: Prepare API for async query trace. +- [new feature] JAVA-930: Add BoundStatement#unset. +- [bug] JAVA-946: Make table metadata options class visible. +- [bug] JAVA-939: Add crcCheckChance to TableOptionsMetadata#equals/hashCode. +- [bug] JAVA-922: Make TypeCodec return mutable collections. +- [improvement] JAVA-932: Limit visibility of codec internals. +- [improvement] JAVA-934: Warn if a custom codec collides with an existing one. +- [improvement] JAVA-940: Allow typed getters/setters to target any CQL type. +- [bug] JAVA-950: Fix Cluster.connect with a case-sensitive keyspace. +- [bug] JAVA-953: Fix MaterializedViewMetadata when base table name is case sensitive. + + +### 3.0.0-alpha3 + +- [new feature] JAVA-571: Support new system tables in C* 3.0. +- [improvement] JAVA-919: Move crc_check_chance out of compressions options. + +Merged from 2.0 branch: + +- [improvement] JAVA-718: Log streamid at the trace level on sending request and receiving response. +- [bug] JAVA-796: Fix SpeculativeExecutionPolicy.init() and close() are never called. +- [improvement] JAVA-710: Suppress unnecessary warning at shutdown. 
+- [improvement] #340: Allow DNS name with multiple A-records as contact point. +- [bug] JAVA-794: Allow tracing across multiple result pages. +- [bug] JAVA-737: DowngradingConsistencyRetryPolicy ignores write timeouts. +- [bug] JAVA-736: Forbid bind marker in QueryBuilder add/append/prepend. +- [bug] JAVA-712: Prevent QueryBuilder.quote() from applying duplicate double quotes. +- [bug] JAVA-688: Prevent QueryBuilder from trying to serialize raw string. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-702: Warn when ReplicationStrategy encounters invalid + replication factors. +- [improvement] JAVA-662: Add PoolingOptions method to set both core and max + connections. +- [improvement] JAVA-766: Do not include epoll JAR in binary distribution. +- [improvement] JAVA-726: Optimize internal copies of Request objects. +- [bug] JAVA-815: Preserve tracing across retries. +- [improvement] JAVA-709: New RetryDecision.tryNextHost(). +- [bug] JAVA-733: Handle function calls and raw strings as non-idempotent in QueryBuilder. +- [improvement] JAVA-765: Provide API to retrieve values of a Parameterized SimpleStatement. +- [improvement] JAVA-827: implement UPDATE .. IF EXISTS in QueryBuilder. +- [improvement] JAVA-618: Randomize contact points list to prevent hotspots. +- [improvement] JAVA-720: Surface the coordinator used on query failure. +- [bug] JAVA-792: Handle contact points removed during init. +- [improvement] JAVA-719: Allow PlainTextAuthProvider to change its credentials at runtime. +- [new feature] JAVA-151: Make it possible to register for SchemaChange Events. +- [improvement] JAVA-861: Downgrade "Asked to rebuild table" log from ERROR to INFO level. +- [improvement] JAVA-797: Provide an option to prepare statements only on one node. 
+- [improvement] JAVA-658: Provide an option to not re-prepare all statements in onUp. +- [improvement] JAVA-853: Customizable creation of netty timer. +- [bug] JAVA-859: Avoid quadratic ring processing with invalid replication factors. +- [improvement] JAVA-657: Debounce control connection queries. +- [bug] JAVA-784: LoadBalancingPolicy.distance() called before init(). +- [new feature] JAVA-828: Make driver-side metadata optional. +- [improvement] JAVA-544: Allow hosts to remain partially up. +- [improvement] JAVA-821, JAVA-822: Remove internal blocking calls and expose async session + creation. +- [improvement] JAVA-725: Use parallel calls when re-preparing statement on other + hosts. +- [bug] JAVA-629: Don't use connection timeout for unrelated internal queries. +- [bug] JAVA-892: Fix NPE in speculative executions when metrics disabled. + + +### 3.0.0-alpha2 + +- [new feature] JAVA-875, JAVA-882: Move secondary index metadata out of column definitions. + +Merged from 2.2 branch: + +- [bug] JAVA-847: Propagate CodecRegistry to nested UDTs. +- [improvement] JAVA-848: Ability to store a default, shareable CodecRegistry + instance. +- [bug] JAVA-880: Treat empty ByteBuffers as empty values in TupleCodec and + UDTCodec. + + +### 3.0.0-alpha1 + +- [new feature] JAVA-876: Support new system tables in C* 3.0.0-alpha1. + +Merged from 2.2 branch: + +- [improvement] JAVA-810: Rename DateWithoutTime to LocalDate. +- [bug] JAVA-816: DateCodec does not format values correctly. +- [bug] JAVA-817: TimeCodec does not format values correctly. +- [bug] JAVA-818: TypeCodec.getDataTypeFor() does not handle LocalDate instances. +- [improvement] JAVA-836: Make ResultSet#fetchMoreResults return a + ListenableFuture&lt;ResultSet&gt;. +- [improvement] JAVA-843: Disable frozen checks in mapper. +- [improvement] JAVA-721: Allow user to register custom type codecs. +- [improvement] JAVA-722: Support custom type codecs in mapper. + + +### 2.2.0-rc3 + +- [bug] JAVA-847: Propagate CodecRegistry to nested UDTs. 
+- [improvement] JAVA-848: Ability to store a default, shareable CodecRegistry + instance. +- [bug] JAVA-880: Treat empty ByteBuffers as empty values in TupleCodec and + UDTCodec. + + +### 2.2.0-rc2 + +- [improvement] JAVA-810: Rename DateWithoutTime to LocalDate. +- [bug] JAVA-816: DateCodec does not format values correctly. +- [bug] JAVA-817: TimeCodec does not format values correctly. +- [bug] JAVA-818: TypeCodec.getDataTypeFor() does not handle LocalDate instances. +- [improvement] JAVA-836: Make ResultSet#fetchMoreResults return a + ListenableFuture&lt;ResultSet&gt;. +- [improvement] JAVA-843: Disable frozen checks in mapper. +- [improvement] JAVA-721: Allow user to register custom type codecs. +- [improvement] JAVA-722: Support custom type codecs in mapper. + +Merged from 2.1 branch: + +- [bug] JAVA-834: Special case check for 'null' string in index_options column. +- [improvement] JAVA-835: Allow accessor methods with less parameters in case + named bind markers are repeated. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-715: Make NativeColumnType a top-level class. +- [improvement] JAVA-700: Expose ProtocolVersion#toInt. +- [bug] JAVA-542: Handle void return types in accessors. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-713: HashMap throws an OOM Exception when logging level is set to TRACE. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-732: Expose KEYS and FULL indexing options in IndexMetadata. +- [improvement] JAVA-589: Allow @Enumerated in Accessor method parameters. +- [improvement] JAVA-554: Allow access to table metadata from Mapper. +- [improvement] JAVA-661: Provide a way to map computed fields. +- [improvement] JAVA-824: Ignore missing columns in mapper. +- [bug] JAVA-724: Preserve default timestamp for retries and speculative executions. 
+- [improvement] JAVA-738: Use same pool implementation for protocol v2 and v3. +- [improvement] JAVA-677: Support CONTAINS / CONTAINS KEY in QueryBuilder. +- [improvement] JAVA-477/JAVA-540: Add USING options in mapper for delete and save + operations. +- [improvement] JAVA-473: Add mapper option to configure whether to save null fields. + +Merged from 2.0 branch: + +- [bug] JAVA-737: DowngradingConsistencyRetryPolicy ignores write timeouts. +- [bug] JAVA-736: Forbid bind marker in QueryBuilder add/append/prepend. +- [bug] JAVA-712: Prevent QueryBuilder.quote() from applying duplicate double quotes. +- [bug] JAVA-688: Prevent QueryBuilder from trying to serialize raw string. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-702: Warn when ReplicationStrategy encounters invalid + replication factors. +- [improvement] JAVA-662: Add PoolingOptions method to set both core and max + connections. +- [improvement] JAVA-766: Do not include epoll JAR in binary distribution. +- [improvement] JAVA-726: Optimize internal copies of Request objects. +- [bug] JAVA-815: Preserve tracing across retries. +- [improvement] JAVA-709: New RetryDecision.tryNextHost(). +- [bug] JAVA-733: Handle function calls and raw strings as non-idempotent in QueryBuilder. + + +### 2.2.0-rc1 + +- [new feature] JAVA-783: Protocol V4 enum support. +- [new feature] JAVA-776: Use PK columns in protocol v4 PREPARED response. +- [new feature] JAVA-777: Distinguish NULL and UNSET values. +- [new feature] JAVA-779: Add k/v payload for 3rd party usage. +- [new feature] JAVA-780: Expose server-side warnings on ExecutionInfo. +- [new feature] JAVA-749: Expose new read/write failure exceptions. +- [new feature] JAVA-747: Expose function and aggregate metadata. 
+- [new feature] JAVA-778: Add new client exception for CQL function failure. +- [improvement] JAVA-700: Expose ProtocolVersion#toInt. +- [new feature] JAVA-404: Support new C* 2.2 CQL date and time types. + +Merged from 2.1 branch: + +- [improvement] JAVA-782: Unify "Target" enum for schema elements. + + +### 2.1.10.2 + +Merged from 2.0 branch: + +- [bug] JAVA-1179: Request objects should be copied when executed. +- [improvement] JAVA-1182: Throw error when synchronous call made on I/O thread. +- [bug] JAVA-1184: Unwrap StatementWrappers when extracting column definitions. + + +### 2.1.10.1 + +- [bug] JAVA-1152: Fix NPE at ControlConnection.refreshNodeListAndTokenMap(). +- [bug] JAVA-1156: Fix NPE at TableMetadata.equals(). + + +### 2.1.10 + +- [bug] JAVA-988: Metadata.handleId should handle escaped double quotes. +- [bug] JAVA-983: QueryBuilder cannot handle collections containing function calls. +- [improvement] JAVA-863: Idempotence propagation in PreparedStatements. +- [bug] JAVA-937: TypeCodec static initializers not always correctly executed. +- [improvement] JAVA-989: Include keyspace name when invalid replication found when generating token map. +- [improvement] JAVA-664: Reduce heap consumption for TokenMap. +- [improvement] JAVA-1030: Log token to replica map computation times. +- [bug] JAVA-1039: Minor bugs in Event Debouncer. +- [improvement] JAVA-843: Disable frozen checks in mapper. +- [improvement] JAVA-833: Improve message when a nested type can't be serialized. +- [improvement] JAVA-1011: Expose PoolingOptions default values. +- [improvement] JAVA-630: Don't process DOWN events for nodes that have active connections. +- [improvement] JAVA-851: Improve UUIDs javadoc with regard to user-provided timestamps. +- [improvement] JAVA-979: Update javadoc for RegularStatement toString() and getQueryString() to indicate that consistency level and other parameters are not maintained in the query string. 
+- [improvement] JAVA-1038: Fetch node info by rpc_address if its broadcast_address is not in system.peers. +- [improvement] JAVA-974: Validate accessor parameter types against bound statement. +- [bug] JAVA-1068: Unwrap StatementWrappers when hashing the paging state. +- [bug] JAVA-831: Mapper can't load an entity where the PK is a UDT. +- [improvement] JAVA-1021: Improve error message when connect() is called with an invalid keyspace name. +- [improvement] JAVA-879: Mapper.map() accepts mapper-generated and user queries. +- [bug] JAVA-1100: Exception when connecting with shaded java driver in OSGI +- [bug] JAVA-819: Expose more errors in RetryPolicy + provide idempotent-aware wrapper. +- [improvement] JAVA-1040: SimpleStatement parameters support in QueryLogger. +- [bug] JAVA-1064: getTable create statement doesn't properly handle quotes in primary key. +- [improvement] JAVA-888: Add cluster-wide percentile tracker. +- [improvement] JAVA-963: Automatically register PercentileTracker from components that use it. +- [bug] JAVA-1089: Set LWT made from BuiltStatements to non-idempotent. +- [improvement] JAVA-923: Position idempotent flag on object mapper queries. +- [new feature] JAVA-1019: SchemaBuilder support for CREATE/ALTER/DROP KEYSPACE. +- [bug] JAVA-1070: The Mapper should not prepare queries synchronously. +- [new feature] JAVA-982: Introduce new method ConsistencyLevel.isSerial(). +- [bug] JAVA-764: Retry with the normal consistency level (not the serial one) when a write times out on the Paxos phase. +- [bug] JAVA-727: Allow monotonic timestamp generators to drift in the future + use microsecond precision when possible. +- [improvement] JAVA-444: Add Java process information to UUIDs.makeNode() hash. +- [improvement] JAVA-977: Preserve original cause when BuiltStatement value can't be serialized. +- [bug] JAVA-1094: Backport TypeCodec parse and format fixes from 3.0. +- [improvement] JAVA-852: Ignore peers with null entries during discovery. 
+- [bug] JAVA-1132: Executing bound statement with no variables results in exception with protocol v1. +- [bug] JAVA-1005: DowngradingConsistencyRetryPolicy does not work with EACH_QUORUM when 1 DC is down. +- [bug] JAVA-1002: Avoid deadlock when re-preparing a statement on other hosts. + +Merged from 2.0 branch: + +- [bug] JAVA-994: Don't call on(Up|Down|Add|Remove) methods if Cluster is closed/closing. +- [improvement] JAVA-805: Document that metrics are null until Cluster is initialized. +- [bug] JAVA-1072: Ensure defunct connections are properly evicted from the pool. + + +### 2.1.9 + +- [bug] JAVA-942: Fix implementation of UserType.hashCode(). +- [bug] JAVA-854: avoid early return in Cluster.init when a node doesn't support the protocol version. +- [bug] JAVA-978: Fix quoting issue that caused Mapper.getTableMetadata() to return null. + +Merged from 2.0 branch: + +- [bug] JAVA-950: Fix Cluster.connect with a case-sensitive keyspace. +- [improvement] JAVA-920: Downgrade "error creating pool" message to WARN. +- [bug] JAVA-954: Don't trigger reconnection before initialization complete. +- [improvement] JAVA-914: Avoid rejected tasks at shutdown. +- [improvement] JAVA-921: Add SimpleStatement.getValuesCount(). +- [bug] JAVA-901: Move call to connection.release() out of cancelHandler. +- [bug] JAVA-960: Avoid race in control connection shutdown. +- [bug] JAVA-656: Fix NPE in ControlConnection.updateLocationInfo. +- [bug] JAVA-966: Count uninitialized connections in conviction policy. +- [improvement] JAVA-917: Document SSL configuration. +- [improvement] JAVA-652: Add DCAwareRoundRobinPolicy builder. +- [improvement] JAVA-808: Add generic filtering policy that can be used to exclude specific DCs. + + +### 2.1.8 + +Merged from 2.0 branch: + +- [improvement] JAVA-718: Log streamid at the trace level on sending request and receiving response. + +- [bug] JAVA-796: Fix SpeculativeExecutionPolicy.init() and close() are never called. 
+- [improvement] JAVA-710: Suppress unnecessary warning at shutdown. +- [improvement] #340: Allow DNS name with multiple A-records as contact point. +- [bug] JAVA-794: Allow tracing across multiple result pages. +- [bug] JAVA-737: DowngradingConsistencyRetryPolicy ignores write timeouts. +- [bug] JAVA-736: Forbid bind marker in QueryBuilder add/append/prepend. +- [bug] JAVA-712: Prevent QueryBuilder.quote() from applying duplicate double quotes. +- [bug] JAVA-688: Prevent QueryBuilder from trying to serialize raw string. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-702: Warn when ReplicationStrategy encounters invalid + replication factors. +- [improvement] JAVA-662: Add PoolingOptions method to set both core and max + connections. +- [improvement] JAVA-766: Do not include epoll JAR in binary distribution. +- [improvement] JAVA-726: Optimize internal copies of Request objects. +- [bug] JAVA-815: Preserve tracing across retries. +- [improvement] JAVA-709: New RetryDecision.tryNextHost(). +- [bug] JAVA-733: Handle function calls and raw strings as non-idempotent in QueryBuilder. +- [improvement] JAVA-765: Provide API to retrieve values of a Parameterized SimpleStatement. +- [improvement] JAVA-827: implement UPDATE .. IF EXISTS in QueryBuilder. +- [improvement] JAVA-618: Randomize contact points list to prevent hotspots. +- [improvement] JAVA-720: Surface the coordinator used on query failure. +- [bug] JAVA-792: Handle contact points removed during init. +- [improvement] JAVA-719: Allow PlainTextAuthProvider to change its credentials at runtime. +- [new feature] JAVA-151: Make it possible to register for SchemaChange Events. +- [improvement] JAVA-861: Downgrade "Asked to rebuild table" log from ERROR to INFO level. 
+- [improvement] JAVA-797: Provide an option to prepare statements only on one node. +- [improvement] JAVA-658: Provide an option to not re-prepare all statements in onUp. +- [improvement] JAVA-853: Customizable creation of netty timer. +- [bug] JAVA-859: Avoid quadratic ring processing with invalid replication factors. +- [improvement] JAVA-657: Debounce control connection queries. +- [bug] JAVA-784: LoadBalancingPolicy.distance() called before init(). +- [new feature] JAVA-828: Make driver-side metadata optional. +- [improvement] JAVA-544: Allow hosts to remain partially up. +- [improvement] JAVA-821, JAVA-822: Remove internal blocking calls and expose async session + creation. +- [improvement] JAVA-725: Use parallel calls when re-preparing statement on other + hosts. +- [bug] JAVA-629: Don't use connection timeout for unrelated internal queries. +- [bug] JAVA-892: Fix NPE in speculative executions when metrics disabled. + + +### 2.1.7.1 + +- [bug] JAVA-834: Special case check for 'null' string in index_options column. +- [improvement] JAVA-835: Allow accessor methods with less parameters in case + named bind markers are repeated. + + +### 2.1.7 + +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-715: Make NativeColumnType a top-level class. +- [improvement] JAVA-782: Unify "Target" enum for schema elements. +- [improvement] JAVA-700: Expose ProtocolVersion#toInt. +- [bug] JAVA-542: Handle void return types in accessors. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-713: HashMap throws an OOM Exception when logging level is set to TRACE. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-732: Expose KEYS and FULL indexing options in IndexMetadata. +- [improvement] JAVA-589: Allow @Enumerated in Accessor method parameters. +- [improvement] JAVA-554: Allow access to table metadata from Mapper. 
+- [improvement] JAVA-661: Provide a way to map computed fields. +- [improvement] JAVA-824: Ignore missing columns in mapper. +- [bug] JAVA-724: Preserve default timestamp for retries and speculative executions. +- [improvement] JAVA-738: Use same pool implementation for protocol v2 and v3. +- [improvement] JAVA-677: Support CONTAINS / CONTAINS KEY in QueryBuilder. +- [improvement] JAVA-477/JAVA-540: Add USING options in mapper for delete and save + operations. +- [improvement] JAVA-473: Add mapper option to configure whether to save null fields. + +Merged from 2.0 branch: + +- [bug] JAVA-737: DowngradingConsistencyRetryPolicy ignores write timeouts. +- [bug] JAVA-736: Forbid bind marker in QueryBuilder add/append/prepend. +- [bug] JAVA-712: Prevent QueryBuilder.quote() from applying duplicate double quotes. +- [bug] JAVA-688: Prevent QueryBuilder from trying to serialize raw string. +- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-702: Warn when ReplicationStrategy encounters invalid + replication factors. +- [improvement] JAVA-662: Add PoolingOptions method to set both core and max + connections. +- [improvement] JAVA-766: Do not include epoll JAR in binary distribution. +- [improvement] JAVA-726: Optimize internal copies of Request objects. +- [bug] JAVA-815: Preserve tracing across retries. +- [improvement] JAVA-709: New RetryDecision.tryNextHost(). +- [bug] JAVA-733: Handle function calls and raw strings as non-idempotent in QueryBuilder. + + +### 2.1.6 + +Merged from 2.0 branch: + +- [new feature] JAVA-584: Add getObject to BoundStatement and Row. +- [improvement] JAVA-419: Improve connection pool resizing algorithm. +- [bug] JAVA-599: Fix race condition between pool expansion and shutdown. +- [improvement] JAVA-622: Upgrade Netty to 4.0.27. 
+- [improvement] JAVA-562: Coalesce frames before flushing them to the connection. +- [improvement] JAVA-583: Rename threads to indicate that they are for the driver. +- [new feature] JAVA-550: Expose paging state. +- [new feature] JAVA-646: Slow Query Logger. +- [improvement] JAVA-698: Exclude some errors from measurements in LatencyAwarePolicy. +- [bug] JAVA-641: Fix issue when executing a PreparedStatement from another cluster. +- [improvement] JAVA-534: Log keyspace xxx does not exist at WARN level. +- [improvement] JAVA-619: Allow Cluster subclasses to delegate to another instance. +- [new feature] JAVA-669: Expose an API to check for schema agreement after a + schema-altering statement. +- [improvement] JAVA-692: Make connection and pool creation fully async. +- [improvement] JAVA-505: Optimize connection use after reconnection. +- [improvement] JAVA-617: Remove "suspected" mechanism. +- [improvement] reverts JAVA-425: Don't mark connection defunct on client timeout. +- [new feature] JAVA-561: Speculative query executions. +- [bug] JAVA-666: Release connection before completing the ResultSetFuture. +- [new feature BETA] JAVA-723: Percentile-based variant of query logger and speculative + executions. +- [bug] JAVA-734: Fix buffer leaks when compression is enabled. +- [improvement] JAVA-756: Use Netty's pooled ByteBufAllocator by default. +- [improvement] JAVA-759: Expose "unsafe" paging state API. +- [bug] JAVA-768: Prevent race during pool initialization. + + +### 2.1.5 + +- [bug] JAVA-575: Authorize Null parameter in Accessor method. +- [improvement] JAVA-570: Support C* 2.1.3's nested collections. +- [bug] JAVA-612: Fix checks on mapped collection types. +- [bug] JAVA-672: Fix QueryBuilder.putAll() when the collection contains UDTs. + +Merged from 2.0 branch: + +- [new feature] JAVA-518: Add AddressTranslater for EC2 multi-region deployment. +- [improvement] JAVA-533: Add connection heartbeat. 
+- [improvement] JAVA-568: Reduce level of logs on missing rpc_address. +- [improvement] JAVA-312, JAVA-681: Expose node token and range information. +- [bug] JAVA-595: Fix cluster name mismatch check at startup. +- [bug] JAVA-620: Fix guava dependency when using OSGI. +- [bug] JAVA-678: Fix handling of DROP events when ks name is case-sensitive. +- [improvement] JAVA-631: Use List<?> instead of List<Object> in QueryBuilder API. +- [improvement] JAVA-654: Exclude Netty POM from META-INF in shaded JAR. +- [bug] JAVA-655: Quote single quotes contained in table comments in asCQLQuery method. +- [bug] JAVA-684: Empty TokenRange returned in a one token cluster. +- [improvement] JAVA-687: Expose TokenRange#contains. +- [bug] JAVA-614: Prevent race between cancellation and query completion. +- [bug] JAVA-632: Prevent cancel and timeout from cancelling unrelated ResponseHandler if + streamId was already released and reused. +- [bug] JAVA-642: Fix issue when newly opened pool fails before we could mark the node UP. +- [bug] JAVA-613: Fix unwanted LBP notifications when a contact host is down. +- [bug] JAVA-651: Fix edge cases where a connection was released twice. +- [bug] JAVA-653: Fix edge cases in query cancellation. + + +### 2.1.4 + +Merged from 2.0 branch: + +- [improvement] JAVA-538: Shade Netty dependency. +- [improvement] JAVA-543: Target schema refreshes more precisely. +- [bug] JAVA-546: Don't check rpc_address for control host. +- [improvement] JAVA-409: Improve message of NoHostAvailableException. +- [bug] JAVA-556: Rework connection reaper to avoid deadlock. +- [bug] JAVA-557: Avoid deadlock when multiple connections to the same host get write + errors. +- [improvement] JAVA-504: Make shuffle=true the default for TokenAwarePolicy. +- [bug] JAVA-577: Fix bug when SUSPECT reconnection succeeds, but one of the pooled + connections fails while bringing the node back up. +- [bug] JAVA-587: Prevent faulty control connection from ignoring reconnecting hosts.
+- temporarily revert "Add idle timeout to the connection pool". +- [bug] JAVA-593: Ensure updateCreatedPools does not add pools for suspected hosts. +- [bug] JAVA-594: Ensure state change notifications for a given host are handled serially. +- [bug] JAVA-597: Ensure control connection reconnects when control host is removed. + + +### 2.1.3 + +- [bug] JAVA-510: Ignore static fields in mapper. +- [bug] JAVA-509: Fix UDT parsing at init when using the default protocol version. +- [bug] JAVA-495: Fix toString, equals and hashCode on accessor proxies. +- [bug] JAVA-528: Allow empty name on Column and Field annotations. + +Merged from 2.0 branch: + +- [bug] JAVA-497: Ensure control connection does not trigger concurrent reconnects. +- [improvement] JAVA-472: Keep trying to reconnect on authentication errors. +- [improvement] JAVA-463: Expose close method on load balancing policy. +- [improvement] JAVA-459: Allow load balancing policy to trigger refresh for a single host. +- [bug] JAVA-493: Expose an API to cancel reconnection attempts. +- [bug] JAVA-503: Fix NPE when a connection fails during pool construction. +- [improvement] JAVA-423: Log datacenter name in DCAware policy's init when it is explicitly provided. +- [improvement] JAVA-504: Shuffle the replicas in TokenAwarePolicy.newQueryPlan. +- [improvement] JAVA-507: Make schema agreement wait tuneable. +- [improvement] JAVA-494: Document how to inject the driver metrics into another registry. +- [improvement] JAVA-419: Add idle timeout to the connection pool. +- [bug] JAVA-516: LatencyAwarePolicy does not shutdown executor on invocation of close. +- [improvement] JAVA-451: Throw an exception when DCAwareRoundRobinPolicy is built with + an explicit but null or empty local datacenter. +- [bug] JAVA-511: Fix check for local contact points in DCAware policy's init. +- [improvement] JAVA-457: Make timeout on saturated pool customizable. +- [improvement] JAVA-521: Downgrade Guava to 14.0.1. 
+- [bug] JAVA-526: Fix token awareness for case-sensitive keyspaces and tables. +- [bug] JAVA-515: Check maximum number of values passed to SimpleStatement. +- [improvement] JAVA-532: Expose the driver version through the API. +- [improvement] JAVA-522: Optimize session initialization when some hosts are not + responsive. + + +### 2.1.2 + +- [improvement] JAVA-361, JAVA-364, JAVA-467: Support for native protocol v3. +- [bug] JAVA-454: Fix UDT fields of type inet in QueryBuilder. +- [bug] JAVA-455: Exclude transient fields from Frozen checks. +- [bug] JAVA-453: Fix handling of null collections in mapper. +- [improvement] JAVA-452: Make implicit column names case-insensitive in mapper. +- [bug] JAVA-433: Fix named bind markers in QueryBuilder. +- [bug] JAVA-458: Fix handling of BigInteger in object mapper. +- [bug] JAVA-465: Ignore synthetic fields in mapper. +- [improvement] JAVA-451: Throw an exception when DCAwareRoundRobinPolicy is built with + an explicit but null or empty local datacenter. +- [improvement] JAVA-469: Add backwards-compatible DataType.serialize methods. +- [bug] JAVA-487: Handle null enum fields in object mapper. +- [bug] JAVA-499: Handle null UDT fields in object mapper. + +Merged from 2.0 branch: + +- [bug] JAVA-449: Handle null pool in PooledConnection.release. +- [improvement] JAVA-425: Defunct connection on request timeout. +- [improvement] JAVA-426: Try next host when we get a SERVER_ERROR. +- [bug] JAVA-449, JAVA-460, JAVA-471: Handle race between query timeout and completion. +- [bug] JAVA-496: Fix DCAwareRoundRobinPolicy datacenter auto-discovery. + + +### 2.1.1 + +- [new] JAVA-441: Support for new "frozen" keyword. + +Merged from 2.0 branch: + +- [bug] JAVA-397: Check cluster name when connecting to a new node. +- [bug] JAVA-326: Add missing CAS delete support in QueryBuilder. +- [bug] JAVA-363: Add collection and data length checks during serialization. +- [improvement] JAVA-329: Surface number of retries in metrics. 
+- [bug] JAVA-428: Do not use a host when no rpc_address found for it. +- [improvement] JAVA-358: Add ResultSet.wasApplied() for conditional queries. +- [bug] JAVA-349: Fix negative HostConnectionPool open count. +- [improvement] JAVA-436: Log more connection details at trace and debug levels. +- [bug] JAVA-445: Fix cluster shutdown. + + +### 2.1.0 + +- [bug] JAVA-408: ClusteringColumn annotation not working with specified ordering. +- [improvement] JAVA-410: Fail BoundStatement if null values are not set explicitly. +- [bug] JAVA-416: Handle UDT and tuples in BuiltStatement.toString. + +Merged from 2.0 branch: + +- [bug] JAVA-407: Release connections on ResultSetFuture#cancel. +- [bug] JAVA-393: Fix handling of SimpleStatement with values in query builder + batches. +- [bug] JAVA-417: Ensure pool is properly closed in onDown. +- [bug] JAVA-415: Fix tokenMap initialization at startup. +- [bug] JAVA-418: Avoid deadlock on close. + + +### 2.1.0-rc1 + +Merged from 2.0 branch: + +- [bug] JAVA-394: Ensure defunct connections are completely closed. +- [bug] JAVA-342, JAVA-390: Fix memory and resource leak on closed Sessions. + + +### 2.1.0-beta1 + +- [new] Support for User Defined Types and tuples +- [new] Simple object mapper + +Merged from 2.0 branch: everything up to 2.0.3 (included), and the following. + +- [improvement] JAVA-204: Better handling of dead connections. +- [bug] JAVA-373: Fix potential NPE in ControlConnection. +- [bug] JAVA-291: Throws NPE when passed null for a contact point. +- [bug] JAVA-315: Avoid LoadBalancingPolicy onDown+onUp at startup. +- [bug] JAVA-343: Avoid classloader leak in Tomcat. +- [bug] JAVA-387: Avoid deadlock in onAdd/onUp. +- [bug] JAVA-377, JAVA-391: Make metadata parsing more lenient. + + +### 2.0.12.2 + +- [bug] JAVA-1179: Request objects should be copied when executed. +- [improvement] JAVA-1182: Throw error when synchronous call made on I/O thread. 
+- [bug] JAVA-1184: Unwrap StatementWrappers when extracting column definitions. + + +### 2.0.12.1 + +- [bug] JAVA-994: Don't call on(Up|Down|Add|Remove) methods if Cluster is closed/closing. +- [improvement] JAVA-805: Document that metrics are null until Cluster is initialized. +- [bug] JAVA-1072: Ensure defunct connections are properly evicted from the pool. + + +### 2.0.12 + +- [bug] JAVA-950: Fix Cluster.connect with a case-sensitive keyspace. +- [improvement] JAVA-920: Downgrade "error creating pool" message to WARN. +- [bug] JAVA-954: Don't trigger reconnection before initialization complete. +- [improvement] JAVA-914: Avoid rejected tasks at shutdown. +- [improvement] JAVA-921: Add SimpleStatement.getValuesCount(). +- [bug] JAVA-901: Move call to connection.release() out of cancelHandler. +- [bug] JAVA-960: Avoid race in control connection shutdown. +- [bug] JAVA-656: Fix NPE in ControlConnection.updateLocationInfo. +- [bug] JAVA-966: Count uninitialized connections in conviction policy. +- [improvement] JAVA-917: Document SSL configuration. +- [improvement] JAVA-652: Add DCAwareRoundRobinPolicy builder. +- [improvement] JAVA-808: Add generic filtering policy that can be used to exclude specific DCs. + + +### 2.0.11 + +- [improvement] JAVA-718: Log streamid at the trace level on sending request and receiving response. +- [bug] JAVA-796: Fix SpeculativeExecutionPolicy.init() and close() are never called. +- [improvement] JAVA-710: Suppress unnecessary warning at shutdown. +- [improvement] #340: Allow DNS name with multiple A-records as contact point. +- [bug] JAVA-794: Allow tracing across multiple result pages. +- [bug] JAVA-737: DowngradingConsistencyRetryPolicy ignores write timeouts. +- [bug] JAVA-736: Forbid bind marker in QueryBuilder add/append/prepend. +- [bug] JAVA-712: Prevent QueryBuilder.quote() from applying duplicate double quotes. +- [bug] JAVA-688: Prevent QueryBuilder from trying to serialize raw string. 
+- [bug] JAVA-679: Support bind marker in QueryBuilder DELETE's list index. +- [improvement] JAVA-475: Improve QueryBuilder API for SELECT DISTINCT. +- [improvement] JAVA-225: Create values() function for Insert builder using List. +- [improvement] JAVA-702: Warn when ReplicationStrategy encounters invalid + replication factors. +- [improvement] JAVA-662: Add PoolingOptions method to set both core and max + connections. +- [improvement] JAVA-766: Do not include epoll JAR in binary distribution. +- [improvement] JAVA-726: Optimize internal copies of Request objects. +- [bug] JAVA-815: Preserve tracing across retries. +- [improvement] JAVA-709: New RetryDecision.tryNextHost(). +- [bug] JAVA-733: Handle function calls and raw strings as non-idempotent in QueryBuilder. +- [improvement] JAVA-765: Provide API to retrieve values of a Parameterized SimpleStatement. +- [improvement] JAVA-827: implement UPDATE .. IF EXISTS in QueryBuilder. +- [improvement] JAVA-618: Randomize contact points list to prevent hotspots. +- [improvement] JAVA-720: Surface the coordinator used on query failure. +- [bug] JAVA-792: Handle contact points removed during init. +- [improvement] JAVA-719: Allow PlainTextAuthProvider to change its credentials at runtime. +- [new feature] JAVA-151: Make it possible to register for SchemaChange Events. +- [improvement] JAVA-861: Downgrade "Asked to rebuild table" log from ERROR to INFO level. +- [improvement] JAVA-797: Provide an option to prepare statements only on one node. +- [improvement] JAVA-658: Provide an option to not re-prepare all statements in onUp. +- [improvement] JAVA-853: Customizable creation of netty timer. +- [bug] JAVA-859: Avoid quadratic ring processing with invalid replication factors. +- [improvement] JAVA-657: Debounce control connection queries. +- [bug] JAVA-784: LoadBalancingPolicy.distance() called before init(). +- [new feature] JAVA-828: Make driver-side metadata optional. 
+- [improvement] JAVA-544: Allow hosts to remain partially up. +- [improvement] JAVA-821, JAVA-822: Remove internal blocking calls and expose async session + creation. +- [improvement] JAVA-725: Use parallel calls when re-preparing statement on other + hosts. +- [bug] JAVA-629: Don't use connection timeout for unrelated internal queries. +- [bug] JAVA-892: Fix NPE in speculative executions when metrics disabled. + +Merged from 2.0.10_fixes branch: + +- [improvement] JAVA-756: Use Netty's pooled ByteBufAllocator by default. +- [improvement] JAVA-759: Expose "unsafe" paging state API. +- [bug] JAVA-767: Fix getObject by name. +- [bug] JAVA-768: Prevent race during pool initialization. + + +### 2.0.10.1 + +- [improvement] JAVA-756: Use Netty's pooled ByteBufAllocator by default. +- [improvement] JAVA-759: Expose "unsafe" paging state API. +- [bug] JAVA-767: Fix getObject by name. +- [bug] JAVA-768: Prevent race during pool initialization. + + +### 2.0.10 + +- [new feature] JAVA-518: Add AddressTranslater for EC2 multi-region deployment. +- [improvement] JAVA-533: Add connection heartbeat. +- [improvement] JAVA-568: Reduce level of logs on missing rpc_address. +- [improvement] JAVA-312, JAVA-681: Expose node token and range information. +- [bug] JAVA-595: Fix cluster name mismatch check at startup. +- [bug] JAVA-620: Fix guava dependency when using OSGI. +- [bug] JAVA-678: Fix handling of DROP events when ks name is case-sensitive. +- [improvement] JAVA-631: Use List instead of List in QueryBuilder API. +- [improvement] JAVA-654: Exclude Netty POM from META-INF in shaded JAR. +- [bug] JAVA-655: Quote single quotes contained in table comments in asCQLQuery method. +- [bug] JAVA-684: Empty TokenRange returned in a one token cluster. +- [improvement] JAVA-687: Expose TokenRange#contains. +- [new feature] JAVA-547: Expose values of BoundStatement. +- [new feature] JAVA-584: Add getObject to BoundStatement and Row. 
+- [improvement] JAVA-419: Improve connection pool resizing algorithm. +- [bug] JAVA-599: Fix race condition between pool expansion and shutdown. +- [improvement] JAVA-622: Upgrade Netty to 4.0.27. +- [improvement] JAVA-562: Coalesce frames before flushing them to the connection. +- [improvement] JAVA-583: Rename threads to indicate that they are for the driver. +- [new feature] JAVA-550: Expose paging state. +- [new feature] JAVA-646: Slow Query Logger. +- [improvement] JAVA-698: Exclude some errors from measurements in LatencyAwarePolicy. +- [bug] JAVA-641: Fix issue when executing a PreparedStatement from another cluster. +- [improvement] JAVA-534: Log keyspace xxx does not exist at WARN level. +- [improvement] JAVA-619: Allow Cluster subclasses to delegate to another instance. +- [new feature] JAVA-669: Expose an API to check for schema agreement after a + schema-altering statement. +- [improvement] JAVA-692: Make connection and pool creation fully async. +- [improvement] JAVA-505: Optimize connection use after reconnection. +- [improvement] JAVA-617: Remove "suspected" mechanism. +- [improvement] reverts JAVA-425: Don't mark connection defunct on client timeout. +- [new feature] JAVA-561: Speculative query executions. +- [bug] JAVA-666: Release connection before completing the ResultSetFuture. +- [new feature BETA] JAVA-723: Percentile-based variant of query logger and speculative + executions. +- [bug] JAVA-734: Fix buffer leaks when compression is enabled. + +Merged from 2.0.9_fixes branch: + +- [bug] JAVA-614: Prevent race between cancellation and query completion. +- [bug] JAVA-632: Prevent cancel and timeout from cancelling unrelated ResponseHandler if + streamId was already released and reused. +- [bug] JAVA-642: Fix issue when newly opened pool fails before we could mark the node UP. +- [bug] JAVA-613: Fix unwanted LBP notifications when a contact host is down. +- [bug] JAVA-651: Fix edge cases where a connection was released twice. 
+- [bug] JAVA-653: Fix edge cases in query cancellation. + + +### 2.0.9.2 + +- [bug] JAVA-651: Fix edge cases where a connection was released twice. +- [bug] JAVA-653: Fix edge cases in query cancellation. + + +### 2.0.9.1 + +- [bug] JAVA-614: Prevent race between cancellation and query completion. +- [bug] JAVA-632: Prevent cancel and timeout from cancelling unrelated ResponseHandler if + streamId was already released and reused. +- [bug] JAVA-642: Fix issue when newly opened pool fails before we could mark the node UP. +- [bug] JAVA-613: Fix unwanted LBP notifications when a contact host is down. + + +### 2.0.9 + +- [improvement] JAVA-538: Shade Netty dependency. +- [improvement] JAVA-543: Target schema refreshes more precisely. +- [bug] JAVA-546: Don't check rpc_address for control host. +- [improvement] JAVA-409: Improve message of NoHostAvailableException. +- [bug] JAVA-556: Rework connection reaper to avoid deadlock. +- [bug] JAVA-557: Avoid deadlock when multiple connections to the same host get write + errors. +- [improvement] JAVA-504: Make shuffle=true the default for TokenAwarePolicy. +- [bug] JAVA-577: Fix bug when SUSPECT reconnection succeeds, but one of the pooled + connections fails while bringing the node back up. +- [bug] JAVA-419: JAVA-587: Prevent faulty control connection from ignoring reconnecting hosts. +- temporarily revert "Add idle timeout to the connection pool". +- [bug] JAVA-593: Ensure updateCreatedPools does not add pools for suspected hosts. +- [bug] JAVA-594: Ensure state change notifications for a given host are handled serially. +- [bug] JAVA-597: Ensure control connection reconnects when control host is removed. + + +### 2.0.8 + +- [bug] JAVA-526: Fix token awareness for case-sensitive keyspaces and tables. +- [bug] JAVA-515: Check maximum number of values passed to SimpleStatement. +- [improvement] JAVA-532: Expose the driver version through the API. 
+- [improvement] JAVA-522: Optimize session initialization when some hosts are not + responsive. + + +### 2.0.7 + +- [bug] JAVA-449: Handle null pool in PooledConnection.release. +- [improvement] JAVA-425: Defunct connection on request timeout. +- [improvement] JAVA-426: Try next host when we get a SERVER_ERROR. +- [bug] JAVA-449, JAVA-460, JAVA-471: Handle race between query timeout and completion. +- [bug] JAVA-496: Fix DCAwareRoundRobinPolicy datacenter auto-discovery. +- [bug] JAVA-497: Ensure control connection does not trigger concurrent reconnects. +- [improvement] JAVA-472: Keep trying to reconnect on authentication errors. +- [improvement] JAVA-463: Expose close method on load balancing policy. +- [improvement] JAVA-459: Allow load balancing policy to trigger refresh for a single host. +- [bug] JAVA-493: Expose an API to cancel reconnection attempts. +- [bug] JAVA-503: Fix NPE when a connection fails during pool construction. +- [improvement] JAVA-423: Log datacenter name in DCAware policy's init when it is explicitly provided. +- [improvement] JAVA-504: Shuffle the replicas in TokenAwarePolicy.newQueryPlan. +- [improvement] JAVA-507: Make schema agreement wait tuneable. +- [improvement] JAVA-494: Document how to inject the driver metrics into another registry. +- [improvement] JAVA-419: Add idle timeout to the connection pool. +- [bug] JAVA-516: LatencyAwarePolicy does not shutdown executor on invocation of close. +- [improvement] JAVA-451: Throw an exception when DCAwareRoundRobinPolicy is built with + an explicit but null or empty local datacenter. +- [bug] JAVA-511: Fix check for local contact points in DCAware policy's init. +- [improvement] JAVA-457: Make timeout on saturated pool customizable. +- [improvement] JAVA-521: Downgrade Guava to 14.0.1. + + +### 2.0.6 + +- [bug] JAVA-397: Check cluster name when connecting to a new node. +- [bug] JAVA-326: Add missing CAS delete support in QueryBuilder. 
+- [bug] JAVA-363: Add collection and data length checks during serialization. +- [improvement] JAVA-329: Surface number of retries in metrics. +- [bug] JAVA-428: Do not use a host when no rpc_address found for it. +- [improvement] JAVA-358: Add ResultSet.wasApplied() for conditional queries. +- [bug] JAVA-349: Fix negative HostConnectionPool open count. +- [improvement] JAVA-436: Log more connection details at trace and debug levels. +- [bug] JAVA-445: Fix cluster shutdown. +- [improvement] JAVA-439: Expose child policy in chainable load balancing policies. + + +### 2.0.5 + +- [bug] JAVA-407: Release connections on ResultSetFuture#cancel. +- [bug] JAVA-393: Fix handling of SimpleStatement with values in query builder + batches. +- [bug] JAVA-417: Ensure pool is properly closed in onDown. +- [bug] JAVA-415: Fix tokenMap initialization at startup. +- [bug] JAVA-418: Avoid deadlock on close. + + +### 2.0.4 + +- [improvement] JAVA-204: Better handling of dead connections. +- [bug] JAVA-373: Fix potential NPE in ControlConnection. +- [bug] JAVA-291: Throws NPE when passed null for a contact point. +- [bug] JAVA-315: Avoid LoadBalancingPolicy onDown+onUp at startup. +- [bug] JAVA-343: Avoid classloader leak in Tomcat. +- [bug] JAVA-387: Avoid deadlock in onAdd/onUp. +- [bug] JAVA-377, JAVA-391: Make metadata parsing more lenient. +- [bug] JAVA-394: Ensure defunct connections are completely closed. +- [bug] JAVA-342, JAVA-390: Fix memory and resource leak on closed Sessions. + + +### 2.0.3 + +- [new] The new AbsractSession makes mocking of Session easier. +- [new] JAVA-309: Allow to trigger a refresh of connected hosts. +- [new] JAVA-265: New Session#getState method allows to grab information on + which nodes a session is connected to. +- [new] JAVA-327: Add QueryBuilder syntax for tuples in where clauses (syntax + introduced in Cassandra 2.0.6). +- [improvement] JAVA-359: Properly validate arguments of PoolingOptions methods. 
+- [bug] JAVA-368: Fix bogus rejection of BigInteger in 'execute with values'. +- [bug] JAVA-367: Signal connection failure sooner to avoid missing them. +- [bug] JAVA-337: Throw UnsupportedOperationException for protocol batch + setSerialCL. + +Merged from 1.0 branch: + +- [bug] JAVA-325: Fix periodic reconnection to down hosts. + + +### 2.0.2 + +- [api] The type of the map key returned by NoHostAvailable#getErrors has changed from + InetAddress to InetSocketAddress. Same for Initializer#getContactPoints return and + for AuthProvider#newAuthenticator. +- [api] JAVA-296: The default load balacing policy is now DCAwareRoundRobinPolicy, and the local + datacenter is automatically picked based on the first connected node. Furthermore, + the TokenAwarePolicy is also used by default. +- [new] JAVA-145: New optional AddressTranslater. +- [bug] JAVA-321: Don't remove quotes on keyspace in the query builder. +- [bug] JAVA-320: Fix potential NPE while cluster undergo schema changes. +- [bug] JAVA-319: Fix thread-safety of page fetching. +- [bug] JAVA-318: Fix potential NPE using fetchMoreResults. + +Merged from 1.0 branch: + +- [new] JAVA-179: Expose the name of the partitioner in use in the cluster metadata. +- [new] Add new WhiteListPolicy to limit the nodes connected to a particular list. +- [improvement] JAVA-289: Do not hop DC for LOCAL_* CL in DCAwareRoundRobinPolicy. +- [bug] JAVA-313: Revert back to longs for dates in the query builder. +- [bug] JAVA-314: Don't reconnect to nodes ignored by the load balancing policy. + + +### 2.0.1 + +- [improvement] JAVA-278: Handle the static columns introduced in Cassandra 2.0.6. +- [improvement] JAVA-208: Add Cluster#newSession method to create Session without connecting + right away. +- [bug] JAVA-279: Add missing iso8601 patterns for parsing dates. +- [bug] Properly parse BytesType as the blob type. +- [bug] JAVA-280: Potential NPE when parsing schema of pre-CQL tables of C* 1.2 nodes. 
+ +Merged from 1.0 branch: + +- [bug] JAVA-275: LatencyAwarePolicy.Builder#withScale doesn't set the scale. +- [new] JAVA-114: Add methods to check if a Cluster/Session instance has been closed already. + + +### 2.0.0 + +- [api] JAVA-269: Case sensitive identifier by default in Metadata. +- [bug] JAVA-274: Fix potential NPE in Cluster#connect. + +Merged from 1.0 branch: + +- [bug] JAVA-263: Always return the PreparedStatement object that is cached internally. +- [bug] JAVA-261: Fix race when multiple connects are done in parallel. +- [bug] JAVA-270: Don't connect at all to nodes that are ignored by the load balancing + policy. + + +### 2.0.0-rc3 + +- [improvement] The protocol version 1 is now supported (features only supported by the + version 2 of the protocol throw UnsupportedFeatureException). +- [improvement] JAVA-195: Make most main objects interface to facilitate testing/mocking. +- [improvement] Adds new getStatements and clear methods to BatchStatement. +- [api] JAVA-247: Renamed shutdown to closeAsync and ShutdownFuture to CloseFuture. Cluster + and Session also now implement Closeable. +- [bug] JAVA-232: Fix potential thread leaks when shutting down Metrics. +- [bug] JAVA-231: Fix potential NPE in HostConnectionPool. +- [bug] JAVA-244: Avoid NPE when node is in an unconfigured DC. +- [bug] JAVA-258: Don't block for scheduled reconnections on Cluster#close. + +Merged from 1.0 branch: + +- [new] JAVA-224: Added Session#prepareAsync calls. +- [new] JAVA-249: Added Cluster#getLoggedKeyspace. +- [improvement] Avoid preparing a statement multiple times per host with multiple sessions. +- [bug] JAVA-255: Make sure connections are returned to the right pools. +- [bug] JAVA-264: Use date string in query build to work-around CASSANDRA-6718. + + +### 2.0.0-rc2 + +- [new] JAVA-207: Add LOCAL_ONE consistency level support (requires using C* 2.0.2+). +- [bug] JAVA-219: Fix parsing of counter types.
+- [bug] JAVA-218: Fix missing whitespace for IN clause in the query builder. +- [bug] JAVA-221: Fix replicas computation for token aware balancing. + +Merged from 1.0 branch: + +- [bug] JAVA-213: Fix regression from JAVA-201. +- [improvement] New getter to obtain a snapshot of the scores maintained by + LatencyAwarePolicy. + + +### 2.0.0-rc1 + +- [new] JAVA-199: Mark compression dependencies optional in maven. +- [api] Renamed TableMetadata#getClusteringKey to TableMetadata#getClusteringColumns. + +Merged from 1.0 branch: + +- [new] JAVA-142: OSGi bundle. +- [improvement] JAVA-205: Make collections returned by Row immutable. +- [improvement] JAVA-203: Limit internal thread pool size. +- [bug] JAVA-201: Don't retain unused PreparedStatement in memory. +- [bug] Add missing clustering order info in TableMetadata +- [bug] JAVA-196: Allow bind markers for collections in the query builder. + + +### 2.0.0-beta2 + +- [api] BoundStatement#setX(String, X) methods now set all values (if there is + more than one) having the provided name, not just the first occurrence. +- [api] The Authenticator interface now has an onAuthenticationSuccess method that + allows to handle the potential last token sent by the server. +- [new] The query builder doesn't serialize large values to strings anymore by + default by making use of the new ability to send values alongside the query string. +- [new] JAVA-140: The query builder has been updated for new CQL features. +- [bug] Fix exception when a conditional write timeout C* side. +- [bug] JAVA-182: Ensure connection is created when Cluster metadata are asked for. +- [bug] JAVA-187: Fix potential NPE during authentication. + + +### 2.0.0-beta1 + +- [api] The 2.0 version is an API-breaking upgrade of the driver. While most + of the breaking changes are minor, they are too numerous to be listed here + and you are encouraged to look at the Upgrade_guide_to_2.0 file that describes + those changes in detail.
+- [new] LZ4 compression is supported for the protocol. +- [new] JAVA-39: The driver does not depend on cassandra-all anymore. +- [new] New BatchStatement class allows to execute batch other statements. +- [new] Large ResultSet are now paged (incrementally fetched) by default. +- [new] SimpleStatement support values for bind-variables, to allow + prepare+execute behavior with one roundtrip. +- [new] Query parameters defaults (Consistency level, page size, ...) can be + configured globally. +- [new] New Cassandra 2.0 SERIAL and LOCAL_SERIAL consistency levels are + supported. +- [new] JAVA-116: Cluster#shutdown now waits for ongoing queries to complete by default. +- [new] Generic authentication through SASL is now exposed. +- [bug] JAVA-88: TokenAwarePolicy now takes all replica into account, instead of only the + first one. + + +### 1.0.5 + +- [new] JAVA-142: OSGi bundle. +- [new] JAVA-207: Add support for ConsistencyLevel.LOCAL_ONE; note that this + require Cassandra 1.2.12+. +- [improvement] JAVA-205: Make collections returned by Row immutable. +- [improvement] JAVA-203: Limit internal thread pool size. +- [improvement] New getter to obtain a snapshot of the scores maintained by + LatencyAwarePolicy. +- [improvement] JAVA-222: Avoid synchronization when getting codec for collection + types. +- [bug] JAVA-201, JAVA-213: Don't retain unused PreparedStatement in memory. +- [bug] Add missing clustering order info in TableMetadata +- [bug] JAVA-196: Allow bind markers for collections in the query builder. + + +### 1.0.4 + +- [api] JAVA-163: The Cluster.Builder#poolingOptions and Cluster.Builder#socketOptions + are now deprecated. They are replaced by the new withPoolingOptions and + withSocketOptions methods. +- [new] JAVA-129: A new LatencyAwarePolicy wrapping policy has been added, allowing to + add latency awareness to a wrapped load balancing policy. +- [new] JAVA-161: Cluster.Builder#deferInitialization: Allow defering cluster initialization. 
+- [new] JAVA-117: Add truncate statement in query builder. +- [new] JAVA-106: Support empty IN in the query builder. +- [bug] JAVA-166: Fix spurious "No current pool set; this should not happen" error + message. +- [bug] JAVA-184: Fix potential overflow in RoundRobinPolicy and correctly errors if + a balancing policy throws. +- [bug] Don't release Stream ID for timeouted queries (unless we do get back + the response) +- [bug] Correctly escape identifiers and use fully qualified table names when + exporting schema as string. + + +### 1.0.3 + +- [api] The query builder now correctly throw an exception when given a value + of a type it doesn't know about. +- [new] SocketOptions#setReadTimeout allows to set a timeout on how long we + wait for the answer of one node. See the javadoc for more details. +- [new] New Session#prepare method that takes a Statement. +- [bug] JAVA-143: Always take per-query CL, tracing, etc. into account for QueryBuilder + statements. +- [bug] Temporary fixup for TimestampType when talking to C* 2.0 nodes. + + +### 1.0.2 + +- [api] Host#getMonitor and all Host.HealthMonitor methods have been + deprecated. The new Host#isUp method is now prefered to the method + in the monitor and you should now register Host.StateListener against + the Cluster object directly (registering against a host HealthMonitor + was much more limited anyway). +- [new] JAVA-92: New serialize/deserialize methods in DataType to serialize/deserialize + values to/from bytes. +- [new] JAVA-128: New getIndexOf() method in ColumnDefinitions to find the index of + a given column name. +- [bug] JAVA-131: Fix a bug when thread could get blocked while setting the current + keyspace. +- [bug] JAVA-136: Quote inet addresses in the query builder since CQL3 requires it. + + +### 1.0.1 + +- [api] JAVA-100: Function call handling in the query builder has been modified in a + backward incompatible way. Function calls are not parsed from string values + anymore as this wasn't safe. 
Instead the new 'fcall' method should be used. +- [api] Some typos in method names in PoolingOptions have been fixed in a + backward incompatible way before the API gets widespread. +- [bug] JAVA-123: Don't destroy composite partition key with BoundStatement and + TokenAwarePolicy. +- [new] null values support in the query builder. +- [new] JAVA-5: SSL support (requires C* >= 1.2.1). +- [new] JAVA-113: Allow generating unlogged batch in the query builder. +- [improvement] Better error message when no hosts are available. +- [improvement] Improves performance of the stress example application. + + +### 1.0.0 + +- [api] The AuthInfoProvider has been (temporarily) removed. Instead, the + Cluster builder has a new withCredentials() method to provide a username + and password for use with Cassandra's PasswordAuthenticator. Custom + authenticators will be re-introduced in a future version but are not + supported at the moment. +- [api] The isMetricsEnabled() method in Configuration has been replaced by + getMetricsOptions(). An option to disable JMX reporting (on by default) + has been added. +- [bug] JAVA-91: Don't make default load balancing policy a static singleton since it + is stateful. + + +### 1.0.0-RC1 + +- [new] JAVA-79: Null values are now supported in BoundStatement (but you will need at + least Cassandra 1.2.3 for it to work). The API of BoundStatement has been + slightly changed so that not binding a variable is not an error anymore, + the variable is simply considered null by default. The isReady() method has + been removed. +- [improvement] JAVA-75: The Cluster/Session shutdown methods now properly block until + the shutdown is complete. A version with a timeout has been added. +- [bug] JAVA-44: Fix use of CQL3 functions in the query builder. +- [bug] JAVA-77: Fix case where multiple schema changes too quickly wouldn't work + (only triggered when 0.0.0.0 was used for the rpc_address on the Cassandra + nodes).
+- [bug] JAVA-72: Fix IllegalStateException thrown due to a reconnection made on an I/O + thread. +- [bug] JAVA-82: Correctly reports errors during authentication phase. + + +### 1.0.0-beta2 + +- [new] JAVA-51, JAVA-60, JAVA-58: Support blob constants, BigInteger, BigDecimal and counter batches in + the query builder. +- [new] JAVA-61: Basic support for custom CQL3 types. +- [new] JAVA-65: Add "execution infos" for a result set (this also move the query + trace in the new ExecutionInfos object, so users of beta1 will have to + update). +- [bug] JAVA-62: Fix failover bug in DCAwareRoundRobinPolicy. +- [bug] JAVA-66: Fix use of bind markers for routing keys in the query builder. + + +### 1.0.0-beta1 + +- initial release From eb289473a5811c802ba1986889a3e8a1791e8582 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 09:51:05 -0800 Subject: [PATCH 260/979] Reference DSE changelog from changelog --- changelog/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 8c1b0811e4a..5ba419e223a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -6,7 +6,9 @@ This version brings in all functionality that was formerly only in the DataStax Enterprise driver, such as the built-in support for reactive programming. Going forward, all new features will be -implemented in this single driver. +implemented in this single driver (for past DataStax Enterprise driver versions before the merge, +refer to the [DSE driver +changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changelog/)). 
- [documentation] JAVA-2607: Improve visibility of driver dependencies section - [documentation] JAVA-1975: Document the importance of using specific TinkerPop version From 2863100b736f01b77f7347d17536da6f151b4583 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 13:31:38 -0800 Subject: [PATCH 261/979] Fix title and summary in docs.yaml --- docs.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs.yaml b/docs.yaml index 17989d6312f..d7c3f1eb5b3 100644 --- a/docs.yaml +++ b/docs.yaml @@ -1,5 +1,5 @@ -title: Java Driver for Apache Cassandra™ -summary: High performance Java client for Apache Cassandra™ +title: DataStax Java Driver +summary: DataStax Java Driver for Apache Cassandra® homepage: http://docs.datastax.com/en/developer/java-driver theme: datastax sections: From 1ef3506037200cee3a0795cc9a0fddd5a2d17af9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 13:37:53 -0800 Subject: [PATCH 262/979] Update version in docs --- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 14 ++++---- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 8 ++--- manual/core/detachable_types/README.md | 14 ++++---- manual/core/dse/geotypes/README.md | 6 ++-- manual/core/dse/graph/README.md | 4 +-- manual/core/dse/graph/fluent/README.md | 4 +-- .../core/dse/graph/fluent/explicit/README.md | 12 +++---- manual/core/dse/graph/results/README.md | 6 ++-- manual/core/dse/graph/script/README.md | 6 ++-- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 28 +++++++-------- manual/core/metadata/schema/README.md | 20 
+++++------ manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 8 ++--- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reactive/README.md | 2 +- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 4 +-- manual/core/statements/README.md | 8 ++--- manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/delete/README.md | 16 ++++----- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 12 +++---- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 18 +++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 24 ++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 10 +++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 2 +- manual/query_builder/README.md | 10 +++--- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 
+- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- 75 files changed, 305 insertions(+), 305 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 5ba419e223a..869c86f119f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.4.0 (in progress) +### 4.4.0 This version brings in all functionality that was formerly only in the DataStax Enterprise driver, such as the built-in support for reactive programming. Going forward, all new features will be diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 3c7bf6d561f..3adb1bb7572 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 3915294e50a..9a68793745c 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index af498372136..40d7a485220 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 22cb2d63434..f15efce09d2 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index e9b8d905f1b..ff28f8231d9 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -202,11 +202,11 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md 
index 2e3ac592de9..119c93650a6 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 492beff9f94..b549708b102 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. 
-[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 827d6d1a134..83057758545 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -255,7 +255,7 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index f67ce8c2bb8..0b823cc6bd6 100644 
--- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 353497a8824..05448529069 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 2d6fe8291a9..b7ff46a33ed 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 4edf44fe45d..b2ba9d6e141 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index bfda2f3a805..7db57adc651 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 064a2946a53..08904e7406d 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 48d85246628..e7474fe374d 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index f748bc07493..ddbe51b9f81 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 07ec00aa498..2c288ffd68d 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -502,6 +502,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index bc904d4d143..a560d425b7b 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 0270a3b5df2..c4e6ed83aca 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 357cc26f479..2e8749bf84e 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 48ed89dfe56..00947d40980 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 6641ec6025f..c3838e946bc 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 60133dfb9cb..8e3ed4f300d 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -120,6 +120,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index bda8c311442..ab844124871 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -186,9 +186,9 @@ think you can get away with the performance hit. We recommend that you: The [driver examples] include two complete web service implementations demonstrating forward-only and random (offset-based) paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/performance/README.md 
b/manual/core/performance/README.md index 97bd9711c45..4971f6f79fb 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md 
b/manual/core/pooling/README.md index 664897fd6f2..6057ee4fa93 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -158,5 +158,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 6a5ef438d66..0a11d641d98 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-ffi [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index fca2d936d93..519decf51e4 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -363,7 +363,7 @@ more fine-grained control of what should be retried, and how, is required. 
[reactive streams]: https://en.wikipedia.org/wiki/Reactive_Streams [Reactive Streams API]: https://github.com/reactive-streams/reactive-streams-jvm -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html [ReactiveSession]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html [ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html [ReactiveResultSet]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 811051f7806..fca4b0dec6c 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 023fe842557..1f9ddb1ebc6 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 025bfd584ff..1a20062ae85 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 3605c0768fd..8f82aea95c5 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 2c0fab297b5..33480b70465 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -188,5 +188,5 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 9108806d658..c6bc200aa56 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommend the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 68ab112bbcc..eb5671b3d2e 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 9bc0810274c..2d5fc88e2d1 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index f780051c6d7..a5f3b240bd4 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 6250085117d..f396884ea79 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 529d081f484..aad88010a12 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 8268c00c4dd..be54d610dac 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index ab3af6146e3..5928f2fe445 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 9600d9a05e7..491ef4974f4 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index d7245262778..af7d304dd15 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index f40e494a9f7..9da276b720b 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 6d7dba65ab4..0e604233639 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -142,14 +142,14 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 5629df431fd..1bbad280908 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 0cca26143b2..06f0d345a8c 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -99,12 +99,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index 1133490bf32..fd1b3f6f81c 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 
+93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 9f7f9801be1..f8ff612546d 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -106,15 +106,15 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 5eda5c18d48..9ef5e5231ce 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 7c69ab4e7f0..6c29304a0ca 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -133,18 +133,18 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 1a5aaecb1c0..47f847a8f4f 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. 
-[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 23d0f801c23..9d4913f8625 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git 
a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 48b91a22793..50189224fab 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -134,12 +134,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index f0186a8b310..55634bcefc6 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: 
http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 919073a94c2..35885ac1c47 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -159,8 +159,8 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 50358e140c0..b0b153ac5e3 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -94,5 +94,5 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-ffi -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 \ No newline at end of file diff --git a/manual/query_builder/README.md 
b/manual/query_builder/README.md index 07e192ba0f0..25fdcfdfc5d 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 0520992bc52..6bf09947e3c 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. 
-[Condition]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 5cf061ecaed..0262daf0b77 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index f4d2ab8121f..876b8a83e36 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 99fe7516fc3..a33f26868ee 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index b33eba70d23..07b0d8a5f4d 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index a7b4a8f1885..9ac8df978da 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 2e72b321b73..6dfdea05912 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 0321bee22a6..1a50ae9dad2 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 59964b88f62..ee985c10a77 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 18702c55ce5..2dfebba8ce8 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 68c97822c36..a38775e4b65 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 52d98566b26..a64fb6457fc 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index e98da7d7560..3fe6b359e2b 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 6d30f0344e6..0e9a427ebd4 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index e89bf37e888..63bfa0ebb44 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index f846e5d00ff..8961be20e1e 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From ad87a0df0f3cd471cc2275fa0b7d3a56e19cdc4b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 13:59:51 -0800 Subject: [PATCH 263/979] Fix test failures introduced by f6c270ebc --- .../loadbalancing/BasicLoadBalancingPolicyEventsTest.java | 3 ++- .../core/loadbalancing/BasicLoadBalancingPolicyInitTest.java | 4 ++++ .../loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java | 4 ++++ .../loadbalancing/DefaultLoadBalancingPolicyTestBase.java | 2 ++ 4 files changed, 12 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java index d7b04f528c5..f8dee137d64 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java @@ -37,7 +37,8 @@ import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -@RunWith(MockitoJUnitRunner.class) +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) public class BasicLoadBalancingPolicyEventsTest extends DefaultLoadBalancingPolicyTestBase { @Mock private Predicate filter; diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java index 217c01d7baa..b4bca1638a6 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java @@ -34,7 +34,11 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) public class BasicLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java index c4f1689319b..f5c68e79c2b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java @@ -46,8 +46,12 @@ import java.util.UUID; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) public class BasicLoadBalancingPolicyQueryPlanTest extends DefaultLoadBalancingPolicyTestBase { protected static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java index 762f7bec889..f5ac866e4ef 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java @@ -73,6 +73,8 @@ public void setup() { .thenReturn(true); when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) .thenReturn("dc1"); + when(defaultProfile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true)) + .thenReturn(true); when(context.getMetadataManager()).thenReturn(metadataManager); From bbc6926a7a076f629e12bc143c9897ba89bbade0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 14:18:53 -0800 Subject: [PATCH 264/979] [maven-release-plugin] prepare for next development iteration --- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index fd0751487e9..d2b8be79b10 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 4581f2313e6..e92f9c57318 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 
983b1e049aa..fcface968e1 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 3ac20b800ff..b48833f98cf 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8365f3b92e8..4abf2e7c8d8 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index be6d1e016e8..28584648836 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 2a7eb90f844..ff5f03a28c5 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index 63ae18d74e5..12cdb899963 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 889679f8b32..d7e33b736d3 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0a84a8add4a..cd4b608458b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.4.0-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-test-infra bundle From 9a897ccc3309dc50dff9517faf790125663a2d9d Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 17:59:49 -0800 Subject: [PATCH 265/979] Prepare changelog for next development iteration --- changelog/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 869c86f119f..b8641382caf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,8 @@ +### 4.5.0 (in progress) + ### 4.4.0 This version brings in all functionality that was formerly only in the DataStax Enterprise driver, From c5a50b89add65abee3436b4034bdc28094db2385 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 14 Jan 2020 18:02:11 -0800 Subject: [PATCH 266/979] Exclude docs from XML formatting --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index 12cdb899963..603857b3c07 100644 --- a/pom.xml +++ b/pom.xml @@ -591,6 +591,7 @@ .idea/** **/target/** **/dependency-reduced-pom.xml + docs/** From b054abb237e3bc6144ecdf2208a22ba8044dbbe0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 20 Jan 2020 15:30:45 -0300 Subject: [PATCH 267/979] Remove redundant type arguments --- .../com/datastax/oss/driver/internal/core/CqlIdentifiers.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java b/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java index 
02be974f46a..c685d9d69c8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/CqlIdentifiers.java @@ -32,7 +32,7 @@ private static List wrap( Objects.requireNonNull(in, "Input Iterable must not be null"); Objects.requireNonNull(fn, "CqlIdentifier conversion function must not be null"); - ImmutableList.Builder builder = ImmutableList.builder(); + ImmutableList.Builder builder = ImmutableList.builder(); for (String name : in) { builder.add(fn.apply(name)); } @@ -54,7 +54,7 @@ private static Map wrapKeys( @NonNull Map in, @NonNull Function fn) { Objects.requireNonNull(in, "Input Map must not be null"); Objects.requireNonNull(fn, "CqlIdentifier conversion function must not be null"); - ImmutableMap.Builder builder = ImmutableMap.builder(); + ImmutableMap.Builder builder = ImmutableMap.builder(); for (Map.Entry entry : in.entrySet()) { builder.put(fn.apply(entry.getKey()), entry.getValue()); } From ed2d5aaedb1a2320e2b57de7a5d127bcd7157774 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 20 Jan 2020 15:35:57 -0300 Subject: [PATCH 268/979] Fix compilation warning in AdminResult --- .../oss/driver/internal/core/adminrequest/AdminResult.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminResult.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminResult.java index d40a85049fc..6c567ef7174 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminResult.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminResult.java @@ -35,10 +35,11 @@ public class AdminResult implements Iterable { private final Queue> data; private final Map columnSpecs; - private final AdminRequestHandler nextHandler; + private final AdminRequestHandler nextHandler; private final ProtocolVersion 
protocolVersion; - public AdminResult(Rows rows, AdminRequestHandler nextHandler, ProtocolVersion protocolVersion) { + public AdminResult( + Rows rows, AdminRequestHandler nextHandler, ProtocolVersion protocolVersion) { this.data = rows.getData(); ImmutableMap.Builder columnSpecsBuilder = ImmutableMap.builder(); From 8ead1325edfa3f09684689c39269ae1da9d7f8fc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 20 Jan 2020 15:38:12 -0300 Subject: [PATCH 269/979] Fix deprecation warning in TermTest --- .../datastax/oss/driver/api/querybuilder/relation/TermTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/TermTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/TermTest.java index 320b7c827b8..c6d1b4c9659 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/TermTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/TermTest.java @@ -116,7 +116,7 @@ public void should_generate_literal_terms() { @Test public void should_fail_when_no_codec_for_literal() { - assertThatThrownBy(() -> literal(new Date(2018, 10, 10))) + assertThatThrownBy(() -> literal(new Date(1234))) .isInstanceOf(IllegalArgumentException.class) .hasMessage( "Could not inline literal of type java.util.Date. 
" From 59d08a6bba1218ef9bea7d7d4edfc5d1fef3f609 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 20 Jan 2020 15:46:25 -0300 Subject: [PATCH 270/979] Fix compilation warning in TableParserTest --- .../core/metadata/schema/parsing/TableParserTest.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index 52428214816..7b4e652fb0f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -111,7 +111,10 @@ public void should_parse_modern_tables() { checkTable(table); - assertThat((Map) table.getOptions().get(CqlIdentifier.fromInternal("caching"))) + @SuppressWarnings("unchecked") + Map caching = + (Map) table.getOptions().get(CqlIdentifier.fromInternal("caching")); + assertThat(caching) .hasSize(2) .containsEntry("keys", "ALL") .containsEntry("rows_per_partition", "NONE"); @@ -177,8 +180,10 @@ private void checkTable(TableMetadata table) { assertThat(index.getClassName()).isNotPresent(); assertThat(index.getKind()).isEqualTo(IndexKind.COMPOSITES); assertThat(index.getTarget()).isEqualTo("v"); - assertThat( - (Map) table.getOptions().get(CqlIdentifier.fromInternal("compaction"))) + @SuppressWarnings("unchecked") + Map compaction = + (Map) table.getOptions().get(CqlIdentifier.fromInternal("compaction")); + assertThat(compaction) .hasSize(2) .containsEntry("class", "org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy") .containsEntry("mock_option", "1"); From b10a6fb3992aa93f3f2fd319aa00c38ab3a23220 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 20 Jan 2020 15:55:23 -0300 Subject: [PATCH 271/979] Fix compilation warning in 
ProtocolVersionMixedClusterIT --- .../oss/driver/core/ProtocolVersionMixedClusterIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java index 6acdeb4b042..2dba7ae4ba9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java @@ -170,12 +170,12 @@ private BoundCluster mixedVersions(String... versions) { return SimulacronRule.server.register(clusterSpec); } - private Stream queries(BoundTopic topic) { + private Stream queries(BoundTopic topic) { return topic.getLogs().getQueryLogs().stream() .filter(q -> q.getFrame().message instanceof Query); } - private Stream protocolQueries(BoundTopic topic, int protocolVersion) { + private Stream protocolQueries(BoundTopic topic, int protocolVersion) { return queries(topic) .filter(q -> q.getFrame().protocolVersion == protocolVersion) .map(QueryLog::getQuery); From dddb8c68e648db792c8efa693f0c464030a695fd Mon Sep 17 00:00:00 2001 From: tomekl007 Date: Fri, 24 Jan 2020 10:20:36 +0100 Subject: [PATCH 272/979] fix `build.yaml` by adding required type setting --- build.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/build.yaml b/build.yaml index 01a78f10698..32298b3aad9 100644 --- a/build.yaml +++ b/build.yaml @@ -56,6 +56,7 @@ schedules: - jabba: openjdk_jdk13 cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] build: + - type: maven - properties: | ccm.version=$CCM_CASSANDRA_VERSION ccm.dse=$CCM_IS_DSE From 6cd2d8f8a9a4673058295c94da314564fb852b3d Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 24 Jan 2020 08:10:29 -0600 Subject: [PATCH 273/979] JAVA-2622: Add Cassandra 4.0 to build matrix for ITs (#1390) - also fixes the 
build.yaml for CI builds --- build.yaml | 34 ++++++++++--------- .../driver/api/testinfra/ccm/CcmBridge.java | 20 ++++++++++- 2 files changed, 37 insertions(+), 17 deletions(-) diff --git a/build.yaml b/build.yaml index 32298b3aad9..319ed0b9800 100644 --- a/build.yaml +++ b/build.yaml @@ -9,6 +9,7 @@ cassandra: - '2.1' - '3.0' - '3.11' + - '4.0' - 'dse-4.8' - 'dse-5.0' - 'dse-5.1' @@ -27,6 +28,9 @@ schedules: - jabba: openjdk_jdk11 - jabba: openjdk_jdk12 - jabba: openjdk_jdk13 + disable_commit_status: true + notify: + slack: java-driver-dev-bots nightly: schedule: nightly matrix: @@ -34,13 +38,17 @@ schedules: # No excludes for JDK8 # Exclude JDK11 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk11 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] # Exclude JDK12 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk12 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] # Exclude JDK13 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk13 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] + disable_commit_status: true + notify: + slack: java-driver-dev-bots + adhoc: schedule: adhoc matrix: @@ -48,20 +56,17 @@ schedules: # No excludes for JDK8 # Exclude JDK11 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk11 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] # Exclude JDK12 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk12 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 
'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] # Exclude JDK13 for all but the latest Cassandra and DSE 6.7+ - jabba: openjdk_jdk13 - cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', dse-6.8'] + cassandra: ['2.1', '3.0', 'dse-4.8', 'dse-5.0', 'dse-5.1', 'dse-6.0', 'dse-6.8'] + disable_commit_status: true + notify: + slack: java-driver-dev-bots build: - - type: maven - - properties: | - ccm.version=$CCM_CASSANDRA_VERSION - ccm.dse=$CCM_IS_DSE - proxy.path=$HOME/proxy - maven.javadoc.skip=true - script: | # Jabba default should be a JDK8 for now jabba use default @@ -72,11 +77,8 @@ build: # Use the matrix JDK for testing jabba use $JABBA_JDK_NAME # Run tests against matrix JDK - mvn -B -V verify --batch-mode --show-version + mvn -B -V verify --batch-mode --show-version -Dccm.version=$CCM_CASSANDRA_VERSION -Dccm.dse=$CCM_IS_DSE -Dproxy.path=$HOME/proxy -Dmaven.javadoc.skip=true - xunit: - "**/target/surefire-reports/TEST-*.xml" - "**/target/failsafe-reports/TEST-*.xml" - jacoco: true -disable_commit_status: true -notify: - slack: java-driver-dev-bots diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index f10a2cdeba3..4155342195d 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -186,6 +186,24 @@ public Version getCassandraVersion() { } } + private String getCcmVersionString(Version version) { + // for 4.0 pre-releases, the CCM version string needs to be "4.0-alpha1" or "4.0-alpha2" + // Version.toString() always adds a patch value, even if it's not specified when parsing. 
+ if (version.getMajor() == 4 + && version.getMinor() == 0 + && version.getPatch() == 0 + && version.getPreReleaseLabels() != null) { + // truncate the patch version from the Version string + StringBuilder sb = new StringBuilder(); + sb.append(version.getMajor()).append('.').append(version.getMinor()); + for (String preReleaseString : version.getPreReleaseLabels()) { + sb.append('-').append(preReleaseString); + } + return sb.toString(); + } + return version.toString(); + } + public void create() { if (created.compareAndSet(false, true)) { if (INSTALL_DIRECTORY != null) { @@ -194,7 +212,7 @@ public void create() { createOptions.add("-v git:" + BRANCH.trim().replaceAll("\"", "")); } else { - createOptions.add("-v " + VERSION.toString()); + createOptions.add("-v " + getCcmVersionString(VERSION)); } if (DSE_ENABLEMENT) { createOptions.add("--dse"); From 48bac34693c106afa162d80d1a8bc00757c2761c Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 27 Jan 2020 12:46:20 +0100 Subject: [PATCH 274/979] JAVA-2263: Add optional schema validation (#1300) --- changelog/README.md | 2 + .../internal/core/util/CollectionsUtils.java | 34 + .../core/util/CollectionsUtilsTest.java | 67 + .../driver/internal/core/util/LoggerTest.java | 63 + .../oss/driver/mapper/DefaultKeyspaceIT.java | 2 + .../oss/driver/mapper/SchemaValidationIT.java | 1191 +++++++++++++++++ .../datastax/oss/driver/mapper/UpdateIT.java | 1 + manual/mapper/mapper/README.md | 48 + .../dao/DaoImplementationGenerator.java | 13 + .../processor/dao/LoggingGenerator.java | 25 +- .../entity/EntityHelperGenerator.java | 4 +- ...HelperSchemaValidationMethodGenerator.java | 374 ++++++ .../mapper/entity/EntityHelperBaseTest.java | 109 ++ .../oss/driver/api/mapper/MapperBuilder.java | 15 + .../api/mapper/annotations/SchemaHint.java | 70 + .../mapper/entity/EntityHelperBase.java | 128 ++ 16 files changed, 2144 insertions(+), 2 deletions(-) create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/util/CollectionsUtils.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/CollectionsUtilsTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/LoggerTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java create mode 100644 mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java create mode 100644 mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBaseTest.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java diff --git a/changelog/README.md b/changelog/README.md index b8641382caf..5181569ade2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.5.0 (in progress) +- [new feature] JAVA-2263: Add optional schema validation to the mapper + ### 4.4.0 This version brings in all functionality that was formerly only in the DataStax Enterprise driver, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/CollectionsUtils.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/CollectionsUtils.java new file mode 100644 index 00000000000..2dcbdf866ba --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/CollectionsUtils.java @@ -0,0 +1,34 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +public class CollectionsUtils { + public static Map combineListsIntoOrderedMap(List keys, List values) { + if (keys.size() != values.size()) { + throw new IllegalArgumentException("Cannot combine lists with not matching sizes"); + } + + Map map = new LinkedHashMap<>(); + for (int i = 0; i < keys.size(); i++) { + map.put(keys.get(i), values.get(i)); + } + return map; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/CollectionsUtilsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/CollectionsUtilsTest.java new file mode 100644 index 00000000000..3ae49f12338 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/CollectionsUtilsTest.java @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.List; +import java.util.Map; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class CollectionsUtilsTest { + @Test + @UseDataProvider("listsProvider") + public void should_combine_two_lists_by_index( + List firstList, List secondList, Map expected) { + + // when + Map result = + CollectionsUtils.combineListsIntoOrderedMap(firstList, secondList); + + // then + assertThat(result).isEqualTo(expected); + } + + @Test + public void should_throw_if_lists_have_not_matching_size() { + // given + List list1 = ImmutableList.of(1); + List list2 = ImmutableList.of(1, 2); + + // when + assertThatThrownBy(() -> CollectionsUtils.combineListsIntoOrderedMap(list1, list2)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageMatching("Cannot combine lists with not matching sizes"); + } + + @DataProvider + public static Object[][] listsProvider() { + + return new Object[][] { + {ImmutableList.of(1), ImmutableList.of(1), ImmutableMap.of(1, 1)}, + {ImmutableList.of(1, 10, 5), ImmutableList.of(1, 10, 5), ImmutableMap.of(1, 1, 10, 10, 5, 5)}, + {ImmutableList.of(1, 1), ImmutableList.of(1, 2), ImmutableMap.of(1, 2)} + }; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/LoggerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/LoggerTest.java new file mode 100644 index 00000000000..415d887ee45 --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/util/LoggerTest.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import static org.mockito.Mockito.mock; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import org.mockito.ArgumentCaptor; +import org.slf4j.LoggerFactory; + +public class LoggerTest { + public static LoggerSetup setupTestLogger(Class clazz, Level levelToCapture) { + @SuppressWarnings("unchecked") + Appender appender = (Appender) mock(Appender.class); + + ArgumentCaptor loggingEventCaptor = ArgumentCaptor.forClass(ILoggingEvent.class); + Logger logger = (Logger) LoggerFactory.getLogger(clazz); + Level originalLoggerLevel = logger.getLevel(); + logger.setLevel(levelToCapture); + logger.addAppender(appender); + return new LoggerSetup(appender, originalLoggerLevel, logger, loggingEventCaptor); + } + + public static class LoggerSetup { + + private final Level originalLoggerLevel; + public final Appender appender; + public final Logger logger; + public ArgumentCaptor loggingEventCaptor; + + private LoggerSetup( + Appender appender, + Level originalLoggerLevel, + Logger logger, + ArgumentCaptor loggingEventCaptor) { + this.appender = appender; + this.originalLoggerLevel = originalLoggerLevel; + this.logger = 
logger; + this.loggingEventCaptor = loggingEventCaptor; + } + + public void close() { + logger.detachAppender(appender); + logger.setLevel(originalLoggerLevel); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java index babba8b982c..55f7aff9b62 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.mapper; +import static com.datastax.oss.driver.api.mapper.MapperBuilder.*; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -121,6 +122,7 @@ public void should_fail_to_insert_if_default_ks_and_dao_ks_not_provided() { () -> { InventoryMapperKsNotSet mapper = new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(SESSION_RULE.session()) + .withCustomState(SCHEMA_VALIDATION_ENABLED_SETTING, false) .build(); mapper.productDaoDefaultKsNotSet(); }) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java new file mode 100644 index 00000000000..ec0c579c3c3 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java @@ -0,0 +1,1191 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static com.datastax.oss.driver.api.mapper.annotations.SchemaHint.*; +import static com.datastax.oss.driver.internal.core.util.LoggerTest.setupTestLogger; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatCode; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.SchemaHint; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import 
com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@CassandraRequirement(min = "3.4", description = "Creates a SASI index") +public class SchemaValidationIT extends InventoryITBase { + + private static CcmRule ccm = CcmRule.getInstance(); + + private static SessionRule sessionRule = SessionRule.builder(ccm).build(); + + private static InventoryMapper mapper; + private static InventoryMapper mapperDisabledValidation; + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccm).around(sessionRule); + + @BeforeClass + public static void setup() { + CqlSession session = sessionRule.session(); + List statements = + Arrays.asList( + "CREATE TABLE product_simple(id uuid PRIMARY KEY, description text, unmapped text)", + "CREATE TABLE product_simple_missing_p_k(id uuid PRIMARY KEY, description text, unmapped text)", + "CREATE TABLE product_simple_missing_clustering_column(id uuid PRIMARY KEY, description text, unmapped text)", + "CREATE TABLE product_pk_and_clustering(id uuid, c_id uuid, PRIMARY KEY (id, c_id))", + "CREATE TABLE product_wrong_type(id uuid PRIMARY KEY, wrong_type_column text)", + "CREATE TYPE dimensions_with_incorrect_name(length int, width int, height int)", + "CREATE TYPE dimensions_with_wrong_type(length int, width int, height text)", + "CREATE TYPE dimensions_with_incorrect_name_schema_hint_udt(length int, width int, height int)", + "CREATE TYPE dimensions_with_incorrect_name_schema_hint_table(length int, width int, height int)", + "CREATE TABLE 
product_with_incorrect_udt(id uuid PRIMARY KEY, description text, dimensions dimensions_with_incorrect_name)", + "CREATE TABLE product_with_incorrect_udt_schema_hint_udt(id uuid PRIMARY KEY, description text, dimensions dimensions_with_incorrect_name_schema_hint_udt)", + "CREATE TABLE product_with_incorrect_udt_schema_hint_table(id uuid PRIMARY KEY, description text, dimensions dimensions_with_incorrect_name_schema_hint_table)", + "CREATE TABLE product_with_udt_wrong_type(id uuid PRIMARY KEY, description text, dimensions dimensions_with_wrong_type)"); + + for (String query : statements) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + for (String query : createStatements(ccm)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); + } + mapper = + new SchemaValidationIT_InventoryMapperBuilder(session) + .withSchemaValidationEnabled(true) + .build(); + mapperDisabledValidation = + new SchemaValidationIT_InventoryMapperBuilder(session) + .withSchemaValidationEnabled(false) + .build(); + } + + @Before + public void clearData() { + CqlSession session = sessionRule.session(); + session.execute( + SimpleStatement.builder("TRUNCATE product_simple") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product_with_incorrect_udt") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product_with_incorrect_udt_schema_hint_udt") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product_with_incorrect_udt_schema_hint_table") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + 
SimpleStatement.builder("TRUNCATE product_wrong_type") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product_pk_and_clustering") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder("TRUNCATE product_with_udt_wrong_type") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + + @Test + public void should_throw_when_use_not_properly_mapped_entity() { + assertThatThrownBy(() -> mapper.productSimpleDao(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.table: %s.product_simple has missing columns: [description_with_incorrect_name, some_other_not_mapped_field] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductSimple", + sessionRule.keyspace())); + } + + @Test + public void + should_throw_when_use_not_properly_mapped_entity_when_ks_is_passed_as_null_extracting_ks_from_session() { + assertThatThrownBy(() -> mapper.productSimpleDao(null)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.table: %s.product_simple has missing columns: [description_with_incorrect_name, some_other_not_mapped_field] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductSimple", + sessionRule.keyspace())); + } + + @Test + public void should_log_warn_when_entity_has_no_corresponding_cql_table() { + LoggerTest.LoggerSetup logger = + setupTestLogger( + SchemaValidationIT_ProductCqlTableMissingHelper__MapperGenerated.class, Level.WARN); + try { + assertThatThrownBy(() -> mapper.productCqlTableMissingDao(sessionRule.keyspace())) + .isInstanceOf(InvalidQueryException.class); + + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + 
assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "There is no ks.table or UDT: %s.product_cql_table_missing for the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductCqlTableMissing, or metadata is out of date.", + sessionRule.keyspace())); + + } finally { + logger.close(); + } + } + + @Test + public void should_throw_general_driver_exception_when_schema_validation_check_is_disabled() { + assertThatThrownBy( + () -> mapperDisabledValidation.productDaoValidationDisabled(sessionRule.keyspace())) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("Undefined column name description_with_incorrect_name"); + } + + @Test + public void should_not_throw_on_table_with_properly_mapped_udt_field() { + assertThatCode(() -> mapper.productDao(sessionRule.keyspace())).doesNotThrowAnyException(); + } + + @Test + public void should_throw_when_use_not_properly_mapped_entity_with_udt() { + assertThatThrownBy(() -> mapper.productWithIncorrectUdtDao(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasStackTraceContaining( + String.format( + "The CQL ks.udt: %s.dimensions_with_incorrect_name has missing columns: [length_not_present] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.DimensionsWithIncorrectName", + sessionRule.keyspace())); + } + + @Test + public void should_throw_when_use_not_properly_mapped_entity_with_udt_with_udt_schema_hint() { + assertThatThrownBy(() -> mapper.productWithIncorrectUdtSchemaHintUdt(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasStackTraceContaining( + String.format( + "The CQL ks.udt: %s.dimensions_with_incorrect_name_schema_hint_udt has missing columns: [length_not_present] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.DimensionsWithIncorrectNameSchemaHintUdt", + sessionRule.keyspace())); + } + + @Test + public void + 
should_warn_about_missing_table_when_use_not_properly_mapped_entity_with_udt_with_table_schema_hint() { + LoggerTest.LoggerSetup logger = + setupTestLogger( + SchemaValidationIT_DimensionsWithIncorrectNameSchemaHintTableHelper__MapperGenerated + .class, + Level.WARN); + try { + // when + mapper.productWithIncorrectUdtSchemaHintTable(sessionRule.keyspace()); + + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "There is no ks.table or UDT: %s.dimensions_with_incorrect_name_schema_hint_table for the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.DimensionsWithIncorrectNameSchemaHintTable, or metadata is out of date.", + sessionRule.keyspace())); + } finally { + logger.close(); + } + } + + @Test + public void should_throw_when_table_is_missing_PKs() { + assertThatThrownBy(() -> mapper.productSimpleMissingPKDao(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.table: %s.product_simple_missing_p_k has missing Primary Key columns: [id_not_present] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductSimpleMissingPK", + sessionRule.keyspace())); + } + + @Test + public void should_throw_when_table_is_missing_clustering_column() { + assertThatThrownBy(() -> mapper.productSimpleMissingClusteringColumn(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.table: %s.product_simple_missing_clustering_column has missing Clustering columns: [not_existing_clustering_column] that are defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductSimpleMissingClusteringColumn", + sessionRule.keyspace())); + } + + @Test + public 
void should_throw_when_type_defined_in_table_does_not_match_type_from_entity() { + assertThatThrownBy(() -> mapper.productDaoWrongType(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.table: %s.product_wrong_type defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductWrongType declares type mappings that are not supported by the codec registry:\n" + + "Field: wrong_type_column, Entity Type: java.lang.Integer, CQL type: TEXT", + sessionRule.keyspace())); + } + + @Test + public void should_throw_when_type_defined_in_udt_does_not_match_type_from_entity() { + assertThatThrownBy(() -> mapper.productWithUdtWrongTypeDao(sessionRule.keyspace())) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + String.format( + "The CQL ks.udt: %s.dimensions_with_wrong_type defined in the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.DimensionsWithWrongType declares type mappings that are not supported by the codec registry:\n" + + "Field: height, Entity Type: java.lang.Integer, CQL type: TEXT", + sessionRule.keyspace())); + } + + @Test + public void should_not_throw_when_have_correct_pk_and_clustering() { + assertThatCode(() -> mapper.productPkAndClusteringDao(sessionRule.keyspace())) + .doesNotThrowAnyException(); + } + + @Test + public void should_log_warning_when_passing_not_existing_keyspace() { + LoggerTest.LoggerSetup logger = + setupTestLogger(SchemaValidationIT_ProductSimpleHelper__MapperGenerated.class, Level.WARN); + try { + // when + assertThatThrownBy( + () -> mapper.productSimpleDao(CqlIdentifier.fromCql("not_existing_keyspace"))) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("not_existing_keyspace does not exist"); + + // then + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + 
assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + "Unable to validate table: product_simple for the entity class: com.datastax.oss.driver.mapper.SchemaValidationIT.ProductSimple because the session metadata has no information about the keyspace: not_existing_keyspace."); + } finally { + logger.close(); + } + } + + @Mapper + public interface InventoryMapper { + @DaoFactory + ProductSimpleDao productSimpleDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductSimpleCqlTableMissingDao productCqlTableMissingDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductSimpleDaoValidationDisabledDao productDaoValidationDisabled( + @DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductWithIncorrectUdtDao productWithIncorrectUdtDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductWithIncorrectUdtSchemaHintUdtDao productWithIncorrectUdtSchemaHintUdt( + @DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductWithIncorrectUdtSchemaHintTableDao productWithIncorrectUdtSchemaHintTable( + @DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductWithUdtWrongTypeDao productWithUdtWrongTypeDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductSimpleMissingPKDao productSimpleMissingPKDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductSimpleMissingClusteringColumnDao productSimpleMissingClusteringColumn( + @DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductDaoWrongTypeDao productDaoWrongType(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + ProductPkAndClusteringDao productPkAndClusteringDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + public interface ProductWithIncorrectUdtDao { + + @Update(customWhereClause = "id = :id") + void updateWhereId(ProductWithIncorrectUdt product, UUID id); + } + + @Dao + public interface ProductWithIncorrectUdtSchemaHintUdtDao { + + 
@Update(customWhereClause = "id = :id") + void updateWhereId(ProductWithIncorrectUdtSchemaHintUdt product, UUID id); + } + + @Dao + public interface ProductWithIncorrectUdtSchemaHintTableDao { + + @Update(customWhereClause = "id = :id") + void updateWhereId(ProductWithIncorrectUdtSchemaHintTable product, UUID id); + } + + @Dao + public interface ProductWithUdtWrongTypeDao { + + @Update(customWhereClause = "id = :id") + void updateWhereId(ProductWithUdtWrongType product, UUID id); + } + + @Dao + public interface ProductDao { + + @Update(customWhereClause = "id = :id") + void updateWhereId(Product product, UUID id); + } + + @Dao + public interface ProductSimpleDao { + + @Select + ProductSimple findById(UUID productId); + } + + @Dao + public interface ProductSimpleDaoValidationDisabledDao { + + @Select + ProductSimple findById(UUID productId); + } + + @Dao + public interface ProductSimpleCqlTableMissingDao { + + @Select + ProductCqlTableMissing findById(UUID productId); + } + + @Dao + public interface ProductSimpleMissingPKDao { + @Select + ProductSimpleMissingPK findById(UUID productId); + } + + @Dao + public interface ProductSimpleMissingClusteringColumnDao { + @Select + ProductSimpleMissingClusteringColumn findById(UUID productId); + } + + @Dao + public interface ProductDaoWrongTypeDao { + + @Select + ProductWrongType findById(UUID productId); + } + + @Dao + public interface ProductPkAndClusteringDao { + + @Select + ProductPkAndClustering findById(UUID productId); + } + + @Entity + public static class ProductCqlTableMissing { + @PartitionKey private UUID id; + + public ProductCqlTableMissing() {} + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + } + + @Entity + public static class ProductSimpleMissingPK { + @PartitionKey private UUID idNotPresent; + + public ProductSimpleMissingPK() {} + + public UUID getIdNotPresent() { + return idNotPresent; + } + + public void setIdNotPresent(UUID idNotPresent) { + 
this.idNotPresent = idNotPresent; + } + } + + @Entity + public static class ProductWrongType { + @PartitionKey private UUID id; + private Integer wrongTypeColumn; + + public ProductWrongType() {} + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public Integer getWrongTypeColumn() { + return wrongTypeColumn; + } + + public void setWrongTypeColumn(Integer wrongTypeColumn) { + this.wrongTypeColumn = wrongTypeColumn; + } + } + + @Entity + public static class ProductSimpleMissingClusteringColumn { + @PartitionKey private UUID id; + @ClusteringColumn private Integer notExistingClusteringColumn; + + public ProductSimpleMissingClusteringColumn() {} + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public Integer getNotExistingClusteringColumn() { + return notExistingClusteringColumn; + } + + public void setNotExistingClusteringColumn(Integer notExistingClusteringColumn) { + this.notExistingClusteringColumn = notExistingClusteringColumn; + } + } + + @Entity + public static class ProductPkAndClustering { + @PartitionKey private UUID id; + @ClusteringColumn private UUID cId; + + public ProductPkAndClustering() {} + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public UUID getcId() { + return cId; + } + + public void setcId(UUID cId) { + this.cId = cId; + } + } + + @Entity + public static class ProductSimple { + @PartitionKey private UUID id; + private String descriptionWithIncorrectName; + private Integer someOtherNotMappedField; + + public ProductSimple() {} + + public ProductSimple( + UUID id, String descriptionWithIncorrectName, Integer someOtherNotMappedField) { + this.id = id; + this.descriptionWithIncorrectName = descriptionWithIncorrectName; + this.someOtherNotMappedField = someOtherNotMappedField; + } + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public String 
getDescriptionWithIncorrectName() { + return descriptionWithIncorrectName; + } + + public void setDescriptionWithIncorrectName(String descriptionWithIncorrectName) { + this.descriptionWithIncorrectName = descriptionWithIncorrectName; + } + + public Integer getSomeOtherNotMappedField() { + return someOtherNotMappedField; + } + + public void setSomeOtherNotMappedField(Integer someOtherNotMappedField) { + this.someOtherNotMappedField = someOtherNotMappedField; + } + + @Override + public boolean equals(Object o) { + + if (this == o) { + return true; + } + if (!(o instanceof ProductSimple)) { + return false; + } + ProductSimple that = (ProductSimple) o; + return this.id.equals(that.id) + && this.someOtherNotMappedField.equals(that.someOtherNotMappedField) + && this.descriptionWithIncorrectName.equals(that.descriptionWithIncorrectName); + } + + @Override + public int hashCode() { + return Objects.hash(id, descriptionWithIncorrectName, someOtherNotMappedField); + } + + @Override + public String toString() { + return "ProductSimple{" + + "id=" + + id + + ", descriptionWithIncorrectName='" + + descriptionWithIncorrectName + + '\'' + + ", someOtherNotMappedField=" + + someOtherNotMappedField + + '}'; + } + } + + @Entity + public static class ProductWithIncorrectUdt { + + @PartitionKey private UUID id; + private String description; + private DimensionsWithIncorrectName dimensions; + + public ProductWithIncorrectUdt() {} + + public ProductWithIncorrectUdt( + UUID id, String description, DimensionsWithIncorrectName dimensions) { + this.id = id; + this.description = description; + this.dimensions = dimensions; + } + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public DimensionsWithIncorrectName getDimensions() { + return dimensions; + } + + public void 
setDimensions(DimensionsWithIncorrectName dimensions) { + this.dimensions = dimensions; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ProductWithIncorrectUdt)) { + return false; + } + ProductWithIncorrectUdt that = (ProductWithIncorrectUdt) o; + return this.id.equals(that.id) + && this.description.equals(that.description) + && this.dimensions.equals(that.dimensions); + } + + @Override + public int hashCode() { + return Objects.hash(id, description, dimensions); + } + + @Override + public String toString() { + return "ProductWithIncorrectUdt{" + + "id=" + + id + + ", description='" + + description + + '\'' + + ", dimensions=" + + dimensions + + '}'; + } + } + + @Entity + public static class ProductWithUdtWrongType { + + @PartitionKey private UUID id; + private String description; + private DimensionsWithWrongType dimensions; + + public ProductWithUdtWrongType() {} + + public ProductWithUdtWrongType( + UUID id, String description, DimensionsWithWrongType dimensions) { + this.id = id; + this.description = description; + this.dimensions = dimensions; + } + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public DimensionsWithWrongType getDimensions() { + return dimensions; + } + + public void setDimensions(DimensionsWithWrongType dimensions) { + this.dimensions = dimensions; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ProductWithUdtWrongType)) { + return false; + } + ProductWithUdtWrongType that = (ProductWithUdtWrongType) o; + return this.id.equals(that.id) + && this.description.equals(that.description) + && this.dimensions.equals(that.dimensions); + } + + @Override + public int hashCode() { + return Objects.hash(id, description, 
dimensions); + } + + @Override + public String toString() { + return "ProductWithUdtWrongType{" + + "id=" + + id + + ", description='" + + description + + '\'' + + ", dimensions=" + + dimensions + + '}'; + } + } + + @Entity + public static class ProductWithIncorrectUdtSchemaHintUdt { + + @PartitionKey private UUID id; + private String description; + private DimensionsWithIncorrectNameSchemaHintUdt dimensions; + + public ProductWithIncorrectUdtSchemaHintUdt() {} + + public ProductWithIncorrectUdtSchemaHintUdt( + UUID id, String description, DimensionsWithIncorrectNameSchemaHintUdt dimensions) { + this.id = id; + this.description = description; + this.dimensions = dimensions; + } + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public DimensionsWithIncorrectNameSchemaHintUdt getDimensions() { + return dimensions; + } + + public void setDimensions(DimensionsWithIncorrectNameSchemaHintUdt dimensions) { + this.dimensions = dimensions; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ProductWithIncorrectUdtSchemaHintUdt)) { + return false; + } + ProductWithIncorrectUdtSchemaHintUdt that = (ProductWithIncorrectUdtSchemaHintUdt) o; + return this.id.equals(that.id) + && this.description.equals(that.description) + && this.dimensions.equals(that.dimensions); + } + + @Override + public int hashCode() { + return Objects.hash(id, description, dimensions); + } + + @Override + public String toString() { + return "ProductWithIncorrectUdtSchemaHint{" + + "id=" + + id + + ", description='" + + description + + '\'' + + ", dimensions=" + + dimensions + + '}'; + } + } + + @Entity + public static class ProductWithIncorrectUdtSchemaHintTable { + + @PartitionKey private UUID id; + private String description; + private 
DimensionsWithIncorrectNameSchemaHintTable dimensions; + + public ProductWithIncorrectUdtSchemaHintTable() {} + + public ProductWithIncorrectUdtSchemaHintTable( + UUID id, String description, DimensionsWithIncorrectNameSchemaHintTable dimensions) { + this.id = id; + this.description = description; + this.dimensions = dimensions; + } + + public UUID getId() { + return id; + } + + public void setId(UUID id) { + this.id = id; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public DimensionsWithIncorrectNameSchemaHintTable getDimensions() { + return dimensions; + } + + public void setDimensions(DimensionsWithIncorrectNameSchemaHintTable dimensions) { + this.dimensions = dimensions; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ProductWithIncorrectUdtSchemaHintTable)) { + return false; + } + ProductWithIncorrectUdtSchemaHintTable that = (ProductWithIncorrectUdtSchemaHintTable) o; + return this.id.equals(that.id) + && this.description.equals(that.description) + && this.dimensions.equals(that.dimensions); + } + + @Override + public int hashCode() { + return Objects.hash(id, description, dimensions); + } + + @Override + public String toString() { + return "ProductWithIncorrectUdtSchemaHintTable{" + + "id=" + + id + + ", description='" + + description + + '\'' + + ", dimensions=" + + dimensions + + '}'; + } + } + + @Entity + public static class DimensionsWithIncorrectName { + + private int lengthNotPresent; + private int width; + private int height; + + public DimensionsWithIncorrectName() {} + + public DimensionsWithIncorrectName(int lengthNotPresent, int width, int height) { + this.lengthNotPresent = lengthNotPresent; + this.width = width; + this.height = height; + } + + public int getLengthNotPresent() { + return lengthNotPresent; + } + + public void setLengthNotPresent(int lengthNotPresent) { 
+ this.lengthNotPresent = lengthNotPresent; + } + + public int getWidth() { + return width; + } + + public void setWidth(int width) { + this.width = width; + } + + public int getHeight() { + return height; + } + + public void setHeight(int height) { + this.height = height; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DimensionsWithIncorrectName)) { + return false; + } + DimensionsWithIncorrectName that = (DimensionsWithIncorrectName) o; + return this.lengthNotPresent == that.lengthNotPresent + && this.height == that.height + && this.width == that.width; + } + + @Override + public int hashCode() { + return Objects.hash(lengthNotPresent, width, height); + } + + @Override + public String toString() { + return "DimensionsWithIncorrectName{" + + "lengthNotPresent=" + + lengthNotPresent + + ", width=" + + width + + ", height=" + + height + + '}'; + } + } + + @Entity + public static class DimensionsWithWrongType { + + private int length; + private int width; + private int height; + + public DimensionsWithWrongType() {} + + public DimensionsWithWrongType(int length, int width, int height) { + this.length = length; + this.width = width; + this.height = height; + } + + public int getLength() { + return length; + } + + public void setLength(int length) { + this.length = length; + } + + public int getWidth() { + return width; + } + + public void setWidth(int width) { + this.width = width; + } + + public int getHeight() { + return height; + } + + public void setHeight(int height) { + this.height = height; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DimensionsWithWrongType)) { + return false; + } + DimensionsWithWrongType that = (DimensionsWithWrongType) o; + return this.length == that.length && this.height == that.height && this.width == that.width; + } + + @Override + public int hashCode() { + return Objects.hash(length, width, height); + } + 
+ @Override + public String toString() { + return "DimensionsWithWrongType{" + + "length=" + + length + + ", width=" + + width + + ", height=" + + height + + '}'; + } + } + + @Entity + @SchemaHint(targetElement = TargetElement.UDT) + public static class DimensionsWithIncorrectNameSchemaHintUdt { + + private int lengthNotPresent; + private int width; + private int height; + + public DimensionsWithIncorrectNameSchemaHintUdt() {} + + public DimensionsWithIncorrectNameSchemaHintUdt(int lengthNotPresent, int width, int height) { + this.lengthNotPresent = lengthNotPresent; + this.width = width; + this.height = height; + } + + public int getLengthNotPresent() { + return lengthNotPresent; + } + + public void setLengthNotPresent(int lengthNotPresent) { + this.lengthNotPresent = lengthNotPresent; + } + + public int getWidth() { + return width; + } + + public void setWidth(int width) { + this.width = width; + } + + public int getHeight() { + return height; + } + + public void setHeight(int height) { + this.height = height; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DimensionsWithIncorrectNameSchemaHintUdt)) { + return false; + } + DimensionsWithIncorrectNameSchemaHintUdt that = (DimensionsWithIncorrectNameSchemaHintUdt) o; + return this.lengthNotPresent == that.lengthNotPresent + && this.height == that.height + && this.width == that.width; + } + + @Override + public int hashCode() { + return Objects.hash(lengthNotPresent, width, height); + } + + @Override + public String toString() { + return "DimensionsWithIncorrectNameSchemaHintUdt{" + + "lengthNotPresent=" + + lengthNotPresent + + ", width=" + + width + + ", height=" + + height + + '}'; + } + } + + @Entity + @SchemaHint(targetElement = TargetElement.TABLE) + public static class DimensionsWithIncorrectNameSchemaHintTable { + + private int lengthNotPresent; + private int width; + private int height; + + public DimensionsWithIncorrectNameSchemaHintTable() 
{} + + public DimensionsWithIncorrectNameSchemaHintTable(int lengthNotPresent, int width, int height) { + this.lengthNotPresent = lengthNotPresent; + this.width = width; + this.height = height; + } + + public int getLengthNotPresent() { + return lengthNotPresent; + } + + public void setLengthNotPresent(int lengthNotPresent) { + this.lengthNotPresent = lengthNotPresent; + } + + public int getWidth() { + return width; + } + + public void setWidth(int width) { + this.width = width; + } + + public int getHeight() { + return height; + } + + public void setHeight(int height) { + this.height = height; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof DimensionsWithIncorrectNameSchemaHintTable)) { + return false; + } + DimensionsWithIncorrectNameSchemaHintTable that = + (DimensionsWithIncorrectNameSchemaHintTable) o; + return this.lengthNotPresent == that.lengthNotPresent + && this.height == that.height + && this.width == that.width; + } + + @Override + public int hashCode() { + return Objects.hash(lengthNotPresent, width, height); + } + + @Override + public String toString() { + return "DimensionsWithIncorrectNameSchemaHintTable{" + + "lengthNotPresent=" + + lengthNotPresent + + ", width=" + + width + + ", height=" + + height + + '}'; + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index e7ab56f663c..00df838b2a8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -68,6 +68,7 @@ public static void setup() { session.execute( SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } + session.execute("CREATE TABLE only_p_k(id uuid PRIMARY KEY)"); inventoryMapper = new UpdateIT_InventoryMapperBuilder(session).build(); dao = 
inventoryMapper.productDao(SESSION_RULE.keyspace()); diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 35885ac1c47..47f8a9441ee 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -159,6 +159,54 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). +### Schema validation + +The mapper validates entity mappings against the database schema at runtime. This check is performed +every time you initialize a new DAO: + +```java +// Checks that entity 'Product' can be mapped to table or UDT 'keyspace1.product' +ProductDao dao1 = inventoryMapper.productDao("keyspace1", "product"); + +// Checks that entity 'Product' can be mapped to table or UDT 'keyspace2.product' +ProductDao dao2 = inventoryMapper.productDao("keyspace2", "product"); +``` + +For each entity referenced in the DAO, the mapper tries to find a schema element with the +corresponding name (according to the [naming strategy](../entities/#naming-strategy)). It tries +tables first, then falls back to UDTs if there is no match. You can speed up this process by +providing a hint: + +```java +import static com.datastax.oss.driver.api.mapper.annotations.SchemaHint.TargetElement.UDT; +import com.datastax.oss.driver.api.mapper.annotations.SchemaHint; + +@Entity +@SchemaHint(targetElement = UDT) +public class Address { ... } +``` + +The following checks are then performed: + +* for each entity field, the database table or UDT must contain a column with the corresponding name + (according to the [naming strategy](../entities/#naming-strategy)). +* the types must be compatible, either according to the [default type + mappings](../../core/#cql-to-java-type-mapping), or via a [custom + codec](../../core/custom_codecs/) registered with the session. 
+* additionally, if the target element is a table, the primary key must be [properly + annotated](../entities/#primary-key-columns) in the entity. + +If any of those steps fails, an `IllegalArgumentException` is thrown. + +Schema validation adds a small startup overhead, so once your application is stable you may want to +disable it: + +```java +InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) + .withSchemaValidationEnabled(false) + .build(); +``` + [CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html [@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html [@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java index b14e6fb3e7b..c0cc5f7c819 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import static com.datastax.oss.driver.api.mapper.MapperBuilder.SCHEMA_VALIDATION_ENABLED_SETTING; + import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.annotations.Dao; @@ -436,6 +438,8 @@ private MethodSpec.Builder getInitAsyncContents() { String fieldName = entry.getValue(); // - create an instance initAsyncBuilder.addStatement("$1T $2L = new $1T(context)", fieldTypeName, fieldName); + // - validate entity schema + 
generateValidationCheck(initAsyncBuilder, fieldName); // - add it as a parameter to the constructor call newDaoStatement.add(",\n$L", fieldName); } @@ -499,6 +503,15 @@ private MethodSpec.Builder getInitAsyncContents() { return initAsyncBuilder; } + private void generateValidationCheck(MethodSpec.Builder initAsyncBuilder, String fieldName) { + initAsyncBuilder.beginControlFlow( + "if (($1T)context.getCustomState().get($2S))", + Boolean.class, + SCHEMA_VALIDATION_ENABLED_SETTING); + initAsyncBuilder.addStatement("$1L.validateEntityFields()", fieldName); + initAsyncBuilder.endControlFlow(); + } + private void generateProtocolVersionCheck(MethodSpec.Builder builder) { List methodElements = preparedStatements.stream().map(v -> v.methodElement).collect(Collectors.toList()); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java index f53d46f3c59..effad305a1b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java @@ -70,8 +70,31 @@ public void addLoggerField(TypeSpec.Builder classBuilder, ClassName className) { * @param arguments the arguments ({@code key} and {@code value}). */ public void debug(MethodSpec.Builder builder, String template, CodeBlock... arguments) { + log("debug", builder, template, arguments); + } + + /** + * Generates a warn log statement, such as: + * + *
      +   *   LOG.warn("setting {} = {}", key, value);
      +   * 
      + * + *

      This assumes that {@link #addLoggerField(TypeSpec.Builder, ClassName)} has already been + * called for the class where this is generated. + * + * @param builder where to generate. + * @param template the message ({@code "setting {} = {}"}). + * @param arguments the arguments ({@code key} and {@code value}). + */ + public void warn(MethodSpec.Builder builder, String template, CodeBlock... arguments) { + log("warn", builder, template, arguments); + } + + public void log( + String logLevel, MethodSpec.Builder builder, String template, CodeBlock... arguments) { if (logsEnabled) { - builder.addCode("$[LOG.debug($S", template); + builder.addCode("$[LOG.$L($S", logLevel, template); for (CodeBlock argument : arguments) { builder.addCode(",\n$L", argument); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java index 743b396ff2b..bf7d552d272 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java @@ -116,7 +116,9 @@ protected JavaFile.Builder getContents() { new EntityHelperDeleteByPrimaryKeyPartsMethodGenerator(entityDefinition), new EntityHelperDeleteByPrimaryKeyMethodGenerator(), new EntityHelperUpdateStartMethodGenerator(entityDefinition), - new EntityHelperUpdateByPrimaryKeyMethodGenerator(entityDefinition))) { + new EntityHelperUpdateByPrimaryKeyMethodGenerator(entityDefinition), + new EntityHelperSchemaValidationMethodGenerator( + entityDefinition, classElement, context.getLoggingGenerator(), this))) { methodGenerator.generate().ifPresent(classContents::addMethod); } diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java new file mode 100644 index 00000000000..3aa9957ac82 --- /dev/null +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java @@ -0,0 +1,374 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.mapper.processor.entity; + +import static com.datastax.oss.driver.api.mapper.annotations.SchemaHint.TargetElement; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.mapper.annotations.SchemaHint; +import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; +import com.datastax.oss.driver.internal.mapper.processor.dao.LoggingGenerator; +import com.squareup.javapoet.CodeBlock; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import javax.lang.model.element.Modifier; +import javax.lang.model.element.TypeElement; + +public class EntityHelperSchemaValidationMethodGenerator implements MethodGenerator { + + private final EntityDefinition entityDefinition; + private TypeElement entityTypeElement; + private LoggingGenerator loggingGenerator; + private EntityHelperGenerator entityHelperGenerator; + + public EntityHelperSchemaValidationMethodGenerator( + EntityDefinition entityDefinition, + TypeElement entityTypeElement, + LoggingGenerator loggingGenerator, + EntityHelperGenerator entityHelperGenerator) { + this.entityDefinition = entityDefinition; + this.entityTypeElement = entityTypeElement; + this.loggingGenerator = loggingGenerator; + this.entityHelperGenerator = entityHelperGenerator; + } + + @Override + public Optional generate() { + MethodSpec.Builder methodBuilder = + MethodSpec.methodBuilder("validateEntityFields") + 
.addAnnotation(Override.class) + .addModifiers(Modifier.PUBLIC) + .returns(TypeName.VOID); + + // get keyspaceId from context, and if not present fallback to keyspace set on session + methodBuilder.addStatement( + "$1T keyspaceId = this.keyspaceId != null ? this.keyspaceId : context.getSession().getKeyspace().orElse(null)", + CqlIdentifier.class); + + methodBuilder.addStatement("String entityClassName = $S", entityDefinition.getClassName()); + generateKeyspaceNull(methodBuilder); + + generateKeyspaceNameWrong(methodBuilder); + + methodBuilder.addStatement( + "$1T<$2T> keyspace = context.getSession().getMetadata().getKeyspace(keyspaceId)", + Optional.class, + KeyspaceMetadata.class); + + // Generates expected names to be present in cql (table or udt) + List expectedCqlNames = + entityDefinition.getAllColumns().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()); + methodBuilder.addStatement( + "$1T<$2T> expectedCqlNames = new $3T<>()", + List.class, + CqlIdentifier.class, + ArrayList.class); + for (CodeBlock expectedCqlName : expectedCqlNames) { + methodBuilder.addStatement( + "expectedCqlNames.add($1T.fromCql($2L))", CqlIdentifier.class, expectedCqlName); + } + + methodBuilder.addStatement( + "$1T<$2T> tableMetadata = keyspace.flatMap(v -> v.getTable(tableId))", + Optional.class, + TableMetadata.class); + + // Generated UserDefineTypes metadata + methodBuilder.addStatement( + "$1T<$2T> userDefinedType = keyspace.flatMap(v -> v.getUserDefinedType(tableId))", + Optional.class, + UserDefinedType.class); + + generateValidationChecks(methodBuilder); + + logMissingMetadata(methodBuilder); + + return Optional.of(methodBuilder.build()); + } + + private void logMissingMetadata(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment( + "warn if there is not keyspace.table for defined entity - it means that table is missing, or schema it out of date."); + methodBuilder.beginControlFlow("else"); + loggingGenerator.warn( + methodBuilder, + 
"[{}] There is no ks.table or UDT: {}.{} for the entity class: {}, or metadata is out of date.", + CodeBlock.of("context.getSession().getName()"), + CodeBlock.of("keyspaceId"), + CodeBlock.of("tableId"), + CodeBlock.of("entityClassName")); + methodBuilder.endControlFlow(); + } + + // handle case where keyspace name is not present in metadata keyspaces + private void generateKeyspaceNameWrong(MethodSpec.Builder methodBuilder) { + methodBuilder.beginControlFlow( + "if(!keyspaceNamePresent(context.getSession().getMetadata().getKeyspaces(), keyspaceId))"); + loggingGenerator.warn( + methodBuilder, + "[{}] Unable to validate table: {} for the entity class: {} " + + "because the session metadata has no information about the keyspace: {}.", + CodeBlock.of("context.getSession().getName()"), + CodeBlock.of("tableId"), + CodeBlock.of("entityClassName"), + CodeBlock.of("keyspaceId")); + methodBuilder.addStatement("return"); + methodBuilder.endControlFlow(); + } + + // Handle case where keyspaceId = null. + // In such case we cannot infer and validate schema for table or udt + private void generateKeyspaceNull(MethodSpec.Builder methodBuilder) { + methodBuilder.beginControlFlow("if (keyspaceId == null)"); + loggingGenerator.warn( + methodBuilder, + "[{}] Unable to validate table: {} for the entity class: {} because the keyspace " + + "is unknown (the entity does not declare a default keyspace, and neither the " + + "session nor the DAO were created with a keyspace). 
The DAO will only work if it " + + "uses fully-qualified queries with @Query or @QueryProvider.", + CodeBlock.of("context.getSession().getName()"), + CodeBlock.of("tableId"), + CodeBlock.of("entityClassName")); + methodBuilder.addStatement("return"); + methodBuilder.endControlFlow(); + } + + private void generateValidationChecks(MethodSpec.Builder methodBuilder) { + Optional targetElement = + Optional.ofNullable(entityTypeElement.getAnnotation(SchemaHint.class)) + .map(SchemaHint::targetElement); + + // if SchemaHint was not provided explicitly try to match TABLE, then fallback to UDT + if (!targetElement.isPresent()) { + validateColumnsInTable(methodBuilder); + validateColumnsInUdt(methodBuilder, true); + } + // if explicitly provided SchemaHint is TABLE, then generate only TABLE check + else if (targetElement.get().equals(TargetElement.TABLE)) { + validateColumnsInTable(methodBuilder); + } + // if explicitly provided SchemaHint is UDT, then generate only UDT check + else if (targetElement.get().equals(TargetElement.UDT)) { + validateColumnsInUdt(methodBuilder, false); + } + } + + private void validateColumnsInTable(MethodSpec.Builder methodBuilder) { + methodBuilder.beginControlFlow("if (tableMetadata.isPresent())"); + + generateMissingClusteringColumnsCheck(methodBuilder); + + generateMissingPKsCheck(methodBuilder); + + generateMissingColumnsCheck(methodBuilder); + + generateColumnsTypeCheck(methodBuilder); + + methodBuilder.endControlFlow(); + } + + private void generateColumnsTypeCheck(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment("validation of types"); + generateExpectedTypesPerColumn(methodBuilder); + + methodBuilder.addStatement( + "$1T<$2T> missingTableTypes = findTypeMismatches(expectedTypesPerColumn, tableMetadata.get().getColumns(), context.getSession().getContext().getCodecRegistry())", + List.class, + String.class); + methodBuilder.addStatement( + "throwMissingTableTypesIfNotEmpty(missingTableTypes, keyspaceId, tableId, 
entityClassName)"); + } + + private void generateMissingColumnsCheck(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment("validation of all columns"); + + methodBuilder.addStatement( + "$1T<$2T> missingTableCqlNames = findMissingCqlIdentifiers(expectedCqlNames, tableMetadata.get().getColumns().keySet())", + List.class, + CqlIdentifier.class); + + // Throw if there are any missingTableCqlNames + CodeBlock missingCqlColumnExceptionMessage = + CodeBlock.of( + "String.format(\"The CQL ks.table: %s.%s has missing columns: %s that are defined in the entity class: %s\", " + + "keyspaceId, tableId, missingTableCqlNames, entityClassName)"); + methodBuilder.beginControlFlow("if (!missingTableCqlNames.isEmpty())"); + methodBuilder.addStatement( + "throw new $1T($2L)", IllegalArgumentException.class, missingCqlColumnExceptionMessage); + methodBuilder.endControlFlow(); + } + + private void generateMissingPKsCheck(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment("validation of missing PKs"); + List expectedCqlPKs = + entityDefinition.getPartitionKey().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()); + + methodBuilder.addStatement( + "$1T<$2T> expectedCqlPKs = new $3T<>()", List.class, CqlIdentifier.class, ArrayList.class); + for (CodeBlock expectedCqlName : expectedCqlPKs) { + methodBuilder.addStatement( + "expectedCqlPKs.add($1T.fromCql($2L))", CqlIdentifier.class, expectedCqlName); + } + methodBuilder.addStatement( + "$1T<$2T> missingTablePksNames = findMissingColumns(expectedCqlPKs, tableMetadata.get().getPartitionKey())", + List.class, + CqlIdentifier.class); + + // throw if there are any missing PK columns + CodeBlock missingCqlColumnExceptionMessage = + CodeBlock.of( + "String.format(\"The CQL ks.table: %s.%s has missing Primary Key columns: %s that are defined in the entity class: %s\", " + + "keyspaceId, tableId, missingTablePksNames, entityClassName)"); + methodBuilder.beginControlFlow("if 
(!missingTablePksNames.isEmpty())"); + methodBuilder.addStatement( + "throw new $1T($2L)", IllegalArgumentException.class, missingCqlColumnExceptionMessage); + methodBuilder.endControlFlow(); + } + + private void generateMissingClusteringColumnsCheck(MethodSpec.Builder methodBuilder) { + List expectedCqlClusteringColumns = + entityDefinition.getClusteringColumns().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()); + + if (!expectedCqlClusteringColumns.isEmpty()) { + methodBuilder.addComment("validation of missing Clustering Columns"); + methodBuilder.addStatement( + "$1T<$2T> expectedCqlClusteringColumns = new $3T<>()", + List.class, + CqlIdentifier.class, + ArrayList.class); + for (CodeBlock expectedCqlName : expectedCqlClusteringColumns) { + methodBuilder.addStatement( + "expectedCqlClusteringColumns.add($1T.fromCql($2L))", + CqlIdentifier.class, + expectedCqlName); + } + + methodBuilder.addStatement( + "$1T<$2T> missingTableClusteringColumnNames = findMissingColumns(expectedCqlClusteringColumns, tableMetadata.get().getClusteringColumns().keySet())", + List.class, + CqlIdentifier.class); + + // throw if there are any missing Clustering Columns columns + CodeBlock missingCqlColumnExceptionMessage = + CodeBlock.of( + "String.format(\"The CQL ks.table: %s.%s has missing Clustering columns: %s that are defined in the entity class: %s\", " + + "keyspaceId, tableId, missingTableClusteringColumnNames, entityClassName)"); + methodBuilder.beginControlFlow("if (!missingTableClusteringColumnNames.isEmpty())"); + methodBuilder.addStatement( + "throw new $1T($2L)", IllegalArgumentException.class, missingCqlColumnExceptionMessage); + methodBuilder.endControlFlow(); + } + } + + // Finds out missingTableCqlNames - columns that are present in Entity Mapping but NOT present in + // UDT table + private void validateColumnsInUdt(MethodSpec.Builder methodBuilder, boolean generateElse) { + if (generateElse) { + methodBuilder.beginControlFlow("else if 
(userDefinedType.isPresent())"); + } else { + methodBuilder.beginControlFlow("if (userDefinedType.isPresent())"); + } + + generateUdtMissingColumnsCheck(methodBuilder); + + generateUdtColumnsTypeCheck(methodBuilder); + + methodBuilder.endControlFlow(); + } + + private void generateUdtColumnsTypeCheck(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment("validation of UDT types"); + generateExpectedTypesPerColumn(methodBuilder); + + methodBuilder.addStatement( + "$1T<$2T> expectedColumns = userDefinedType.get().getFieldNames()", + List.class, + CqlIdentifier.class); + methodBuilder.addStatement( + "$1T<$2T> expectedTypes = userDefinedType.get().getFieldTypes()", + List.class, + DataType.class); + + methodBuilder.addStatement( + "$1T<$2T> missingTableTypes = findTypeMismatches(expectedTypesPerColumn, expectedColumns, expectedTypes, context.getSession().getContext().getCodecRegistry())", + List.class, + String.class); + methodBuilder.addStatement( + "throwMissingUdtTypesIfNotEmpty(missingTableTypes, keyspaceId, tableId, entityClassName)"); + } + + private void generateUdtMissingColumnsCheck(MethodSpec.Builder methodBuilder) { + methodBuilder.addComment("validation of UDT columns"); + methodBuilder.addStatement( + "$1T<$2T> columns = userDefinedType.get().getFieldNames()", + List.class, + CqlIdentifier.class); + + methodBuilder.addStatement( + "$1T<$2T> missingTableCqlNames = findMissingCqlIdentifiers(expectedCqlNames, columns)", + List.class, + CqlIdentifier.class); + + // Throw if there are any missingTableCqlNames + CodeBlock missingCqlUdtExceptionMessage = + CodeBlock.of( + "String.format(\"The CQL ks.udt: %s.%s has missing columns: %s that are defined in the entity class: %s\", " + + "keyspaceId, tableId, missingTableCqlNames, entityClassName)"); + methodBuilder.beginControlFlow("if (!missingTableCqlNames.isEmpty())"); + methodBuilder.addStatement( + "throw new $1T($2L)", IllegalArgumentException.class, missingCqlUdtExceptionMessage); + 
methodBuilder.endControlFlow(); + } + + private void generateExpectedTypesPerColumn(MethodSpec.Builder methodBuilder) { + methodBuilder.addStatement( + "$1T<$2T, $3T> expectedTypesPerColumn = new $4T<>()", + Map.class, + CqlIdentifier.class, + GenericType.class, + LinkedHashMap.class); + + Map expectedTypesPerColumn = + entityDefinition.getAllColumns().stream() + .collect( + Collectors.toMap(PropertyDefinition::getCqlName, v -> v.getType().asRawTypeName())); + + for (Map.Entry expected : expectedTypesPerColumn.entrySet()) { + methodBuilder.addStatement( + "expectedTypesPerColumn.put($1T.fromCql($2L), $3L)", + CqlIdentifier.class, + expected.getKey(), + entityHelperGenerator.addGenericTypeConstant(expected.getValue().box())); + } + } +} diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBaseTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBaseTest.java new file mode 100644 index 00000000000..4b6b831e782 --- /dev/null +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBaseTest.java @@ -0,0 +1,109 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.mapper.entity; + +import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class EntityHelperBaseTest { + + @Test + @UseDataProvider("typesProvider") + public void should_find_not_matching_types( + Map> entityColumns, + Map cqlColumns, + List expected) { + // when + List missingTypes = + EntityHelperBase.findTypeMismatches(entityColumns, cqlColumns, CodecRegistry.DEFAULT); + + // then + assertThat(missingTypes).isEqualTo(expected); + } + + @Test + public void should_throw_if_there_is_not_matching_cql_column() { + // given + ImmutableMap> entityColumns = + ImmutableMap.of(CqlIdentifier.fromCql("c1"), GenericType.of(Integer.class)); + ColumnMetadata columnMetadataInt = mock(ColumnMetadata.class); + when(columnMetadataInt.getType()).thenReturn(DataTypes.INT); + ImmutableMap cqlColumns = + ImmutableMap.of(CqlIdentifier.fromCql("c2"), columnMetadataInt); + + // when, then + assertThatThrownBy( + () -> + 
EntityHelperBase.findTypeMismatches( + entityColumns, cqlColumns, CodecRegistry.DEFAULT)) + .isInstanceOf(AssertionError.class) + .hasMessageContaining("There is no cql column for entity column: c1"); + } + + @DataProvider + public static Object[][] typesProvider() { + ColumnMetadata columnMetadataText = mock(ColumnMetadata.class); + when(columnMetadataText.getType()).thenReturn(DataTypes.TEXT); + ColumnMetadata columnMetadataInt = mock(ColumnMetadata.class); + when(columnMetadataInt.getType()).thenReturn(DataTypes.INT); + + CqlIdentifier c1 = CqlIdentifier.fromCql("c1"); + CqlIdentifier c2 = CqlIdentifier.fromCql("c2"); + return new Object[][] { + { + ImmutableMap.of(c1, GenericType.of(String.class)), + ImmutableMap.of(c1, columnMetadataText), + Collections.emptyList() + }, + { + ImmutableMap.of(c1, GenericType.of(Integer.class)), + ImmutableMap.of(c1, columnMetadataText), + ImmutableList.of("Field: c1, Entity Type: java.lang.Integer, CQL type: TEXT") + }, + { + ImmutableMap.of(c1, GenericType.of(String.class), c2, GenericType.of(Integer.class)), + ImmutableMap.of(c1, columnMetadataText, c2, columnMetadataInt), + Collections.emptyList() + }, + { + ImmutableMap.of(c1, GenericType.of(String.class), c2, GenericType.of(Integer.class)), + ImmutableMap.of(c1, columnMetadataInt, c2, columnMetadataText), + ImmutableList.of( + "Field: c1, Entity Type: java.lang.String, CQL type: INT", + "Field: c2, Entity Type: java.lang.Integer, CQL type: TEXT") + } + }; + } +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java index 69becbe18f5..ab87d1bd792 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java @@ -33,6 +33,8 @@ */ public abstract class MapperBuilder { + public static final String SCHEMA_VALIDATION_ENABLED_SETTING = 
+ "datastax.mapper.schemaValidationEnabled"; protected final CqlSession session; protected CqlIdentifier defaultKeyspaceId; protected Map customState; @@ -40,6 +42,8 @@ public abstract class MapperBuilder { protected MapperBuilder(CqlSession session) { this.session = session; this.customState = new HashMap<>(); + // schema validation is enabled by default + customState.put(SCHEMA_VALIDATION_ENABLED_SETTING, true); } /** @@ -91,6 +95,17 @@ public MapperBuilder withDefaultKeyspace(@Nullable String keyspaceName) return withDefaultKeyspace(keyspaceName == null ? null : CqlIdentifier.fromCql(keyspaceName)); } + /** + * When the new instance of a class annotated with {@code @Dao} is created an automatic check for + * schema validation is performed. It verifies if all {@code @Dao} entity fields are present in + * CQL table. If not the exception is thrown. This check has startup overhead so once your app is + * stable you may want to disable it. The schema Validation check is enabled by default. + */ + public MapperBuilder withSchemaValidationEnabled(boolean enableSchemaValidation) { + customState.put(SCHEMA_VALIDATION_ENABLED_SETTING, enableSchemaValidation); + return this; + } + /** * Stores custom state that will be propagated to {@link MapperContext#getCustomState()}. * diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java new file mode 100644 index 00000000000..42356f55fe3 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * This annotation could be used only on a class that is annotated with @Entity annotation. The + * logic will be applied only, if you are running mapper {@code withSchemaValidationEnabled(true)}. + * + *

      Example: + * + *

      + * @Entity
      + * @SchemaHint(targetElement = @SchemaHint.TargetElement.TABLE)
      + * public class Product {
      + *   // fields of the entity
      + * }
      + * 
      + * + *

By default, if you create an @Entity without the @SchemaHint annotation, the
+ * following logic is applied when doing validation:
+ *
+ * 

        + *
      1. Check if the given entity is a Table, if it is - validates if all fields of the Entity are + * present in the CQL table. + *
      2. If it is not a table, check if the given entity is a UDT. If this is a case check if all + * Entity fields are present in the CQL UDT type. + *
      3. If there is not information about Table or UDT it means that the given @Entity has no + * corresponding CQL definition and error is generated. + *
      + * + *

If you want the mapper to generate code only to check the path for UDT or Table, you can
+ * provide the @SchemaHint on the Entity:
+ *

        + *
      1. If you will set the {@code targetElement = TABLE}, then only the code path for checking CQL + * TABLE will be generated. If there is no corresponding CQL Table, then there is no check of + * UDT. The code throws an Exception denoting that CQL Table is missing for this Entity. + *
      2. If you will set the {@code targetElement = UDT}, then only the code path for checking CQL + * UDT will be generated. If there is no corresponding CQL UDT type, the code throws an + * Exception denoting that CQL UDT is missing for this Entity. + *
      + */ +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +public @interface SchemaHint { + TargetElement targetElement(); + + enum TargetElement { + TABLE, + UDT + } +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java index 9b17935702c..9937a977704 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java @@ -16,14 +16,28 @@ package com.datastax.oss.driver.internal.mapper.entity; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.mapper.MapperBuilder; import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.MapperException; +import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.internal.core.util.CollectionsUtils; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; 
public abstract class EntityHelperBase implements EntityHelper { @@ -75,4 +89,118 @@ protected void throwIfKeyspaceMissing() { DaoKeyspace.class.getSimpleName())); } } + + public List findMissingColumns( + List entityColumns, Collection cqlColumns) { + return findMissingCqlIdentifiers( + entityColumns, + cqlColumns.stream().map(ColumnMetadata::getName).collect(Collectors.toList())); + } + + public List findMissingCqlIdentifiers( + List entityColumns, Collection cqlColumns) { + List missingColumns = new ArrayList<>(); + for (CqlIdentifier entityCqlIdentifier : entityColumns) { + if (!cqlColumns.contains(entityCqlIdentifier)) { + missingColumns.add(entityCqlIdentifier); + } + } + return missingColumns; + } + + /** + * When the new instance of a class annotated with {@link Dao} is created an automatic check for + * schema validation is performed. It verifies if all {@link Dao} entity fields are present in CQL + * table. If not the {@link IllegalArgumentException} exception with detailed message is thrown. + * This check has startup overhead so once your app is stable you may want to disable it. The + * schema validation check is enabled by default. It can be disabled using the {@link + * MapperBuilder#withSchemaValidationEnabled(boolean)} method. 
+ */ + public abstract void validateEntityFields(); + + public static List findTypeMismatches( + Map> entityColumns, + Map cqlColumns, + CodecRegistry codecRegistry) { + Map cqlColumnsDataTypes = + cqlColumns.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + cqlIdentifierColumnMetadataEntry -> + cqlIdentifierColumnMetadataEntry.getValue().getType())); + + return findDataTypeMismatches(entityColumns, cqlColumnsDataTypes, codecRegistry); + } + + public static List findTypeMismatches( + Map> entityColumns, + List cqlColumns, + List cqlTypes, + CodecRegistry codecRegistry) { + return findDataTypeMismatches( + entityColumns, + CollectionsUtils.combineListsIntoOrderedMap(cqlColumns, cqlTypes), + codecRegistry); + } + + private static List findDataTypeMismatches( + Map> entityColumns, + Map cqlColumns, + CodecRegistry codecRegistry) { + List missingCodecs = new ArrayList<>(); + + for (Map.Entry> entityEntry : entityColumns.entrySet()) { + DataType datType = cqlColumns.get(entityEntry.getKey()); + if (datType == null) { + // this will not happen because it will be catch by the generateMissingColumnsCheck() method + throw new AssertionError( + "There is no cql column for entity column: " + entityEntry.getKey()); + } + try { + codecRegistry.codecFor(datType, entityEntry.getValue()); + } catch (CodecNotFoundException exception) { + missingCodecs.add( + String.format( + "Field: %s, Entity Type: %s, CQL type: %s", + entityEntry.getKey(), exception.getJavaType(), exception.getCqlType())); + } + } + return missingCodecs; + } + + public void throwMissingUdtTypesIfNotEmpty( + List missingTypes, + CqlIdentifier keyspaceId, + CqlIdentifier tableId, + String entityClassName) { + throwMissingTypesIfNotEmpty(missingTypes, keyspaceId, tableId, entityClassName, "udt"); + } + + public void throwMissingTableTypesIfNotEmpty( + List missingTypes, + CqlIdentifier keyspaceId, + CqlIdentifier tableId, + String entityClassName) { + 
throwMissingTypesIfNotEmpty(missingTypes, keyspaceId, tableId, entityClassName, "table"); + } + + public void throwMissingTypesIfNotEmpty( + List missingTypes, + CqlIdentifier keyspaceId, + CqlIdentifier tableId, + String entityClassName, + String type) { + if (!missingTypes.isEmpty()) { + throw new IllegalArgumentException( + String.format( + "The CQL ks.%s: %s.%s defined in the entity class: %s declares type mappings that are not supported by the codec registry:\n%s", + type, keyspaceId, tableId, entityClassName, String.join("\n", missingTypes))); + } + } + + public boolean keyspaceNamePresent( + Map keyspaces, CqlIdentifier keyspaceId) { + return keyspaces.keySet().contains(keyspaceId); + } } From e9eefad1be13f56589dff6400631679e2ec01bab Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 3 Feb 2020 14:31:46 -0800 Subject: [PATCH 275/979] Bump native-protocol to 1.4.9 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 603857b3c07..4d525d61cf8 100644 --- a/pom.xml +++ b/pom.xml @@ -45,7 +45,7 @@ 25.1-jre 2.1.11 4.0.5 - 1.4.8 + 1.4.9 4.1.39.Final 1.7.26 1.2.1 From 45e84ff292c1e46c265a3f739731a0ef5c4c0bf7 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 4 Feb 2020 14:13:55 -0800 Subject: [PATCH 276/979] Improve visibility of request logger link in logging manual page --- manual/core/logging/README.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/manual/core/logging/README.md b/manual/core/logging/README.md index 39f2c7a320a..ff0ee5303b6 100644 --- a/manual/core/logging/README.md +++ b/manual/core/logging/README.md @@ -5,6 +5,9 @@ * based on SLF4J. * config file examples for Logback and Log4J. +**If you're looking for information about the request logger, see the [request +tracker](../request_tracker/#request-logger) page.** + ----- The driver uses [SLF4J] as a logging facade. This allows you to plug in your preferred logging @@ -107,11 +110,6 @@ investigate an issue. 
Keep in mind that they are quite verbose, in particular TRACE. It's a good idea to only enable them on a limited set of categories. -### Logging request latencies - -The driver provides a built-in component to log the latency and outcome of every application -request. See the [request tracker](../request_tracker/#request-logger) page for more details. - ### Configuration examples #### Logback From 75b670ea5a715a3aa0deac4e67642e2b32966c19 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 23 Dec 2019 13:40:38 -0300 Subject: [PATCH 277/979] Improve javadocs of reload() and supportsReloading() --- .../api/core/config/DriverConfigLoader.java | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index 05f615a3e9b..7855385cbc3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -237,21 +237,27 @@ static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder() { void onDriverInit(@NonNull DriverContext context); /** - * Triggers an immediate reload attempt. + * Triggers an immediate reload attempt and returns a stage that completes once the attempt is + * finished, with a boolean indicating whether the configuration changed as a result of this + * reload. * - * @return a stage that completes once the attempt is finished, with a boolean indicating whether - * the configuration changed as a result of this reload. If so, it's also guaranteed that - * internal driver components have been notified by that time; note however that some react to - * the notification asynchronously, so they may not have completely applied all resulting - * changes yet. 
If this loader does not support programmatic reloading — which you can - * check by calling {@link #supportsReloading()} before this method — the returned - * object will fail immediately with an {@link UnsupportedOperationException}. + *

      If so, it's also guaranteed that internal driver components have been notified by that time; + * note however that some react to the notification asynchronously, so they may not have + * completely applied all resulting changes yet. + * + *

      If this loader does not support programmatic reloading — which you can check by + * calling {@link #supportsReloading()} before this method — the returned stage should fail + * immediately with an {@link UnsupportedOperationException}. The default implementation of this + * interface does support programmatic reloading however, and never returns a failed stage. */ @NonNull CompletionStage reload(); /** * Whether this implementation supports programmatic reloading with the {@link #reload()} method. + * + *

      The default implementation of this interface does support programmatic reloading and always + * returns true. */ boolean supportsReloading(); From a351e002e3c2b3a8a45026d53b45b7b0b502929f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 23 Dec 2019 13:43:05 -0300 Subject: [PATCH 278/979] JAVA-2592: Make reload support parameterizable for DefaultDriverConfigLoader --- changelog/README.md | 1 + .../typesafe/DefaultDriverConfigLoader.java | 47 +++++++++++++++---- .../DefaultDriverConfigLoaderTest.java | 15 ++++++ 3 files changed, 54 insertions(+), 9 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 5181569ade2..428cd9dbcbe 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [improvement] JAVA-2592: Make reload support parameterizable for DefaultDriverConfigLoader - [new feature] JAVA-2263: Add optional schema validation to the mapper ### 4.4.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java index 93a1b0b8316..178561b323f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.internal.core.context.EventBus; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; @@ -40,7 +41,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** The default loader; it is based on Typesafe 
Config and reloads at a configurable interval. */ +/** + * The default loader; it is based on Typesafe Config and optionally reloads at a configurable + * interval. + */ @ThreadSafe public class DefaultDriverConfigLoader implements DriverConfigLoader { @@ -56,12 +60,14 @@ public class DefaultDriverConfigLoader implements DriverConfigLoader { private final Supplier configSupplier; private final TypesafeDriverConfig driverConfig; + private final boolean supportsReloading; private volatile SingleThreaded singleThreaded; /** * Builds a new instance with the default Typesafe config loading rules (documented in {@link - * SessionBuilder#withConfigLoader(DriverConfigLoader)}) and the core driver options. + * SessionBuilder#withConfigLoader(DriverConfigLoader)}) and the core driver options. This + * constructor enables config reloading (that is, {@link #supportsReloading} will return true). */ public DefaultDriverConfigLoader() { this(DEFAULT_CONFIG_SUPPLIER); @@ -69,11 +75,28 @@ public DefaultDriverConfigLoader() { /** * Builds an instance with custom arguments, if you want to load the configuration from somewhere - * else. + * else. This constructor enables config reloading (that is, {@link #supportsReloading} will + * return true). + * + * @param configSupplier A supplier for the Typesafe {@link Config}; it will be invoked once when + * this object is instantiated, and at each reload attempt, if reloading is enabled. */ public DefaultDriverConfigLoader(Supplier configSupplier) { + this(configSupplier, true); + } + + /** + * Builds an instance with custom arguments, if you want to load the configuration from somewhere + * else and/or modify config reload behavior. + * + * @param configSupplier A supplier for the Typesafe {@link Config}; it will be invoked once when + * this object is instantiated, and at each reload attempt, if reloading is enabled. + * @param supportsReloading Whether config reloading should be enabled or not. 
+ */ + public DefaultDriverConfigLoader(Supplier configSupplier, boolean supportsReloading) { this.configSupplier = configSupplier; this.driverConfig = new TypesafeDriverConfig(configSupplier.get()); + this.supportsReloading = supportsReloading; } @NonNull @@ -89,15 +112,21 @@ public void onDriverInit(@NonNull DriverContext driverContext) { @NonNull @Override - public CompletionStage reload() { - CompletableFuture result = new CompletableFuture<>(); - RunOrSchedule.on(singleThreaded.adminExecutor, () -> singleThreaded.reload(result)); - return result; + public final CompletionStage reload() { + if (supportsReloading) { + CompletableFuture result = new CompletableFuture<>(); + RunOrSchedule.on(singleThreaded.adminExecutor, () -> singleThreaded.reload(result)); + return result; + } else { + return CompletableFutures.failedFuture( + new UnsupportedOperationException( + "This instance of DefaultDriverConfigLoader does not support reloading")); + } } @Override - public boolean supportsReloading() { - return true; + public final boolean supportsReloading() { + return supportsReloading; } /** For internal use only, this leaks a Typesafe config type. 
*/ diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java index 5f63468f7fb..f88d12e77d9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java @@ -208,4 +208,19 @@ public void should_load_from_file() { assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); } + + @Test + public void should_return_failed_future_if_reloading_not_supported() { + DefaultDriverConfigLoader loader = + new DefaultDriverConfigLoader(() -> ConfigFactory.parseString(configSource.get()), false); + assertThat(loader.supportsReloading()).isFalse(); + CompletionStage stage = loader.reload(); + assertThatStage(stage) + .isFailed( + t -> + assertThat(t) + .isInstanceOf(UnsupportedOperationException.class) + .hasMessage( + "This instance of DefaultDriverConfigLoader does not support reloading")); + } } From 9ff4095ba6e3d61cad3303db5bfb86452258a6d4 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 5 Feb 2020 10:29:30 -0800 Subject: [PATCH 279/979] Revisit contributing guidelines about Stream API --- CONTRIBUTING.md | 50 ++++++++++++++++++------------------------------- 1 file changed, 18 insertions(+), 32 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 068498032cd..e6eba076dec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -139,43 +139,29 @@ line. When you add or review new code, take a moment to run the tests in `DEBUG` mode and check if the output looks good. -### No stream API +### Don't abuse the stream API -Please don't use `java.util.stream` in the driver codebase. 
Streams were designed for *data -processing*, not to make your collection traversals "functional". - -Here's an example from the driver codebase (`ChannelSet`): - -```java -DriverChannel[] snapshot = this.channels; -DriverChannel best = null; -int bestScore = 0; -for (DriverChannel channel : snapshot) { - int score = channel.availableIds(); - if (score > bestScore) { - bestScore = score; - best = channel; - } -} -return best; -``` - -And here's a terrible way to rewrite it using streams: +The `java.util.stream` API is often used (abused?) as a "functional API for collections": ```java -// Don't do this: -DriverChannel best = - Stream.of(snapshot) - .reduce((a, b) -> a.availableIds() > b.availableIds() ? a : b) - .get(); +List sizes = words.stream().map(String::length).collect(Collectors.toList()); ``` -The stream version is not easier to read, and will probably be slower (creating intermediary objects -vs. an array iteration, compounded by the fact that this particular array typically has a low -cardinality). - -The driver never does the kind of processing that the stream API is intended for; the only large -collections we manipulate are result sets, and these get passed on to the client directly. +The perceived advantages of this approach over traditional for-loops are debatable: + +* readability: this is highly subjective. But consider the following: + * everyone can read for-loops, whether they are familiar with the Stream API or not. The opposite + is not true. + * the stream API does not spell out all the details: what kind of list does `Collectors.toList()` + return? Is it pre-sized? Mutable? Thread-safe? + * the stream API looks pretty on simple examples, but things can get ugly fast. Try rewriting + `NetworkTopologyReplicationStrategy` with streams. +* concision: this is irrelevant. When we look at code we care about maintainability, not how many + keystrokes the author saved. 
The for-loop version of the above example is just 5 lines long, and + your brain doesn't take longer to parse it. + +The bottom line: don't try to "be functional" at all cost. Plain old for-loops are often just as +simple. ### Never assume a specific format for `toString()` From 258ec45593f3488bb5c256ecb3b9aa0d71355e65 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 6 Feb 2020 13:35:06 -0600 Subject: [PATCH 280/979] JAVA-2624: Expose a config option for the connect timeout --- changelog/README.md | 1 + .../driver/api/core/config/DefaultDriverOption.java | 7 +++++++ .../internal/core/context/DefaultNettyOptions.java | 5 +++++ core/src/main/resources/reference.conf | 12 ++++++++++++ .../core/channel/ChannelFactoryTestBase.java | 2 ++ 5 files changed, 27 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 428cd9dbcbe..34aad71d62f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [improvement] JAVA-2624: Expose a config option for the connect timeout - [improvement] JAVA-2592: Make reload support parameterizable for DefaultDriverConfigLoader - [new feature] JAVA-2263: Add optional schema validation to the mapper diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 913983e2962..072657f8913 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -797,6 +797,13 @@ public enum DefaultDriverOption implements DriverOption { *

      Value-type: boolean */ LOAD_BALANCING_POLICY_SLOW_AVOIDANCE("basic.load-balancing-policy.slow-replica-avoidance"), + + /** + * The timeout to use when establishing driver connections. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + CONNECTION_CONNECT_TIMEOUT("advanced.connection.connect-timeout"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java index f0a222a3d00..aefd6d55bde 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java @@ -162,6 +162,11 @@ public void afterBootstrapInitialized(Bootstrap bootstrap) { int sendBufferSize = config.getInt(DefaultDriverOption.SOCKET_SEND_BUFFER_SIZE); bootstrap.option(ChannelOption.SO_SNDBUF, sendBufferSize); } + if (config.isDefined(DefaultDriverOption.CONNECTION_CONNECT_TIMEOUT)) { + Duration connectTimeout = config.getDuration(DefaultDriverOption.CONNECTION_CONNECT_TIMEOUT); + bootstrap.option( + ChannelOption.CONNECT_TIMEOUT_MILLIS, Long.valueOf(connectTimeout.toMillis()).intValue()); + } } @Override diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 9056bda0e2e..3edd91a13ab 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -339,6 +339,18 @@ datastax-java-driver { # ADVANCED OPTIONS ------------------------------------------------------------------------------- advanced.connection { + # The timeout to use when establishing driver connections. + # + # This timeout is for controlling how long the driver will wait for the underlying channel + # to actually connect to the server. This is not the time limit for completing protocol + # negotiations, only the time limit for establishing a channel connection. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for connections created after the + # change. 
+ # Overridable in a profile: no + connect-timeout = 5 seconds + # The timeout to use for internal queries that run as part of the initialization process, just # after we open a connection. If this timeout fires, the initialization of the connection will # fail. If this is the first connection ever, the driver will fail to initialize as well, diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java index 8508fbae46b..0d4c15c558c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java @@ -123,6 +123,8 @@ public void setup() throws InterruptedException { when(defaultProfile.getInt(DefaultDriverOption.CONNECTION_MAX_REQUESTS)).thenReturn(1); when(defaultProfile.getDuration(DefaultDriverOption.HEARTBEAT_INTERVAL)) .thenReturn(Duration.ofSeconds(30)); + when(defaultProfile.getDuration(DefaultDriverOption.CONNECTION_CONNECT_TIMEOUT)) + .thenReturn(Duration.ofSeconds(5)); when(context.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); when(context.getNettyOptions()).thenReturn(nettyOptions); From 662babcd735b13739245ef155a6a46bb9f11c1e3 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Fri, 31 Jan 2020 15:47:04 -0600 Subject: [PATCH 281/979] JAVA-2625: Provide user-friendly programmatic subclass of DseGssApiAuthProviderBase --- changelog/README.md | 1 + .../core/auth/DseGssApiAuthProviderBase.java | 38 +++- .../ProgrammaticDseGssApiAuthProvider.java | 172 ++++++++++++++++++ .../core/auth/DseGssApiAuthProvider.java | 23 +-- .../core/auth/DseGssApiAuthProviderIT.java | 31 ++++ manual/core/authentication/README.md | 18 +- 6 files changed, 257 insertions(+), 26 deletions(-) create mode 100644 
core/src/main/java/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.java diff --git a/changelog/README.md b/changelog/README.md index 34aad71d62f..3adeb001e27 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [new feature] JAVA-2625: Provide user-friendly programmatic configuration for kerberos - [improvement] JAVA-2624: Expose a config option for the connect timeout - [improvement] JAVA-2592: Make reload support parameterizable for DefaultDriverConfigLoader - [new feature] JAVA-2263: Add optional schema validation to the mapper diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java index 4fa9fed7f51..36fb0d5475c 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.java @@ -33,6 +33,7 @@ import java.util.Map; import java.util.Objects; import javax.security.auth.Subject; +import javax.security.auth.login.AppConfigurationEntry; import javax.security.auth.login.Configuration; import javax.security.auth.login.LoginContext; import javax.security.auth.login.LoginException; @@ -174,14 +175,32 @@ public Builder() { /** * Sets a login configuration that will be used to create a {@link LoginContext}. * - *

      You MUST call either this method or {@link #withSubject(Subject)}; if both are called, - * the subject takes precedence, and the login configuration will be ignored. + *

      You MUST call either a withLoginConfiguration method or {@link #withSubject(Subject)}; + * if both are called, the subject takes precedence, and the login configuration will be + * ignored. + * + * @see #withLoginConfiguration(Map) */ @NonNull public Builder withLoginConfiguration(@Nullable Configuration loginConfiguration) { this.loginConfiguration = loginConfiguration; return this; } + /** + * Sets a login configuration that will be used to create a {@link LoginContext}. + * + *

      This is an alternative to {@link #withLoginConfiguration(Configuration)}, that builds + * the configuration from {@code Krb5LoginModule} with the given options. + * + *

      You MUST call either a withLoginConfiguration method or {@link #withSubject(Subject)}; + * if both are called, the subject takes precedence, and the login configuration will be + * ignored. + */ + @NonNull + public Builder withLoginConfiguration(@Nullable Map loginConfiguration) { + this.loginConfiguration = fetchLoginConfiguration(loginConfiguration); + return this; + } /** * Sets a previously authenticated subject to reuse. @@ -237,6 +256,21 @@ public GssApiOptions build() { authorizationId, ImmutableMap.copyOf(saslProperties)); } + + public static Configuration fetchLoginConfiguration(Map options) { + return new Configuration() { + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry(String name) { + return new AppConfigurationEntry[] { + new AppConfigurationEntry( + "com.sun.security.auth.module.Krb5LoginModule", + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + options) + }; + } + }; + } } } diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.java new file mode 100644 index 00000000000..1cb193fb726 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.java @@ -0,0 +1,172 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.api.core.auth; + +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * {@link AuthProvider} that provides GSSAPI authenticator instances for clients to connect to DSE + * clusters secured with {@code DseAuthenticator}, in a programmatic way. + * + *

      To use this provider the corresponding GssApiOptions must be passed into the provider + * directly, for example: + * + *

      + *     DseGssApiAuthProviderBase.GssApiOptions.Builder builder =
      + *         DseGssApiAuthProviderBase.GssApiOptions.builder();
      + *     Map<String, String> loginConfig =
      + *         ImmutableMap.of(
      + *             "principal",
      + *             "user principal here ex cassandra@DATASTAX.COM",
      + *             "useKeyTab",
      + *             "true",
      + *             "refreshKrb5Config",
      + *             "true",
      + *             "keyTab",
      + *             "Path to keytab file here");
      + *
      + *     builder.withLoginConfiguration(loginConfig);
      + *
      + *     CqlSession session =
      + *         CqlSession.builder()
      + *             .withAuthProvider(new ProgrammaticDseGssApiAuthProvider(builder.build()))
      + *             .build();
      + * 
      + * + * or alternatively + * + *
      + *     DseGssApiAuthProviderBase.GssApiOptions.Builder builder =
      + *         DseGssApiAuthProviderBase.GssApiOptions.builder().withSubject(subject);
      + *     CqlSession session =
      + *         CqlSession.builder()
      + *             .withAuthProvider(new ProgrammaticDseGssApiAuthProvider(builder.build()))
      + *             .build();
      + * 
      + * + *

      Kerberos Authentication

      + * + * Keytab and ticket cache settings are specified using a standard JAAS configuration file. The + * location of the file can be set using the java.security.auth.login.config system + * property or by adding a login.config.url.n entry in the java.security + * properties file. Alternatively a login-configuration, or subject can be provided to the provider + * via the GssApiOptions (see above). + * + *

      See the following documents for further details: + * + *

        + *
      1. JAAS + * Login Configuration File; + *
      2. Krb5LoginModule + * options; + *
      3. JAAS + * Authentication Tutorial for more on JAAS in general. + *
      + * + *

      Authentication using ticket cache

      + * + * Run kinit to obtain a ticket and populate the cache before connecting. JAAS config: + * + *
      + * DseClient {
      + *   com.sun.security.auth.module.Krb5LoginModule required
      + *     useTicketCache=true
      + *     renewTGT=true;
      + * };
      + * 
      + * + *

      Authentication using a keytab file

      + * + * To enable authentication using a keytab file, specify its location on disk. If your keytab + * contains more than one principal key, you should also specify which one to select. This + * information can also be specified in the driver config, under the login-configuration section. + * + *
      + * DseClient {
      + *     com.sun.security.auth.module.Krb5LoginModule required
      + *       useKeyTab=true
      + *       keyTab="/path/to/file.keytab"
      + *       principal="user@MYDOMAIN.COM";
      + * };
      + * 
      + * + *

      Specifying SASL protocol name

      + * + * The SASL protocol name used by this auth provider defaults to " + * {@value #DEFAULT_SASL_SERVICE_NAME}". + * + *

      Important: the SASL protocol name should match the username of the Kerberos + * service principal used by the DSE server. This information is specified in the dse.yaml file by + * the {@code service_principal} option under the kerberos_options + * section, and may vary from one DSE installation to another – especially if you installed + * DSE with an automated package installer. + * + *

      For example, if your dse.yaml file contains the following: + * + *

      {@code
      + * kerberos_options:
      + *     ...
      + *     service_principal: cassandra/my.host.com@MY.REALM.COM
      + * }
      + * + * The correct SASL protocol name to use when authenticating against this DSE server is "{@code + * cassandra}". + * + *

      Should you need to change the SASL protocol name specify it in the GssApiOptions, use the + * method below: + * + *

      + *     DseGssApiAuthProviderBase.GssApiOptions.Builder builder =
      + *         DseGssApiAuthProviderBase.GssApiOptions.builder();
      + *     builder.withSaslProtocol("alternate");
      + *     DseGssApiAuthProviderBase.GssApiOptions options = builder.build();
      + * 
      + * + *

      Should internal sasl properties need to be set such as qop. This can also be accomplished by + * setting it in the GssApiOptions: + * + *

      + *   DseGssApiAuthProviderBase.GssApiOptions.Builder builder =
      + *         DseGssApiAuthProviderBase.GssApiOptions.builder();
      + *     builder.addSaslProperty("javax.security.sasl.qop", "auth-conf");
      + *     DseGssApiAuthProviderBase.GssApiOptions options = builder.build();
      + * 
      + * + * @see Authenticating + * a DSE cluster with Kerberos + */ +public class ProgrammaticDseGssApiAuthProvider extends DseGssApiAuthProviderBase { + private final GssApiOptions options; + + public ProgrammaticDseGssApiAuthProvider(GssApiOptions options) { + super("Programmatic-Kerberos"); + this.options = options; + } + + @NonNull + @Override + protected GssApiOptions getOptions( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + return options; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java index 7c93f6c015c..be1b64fad7c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java @@ -23,8 +23,6 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.Configuration; import net.jcip.annotations.ThreadSafe; /** @@ -196,26 +194,7 @@ protected GssApiOptions getOptions( } Map loginConfigurationMap = config.getStringMap(DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION); - optionsBuilder.withLoginConfiguration(fetchLoginConfiguration(loginConfigurationMap)); + optionsBuilder.withLoginConfiguration(loginConfigurationMap); return optionsBuilder.build(); } - - /** - * Creates a configuration that depends on the given keytab file for authenticating the given - * user. 
- */ - private static Configuration fetchLoginConfiguration(Map options) { - return new Configuration() { - - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String name) { - return new AppConfigurationEntry[] { - new AppConfigurationEntry( - "com.sun.security.auth.module.Krb5LoginModule", - AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, - options) - }; - } - }; - } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java index 1778161e175..b8884e68b27 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java @@ -25,7 +25,9 @@ import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.List; +import java.util.Map; import org.junit.Assume; import org.junit.ClassRule; import org.junit.Test; @@ -100,6 +102,35 @@ public void should_not_authenticate_if_keytab_does_not_map_to_valid_principal() verifyException(e); } } + /** + * Ensures that a Session can be established to a DSE server secured with Kerberos and that simple + * queries can be made using a client configuration that is provided via programatic interface + */ + @Test + public void should_authenticate_using_kerberos_with_keytab_programmatically() { + DseGssApiAuthProviderBase.GssApiOptions.Builder builder = + DseGssApiAuthProviderBase.GssApiOptions.builder(); + Map loginConfig = + ImmutableMap.of( + "principal", + ads.getUserPrincipal(), + "useKeyTab", + "true", + "refreshKrb5Config", + "true", + "keyTab", + ads.getUserKeytab().getAbsolutePath()); + + 
builder.withLoginConfiguration(loginConfig); + try (CqlSession session = + CqlSession.builder() + .withAuthProvider(new ProgrammaticDseGssApiAuthProvider(builder.build())) + .build()) { + + ResultSet set = session.execute("select * from system.local"); + assertThat(set).isNotNull(); + } + } private void verifyException(AllNodesFailedException anfe) { assertThat(anfe.getAllErrors()).hasSize(1); diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index ff28f8231d9..1c3f5f72c90 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -129,8 +129,21 @@ this means they are vulnerable to an attacker who is able to perform memory dump acceptable for you, consider writing your own [AuthProvider] implementation; [PlainTextAuthProviderBase] is a good starting point. -Similarly, the driver provides [DseGssApiAuthProviderBase] as a starting point to write your own -GSSAPI auth provider. +Similarly, [ProgrammaticDseGssApiAuthProvider] lets you configure GSSAPI programmatically: + +```java +import com.datastax.dse.driver.api.core.auth.DseGssApiAuthProviderBase.GssApiOptions; + +javax.security.auth.Subject subject = ...; // do your Kerberos configuration here + +GssApiOptions options = GssApiOptions.builder().withSubject(subject).build(); +CqlSession session = CqlSession.builder() + .withAuthProvider(new ProgrammaticDseGssApiAuthProvider(options)) + .build(); +``` + +For more complex needs (e.g. if building the options once and reusing them doesn't work for you), +you can subclass [DseGssApiAuthProviderBase]. 
### Proxy authentication @@ -206,6 +219,7 @@ session.execute(statement); [DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html [PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html [DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html [ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- [SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- [SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- From 93a013ed3e266073941b86ed7c5c4f6cc057fe32 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 29 Jan 2020 09:44:12 -0600 Subject: [PATCH 282/979] JAVA-2617: Reinstate generation of deps.txt for Insights --- changelog/README.md | 1 + core/pom.xml | 34 ++++++++++++ .../core/insights/PlatformInfoFinder.java | 15 +++++- .../driver/internal/DependencyCheckTest.java | 36 +++++++++++++ .../internal/DependencyCheckTestBase.java | 53 +++++++++++++++++++ core/src/test/resources/project.properties | 17 ++++++ mapper-processor/pom.xml | 41 ++++++++++++++ .../mapper/processor/DependencyCheckTest.java | 39 ++++++++++++++ .../src/test/resources/project.properties | 17 ++++++ mapper-runtime/pom.xml | 46 ++++++++++++++++ .../api/mapper/DependencyCheckTest.java | 
38 +++++++++++++ .../src/test/resources/project.properties | 17 ++++++ query-builder/pom.xml | 41 ++++++++++++++ .../querybuilder/DependencyCheckTest.java | 38 +++++++++++++ .../src/test/resources/project.properties | 17 ++++++ 15 files changed, 449 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTestBase.java create mode 100644 core/src/test/resources/project.properties create mode 100644 mapper-processor/src/test/java/com/datastax/dse/driver/internal/mapper/processor/DependencyCheckTest.java create mode 100644 mapper-processor/src/test/resources/project.properties create mode 100644 mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/DependencyCheckTest.java create mode 100644 mapper-runtime/src/test/resources/project.properties create mode 100644 query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/DependencyCheckTest.java create mode 100644 query-builder/src/test/resources/project.properties diff --git a/changelog/README.md b/changelog/README.md index 3adeb001e27..44806ff49a6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [bug] JAVA-2617: Reinstate generation of deps.txt for Insights - [new feature] JAVA-2625: Provide user-friendly programmatic configuration for kerberos - [improvement] JAVA-2624: Expose a config option for the connect timeout - [improvement] JAVA-2592: Make reload support parameterizable for DefaultDriverConfigLoader diff --git a/core/pom.xml b/core/pom.xml index e92f9c57318..1ac87d67166 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -180,6 +180,22 @@ false + + + src/test/resources + + project.properties + + true + + + src/test/resources + + project.properties + + false + + maven-jar-plugin @@ -267,6 +283,24 @@ + + maven-dependency-plugin + + + generate-dependency-list + + list + + 
generate-resources + + + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/deps.txt + + diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java index c8929933fc9..4813027df74 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java @@ -81,10 +81,23 @@ private Map> getRuntimeInfo() this.getClass() .getResourceAsStream("/com/datastax/dse/driver/internal/querybuilder/deps.txt")); + Map mapperProcessorDeps = + fetchDependenciesFromFile( + this.getClass() + .getResourceAsStream( + "/com/datastax/dse/driver/internal/mapper/processor/deps.txt")); + + Map mapperRuntimeDeps = + fetchDependenciesFromFile( + this.getClass() + .getResourceAsStream("/com/datastax/dse/driver/internal/mapper/deps.txt")); + Map> runtimeDependencies = new LinkedHashMap<>(); putIfNonEmpty(coreDeps, runtimeDependencies, "core"); putIfNonEmpty(queryBuilderDeps, runtimeDependencies, "query-builder"); + putIfNonEmpty(mapperProcessorDeps, runtimeDependencies, "mapper-processor"); + putIfNonEmpty(mapperRuntimeDeps, runtimeDependencies, "mapper-runtime"); addJavaVersion(runtimeDependencies); return runtimeDependencies; } @@ -215,7 +228,7 @@ static class DependencyFromFile { private final String groupId; private final String artifactId; private final String version; - private boolean optional; + private final boolean optional; DependencyFromFile(String groupId, String artifactId, String version, boolean optional) { this.groupId = groupId; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTest.java b/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTest.java new file mode 100644 index 00000000000..f12dd8d20f8 
--- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTest.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal; + +import java.nio.file.Path; +import java.nio.file.Paths; + +public class DependencyCheckTest extends DependencyCheckTestBase { + + @Override + protected Path getDepsTxtPath() { + return Paths.get( + getBaseResourcePathString(), + "target", + "classes", + "com", + "datastax", + "dse", + "driver", + "internal", + "deps.txt"); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTestBase.java new file mode 100644 index 00000000000..b80a6118eed --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/DependencyCheckTestBase.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.util.Properties; +import org.junit.Test; + +public abstract class DependencyCheckTestBase { + + private String baseResourcePath; + + protected DependencyCheckTestBase() { + Properties projectProperties = new Properties(); + try (InputStream is = this.getClass().getResourceAsStream("/project.properties")) { + projectProperties.load(is); + baseResourcePath = projectProperties.getProperty("project.basedir"); + } catch (IOException ioe) { + throw new AssertionError( + "Error retrieving \"project.basedir\" value from \"/project.properties\". Please check test resources in this project.", + ioe); + } + assert baseResourcePath != null; + } + + @Test + public void should_generate_deps_txt() { + assertThat(getDepsTxtPath()).exists(); + } + + protected final String getBaseResourcePathString() { + return baseResourcePath; + } + + protected abstract Path getDepsTxtPath(); +} diff --git a/core/src/test/resources/project.properties b/core/src/test/resources/project.properties new file mode 100644 index 00000000000..a977778735e --- /dev/null +++ b/core/src/test/resources/project.properties @@ -0,0 +1,17 @@ +# +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +project.basedir=${basedir} \ No newline at end of file diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 28584648836..e8d057da50d 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -76,8 +76,31 @@ logback-classic test + + com.datastax.oss + java-driver-core + test + ${project.version} + test-jar + + + + src/test/resources + + project.properties + + true + + + src/test/resources + + project.properties + + false + + maven-compiler-plugin @@ -125,6 +148,24 @@ true + + maven-dependency-plugin + + + generate-dependency-list + + list + + generate-resources + + + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/processor/deps.txt + + diff --git a/mapper-processor/src/test/java/com/datastax/dse/driver/internal/mapper/processor/DependencyCheckTest.java b/mapper-processor/src/test/java/com/datastax/dse/driver/internal/mapper/processor/DependencyCheckTest.java new file mode 100644 index 00000000000..ee9223affe3 --- /dev/null +++ b/mapper-processor/src/test/java/com/datastax/dse/driver/internal/mapper/processor/DependencyCheckTest.java @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.mapper.processor; + +import com.datastax.dse.driver.internal.DependencyCheckTestBase; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class DependencyCheckTest extends DependencyCheckTestBase { + + @Override + protected Path getDepsTxtPath() { + return Paths.get( + getBaseResourcePathString(), + "target", + "classes", + "com", + "datastax", + "dse", + "driver", + "internal", + "mapper", + "processor", + "deps.txt"); + } +} diff --git a/mapper-processor/src/test/resources/project.properties b/mapper-processor/src/test/resources/project.properties new file mode 100644 index 00000000000..a977778735e --- /dev/null +++ b/mapper-processor/src/test/resources/project.properties @@ -0,0 +1,17 @@ +# +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +project.basedir=${basedir} \ No newline at end of file diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ff5f03a28c5..4a3af3d2e55 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -68,8 +68,36 @@ mockito-core test + + org.assertj + assertj-core + test + + + com.datastax.oss + java-driver-core + test + ${project.version} + test-jar + + + + src/test/resources + + project.properties + + true + + + src/test/resources + + project.properties + + false + + maven-surefire-plugin @@ -128,6 +156,24 @@ + + maven-dependency-plugin + + + generate-dependency-list + + list + + generate-resources + + + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/deps.txt + + diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/DependencyCheckTest.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/DependencyCheckTest.java new file mode 100644 index 00000000000..787d88fea4d --- /dev/null +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/DependencyCheckTest.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.api.mapper; + +import com.datastax.dse.driver.internal.DependencyCheckTestBase; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class DependencyCheckTest extends DependencyCheckTestBase { + + @Override + protected Path getDepsTxtPath() { + return Paths.get( + getBaseResourcePathString(), + "target", + "classes", + "com", + "datastax", + "dse", + "driver", + "internal", + "mapper", + "deps.txt"); + } +} diff --git a/mapper-runtime/src/test/resources/project.properties b/mapper-runtime/src/test/resources/project.properties new file mode 100644 index 00000000000..a977778735e --- /dev/null +++ b/mapper-runtime/src/test/resources/project.properties @@ -0,0 +1,17 @@ +# +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +project.basedir=${basedir} \ No newline at end of file diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d7e33b736d3..c87f29229dd 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -58,8 +58,31 @@ assertj-core test + + com.datastax.oss + java-driver-core + test + ${project.version} + test-jar + + + + src/test/resources + + project.properties + + true + + + src/test/resources + + project.properties + + false + + org.apache.felix @@ -72,6 +95,24 @@ + + maven-dependency-plugin + + + generate-dependency-list + + list + + generate-resources + + + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/querybuilder/deps.txt + + diff --git a/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/DependencyCheckTest.java b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/DependencyCheckTest.java new file mode 100644 index 00000000000..12f80e9d645 --- /dev/null +++ b/query-builder/src/test/java/com/datastax/dse/driver/internal/querybuilder/DependencyCheckTest.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.querybuilder; + +import com.datastax.dse.driver.internal.DependencyCheckTestBase; +import java.nio.file.Path; +import java.nio.file.Paths; + +public class DependencyCheckTest extends DependencyCheckTestBase { + + @Override + protected Path getDepsTxtPath() { + return Paths.get( + getBaseResourcePathString(), + "target", + "classes", + "com", + "datastax", + "dse", + "driver", + "internal", + "querybuilder", + "deps.txt"); + } +} diff --git a/query-builder/src/test/resources/project.properties b/query-builder/src/test/resources/project.properties new file mode 100644 index 00000000000..a977778735e --- /dev/null +++ b/query-builder/src/test/resources/project.properties @@ -0,0 +1,17 @@ +# +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +project.basedir=${basedir} \ No newline at end of file From 24757424b70b3e7bd889e94e8d1acf313ba70fec Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 3 Feb 2020 16:22:59 -0800 Subject: [PATCH 283/979] JAVA-2637: Bump Netty to 4.1.45 This required adjustements in a few unit tests, where `waitForPendingAdminTasks` methods relied on the fact that a task scheduled on the event loop with a short delay would be executed after any current immediate task. This doesn't work anymore, possibly after the event loop refactoring in Netty 4.1.44. It was replaced with a combination of Awaitility and timeouts in `Mockito.verify` calls. 
--- changelog/README.md | 1 + .../core/insights/InsightsClientTest.java | 4 +- .../control/ControlConnectionEventsTest.java | 39 ++-- .../core/control/ControlConnectionTest.java | 191 ++++++++---------- .../control/ControlConnectionTestBase.java | 23 +-- .../core/pool/ChannelPoolInitTest.java | 34 ++-- .../core/pool/ChannelPoolKeyspaceTest.java | 19 +- .../core/pool/ChannelPoolReconnectTest.java | 56 +++-- .../core/pool/ChannelPoolResizeTest.java | 114 +++++------ .../core/pool/ChannelPoolShutdownTest.java | 35 ++-- .../core/pool/ChannelPoolTestBase.java | 23 +-- .../core/session/DefaultSessionPoolsTest.java | 143 +++++-------- pom.xml | 4 +- 13 files changed, 287 insertions(+), 399 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 44806ff49a6..3a66f815849 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [improvement] JAVA-2637: Bump Netty to 4.1.45 - [bug] JAVA-2617: Reinstate generation of deps.txt for Insights - [new feature] JAVA-2625: Provide user-friendly programmatic configuration for kerberos - [improvement] JAVA-2624: Expose a config option for the connect timeout diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java index e9601a1f26d..7209e81019c 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/InsightsClientTest.java @@ -21,9 +21,9 @@ import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_AUTH_PROVIDER_PACKAGE; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_LOAD_BALANCING_PACKAGE; import static com.datastax.dse.driver.internal.core.insights.PackageUtil.DEFAULT_SPECULATIVE_EXECUTION_PACKAGE; +import static java.util.concurrent.TimeUnit.SECONDS; import static 
org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; -import static org.awaitility.Duration.ONE_SECOND; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -284,7 +284,7 @@ public void should_schedule_task_with_initial_delay() { InsightsClient.scheduleInsightsTask(100L, Executors.newScheduledThreadPool(1), runnable); // then - await().atMost(ONE_SECOND).until(() -> counter.get() >= 1); + await().atMost(1, SECONDS).until(() -> counter.get() >= 1); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionEventsTest.java index 7aaebe73b68..c92cf14de41 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionEventsTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.control; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -46,16 +47,19 @@ public void should_register_for_all_events_if_topology_requested() { // When controlConnection.init(true, false, false); - waitForPendingAdminTasks(); - DriverChannelOptions channelOptions = optionsCaptor.getValue(); // Then - assertThat(channelOptions.eventTypes) - .containsExactly( - ProtocolConstants.EventType.SCHEMA_CHANGE, - ProtocolConstants.EventType.STATUS_CHANGE, - ProtocolConstants.EventType.TOPOLOGY_CHANGE); - assertThat(channelOptions.eventCallback).isEqualTo(controlConnection); + await() + .untilAsserted( + () -> { + DriverChannelOptions channelOptions = optionsCaptor.getValue(); + 
assertThat(channelOptions.eventTypes) + .containsExactly( + ProtocolConstants.EventType.SCHEMA_CHANGE, + ProtocolConstants.EventType.STATUS_CHANGE, + ProtocolConstants.EventType.TOPOLOGY_CHANGE); + assertThat(channelOptions.eventCallback).isEqualTo(controlConnection); + }); } @Test @@ -69,13 +73,16 @@ public void should_register_for_schema_events_only_if_topology_not_requested() { // When controlConnection.init(false, false, false); - waitForPendingAdminTasks(); - DriverChannelOptions channelOptions = optionsCaptor.getValue(); // Then - assertThat(channelOptions.eventTypes) - .containsExactly(ProtocolConstants.EventType.SCHEMA_CHANGE); - assertThat(channelOptions.eventCallback).isEqualTo(controlConnection); + await() + .untilAsserted( + () -> { + DriverChannelOptions channelOptions = optionsCaptor.getValue(); + assertThat(channelOptions.eventTypes) + .containsExactly(ProtocolConstants.EventType.SCHEMA_CHANGE); + assertThat(channelOptions.eventCallback).isEqualTo(controlConnection); + }); } @Test @@ -87,7 +94,7 @@ public void should_process_status_change_events() { when(channelFactory.connect(eq(node1), optionsCaptor.capture())) .thenReturn(CompletableFuture.completedFuture(channel1)); controlConnection.init(true, false, false); - waitForPendingAdminTasks(); + await().until(() -> optionsCaptor.getValue() != null); EventCallback callback = optionsCaptor.getValue().eventCallback; StatusChangeEvent event = new StatusChangeEvent(ProtocolConstants.StatusChangeType.UP, ADDRESS1); @@ -108,7 +115,7 @@ public void should_process_topology_change_events() { when(channelFactory.connect(eq(node1), optionsCaptor.capture())) .thenReturn(CompletableFuture.completedFuture(channel1)); controlConnection.init(true, false, false); - waitForPendingAdminTasks(); + await().until(() -> optionsCaptor.getValue() != null); EventCallback callback = optionsCaptor.getValue().eventCallback; TopologyChangeEvent event = new TopologyChangeEvent(ProtocolConstants.TopologyChangeType.NEW_NODE, 
ADDRESS1); @@ -129,7 +136,7 @@ public void should_process_schema_change_events() { when(channelFactory.connect(eq(node1), optionsCaptor.capture())) .thenReturn(CompletableFuture.completedFuture(channel1)); controlConnection.init(false, false, false); - waitForPendingAdminTasks(); + await().until(() -> optionsCaptor.getValue() != null); EventCallback callback = optionsCaptor.getValue().eventCallback; SchemaChangeEvent event = new SchemaChangeEvent( diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTest.java index 845c0435aa4..edaa9e90060 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTest.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -34,6 +35,7 @@ import java.time.Duration; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; import org.junit.Test; import org.junit.runner.RunWith; @@ -59,12 +61,11 @@ public void should_init_with_first_contact_point_if_reachable() { // When CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); // Then - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, 
VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); factoryHelper.verifyNoMoreCalls(); } @@ -101,13 +102,12 @@ public void should_init_with_second_contact_point_if_first_one_fails() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); // Then assertThatStage(initFuture) .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel2)); - verify(eventBus).fire(ChannelEvent.controlConnectionFailed(node1)); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.controlConnectionFailed(node1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); // each attempt tries all nodes, so there is no reconnection verify(reconnectionPolicy, never()).newNodeSchedule(any(Node.class)); @@ -127,12 +127,11 @@ public void should_fail_to_init_if_all_contact_points_fail() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); // Then assertThatStage(initFuture).isFailed(); - verify(eventBus).fire(ChannelEvent.controlConnectionFailed(node1)); - verify(eventBus).fire(ChannelEvent.controlConnectionFailed(node2)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.controlConnectionFailed(node1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.controlConnectionFailed(node2)); // no reconnections at init verify(reconnectionPolicy, never()).newNodeSchedule(any(Node.class)); @@ -155,26 +154,23 @@ public void should_reconnect_if_channel_goes_down() throws Exception { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - 
verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // When channel1.close(); - waitForPendingAdminTasks(); // Then // a reconnection was started - verify(reconnectionSchedule).nextDelay(); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); factoryHelper.waitForCall(node1); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); - assertThat(controlConnection.channel()).isEqualTo(channel2); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); - verify(metadataManager).refreshNodes(); - verify(loadBalancingPolicyWrapper).init(); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel2)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); + verify(metadataManager, VERIFY_TIMEOUT).refreshNodes(); + verify(loadBalancingPolicyWrapper, VERIFY_TIMEOUT).init(); factoryHelper.verifyNoMoreCalls(); } @@ -194,26 +190,23 @@ public void should_reconnect_if_node_becomes_ignored() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // When mockQueryPlan(node2); eventBus.fire(new DistanceEvent(NodeDistance.IGNORED, node1)); - waitForPendingAdminTasks(); // Then // an immediate reconnection was started - verify(reconnectionSchedule, 
never()).nextDelay(); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); - assertThat(controlConnection.channel()).isEqualTo(channel2); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); - verify(metadataManager).refreshNodes(); - verify(loadBalancingPolicyWrapper).init(); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel2)); + verify(reconnectionSchedule, never()).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); + verify(metadataManager, VERIFY_TIMEOUT).refreshNodes(); + verify(loadBalancingPolicyWrapper, VERIFY_TIMEOUT).init(); factoryHelper.verifyNoMoreCalls(); } @@ -242,26 +235,23 @@ private void should_reconnect_if_event(NodeStateEvent event) { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // When mockQueryPlan(node2); eventBus.fire(event); - waitForPendingAdminTasks(); // Then // an immediate reconnection was started - verify(reconnectionSchedule, never()).nextDelay(); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); - assertThat(controlConnection.channel()).isEqualTo(channel2); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); - verify(metadataManager).refreshNodes(); - verify(loadBalancingPolicyWrapper).init(); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel2)); + 
verify(reconnectionSchedule, never()).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); + verify(metadataManager, VERIFY_TIMEOUT).refreshNodes(); + verify(loadBalancingPolicyWrapper, VERIFY_TIMEOUT).init(); factoryHelper.verifyNoMoreCalls(); } @@ -286,17 +276,15 @@ public void should_reconnect_if_node_became_ignored_during_reconnection_attempt( CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); mockQueryPlan(node2, node1); // channel1 goes down, triggering a reconnection channel1.close(); - waitForPendingAdminTasks(); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(reconnectionSchedule).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); // the reconnection to node2 is in progress factoryHelper.waitForCall(node2); @@ -305,11 +293,10 @@ public void should_reconnect_if_node_became_ignored_during_reconnection_attempt( eventBus.fire(new DistanceEvent(NodeDistance.IGNORED, node2)); // the reconnection to node2 completes channel2Future.complete(channel2); - waitForPendingAdminTasks(); // Then // The channel should get closed and we should try the next node - verify(channel2).forceClose(); + verify(channel2, VERIFY_TIMEOUT).forceClose(); factoryHelper.waitForCall(node1); } @@ -343,17 +330,15 @@ private void should_reconnect_if_event_during_reconnection_attempt(NodeStateEven CompletionStage initFuture = 
controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); mockQueryPlan(node2, node1); // channel1 goes down, triggering a reconnection channel1.close(); - waitForPendingAdminTasks(); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(reconnectionSchedule).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); // the reconnection to node2 is in progress factoryHelper.waitForCall(node2); @@ -362,11 +347,10 @@ private void should_reconnect_if_event_during_reconnection_attempt(NodeStateEven eventBus.fire(event); // the reconnection to node2 completes channel2Future.complete(channel2); - waitForPendingAdminTasks(); // Then // The channel should get closed and we should try the next node - verify(channel2).forceClose(); + verify(channel2, VERIFY_TIMEOUT).forceClose(); factoryHelper.waitForCall(node1); } @@ -386,26 +370,23 @@ public void should_force_reconnection_if_pending() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // the channel fails and a reconnection is scheduled for later channel1.close(); - waitForPendingAdminTasks(); - 
verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(reconnectionSchedule).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); // When controlConnection.reconnectNow(); factoryHelper.waitForCall(node1); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); // Then - assertThat(controlConnection.channel()).isEqualTo(channel2); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel2)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); factoryHelper.verifyNoMoreCalls(); } @@ -424,10 +405,9 @@ public void should_force_reconnection_even_if_connected() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // When controlConnection.reconnectNow(); @@ -435,41 +415,39 @@ public void should_force_reconnection_even_if_connected() { // Then factoryHelper.waitForCall(node1); factoryHelper.waitForCall(node2); - waitForPendingAdminTasks(); - assertThat(controlConnection.channel()).isEqualTo(channel2); - verify(channel1).forceClose(); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(eventBus).fire(ChannelEvent.channelOpened(node2)); + await().untilAsserted(() -> assertThat(controlConnection.channel()).isEqualTo(channel2)); + verify(channel1, VERIFY_TIMEOUT).forceClose(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(eventBus, 
VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node2)); factoryHelper.verifyNoMoreCalls(); } @Test - public void should_not_force_reconnection_if_not_init() { + public void should_not_force_reconnection_if_not_init() throws InterruptedException { // When controlConnection.reconnectNow(); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(500); // Then verify(reconnectionSchedule, never()).nextDelay(); } @Test - public void should_not_force_reconnection_if_closed() { + public void should_not_force_reconnection_if_closed() throws InterruptedException { // Given DriverChannel channel1 = newMockDriverChannel(1); MockChannelFactoryHelper factoryHelper = MockChannelFactoryHelper.builder(channelFactory).success(node1, channel1).build(); CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); CompletionStage closeFuture = controlConnection.forceCloseAsync(); assertThatStage(closeFuture).isSuccess(); // When controlConnection.reconnectNow(); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(500); // Then verify(reconnectionSchedule, never()).nextDelay(); @@ -486,16 +464,14 @@ public void should_close_channel_when_closing() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); // When CompletionStage closeFuture = controlConnection.forceCloseAsync(); - waitForPendingAdminTasks(); // Then assertThatStage(closeFuture).isSuccess(); - verify(channel1).forceClose(); + verify(channel1, VERIFY_TIMEOUT).forceClose(); factoryHelper.verifyNoMoreCalls(); } @@ -517,29 +493,26 @@ public void should_close_channel_if_closed_during_reconnection() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); 
- assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // the channel fails and a reconnection is scheduled channel1.close(); - waitForPendingAdminTasks(); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(reconnectionSchedule).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); factoryHelper.waitForCall(node1); // channel2 starts initializing (but the future is not completed yet) factoryHelper.waitForCall(node2); // When // the control connection gets closed before channel2 initialization is complete - controlConnection.forceCloseAsync(); - waitForPendingAdminTasks(); + CompletionStage closeFuture = controlConnection.forceCloseAsync(); + assertThatStage(closeFuture).isSuccess(); channel2Future.complete(channel2); - waitForPendingAdminTasks(); // Then - verify(channel2).forceClose(); + verify(channel2, VERIFY_TIMEOUT).forceClose(); // no event because the control connection never "owned" the channel verify(eventBus, never()).fire(ChannelEvent.channelOpened(node2)); verify(eventBus, never()).fire(ChannelEvent.channelClosed(node2)); @@ -564,24 +537,22 @@ public void should_handle_channel_failure_if_closed_during_reconnection() { CompletionStage initFuture = controlConnection.init(false, false, false); factoryHelper.waitForCall(node1); - waitForPendingAdminTasks(); - assertThatStage(initFuture).isSuccess(); - assertThat(controlConnection.channel()).isEqualTo(channel1); - verify(eventBus).fire(ChannelEvent.channelOpened(node1)); + assertThatStage(initFuture) + .isSuccess(v -> assertThat(controlConnection.channel()).isEqualTo(channel1)); + verify(eventBus, 
VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node1)); // the channel fails and a reconnection is scheduled channel1.close(); - waitForPendingAdminTasks(); - verify(eventBus).fire(ChannelEvent.channelClosed(node1)); - verify(reconnectionSchedule).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node1)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); // channel1 starts initializing (but the future is not completed yet) factoryHelper.waitForCall(node1); // When // the control connection gets closed before channel1 initialization fails - controlConnection.forceCloseAsync(); + CompletionStage closeFuture = controlConnection.forceCloseAsync(); + assertThatStage(closeFuture).isSuccess(); channel1Future.completeExceptionally(new Exception("mock failure")); - waitForPendingAdminTasks(); // Then // should never try channel2 because the reconnection has detected that it can stop after the diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java index 7e0ee752d0e..ca349d135a1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java @@ -15,12 +15,12 @@ */ package com.datastax.oss.driver.internal.core.control; -import static org.assertj.core.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -40,29 +40,29 @@ import com.datastax.oss.driver.internal.core.metadata.MetadataManager; 
import com.datastax.oss.driver.internal.core.metadata.TestNodeFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import io.netty.channel.Channel; import io.netty.channel.DefaultChannelPromise; import io.netty.channel.DefaultEventLoopGroup; import io.netty.channel.EventLoop; -import io.netty.util.concurrent.Future; import java.net.InetSocketAddress; import java.time.Duration; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Exchanger; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import org.junit.After; import org.junit.Before; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.mockito.verification.VerificationWithTimeout; abstract class ControlConnectionTestBase { protected static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); protected static final InetSocketAddress ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); + /** How long we wait when verifying mocks for async invocations */ + protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(500); + @Mock protected InternalDriverContext context; @Mock protected DriverConfig config; @Mock protected DriverExecutionProfile defaultProfile; @@ -174,17 +174,4 @@ protected DriverChannel newMockDriverChannel(int id) { .thenReturn(new DefaultEndPoint(new InetSocketAddress("127.0.0." + id, 9042))); return driverChannel; } - - // Wait for all the tasks on the admin executor to complete. 
- protected void waitForPendingAdminTasks() { - // This works because the event loop group is single-threaded - Future f = adminEventLoopGroup.schedule(() -> null, 5, TimeUnit.NANOSECONDS); - try { - Uninterruptibles.getUninterruptibly(f, 100, TimeUnit.MILLISECONDS); - } catch (ExecutionException e) { - fail("unexpected error", e.getCause()); - } catch (TimeoutException e) { - fail("timed out while waiting for admin tasks to complete", e); - } - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolInitTest.java index 3acfeb3b65d..3ca09a65092 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolInitTest.java @@ -17,9 +17,9 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.awaitility.Awaitility.await; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -58,11 +58,10 @@ public void should_initialize_when_all_channels_succeed() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); assertThatStage(poolFuture) .isSuccess(pool -> assertThat(pool.channels).containsOnly(channel1, channel2, channel3)); - verify(eventBus, times(3)).fire(ChannelEvent.channelOpened(node)); + verify(eventBus, VERIFY_TIMEOUT.times(3)).fire(ChannelEvent.channelOpened(node)); factoryHelper.verifyNoMoreCalls(); } @@ -82,11 +81,10 @@ public void should_initialize_when_all_channels_fail() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); 
factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(pool -> assertThat(pool.channels).isEmpty()); verify(eventBus, never()).fire(ChannelEvent.channelOpened(node)); - verify(nodeMetricUpdater, times(3)) + verify(nodeMetricUpdater, VERIFY_TIMEOUT.times(3)) .incrementCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); factoryHelper.verifyNoMoreCalls(); @@ -107,12 +105,11 @@ public void should_indicate_when_keyspace_failed_on_all_channels() { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); assertThatStage(poolFuture) .isSuccess( pool -> { assertThat(pool.isInvalidKeyspace()).isTrue(); - verify(nodeMetricUpdater, times(3)) + verify(nodeMetricUpdater, VERIFY_TIMEOUT.times(3)) .incrementCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); }); } @@ -133,12 +130,12 @@ public void should_fire_force_down_event_when_cluster_name_does_not_match() thro ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); - verify(eventBus).fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); + verify(eventBus, VERIFY_TIMEOUT) + .fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); verify(eventBus, never()).fire(ChannelEvent.channelOpened(node)); - verify(nodeMetricUpdater, times(3)) + verify(nodeMetricUpdater, VERIFY_TIMEOUT.times(3)) .incrementCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); factoryHelper.verifyNoMoreCalls(); } @@ -167,26 +164,25 @@ public void should_reconnect_when_init_incomplete() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1); - 
inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); // A reconnection should have been scheduled - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); channel2Future.complete(channel2); factoryHelper.waitForCalls(node, 1); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel2); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel1, channel2)); - verify(nodeMetricUpdater).incrementCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); + verify(nodeMetricUpdater, VERIFY_TIMEOUT) + .incrementCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); factoryHelper.verifyNoMoreCalls(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolKeyspaceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolKeyspaceTest.java index a5a6e33c821..85631ca5ab6 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolKeyspaceTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolKeyspaceTest.java @@ -49,7 +49,6 @@ public void should_switch_keyspace_on_existing_channels() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = 
poolFuture.toCompletableFuture().get(); @@ -57,10 +56,9 @@ public void should_switch_keyspace_on_existing_channels() throws Exception { CqlIdentifier newKeyspace = CqlIdentifier.fromCql("new_keyspace"); CompletionStage setKeyspaceFuture = pool.setKeyspace(newKeyspace); - waitForPendingAdminTasks(); - verify(channel1).setKeyspace(newKeyspace); - verify(channel2).setKeyspace(newKeyspace); + verify(channel1, VERIFY_TIMEOUT).setKeyspace(newKeyspace); + verify(channel2, VERIFY_TIMEOUT).setKeyspace(newKeyspace); assertThatStage(setKeyspaceFuture).isSuccess(); @@ -91,30 +89,27 @@ public void should_switch_keyspace_on_pending_channels() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); // Check that reconnection has kicked in, but do not complete it yet - verify(reconnectionSchedule).nextDelay(); - verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); factoryHelper.waitForCalls(node, 2); // Switch keyspace, it succeeds immediately since there is no active channel CqlIdentifier newKeyspace = CqlIdentifier.fromCql("new_keyspace"); CompletionStage setKeyspaceFuture = pool.setKeyspace(newKeyspace); - waitForPendingAdminTasks(); assertThatStage(setKeyspaceFuture).isSuccess(); // Now let the two channels succeed to complete the reconnection channel1Future.complete(channel1); channel2Future.complete(channel2); - waitForPendingAdminTasks(); - verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); - verify(channel1).setKeyspace(newKeyspace); - verify(channel2).setKeyspace(newKeyspace); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); + verify(channel1, VERIFY_TIMEOUT).setKeyspace(newKeyspace); + 
verify(channel2, VERIFY_TIMEOUT).setKeyspace(newKeyspace); factoryHelper.verifyNoMoreCalls(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolReconnectTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolReconnectTest.java index a932bfb4bea..f6b811ddbe7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolReconnectTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolReconnectTest.java @@ -17,10 +17,10 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -34,6 +34,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import org.junit.Test; import org.mockito.InOrder; @@ -63,29 +64,26 @@ public void should_reconnect_when_channel_closes() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1, channel2); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); // Simulate fatal error on channel2 ((ChannelPromise) channel2.closeFuture()) .setFailure(new Exception("mock channel init failure")); - waitForPendingAdminTasks(); - 
inOrder.verify(eventBus).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node)); - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); factoryHelper.waitForCall(node); channel3Future.complete(channel3); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); - verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel3); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel1, channel3)); factoryHelper.verifyNoMoreCalls(); } @@ -114,28 +112,25 @@ public void should_reconnect_when_channel_starts_graceful_shutdown() throws Exce ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1, channel2); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); // Simulate graceful shutdown on channel2 ((ChannelPromise) channel2.closeStartedFuture()).setSuccess(); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node)); - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, 
VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); factoryHelper.waitForCall(node); channel3Future.complete(channel3); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); - verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel3); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel1, channel3)); factoryHelper.verifyNoMoreCalls(); } @@ -164,33 +159,30 @@ public void should_let_current_attempt_complete_when_reconnecting_now() CompletionStage poolFuture = ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 1); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); - inOrder.verify(eventBus, times(1)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(1)).fire(ChannelEvent.channelOpened(node)); // Kill channel1, reconnection begins and starts initializing channel2, but the initialization // is still pending (channel2Future not completed) ((ChannelPromise) channel1.closeStartedFuture()).setSuccess(); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelClosed(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); - verify(reconnectionSchedule).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); factoryHelper.waitForCalls(node, 1); // Force a reconnection, should not try to create a new channel since we have a 
pending one pool.reconnectNow(); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); factoryHelper.verifyNoMoreCalls(); inOrder.verify(eventBus, never()).fire(any()); // Complete the initialization of channel2, reconnection succeeds channel2Future.complete(channel2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); - verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); + verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel2); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel2)); factoryHelper.verifyNoMoreCalls(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java index 57e5cf145eb..da1c5e3d2a2 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java @@ -17,9 +17,9 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.awaitility.Awaitility.await; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -32,6 +32,7 @@ import java.time.Duration; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; import org.junit.Test; import org.mockito.InOrder; @@ -59,19 +60,17 @@ public void should_shrink_outside_of_reconnection() throws Exception { ChannelPool.init(node, null, NodeDistance.REMOTE, context, "test"); 
factoryHelper.waitForCalls(node, 4); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4); - inOrder.verify(eventBus, times(4)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(4)).fire(ChannelEvent.channelOpened(node)); pool.resize(NodeDistance.LOCAL); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelClosed(node)); - assertThat(pool.channels).containsOnly(channel3, channel4); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -106,9 +105,8 @@ public void should_shrink_during_reconnection() throws Exception { ChannelPool.init(node, null, NodeDistance.REMOTE, context, "test"); factoryHelper.waitForCalls(node, 4); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1, channel2); @@ -119,20 +117,19 @@ public void should_shrink_during_reconnection() throws Exception { pool.resize(NodeDistance.LOCAL); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); // Now allow the reconnected channels to complete initialization channel3Future.complete(channel3); channel4Future.complete(channel4); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); // Pool should have shrinked back to 2. We keep the most recent channels so 1 and 2 get closed. 
- inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelClosed(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel3, channel4); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -163,26 +160,25 @@ public void should_grow_outside_of_reconnection() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1, channel2); pool.resize(NodeDistance.REMOTE); - waitForPendingAdminTasks(); // The resizing should have triggered a reconnection - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, 
VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4); + await() + .untilAsserted( + () -> assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -218,31 +214,29 @@ public void should_grow_during_reconnection() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1); // A reconnection should have been scheduled to add the missing channel, don't complete yet - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); pool.resize(NodeDistance.REMOTE); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); // Complete the channel for the first reconnection, bringing the count to 2 channel2Future.complete(channel2); factoryHelper.waitForCall(node); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); - assertThat(pool.channels).containsOnly(channel1, channel2); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel1, channel2)); // A second attempt should have been scheduled since we're now still under the target size - verify(reconnectionSchedule, times(2)).nextDelay(); + verify(reconnectionSchedule, VERIFY_TIMEOUT.times(2)).nextDelay(); // Same reconnection is still 
running, no additional events inOrder.verify(eventBus, never()).fire(ChannelEvent.reconnectionStopped(node)); inOrder.verify(eventBus, never()).fire(ChannelEvent.reconnectionStarted(node)); @@ -251,11 +245,12 @@ public void should_grow_during_reconnection() throws Exception { factoryHelper.waitForCalls(node, 2); channel3Future.complete(channel3); channel4Future.complete(channel4); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4); + await() + .untilAsserted( + () -> assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -285,8 +280,7 @@ public void should_resize_outside_of_reconnection_if_config_changes() throws Exc ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); @@ -295,18 +289,18 @@ public void should_resize_outside_of_reconnection_if_config_changes() throws Exc // Simulate a configuration change when(defaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)).thenReturn(4); eventBus.fire(ConfigChangeEvent.INSTANCE); - waitForPendingAdminTasks(); // It should have triggered a reconnection - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, 
VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4); + await() + .untilAsserted( + () -> assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -341,32 +335,30 @@ public void should_resize_during_reconnection_if_config_changes() throws Excepti ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); assertThat(pool.channels).containsOnly(channel1); // A reconnection should have been scheduled to add the missing channel, don't complete yet - verify(reconnectionSchedule).nextDelay(); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStarted(node)); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStarted(node)); // Simulate a configuration change when(defaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)).thenReturn(4); eventBus.fire(ConfigChangeEvent.INSTANCE); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); // Complete the channel for the first reconnection, bringing the count to 2 channel2Future.complete(channel2); 
factoryHelper.waitForCall(node); - waitForPendingAdminTasks(); - inOrder.verify(eventBus).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.channelOpened(node)); - assertThat(pool.channels).containsOnly(channel1, channel2); + await().untilAsserted(() -> assertThat(pool.channels).containsOnly(channel1, channel2)); // A second attempt should have been scheduled since we're now still under the target size - verify(reconnectionSchedule, times(2)).nextDelay(); + verify(reconnectionSchedule, VERIFY_TIMEOUT.times(2)).nextDelay(); // Same reconnection is still running, no additional events inOrder.verify(eventBus, never()).fire(ChannelEvent.reconnectionStopped(node)); inOrder.verify(eventBus, never()).fire(ChannelEvent.reconnectionStarted(node)); @@ -375,11 +367,12 @@ public void should_resize_during_reconnection_if_config_changes() throws Excepti factoryHelper.waitForCalls(node, 2); channel3Future.complete(channel3); channel4Future.complete(channel4); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); - inOrder.verify(eventBus).fire(ChannelEvent.reconnectionStopped(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); - assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4); + await() + .untilAsserted( + () -> assertThat(pool.channels).containsOnly(channel1, channel2, channel3, channel4)); factoryHelper.verifyNoMoreCalls(); } @@ -403,8 +396,7 @@ public void should_ignore_config_change_if_not_relevant() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 2); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, 
VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); @@ -413,7 +405,7 @@ public void should_ignore_config_change_if_not_relevant() throws Exception { // Config changes, but not for our distance when(defaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE)).thenReturn(1); eventBus.fire(ConfigChangeEvent.INSTANCE); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); // It should not have triggered a reconnection verify(reconnectionSchedule, never()).nextDelay(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java index 3efb2147247..b6249ac2554 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java @@ -18,7 +18,6 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -62,37 +61,33 @@ public void should_close_all_channels_when_closed() throws Exception { ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, times(3)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(3)).fire(ChannelEvent.channelOpened(node)); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); // Simulate graceful shutdown on channel3 ((ChannelPromise) channel3.closeStartedFuture()).setSuccess(); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, 
times(1)).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(1)).fire(ChannelEvent.channelClosed(node)); // Reconnection should have kicked in and started to open channel4, do not complete it yet verify(reconnectionSchedule).nextDelay(); factoryHelper.waitForCalls(node, 1); CompletionStage closeFuture = pool.closeAsync(); - waitForPendingAdminTasks(); // The two original channels were closed normally - verify(channel1).close(); - verify(channel2).close(); - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelClosed(node)); + verify(channel1, VERIFY_TIMEOUT).close(); + verify(channel2, VERIFY_TIMEOUT).close(); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelClosed(node)); // The closing channel was not closed again verify(channel3, never()).close(); // Complete the reconnecting channel channel4Future.complete(channel4); - waitForPendingAdminTasks(); // It should be force-closed once we find out the pool was closed - verify(channel4).forceClose(); + verify(channel4, VERIFY_TIMEOUT).forceClose(); // No events because the channel was never really associated to the pool inOrder.verify(eventBus, never()).fire(ChannelEvent.channelOpened(node)); inOrder.verify(eventBus, never()).fire(ChannelEvent.channelClosed(node)); @@ -133,37 +128,33 @@ public void should_force_close_all_channels_when_force_closed() throws Exception ChannelPool.init(node, null, NodeDistance.LOCAL, context, "test"); factoryHelper.waitForCalls(node, 3); - waitForPendingAdminTasks(); assertThatStage(poolFuture).isSuccess(); ChannelPool pool = poolFuture.toCompletableFuture().get(); - inOrder.verify(eventBus, times(3)).fire(ChannelEvent.channelOpened(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(3)).fire(ChannelEvent.channelOpened(node)); // Simulate graceful shutdown on channel3 ((ChannelPromise) channel3.closeStartedFuture()).setSuccess(); - waitForPendingAdminTasks(); - inOrder.verify(eventBus, 
times(1)).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(1)).fire(ChannelEvent.channelClosed(node)); // Reconnection should have kicked in and started to open a channel, do not complete it yet verify(reconnectionSchedule).nextDelay(); factoryHelper.waitForCalls(node, 1); CompletionStage closeFuture = pool.forceCloseAsync(); - waitForPendingAdminTasks(); // The three original channels were force-closed - verify(channel1).forceClose(); - verify(channel2).forceClose(); - verify(channel3).forceClose(); + verify(channel1, VERIFY_TIMEOUT).forceClose(); + verify(channel2, VERIFY_TIMEOUT).forceClose(); + verify(channel3, VERIFY_TIMEOUT).forceClose(); // Only two events because the one for channel3 was sent earlier - inOrder.verify(eventBus, times(2)).fire(ChannelEvent.channelClosed(node)); + inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelClosed(node)); // Complete the reconnecting channel channel4Future.complete(channel4); - waitForPendingAdminTasks(); // It should be force-closed once we find out the pool was closed - verify(channel4).forceClose(); + verify(channel4, VERIFY_TIMEOUT).forceClose(); // No events because the channel was never really associated to the pool inOrder.verify(eventBus, never()).fire(ChannelEvent.channelOpened(node)); inOrder.verify(eventBus, never()).fire(ChannelEvent.channelClosed(node)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java index 16164c950e3..cc18e7d2842 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java @@ -15,10 +15,10 @@ */ package com.datastax.oss.driver.internal.core.pool; -import static org.assertj.core.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static 
org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.CqlIdentifier; @@ -35,23 +35,23 @@ import com.datastax.oss.driver.internal.core.metadata.TestNodeFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; -import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import io.netty.channel.Channel; import io.netty.channel.DefaultChannelPromise; import io.netty.channel.DefaultEventLoopGroup; import io.netty.channel.EventLoop; -import io.netty.util.concurrent.Future; import java.time.Duration; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import org.junit.After; import org.junit.Before; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.mockito.verification.VerificationWithTimeout; abstract class ChannelPoolTestBase { + /** How long we wait when verifying mocks for async invocations */ + protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(500); + @Mock protected InternalDriverContext context; @Mock private DriverConfig config; @Mock protected DriverExecutionProfile defaultProfile; @@ -111,17 +111,4 @@ DriverChannel newMockDriverChannel(int id) { when(driverChannel.toString()).thenReturn("channel" + id); return driverChannel; } - - // Wait for all the tasks on the pool's admin executor to complete. 
- void waitForPendingAdminTasks() { - // This works because the event loop group is single-threaded - Future f = adminEventLoopGroup.schedule(() -> null, 5, TimeUnit.NANOSECONDS); - try { - Uninterruptibles.getUninterruptibly(f, 100, TimeUnit.MILLISECONDS); - } catch (ExecutionException e) { - fail("unexpected error", e.getCause()); - } catch (TimeoutException e) { - fail("timed out while waiting for admin tasks to complete", e); - } - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java index 7d2a66fab02..b42f281a02a 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java @@ -17,7 +17,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; -import static org.assertj.core.api.Assertions.fail; +import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -60,10 +60,8 @@ import com.datastax.oss.driver.internal.core.pool.ChannelPoolFactory; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import io.netty.channel.DefaultEventLoopGroup; import io.netty.util.concurrent.DefaultPromise; -import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GlobalEventExecutor; import java.time.Duration; import java.util.Collections; @@ -71,17 +69,18 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; -import 
java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.mockito.verification.VerificationWithTimeout; public class DefaultSessionPoolsTest { private static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); + /** How long we wait when verifying mocks for async invocations */ + protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(500); @Mock private InternalDriverContext context; @Mock private NettyOptions nettyOptions; @@ -213,14 +212,12 @@ public void should_initialize_pools_with_distances() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.REMOTE); - waitForPendingAdminTasks(); assertThatStage(initFuture).isNotDone(); pool1Future.complete(pool1); pool2Future.complete(pool2); pool3Future.complete(pool3); - waitForPendingAdminTasks(); assertThatStage(initFuture) .isSuccess( @@ -246,7 +243,6 @@ public void should_not_connect_to_ignored_nodes() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture) .isSuccess( session -> @@ -270,7 +266,6 @@ public void should_not_connect_to_forced_down_nodes() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture) .isSuccess( session -> @@ -297,7 +292,6 @@ public void should_adjust_distance_if_changed_while_init() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - 
waitForPendingAdminTasks(); assertThatStage(initFuture).isNotDone(); @@ -307,9 +301,8 @@ public void should_adjust_distance_if_changed_while_init() { pool1Future.complete(pool1); pool2Future.complete(pool2); pool3Future.complete(pool3); - waitForPendingAdminTasks(); - verify(pool2).resize(NodeDistance.REMOTE); + verify(pool2, VERIFY_TIMEOUT).resize(NodeDistance.REMOTE); assertThatStage(initFuture) .isSuccess( @@ -338,7 +331,6 @@ public void should_remove_pool_if_ignored_while_init() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isNotDone(); @@ -348,9 +340,8 @@ public void should_remove_pool_if_ignored_while_init() { pool1Future.complete(pool1); pool2Future.complete(pool2); pool3Future.complete(pool3); - waitForPendingAdminTasks(); - verify(pool2).closeAsync(); + verify(pool2, VERIFY_TIMEOUT).closeAsync(); assertThatStage(initFuture) .isSuccess( @@ -378,7 +369,6 @@ public void should_remove_pool_if_forced_down_while_init() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isNotDone(); @@ -388,9 +378,8 @@ public void should_remove_pool_if_forced_down_while_init() { pool1Future.complete(pool1); pool2Future.complete(pool2); pool3Future.complete(pool3); - waitForPendingAdminTasks(); - verify(pool2).closeAsync(); + verify(pool2, VERIFY_TIMEOUT).closeAsync(); assertThatStage(initFuture) .isSuccess( @@ -415,7 +404,6 @@ public void should_resize_pool_if_distance_changes() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - 
waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); eventBus.fire(new DistanceEvent(NodeDistance.REMOTE, node2)); @@ -439,18 +427,20 @@ public void should_remove_pool_if_node_becomes_ignored() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); eventBus.fire(new DistanceEvent(NodeDistance.IGNORED, node2)); verify(pool2, timeout(500)).closeAsync(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); + await() + .untilAsserted( + () -> assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3)); } @Test - public void should_do_nothing_if_node_becomes_ignored_but_was_already_ignored() { + public void should_do_nothing_if_node_becomes_ignored_but_was_already_ignored() + throws InterruptedException { ChannelPool pool1 = mockPool(node1); ChannelPool pool2 = mockPool(node2); ChannelPool pool3 = mockPool(node3); @@ -466,7 +456,6 @@ public void should_do_nothing_if_node_becomes_ignored_but_was_already_ignored() factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); eventBus.fire(new DistanceEvent(NodeDistance.IGNORED, node2)); @@ -477,7 +466,7 @@ public void should_do_nothing_if_node_becomes_ignored_but_was_already_ignored() // Fire the same event again, nothing should happen eventBus.fire(new DistanceEvent(NodeDistance.IGNORED, node2)); - waitForPendingAdminTasks(); + TimeUnit.MILLISECONDS.sleep(200); factoryHelper.verifyNoMoreCalls(); } @@ -501,7 +490,6 @@ public void 
should_recreate_pool_if_node_becomes_not_ignored() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); @@ -509,8 +497,11 @@ public void should_recreate_pool_if_node_becomes_not_ignored() { eventBus.fire(new DistanceEvent(NodeDistance.LOCAL, node2)); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool2, pool3); + await() + .untilAsserted( + () -> + assertThat(((DefaultSession) session).getPools()) + .containsValues(pool1, pool2, pool3)); } @Test @@ -530,14 +521,15 @@ public void should_remove_pool_if_node_is_forced_down() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); eventBus.fire(NodeStateEvent.changed(NodeState.UP, NodeState.FORCED_DOWN, node2)); verify(pool2, timeout(500)).closeAsync(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); + await() + .untilAsserted( + () -> assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3)); } @Test @@ -560,15 +552,17 @@ public void should_recreate_pool_if_node_is_forced_back_up() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = 
CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); eventBus.fire(NodeStateEvent.changed(NodeState.FORCED_DOWN, NodeState.UP, node2)); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool2, pool3); + await() + .untilAsserted( + () -> + assertThat(((DefaultSession) session).getPools()) + .containsValues(pool1, pool2, pool3)); } @Test @@ -589,15 +583,15 @@ public void should_not_recreate_pool_if_node_is_forced_back_up_but_ignored() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); eventBus.fire(NodeStateEvent.changed(NodeState.FORCED_DOWN, NodeState.UP, node2)); - waitForPendingAdminTasks(); + await() + .untilAsserted( + () -> assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3)); factoryHelper.verifyNoMoreCalls(); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); } @Test @@ -621,7 +615,6 @@ public void should_adjust_distance_if_changed_while_recreating() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); @@ -635,12 +628,14 @@ public void should_adjust_distance_if_changed_while_recreating() { // Now pool init succeeds pool2Future.complete(pool2); - waitForPendingAdminTasks(); // 
Pool should have been adjusted - verify(pool2).resize(NodeDistance.REMOTE); - - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool2, pool3); + verify(pool2, VERIFY_TIMEOUT).resize(NodeDistance.REMOTE); + await() + .untilAsserted( + () -> + assertThat(((DefaultSession) session).getPools()) + .containsValues(pool1, pool2, pool3)); } @Test @@ -664,7 +659,6 @@ public void should_remove_pool_if_ignored_while_recreating() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); @@ -678,12 +672,13 @@ public void should_remove_pool_if_ignored_while_recreating() { // Now pool init succeeds pool2Future.complete(pool2); - waitForPendingAdminTasks(); // Pool should have been closed - verify(pool2).closeAsync(); + verify(pool2, VERIFY_TIMEOUT).closeAsync(); - assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); + await() + .untilAsserted( + () -> assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3)); } @Test @@ -707,7 +702,6 @@ public void should_remove_pool_if_forced_down_while_recreating() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); @@ -721,12 +715,12 @@ public void should_remove_pool_if_forced_down_while_recreating() { // Now pool init succeeds pool2Future.complete(pool2); - waitForPendingAdminTasks(); // Pool should have been closed - verify(pool2).closeAsync(); - - 
assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); + verify(pool2, VERIFY_TIMEOUT).closeAsync(); + await() + .untilAsserted( + () -> assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3)); } @Test @@ -746,17 +740,15 @@ public void should_close_all_pools_when_closing() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); CompletionStage closeFuture = session.closeAsync(); - waitForPendingAdminTasks(); assertThatStage(closeFuture).isSuccess(); - verify(pool1).closeAsync(); - verify(pool2).closeAsync(); - verify(pool3).closeAsync(); + verify(pool1, VERIFY_TIMEOUT).closeAsync(); + verify(pool2, VERIFY_TIMEOUT).closeAsync(); + verify(pool3, VERIFY_TIMEOUT).closeAsync(); } @Test @@ -776,17 +768,15 @@ public void should_force_close_all_pools_when_force_closing() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); CompletionStage closeFuture = session.forceCloseAsync(); - waitForPendingAdminTasks(); assertThatStage(closeFuture).isSuccess(); - verify(pool1).forceCloseAsync(); - verify(pool2).forceCloseAsync(); - verify(pool3).forceCloseAsync(); + verify(pool1, VERIFY_TIMEOUT).forceCloseAsync(); + verify(pool2, VERIFY_TIMEOUT).forceCloseAsync(); + verify(pool3, VERIFY_TIMEOUT).forceCloseAsync(); } @Test @@ -810,7 +800,6 @@ public void should_close_pool_if_recreated_while_closing() { factoryHelper.waitForCall(node1, KEYSPACE, 
NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); assertThat(((DefaultSession) session).getPools()).containsValues(pool1, pool3); @@ -821,15 +810,13 @@ public void should_close_pool_if_recreated_while_closing() { // but the session gets closed before pool init completes CompletionStage closeFuture = session.closeAsync(); - waitForPendingAdminTasks(); assertThatStage(closeFuture).isSuccess(); // now pool init completes pool2Future.complete(pool2); - waitForPendingAdminTasks(); // Pool should have been closed - verify(pool2).forceCloseAsync(); + verify(pool2, VERIFY_TIMEOUT).forceCloseAsync(); } @Test @@ -849,17 +836,15 @@ public void should_set_keyspace_on_all_pools() { factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node2, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); Session session = CompletableFutures.getCompleted(initFuture.toCompletableFuture()); CqlIdentifier newKeyspace = CqlIdentifier.fromInternal("newKeyspace"); ((DefaultSession) session).setKeyspace(newKeyspace); - waitForPendingAdminTasks(); - verify(pool1).setKeyspace(newKeyspace); - verify(pool2).setKeyspace(newKeyspace); - verify(pool3).setKeyspace(newKeyspace); + verify(pool1, VERIFY_TIMEOUT).setKeyspace(newKeyspace); + verify(pool2, VERIFY_TIMEOUT).setKeyspace(newKeyspace); + verify(pool3, VERIFY_TIMEOUT).setKeyspace(newKeyspace); } @Test @@ -883,7 +868,6 @@ public void should_set_keyspace_on_pool_if_recreated_while_switching_keyspace() factoryHelper.waitForCall(node1, KEYSPACE, NodeDistance.LOCAL); factoryHelper.waitForCall(node3, KEYSPACE, NodeDistance.LOCAL); - waitForPendingAdminTasks(); assertThatStage(initFuture).isSuccess(); DefaultSession 
session = (DefaultSession) CompletableFutures.getCompleted(initFuture.toCompletableFuture()); @@ -896,16 +880,14 @@ public void should_set_keyspace_on_pool_if_recreated_while_switching_keyspace() // Keyspace gets changed on the session in the meantime, node2's pool will miss it CqlIdentifier newKeyspace = CqlIdentifier.fromInternal("newKeyspace"); session.setKeyspace(newKeyspace); - waitForPendingAdminTasks(); - verify(pool1).setKeyspace(newKeyspace); - verify(pool3).setKeyspace(newKeyspace); + verify(pool1, VERIFY_TIMEOUT).setKeyspace(newKeyspace); + verify(pool3, VERIFY_TIMEOUT).setKeyspace(newKeyspace); // now pool init completes pool2Future.complete(pool2); - waitForPendingAdminTasks(); // Pool should have been closed - verify(pool2).setKeyspace(newKeyspace); + verify(pool2, VERIFY_TIMEOUT).setKeyspace(newKeyspace); } private ChannelPool mockPool(Node node) { @@ -945,17 +927,4 @@ private static DefaultNode mockLocalNode(int i) { when(node.toString()).thenReturn("node" + i); return node; } - - // Wait for all the tasks on the pool's admin executor to complete. 
- private void waitForPendingAdminTasks() { - // This works because the event loop group is single-threaded - Future f = adminEventLoopGroup.schedule(() -> null, 5, TimeUnit.NANOSECONDS); - try { - Uninterruptibles.getUninterruptibly(f, 250, TimeUnit.MILLISECONDS); - } catch (ExecutionException e) { - fail("unexpected error", e.getCause()); - } catch (TimeoutException e) { - fail("timed out while waiting for admin tasks to complete", e); - } - } } diff --git a/pom.xml b/pom.xml index 4d525d61cf8..201088a3617 100644 --- a/pom.xml +++ b/pom.xml @@ -46,7 +46,7 @@ 2.1.11 4.0.5 1.4.9 - 4.1.39.Final + 4.1.45.Final 1.7.26 1.2.1 3.3.3 @@ -72,7 +72,7 @@ 20180130 1.9.12 - 3.1.6 + 4.0.2 2.0.0-M19 2.22.2 false From 8f8e2e766d596d84b459245987ea5c81cd72bf64 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 31 Jan 2020 16:27:26 -0800 Subject: [PATCH 284/979] JAVA-1556: Publish Maven Bill Of Materials POM --- .gitignore | 1 + bom/pom.xml | 109 ++++++++++++++ changelog/README.md | 1 + core-shaded/pom.xml | 11 ++ core/pom.xml | 11 ++ distribution/pom.xml | 11 ++ examples/pom.xml | 11 ++ integration-tests/pom.xml | 13 +- .../driver/osgi/support/BundleOptions.java | 133 +++++++++++++----- mapper-processor/pom.xml | 11 ++ mapper-runtime/pom.xml | 11 ++ pom.xml | 51 ++----- query-builder/pom.xml | 11 ++ test-infra/pom.xml | 11 ++ 14 files changed, 317 insertions(+), 79 deletions(-) create mode 100644 bom/pom.xml diff --git a/.gitignore b/.gitignore index eaf1a9ef8b2..07449882cc0 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ .project .java-version +.flattened-pom.xml .documenter_local_last_run /docs diff --git a/bom/pom.xml b/bom/pom.xml new file mode 100644 index 00000000000..583eaa833cd --- /dev/null +++ b/bom/pom.xml @@ -0,0 +1,109 @@ + + + + 4.0.0 + + com.datastax.oss + java-driver-parent + 4.5.0-SNAPSHOT + + java-driver-bom + pom + DataStax Java driver for Apache Cassandra(R) - Bill Of Materials + + + + com.datastax.oss + java-driver-core + 4.5.0-SNAPSHOT + + + 
com.datastax.oss + java-driver-core-shaded + 4.5.0-SNAPSHOT + + + com.datastax.oss + java-driver-mapper-processor + 4.5.0-SNAPSHOT + + + com.datastax.oss + java-driver-mapper-runtime + 4.5.0-SNAPSHOT + + + com.datastax.oss + java-driver-query-builder + 4.5.0-SNAPSHOT + + + com.datastax.oss + java-driver-test-infra + 4.5.0-SNAPSHOT + + + com.datastax.oss + native-protocol + 1.4.9 + + + com.datastax.oss + java-driver-shaded-guava + 25.1-jre + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + flatten + process-resources + + flatten + + + + keep + expand + expand + expand + expand + expand + expand + expand + expand + expand + expand + expand + expand + remove + + true + + + + + + + diff --git a/changelog/README.md b/changelog/README.md index 3a66f815849..8c57c8f6d8b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [improvement] JAVA-1556: Publish Maven Bill Of Materials POM - [improvement] JAVA-2637: Bump Netty to 4.1.45 - [bug] JAVA-2617: Reinstate generation of deps.txt for Insights - [new feature] JAVA-2625: Provide user-friendly programmatic configuration for kerberos diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index d2b8be79b10..951cd506ca1 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -25,6 +25,17 @@ java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + ${project.groupId} diff --git a/examples/pom.xml b/examples/pom.xml index b48833f98cf..01f024aaa23 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -26,6 +26,17 @@ java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. A collection of examples to demonstrate DataStax Java Driver for Apache Cassandra(R). 
+ + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 4abf2e7c8d8..a77352fc139 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -32,6 +32,17 @@ ${skipITs} ${skipITs} + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + com.datastax.oss @@ -205,14 +216,12 @@ ${assertj.version} ${config.version} ${commons-exec.version} - ${guava.version} ${hdrhistogram.version} ${jackson.version} ${jackson-databind.version} ${logback.version} ${lz4.version} ${metrics.version} - ${native-protocol.version} ${netty.version} ${simulacron.version} ${slf4j.version} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java index 22841235861..6777109232b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java @@ -21,6 +21,14 @@ import static org.ops4j.pax.exam.CoreOptions.options; import static org.ops4j.pax.exam.CoreOptions.systemProperty; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.google.common.io.CharSource; +import com.google.common.io.Files; +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.List; import org.ops4j.pax.exam.CoreOptions; import org.ops4j.pax.exam.options.CompositeOption; import org.ops4j.pax.exam.options.MavenArtifactProvisionOption; @@ -40,13 +48,23 @@ public static CompositeOption baseOptions() { return () -> options( mavenBundle( - "com.datastax.oss", "java-driver-shaded-guava", getVersion("guava.version")), - mavenBundle("io.dropwizard.metrics", "metrics-core", 
getVersion("metrics.version")), - mavenBundle("org.slf4j", "slf4j-api", getVersion("slf4j.version")), - mavenBundle("org.hdrhistogram", "HdrHistogram", getVersion("hdrhistogram.version")), - mavenBundle("com.typesafe", "config", getVersion("config.version")), + "com.datastax.oss", + "java-driver-shaded-guava", + getVersionFromDepsTxt("com.datastax.oss:java-driver-shaded-guava")), mavenBundle( - "com.datastax.oss", "native-protocol", getVersion("native-protocol.version")), + "io.dropwizard.metrics", + "metrics-core", + getVersionFromSystemProperty("metrics.version")), + mavenBundle("org.slf4j", "slf4j-api", getVersionFromSystemProperty("slf4j.version")), + mavenBundle( + "org.hdrhistogram", + "HdrHistogram", + getVersionFromSystemProperty("hdrhistogram.version")), + mavenBundle("com.typesafe", "config", getVersionFromSystemProperty("config.version")), + mavenBundle( + "com.datastax.oss", + "native-protocol", + getVersionFromDepsTxt("com.datastax.oss:native-protocol")), logbackBundles(), systemProperty("logback.configurationFile") .value("file:" + PathUtils.getBaseDir() + "/src/test/resources/logback-test.xml"), @@ -58,7 +76,7 @@ public static UrlProvisionOption driverCoreBundle() { "reference:file:" + PathUtils.getBaseDir() + "/../core/target/java-driver-core-" - + getVersion("project.version") + + getVersionFromSystemProperty("project.version") + ".jar"); } @@ -67,7 +85,7 @@ public static UrlProvisionOption driverCoreShadedBundle() { "reference:file:" + PathUtils.getBaseDir() + "/../core-shaded/target/java-driver-core-shaded-" - + getVersion("project.version") + + getVersionFromSystemProperty("project.version") + ".jar"); } @@ -76,7 +94,7 @@ public static UrlProvisionOption driverQueryBuilderBundle() { "reference:file:" + PathUtils.getBaseDir() + "/../query-builder/target/java-driver-query-builder-" - + getVersion("project.version") + + getVersionFromSystemProperty("project.version") + ".jar"); } @@ -85,7 +103,7 @@ public static UrlProvisionOption 
driverTestInfraBundle() { "reference:file:" + PathUtils.getBaseDir() + "/../test-infra/target/java-driver-test-infra-" - + getVersion("project.version") + + getVersionFromSystemProperty("project.version") + ".jar"); } @@ -97,13 +115,16 @@ public static CompositeOption testBundles() { nettyBundles(), // required by the test infra bundle, even for the shaded jar jacksonBundles(), // required by the Simulacron bundle, even for the shaded jar mavenBundle( - "org.apache.commons", "commons-exec", System.getProperty("commons-exec.version")), - mavenBundle("org.assertj", "assertj-core", System.getProperty("assertj.version")), + "org.apache.commons", + "commons-exec", + getVersionFromSystemProperty("commons-exec.version")), + mavenBundle( + "org.assertj", "assertj-core", getVersionFromSystemProperty("assertj.version")), junitBundles()); } public static CompositeOption nettyBundles() { - String nettyVersion = getVersion("netty.version"); + String nettyVersion = getVersionFromSystemProperty("netty.version"); return () -> options( mavenBundle("io.netty", "netty-handler", nettyVersion), @@ -115,7 +136,7 @@ public static CompositeOption nettyBundles() { } public static CompositeOption logbackBundles() { - String logbackVersion = getVersion("logback.version"); + String logbackVersion = getVersionFromSystemProperty("logback.version"); return () -> options( mavenBundle("ch.qos.logback", "logback-classic", logbackVersion), @@ -123,8 +144,8 @@ public static CompositeOption logbackBundles() { } public static CompositeOption jacksonBundles() { - String jacksonVersion = getVersion("jackson.version"); - String jacksonDatabindVersion = getVersion("jackson-databind.version"); + String jacksonVersion = getVersionFromSystemProperty("jackson.version"); + String jacksonDatabindVersion = getVersionFromSystemProperty("jackson-databind.version"); return () -> options( mavenBundle("com.fasterxml.jackson.core", "jackson-databind", jacksonDatabindVersion), @@ -133,7 +154,7 @@ public static 
CompositeOption jacksonBundles() { } public static CompositeOption simulacronBundles() { - String simulacronVersion = getVersion("simulacron.version"); + String simulacronVersion = getVersionFromSystemProperty("simulacron.version"); return () -> options( mavenBundle( @@ -146,23 +167,16 @@ public static CompositeOption simulacronBundles() { } public static MavenArtifactProvisionOption lz4Bundle() { - return mavenBundle("org.lz4", "lz4-java", getVersion("lz4.version")); + return mavenBundle("org.lz4", "lz4-java", getVersionFromSystemProperty("lz4.version")); } public static MavenArtifactProvisionOption snappyBundle() { - return mavenBundle("org.xerial.snappy", "snappy-java", getVersion("snappy.version")); - } - - public static String getVersion(String propertyName) { - String value = System.getProperty(propertyName); - if (value == null) { - throw new IllegalArgumentException(propertyName + " system property is not set"); - } - return value; + return mavenBundle( + "org.xerial.snappy", "snappy-java", getVersionFromSystemProperty("snappy.version")); } public static CompositeOption tinkerpopBundles() { - String version = System.getProperty("tinkerpop.version"); + String version = getVersionFromSystemProperty("tinkerpop.version"); return () -> options( CoreOptions.wrappedBundle(mavenBundle("org.apache.tinkerpop", "gremlin-core", version)) @@ -212,22 +226,73 @@ public static CompositeOption esriBundles() { options( CoreOptions.wrappedBundle( mavenBundle( - "com.esri.geometry", "esri-geometry-api", getVersion("esri.version"))) + "com.esri.geometry", + "esri-geometry-api", + getVersionFromSystemProperty("esri.version"))) .exports("com.esri.core.geometry.*") .imports("org.json", "org.codehaus.jackson") - .bundleVersion(getVersion("esri.version")) + .bundleVersion(getVersionFromSystemProperty("esri.version")) .bundleSymbolicName("com.esri.core.geometry") .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - mavenBundle("org.json", "json", 
getVersion("json.version")), + mavenBundle("org.json", "json", getVersionFromSystemProperty("json.version")), mavenBundle( - "org.codehaus.jackson", "jackson-core-asl", getVersion("legacy-jackson.version"))); + "org.codehaus.jackson", + "jackson-core-asl", + getVersionFromSystemProperty("legacy-jackson.version"))); } public static CompositeOption reactiveBundles() { return () -> options( mavenBundle( - "org.reactivestreams", "reactive-streams", getVersion("reactive-streams.version")), - mavenBundle("io.reactivex.rxjava2", "rxjava", getVersion("rxjava.version"))); + "org.reactivestreams", + "reactive-streams", + getVersionFromSystemProperty("reactive-streams.version")), + mavenBundle( + "io.reactivex.rxjava2", "rxjava", getVersionFromSystemProperty("rxjava.version"))); + } + + private static String getVersionFromSystemProperty(String propertyName) { + String value = System.getProperty(propertyName); + if (value == null) { + throw new IllegalArgumentException(propertyName + " system property is not set"); + } + return value; + } + + /** + * Some versions are not available as system properties because they are hardcoded in the BOM. + * + *

      Rely on the deps.txt file instead. + */ + private static String getVersionFromDepsTxt(String searchString) { + for (String dependency : DepsTxtLoader.lines) { + if (dependency.contains(searchString)) { + List components = Splitter.on(':').splitToList(dependency); + return components.get(components.size() - 2); + } + } + throw new IllegalStateException("Couldn't find version for " + searchString); + } + + private static class DepsTxtLoader { + + private static List lines; + + static { + String path = + PathUtils.getBaseDir() + + "/../core/target/classes/com/datastax/dse/driver/internal/deps.txt"; + CharSource charSource = Files.asCharSource(new File(path), Charsets.UTF_8); + + try { + lines = charSource.readLines(); + } catch (IOException e) { + throw new UncheckedIOException( + "Couldn't load deps.txt for driver core, " + + "make sure you run `mvn generate-resources` before running this test", + e); + } + } } } diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index e8d057da50d..2f0e5a4c276 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -25,6 +25,17 @@ java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + com.datastax.oss diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 4a3af3d2e55..63def336d15 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -26,6 +26,17 @@ java-driver-mapper-runtime bundle DataStax Java driver for Apache Cassandra(R) - object mapper runtime + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + ${project.groupId} diff --git a/pom.xml b/pom.xml index 201088a3617..2a2f06bf94e 100644 --- a/pom.xml +++ b/pom.xml @@ -36,16 +36,15 @@ integration-tests distribution examples + bom true UTF-8 UTF-8 1.3.4 - 25.1-jre 2.1.11 4.0.5 - 1.4.9 4.1.45.Final 1.7.26 1.2.1 @@ -80,62 +79,22 @@ - - com.datastax.oss - 
java-driver-core - ${project.version} - com.datastax.oss java-driver-core ${project.version} test-jar - - com.datastax.oss - java-driver-core-shaded - ${project.version} - - - com.datastax.oss - java-driver-mapper-processor - ${project.version} - - - com.datastax.oss - java-driver-mapper-runtime - ${project.version} - - - com.datastax.oss - java-driver-query-builder - ${project.version} - - - com.datastax.oss - java-driver-test-infra - ${project.version} - - - com.datastax.oss - native-protocol - ${native-protocol.version} - io.netty netty-handler ${netty.version} - - com.datastax.oss - java-driver-shaded-guava - ${guava.version} - com.google.guava guava - ${guava.version} + 25.1-jre com.typesafe @@ -532,6 +491,11 @@ versions-maven-plugin 2.7 + + org.codehaus.mojo + flatten-maven-plugin + 1.2.1 + @@ -591,6 +555,7 @@ .idea/** **/target/** **/dependency-reduced-pom.xml + **/.flattened-pom.xml docs/** diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c87f29229dd..c846bbc5466 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -26,6 +26,17 @@ java-driver-query-builder bundle DataStax Java driver for Apache Cassandra(R) - query builder + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + com.datastax.oss diff --git a/test-infra/pom.xml b/test-infra/pom.xml index cd4b608458b..e1fda41d2f4 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -26,6 +26,17 @@ java-driver-test-infra bundle DataStax Java driver for Apache Cassandra(R) - test infrastructure tools + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + com.datastax.oss From ccad5f254c761d0ccb7002b73f2a8eafaadd9ef3 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Mon, 10 Feb 2020 11:16:55 -0600 Subject: [PATCH 285/979] JAVA-2630: Correctly handle custom classes in IndexMetadata.describe --- changelog/README.md | 1 + .../core/metadata/schema/IndexMetadata.java | 2 +- .../metadata/schema/IndexMetadataTest.java | 44 
+++++++++++++++++++ 3 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/IndexMetadataTest.java diff --git a/changelog/README.md b/changelog/README.md index 8c57c8f6d8b..533062c77b3 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [bug] JAVA-2630: Correctly handle custom classes in IndexMetadata.describe - [improvement] JAVA-1556: Publish Maven Bill Of Materials POM - [improvement] JAVA-2637: Bump Netty to 4.1.45 - [bug] JAVA-2617: Reinstate generation of deps.txt for Insights diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/IndexMetadata.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/IndexMetadata.java index 773eba5cb8b..9b0e2fede9e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/IndexMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/IndexMetadata.java @@ -73,7 +73,7 @@ default String describe(boolean pretty) { .append(getTable()) .append(String.format(" (%s)", getTarget())) .newLine() - .append(String.format("USING '%s'", getClassName())); + .append(String.format("USING '%s'", getClassName().get())); // Some options already appear in the CREATE statement, ignore them Map describedOptions = diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/IndexMetadataTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/IndexMetadataTest.java new file mode 100644 index 00000000000..b2113052092 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/IndexMetadataTest.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.IndexKind; +import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import org.junit.Test; + +public class IndexMetadataTest { + + @Test + public void should_describe_custom_index_class_correctly() { + IndexMetadata indexMetadata = + new DefaultIndexMetadata( + CqlIdentifier.fromCql("ks1"), + CqlIdentifier.fromCql("myTable"), + CqlIdentifier.fromCql("myName"), + IndexKind.CUSTOM, + "myTarget", + ImmutableMap.of("class_name", "com.datastax.MyClass")); + String describe = indexMetadata.describe(true); + assertThat(describe) + .contains( + "CREATE CUSTOM INDEX myname ON ks1.mytable (myTarget)\n" + + "USING 'com.datastax.MyClass'"); + } +} From dea8be728aafb60775055640a41eeb936e568a0e Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 13 Feb 2020 14:24:10 -0800 Subject: [PATCH 286/979] Remove obsolete POM properties --- integration-tests/pom.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a77352fc139..df3e8637545 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -245,14 +245,12 @@ ${assertj.version} ${config.version} ${commons-exec.version} - ${guava.version} ${hdrhistogram.version} ${jackson.version} ${jackson-databind.version} 
${logback.version} ${lz4.version} ${metrics.version} - ${native-protocol.version} ${netty.version} ${simulacron.version} ${slf4j.version} From 11178b4dae8e247dfbfef08046a1bacc01fe13a6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Sun, 9 Feb 2020 17:22:06 -0800 Subject: [PATCH 287/979] JAVA-2644: Revisit channel selection when pool size > 1 --- changelog/README.md | 1 + .../ContinuousCqlRequestHandler.java | 3 + .../connection/BusyConnectionException.java | 7 ++ .../adminrequest/AdminRequestHandler.java | 18 ++- .../ThrottledAdminRequestHandler.java | 31 +++++- .../core/channel/ChannelHandlerRequest.java | 19 +++- .../internal/core/channel/DriverChannel.java | 45 +++++++- .../core/channel/InFlightHandler.java | 13 ++- .../core/channel/StreamIdGenerator.java | 43 +++++--- .../internal/core/cql/CqlPrepareHandler.java | 1 + .../internal/core/cql/CqlRequestHandler.java | 1 + .../driver/internal/core/pool/ChannelSet.java | 38 +++++-- .../internal/core/session/PoolManager.java | 1 + .../internal/core/session/ReprepareOnUp.java | 80 ++++++++------ .../ChannelFactoryAvailableIdsTest.java | 1 + .../core/channel/InFlightHandlerTest.java | 1 + .../core/channel/StreamIdGeneratorTest.java | 14 ++- .../internal/core/cql/PoolBehavior.java | 1 + .../internal/core/pool/ChannelSetTest.java | 62 +++++++++++ .../core/session/ReprepareOnUpTest.java | 85 +++++++++----- .../oss/driver/core/PoolBalancingIT.java | 104 ++++++++++++++++++ manual/developer/request_execution/README.md | 23 ++-- 22 files changed, 490 insertions(+), 102 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/PoolBalancingIT.java diff --git a/changelog/README.md b/changelog/README.md index 533062c77b3..b2dd31275b2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [bug] JAVA-2644: Revisit channel selection when pool size > 1 - [bug] JAVA-2630: Correctly handle custom classes in IndexMetadata.describe - [improvement] JAVA-1556: 
Publish Maven Bill Of Materials POM - [improvement] JAVA-2637: Bump Netty to 4.1.45 diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 33aea927616..34e3b7f4666 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -647,6 +647,7 @@ private void processUnprepared(@NonNull Unprepared errorMessage) { Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); ThrottledAdminRequestHandler.prepare( channel, + true, prepare, repreparePayload.customPayload, timeout, @@ -899,6 +900,7 @@ private void sendMorePagesRequest(int nextPages) { LOG.trace("[{}] Sending request for more pages", logPrefix); ThrottledAdminRequestHandler.query( channel, + true, Revise.requestMoreContinuousPages(streamId, nextPages), statement.getCustomPayload(), timeoutOtherPages, @@ -1011,6 +1013,7 @@ private void sendCancelRequest() { LOG.trace("[{}] Sending cancel request", logPrefix); ThrottledAdminRequestHandler.query( channel, + true, Revise.cancelContinuousPaging(streamId), statement.getCustomPayload(), timeoutOtherPages, diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java index 1c725715d54..bbe513351ba 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java @@ -30,6 +30,9 @@ */ public class BusyConnectionException extends DriverException { + // Note: the driver doesn't use this constructor anymore, it is preserved only for 
backward + // compatibility. + @SuppressWarnings("unused") public BusyConnectionException(int maxAvailableIds) { this( String.format( @@ -38,6 +41,10 @@ public BusyConnectionException(int maxAvailableIds) { false); } + public BusyConnectionException(String message) { + this(message, null, false); + } + private BusyConnectionException( String message, ExecutionInfo executionInfo, boolean writableStackTrace) { super(message, executionInfo, null, writableStackTrace); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java index 6ab32f1adc6..10701bd6bef 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.connection.BusyConnectionException; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; @@ -54,6 +55,7 @@ public static AdminRequestHandler call( DriverChannel channel, Query query, Duration timeout, String logPrefix) { return new AdminRequestHandler<>( channel, + true, query, Frame.NO_PAYLOAD, timeout, @@ -78,7 +80,7 @@ public static AdminRequestHandler query( debugString += " with parameters " + parameters; } return new AdminRequestHandler<>( - channel, message, Frame.NO_PAYLOAD, timeout, logPrefix, debugString, Rows.class); + channel, true, message, Frame.NO_PAYLOAD, timeout, logPrefix, debugString, Rows.class); } public static AdminRequestHandler query( @@ -87,6 +89,7 @@ public static AdminRequestHandler query( } private final DriverChannel 
channel; + private final boolean shouldPreAcquireId; private final Message message; private final Map customPayload; private final Duration timeout; @@ -100,6 +103,7 @@ public static AdminRequestHandler query( protected AdminRequestHandler( DriverChannel channel, + boolean shouldPreAcquireId, Message message, Map customPayload, Duration timeout, @@ -107,6 +111,7 @@ protected AdminRequestHandler( String debugString, Class expectedResponseType) { this.channel = channel; + this.shouldPreAcquireId = shouldPreAcquireId; this.message = message; this.customPayload = customPayload; this.timeout = timeout; @@ -117,7 +122,14 @@ protected AdminRequestHandler( public CompletionStage start() { LOG.debug("[{}] Executing {}", logPrefix, this); - channel.write(message, false, customPayload, this).addListener(this::onWriteComplete); + if (shouldPreAcquireId && !channel.preAcquireId()) { + setFinalError( + new BusyConnectionException( + String.format( + "%s has reached its maximum number of simultaneous requests", channel))); + } else { + channel.write(message, false, customPayload, this).addListener(this::onWriteComplete); + } return result; } @@ -199,6 +211,8 @@ private AdminRequestHandler copy(ByteBuffer pagingState) { buildQueryOptions(currentOptions.pageSize, currentOptions.namedValues, pagingState); return new AdminRequestHandler<>( channel, + // This is called for next page queries, so we always need to reacquire an id: + true, new Query(current.query, newOptions), customPayload, timeout, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java index 712f338c8da..a7d872cc1ee 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/ThrottledAdminRequestHandler.java @@ -17,11 
+17,15 @@ import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.RequestThrottlingException; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.response.Result; import com.datastax.oss.protocol.internal.response.result.Prepared; @@ -38,8 +42,16 @@ public class ThrottledAdminRequestHandler extends AdminRequestHandler implements Throttled { + /** + * @param shouldPreAcquireId whether to call {@link DriverChannel#preAcquireId()} before sending + * the request. This must be false if you obtained the connection from a pool ({@link + * ChannelPool#next()}, or {@link DefaultSession#getChannel(Node, String)}). It must be + * true if you are using a standalone channel (e.g. in {@link ControlConnection} or one of + * its auxiliary components). 
+ */ public static ThrottledAdminRequestHandler query( DriverChannel channel, + boolean shouldPreAcquireId, Message message, Map customPayload, Duration timeout, @@ -49,6 +61,7 @@ public static ThrottledAdminRequestHandler query( String debugString) { return new ThrottledAdminRequestHandler<>( channel, + shouldPreAcquireId, message, customPayload, timeout, @@ -59,8 +72,14 @@ public static ThrottledAdminRequestHandler query( Rows.class); } + /** + * @param shouldPreAcquireId whether to call {@link DriverChannel#preAcquireId()} before sending + * the request. See {@link #query(DriverChannel, boolean, Message, Map, Duration, + * RequestThrottler, SessionMetricUpdater, String, String)} for more explanations. + */ public static ThrottledAdminRequestHandler prepare( DriverChannel channel, + boolean shouldPreAcquireId, Message message, Map customPayload, Duration timeout, @@ -69,6 +88,7 @@ public static ThrottledAdminRequestHandler prepare( String logPrefix) { return new ThrottledAdminRequestHandler<>( channel, + shouldPreAcquireId, message, customPayload, timeout, @@ -85,6 +105,7 @@ public static ThrottledAdminRequestHandler prepare( protected ThrottledAdminRequestHandler( DriverChannel channel, + boolean preAcquireId, Message message, Map customPayload, Duration timeout, @@ -93,7 +114,15 @@ protected ThrottledAdminRequestHandler( String logPrefix, String debugString, Class expectedResponseType) { - super(channel, message, customPayload, timeout, logPrefix, debugString, expectedResponseType); + super( + channel, + preAcquireId, + message, + customPayload, + timeout, + logPrefix, + debugString, + expectedResponseType); this.startTimeNanos = System.nanoTime(); this.throttler = throttler; this.metricUpdater = metricUpdater; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java index db30a476f0b..e6a20c0d233 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelHandlerRequest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.channel; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.connection.BusyConnectionException; import com.datastax.oss.driver.internal.core.util.ProtocolUtils; import com.datastax.oss.driver.internal.core.util.concurrent.UncaughtExceptions; import com.datastax.oss.protocol.internal.Frame; @@ -35,6 +36,7 @@ abstract class ChannelHandlerRequest implements ResponseCallback { final Channel channel; final ChannelHandlerContext ctx; + final InFlightHandler inFlightHandler; private final long timeoutMillis; private ScheduledFuture timeoutFuture; @@ -42,6 +44,8 @@ abstract class ChannelHandlerRequest implements ResponseCallback { ChannelHandlerRequest(ChannelHandlerContext ctx, long timeoutMillis) { this.ctx = ctx; this.channel = ctx.channel(); + this.inFlightHandler = ctx.pipeline().get(InFlightHandler.class); + assert inFlightHandler != null; this.timeoutMillis = timeoutMillis; } @@ -60,10 +64,17 @@ void fail(Throwable cause) { void send() { assert channel.eventLoop().inEventLoop(); - DriverChannel.RequestMessage message = - new DriverChannel.RequestMessage(getRequest(), false, Frame.NO_PAYLOAD, this); - ChannelFuture writeFuture = channel.writeAndFlush(message); - writeFuture.addListener(this::writeListener); + if (!inFlightHandler.preAcquireId()) { + fail( + new BusyConnectionException( + String.format( + "%s has reached its maximum number of simultaneous requests", channel))); + } else { + DriverChannel.RequestMessage message = + new DriverChannel.RequestMessage(getRequest(), false, Frame.NO_PAYLOAD, this); + ChannelFuture writeFuture = channel.writeAndFlush(message); + writeFuture.addListener(this::writeListener); + } } private void writeListener(Future 
writeFuture) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java index d9ace3bae51..12bb6bbd95f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java @@ -17,7 +17,13 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.connection.BusyConnectionException; import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.util.concurrent.UncaughtExceptions; import com.datastax.oss.protocol.internal.Message; import io.netty.channel.Channel; @@ -129,14 +135,47 @@ public Map> getOptions() { } /** - * @return the number of available stream ids on the channel. This is used to weigh channels in - * pools that have a size bigger than 1, in the load balancing policy, and for monitoring - * purposes. + * @return the number of available stream ids on the channel; more precisely, this is the number + * of {@link #preAcquireId()} calls for which the id has not been released yet. This is used + * to weigh channels in pools that have a size bigger than 1, in the load balancing policy, + * and for monitoring purposes. */ public int getAvailableIds() { return inFlightHandler.getAvailableIds(); } + /** + * Indicates the intention to send a request using this channel. + * + *

      There must be exactly one invocation of this method before each call to {@link + * #write(Message, boolean, Map, ResponseCallback)}. If this method returns true, the client + * must proceed with the write. If it returns false, it must not proceed. + * + *

      This method is used together with {@link #getAvailableIds()} to track how many requests are + * currently executing on the channel, and avoid submitting a request that would result in a + * {@link BusyConnectionException}. The two methods follow atomic semantics: {@link + * #getAvailableIds()} returns the exact count of clients that have called {@link #preAcquireId()} + * and not yet released their stream id at this point in time. + * + *

      Most of the time, the driver code calls this method automatically: + * + *

        + *
      • if you obtained the channel from a pool ({@link ChannelPool#next()} or {@link + * DefaultSession#getChannel(Node, String)}), do not call this method: it has already + * been done as part of selecting the channel. + *
      • if you use {@link ChannelHandlerRequest} or {@link AdminRequestHandler} for internal + * queries, do not call this method, those classes already do it. + *
      • however, if you use {@link ThrottledAdminRequestHandler}, you must specify a {@code + * shouldPreAcquireId} argument to indicate whether to call this method or not. This is + * because those requests are sometimes used with a channel that comes from a pool + * (requiring {@code shouldPreAcquireId = false}), or sometimes with a standalone channel + * like in the control connection (requiring {@code shouldPreAcquireId = true}). + *
      + */ + public boolean preAcquireId() { + return inFlightHandler.preAcquireId(); + } + /** * @return the number of requests currently executing on this channel (including {@link * #getOrphanedIds() orphaned ids}). diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index 86a2f2090ab..9b3bbb9101d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -117,17 +117,24 @@ public void write(ChannelHandlerContext ctx, Object in, ChannelPromise promise) private void write(ChannelHandlerContext ctx, RequestMessage message, ChannelPromise promise) { if (closingGracefully) { promise.setFailure(new IllegalStateException("Channel is closing")); + streamIds.cancelPreAcquire(); return; } int streamId = streamIds.acquire(); if (streamId < 0) { - promise.setFailure(new BusyConnectionException(streamIds.getMaxAvailableIds())); + // Should not happen with the preAcquire mechanism, but handle gracefully + promise.setFailure( + new BusyConnectionException( + String.format( + "Couldn't acquire a stream id from InFlightHandler on %s", ctx.channel()))); + streamIds.cancelPreAcquire(); return; } if (inFlight.containsKey(streamId)) { promise.setFailure( new IllegalStateException("Found pending callback for stream id " + streamId)); + streamIds.cancelPreAcquire(); return; } @@ -374,6 +381,10 @@ int getAvailableIds() { return streamIds.getAvailableIds(); } + boolean preAcquireId() { + return streamIds.preAcquire(); + } + int getInFlight() { return streamIds.getMaxAvailableIds() - streamIds.getAvailableIds(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java index 
756112c8f77..934eeefc061 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java @@ -16,14 +16,17 @@ package com.datastax.oss.driver.internal.core.channel; import java.util.BitSet; +import java.util.concurrent.atomic.AtomicInteger; import net.jcip.annotations.NotThreadSafe; /** * Manages the set of identifiers used to distinguish multiplexed requests on a channel. * - *

      This class is not thread safe: calls to {@link #acquire()} and {@link #release(int)} must be - * properly synchronized (in practice this is done by only calling them from the I/O thread). - * However, {@link #getAvailableIds()} has volatile semantics. + *

      {@link #preAcquire()} / {@link #getAvailableIds()} follow atomic semantics. See {@link + * DriverChannel#preAcquireId()} for more explanations. + * + *

      Other methods are not synchronized, they are only called by {@link InFlightHandler} on the I/O + * thread. */ @NotThreadSafe class StreamIdGenerator { @@ -31,38 +34,52 @@ class StreamIdGenerator { private final int maxAvailableIds; // unset = available, set = borrowed (note that this is the opposite of the 3.x implementation) private final BitSet ids; - private volatile int availableIds; + private AtomicInteger availableIds; StreamIdGenerator(int maxAvailableIds) { this.maxAvailableIds = maxAvailableIds; this.ids = new BitSet(this.maxAvailableIds); - this.availableIds = this.maxAvailableIds; + this.availableIds = new AtomicInteger(this.maxAvailableIds); + } + + boolean preAcquire() { + while (true) { + int current = availableIds.get(); + assert current >= 0; + if (current == 0) { + return false; + } else if (availableIds.compareAndSet(current, current - 1)) { + return true; + } + } + } + + void cancelPreAcquire() { + int available = availableIds.incrementAndGet(); + assert available <= maxAvailableIds; } - // safe because a given instance is always called from the same I/O thread - @SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) int acquire() { + assert availableIds.get() < maxAvailableIds; int id = ids.nextClearBit(0); if (id >= maxAvailableIds) { return -1; } ids.set(id); - availableIds--; return id; } - @SuppressWarnings({"NonAtomicVolatileUpdate", "NonAtomicOperationOnVolatileField"}) void release(int id) { - if (ids.get(id)) { - availableIds++; - } else { + if (!ids.get(id)) { throw new IllegalStateException("Tried to release id that hadn't been borrowed: " + id); } ids.clear(id); + int available = availableIds.incrementAndGet(); + assert available <= maxAvailableIds; } int getAvailableIds() { - return availableIds; + return availableIds.get(); } int getMaxAvailableIds() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index 2caa19649d2..e5a6b5afd98 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -283,6 +283,7 @@ private CompletionStage prepareOnOtherNode(Node node) { ThrottledAdminRequestHandler handler = ThrottledAdminRequestHandler.prepare( channel, + false, message, request.getCustomPayload(), timeout, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 19354dd0473..951ef9aa93c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -660,6 +660,7 @@ private void processErrorResponse(Error errorMessage) { ThrottledAdminRequestHandler reprepareHandler = ThrottledAdminRequestHandler.prepare( channel, + true, reprepareMessage, repreparePayload.customPayload, timeout, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelSet.java index 0f6144c77a4..cccaac6d9b9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelSet.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelSet.java @@ -23,6 +23,8 @@ import java.util.Iterator; import java.util.concurrent.locks.ReentrantLock; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Concurrent structure used to store the channels of a pool. 
@@ -32,6 +34,15 @@ */ @ThreadSafe class ChannelSet implements Iterable { + + private static final Logger LOG = LoggerFactory.getLogger(ChannelSet.class); + /** + * The maximum number of iterations in the busy wait loop in {@link #next()} when there are + * multiple channels. This is a backstop to protect against thread starvation, in practice we've + * never observed more than 3 iterations in tests. + */ + private static final int MAX_ITERATIONS = 50; + private volatile DriverChannel[] channels; private final ReentrantLock lock = new ReentrantLock(); // must be held when mutating the array @@ -83,18 +94,27 @@ DriverChannel next() { case 0: return null; case 1: - return snapshot[0]; + DriverChannel onlyChannel = snapshot[0]; + return onlyChannel.preAcquireId() ? onlyChannel : null; default: - DriverChannel best = null; - int bestScore = 0; - for (DriverChannel channel : snapshot) { - int score = channel.getAvailableIds(); - if (score > bestScore) { - bestScore = score; - best = channel; + for (int i = 0; i < MAX_ITERATIONS; i++) { + DriverChannel best = null; + int bestScore = 0; + for (DriverChannel channel : snapshot) { + int score = channel.getAvailableIds(); + if (score > bestScore) { + bestScore = score; + best = channel; + } + } + if (best == null) { + return null; + } else if (best.preAcquireId()) { + return best; } } - return best; + LOG.trace("Could not select a channel after {} iterations", MAX_ITERATIONS); + return null; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java index b43c0c8c448..6127c00226d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java @@ -408,6 +408,7 @@ private void reprepareStatements(ChannelPool pool) { new ReprepareOnUp( logPrefix + "|" + pool.getNode().getEndPoint(), pool, + 
adminExecutor, repreparePayloads, context, () -> RunOrSchedule.on(adminExecutor, () -> onPoolReady(pool))) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java index 367314c5c6c..c6059dcbc57 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUp.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.connection.BusyConnectionException; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; @@ -26,12 +27,14 @@ import com.datastax.oss.driver.internal.core.cql.CqlRequestHandler; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Prepare; import com.datastax.oss.protocol.internal.request.Query; import com.datastax.oss.protocol.internal.util.Bytes; +import io.netty.util.concurrent.EventExecutor; import java.nio.ByteBuffer; import java.time.Duration; import java.util.ArrayDeque; @@ -64,7 +67,8 @@ class ReprepareOnUp { new Query("SELECT prepared_id FROM system.prepared_statements"); private final String logPrefix; - private final DriverChannel channel; + private final ChannelPool pool; + private final 
EventExecutor adminExecutor; private final Map repreparePayloads; private final Runnable whenPrepared; private final boolean checkSystemTable; @@ -74,8 +78,8 @@ class ReprepareOnUp { private final RequestThrottler throttler; private final SessionMetricUpdater metricUpdater; - // After the constructor, everything happens on the channel's event loop, so these fields do not - // need any synchronization. + // After the constructor, everything happens on adminExecutor, so these fields do not need any + // synchronization. private Set serverKnownIds; private Queue toReprepare; private int runningWorkers; @@ -83,12 +87,14 @@ class ReprepareOnUp { ReprepareOnUp( String logPrefix, ChannelPool pool, + EventExecutor adminExecutor, Map repreparePayloads, InternalDriverContext context, Runnable whenPrepared) { this.logPrefix = logPrefix; - this.channel = pool.next(); + this.pool = pool; + this.adminExecutor = adminExecutor; this.repreparePayloads = repreparePayloads; this.whenPrepared = whenPrepared; this.throttler = context.getRequestThrottler(); @@ -109,10 +115,6 @@ void start() { if (repreparePayloads.isEmpty()) { LOG.debug("[{}] No statements to reprepare, done", logPrefix); whenPrepared.run(); - } else if (this.channel == null) { - // Should not happen, but handle cleanly - LOG.debug("[{}] No channel available to reprepare, done", logPrefix); - whenPrepared.run(); } else { // Check log level because ConcurrentMap.size is not a constant operation if (LOG.isDebugEnabled()) { @@ -124,14 +126,14 @@ void start() { if (checkSystemTable) { LOG.debug("[{}] Checking which statements the server knows about", logPrefix); queryAsync(QUERY_SERVER_IDS, Collections.emptyMap(), "QUERY system.prepared_statements") - .whenComplete(this::gatherServerIds); + .whenCompleteAsync(this::gatherServerIds, adminExecutor); } else { LOG.debug( "[{}] {} is disabled, repreparing directly", logPrefix, DefaultDriverOption.REPREPARE_CHECK_SYSTEM_TABLE.getPath()); RunOrSchedule.on( - channel.eventLoop(), + 
adminExecutor, () -> { serverKnownIds = Collections.emptySet(); gatherPayloadsToReprepare(); @@ -141,7 +143,7 @@ void start() { } private void gatherServerIds(AdminResult rows, Throwable error) { - assert channel.eventLoop().inEventLoop(); + assert adminExecutor.inEventLoop(); if (serverKnownIds == null) { serverKnownIds = new HashSet<>(); } @@ -157,7 +159,7 @@ private void gatherServerIds(AdminResult rows, Throwable error) { } if (rows.hasNextPage()) { LOG.debug("[{}] system.prepared_statements has more pages", logPrefix); - rows.nextPage().whenComplete(this::gatherServerIds); + rows.nextPage().whenCompleteAsync(this::gatherServerIds, adminExecutor); } else { LOG.debug("[{}] Gathered {} server ids, proceeding", logPrefix, serverKnownIds.size()); gatherPayloadsToReprepare(); @@ -166,7 +168,7 @@ private void gatherServerIds(AdminResult rows, Throwable error) { } private void gatherPayloadsToReprepare() { - assert channel.eventLoop().inEventLoop(); + assert adminExecutor.inEventLoop(); toReprepare = new ArrayDeque<>(); for (RepreparePayload payload : repreparePayloads.values()) { if (serverKnownIds.contains(payload.id)) { @@ -198,7 +200,7 @@ private void gatherPayloadsToReprepare() { } private void startWorkers() { - assert channel.eventLoop().inEventLoop(); + assert adminExecutor.inEventLoop(); runningWorkers = Math.min(maxParallelism, toReprepare.size()); LOG.debug( "[{}] Repreparing {} statements with {} parallel workers", @@ -211,7 +213,7 @@ private void startWorkers() { } private void startWorker() { - assert channel.eventLoop().inEventLoop(); + assert adminExecutor.inEventLoop(); if (toReprepare.isEmpty()) { runningWorkers -= 1; if (runningWorkers == 0) { @@ -224,37 +226,51 @@ private void startWorker() { new Prepare( payload.query, (payload.keyspace == null ? 
null : payload.keyspace.asInternal())), payload.customPayload) - .handle( + .handleAsync( (result, error) -> { // Don't log, AdminRequestHandler does already startWorker(); return null; - }); + }, + adminExecutor); } } @VisibleForTesting protected CompletionStage queryAsync( Message message, Map customPayload, String debugString) { - ThrottledAdminRequestHandler reprepareHandler = - ThrottledAdminRequestHandler.query( - channel, - message, - customPayload, - timeout, - throttler, - metricUpdater, - logPrefix, - debugString); - return reprepareHandler.start(); + DriverChannel channel = pool.next(); + if (channel == null) { + return CompletableFutures.failedFuture( + new BusyConnectionException("Found no channel to execute reprepare query")); + } else { + ThrottledAdminRequestHandler reprepareHandler = + ThrottledAdminRequestHandler.query( + channel, + false, + message, + customPayload, + timeout, + throttler, + metricUpdater, + logPrefix, + debugString); + return reprepareHandler.start(); + } } @VisibleForTesting protected CompletionStage prepareAsync( Message message, Map customPayload) { - ThrottledAdminRequestHandler reprepareHandler = - ThrottledAdminRequestHandler.prepare( - channel, message, customPayload, timeout, throttler, metricUpdater, logPrefix); - return reprepareHandler.start(); + DriverChannel channel = pool.next(); + if (channel == null) { + return CompletableFutures.failedFuture( + new BusyConnectionException("Found no channel to execute reprepare query")); + } else { + ThrottledAdminRequestHandler reprepareHandler = + ThrottledAdminRequestHandler.prepare( + channel, false, message, customPayload, timeout, throttler, metricUpdater, logPrefix); + return reprepareHandler.start(); + } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryAvailableIdsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryAvailableIdsTest.java index 1f9ad10478a..95d4ff119eb 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryAvailableIdsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryAvailableIdsTest.java @@ -69,6 +69,7 @@ public void should_report_available_ids() { assertThat(channel.getAvailableIds()).isEqualTo(128); // Write a request, should decrease the count + assertThat(channel.preAcquireId()).isTrue(); Future writeFuture = channel.write(new Query("test"), false, Frame.NO_PAYLOAD, responseCallback); assertThat(writeFuture) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java index 7b8c7f870ce..2fca9366104 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java @@ -57,6 +57,7 @@ public class InFlightHandlerTest extends ChannelHandlerTestBase { public void setup() { super.setup(); MockitoAnnotations.initMocks(this); + when(streamIds.preAcquire()).thenReturn(true); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/StreamIdGeneratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/StreamIdGeneratorTest.java index 7bbbf23c329..53917f78ec4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/StreamIdGeneratorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/StreamIdGeneratorTest.java @@ -30,6 +30,7 @@ public void should_have_all_available_upon_creation() { public void should_return_available_ids_in_sequence() { StreamIdGenerator generator = new StreamIdGenerator(8); for (int i = 0; i < 8; i++) { + assertThat(generator.preAcquire()).isTrue(); assertThat(generator.acquire()).isEqualTo(i); assertThat(generator.getAvailableIds()).isEqualTo(7 - i); } @@ -39,23 +40,28 
@@ public void should_return_available_ids_in_sequence() { public void should_return_minus_one_when_no_id_available() { StreamIdGenerator generator = new StreamIdGenerator(8); for (int i = 0; i < 8; i++) { - generator.acquire(); + assertThat(generator.preAcquire()).isTrue(); + // also validating that ids are held as soon as preAcquire() is called, even if acquire() has + // not been invoked yet } assertThat(generator.getAvailableIds()).isEqualTo(0); - assertThat(generator.acquire()).isEqualTo(-1); + assertThat(generator.preAcquire()).isFalse(); } @Test public void should_return_previously_released_ids() { StreamIdGenerator generator = new StreamIdGenerator(8); for (int i = 0; i < 8; i++) { - generator.acquire(); + assertThat(generator.preAcquire()).isTrue(); + assertThat(generator.acquire()).isEqualTo(i); } generator.release(7); generator.release(2); assertThat(generator.getAvailableIds()).isEqualTo(2); + assertThat(generator.preAcquire()).isTrue(); assertThat(generator.acquire()).isEqualTo(2); + assertThat(generator.preAcquire()).isTrue(); assertThat(generator.acquire()).isEqualTo(7); - assertThat(generator.acquire()).isEqualTo(-1); + assertThat(generator.preAcquire()).isFalse(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java index 8b1c719fc0c..50694b50b72 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/PoolBehavior.java @@ -60,6 +60,7 @@ public PoolBehavior(Node node, boolean createChannel) { EventLoop eventLoop = mock(EventLoop.class); ChannelConfig config = mock(DefaultSocketChannelConfig.class); this.writePromise = ImmediateEventExecutor.INSTANCE.newPromise(); + when(channel.preAcquireId()).thenReturn(true); when(channel.write(any(Message.class), anyBoolean(), anyMap(), any(ResponseCallback.class))) .thenAnswer( invocation 
-> { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelSetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelSetTest.java index 5e1e12d13d8..bb24bf615fc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelSetTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelSetTest.java @@ -44,6 +44,9 @@ public void should_return_null_when_empty() { @Test public void should_return_element_when_single() { + // Given + when(channel1.preAcquireId()).thenReturn(true); + // When set.add(channel1); @@ -51,6 +54,20 @@ public void should_return_element_when_single() { assertThat(set.size()).isEqualTo(1); assertThat(set.next()).isEqualTo(channel1); verify(channel1, never()).getAvailableIds(); + verify(channel1).preAcquireId(); + } + + @Test + public void should_return_null_when_single_but_full() { + // Given + when(channel1.preAcquireId()).thenReturn(false); + + // When + set.add(channel1); + + // Then + assertThat(set.next()).isNull(); + verify(channel1).preAcquireId(); } @Test @@ -59,6 +76,7 @@ public void should_return_most_available_when_multiple() { when(channel1.getAvailableIds()).thenReturn(2); when(channel2.getAvailableIds()).thenReturn(12); when(channel3.getAvailableIds()).thenReturn(8); + when(channel2.preAcquireId()).thenReturn(true); // When set.add(channel1); @@ -71,12 +89,31 @@ public void should_return_most_available_when_multiple() { verify(channel1).getAvailableIds(); verify(channel2).getAvailableIds(); verify(channel3).getAvailableIds(); + verify(channel2).preAcquireId(); // When when(channel1.getAvailableIds()).thenReturn(15); + when(channel1.preAcquireId()).thenReturn(true); // Then assertThat(set.next()).isEqualTo(channel1); + verify(channel1).preAcquireId(); + } + + @Test + public void should_return_null_when_multiple_but_all_full() { + // Given + when(channel1.getAvailableIds()).thenReturn(0); + when(channel2.getAvailableIds()).thenReturn(0); 
+ when(channel3.getAvailableIds()).thenReturn(0); + + // When + set.add(channel1); + set.add(channel2); + set.add(channel3); + + // Then + assertThat(set.next()).isNull(); } @Test @@ -85,6 +122,7 @@ public void should_remove_channels() { when(channel1.getAvailableIds()).thenReturn(2); when(channel2.getAvailableIds()).thenReturn(12); when(channel3.getAvailableIds()).thenReturn(8); + when(channel2.preAcquireId()).thenReturn(true); set.add(channel1); set.add(channel2); @@ -93,6 +131,7 @@ public void should_remove_channels() { // When set.remove(channel2); + when(channel3.preAcquireId()).thenReturn(true); // Then assertThat(set.size()).isEqualTo(2); @@ -100,6 +139,7 @@ public void should_remove_channels() { // When set.remove(channel3); + when(channel1.preAcquireId()).thenReturn(true); // Then assertThat(set.size()).isEqualTo(1); @@ -112,4 +152,26 @@ public void should_remove_channels() { assertThat(set.size()).isEqualTo(0); assertThat(set.next()).isNull(); } + + /** + * Check that {@link ChannelSet#next()} doesn't spin forever if it keeps racing (see comments in + * the implementation). 
+ */ + @Test + public void should_not_loop_indefinitely_if_acquisition_keeps_failing() { + // Given + when(channel1.getAvailableIds()).thenReturn(2); + when(channel2.getAvailableIds()).thenReturn(12); + when(channel3.getAvailableIds()).thenReturn(8); + // channel2 is the most available but we keep failing to acquire (simulating the race condition) + when(channel2.preAcquireId()).thenReturn(false); + + // When + set.add(channel1); + set.add(channel2); + set.add(channel3); + + // Then + assertThat(set.next()).isNull(); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java index 3f13f91ea77..2be35a00ab7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/ReprepareOnUpTest.java @@ -43,7 +43,8 @@ import com.datastax.oss.protocol.internal.response.result.Rows; import com.datastax.oss.protocol.internal.response.result.RowsMetadata; import com.datastax.oss.protocol.internal.util.Bytes; -import io.netty.channel.EventLoop; +import io.netty.util.concurrent.EventExecutor; +import io.netty.util.concurrent.ImmediateEventExecutor; import java.nio.ByteBuffer; import java.time.Duration; import java.util.ArrayDeque; @@ -61,7 +62,6 @@ public class ReprepareOnUpTest { @Mock private ChannelPool pool; @Mock private DriverChannel channel; - @Mock private EventLoop eventLoop; @Mock private InternalDriverContext context; @Mock private DriverConfig config; @Mock private DriverExecutionProfile defaultProfile; @@ -76,8 +76,6 @@ public void setup() { MockitoAnnotations.initMocks(this); when(pool.next()).thenReturn(channel); - when(channel.eventLoop()).thenReturn(eventLoop); - when(eventLoop.inEventLoop()).thenReturn(true); when(config.getDefaultProfile()).thenReturn(defaultProfile); 
when(defaultProfile.getBoolean(DefaultDriverOption.REPREPARE_CHECK_SYSTEM_TABLE)) @@ -99,21 +97,13 @@ public void setup() { public void should_complete_immediately_if_no_prepared_statements() { // Given MockReprepareOnUp reprepareOnUp = - new MockReprepareOnUp("test", pool, getMockPayloads(/*none*/ ), context, whenPrepared); - - // When - reprepareOnUp.start(); - - // Then - assertThatStage(done).isSuccess(v -> assertThat(reprepareOnUp.queries).isEmpty()); - } - - @Test - public void should_complete_immediately_if_pool_empty() { - // Given - when(pool.next()).thenReturn(null); - MockReprepareOnUp reprepareOnUp = - new MockReprepareOnUp("test", pool, getMockPayloads('a'), context, whenPrepared); + new MockReprepareOnUp( + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads(/*none*/ ), + context, + whenPrepared); // When reprepareOnUp.start(); @@ -126,11 +116,17 @@ public void should_complete_immediately_if_pool_empty() { public void should_reprepare_all_if_system_table_query_fails() { MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Query.class); assertThat(((Query) adminQuery.request).query) .isEqualTo("SELECT prepared_id FROM system.prepared_statements"); @@ -138,6 +134,7 @@ public void should_reprepare_all_if_system_table_query_fails() { for (char c = 'a'; c <= 'f'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -150,11 +147,17 @@ public void 
should_reprepare_all_if_system_table_query_fails() { public void should_reprepare_all_if_system_table_empty() { MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Query.class); assertThat(((Query) adminQuery.request).query) .isEqualTo("SELECT prepared_id FROM system.prepared_statements"); @@ -164,6 +167,7 @@ public void should_reprepare_all_if_system_table_empty() { for (char c = 'a'; c <= 'f'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -179,13 +183,19 @@ public void should_reprepare_all_if_system_query_disabled() { MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery; for (char c = 'a'; c <= 'f'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -198,11 +208,17 @@ public void should_reprepare_all_if_system_query_disabled() { public void should_not_reprepare_already_known_statements() { MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, 
getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Query.class); assertThat(((Query) adminQuery.request).query) .isEqualTo("SELECT prepared_id FROM system.prepared_statements"); @@ -212,6 +228,7 @@ public void should_not_reprepare_already_known_statements() { for (char c = 'a'; c <= 'c'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -240,11 +257,17 @@ public void should_limit_number_of_statements_to_reprepare() { MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Query.class); assertThat(((Query) adminQuery.request).query) .isEqualTo("SELECT prepared_id FROM system.prepared_statements"); @@ -254,6 +277,7 @@ public void should_limit_number_of_statements_to_reprepare() { for (char c = 'a'; c <= 'c'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -268,11 +292,17 @@ public void should_limit_number_of_statements_reprepared_in_parallel() { 
MockReprepareOnUp reprepareOnUp = new MockReprepareOnUp( - "test", pool, getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), context, whenPrepared); + "test", + pool, + ImmediateEventExecutor.INSTANCE, + getMockPayloads('a', 'b', 'c', 'd', 'e', 'f'), + context, + whenPrepared); reprepareOnUp.start(); MockAdminQuery adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Query.class); assertThat(((Query) adminQuery.request).query) .isEqualTo("SELECT prepared_id FROM system.prepared_statements"); @@ -286,6 +316,7 @@ public void should_limit_number_of_statements_reprepared_in_parallel() { // As we complete each statement, another one should enqueue: for (char c = 'a'; c <= 'c'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -295,6 +326,7 @@ public void should_limit_number_of_statements_reprepared_in_parallel() { // Complete the last 3: for (char c = 'd'; c <= 'f'; c++) { adminQuery = reprepareOnUp.queries.poll(); + assertThat(adminQuery).isNotNull(); assertThat(adminQuery.request).isInstanceOf(Prepare.class); assertThat(((Prepare) adminQuery.request).cqlQuery).isEqualTo("mock query " + c); adminQuery.resultFuture.complete(null); @@ -321,10 +353,11 @@ private static class MockReprepareOnUp extends ReprepareOnUp { MockReprepareOnUp( String logPrefix, ChannelPool pool, + EventExecutor adminExecutor, Map repreparePayloads, InternalDriverContext context, Runnable whenPrepared) { - super(logPrefix, pool, repreparePayloads, context, whenPrepared); + super(logPrefix, pool, adminExecutor, repreparePayloads, context, whenPrepared); } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/PoolBalancingIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/PoolBalancingIT.java new file mode 100644 index 00000000000..b18384ba074 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/PoolBalancingIT.java @@ -0,0 +1,104 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core; + +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class PoolBalancingIT { + + private static final int 
POOL_SIZE = 2; + private static final int REQUESTS_PER_CONNECTION = 20; + + private static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE) + .withConfigLoader( + DriverConfigLoader.programmaticBuilder() + .withInt(DefaultDriverOption.CONNECTION_MAX_REQUESTS, REQUESTS_PER_CONNECTION) + .withInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, POOL_SIZE) + .build()) + .build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); + + private CountDownLatch done; + private AtomicReference unexpectedErrorRef; + + @Before + public void setup() { + done = new CountDownLatch(1); + unexpectedErrorRef = new AtomicReference<>(); + } + + @Test + public void should_balance_requests_across_connections() throws InterruptedException { + // Generate just the right load to completely fill the pool. All requests should succeed. + int simultaneousRequests = POOL_SIZE * REQUESTS_PER_CONNECTION; + + for (int i = 0; i < simultaneousRequests; i++) { + reschedule(null, null); + } + SECONDS.sleep(1); + done.countDown(); + + Throwable unexpectedError = unexpectedErrorRef.get(); + if (unexpectedError != null) { + fail("At least one request failed unexpectedly", unexpectedError); + } + } + + private void reschedule(AsyncResultSet asyncResultSet, Throwable throwable) { + if (done.getCount() == 1) { + if (throwable != null + // Actually there is a tiny race condition where pool acquisition may still fail: channel + // sizes can change as the client is iterating through them, so it can look like they're + // all full even if there's always a free slot somewhere at every point in time. This will + // result in NoNodeAvailableException, ignore it. 
+ && !(throwable instanceof NoNodeAvailableException)) { + unexpectedErrorRef.compareAndSet(null, throwable); + // Even a single error is a failure, no need to continue + done.countDown(); + } + SESSION_RULE + .session() + .executeAsync("SELECT release_version FROM system.local") + .whenComplete(this::reschedule); + } + } +} diff --git a/manual/developer/request_execution/README.md b/manual/developer/request_execution/README.md index a53ee5efe28..c6ec04e3b1a 100644 --- a/manual/developer/request_execution/README.md +++ b/manual/developer/request_execution/README.md @@ -89,7 +89,7 @@ you need to provision for the max size anyway, so you might as well run with all the time. If on the other hand the fluctuations are rare and predictable (e.g. peak for holiday sales), then a manual configuration change is good enough. -#### Wait-free +#### No queuing To get a connection to a node, client code calls `ChannelPool.next()`. This returns the less busy connection, based on the the `getAvailableIds()` counter exposed by @@ -101,12 +101,21 @@ introducing an additional wait for each node. If the user wants queuing when all it's better to do it at the session level with a [throttler](../../core/throttling/), which provides more intuitive configuration. -Also, note that there is no preemptive acquisition of the stream id outside of the event loop: we -select a channel based on a volatile counter, so a race condition is possible; if the channel gets -full by the time we arrive in `InFlightHandler`, the client will simply get a -`BusyConnectionException` and move on to the next node. We only acquire stream ids from the event -loop, which makes it much easier to track the current load (in driver 3, "inflight count getting out -of sync" bugs were very frequent). 
+Before 4.5.0, there was also no preemptive acquisition of the stream id outside of the event loop: +`getAvailableIds()` had volatile semantics, and a client could get a pooled connection that seemed +not busy, but fail to acquire a stream id when it later tried the actual write. This turned out to +not work well under high load, see [JAVA-2644](https://datastax-oss.atlassian.net/browse/JAVA-2644). + +Starting with 4.5.0, we've reintroduced a stronger guarantee (reminiscent of how things worked in +3.x): clients **must call `DriverChannel.preAcquireId()` exactly once before each write**. If the +call succeeds, `getAvailableIds()` is incremented immediately, and the client is guaranteed that +there will be a stream id available for the write. `preAcquireId()` and `getAvailableIds()` have +atomic semantics, so we can distribute the load more accurately. + +This comes at the cost of additional complexity: **we must ensure that every write is pre-acquired +first**, so that `getAvailableIds()` doesn't get out of sync with the actual stream id usage inside +`InFlightHandler`. This is explained in detail in the javadocs of `DriverChannel.preAcquireId()`, +read them carefully. The pool manages its channels with `ChannelSet`, a simple copy-on-write data structure. 
From 6a98e173f4bf325b21cf141a85c78a588f63c1a2 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 14 Feb 2020 11:43:31 -0800 Subject: [PATCH 288/979] JAVA-2642: Fix default value of max-orphan-requests --- changelog/README.md | 1 + .../internal/core/channel/ChannelFactory.java | 16 ++++++++++++++++ core/src/main/resources/reference.conf | 4 +++- 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index b2dd31275b2..3fc1f22f3c9 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.0 (in progress) +- [bug] JAVA-2642: Fix default value of max-orphan-requests - [bug] JAVA-2644: Revisit channel selection when pool size > 1 - [bug] JAVA-2630: Correctly handle custom classes in IndexMetadata.describe - [improvement] JAVA-1556: Publish Maven Bill Of Materials POM diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 4ab862785b3..49c0d7ac745 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -49,6 +49,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,6 +63,8 @@ public class ChannelFactory { /** A value for {@link #productType} that indicates that we are connected to Datastax Cloud. */ private static final String DATASTAX_CLOUD_PRODUCT_TYPE = "DATASTAX_APOLLO"; + private static final AtomicBoolean LOGGED_ORPHAN_WARNING = new AtomicBoolean(); + /** * A value for {@link #productType} that indicates that the server does not report any product * type. 
@@ -264,6 +267,19 @@ protected void initChannel(Channel channel) { defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_REQUESTS); int maxOrphanRequests = defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS); + if (maxOrphanRequests >= maxRequestsPerConnection) { + if (LOGGED_ORPHAN_WARNING.compareAndSet(false, true)) { + LOG.warn( + "[{}] Invalid value for {}: {}. It must be lower than {}. " + + "Defaulting to {} (1/4 of max-requests) instead.", + logPrefix, + DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS.getPath(), + maxOrphanRequests, + DefaultDriverOption.CONNECTION_MAX_REQUESTS.getPath(), + maxRequestsPerConnection / 4); + } + maxOrphanRequests = maxRequestsPerConnection / 4; + } InFlightHandler inFlightHandler = new InFlightHandler( diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 3edd91a13ab..5ad53cdaf92 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -409,11 +409,13 @@ datastax-java-driver { # accumulate over time, eventually affecting the connection's throughput. So we monitor them # and close the connection above a given threshold (the pool will replace it). # + # The value must be lower than `max-requests-per-connection`. + # # Required: yes # Modifiable at runtime: yes, the new value will be used for connections created after the # change. # Overridable in a profile: no - max-orphan-requests = 24576 + max-orphan-requests = 256 # Whether to log non-fatal errors when the driver tries to open a new connection. 
# From 17a18193b583cfef4810ca5cb63b2013f51f09d0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 17 Feb 2020 18:36:08 +0100 Subject: [PATCH 289/979] JAVA-2590 follow-up: Don't propagate errors when checking Insights support --- .../driver/internal/core/insights/InsightsClient.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 700899ef085..43318ef1969 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -242,9 +242,14 @@ private QueryOptions createQueryOptionsWithJson(String json) { } private boolean shouldSendEvent() { - return insightsConfiguration.isMonitorReportingEnabled() - && InsightsSupportVerifier.supportsInsights( - driverContext.getMetadataManager().getMetadata().getNodes().values()); + try { + return insightsConfiguration.isMonitorReportingEnabled() + && InsightsSupportVerifier.supportsInsights( + driverContext.getMetadataManager().getMetadata().getNodes().values()); + } catch (Exception e) { + LOGGER.debug("Unexpected error while checking Insights support.", e); + return false; + } } @VisibleForTesting From 5d54f7b34c754760fdf2fdd6dc6d94867b4b15f9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 6 Apr 2019 21:55:20 -0700 Subject: [PATCH 290/979] JAVA-2235: GraphBinary support (#221) --- .travis.yml | 1 + changelog/README.md | 2 + core/pom.xml | 4 + core/revapi.json | 162 +++++++++ .../graph/ScriptGraphStatementBuilder.java | 12 + .../core/metadata/schema/DseEdgeMetadata.java | 51 +++ .../metadata/schema/DseKeyspaceMetadata.java | 188 +++++++++- .../metadata/schema/DseTableMetadata.java | 144 +++++++- .../metadata/schema/DseVertexMetadata.java | 18 + .../internal/core/graph/ByteBufUtil.java 
| 36 ++ .../graph/DefaultAsyncGraphResultSet.java | 35 +- .../graph/DefaultScriptGraphStatement.java | 5 + .../core/graph/DseGraphRemoteConnection.java | 5 - .../internal/core/graph/GraphConversions.java | 179 +++++++--- .../internal/core/graph/GraphProtocol.java | 62 ++++ .../graph/GraphRequestAsyncProcessor.java | 24 +- .../core/graph/GraphRequestHandler.java | 52 ++- .../internal/core/graph/GraphSONUtils.java | 19 +- ...actDynamicGraphBinaryCustomSerializer.java | 63 ++++ ...ractSimpleGraphBinaryCustomSerializer.java | 143 ++++++++ .../binary/ComplexTypeSerializerUtil.java | 134 +++++++ .../graph/binary/CqlDurationSerializer.java | 41 +++ .../core/graph/binary/DistanceSerializer.java | 37 ++ .../graph/binary/EditDistanceSerializer.java | 39 +++ .../core/graph/binary/GeometrySerializer.java | 36 ++ .../core/graph/binary/GraphBinaryModule.java | 83 +++++ .../core/graph/binary/GraphBinaryUtils.java | 46 +++ .../graph/binary/LineStringSerializer.java | 22 ++ .../core/graph/binary/PointSerializer.java | 23 ++ .../core/graph/binary/PolygonSerializer.java | 22 ++ .../graph/binary/TupleValueSerializer.java | 56 +++ .../core/graph/binary/UdtValueSerializer.java | 52 +++ .../schema/DefaultDseEdgeMetadata.java | 138 ++++++++ .../schema/DefaultDseKeyspaceMetadata.java | 23 +- .../schema/DefaultDseTableMetadata.java | 32 +- .../schema/DefaultDseVertexMetadata.java | 45 +++ .../core/metadata/schema/ScriptHelper.java | 45 +++ .../schema/parsing/DseSchemaParser.java | 20 +- .../schema/parsing/DseTableParser.java | 91 ++++- .../schema/queries/Dse68SchemaQueries.java | 204 +++++++++++ .../schema/queries/Dse68SchemaRows.java | 326 ++++++++++++++++++ .../internal/core/graph/GraphNodeTest.java | 305 +++++++--------- .../core/graph/GraphProtocolTest.java | 97 ++++++ .../core/graph/GraphRequestHandlerTest.java | 324 ++++++++++++----- .../graph/GraphRequestHandlerTestHarness.java | 146 +++++--- .../binary/GraphBinaryDataTypesTest.java | 255 ++++++++++++++ ...se.java => 
LegacyGraphDataTypeITBase.java} | 2 +- .../core/graph/NativeGraphDataTypeITBase.java | 276 +++++++++++++++ ....java => LegacyGraphDataTypeRemoteIT.java} | 4 +- .../remote/NativeGraphDataTypeRemoteIT.java | 41 +++ ....java => LegacyGraphDataTypeFluentIT.java} | 4 +- ....java => LegacyGraphDataTypeScriptIT.java} | 4 +- .../NativeGraphDataTypeFluentIT.java | 41 +++ .../NativeGraphDataTypeScriptIT.java | 47 +++ .../schema/KeyspaceGraphMetadataIT.java | 60 ++++ .../metadata/schema/TableGraphMetadataIT.java | 119 +++++++ pom.xml | 7 + .../session/CqlSessionRuleBuilder.java | 28 +- 58 files changed, 4039 insertions(+), 441 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java create mode 100644 
core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/{GraphDataTypeITBase.java => LegacyGraphDataTypeITBase.java} (99%) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/{GraphDataTypeRemoteIT.java => 
LegacyGraphDataTypeRemoteIT.java} (94%) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{GraphDataTypeFluentIT.java => LegacyGraphDataTypeFluentIT.java} (94%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{GraphDataTypeScriptIT.java => LegacyGraphDataTypeScriptIT.java} (94%) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java diff --git a/.travis.yml b/.travis.yml index 50c05e76856..9887e9e2a03 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,4 @@ +dist: trusty language: java dist: trusty sudo: false diff --git a/changelog/README.md b/changelog/README.md index 3fc1f22f3c9..b717aa7d211 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,8 @@ +### NGDG (in progress) + ### 4.5.0 (in progress) - [bug] JAVA-2642: Fix default value of max-orphan-requests diff --git a/core/pom.xml b/core/pom.xml index 517c23c08ee..eff9b0601d9 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -103,6 +103,10 @@ org.apache.tinkerpop tinkergraph-gremlin + + org.apache.tinkerpop + gremlin-driver + com.fasterxml.jackson.core jackson-core diff --git a/core/revapi.json b/core/revapi.json index 0277bded5f3..ba5c7fae49a 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4851,6 +4851,168 @@ "new": "method com.datastax.oss.driver.api.core.auth.Authenticator 
com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase::newAuthenticator(com.datastax.oss.driver.api.core.metadata.EndPoint, java.lang.String) throws com.datastax.oss.driver.api.core.auth.AuthenticationException", "exception": "com.datastax.oss.driver.api.core.auth.AuthenticationException", "justification": "New exception is unchecked" + }, + { + "code": "java.class.superTypeTypeParametersChanged", + "old": "class com.datastax.dse.driver.api.core.DseSessionBuilderBase", + "new": "class com.datastax.dse.driver.api.core.DseSessionBuilderBase, SessionT>", + "oldSuperType": "com.datastax.oss.driver.api.core.session.SessionBuilder", + "newSuperType": "com.datastax.oss.driver.api.core.session.SessionBuilder, SessionT>", + "justification": "JAVA-2411: Type parameters were wrong but it is unlikely that implementors would notice that in subclasses" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getAggregates()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getFunctions()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getGraphEngine()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getTables()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getViews()", + 
"justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getEdge()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getIndexes()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getVertex()", + "justification": "JAVA-1898: Expose new table-level graph metadata" + }, + { + "code": "java.method.removed", + "old": "method org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversal org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submit(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.removed", + "old": "method java.util.Iterator> org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submit(org.apache.tinkerpop.gremlin.process.traversal.Traversal) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.noLongerDefault", + "old": "method java.util.concurrent.CompletableFuture> org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submitAsync(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "new": "method java.util.concurrent.CompletableFuture> 
org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submitAsync(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.nowAbstract", + "old": "method java.util.concurrent.CompletableFuture> org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submitAsync(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "new": "method java.util.concurrent.CompletableFuture> org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submitAsync(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.field.removedWithConstant", + "old": "field org.apache.tinkerpop.gremlin.process.traversal.TraversalSource.GREMLIN_REMOTE", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.field.removedWithConstant", + "old": "field org.apache.tinkerpop.gremlin.process.traversal.TraversalSource.GREMLIN_REMOTE_CONNECTION_CLASS", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.parameterTypeChanged", + "old": "parameter java.util.List> org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies::sortStrategies(===java.util.List>===)", + "new": "parameter java.util.Set> org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies::sortStrategies(===java.util.Set>===)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method java.util.List> 
org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies::sortStrategies(java.util.List>)", + "new": "method java.util.Set> org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies::sortStrategies(java.util.Set>)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.numberOfParametersChanged", + "old": "method void org.apache.tinkerpop.gremlin.process.traversal.Traverser.Admin::incrLoops(java.lang.String)", + "new": "method void org.apache.tinkerpop.gremlin.process.traversal.Traverser.Admin::incrLoops()", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.addedToInterface", + "new": "method void org.apache.tinkerpop.gremlin.process.traversal.Traverser.Admin::initialiseLoops(java.lang.String, java.lang.String)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.addedToInterface", + "new": "method int org.apache.tinkerpop.gremlin.process.traversal.Traverser::loops(java.lang.String)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max()", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max()", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max()", + "new": "method 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max()", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::max(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min()", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min()", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": 
"method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min()", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min()", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::min(org.apache.tinkerpop.gremlin.process.traversal.Scope)", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal> org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::valueMap(java.lang.String[])", + "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal> 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::valueMap(java.lang.String[])", + "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" } ] } diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java index 24491b942b0..9a8d0d262eb 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.dse.driver.internal.core.graph.DefaultScriptGraphStatement; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -35,6 +36,10 @@ public class ScriptGraphStatementBuilder private Boolean isSystemQuery; private Map queryParams; + public ScriptGraphStatementBuilder() { + this.queryParams = Maps.newHashMap(); + } + public ScriptGraphStatementBuilder(String script) { this.script = script; this.queryParams = Maps.newHashMap(); @@ -47,6 +52,12 @@ public ScriptGraphStatementBuilder(ScriptGraphStatement template) { this.isSystemQuery = template.isSystemQuery(); } + @NonNull + public ScriptGraphStatementBuilder setScript(@NonNull String script) { + this.script = script; + return this; + } + /** @see ScriptGraphStatement#isSystemQuery() */ @NonNull public ScriptGraphStatementBuilder setSystemQuery(@Nullable Boolean isSystemQuery) { @@ -101,6 +112,7 @@ public ScriptGraphStatementBuilder clearQueryParams() { @NonNull @Override public ScriptGraphStatement build() { + Preconditions.checkNotNull(this.script, "Script hasn't been defined in this builder."); return new DefaultScriptGraphStatement( 
this.script, this.queryParams, diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java new file mode 100644 index 00000000000..dc6280a7df9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java @@ -0,0 +1,51 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; + +/** Edge metadata, for a table that was created with CREATE TABLE ... WITH EDGE LABEL. */ +public interface DseEdgeMetadata { + + /** The label of the edge in graph. */ + @NonNull + CqlIdentifier getLabelName(); + + /** The identifier of the table representing the incoming vertex. */ + @NonNull + CqlIdentifier getFromTable(); + + /** The label of the incoming vertex in graph. */ + @NonNull + CqlIdentifier getFromLabel(); + + /** The columns in this table that match the partition key of the incoming vertex table. */ + @NonNull + List getFromPartitionKeyColumns(); + + /** The columns in this table that match the clustering columns of the incoming vertex table. */ + @NonNull + List getFromClusteringColumns(); + + /** The identifier of the table representing the outgoing vertex. */ + @NonNull + CqlIdentifier getToTable(); + + /** The label of the outgoing vertex in graph. */ + @NonNull + CqlIdentifier getToLabel(); + + /** The columns in this table that match the partition key of the outgoing vertex table. */ + @NonNull + List getToPartitionKeyColumns(); + + /** The columns in this table that match the clustering columns of the outgoing vertex table. 
*/ + @NonNull + List getToClusteringColumns(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java index 440c7649818..41252eef244 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java @@ -15,16 +15,194 @@ */ package com.datastax.dse.driver.api.core.metadata.schema; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; /** * Specialized keyspace metadata for DSE. * - *

      This type exists only for future extensibility; currently, it is identical to {@link - * KeyspaceMetadata}. + *

      It has the following differences with {@link KeyspaceMetadata}: * - *

      Note that all returned elements can be cast to their DSE counterpart, for example {@link - * TableMetadata} to {@link DseTableMetadata}. + *

        + *
      • new method {@link #getGraphEngine()}; + *
      • all sub-elements are specialized for DSE (e.g. {@link #getTables()} returns {@link + * DseTableMetadata} instances). + *
      */ -public interface DseKeyspaceMetadata extends KeyspaceMetadata {} +public interface DseKeyspaceMetadata extends KeyspaceMetadata { + + @NonNull + @Override + Map getTables(); + + @NonNull + @Override + default Optional getTable(@NonNull CqlIdentifier tableId) { + return Optional.ofNullable(getTables().get(tableId)); + } + + @NonNull + @Override + default Optional getTable(@NonNull String tableName) { + return getTable(CqlIdentifier.fromCql(tableName)); + } + + @NonNull + @Override + Map getViews(); + + @NonNull + @Override + default Map getViewsOnTable(@NonNull CqlIdentifier tableId) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + for (ViewMetadata view : getViews().values()) { + if (view.getBaseTable().equals(tableId)) { + builder.put(view.getName(), view); + } + } + return builder.build(); + } + + @NonNull + @Override + default Optional getView(@NonNull CqlIdentifier viewId) { + return Optional.ofNullable(getViews().get(viewId)); + } + + @NonNull + @Override + default Optional getView(@NonNull String viewName) { + return getView(CqlIdentifier.fromCql(viewName)); + } + + @NonNull + @Override + Map getFunctions(); + + @NonNull + @Override + default Optional getFunction(@NonNull FunctionSignature functionSignature) { + return Optional.ofNullable(getFunctions().get(functionSignature)); + } + + @NonNull + @Override + default Optional getFunction( + @NonNull CqlIdentifier functionId, @NonNull Iterable parameterTypes) { + return Optional.ofNullable( + getFunctions().get(new FunctionSignature(functionId, parameterTypes))); + } + + @NonNull + @Override + default Optional getFunction( + @NonNull String functionName, @NonNull Iterable parameterTypes) { + return getFunction(CqlIdentifier.fromCql(functionName), parameterTypes); + } + + @NonNull + @Override + default Optional getFunction( + @NonNull CqlIdentifier functionId, @NonNull DataType... 
parameterTypes) { + return Optional.ofNullable( + getFunctions().get(new FunctionSignature(functionId, parameterTypes))); + } + + @NonNull + @Override + default Optional getFunction( + @NonNull String functionName, @NonNull DataType... parameterTypes) { + return getFunction(CqlIdentifier.fromCql(functionName), parameterTypes); + } + + @NonNull + @Override + Map getAggregates(); + + @NonNull + @Override + default Optional getAggregate(@NonNull FunctionSignature aggregateSignature) { + return Optional.ofNullable(getAggregates().get(aggregateSignature)); + } + + @NonNull + @Override + default Optional getAggregate( + @NonNull CqlIdentifier aggregateId, @NonNull Iterable parameterTypes) { + return Optional.ofNullable( + getAggregates().get(new FunctionSignature(aggregateId, parameterTypes))); + } + + @NonNull + @Override + default Optional getAggregate( + @NonNull String aggregateName, @NonNull Iterable parameterTypes) { + return getAggregate(CqlIdentifier.fromCql(aggregateName), parameterTypes); + } + + @NonNull + @Override + default Optional getAggregate( + @NonNull CqlIdentifier aggregateId, @NonNull DataType... parameterTypes) { + return Optional.ofNullable( + getAggregates().get(new FunctionSignature(aggregateId, parameterTypes))); + } + + @NonNull + @Override + default Optional getAggregate( + @NonNull String aggregateName, @NonNull DataType... parameterTypes) { + return getAggregate(CqlIdentifier.fromCql(aggregateName), parameterTypes); + } + + /** The graph engine that will be used to interpret this keyspace. 
*/ + @NonNull + Optional getGraphEngine(); + + @NonNull + @Override + default String describe(boolean pretty) { + ScriptBuilder builder = new ScriptBuilder(pretty); + if (isVirtual()) { + builder.append("/* VIRTUAL "); + } else { + builder.append("CREATE "); + } + builder + .append("KEYSPACE ") + .append(getName()) + .append(" WITH replication = { 'class' : '") + .append(getReplication().get("class")) + .append("'"); + for (Map.Entry entry : getReplication().entrySet()) { + if (!entry.getKey().equals("class")) { + builder + .append(", '") + .append(entry.getKey()) + .append("': '") + .append(entry.getValue()) + .append("'"); + } + } + builder.append(" } AND durable_writes = ").append(Boolean.toString(isDurableWrites())); + getGraphEngine() + .ifPresent( + graphEngine -> builder.append(" AND graph_engine ='").append(graphEngine).append("'")); + builder.append(";"); + if (isVirtual()) { + builder.append(" */"); + } + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java index 31d2201bea8..c1c961999ca 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java @@ -15,17 +15,151 @@ */ package com.datastax.dse.driver.api.core.metadata.schema; +import com.datastax.dse.driver.internal.core.metadata.schema.ScriptHelper; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import 
com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; /** * Specialized table metadata for DSE. * - *

      This type exists only for future extensibility; currently, it is identical to {@link - * TableMetadata}. + *

      It has the following differences with {@link TableMetadata}: * - *

      Note that all returned {@link ColumnMetadata} can be cast to {@link DseColumnMetadata}, and - * all {@link IndexMetadata} to {@link DseIndexMetadata}. + *

        + *
      • new methods {@link #getVertex()} and {@link #getEdge()}; + *
      • all sub-elements are specialized for DSE (e.g. {@link #getIndexes()} returns {@link + * DseIndexMetadata} instances). + *
      */ -public interface DseTableMetadata extends DseRelationMetadata, TableMetadata {} +public interface DseTableMetadata extends DseRelationMetadata, TableMetadata { + + @NonNull + @Override + Map getIndexes(); + + /** + * The vertex metadata if this table represents a vertex in graph, otherwise empty. + * + *

      This is mutually exclusive with {@link #getEdge()}. + */ + @NonNull + Optional getVertex(); + + /** + * The edge metadata if this table represents an edge in graph, otherwise empty. + * + *

      This is mutually exclusive with {@link #getVertex()}. + */ + @NonNull + Optional getEdge(); + + @NonNull + @Override + default String describe(boolean pretty) { + ScriptBuilder builder = new ScriptBuilder(pretty); + if (isVirtual()) { + builder.append("/* VIRTUAL "); + } else { + builder.append("CREATE "); + } + + builder + .append("TABLE ") + .append(getKeyspace()) + .append(".") + .append(getName()) + .append(" (") + .newLine() + .increaseIndent(); + + for (ColumnMetadata column : getColumns().values()) { + builder.append(column.getName()).append(" ").append(column.getType().asCql(true, pretty)); + if (column.isStatic()) { + builder.append(" static"); + } + builder.append(",").newLine(); + } + + // PK + builder.append("PRIMARY KEY ("); + if (getPartitionKey().size() == 1) { // PRIMARY KEY (k + builder.append(getPartitionKey().get(0).getName()); + } else { // PRIMARY KEY ((k1, k2) + builder.append("("); + boolean first = true; + for (ColumnMetadata pkColumn : getPartitionKey()) { + if (first) { + first = false; + } else { + builder.append(", "); + } + builder.append(pkColumn.getName()); + } + builder.append(")"); + } + // PRIMARY KEY (, cc1, cc2, cc3) + for (ColumnMetadata clusteringColumn : getClusteringColumns().keySet()) { + builder.append(", ").append(clusteringColumn.getName()); + } + builder.append(")"); + + builder.newLine().decreaseIndent().append(")"); + + builder.increaseIndent(); + if (isCompactStorage()) { + builder.andWith().append("COMPACT STORAGE"); + } + if (getClusteringColumns().containsValue(ClusteringOrder.DESC)) { + builder.andWith().append("CLUSTERING ORDER BY ("); + boolean first = true; + for (Map.Entry entry : + getClusteringColumns().entrySet()) { + if (first) { + first = false; + } else { + builder.append(", "); + } + builder.append(entry.getKey().getName()).append(" ").append(entry.getValue().name()); + } + builder.append(")"); + } + getVertex() + .ifPresent( + vertex -> { + builder.andWith().append("VERTEX LABEL").append(" 
").append(vertex.getLabelName()); + }); + getEdge() + .ifPresent( + edge -> { + builder.andWith().append("EDGE LABEL").append(" ").append(edge.getLabelName()); + ScriptHelper.appendEdgeSide( + builder, + edge.getFromTable(), + edge.getFromLabel(), + edge.getFromPartitionKeyColumns(), + edge.getFromClusteringColumns(), + "FROM"); + ScriptHelper.appendEdgeSide( + builder, + edge.getToTable(), + edge.getToLabel(), + edge.getToPartitionKeyColumns(), + edge.getToClusteringColumns(), + "TO"); + }); + Map options = getOptions(); + RelationParser.appendOptions(options, builder); + builder.append(";"); + if (isVirtual()) { + builder.append(" */"); + } + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java new file mode 100644 index 00000000000..50e99492485 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java @@ -0,0 +1,18 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** Vertex metadata, for a table that was created with CREATE TABLE ... WITH VERTEX LABEL. */ +public interface DseVertexMetadata { + + /** The label of the vertex in graph. 
*/ + @NonNull + CqlIdentifier getLabelName(); +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java new file mode 100644 index 00000000000..dae9a741d98 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import java.nio.ByteBuffer; + +public class ByteBufUtil { + + // Does not move the reader index of the ByteBuf parameter + public static ByteBuffer toByteBuffer(ByteBuf buffer) { + if (buffer.isDirect()) { + return buffer.nioBuffer(); + } + final byte[] bytes = new byte[buffer.readableBytes()]; + buffer.getBytes(buffer.readerIndex(), bytes); + return ByteBuffer.wrap(bytes); + } + + static ByteBuf toByteBuf(ByteBuffer buffer) { + return Unpooled.wrappedBuffer(buffer); + } + + // read a predefined amount of bytes from the netty buffer and move its readerIndex + public static ByteBuffer readBytes(ByteBuf nettyBuf, int size) { + ByteBuffer res = ByteBuffer.allocate(size); + nettyBuf.readBytes(res); + res.flip(); + return res; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java index a4768054357..c4160201e44 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java @@ -19,10 +19,12 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import 
com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Queue; import java.util.concurrent.CompletionStage; import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; @NotThreadSafe // wraps a mutable queue public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { @@ -30,11 +32,14 @@ public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { private final ExecutionInfo executionInfo; private final CountingIterator iterator; private final Iterable currentPage; + private final GraphProtocol graphProtocol; - public DefaultAsyncGraphResultSet(ExecutionInfo executionInfo, Queue data) { + public DefaultAsyncGraphResultSet( + ExecutionInfo executionInfo, Queue data, GraphProtocol graphProtocol) { this.executionInfo = executionInfo; this.iterator = new GraphResultIterator(data); this.currentPage = () -> iterator; + this.graphProtocol = graphProtocol; } @NonNull @@ -80,7 +85,7 @@ public void cancel() { // nothing to do } - private static class GraphResultIterator extends CountingIterator { + private class GraphResultIterator extends CountingIterator { private final Queue data; @@ -108,14 +113,26 @@ protected GraphNode computeNext() { return endOfData(); } - // The repeat counter is called "bulk" in the JSON payload - GraphNode b = container.getByKey("bulk"); - if (b != null) { - this.repeat = b.asLong(); + if (graphProtocol.isGraphBinary()) { + // results are contained in a Traverser object and not a Map if the protocol + // is GraphBinary + Preconditions.checkState( + container.as(Object.class) instanceof Traverser, + "Graph protocol error. 
Received object should be a Traverser but it is not."); + Traverser t = container.as(Traverser.class); + this.repeat = t.bulk(); + this.lastGraphNode = new ObjectGraphNode(t.get()); + return lastGraphNode; + } else { + // The repeat counter is called "bulk" in the JSON payload + GraphNode b = container.getByKey("bulk"); + if (b != null) { + this.repeat = b.asLong(); + } + + lastGraphNode = container.getByKey("result"); + return lastGraphNode; } - - lastGraphNode = container.getByKey("result"); - return lastGraphNode; } } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java index 4e704352355..ac63e91d3a0 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultScriptGraphStatement.java @@ -197,4 +197,9 @@ protected ScriptGraphStatement newInstance( readConsistencyLevel, writeConsistencyLevel); } + + @Override + public String toString() { + return String.format("ScriptGraphStatement['%s', params: %s]", this.script, this.queryParams); + } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java index 6a2e2e67beb..bb52cee39fc 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphRemoteConnection.java @@ -17,16 +17,11 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import java.util.Iterator; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; import net.jcip.annotations.Immutable; import 
org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; -import org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException; import org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversal; import org.apache.tinkerpop.gremlin.process.traversal.Bytecode; -import org.apache.tinkerpop.gremlin.process.traversal.Traversal; -import org.apache.tinkerpop.gremlin.process.traversal.Traverser; @Immutable public class DseGraphRemoteConnection implements RemoteConnection { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 91facbf7469..99526532387 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -15,11 +15,12 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static java.nio.charset.StandardCharsets.UTF_8; + import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; -import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.*; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.request.RawBytesQuery; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; import com.datastax.oss.driver.api.core.ConsistencyLevel; @@ -28,19 +29,23 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import 
com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.Conversions; -import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.request.Query; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import io.netty.buffer.ByteBuf; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.time.Duration; -import java.util.Collections; -import java.util.Map; +import java.util.*; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; /** * Utility class to move boilerplate out of {@link GraphRequestHandler}. 
@@ -51,16 +56,22 @@ */ public class GraphConversions extends Conversions { - static String GRAPH_LANG_OPTION_KEY = "graph-language"; - static String GRAPH_NAME_OPTION_KEY = "graph-name"; - static String GRAPH_SOURCE_OPTION_KEY = "graph-source"; - static String GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY = "graph-read-consistency"; - static String GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY = "graph-write-consistency"; - static String GRAPH_RESULTS_OPTION_KEY = "graph-results"; - static String GRAPH_TIMEOUT_OPTION_KEY = "request-timeout"; + static final String GRAPH_LANG_OPTION_KEY = "graph-language"; + static final String GRAPH_NAME_OPTION_KEY = "graph-name"; + static final String GRAPH_SOURCE_OPTION_KEY = "graph-source"; + static final String GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY = "graph-read-consistency"; + static final String GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY = "graph-write-consistency"; + static final String GRAPH_RESULTS_OPTION_KEY = "graph-results"; + static final String GRAPH_TIMEOUT_OPTION_KEY = "request-timeout"; + static final String GRAPH_BINARY_QUERY_OPTION_KEY = "graph-binary-query"; + + static final String LANGUAGE_GROOVY = "gremlin-groovy"; + static final String LANGUAGE_BYTECODE = "bytecode-json"; + + @VisibleForTesting static final byte[] EMPTY_STRING_QUERY = "".getBytes(UTF_8); - static String inferSubProtocol( - GraphStatement statement, DriverExecutionProfile config, DefaultSession session) { + static GraphProtocol inferSubProtocol( + GraphStatement statement, DriverExecutionProfile config) { String graphProtocol = statement.getSubProtocol(); if (graphProtocol == null) { graphProtocol = @@ -69,26 +80,42 @@ static String inferSubProtocol( // TODO pick graphson-3.0 if the target graph uses the native engine "graphson-2.0"); } - assert graphProtocol != null; - return graphProtocol; + // should not be null because we call config.getString() with a default value + Objects.requireNonNull( + graphProtocol, + "Could not determine the graph protocol 
for the query. This is a bug, please report."); + + return GraphProtocol.fromString(graphProtocol); } static Message createMessageFromGraphStatement( GraphStatement statement, - String subProtocol, + GraphProtocol subProtocol, DriverExecutionProfile config, - InternalDriverContext context) { + DseDriverContext context, + GraphBinaryModule graphBinaryModule) { - ByteBuffer encodedQueryParams; - try { - Map queryParams = - (statement instanceof ScriptGraphStatement) - ? ((ScriptGraphStatement) statement).getQueryParams() - : Collections.emptyMap(); - encodedQueryParams = GraphSONUtils.serializeToByteBuffer(queryParams, subProtocol); - } catch (IOException e) { - throw new UncheckedIOException( - "Couldn't serialize parameters for GraphStatement: " + statement, e); + final List encodedQueryParams; + if ((!(statement instanceof ScriptGraphStatement)) + || ((ScriptGraphStatement) statement).getQueryParams().isEmpty()) { + encodedQueryParams = Collections.emptyList(); + } else { + try { + Map queryParams = ((ScriptGraphStatement) statement).getQueryParams(); + if (subProtocol.isGraphBinary()) { + ByteBuf graphBinaryParams = graphBinaryModule.serialize(queryParams); + encodedQueryParams = + Collections.singletonList(ByteBufUtil.toByteBuffer(graphBinaryParams)); + graphBinaryParams.release(); + } else { + encodedQueryParams = + Collections.singletonList( + GraphSONUtils.serializeToByteBuffer(queryParams, subProtocol)); + } + } catch (IOException e) { + throw new UncheckedIOException( + "Couldn't serialize parameters for GraphStatement: " + statement, e); + } } int consistencyLevel = @@ -103,7 +130,7 @@ static Message createMessageFromGraphStatement( DseQueryOptions queryOptions = new DseQueryOptions( consistencyLevel, - Collections.singletonList(encodedQueryParams), + encodedQueryParams, Collections.emptyMap(), // ignored by the DSE Graph server true, // also ignored 50, // also ignored @@ -122,20 +149,40 @@ static Message createMessageFromGraphStatement( } } - private 
static byte[] getQueryBytes(GraphStatement statement, String graphSubProtocol) { - assert statement instanceof FluentGraphStatement - || statement instanceof BatchGraphStatement - || statement instanceof BytecodeGraphStatement; + // This method returns either a Bytecode object, or a List if the statement is a + // BatchGraphStatement + @VisibleForTesting + public static Object bytecodeToSerialize(GraphStatement statement) { + Preconditions.checkArgument( + statement instanceof FluentGraphStatement + || statement instanceof BatchGraphStatement + || statement instanceof BytecodeGraphStatement, + "To serialize bytecode the query must be a fluent or batch statement, but was: %s", + statement.getClass()); + Object toSerialize; if (statement instanceof FluentGraphStatement) { toSerialize = ((FluentGraphStatement) statement).getTraversal().asAdmin().getBytecode(); } else if (statement instanceof BatchGraphStatement) { - toSerialize = ((BatchGraphStatement) statement).iterator(); + // transform the Iterator to List + toSerialize = + ImmutableList.copyOf( + Iterators.transform( + ((BatchGraphStatement) statement).iterator(), + traversal -> traversal.asAdmin().getBytecode())); } else { toSerialize = ((BytecodeGraphStatement) statement).getBytecode(); } + return toSerialize; + } + + private static byte[] getQueryBytes(GraphStatement statement, GraphProtocol graphSubProtocol) { try { - return GraphSONUtils.serializeToBytes(toSerialize, graphSubProtocol); + return graphSubProtocol.isGraphBinary() + // if GraphBinary, the query is encoded in the custom payload, and not in the query field + // see GraphConversions#createCustomPayload() + ? 
EMPTY_STRING_QUERY + : GraphSONUtils.serializeToBytes(bytecodeToSerialize(statement), graphSubProtocol); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -143,9 +190,10 @@ private static byte[] getQueryBytes(GraphStatement statement, String graphSub static Map createCustomPayload( GraphStatement statement, - String subProtocol, + GraphProtocol subProtocol, DriverExecutionProfile config, - InternalDriverContext context) { + DseDriverContext context, + GraphBinaryModule graphBinaryModule) { ProtocolVersion protocolVersion = context.getProtocolVersion(); @@ -154,11 +202,18 @@ static Map createCustomPayload( Map statementOptions = statement.getCustomPayload(); payload.putAll(statementOptions); + final String graphLanguage; + // Don't override anything that's already provided at the statement level if (!statementOptions.containsKey(GRAPH_LANG_OPTION_KEY)) { - String graphLanguage = - (statement instanceof ScriptGraphStatement) ? "gremlin-groovy" : "bytecode-json"; + graphLanguage = + (statement instanceof ScriptGraphStatement) ? 
LANGUAGE_GROOVY : LANGUAGE_BYTECODE; payload.put(GRAPH_LANG_OPTION_KEY, TypeCodecs.TEXT.encode(graphLanguage, protocolVersion)); + } else { + graphLanguage = + TypeCodecs.TEXT.decode(statementOptions.get(GRAPH_LANG_OPTION_KEY), protocolVersion); + Preconditions.checkNotNull( + graphLanguage, "A null value was set for the graph-language custom payload key."); } if (!isSystemQuery(statement, config)) { @@ -183,9 +238,27 @@ static Map createCustomPayload( } } - if (!statementOptions.containsKey(GRAPH_RESULTS_OPTION_KEY)) { - assert subProtocol != null; - payload.put(GRAPH_RESULTS_OPTION_KEY, TypeCodecs.TEXT.encode(subProtocol, protocolVersion)); + // the payload allows null entry values so doing a get directly here and checking for null + final ByteBuffer payloadInitialProtocol = statementOptions.get(GRAPH_RESULTS_OPTION_KEY); + if (payloadInitialProtocol == null) { + Preconditions.checkNotNull(subProtocol); + payload.put( + GRAPH_RESULTS_OPTION_KEY, + TypeCodecs.TEXT.encode(subProtocol.toInternalCode(), protocolVersion)); + } else { + subProtocol = + GraphProtocol.fromString(TypeCodecs.TEXT.decode(payloadInitialProtocol, protocolVersion)); + } + + if (subProtocol.isGraphBinary() && graphLanguage.equals(LANGUAGE_BYTECODE)) { + Object bytecodeQuery = bytecodeToSerialize(statement); + try { + ByteBuf bytecodeByteBuf = graphBinaryModule.serialize(bytecodeQuery); + payload.put(GRAPH_BINARY_QUERY_OPTION_KEY, ByteBufUtil.toByteBuffer(bytecodeByteBuf)); + bytecodeByteBuf.release(); + } catch (SerializationException e) { + throw new UncheckedIOException(e); + } } if (!statementOptions.containsKey(GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY)) { @@ -237,4 +310,20 @@ private static boolean isSystemQuery(GraphStatement statement, DriverExecutio } return config.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); } + + static GraphNode createGraphBinaryGraphNode( + List data, GraphBinaryModule graphBinaryModule) throws IOException { + // there should be only one column in the 
given row + Preconditions.checkArgument(data.size() == 1, "Invalid row given to deserialize"); + + // TODO: avoid the conversion to ByteBuffer and use Netty ByteBuf directly from the driver since + // GraphBinary accepts ByteBufs. + // This would require fiddling with the DseFrameCodecs and the GraphRequestHandler + ByteBuf toDeserialize = ByteBufUtil.toByteBuf(data.get(0)); + Object deserializedObject = graphBinaryModule.deserialize(toDeserialize); + toDeserialize.release(); + assert deserializedObject instanceof Traverser + : "Graph protocol error. Received object should be a Traverser but it is not."; + return new ObjectGraphNode(deserializedObject); + } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java new file mode 100644 index 00000000000..eb090dcb90a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public enum GraphProtocol { + GRAPHSON_1_0("graphson-1.0"), + GRAPHSON_2_0("graphson-2.0"), + GRAPHSON_3_0("graphson-3.0"), + GRAPH_BINARY_1_0("graph-binary-1.0"); + + private static final Map BY_CODE; + + static { + Map tmp = new HashMap<>(); + for (GraphProtocol value : values()) { + tmp.put(value.stringRepresentation, value); + } + BY_CODE = Collections.unmodifiableMap(tmp); + } + + private final String stringRepresentation; + + GraphProtocol(String stringRepresentation) { + this.stringRepresentation = stringRepresentation; + } + + @NonNull + public String toInternalCode() { + return stringRepresentation; + } + + @NonNull + public static GraphProtocol fromString(@Nullable String stringRepresentation) { + if (stringRepresentation == null || !BY_CODE.containsKey(stringRepresentation)) { + StringBuilder sb = + new StringBuilder( + String.format( + "Graph protocol used [\"%s\"] unknown. 
Possible values are: [ \"%s\"", + stringRepresentation, GraphProtocol.values()[0].toInternalCode())); + for (int i = 1; i < GraphProtocol.values().length; i++) { + sb.append(String.format(", \"%s\"", GraphProtocol.values()[i].toInternalCode())); + } + sb.append("]"); + throw new IllegalArgumentException(sb.toString()); + } + return BY_CODE.get(stringRepresentation); + } + + public boolean isGraphBinary() { + return this == GRAPH_BINARY_1_0; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 05768b15d72..36c9db99801 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -17,6 +17,8 @@ import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -25,11 +27,29 @@ import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import java.util.concurrent.CompletionStage; import net.jcip.annotations.ThreadSafe; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; @ThreadSafe public class GraphRequestAsyncProcessor implements RequestProcessor, CompletionStage> { + private final GraphBinaryModule graphBinaryModule; + + public 
GraphRequestAsyncProcessor(DseDriverContext context) { + TypeSerializerRegistry typeSerializerRegistry = + GraphBinaryModule.createDseTypeSerializerRegistry(context); + this.graphBinaryModule = + new GraphBinaryModule( + new GraphBinaryReader(typeSerializerRegistry), + new GraphBinaryWriter(typeSerializerRegistry)); + } + + public GraphBinaryModule getGraphBinaryModule() { + return graphBinaryModule; + } + @Override public boolean canProcess(Request request, GenericType resultType) { return request instanceof GraphStatement && resultType.equals(GraphStatement.ASYNC); @@ -41,7 +61,9 @@ public CompletionStage process( DefaultSession session, InternalDriverContext context, String sessionLogPrefix) { - return new GraphRequestHandler(request, session, context, sessionLogPrefix).handle(); + return new GraphRequestHandler( + request, session, context, sessionLogPrefix, getGraphBinaryModule()) + .handle(); } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index a73180d8160..2cad4da2459 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -22,6 +22,8 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.RequestThrottlingException; @@ -93,7 +95,7 @@ public class GraphRequestHandler implements Throttled { private final DefaultSession 
session; - private final InternalDriverContext context; + private final DseDriverContext context; private Queue queryPlan; private final DriverExecutionProfile executionProfile; @@ -102,7 +104,7 @@ public class GraphRequestHandler implements Throttled { private final boolean isIdempotent; protected final CompletableFuture result; private final Message message; - private final String subProtocol; + private final GraphProtocol subProtocol; private final EventExecutor scheduler; /** @@ -126,6 +128,8 @@ public class GraphRequestHandler implements Throttled { private final List inFlightCallbacks; private final RetryPolicy retryPolicy; private final RequestThrottler throttler; + private final Map queryCustomPayload; + private final GraphBinaryModule graphBinaryModule; // The errors on the nodes that were already tried (lazily initialized on the first error). // We don'traversals use a map because nodes can appear multiple times. @@ -135,7 +139,8 @@ public GraphRequestHandler( @NonNull GraphStatement graphStatement, @NonNull DefaultSession dseSession, @NonNull InternalDriverContext context, - @NonNull String sessionLogPrefix) { + @NonNull String sessionLogPrefix, + @NonNull GraphBinaryModule graphBinaryModule) { this.startTimeNanos = System.nanoTime(); this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); Preconditions.checkArgument( @@ -148,7 +153,9 @@ public GraphRequestHandler( LOG.trace("[{}] Creating new Graph request handler for request {}", logPrefix, graphStatement); this.graphStatement = graphStatement; this.session = dseSession; - this.context = context; + + Preconditions.checkArgument(context instanceof DseDriverContext); + this.context = ((DseDriverContext) context); this.executionProfile = GraphConversions.resolveExecutionProfile(this.graphStatement, this.context); @@ -186,12 +193,26 @@ public GraphRequestHandler( this.scheduledExecutions = isIdempotent ? 
new CopyOnWriteArrayList<>() : null; this.inFlightCallbacks = new CopyOnWriteArrayList<>(); + this.graphBinaryModule = graphBinaryModule; + + this.subProtocol = GraphConversions.inferSubProtocol(this.graphStatement, executionProfile); + LOG.debug("[{}], Graph protocol used for query: {}", logPrefix, subProtocol); - this.subProtocol = - GraphConversions.inferSubProtocol(this.graphStatement, executionProfile, session); this.message = GraphConversions.createMessageFromGraphStatement( - this.graphStatement, subProtocol, executionProfile, context); + this.graphStatement, + subProtocol, + executionProfile, + this.context, + this.graphBinaryModule); + + this.queryCustomPayload = + GraphConversions.createCustomPayload( + this.graphStatement, + subProtocol, + executionProfile, + this.context, + this.graphBinaryModule); this.throttler = context.getRequestThrottler(); this.throttler.register(this); @@ -276,12 +297,7 @@ private void sendRequest( node, channel, currentExecutionIndex, retryCount, scheduleNextExecution, logPrefix); channel - .write( - message, - graphStatement.isTracing(), - GraphConversions.createCustomPayload( - graphStatement, subProtocol, executionProfile, context), - perRequestCallback) + .write(message, graphStatement.isTracing(), queryCustomPayload, perRequestCallback) .addListener(perRequestCallback); } } @@ -495,11 +511,17 @@ private void setFinalResult( Queue graphNodes = new ArrayDeque<>(); for (List row : ((Rows) resultMessage).getData()) { - graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); + if (subProtocol.isGraphBinary()) { + graphNodes.offer( + GraphConversions.createGraphBinaryGraphNode( + row, GraphRequestHandler.this.graphBinaryModule)); + } else { + graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); + } } DefaultAsyncGraphResultSet resultSet = - new DefaultAsyncGraphResultSet(executionInfo, graphNodes); + new DefaultAsyncGraphResultSet(executionInfo, graphNodes, subProtocol); if (result.complete(resultSet)) { 
cancelScheduledTasks(); throttler.signalSuccess(GraphRequestHandler.this); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java index 0e2d832f0aa..fdf3d4dccb1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -41,15 +41,12 @@ class GraphSONUtils { - public static final String GRAPHSON_1_0 = "graphson-1.0"; - public static final String GRAPHSON_2_0 = "graphson-2.0"; - public static final String GRAPHSON_3_0 = "graphson-3.0"; - private static final LoadingCache OBJECT_MAPPERS = + private static final LoadingCache OBJECT_MAPPERS = CacheBuilder.newBuilder() .build( - new CacheLoader() { + new CacheLoader() { @Override - public ObjectMapper load(@NonNull String graphSubProtocol) throws Exception { + public ObjectMapper load(@NonNull GraphProtocol graphSubProtocol) throws Exception { switch (graphSubProtocol) { case GRAPHSON_1_0: com.datastax.oss.driver.api.core.Version driverVersion = @@ -100,7 +97,7 @@ public ObjectMapper load(@NonNull String graphSubProtocol) throws Exception { default: throw new IllegalStateException( - String.format("Unknown graph sub-protocol: {%s}", graphSubProtocol)); + String.format("GraphSON sub-protocol unknown: {%s}", graphSubProtocol)); } } }); @@ -112,12 +109,12 @@ public ObjectMapper load(@NonNull String graphSubProtocol) throws Exception { .mapper(GraphSONMapper.build().version(GraphSONVersion.V1_0).create()) .create()); - static ByteBuffer serializeToByteBuffer(Object object, String graphSubProtocol) + static ByteBuffer serializeToByteBuffer(Object object, GraphProtocol graphSubProtocol) throws IOException { return ByteBuffer.wrap(serializeToBytes(object, graphSubProtocol)); } - static byte[] serializeToBytes(Object object, String graphSubProtocol) throws IOException { + static 
byte[] serializeToBytes(Object object, GraphProtocol graphSubProtocol) throws IOException { try { return OBJECT_MAPPERS.get(graphSubProtocol).writeValueAsBytes(object); } catch (ExecutionException e) { @@ -126,7 +123,7 @@ static byte[] serializeToBytes(Object object, String graphSubProtocol) throws IO } } - static GraphNode createGraphNode(List data, String graphSubProtocol) + static GraphNode createGraphNode(List data, GraphProtocol graphSubProtocol) throws IOException { try { ObjectMapper mapper = OBJECT_MAPPERS.get(graphSubProtocol); @@ -139,7 +136,7 @@ static GraphNode createGraphNode(List data, String graphSubProtocol) default: // Should already be caught when we lookup in the cache throw new AssertionError( - String.format("Unknown graph sub-protocol: {%s}", graphSubProtocol)); + String.format("Unknown GraphSON sub-protocol: {%s}", graphSubProtocol)); } } catch (ExecutionException e) { Throwables.throwIfUnchecked(e); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java new file mode 100644 index 00000000000..954326fef9d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +/** + * Convenience class for dynamic types implemented as Custom types in GraphBinary. This class will + * take care of handling {value_length} automatically for implementing classes. {@link + * #writeDynamicCustomValue(Object, ByteBuf, GraphBinaryWriter)} and {@link + * #readDynamicCustomValue(ByteBuf, GraphBinaryReader)} only need to handle writing the internal + * components of the custom type. + * + * @param the java type the implementing classes will encode and decode. + */ +public abstract class AbstractDynamicGraphBinaryCustomSerializer + extends AbstractSimpleGraphBinaryCustomSerializer { + protected abstract void writeDynamicCustomValue( + T value, ByteBuf buffer, GraphBinaryWriter context) throws SerializationException; + + protected abstract T readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) + throws SerializationException; + + @Override + protected T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) + throws SerializationException { + int initialIndex = buffer.readerIndex(); + + // read actual custom value + T read = readDynamicCustomValue(buffer, context); + + // make sure we didn't read more than what was input as {value_length} + checkValueSize((buffer.readerIndex() - initialIndex), valueLength); + + return read; + } + + @Override + protected void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException { + // Store the current writer index + final int valueLengthIndex = buffer.writerIndex(); + + // Write a dummy length that will be overwritten at 
the end of this method + buffer.writeInt(0); + + // Custom type's writer logic + writeDynamicCustomValue(value, buffer, context); + + // value_length = diff written - 4 bytes for the dummy length + final int valueLength = buffer.writerIndex() - valueLengthIndex - GraphBinaryUtils.sizeOfInt(); + + // Go back, write the {value_length} and then reset back the writer index + buffer.markWriterIndex().writerIndex(valueLengthIndex).writeInt(valueLength).resetWriterIndex(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java new file mode 100644 index 00000000000..fc6bb8012cf --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java @@ -0,0 +1,143 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.DataType; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.types.CustomTypeSerializer; + +/** + * A base custom type serializer for DSE types that handles most of the boiler plate code associated + * with GraphBinary's custom types. + * + *

      The full format of a custom type in GraphBinary is the following: + * + *

      {type_code}{custom_type_name}{custom_type_info_length}{custom_type_info_bytes}{value_flag}{value_length}{value_bytes} + * + *

      This class is made to handle + * {type_code}{custom_type_name}{custom_type_info_length}{custom_type_info_bytes}{value_flag} for + * DSE types. + * + *

      Implementing classes are still in charge of encoding {value_length}{value_bytes} in the {@link + * #readCustomValue(int, ByteBuf, GraphBinaryReader)} implementations. + * + *

      Implementing classes must override {@link CustomTypeSerializer#getTypeName()} with their own + * type name. + * + * @param the java type the implementing classes will encode and decode. + */ +abstract class AbstractSimpleGraphBinaryCustomSerializer implements CustomTypeSerializer { + AbstractSimpleGraphBinaryCustomSerializer() { + super(); + } + + protected static final String INCORRECT_VALUE_LENGTH_ERROR_MESSAGE = + "{value_length} read for this value does not correspond to the size of a '%s' value. [%s] bytes required but got [%s]"; + + protected abstract T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) + throws SerializationException; + + protected abstract void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException; + + protected void checkValueSize(int lengthRequired, int lengthFound) { + Preconditions.checkArgument( + lengthFound == lengthRequired, + INCORRECT_VALUE_LENGTH_ERROR_MESSAGE, + getTypeName(), + lengthRequired, + lengthFound); + } + + @Override + public DataType getDataType() { + return DataType.CUSTOM; + } + + @Override + public T read(ByteBuf buffer, GraphBinaryReader context) throws SerializationException { + // the type serializer registry will take care of deserializing {custom_type_name} + // read {custom_type_info_length} and verify it is 0. 
+ // See #write(T, ByteBuf, GraphBinaryWriter) for why it is set to 0 + if (context.readValue(buffer, Integer.class, false) != 0) { + throw new SerializationException( + "{custom_type_info} should not be provided for this custom type"); + } + + return readValue(buffer, context, true); + } + + @Override + public T readValue(ByteBuf buffer, GraphBinaryReader context, boolean nullable) + throws SerializationException { + if (nullable) { + // read {value_flag} + final byte valueFlag = buffer.readByte(); + + // if value is null and the value is nullable + if ((valueFlag & 1) == 1) { + return null; + } + // Note: we don't error out if the valueFlag == "value is null" and nullable == false because + // the serializer + // should have errored out at write time if that was the case. + } + + // Read the byte length of the value bytes + final int valueLength = buffer.readInt(); + + if (valueLength <= 0) { + throw new SerializationException(String.format("Unexpected value length: %d", valueLength)); + } + + if (valueLength > buffer.readableBytes()) { + throw new SerializationException( + String.format( + "Not enough readable bytes: %d bytes required for value (%d bytes available)", + valueLength, buffer.readableBytes())); + } + + // subclasses are responsible for reading {value} + return readCustomValue(valueLength, buffer, context); + } + + @Override + public void write(final T value, final ByteBuf buffer, final GraphBinaryWriter context) + throws SerializationException { + // the type serializer registry will take care of serializing {custom_type_name} + // write "{custom_type_info_length}" to 0 because we don't need it for the DSE types + context.writeValue(0, buffer, false); + writeValue(value, buffer, context, true); + } + + @Override + public void writeValue( + final T value, final ByteBuf buffer, final GraphBinaryWriter context, final boolean nullable) + throws SerializationException { + if (value == null) { + if (!nullable) { + throw new 
SerializationException("Unexpected null value when nullable is false"); + } + + // writes {value_flag} to "1" which means "the value is null" + context.writeValueFlagNull(buffer); + return; + } + + if (nullable) { + // writes {value_flag} to "0" which means "value is not null" + context.writeValueFlagNone(buffer); + } + + // sub classes will be responsible for writing {value_length} and {value_bytes} + writeCustomValue(value, buffer, context); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java new file mode 100644 index 00000000000..64ebf74d9a1 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.ByteBufUtil; +import com.datastax.oss.driver.api.core.data.GettableByIndex; +import com.datastax.oss.driver.api.core.data.SettableByIndex; +import com.datastax.oss.driver.api.core.type.*; +import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; +import com.datastax.oss.driver.internal.core.type.DataTypeHelper; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.PrimitiveCodec; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.result.RawType; +import edu.umd.cs.findbugs.annotations.Nullable; +import 
io.netty.buffer.ByteBuf; +import java.nio.BufferUnderflowException; +import java.nio.ByteBuffer; +import java.util.Objects; + +class ComplexTypeSerializerUtil { + + private static final PrimitiveCodec protocolCodec = + new ByteBufPrimitiveCodec(GraphBinaryModule.ALLOCATOR); + + static void encodeTypeDefinition(DataType type, ByteBuf buffer, DseDriverContext driverContext) { + RawType protocolType = toProtocolSpec(type); + protocolType.encode(buffer, protocolCodec, driverContext.getProtocolVersion().getCode()); + } + + static DataType decodeTypeDefinition(ByteBuf buffer, DseDriverContext driverContext) { + RawType type = + RawType.decode(buffer, protocolCodec, driverContext.getProtocolVersion().getCode()); + return DataTypeHelper.fromProtocolSpec(type, driverContext); + } + + /* Netty-based encoding of UDT values, based on the UdtCoded.encode() method, but using Netty buffers directly to avoid + unnecessary NIO ByteBuffer copies. */ + static void encodeValue(@Nullable GettableByIndex value, ByteBuf nettyBuf) { + if (value == null) { + return; + } + + for (int i = 0; i < value.size(); i++) { + ByteBuffer fieldBuffer = value.getBytesUnsafe(i); + if (fieldBuffer == null) { + nettyBuf.writeInt(-1); + } else { + nettyBuf.writeInt(fieldBuffer.remaining()); + nettyBuf.writeBytes(fieldBuffer.duplicate()); + } + } + } + + /* This method will move forward the netty buffer given in parameter based on the UDT value read. + Content of the method is roughly equivalent to UdtCodec.decode(), but using Netty buffers directly to avoid + unnecessary NIO ByteBuffer copies. */ + static > T decodeValue(ByteBuf nettyBuf, T val, int size) { + try { + for (int i = 0; i < size; i++) { + int fieldSize = nettyBuf.readInt(); + if (fieldSize >= 0) { + // the reassignment is to shut down the error-prone warning about ignoring return values. 
+ val = val.setBytesUnsafe(i, ByteBufUtil.readBytes(nettyBuf, fieldSize)); + } + } + return val; + } catch (BufferUnderflowException e) { + throw new IllegalArgumentException("Not enough bytes to deserialize a UDT value", e); + } + } + + private static RawType toProtocolSpec(DataType dataType) { + int id = dataType.getProtocolCode(); + RawType type = RawType.PRIMITIVES.get(id); + if (type != null) { + return type; + } + + switch (id) { + case ProtocolConstants.DataType.CUSTOM: + CustomType customType = ((CustomType) dataType); + type = new RawType.RawCustom(customType.getClassName()); + break; + case ProtocolConstants.DataType.LIST: + ListType listType = ((ListType) dataType); + type = new RawType.RawList(toProtocolSpec(listType.getElementType())); + break; + case ProtocolConstants.DataType.SET: + SetType setType = ((SetType) dataType); + type = new RawType.RawSet(toProtocolSpec(setType.getElementType())); + break; + case ProtocolConstants.DataType.MAP: + MapType mapType = ((MapType) dataType); + type = + new RawType.RawMap( + toProtocolSpec(mapType.getKeyType()), toProtocolSpec(mapType.getValueType())); + break; + case ProtocolConstants.DataType.TUPLE: + TupleType tupleType = ((TupleType) dataType); + ImmutableList.Builder subTypesList = + ImmutableList.builderWithExpectedSize(tupleType.getComponentTypes().size()); + for (int i = 0; i < tupleType.getComponentTypes().size(); i++) { + subTypesList.add(toProtocolSpec(tupleType.getComponentTypes().get(i))); + } + type = new RawType.RawTuple(subTypesList.build()); + break; + case ProtocolConstants.DataType.UDT: + UserDefinedType userDefinedType = ((UserDefinedType) dataType); + ImmutableMap.Builder subTypesMap = + ImmutableMap.builderWithExpectedSize(userDefinedType.getFieldNames().size()); + for (int i = 0; i < userDefinedType.getFieldTypes().size(); i++) { + subTypesMap.put( + userDefinedType.getFieldNames().get(i).asInternal(), + toProtocolSpec(userDefinedType.getFieldTypes().get(i))); + } + type = + new 
RawType.RawUdt( + Objects.requireNonNull(userDefinedType.getKeyspace()).asInternal(), + userDefinedType.getName().asInternal(), + subTypesMap.build()); + break; + default: + throw new IllegalArgumentException("Unsupported type: " + dataType.asCql(true, true)); + } + return type; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java new file mode 100644 index 00000000000..1b8e595341a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.oss.driver.api.core.data.CqlDuration; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public class CqlDurationSerializer extends AbstractSimpleGraphBinaryCustomSerializer { + + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_DURATION_TYPE_NAME; + } + + @Override + protected CqlDuration readCustomValue( + final int valueLength, final ByteBuf buffer, final GraphBinaryReader context) + throws SerializationException { + checkValueSize(GraphBinaryUtils.sizeOfDuration(), valueLength); + return CqlDuration.newInstance( + context.readValue(buffer, Integer.class, false), + context.readValue(buffer, Integer.class, false), + context.readValue(buffer, Long.class, false)); + } + + @Override + protected void writeCustomValue(CqlDuration value, ByteBuf buffer, GraphBinaryWriter context) + 
throws SerializationException { + context.writeValue(GraphBinaryUtils.sizeOfDuration(), buffer, false); + context.writeValue(value.getMonths(), buffer, false); + context.writeValue(value.getDays(), buffer, false); + context.writeValue(value.getNanoseconds(), buffer, false); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java new file mode 100644 index 00000000000..0c02a135f05 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java @@ -0,0 +1,37 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public class DistanceSerializer extends AbstractSimpleGraphBinaryCustomSerializer { + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_DISTANCE_TYPE_NAME; + } + + @Override + protected Distance readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) + throws SerializationException { + Point p = context.readValue(buffer, Point.class, false); + checkValueSize(GraphBinaryUtils.sizeOfDistance(p), valueLength); + return new Distance(p, context.readValue(buffer, Double.class, false)); + } + + @Override + protected void writeCustomValue(Distance value, ByteBuf buffer, GraphBinaryWriter context) + throws 
SerializationException { + context.writeValue(GraphBinaryUtils.sizeOfDistance(value.getCenter()), buffer, false); + context.writeValue(value.getCenter(), buffer, false); + context.writeValue(value.getRadius(), buffer, false); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java new file mode 100644 index 00000000000..f3afe4e23f3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.internal.core.graph.EditDistance; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public class EditDistanceSerializer + extends AbstractSimpleGraphBinaryCustomSerializer { + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_EDIT_DISTANCE_TYPE_NAME; + } + + @Override + protected EditDistance readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) + throws SerializationException { + int distance = context.readValue(buffer, Integer.class, false); + String query = context.readValue(buffer, String.class, false); + checkValueSize(GraphBinaryUtils.sizeOfEditDistance(query), valueLength); + + return new EditDistance(query, distance); + } + + @Override + protected void writeCustomValue(EditDistance value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException { + 
context.writeValue(GraphBinaryUtils.sizeOfEditDistance(value.query), buffer, false); + context.writeValue(value.distance, buffer, false); + context.writeValue(value.query, buffer, false); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java new file mode 100644 index 00000000000..302ba40f3f9 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.Geometry; +import com.datastax.dse.driver.internal.core.graph.ByteBufUtil; +import io.netty.buffer.ByteBuf; +import java.nio.ByteBuffer; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public abstract class GeometrySerializer + extends AbstractSimpleGraphBinaryCustomSerializer { + public abstract T fromWellKnownBinary(ByteBuffer buffer); + + @Override + protected T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) + throws SerializationException { + return fromWellKnownBinary(ByteBufUtil.readBytes(buffer, valueLength)); + } + + @Override + protected void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException { + ByteBuffer bb = value.asWellKnownBinary(); + + // writing the {value_length} + context.writeValue(bb.remaining(), buffer, false); + buffer.writeBytes(bb); + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java new file mode 100644 index 00000000000..696a81ad0e2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.dse.driver.internal.core.graph.EditDistance; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.data.UdtValue; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.UnpooledByteBufAllocator; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; + +public class GraphBinaryModule { + public static final UnpooledByteBufAllocator ALLOCATOR = new UnpooledByteBufAllocator(false); + + static final String GRAPH_BINARY_POINT_TYPE_NAME = "driver.dse.geometry.Point"; + static final String GRAPH_BINARY_LINESTRING_TYPE_NAME = "driver.dse.geometry.LineString"; + static final String GRAPH_BINARY_POLYGON_TYPE_NAME = 
"driver.dse.geometry.Polygon"; + static final String GRAPH_BINARY_DISTANCE_TYPE_NAME = "driver.dse.geometry.Distance"; + static final String GRAPH_BINARY_DURATION_TYPE_NAME = "driver.core.Duration"; + static final String GRAPH_BINARY_EDIT_DISTANCE_TYPE_NAME = "driver.dse.search.EditDistance"; + static final String GRAPH_BINARY_TUPLE_VALUE_TYPE_NAME = "driver.core.TupleValue"; + static final String GRAPH_BINARY_UDT_VALUE_TYPE_NAME = "driver.core.UDTValue"; + + private final GraphBinaryReader reader; + private final GraphBinaryWriter writer; + + public GraphBinaryModule(GraphBinaryReader reader, GraphBinaryWriter writer) { + this.reader = reader; + this.writer = writer; + } + + public static TypeSerializerRegistry createDseTypeSerializerRegistry( + DseDriverContext driverContext) { + return TypeSerializerRegistry.build() + .addCustomType(CqlDuration.class, new CqlDurationSerializer()) + .addCustomType(Point.class, new PointSerializer()) + .addCustomType(LineString.class, new LineStringSerializer()) + .addCustomType(Polygon.class, new PolygonSerializer()) + .addCustomType(Distance.class, new DistanceSerializer()) + .addCustomType(EditDistance.class, new EditDistanceSerializer()) + .addCustomType(TupleValue.class, new TupleValueSerializer(driverContext)) + .addCustomType(UdtValue.class, new UdtValueSerializer(driverContext)) + .create(); + } + + @SuppressWarnings("TypeParameterUnusedInFormals") + public T deserialize(final ByteBuf buffer) throws SerializationException { + return reader.read(buffer); + } + + public ByteBuf serialize(final T value) throws SerializationException { + return serialize(value, ALLOCATOR); + } + + public ByteBuf serialize(final T value, final ByteBufAllocator allocator) + throws SerializationException { + return serialize(value, allocator.heapBuffer()); + } + + public ByteBuf serialize(final T value, final ByteBuf buffer) throws SerializationException { + try { + writer.write(value, buffer); + return buffer; + } catch (Exception e) { + 
buffer.release(); + throw e; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java new file mode 100644 index 00000000000..28c558a5da2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import java.nio.charset.StandardCharsets; + +class GraphBinaryUtils { + static int sizeOfInt() { + return 4; + } + + static int sizeOfLong() { + return 8; + } + + static int sizeOfDouble() { + return 8; + } + + static int sizeOfPoint(Point point) { + return point.asWellKnownBinary().remaining(); + } + + /* assumes UTF8 */ + static int sizeOfString(String s) { + // length + data length + return sizeOfInt() + s.getBytes(StandardCharsets.UTF_8).length; + } + + static int sizeOfDuration() { + return sizeOfInt() + sizeOfInt() + sizeOfLong(); + } + + static int sizeOfDistance(Point point) { + return sizeOfPoint(point) + sizeOfDouble(); + } + + static int sizeOfEditDistance(String s) { + return sizeOfInt() + sizeOfString(s); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java new file mode 100644 index 00000000000..835dbeafa34 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import java.nio.ByteBuffer; + +public class LineStringSerializer extends GeometrySerializer { + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_LINESTRING_TYPE_NAME; + } + + @Override + public LineString fromWellKnownBinary(ByteBuffer buffer) { + return LineString.fromWellKnownBinary(buffer); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java new file mode 100644 index 00000000000..49529eea3e0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java @@ -0,0 +1,23 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import java.nio.ByteBuffer; + +public class PointSerializer extends GeometrySerializer { + + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_POINT_TYPE_NAME; + } + + @Override + public Point fromWellKnownBinary(ByteBuffer buffer) { + return Point.fromWellKnownBinary(buffer); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java new file mode 100644 index 00000000000..e608ad73932 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java @@ -0,0 +1,22 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import java.nio.ByteBuffer; + +public class PolygonSerializer extends GeometrySerializer { + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_POLYGON_TYPE_NAME; + } + + @Override + public Polygon fromWellKnownBinary(ByteBuffer buffer) { + return Polygon.fromWellKnownBinary(buffer); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java new file mode 100644 index 00000000000..368bd5baac3 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java @@ -0,0 +1,56 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.TupleType; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public class TupleValueSerializer extends AbstractDynamicGraphBinaryCustomSerializer { + + private final DseDriverContext driverContext; + + public TupleValueSerializer(DseDriverContext driverContext) { + this.driverContext = driverContext; + } + + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_TUPLE_VALUE_TYPE_NAME; + } + + @Override + public TupleValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) { + // read the type first + DataType type = ComplexTypeSerializerUtil.decodeTypeDefinition(buffer, driverContext); + + assert type instanceof TupleType + : "GraphBinary TupleValue deserializer was called on a value that is not encoded as a TupleValue."; + + TupleType tupleType = (TupleType) type; + TupleValue value = tupleType.newValue(); + + // then decode the values from the buffer + return ComplexTypeSerializerUtil.decodeValue( + buffer, value, tupleType.getComponentTypes().size()); + } + + @Override + public void writeDynamicCustomValue(TupleValue value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException { + // write type first in native protocol + ComplexTypeSerializerUtil.encodeTypeDefinition(value.getType(), buffer, driverContext); + + // write value after + ComplexTypeSerializerUtil.encodeValue(value, buffer); + } +} diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java new file mode 100644 index 00000000000..dc97cb19aa4 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; + +public class UdtValueSerializer extends AbstractDynamicGraphBinaryCustomSerializer { + private final DseDriverContext driverContext; + + public UdtValueSerializer(DseDriverContext driverContext) { + this.driverContext = driverContext; + } + + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_UDT_VALUE_TYPE_NAME; + } + + @Override + public UdtValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) { + // read type definition first + DataType driverType = ComplexTypeSerializerUtil.decodeTypeDefinition(buffer, driverContext); + + assert driverType instanceof UserDefinedType + : "GraphBinary UdtValue deserializer was called on a value that is not encoded as a UdtValue."; + + UserDefinedType userDefinedType = (UserDefinedType) driverType; + UdtValue value = userDefinedType.newValue(); + + // then read values + return ComplexTypeSerializerUtil.decodeValue( + buffer, value, 
userDefinedType.getFieldTypes().size()); + } + + @Override + public void writeDynamicCustomValue(UdtValue value, ByteBuf buffer, GraphBinaryWriter context) { + // write type first in native protocol format + ComplexTypeSerializerUtil.encodeTypeDefinition(value.getType(), buffer, driverContext); + // write value after + ComplexTypeSerializerUtil.encodeValue(value, buffer); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java new file mode 100644 index 00000000000..0223c341e3a --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java @@ -0,0 +1,138 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema; + +import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import java.util.Objects; + +public class DefaultDseEdgeMetadata implements DseEdgeMetadata { + + @NonNull private final CqlIdentifier labelName; + + @NonNull private final CqlIdentifier fromTable; + @NonNull private final CqlIdentifier fromLabel; + @NonNull private final List fromPartitionKeyColumns; + @NonNull private final List fromClusteringColumns; + + @NonNull private final CqlIdentifier toTable; + @NonNull private final CqlIdentifier toLabel; + @NonNull private final List toPartitionKeyColumns; + @NonNull private final List toClusteringColumns; + + public DefaultDseEdgeMetadata( + @NonNull CqlIdentifier labelName, + @NonNull CqlIdentifier fromTable, 
+ @NonNull CqlIdentifier fromLabel, + @NonNull List fromPartitionKeyColumns, + @NonNull List fromClusteringColumns, + @NonNull CqlIdentifier toTable, + @NonNull CqlIdentifier toLabel, + @NonNull List toPartitionKeyColumns, + @NonNull List toClusteringColumns) { + this.labelName = Preconditions.checkNotNull(labelName); + this.fromTable = Preconditions.checkNotNull(fromTable); + this.fromLabel = Preconditions.checkNotNull(fromLabel); + this.fromPartitionKeyColumns = Preconditions.checkNotNull(fromPartitionKeyColumns); + this.fromClusteringColumns = Preconditions.checkNotNull(fromClusteringColumns); + this.toTable = Preconditions.checkNotNull(toTable); + this.toLabel = Preconditions.checkNotNull(toLabel); + this.toPartitionKeyColumns = Preconditions.checkNotNull(toPartitionKeyColumns); + this.toClusteringColumns = Preconditions.checkNotNull(toClusteringColumns); + } + + @NonNull + @Override + public CqlIdentifier getLabelName() { + return labelName; + } + + @NonNull + @Override + public CqlIdentifier getFromTable() { + return fromTable; + } + + @NonNull + @Override + public CqlIdentifier getFromLabel() { + return fromLabel; + } + + @NonNull + @Override + public List getFromPartitionKeyColumns() { + return fromPartitionKeyColumns; + } + + @NonNull + @Override + public List getFromClusteringColumns() { + return fromClusteringColumns; + } + + @NonNull + @Override + public CqlIdentifier getToTable() { + return toTable; + } + + @NonNull + @Override + public CqlIdentifier getToLabel() { + return toLabel; + } + + @NonNull + @Override + public List getToPartitionKeyColumns() { + return toPartitionKeyColumns; + } + + @NonNull + @Override + public List getToClusteringColumns() { + return toClusteringColumns; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof DseEdgeMetadata) { + DseEdgeMetadata that = (DseEdgeMetadata) other; + return Objects.equals(this.labelName, that.getLabelName()) + && 
Objects.equals(this.fromTable, that.getFromTable()) + && Objects.equals(this.fromLabel, that.getFromLabel()) + && Objects.equals(this.fromPartitionKeyColumns, that.getFromPartitionKeyColumns()) + && Objects.equals(this.fromClusteringColumns, that.getFromClusteringColumns()) + && Objects.equals(this.toTable, that.getToTable()) + && Objects.equals(this.toLabel, that.getToLabel()) + && Objects.equals(this.toPartitionKeyColumns, that.getToPartitionKeyColumns()) + && Objects.equals(this.toClusteringColumns, that.getToClusteringColumns()); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash( + labelName, + fromTable, + fromLabel, + fromPartitionKeyColumns, + fromClusteringColumns, + toTable, + toLabel, + toPartitionKeyColumns, + toClusteringColumns); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java index 50464c568a0..c6fd89e26bb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java @@ -24,8 +24,10 @@ import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; import com.datastax.oss.driver.api.core.type.UserDefinedType; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; import java.util.Objects; +import java.util.Optional; import net.jcip.annotations.Immutable; @Immutable @@ -34,6 +36,7 @@ public class DefaultDseKeyspaceMetadata implements DseKeyspaceMetadata { @NonNull private final CqlIdentifier name; private final boolean durableWrites; private final boolean virtual; + @Nullable private final String graphEngine; @NonNull private final Map replication; @NonNull private final Map types; @NonNull 
private final Map tables; @@ -45,6 +48,7 @@ public DefaultDseKeyspaceMetadata( @NonNull CqlIdentifier name, boolean durableWrites, boolean virtual, + @Nullable String graphEngine, @NonNull Map replication, @NonNull Map types, @NonNull Map tables, @@ -54,6 +58,7 @@ public DefaultDseKeyspaceMetadata( this.name = name; this.durableWrites = durableWrites; this.virtual = virtual; + this.graphEngine = graphEngine; this.replication = replication; this.types = types; this.tables = tables; @@ -78,6 +83,12 @@ public boolean isVirtual() { return virtual; } + @NonNull + @Override + public Optional getGraphEngine() { + return Optional.ofNullable(graphEngine); + } + @NonNull @Override public Map getReplication() { @@ -123,6 +134,7 @@ public boolean equals(Object other) { return Objects.equals(this.name, that.getName()) && this.durableWrites == that.isDurableWrites() && this.virtual == that.isVirtual() + && Objects.equals(this.graphEngine, that.getGraphEngine().orElse(null)) && Objects.equals(this.replication, that.getReplication()) && Objects.equals(this.types, that.getUserDefinedTypes()) && Objects.equals(this.tables, that.getTables()) @@ -137,6 +149,15 @@ public boolean equals(Object other) { @Override public int hashCode() { return Objects.hash( - name, durableWrites, virtual, replication, types, tables, views, functions, aggregates); + name, + durableWrites, + virtual, + graphEngine, + replication, + types, + tables, + views, + functions, + aggregates); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java index 91c3e6e7723..92b3c14a939 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java @@ -15,7 +15,9 @@ */ package 
com.datastax.dse.driver.internal.core.metadata.schema; +import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; @@ -43,6 +45,8 @@ public class DefaultDseTableMetadata implements DseTableMetadata { @NonNull private final Map columns; @NonNull private final Map options; @NonNull private final Map indexes; + @Nullable private final DseVertexMetadata vertex; + @Nullable private final DseEdgeMetadata edge; public DefaultDseTableMetadata( @NonNull CqlIdentifier keyspace, @@ -54,7 +58,9 @@ public DefaultDseTableMetadata( @NonNull Map clusteringColumns, @NonNull Map columns, @NonNull Map options, - @NonNull Map indexes) { + @NonNull Map indexes, + @Nullable DseVertexMetadata vertex, + @Nullable DseEdgeMetadata edge) { this.keyspace = keyspace; this.name = name; this.id = id; @@ -65,6 +71,8 @@ public DefaultDseTableMetadata( this.columns = columns; this.options = options; this.indexes = indexes; + this.vertex = vertex; + this.edge = edge; } @NonNull @@ -125,6 +133,18 @@ public Map getIndexes() { return indexes; } + @NonNull + @Override + public Optional getVertex() { + return Optional.ofNullable(vertex); + } + + @NonNull + @Override + public Optional getEdge() { + return Optional.ofNullable(edge); + } + @Override public boolean equals(Object other) { if (other == this) { @@ -133,13 +153,15 @@ public boolean equals(Object other) { DseTableMetadata that = (DseTableMetadata) other; return Objects.equals(this.keyspace, that.getKeyspace()) && Objects.equals(this.name, that.getName()) - && Objects.equals(Optional.ofNullable(this.id), that.getId()) + && Objects.equals(this.id, that.getId().orElse(null)) && 
this.compactStorage == that.isCompactStorage() && this.virtual == that.isVirtual() && Objects.equals(this.partitionKey, that.getPartitionKey()) && Objects.equals(this.clusteringColumns, that.getClusteringColumns()) && Objects.equals(this.columns, that.getColumns()) - && Objects.equals(this.indexes, that.getIndexes()); + && Objects.equals(this.indexes, that.getIndexes()) + && Objects.equals(this.vertex, that.getVertex().orElse(null)) + && Objects.equals(this.edge, that.getEdge().orElse(null)); } else { return false; } @@ -156,6 +178,8 @@ public int hashCode() { partitionKey, clusteringColumns, columns, - indexes); + indexes, + vertex, + edge); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java new file mode 100644 index 00000000000..e51b5ebb5b7 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema; + +import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +public class DefaultDseVertexMetadata implements DseVertexMetadata { + + @NonNull private final CqlIdentifier labelName; + + public DefaultDseVertexMetadata(@NonNull CqlIdentifier labelName) { + this.labelName = Preconditions.checkNotNull(labelName); + } + + @NonNull + @Override + public CqlIdentifier getLabelName() { + return labelName; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof DefaultDseVertexMetadata) { + DefaultDseVertexMetadata that = (DefaultDseVertexMetadata) other; + return Objects.equals(this.labelName, that.getLabelName()); + } else { + return false; + } + } + + @Override + public int hashCode() { + return labelName.hashCode(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java new file mode 100644 index 00000000000..7e0d5a2a226 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import java.util.List; + +public class ScriptHelper { + + public static void appendEdgeSide( + ScriptBuilder builder, + CqlIdentifier table, + CqlIdentifier label, + List partitionKeyColumns, + List clusteringColumns, + String keyword) { + builder.append(" ").append(keyword).append(label).append("("); + + if (partitionKeyColumns.size() == 1) { // PRIMARY KEY (k + builder.append(partitionKeyColumns.get(0)); + } else { // PRIMARY KEY ((k1, k2) + builder.append("("); + boolean first = true; + for (CqlIdentifier pkColumn : partitionKeyColumns) { + if (first) { + first = false; + } else { + builder.append(", "); + } + builder.append(pkColumn); + } + builder.append(")"); + } + // PRIMARY KEY (, cc1, cc2, cc3) + for (CqlIdentifier clusteringColumn : clusteringColumns) { + builder.append(", ").append(clusteringColumn); + } + builder.append(")"); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java index d87cc5e54de..6e1e42bcd0e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseSchemaParser.java @@ -21,6 +21,7 @@ import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseViewMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseKeyspaceMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.queries.Dse68SchemaRows; import 
com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; @@ -40,6 +41,8 @@ import com.datastax.oss.driver.internal.core.util.NanoTime; import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; import java.util.Collections; import java.util.Map; import net.jcip.annotations.ThreadSafe; @@ -47,7 +50,7 @@ import org.slf4j.LoggerFactory; /** - * Default parser implementation for Cassandra. + * Default parser implementation for DSE. * *

      For modularity, the code for each element row is split into separate classes (schema stuff is * not on the hot path, so creating a few extra objects doesn't matter). @@ -109,9 +112,12 @@ private DseKeyspaceMetadata parseKeyspace(AdminRow keyspaceRow) { // durable_writes boolean, // replication frozen> // ) + // + // DSE >= 6.8: same as Cassandra 3 + graph_engine text CqlIdentifier keyspaceId = CqlIdentifier.fromInternal(keyspaceRow.getString("keyspace_name")); boolean durableWrites = MoreObjects.firstNonNull(keyspaceRow.getBoolean("durable_writes"), false); + String graphEngine = keyspaceRow.getString("graph_engine"); Map replicationOptions; if (keyspaceRow.contains("strategy_class")) { @@ -133,6 +139,7 @@ private DseKeyspaceMetadata parseKeyspace(AdminRow keyspaceRow) { keyspaceId, durableWrites, false, + graphEngine, replicationOptions, types, parseTables(keyspaceId, types), @@ -148,8 +155,16 @@ private Map parseTypes(CqlIdentifier keyspaceId) private Map parseTables( CqlIdentifier keyspaceId, Map types) { ImmutableMap.Builder tablesBuilder = ImmutableMap.builder(); + Multimap vertices; + Multimap edges; + if (rows instanceof Dse68SchemaRows) { + vertices = ((Dse68SchemaRows) rows).vertices().get(keyspaceId); + edges = ((Dse68SchemaRows) rows).edges().get(keyspaceId); + } else { + vertices = edges = ImmutableMultimap.of(); + } for (AdminRow tableRow : rows.tables().get(keyspaceId)) { - DseTableMetadata table = tableParser.parseTable(tableRow, keyspaceId, types); + DseTableMetadata table = tableParser.parseTable(tableRow, keyspaceId, types, vertices, edges); if (table != null) { tablesBuilder.put(table.getName(), table); } @@ -206,6 +221,7 @@ private DseKeyspaceMetadata parseVirtualKeyspace(AdminRow keyspaceRow) { keyspaceId, durableWrites, true, + null, Collections.emptyMap(), Collections.emptyMap(), parseVirtualTables(keyspaceId), diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java index b803750f44f..17f86007b35 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -16,11 +16,15 @@ package com.datastax.dse.driver.internal.core.metadata.schema.parsing; import com.datastax.dse.driver.api.core.metadata.schema.DseColumnMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseIndexMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseColumnMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseEdgeMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseIndexMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseTableMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseVertexMetadata; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; @@ -31,6 +35,7 @@ import com.datastax.oss.driver.api.core.type.MapType; import com.datastax.oss.driver.api.core.type.SetType; import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.internal.core.CqlIdentifiers; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameCompositeParser; @@ -42,6 +47,7 @@ import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -62,7 +68,11 @@ public DseTableParser(SchemaRows rows, InternalDriverContext context) { } public DseTableMetadata parseTable( - AdminRow tableRow, CqlIdentifier keyspaceId, Map userTypes) { + AdminRow tableRow, + CqlIdentifier keyspaceId, + Map userTypes, + Multimap vertices, + Multimap edges) { // Cassandra <= 2.2: // CREATE TABLE system.schema_columnfamilies ( // keyspace_name text, @@ -228,7 +238,9 @@ public DseTableMetadata parseTable( clusteringColumnsBuilder.build(), allColumnsBuilder.build(), options, - indexesBuilder.build()); + indexesBuilder.build(), + buildVertex(tableId, vertices), + buildEdge(tableId, edges, vertices)); } DseTableMetadata parseVirtualTable(AdminRow tableRow, CqlIdentifier keyspaceId) { @@ -283,7 +295,9 @@ DseTableMetadata parseVirtualTable(AdminRow tableRow, CqlIdentifier keyspaceId) clusteringColumnsBuilder.build(), allColumnsBuilder.build(), Collections.emptyMap(), - Collections.emptyMap()); + Collections.emptyMap(), + null, + null); } // In C*<=2.2, index information is stored alongside the column. 
@@ -335,4 +349,75 @@ private DseIndexMetadata buildModernIndex( String target = options.get("target"); return new DefaultDseIndexMetadata(keyspaceId, tableId, name, kind, target, options); } + + private DseVertexMetadata buildVertex( + CqlIdentifier tableId, Multimap keyspaceVertices) { + + if (keyspaceVertices == null) { + return null; + } + Collection tableVertices = keyspaceVertices.get(tableId); + if (tableVertices == null || tableVertices.isEmpty()) { + return null; + } + + AdminRow row = tableVertices.iterator().next(); + return new DefaultDseVertexMetadata(getLabel(row)); + } + + private DseEdgeMetadata buildEdge( + CqlIdentifier tableId, + Multimap keyspaceEdges, + Multimap keyspaceVertices) { + + if (keyspaceEdges == null) { + return null; + } + + Collection tableEdges = keyspaceEdges.get(tableId); + if (tableEdges == null || tableEdges.isEmpty()) { + return null; + } + + AdminRow row = tableEdges.iterator().next(); + + CqlIdentifier fromTable = CqlIdentifier.fromInternal(row.getString("from_table")); + + CqlIdentifier toTable = CqlIdentifier.fromInternal(row.getString("to_table")); + + return new DefaultDseEdgeMetadata( + getLabel(row), + fromTable, + findVertexLabel(fromTable, keyspaceVertices, "incoming"), + CqlIdentifiers.wrap(row.getListOfString("from_partition_key_columns")), + CqlIdentifiers.wrap(row.getListOfString("from_clustering_columns")), + toTable, + findVertexLabel(toTable, keyspaceVertices, "outgoing"), + CqlIdentifiers.wrap(row.getListOfString("to_partition_key_columns")), + CqlIdentifiers.wrap(row.getListOfString("to_clustering_columns"))); + } + + private CqlIdentifier getLabel(AdminRow row) { + String rawLabel = row.getString("label_name"); + return (rawLabel == null || rawLabel.isEmpty()) ? null : CqlIdentifier.fromInternal(rawLabel); + } + + // system_schema.edges only contains vertex table names. We also expose the labels in our metadata + // objects, so we need to look them up in system_schema.vertices. 
+ private CqlIdentifier findVertexLabel( + CqlIdentifier table, + Multimap keyspaceVertices, + String directionForErrorMessage) { + Collection tableVertices = + (keyspaceVertices == null) ? null : keyspaceVertices.get(table); + if (tableVertices == null || tableVertices.isEmpty()) { + throw new IllegalArgumentException( + String.format( + "Missing vertex definition for %s table %s", + directionForErrorMessage, table.asCql(true))); + } + + AdminRow row = tableVertices.iterator().next(); + return getLabel(row); + } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java new file mode 100644 index 00000000000..281c8b6f751 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java @@ -0,0 +1,204 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema.queries; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueries; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; +import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.internal.core.util.NanoTime; +import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import io.netty.util.concurrent.EventExecutor; +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The system table queries to refresh the schema in DSE 6.8. + * + *

      There are two additional tables for per-table graph metadata. + */ +public class Dse68SchemaQueries implements SchemaQueries { + + private static final Logger LOG = LoggerFactory.getLogger(Dse68SchemaQueries.class); + + private final DriverChannel channel; + private final EventExecutor adminExecutor; + private final String logPrefix; + private final Duration timeout; + private final int pageSize; + private final String whereClause; + // The future we return from execute, completes when all the queries are done. + private final CompletableFuture schemaRowsFuture = new CompletableFuture<>(); + // A future that completes later, when the whole refresh is done. We just store it here to pass it + // down to the next step. + public final CompletableFuture refreshFuture; + private final long startTimeNs = System.nanoTime(); + + // All non-final fields are accessed exclusively on adminExecutor + private Dse68SchemaRows.Builder schemaRowsBuilder; + private int pendingQueries; + + public Dse68SchemaQueries( + DriverChannel channel, + CompletableFuture refreshFuture, + DriverExecutionProfile config, + String logPrefix) { + this.channel = channel; + this.adminExecutor = channel.eventLoop(); + this.refreshFuture = refreshFuture; + this.logPrefix = logPrefix; + this.timeout = config.getDuration(DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT); + this.pageSize = config.getInt(DefaultDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE); + + List refreshedKeyspaces = + config.isDefined(DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES) + ? 
config.getStringList(DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES) + : Collections.emptyList(); + this.whereClause = buildWhereClause(refreshedKeyspaces); + } + + private static String buildWhereClause(List refreshedKeyspaces) { + if (refreshedKeyspaces.isEmpty()) { + return ""; + } else { + StringBuilder builder = new StringBuilder(" WHERE keyspace_name in ("); + boolean first = true; + for (String keyspace : refreshedKeyspaces) { + if (first) { + first = false; + } else { + builder.append(","); + } + builder.append('\'').append(keyspace).append('\''); + } + return builder.append(")").toString(); + } + } + + @Override + public CompletionStage execute() { + RunOrSchedule.on(adminExecutor, this::executeOnAdminExecutor); + return schemaRowsFuture; + } + + private void executeOnAdminExecutor() { + assert adminExecutor.inEventLoop(); + + schemaRowsBuilder = new Dse68SchemaRows.Builder(refreshFuture, logPrefix); + + query( + "SELECT * FROM system_schema.keyspaces" + whereClause, + schemaRowsBuilder::withKeyspaces, + true); + query("SELECT * FROM system_schema.types" + whereClause, schemaRowsBuilder::withTypes, true); + query("SELECT * FROM system_schema.tables" + whereClause, schemaRowsBuilder::withTables, true); + query( + "SELECT * FROM system_schema.columns" + whereClause, schemaRowsBuilder::withColumns, true); + query( + "SELECT * FROM system_schema.indexes" + whereClause, schemaRowsBuilder::withIndexes, true); + query("SELECT * FROM system_schema.views" + whereClause, schemaRowsBuilder::withViews, true); + query( + "SELECT * FROM system_schema.functions" + whereClause, + schemaRowsBuilder::withFunctions, + true); + query( + "SELECT * FROM system_schema.aggregates" + whereClause, + schemaRowsBuilder::withAggregates, + true); + // Virtual tables (DSE 6.7+, C* 4.0+) + query( + "SELECT * FROM system_virtual_schema.keyspaces" + whereClause, + schemaRowsBuilder::withVirtualKeyspaces, + false); + query( + "SELECT * FROM system_virtual_schema.tables" + 
whereClause, + schemaRowsBuilder::withVirtualTables, + false); + query( + "SELECT * FROM system_virtual_schema.columns" + whereClause, + schemaRowsBuilder::withVirtualColumns, + false); + // Graph metadata (DSE 6.8+) + query( + "SELECT * FROM system_schema.vertices" + whereClause, + schemaRowsBuilder::withVertices, + true); + query("SELECT * FROM system_schema.edges" + whereClause, schemaRowsBuilder::withEdges, true); + } + + private void query( + String queryString, + Function, Dse68SchemaRows.Builder> builderUpdater, + boolean warnIfMissing) { + assert adminExecutor.inEventLoop(); + + pendingQueries += 1; + query(queryString) + .whenCompleteAsync( + (result, error) -> handleResult(result, error, builderUpdater, warnIfMissing), + adminExecutor); + } + + @VisibleForTesting + protected CompletionStage query(String query) { + return AdminRequestHandler.query(channel, query, timeout, pageSize, logPrefix).start(); + } + + private void handleResult( + AdminResult result, + Throwable error, + Function, Dse68SchemaRows.Builder> builderUpdater, + boolean warnIfMissing) { + + if (error != null) { + if (warnIfMissing || !error.getMessage().contains("does not exist")) { + Loggers.warnWithException( + LOG, + "[{}] Error during schema refresh, new metadata might be incomplete", + logPrefix, + error); + } + // Proceed without the results of this query, the rest of the schema refresh will run on a + // "best effort" basis + markQueryComplete(); + } else { + // Store the rows of the current page in the builder + schemaRowsBuilder = builderUpdater.apply(result); + if (result.hasNextPage()) { + result + .nextPage() + .whenCompleteAsync( + (nextResult, nextError) -> + handleResult(nextResult, nextError, builderUpdater, warnIfMissing), + adminExecutor); + } else { + markQueryComplete(); + } + } + } + + private void markQueryComplete() { + pendingQueries -= 1; + if (pendingQueries == 0) { + LOG.debug("[{}] Schema queries took {}", logPrefix, NanoTime.formatTimeSince(startTimeNs)); + 
schemaRowsFuture.complete(schemaRowsBuilder.build()); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java new file mode 100644 index 00000000000..0a774ec2479 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java @@ -0,0 +1,326 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.metadata.schema.queries; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeCqlNameParser; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeParser; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableListMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Dse68SchemaRows implements SchemaRows { + + private final DataTypeParser dataTypeParser; + private final CompletableFuture refreshFuture; + private final List keyspaces; + private final Multimap tables; + private 
final Multimap views; + private final Multimap types; + private final Multimap functions; + private final Multimap aggregates; + private final Map> columns; + private final Map> indexes; + private final Map> vertices; + private final Map> edges; + private final List virtualKeyspaces; + private final Multimap virtualTables; + private final Map> virtualColumns; + + private Dse68SchemaRows( + CompletableFuture refreshFuture, + List keyspaces, + Multimap tables, + Multimap views, + Map> columns, + Map> indexes, + Multimap types, + Multimap functions, + Multimap aggregates, + List virtualKeyspaces, + Multimap virtualTables, + Map> virtualColumns, + Map> vertices, + Map> edges) { + this.dataTypeParser = new DataTypeCqlNameParser(); + this.refreshFuture = refreshFuture; + this.keyspaces = keyspaces; + this.tables = tables; + this.views = views; + this.columns = columns; + this.indexes = indexes; + this.types = types; + this.functions = functions; + this.aggregates = aggregates; + this.virtualKeyspaces = virtualKeyspaces; + this.virtualTables = virtualTables; + this.virtualColumns = virtualColumns; + this.vertices = vertices; + this.edges = edges; + } + + @Override + public DataTypeParser dataTypeParser() { + return dataTypeParser; + } + + @Override + public CompletableFuture refreshFuture() { + return refreshFuture; + } + + @Override + public List keyspaces() { + return keyspaces; + } + + @Override + public Multimap tables() { + return tables; + } + + @Override + public Multimap views() { + return views; + } + + @Override + public Multimap types() { + return types; + } + + @Override + public Multimap functions() { + return functions; + } + + @Override + public Multimap aggregates() { + return aggregates; + } + + @Override + public Map> columns() { + return columns; + } + + @Override + public Map> indexes() { + return indexes; + } + + @Override + public List virtualKeyspaces() { + return virtualKeyspaces; + } + + @Override + public Multimap virtualTables() { + return 
virtualTables; + } + + @Override + public Map> virtualColumns() { + return virtualColumns; + } + + public Map> vertices() { + return vertices; + } + + public Map> edges() { + return edges; + } + + public static class Builder { + private static final Logger LOG = LoggerFactory.getLogger(Dse68SchemaRows.Builder.class); + + private final CompletableFuture refreshFuture; + private final String logPrefix; + private final ImmutableList.Builder keyspacesBuilder = ImmutableList.builder(); + private final ImmutableMultimap.Builder tablesBuilder = + ImmutableListMultimap.builder(); + private final ImmutableMultimap.Builder viewsBuilder = + ImmutableListMultimap.builder(); + private final ImmutableMultimap.Builder typesBuilder = + ImmutableListMultimap.builder(); + private final ImmutableMultimap.Builder functionsBuilder = + ImmutableListMultimap.builder(); + private final ImmutableMultimap.Builder aggregatesBuilder = + ImmutableListMultimap.builder(); + private final Map> + columnsBuilders = new LinkedHashMap<>(); + private final Map> + indexesBuilders = new LinkedHashMap<>(); + private final Map> + verticesBuilders = new LinkedHashMap<>(); + private final Map> + edgesBuilders = new LinkedHashMap<>(); + private final ImmutableList.Builder virtualKeyspacesBuilder = ImmutableList.builder(); + private final ImmutableMultimap.Builder virtualTablesBuilder = + ImmutableListMultimap.builder(); + private final Map> + virtualColumnsBuilders = new LinkedHashMap<>(); + + public Builder(CompletableFuture refreshFuture, String logPrefix) { + this.refreshFuture = refreshFuture; + this.logPrefix = logPrefix; + } + + public Dse68SchemaRows.Builder withKeyspaces(Iterable rows) { + keyspacesBuilder.addAll(rows); + return this; + } + + public Dse68SchemaRows.Builder withTables(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, tablesBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withViews(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, 
viewsBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withTypes(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, typesBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withFunctions(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, functionsBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withAggregates(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, aggregatesBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withColumns(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspaceAndTable(row, columnsBuilders); + } + return this; + } + + public Dse68SchemaRows.Builder withIndexes(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspaceAndTable(row, indexesBuilders); + } + return this; + } + + public Dse68SchemaRows.Builder withVirtualKeyspaces(Iterable rows) { + virtualKeyspacesBuilder.addAll(rows); + return this; + } + + public Dse68SchemaRows.Builder withVirtualTables(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspace(row, virtualTablesBuilder); + } + return this; + } + + public Dse68SchemaRows.Builder withVirtualColumns(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspaceAndTable(row, virtualColumnsBuilders); + } + return this; + } + + public Dse68SchemaRows.Builder withVertices(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspaceAndTable(row, verticesBuilders); + } + return this; + } + + public Dse68SchemaRows.Builder withEdges(Iterable rows) { + for (AdminRow row : rows) { + putByKeyspaceAndTable(row, edgesBuilders); + } + return this; + } + + private void putByKeyspace( + AdminRow row, ImmutableMultimap.Builder builder) { + String keyspace = row.getString("keyspace_name"); + if (keyspace == null) { + LOG.warn("[{}] Skipping system row with missing keyspace name", logPrefix); + } else { + builder.put(CqlIdentifier.fromInternal(keyspace), row); + } + } + + private void putByKeyspaceAndTable( + 
AdminRow row, + Map> builders) { + String keyspace = row.getString("keyspace_name"); + String table = row.getString("table_name"); + if (keyspace == null) { + LOG.warn("[{}] Skipping system row with missing keyspace name", logPrefix); + } else if (table == null) { + LOG.warn("[{}] Skipping system row with missing table name", logPrefix); + } else { + ImmutableMultimap.Builder builder = + builders.computeIfAbsent( + CqlIdentifier.fromInternal(keyspace), s -> ImmutableListMultimap.builder()); + builder.put(CqlIdentifier.fromInternal(table), row); + } + } + + public Dse68SchemaRows build() { + return new Dse68SchemaRows( + refreshFuture, + keyspacesBuilder.build(), + tablesBuilder.build(), + viewsBuilder.build(), + build(columnsBuilders), + build(indexesBuilders), + typesBuilder.build(), + functionsBuilder.build(), + aggregatesBuilder.build(), + virtualKeyspacesBuilder.build(), + virtualTablesBuilder.build(), + build(virtualColumnsBuilders), + build(verticesBuilders), + build(edgesBuilders)); + } + + private static Map> build( + Map> builders) { + ImmutableMap.Builder> builder = ImmutableMap.builder(); + for (Map.Entry> entry : builders.entrySet()) { + builder.put(entry.getKey(), entry.getValue().build()); + } + return builder.build(); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java index 46d3d9499c8..e8febdc90e8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java @@ -15,45 +15,68 @@ */ package com.datastax.dse.driver.internal.core.graph; -import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_1_0; -import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_2_0; -import static com.datastax.dse.driver.internal.core.graph.GraphSONUtils.GRAPHSON_3_0; 
+import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_3_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.netty.buffer.ByteBuf; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyPath; import 
org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) public class GraphNodeTest { + private GraphBinaryModule graphBinaryModule; + + @Before + public void setup() { + DseDriverContext dseDriverContext = mock(DseDriverContext.class); + when(dseDriverContext.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); + when(dseDriverContext.getProtocolVersion()).thenReturn(DseProtocolVersion.DSE_V2); + + TypeSerializerRegistry registry = + GraphBinaryModule.createDseTypeSerializerRegistry(dseDriverContext); + graphBinaryModule = + new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); + } + @Test public void should_create_graph_node_for_set_for_graphson_3_0() throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_3_0)); - // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_3_0); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableSet.of("value"), GRAPHSON_3_0); // then assertThat(graphNode.isSet()).isTrue(); @@ -63,13 +86,8 @@ public void should_create_graph_node_for_set_for_graphson_3_0() throws IOExcepti @Test public void should_not_support_set_for_graphson_2_0() throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_2_0)); - // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_2_0); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableSet.of("value"), GRAPHSON_2_0); // then assertThat(graphNode.isSet()).isFalse(); @@ -77,13 +95,8 
@@ public void should_not_support_set_for_graphson_2_0() throws IOException { @Test public void should_throw_for_set_for_graphson_1_0() throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableSet.of("value"), GRAPHSON_1_0)); - // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_1_0); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableSet.of("value"), GRAPHSON_1_0); // then assertThat(graphNode.isSet()).isFalse(); @@ -91,15 +104,10 @@ public void should_throw_for_set_for_graphson_1_0() throws IOException { } @Test - @UseDataProvider(value = "graphsonAllVersions") - public void should_create_graph_node_for_list(String graphVersion) throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableList.of("value"), graphVersion)); - + @UseDataProvider(value = "allGraphProtocols") + public void should_create_graph_node_for_list(GraphProtocol graphVersion) throws IOException { // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphVersion); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableList.of("value"), graphVersion); // then assertThat(graphNode.isList()).isTrue(); @@ -109,13 +117,8 @@ public void should_create_graph_node_for_list(String graphVersion) throws IOExce @Test public void should_create_graph_node_for_map_for_graphson_3_0() throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), GRAPHSON_3_0)); - // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, GRAPHSON_3_0); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), GRAPHSON_3_0); // then assertThat(graphNode.isMap()).isTrue(); @@ -124,15 +127,10 @@ public void should_create_graph_node_for_map_for_graphson_3_0() throws IOExcepti } @Test - @UseDataProvider("graphsonAllVersions") - public void 
should_create_graph_node_for_map(String graphsonVersion) throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableMap.of("value", 1234), graphsonVersion)); - + @UseDataProvider("allGraphProtocols") + public void should_create_graph_node_for_map(GraphProtocol graphProtocol) throws IOException { // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphsonVersion); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableMap.of("value", 1234), graphProtocol); // then assertThat(graphNode.isMap()).isTrue(); @@ -142,15 +140,10 @@ public void should_create_graph_node_for_map(String graphsonVersion) throws IOEx @Test @UseDataProvider("graphson1_0and2_0") - public void should_create_graph_node_for_map_for_non_string_key(String graphsonVersion) + public void should_create_graph_node_for_map_for_non_string_key(GraphProtocol graphProtocol) throws IOException { - // given - ImmutableList bytes = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphsonVersion)); - // when - GraphNode graphNode = GraphSONUtils.createGraphNode(bytes, graphsonVersion); + GraphNode graphNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), graphProtocol); // then assertThat(graphNode.isMap()).isTrue(); @@ -159,25 +152,13 @@ public void should_create_graph_node_for_map_for_non_string_key(String graphsonV } @Test - @UseDataProvider(value = "graphsonAllVersions") - public void should_calculate_size_of_collection_types(String graphVersion) throws IOException { - // given - ImmutableList map = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphVersion)); - - ImmutableList set = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableSet.of(12, 1234), graphVersion)); - - ImmutableList list = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableList.of(12, 1234, 99999), graphVersion)); - + @UseDataProvider(value = 
"allGraphProtocols") + public void should_calculate_size_of_collection_types(GraphProtocol graphProtocol) + throws IOException { // when - GraphNode mapNode = GraphSONUtils.createGraphNode(map, graphVersion); - GraphNode setNode = GraphSONUtils.createGraphNode(set, graphVersion); - GraphNode listNode = GraphSONUtils.createGraphNode(list, graphVersion); + GraphNode mapNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), graphProtocol); + GraphNode setNode = serdeAndCreateGraphNode(ImmutableSet.of(12, 1234), graphProtocol); + GraphNode listNode = serdeAndCreateGraphNode(ImmutableList.of(12, 1234, 99999), graphProtocol); // then assertThat(mapNode.size()).isEqualTo(1); @@ -186,58 +167,26 @@ public void should_calculate_size_of_collection_types(String graphVersion) throw } @Test - @UseDataProvider(value = "graphsonAllVersions") - public void should_return_is_value_only_for_scalar_value(String graphVersion) throws IOException { - // given - ImmutableList map = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableMap.of(12, 1234), graphVersion)); - - ImmutableList set = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableSet.of(12, 1234), graphVersion)); - - ImmutableList list = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(ImmutableList.of(12, 1234, 99999), graphVersion)); - - ImmutableList vertex = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(new DetachedVertex("a", "l", null), graphVersion)); - - ImmutableList edge = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - new DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), - graphVersion)); - - ImmutableList path = - ImmutableList.of(GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), graphVersion)); - - ImmutableList property = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), graphVersion)); - - ImmutableList vertexProperty = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( 
- new DetachedVertexProperty<>( - "id", "l", "v", null, new DetachedVertex("a", "l", null)), - graphVersion)); - - ImmutableList scalarValue = - ImmutableList.of(GraphSONUtils.serializeToByteBuffer(true, graphVersion)); - + @UseDataProvider(value = "allGraphProtocols") + public void should_return_is_value_only_for_scalar_value(GraphProtocol graphProtocol) + throws IOException { // when - GraphNode mapNode = GraphSONUtils.createGraphNode(map, graphVersion); - GraphNode setNode = GraphSONUtils.createGraphNode(set, graphVersion); - GraphNode listNode = GraphSONUtils.createGraphNode(list, graphVersion); - GraphNode vertexNode = GraphSONUtils.createGraphNode(vertex, graphVersion); - GraphNode edgeNode = GraphSONUtils.createGraphNode(edge, graphVersion); - GraphNode pathNode = GraphSONUtils.createGraphNode(path, graphVersion); - GraphNode propertyNode = GraphSONUtils.createGraphNode(property, graphVersion); - GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(vertexProperty, graphVersion); - GraphNode scalarValueNode = GraphSONUtils.createGraphNode(scalarValue, graphVersion); + GraphNode mapNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), graphProtocol); + GraphNode setNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), graphProtocol); + GraphNode listNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), graphProtocol); + GraphNode vertexNode = + serdeAndCreateGraphNode(new DetachedVertex("a", "l", null), graphProtocol); + GraphNode edgeNode = + serdeAndCreateGraphNode( + new DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), + graphProtocol); + GraphNode pathNode = serdeAndCreateGraphNode(EmptyPath.instance(), graphProtocol); + GraphNode propertyNode = serdeAndCreateGraphNode(new DetachedProperty<>("a", 1), graphProtocol); + GraphNode vertexPropertyNode = + serdeAndCreateGraphNode( + new DetachedVertexProperty<>("id", "l", "v", null, new DetachedVertex("a", "l", null)), + graphProtocol); + GraphNode scalarValueNode = 
serdeAndCreateGraphNode(true, graphProtocol); // then assertThat(mapNode.isValue()).isFalse(); @@ -252,15 +201,11 @@ public void should_return_is_value_only_for_scalar_value(String graphVersion) th } @Test - @UseDataProvider("graphson2_0and3_0") - public void should_check_if_node_is_property_not_map(String graphVersion) throws IOException { - // given - ImmutableList property = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), graphVersion)); - + @UseDataProvider("objectGraphNodeProtocols") + public void should_check_if_node_is_property_not_map(GraphProtocol graphProtocol) + throws IOException { // when - GraphNode propertyNode = GraphSONUtils.createGraphNode(property, graphVersion); + GraphNode propertyNode = serdeAndCreateGraphNode(new DetachedProperty<>("a", 1), graphProtocol); // then assertThat(propertyNode.isProperty()).isTrue(); @@ -270,13 +215,8 @@ public void should_check_if_node_is_property_not_map(String graphVersion) throws @Test public void should_check_if_node_is_property_or_map_for_1_0() throws IOException { - // given - ImmutableList property = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(new DetachedProperty<>("a", 1), GRAPHSON_1_0)); - // when - GraphNode propertyNode = GraphSONUtils.createGraphNode(property, GRAPHSON_1_0); + GraphNode propertyNode = serdeAndCreateGraphNode(new DetachedProperty<>("a", 1), GRAPHSON_1_0); // then assertThat(propertyNode.isProperty()).isTrue(); @@ -285,18 +225,14 @@ public void should_check_if_node_is_property_or_map_for_1_0() throws IOException } @Test - @UseDataProvider("graphsonAllVersions") - public void should_check_if_node_is_vertex_property(String graphVersion) throws IOException { - // given - ImmutableList vertexProperty = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - new DetachedVertexProperty<>( - "id", "l", "v", null, new DetachedVertex("a", "l", null)), - graphVersion)); - + @UseDataProvider("allGraphProtocols") + public void 
should_check_if_node_is_vertex_property(GraphProtocol graphProtocol) + throws IOException { // when - GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(vertexProperty, graphVersion); + GraphNode vertexPropertyNode = + serdeAndCreateGraphNode( + new DetachedVertexProperty<>("id", "l", "v", null, new DetachedVertex("a", "l", null)), + graphProtocol); // then assertThat(vertexPropertyNode.isVertexProperty()).isTrue(); @@ -305,46 +241,31 @@ public void should_check_if_node_is_vertex_property(String graphVersion) throws @Test public void should_check_if_node_is_path_for_graphson_1_0() throws IOException { - // given - ImmutableList path = - ImmutableList.of(GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), GRAPHSON_1_0)); - // when - GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(path, GRAPHSON_1_0); + GraphNode pathNode = serdeAndCreateGraphNode(EmptyPath.instance(), GRAPHSON_1_0); // then - assertThat(vertexPropertyNode.isPath()).isFalse(); - assertThatThrownBy(vertexPropertyNode::asPath) - .isExactlyInstanceOf(UnsupportedOperationException.class); + assertThat(pathNode.isPath()).isFalse(); + assertThatThrownBy(pathNode::asPath).isExactlyInstanceOf(UnsupportedOperationException.class); } @Test - @UseDataProvider("graphson2_0and3_0") - public void should_check_if_node_is_path(String graphsonVersion) throws IOException { - // given - ImmutableList path = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer(EmptyPath.instance(), graphsonVersion)); - + @UseDataProvider("objectGraphNodeProtocols") + public void should_check_if_node_is_path(GraphProtocol graphProtocol) throws IOException { // when - GraphNode vertexPropertyNode = GraphSONUtils.createGraphNode(path, graphsonVersion); + GraphNode pathNode = serdeAndCreateGraphNode(EmptyPath.instance(), graphProtocol); // then - assertThat(vertexPropertyNode.isPath()).isTrue(); - assertThat(vertexPropertyNode.asPath()).isNotNull(); + assertThat(pathNode.isPath()).isTrue(); + 
assertThat(pathNode.asPath()).isNotNull(); } @Test - @UseDataProvider("graphsonAllVersions") - public void should_check_if_node_is_vertex(String graphsonVersion) throws IOException { - // given - ImmutableList vertex = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - new DetachedVertex("a", "l", null), graphsonVersion)); - + @UseDataProvider("allGraphProtocols") + public void should_check_if_node_is_vertex(GraphProtocol graphProtocol) throws IOException { // when - GraphNode vertexNode = GraphSONUtils.createGraphNode(vertex, graphsonVersion); + GraphNode vertexNode = + serdeAndCreateGraphNode(new DetachedVertex("a", "l", null), graphProtocol); // then assertThat(vertexNode.isVertex()).isTrue(); @@ -352,26 +273,40 @@ public void should_check_if_node_is_vertex(String graphsonVersion) throws IOExce } @Test - @UseDataProvider("graphsonAllVersions") - public void should_check_if_node_is_edge(String graphsonVersion) throws IOException { - // given - ImmutableList edge = - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - new DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), - graphsonVersion)); - + @UseDataProvider("allGraphProtocols") + public void should_check_if_node_is_edge(GraphProtocol graphProtocol) throws IOException { // when - GraphNode edgeNode = GraphSONUtils.createGraphNode(edge, graphsonVersion); + GraphNode edgeNode = + serdeAndCreateGraphNode( + new DetachedEdge("a", "l", Collections.emptyMap(), "v1", "l1", "v2", "l2"), + graphProtocol); // then assertThat(edgeNode.isEdge()).isTrue(); assertThat(edgeNode.asEdge()).isNotNull(); } + private GraphNode serdeAndCreateGraphNode(Object inputValue, GraphProtocol graphProtocol) + throws IOException { + if (graphProtocol.isGraphBinary()) { + ByteBuf nettyBuf = graphBinaryModule.serialize(new DefaultRemoteTraverser<>(inputValue, 0L)); + ByteBuffer nioBuffer = ByteBufUtil.toByteBuffer(nettyBuf); + nettyBuf.release(); + return new ObjectGraphNode( + 
GraphConversions.createGraphBinaryGraphNode( + ImmutableList.of(nioBuffer), graphBinaryModule) + .as(Traverser.class) + .get()); + } else { + return GraphSONUtils.createGraphNode( + ImmutableList.of(GraphSONUtils.serializeToByteBuffer(inputValue, graphProtocol)), + graphProtocol); + } + } + @DataProvider - public static Object[][] graphsonAllVersions() { - return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPHSON_3_0}}; + public static Object[][] allGraphProtocols() { + return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPHSON_3_0}, {GRAPH_BINARY_1_0}}; } @DataProvider @@ -380,7 +315,7 @@ public static Object[][] graphson1_0and2_0() { } @DataProvider - public static Object[][] graphson2_0and3_0() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}}; + public static Object[][] objectGraphNodeProtocols() { + return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}, {GRAPH_BINARY_1_0}}; } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java new file mode 100644 index 00000000000..850cc8063b8 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +@RunWith(DataProviderRunner.class) +public class GraphProtocolTest { + + @Mock DriverExecutionProfile executionProfile; + + @Mock GraphStatement graphStatement; + + @Rule public MockitoRule mockitoRule = MockitoJUnit.rule(); + + @Test + @UseDataProvider("protocolObjects") + public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProtocol) { + when(graphStatement.getSubProtocol()).thenReturn(graphProtocol.toInternalCode()); + + GraphProtocol inferredProtocol = + GraphConversions.inferSubProtocol(graphStatement, executionProfile); + + assertThat(inferredProtocol).isEqualTo(graphProtocol); + Mockito.verifyZeroInteractions(executionProfile); + } + + @Test + @UseDataProvider("protocolStrings") + public void should_pickup_graph_protocol_and_parse_from_string_config(String stringConfig) { + when(executionProfile.getString( + ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.any())) + .thenReturn(stringConfig); + + GraphProtocol inferredProtocol = + 
GraphConversions.inferSubProtocol(graphStatement, executionProfile); + assertThat(inferredProtocol.toInternalCode()).isEqualTo(stringConfig); + } + + @Test + public void should_use_graphson2_as_default_protocol_when_parsing() { + when(executionProfile.getString( + ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.anyString())) + .thenAnswer(i -> i.getArguments()[1]); + GraphProtocol inferredProtocol = + GraphConversions.inferSubProtocol(graphStatement, executionProfile); + assertThat(inferredProtocol).isEqualTo(GraphProtocol.GRAPHSON_2_0); + } + + @Test + public void should_fail_if_graph_protocol_used_is_invalid() { + assertThatThrownBy(() -> GraphProtocol.fromString("invalid")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Graph protocol used [\"invalid\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graphson-3.0\", \"graph-binary-1.0\"]"); + } + + @DataProvider + public static Object[][] protocolObjects() { + return new Object[][] { + {GraphProtocol.GRAPHSON_1_0}, + {GraphProtocol.GRAPHSON_2_0}, + {GraphProtocol.GRAPHSON_3_0}, + {GraphProtocol.GRAPH_BINARY_1_0} + }; + } + + @DataProvider + public static Object[][] protocolStrings() { + // putting manual strings here to be sure to be notified if a value in + // GraphProtocol ever changes + return new Object[][] { + {"graphson-1.0"}, {"graphson-2.0"}, {"graphson-3.0"}, {"graph-binary-1.0"} + }; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index b95f682a3c6..db17e6191f5 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -15,6 +15,9 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static 
com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_3_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.BIGINT; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.TEXT; @@ -30,6 +33,7 @@ import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.data.geometry.Point; import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; @@ -37,12 +41,15 @@ import com.datastax.dse.driver.api.core.graph.GraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.request.RawBytesQuery; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; @@ -56,27 +63,37 @@ import com.datastax.oss.protocol.internal.response.result.DefaultRows; import 
com.datastax.oss.protocol.internal.response.result.RawType; import com.datastax.oss.protocol.internal.response.result.RowsMetadata; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.netty.buffer.ByteBuf; import java.io.IOException; +import java.math.BigInteger; import java.nio.ByteBuffer; import java.time.Duration; +import java.time.LocalDateTime; import java.util.ArrayDeque; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Queue; -import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; -// TODO subProtocol is hard-coded to graphson-2.0 everywhere, we could parameterize the tests +@RunWith(DataProviderRunner.class) public class GraphRequestHandlerTest { private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test-graph\\|\\d*\\|\\d*"); @@ -88,91 +105,167 @@ public class GraphRequestHandlerTest { @Before public void setup() { MockitoAnnotations.initMocks(this); - Mockito.when(node.getMetricUpdater()).thenReturn(nodeMetricUpdater1); + 
when(node.getMetricUpdater()).thenReturn(nodeMetricUpdater1); + } + + GraphBinaryModule createGraphBinaryModule(DseDriverContext context) { + TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); + return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); } @Test - public void should_create_query_message_from_script_statement() { + @UseDataProvider("bytecodeEnabledGraphProtocols") + public void should_create_query_message_from_script_statement(GraphProtocol graphProtocol) + throws IOException { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); - GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); - String subProtocol = "graphson-2.0"; + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + ScriptGraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery") + .setQueryParam("p1", 1L) + .setQueryParam("p2", Uuids.random()); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Message m = GraphConversions.createMessageFromGraphStatement( - graphStatement, - subProtocol, - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), - harness.getContext()); + graphStatement, graphProtocol, executionProfile, harness.getContext(), module); // checks assertThat(m).isInstanceOf(Query.class); - assertThat(((Query) m).query).isEqualTo("mockQuery"); + Query q = ((Query) m); + assertThat(q.query).isEqualTo("mockQuery"); + assertThat(q.options.positionalValues) + .containsExactly(serialize(graphStatement.getQueryParams(), graphProtocol, module)); + assertThat(q.options.namedValues).isEmpty(); } @Test - public void should_create_query_message_from_fluent_statement() throws IOException { + 
@UseDataProvider("bytecodeEnabledGraphProtocols") + public void should_create_query_message_from_fluent_statement(GraphProtocol graphProtocol) + throws IOException { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); - GraphTraversal traversalTest = DseGraph.g.V().has("name", "marko"); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphTraversal traversalTest = + DseGraph.g.V().has("person", "name", "marko").has("p1", 1L).has("p2", Uuids.random()); GraphStatement graphStatement = FluentGraphStatement.newInstance(traversalTest); - String subProtocol = "graphson-2.0"; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Message m = GraphConversions.createMessageFromGraphStatement( - graphStatement, - subProtocol, - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), - harness.getContext()); + graphStatement, graphProtocol, executionProfile, harness.getContext(), module); + + Map createdCustomPayload = + GraphConversions.createCustomPayload( + graphStatement, graphProtocol, executionProfile, harness.getContext(), module); // checks assertThat(m).isInstanceOf(RawBytesQuery.class); - assertThat(((RawBytesQuery) m).query) - .isEqualTo(GraphSONUtils.serializeToBytes(traversalTest, subProtocol)); + testQueryRequestAndPayloadContents( + ((RawBytesQuery) m), + createdCustomPayload, + GraphConversions.bytecodeToSerialize(graphStatement), + graphProtocol, + module); } @Test - public void should_create_query_message_from_batch_statement() throws IOException { + @UseDataProvider("bytecodeEnabledGraphProtocols") + public void should_create_query_message_from_batch_statement(GraphProtocol graphProtocol) + throws IOException { // initialization - RequestHandlerTestHarness harness = 
GraphRequestHandlerTestHarness.builder().build(); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); List traversalsTest = ImmutableList.of( - DseGraph.g.addV("person").property("key1", "value1"), - DseGraph.g.addV("software").property("key2", "value2")); + // randomly testing some complex data types. Complete suite of data types test is in + // GraphBinaryDataTypesTest + DseGraph.g.addV("person").property("p1", 2.3f).property("p2", LocalDateTime.now()), + DseGraph.g + .addV("software") + .property("p3", new BigInteger("123456789123456789123456789123456789")) + .property("p4", ImmutableList.of(Point.fromCoordinates(30.4, 25.63746284)))); GraphStatement graphStatement = BatchGraphStatement.builder().addTraversals(traversalsTest).build(); - String subProtocol = "graphson-2.0"; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when + DriverExecutionProfile executionProfile = + GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Message m = GraphConversions.createMessageFromGraphStatement( - graphStatement, - subProtocol, - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), - harness.getContext()); + graphStatement, graphProtocol, executionProfile, harness.getContext(), module); + + Map createdCustomPayload = + GraphConversions.createCustomPayload( + graphStatement, graphProtocol, executionProfile, harness.getContext(), module); // checks assertThat(m).isInstanceOf(RawBytesQuery.class); - assertThat(((RawBytesQuery) m).query) - .isEqualTo(GraphSONUtils.serializeToBytes(traversalsTest, subProtocol)); + testQueryRequestAndPayloadContents( + ((RawBytesQuery) m), + createdCustomPayload, + GraphConversions.bytecodeToSerialize(graphStatement), + graphProtocol, + module); + } + + private static ByteBuffer serialize( + Object value, GraphProtocol graphProtocol, GraphBinaryModule graphBinaryModule) + throws IOException { + + ByteBuf nettyBuf = 
graphBinaryModule.serialize(value); + ByteBuffer nioBuffer = ByteBufUtil.toByteBuffer(nettyBuf); + nettyBuf.release(); + return graphProtocol.isGraphBinary() + ? nioBuffer + : GraphSONUtils.serializeToByteBuffer(value, graphProtocol); + } + + private void testQueryRequestAndPayloadContents( + RawBytesQuery q, + Map customPayload, + Object traversalTest, + GraphProtocol graphProtocol, + GraphBinaryModule module) + throws IOException { + if (graphProtocol.isGraphBinary()) { + assertThat(q.query).isEqualTo(GraphConversions.EMPTY_STRING_QUERY); + assertThat(customPayload).containsKey(GraphConversions.GRAPH_BINARY_QUERY_OPTION_KEY); + ByteBuffer encodedQuery = customPayload.get(GraphConversions.GRAPH_BINARY_QUERY_OPTION_KEY); + assertThat(encodedQuery).isNotNull(); + assertThat(encodedQuery).isEqualTo(serialize(traversalTest, graphProtocol, module)); + } else { + assertThat(q.query).isEqualTo(serialize(traversalTest, graphProtocol, module).array()); + assertThat(customPayload).doesNotContainKey(GraphConversions.GRAPH_BINARY_QUERY_OPTION_KEY); + } } @Test public void should_set_correct_query_options_from_graph_statement() throws IOException { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setQueryParam("name", "value"); - String subProtocol = "graphson-2.0"; + GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when DriverExecutionProfile executionProfile = GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); Message m = GraphConversions.createMessageFromGraphStatement( - graphStatement, subProtocol, executionProfile, harness.getContext()); + graphStatement, subProtocol, executionProfile, harness.getContext(), module); // checks Query 
query = ((Query) m); @@ -194,8 +287,9 @@ public void should_set_correct_query_options_from_graph_statement() throws IOExc GraphConversions.createMessageFromGraphStatement( graphStatement.setTimestamp(2L), subProtocol, - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()), - harness.getContext()); + executionProfile, + harness.getContext(), + module); query = ((Query) m); options = ((DseQueryOptions) query.options); assertThat(options.defaultTimestamp).isEqualTo(2L); @@ -204,10 +298,12 @@ public void should_set_correct_query_options_from_graph_statement() throws IOExc @Test public void should_create_payload_from_config_options() { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); - String subProtocol = "graphson-2.0"; + GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when DriverExecutionProfile executionProfile = @@ -215,7 +311,7 @@ public void should_create_payload_from_config_options() { Map requestPayload = GraphConversions.createCustomPayload( - graphStatement, subProtocol, executionProfile, harness.getContext()); + graphStatement, subProtocol, executionProfile, harness.getContext(), module); // checks Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null); @@ -228,7 +324,8 @@ public void should_create_payload_from_config_options() { assertThat(requestPayload.get(GraphConversions.GRAPH_SOURCE_OPTION_KEY)) .isEqualTo(TEXT.encode("a", harness.getContext().getProtocolVersion())); assertThat(requestPayload.get(GraphConversions.GRAPH_RESULTS_OPTION_KEY)) - .isEqualTo(TEXT.encode(subProtocol, harness.getContext().getProtocolVersion())); + .isEqualTo( + 
TEXT.encode(subProtocol.toInternalCode(), harness.getContext().getProtocolVersion())); assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)) .isEqualTo(TEXT.encode("mockGraph", harness.getContext().getProtocolVersion())); assertThat(requestPayload.get(GraphConversions.GRAPH_LANG_OPTION_KEY)) @@ -244,7 +341,7 @@ public void should_create_payload_from_config_options() { @Test public void should_create_payload_from_statement_options() { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.builder("mockQuery") .setGraphName("mockGraph") @@ -254,7 +351,9 @@ public void should_create_payload_from_statement_options() { .setWriteConsistencyLevel(DefaultConsistencyLevel.THREE) .setSystemQuery(false) .build(); - String subProtocol = "graphson-2.0"; + GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when DriverExecutionProfile executionProfile = @@ -262,7 +361,7 @@ public void should_create_payload_from_statement_options() { Map requestPayload = GraphConversions.createCustomPayload( - graphStatement, subProtocol, executionProfile, harness.getContext()); + graphStatement, subProtocol, executionProfile, harness.getContext(), module); // checks Mockito.verify(executionProfile, never()) @@ -279,7 +378,8 @@ public void should_create_payload_from_statement_options() { assertThat(requestPayload.get(GraphConversions.GRAPH_SOURCE_OPTION_KEY)) .isEqualTo(TEXT.encode("a", harness.getContext().getProtocolVersion())); assertThat(requestPayload.get(GraphConversions.GRAPH_RESULTS_OPTION_KEY)) - .isEqualTo(TEXT.encode(subProtocol, harness.getContext().getProtocolVersion())); + .isEqualTo( + TEXT.encode(subProtocol.toInternalCode(), harness.getContext().getProtocolVersion())); 
assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)) .isEqualTo(TEXT.encode("mockGraph", harness.getContext().getProtocolVersion())); assertThat(requestPayload.get(GraphConversions.GRAPH_LANG_OPTION_KEY)) @@ -295,10 +395,12 @@ public void should_create_payload_from_statement_options() { @Test public void should_not_set_graph_name_on_system_queries() { // initialization - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setSystemQuery(true); - String subProtocol = "graphson-2.0"; + GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); // when DriverExecutionProfile executionProfile = @@ -306,7 +408,7 @@ public void should_not_set_graph_name_on_system_queries() { Map requestPayload = GraphConversions.createCustomPayload( - graphStatement, subProtocol, executionProfile, harness.getContext()); + graphStatement, subProtocol, executionProfile, harness.getContext(), module); // checks assertThat(requestPayload.get(GraphConversions.GRAPH_NAME_OPTION_KEY)).isNull(); @@ -314,16 +416,42 @@ public void should_not_set_graph_name_on_system_queries() { } @Test - public void should_return_results_for_statements() - throws IOException, ExecutionException, InterruptedException { + @UseDataProvider("bytecodeEnabledGraphProtocols") + public void should_return_results_for_statements(GraphProtocol graphProtocol) throws IOException { + DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); + GraphBinaryModule module = createGraphBinaryModule(mockContext); + + GraphRequestAsyncProcessor p = Mockito.spy(new GraphRequestAsyncProcessor(mockContext)); + when(p.getGraphBinaryModule()).thenReturn(module); + + Vertex v = + DetachedVertex.build() + .setId(1) + .setLabel("person") + 
.addProperty( + DetachedVertexProperty.build() + .setId(11) + .setLabel("name") + .setValue("marko") + .create()) + .create(); + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() - .withResponse(node, defaultDseFrameOf(singleGraphRow())) + .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()) + // ideally we would be able to provide a function here to + // produce results instead of a static predefined response. + // Function to which we would pass the harness instance or a (mocked)DriverContext. + // Since that's not possible in the RequestHandlerTestHarness API at the moment, we + // have to use another DseDriverContext and GraphBinaryModule here, + // instead of reusing the one in the harness' DriverContext + .withResponse(node, defaultDseFrameOf(singleGraphRow(graphProtocol, v, module))) .build(); - GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); GraphResultSet grs = - new GraphRequestSyncProcessor(new GraphRequestAsyncProcessor()) + new GraphRequestSyncProcessor(p) .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); List nodes = grs.all(); @@ -332,19 +460,46 @@ public void should_return_results_for_statements() GraphNode node = nodes.get(0); assertThat(node.isVertex()).isTrue(); - Vertex v = node.asVertex(); - assertThat(v.label()).isEqualTo("person"); - assertThat(v.id()).isEqualTo(1); - assertThat(v.property("name").id()).isEqualTo(11); - assertThat(v.property("name").value()).isEqualTo("marko"); + Vertex vRead = node.asVertex(); + assertThat(vRead.label()).isEqualTo("person"); + assertThat(vRead.id()).isEqualTo(1); + if (!graphProtocol.isGraphBinary()) { + // GraphBinary does not encode properties regardless of whether they are present in the + // parent element or not :/ + assertThat(v.property("name").id()).isEqualTo(11); + 
assertThat(v.property("name").value()).isEqualTo("marko"); + } + } + + @DataProvider + public static Object[][] bytecodeEnabledGraphProtocols() { + return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}, {GRAPH_BINARY_1_0}}; } @Test - public void should_invoke_request_tracker() - throws IOException, ExecutionException, InterruptedException { + public void should_invoke_request_tracker() throws IOException { + DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); + GraphBinaryModule module = createGraphBinaryModule(mockContext); + + GraphRequestAsyncProcessor p = Mockito.spy(new GraphRequestAsyncProcessor(mockContext)); + when(p.getGraphBinaryModule()).thenReturn(module); + + Vertex v = + DetachedVertex.build() + .setId(1) + .setLabel("person") + .addProperty( + DetachedVertexProperty.build() + .setId(11) + .setLabel("name") + .setValue("marko") + .create()) + .create(); + RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() - .withResponse(node, defaultDseFrameOf(singleGraphRow())) + .withResponse( + node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, v, module))) .build(); RequestTracker requestTracker = mock(RequestTracker.class); @@ -352,7 +507,8 @@ public void should_invoke_request_tracker() GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); GraphResultSet grs = - new GraphRequestSyncProcessor(new GraphRequestAsyncProcessor()) + new GraphRequestSyncProcessor( + new GraphRequestAsyncProcessor((DseDriverContext) harness.getContext())) .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); List nodes = grs.all(); @@ -361,11 +517,11 @@ public void should_invoke_request_tracker() GraphNode graphNode = nodes.get(0); assertThat(graphNode.isVertex()).isTrue(); - Vertex v = graphNode.asVertex(); - assertThat(v.label()).isEqualTo("person"); - assertThat(v.id()).isEqualTo(1); - assertThat(v.property("name").id()).isEqualTo(11); - 
assertThat(v.property("name").value()).isEqualTo("marko"); + Vertex actual = graphNode.asVertex(); + assertThat(actual.label()).isEqualTo("person"); + assertThat(actual.id()).isEqualTo(1); + assertThat(actual.property("name").id()).isEqualTo(11); + assertThat(actual.property("name").value()).isEqualTo("marko"); verify(requestTracker) .onSuccess( @@ -387,8 +543,10 @@ private static Frame defaultDseFrameOf(Message responseMessage) { responseMessage); } - // Returns a single row, with a single "message" column with the value "hello, world" - private static Message singleGraphRow() throws IOException { + // Returns a single row, with a single "message" column containing the value + // given in parameter serialized according to the protocol + private static Message singleGraphRow( + GraphProtocol graphProtocol, Object value, GraphBinaryModule module) throws IOException { RowsMetadata metadata = new RowsMetadata( ImmutableList.of( @@ -397,27 +555,23 @@ private static Message singleGraphRow() throws IOException { "table", "gremlin", 0, - RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + graphProtocol.isGraphBinary() + ? RawType.PRIMITIVES.get(ProtocolConstants.DataType.BLOB) + : RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), null, new int[] {}, null); Queue> data = new ArrayDeque<>(); + data.add( ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - ImmutableMap.of( - "result", - DetachedVertex.build() - .setId(1) - .setLabel("person") - .addProperty( - DetachedVertexProperty.build() - .setId(11) - .setLabel("name") - .setValue("marko") - .create()) - .create()), - "graphson-2.0"))); + serialize( + graphProtocol.isGraphBinary() + // GraphBinary returns results directly inside a Traverser + ? 
new DefaultRemoteTraverser<>(value, 1) + : ImmutableMap.of("result", value), + graphProtocol, + module))); return new DefaultRows(metadata, data); } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index cd1ccba8862..65a2c0678f8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -15,14 +15,25 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.servererrors.DefaultWriteTypeRegistry; +import com.datastax.oss.driver.internal.core.session.throttling.PassThroughRequestThrottler; +import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; +import io.netty.channel.EventLoop; import java.time.Duration; -import org.mockito.Mock; -import org.mockito.Mockito; +import java.util.Optional; +import javax.annotation.Nullable; +import org.mockito.*; /** * Provides the environment to test a request handler, where a query plan can be defined, and the @@ -34,77 +45,97 @@ public class GraphRequestHandlerTestHarness extends 
RequestHandlerTestHarness { @Mock DriverExecutionProfile systemQueryExecutionProfile; - protected GraphRequestHandlerTestHarness(Builder builder) { + @Mock DseDriverContext dseDriverContext; + + @Mock EventLoop eventLoop; + + protected GraphRequestHandlerTestHarness( + Builder builder, @Nullable String graphProtocolForTestConfig) { super(builder); + // not mocked by RequestHandlerTestHarness, will be used when DseDriverOptions.GRAPH_TIMEOUT + // is not null in the config + when(eventLoopGroup.next()).thenReturn(eventLoop); + // default graph options as in the reference.conf file - Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)) - .thenReturn("g"); - Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) + when(defaultProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)).thenReturn("g"); + when(defaultProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) .thenReturn("graphson-2.0"); - Mockito.when(defaultProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) - .thenReturn(false); - Mockito.when(defaultProfile.getString(DseDriverOption.GRAPH_NAME, null)) - .thenReturn("mockGraph"); + when(defaultProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); + when(defaultProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); - Mockito.when(testProfile.getName()).thenReturn("default"); - Mockito.when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) - .thenReturn(Duration.ofMillis(500L)); - Mockito.when(testProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + when(testProfile.getName()).thenReturn("test-graph"); + when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + .thenReturn(Duration.ofMillis(2L)); + when(testProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); - 
Mockito.when(testProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).thenReturn(5000); - Mockito.when(testProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + when(testProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).thenReturn(5000); + when(testProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.SERIAL.name()); - Mockito.when(testProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)) - .thenReturn(false); - Mockito.when(testProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)).thenReturn(true); - Mockito.when(testProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)) - .thenReturn("a"); - Mockito.when(testProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) - .thenReturn("testMock"); - Mockito.when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) - .thenReturn(Duration.ofMillis(2)); - Mockito.when(testProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) - .thenReturn(false); - Mockito.when(testProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); - Mockito.when(testProfile.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) + when(testProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)).thenReturn(false); + when(testProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)).thenReturn(true); + when(testProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)).thenReturn("a"); + when(testProfile.getString( + ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.anyString())) + .thenReturn(Optional.ofNullable(graphProtocolForTestConfig).orElse("graphson-2.0")); + when(testProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); + when(testProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); + when(testProfile.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) 
.thenReturn("LOCAL_TWO"); - Mockito.when(testProfile.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) + when(testProfile.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) .thenReturn("LOCAL_THREE"); + when(config.getProfile("test-graph")).thenReturn(testProfile); - Mockito.when(config.getProfile("test-graph")).thenReturn(testProfile); - - Mockito.when(systemQueryExecutionProfile.getName()).thenReturn("default"); - Mockito.when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); + when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) .thenReturn(Duration.ofMillis(500L)); - Mockito.when(systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + when(systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); - Mockito.when(systemQueryExecutionProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)) + when(systemQueryExecutionProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)) .thenReturn(5000); - Mockito.when( - systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) + when(systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.SERIAL.name()); - Mockito.when( - systemQueryExecutionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)) + when(systemQueryExecutionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)) .thenReturn(false); - Mockito.when(systemQueryExecutionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)) + when(systemQueryExecutionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)) .thenReturn(true); - Mockito.when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); - 
Mockito.when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); + when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) .thenReturn(Duration.ofMillis(2)); - Mockito.when( - systemQueryExecutionProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) + when(systemQueryExecutionProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) .thenReturn(true); - Mockito.when( - systemQueryExecutionProfile.getString( - DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) + when(systemQueryExecutionProfile.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) .thenReturn("LOCAL_TWO"); - Mockito.when( - systemQueryExecutionProfile.getString( - DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) + when(systemQueryExecutionProfile.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null)) .thenReturn("LOCAL_THREE"); - Mockito.when(config.getProfile("graph-system-query")).thenReturn(systemQueryExecutionProfile); + when(config.getProfile("graph-system-query")).thenReturn(systemQueryExecutionProfile); + + // need to re-mock everything on the context because the RequestHandlerTestHarness returns a + // InternalDriverContext and not a DseDriverContext. 
Couldn't figure out a way with mockito + // to say "mock this object (this.dseDriverContext), and delegate every call to that + // other object (this.context), except _this_ call and _this_ and so on" + // Spy wouldn't work because the spied object has to be of the same type as the final object + when(dseDriverContext.getConfig()).thenReturn(config); + when(dseDriverContext.getNettyOptions()).thenReturn(nettyOptions); + when(dseDriverContext.getLoadBalancingPolicyWrapper()).thenReturn(loadBalancingPolicyWrapper); + when(dseDriverContext.getRetryPolicy(ArgumentMatchers.anyString())).thenReturn(retryPolicy); + when(dseDriverContext.getSpeculativeExecutionPolicy(ArgumentMatchers.anyString())) + .thenReturn(speculativeExecutionPolicy); + when(dseDriverContext.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); + when(dseDriverContext.getTimestampGenerator()).thenReturn(timestampGenerator); + when(dseDriverContext.getProtocolVersion()).thenReturn(DseProtocolVersion.DSE_V2); + when(dseDriverContext.getConsistencyLevelRegistry()) + .thenReturn(new DefaultConsistencyLevelRegistry()); + when(dseDriverContext.getWriteTypeRegistry()).thenReturn(new DefaultWriteTypeRegistry()); + when(dseDriverContext.getRequestThrottler()) + .thenReturn(new PassThroughRequestThrottler(dseDriverContext)); + when(dseDriverContext.getRequestTracker()).thenReturn(new NoopRequestTracker(dseDriverContext)); + } + + @Override + public DseDriverContext getContext() { + return dseDriverContext; } public static GraphRequestHandlerTestHarness.Builder builder() { @@ -113,9 +144,16 @@ public static GraphRequestHandlerTestHarness.Builder builder() { public static class Builder extends RequestHandlerTestHarness.Builder { + String graphProtocolForTestConfig; + + public Builder withGraphProtocolForTestConfig(String protocol) { + this.graphProtocolForTestConfig = protocol; + return this; + } + @Override - public RequestHandlerTestHarness build() { - return new GraphRequestHandlerTestHarness(this); + public 
GraphRequestHandlerTestHarness build() { + return new GraphRequestHandlerTestHarness(this, graphProtocolForTestConfig); } } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java new file mode 100644 index 00000000000..c191d2d697b --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -0,0 +1,255 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import static com.datastax.oss.driver.api.core.type.DataTypes.*; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.data.geometry.Distance; +import com.datastax.dse.driver.internal.core.graph.EditDistance; +import com.datastax.dse.driver.internal.core.graph.GraphConversions; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import 
com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.internal.core.type.UserDefinedTypeBuilder; +import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.netty.buffer.ByteBuf; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class GraphBinaryDataTypesTest { + + private GraphBinaryModule graphBinaryModule; + + @Mock private DseDriverContext context; + + private static final CodecRegistry CODEC_REGISTRY = + new DefaultCodecRegistry( + "testDseRegistry", DseTypeCodecs.POINT, DseTypeCodecs.LINE_STRING, DseTypeCodecs.POLYGON); + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + when(context.getCodecRegistry()).thenReturn(CODEC_REGISTRY); + when(context.getProtocolVersion()).thenReturn(DseProtocolVersion.DSE_V2); + + TypeSerializerRegistry registry = 
GraphBinaryModule.createDseTypeSerializerRegistry(context); + graphBinaryModule = + new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); + } + + @DataProvider + public static Object[][] datatypes() throws UnknownHostException { + return new Object[][] { + {ByteBuffer.wrap(new byte[] {1, 2, 3})}, + {"~’~^ää#123#ö"}, + {(byte) 34}, + {BigDecimal.TEN}, + {BigInteger.TEN}, + {Boolean.TRUE}, + {false}, + {23}, + {23L}, + {23.0d}, + {23f}, + {(short) 23}, + {InetAddress.getLocalHost()}, + {LocalDate.now()}, + {LocalTime.now()}, + {CqlDuration.newInstance(10, 10, 10000)}, + {java.util.UUID.randomUUID()}, + {Instant.now()}, + {ImmutableList.of(1L, 2L, 3L)}, + {ImmutableList.of(ImmutableList.of(1L, 3L), ImmutableList.of(2L, 4L))}, + {ImmutableSet.of(1L, 2L, 3L)}, + {ImmutableSet.of(ImmutableSet.of(1, 2, 3))}, + {ImmutableMap.of("a", 1, "b", 2)}, + {ImmutableMap.of(ImmutableMap.of("a", 1), ImmutableMap.of(2, "b"))}, + {Point.fromCoordinates(3.3, 4.4)}, + { + LineString.fromPoints( + Point.fromCoordinates(1, 1), Point.fromCoordinates(2, 2), Point.fromCoordinates(3, 3)) + }, + { + Polygon.fromPoints( + Point.fromCoordinates(3, 4), Point.fromCoordinates(5, 4), Point.fromCoordinates(6, 6)) + }, + {tupleOf(INT, TEXT, FLOAT).newValue(1, "2", 3.41f)}, + { + tupleOf(INT, TEXT, tupleOf(TEXT, DURATION)) + .newValue( + 1, "2", tupleOf(TEXT, DURATION).newValue("a", CqlDuration.newInstance(2, 1, 0))) + }, + { + tupleOf( + listOf(INT), + setOf(FLOAT), + DataTypes.mapOf(TEXT, BIGINT), + listOf(listOf(DOUBLE)), + setOf(setOf(FLOAT)), + listOf(tupleOf(INT, TEXT))) + .newValue( + ImmutableList.of(4, 8, 22, 34, 37, 59), + ImmutableSet.of(28f, 44f, 59f), + ImmutableMap.of("big10", 2345L), + ImmutableList.of(ImmutableList.of(11.1d, 33.3d), ImmutableList.of(22.2d, 44.4d)), + ImmutableSet.of(ImmutableSet.of(55.5f)), + ImmutableList.of(tupleOf(INT, TEXT).newValue(3, "three"))) + }, + { + new UserDefinedTypeBuilder("ks", "udt1") + .withField("a", INT) + 
.withField("b", TEXT) + .build() + .newValue(1, "two") + }, + {new Distance(Point.fromCoordinates(3.4, 17.0), 2.5)}, + {new EditDistance("xyz", 3)}, + {DseGraph.g.V().has("name", "marko").asAdmin().getBytecode()}, + { + GraphConversions.bytecodeToSerialize( + BatchGraphStatement.builder() + .addTraversal(DseGraph.g.addV("person").property("name", "1")) + .addTraversal(DseGraph.g.addV("person").property("name", "1")) + .build()) + }, + }; + } + + @Test + @UseDataProvider("datatypes") + public void datatypesTest(Object value) throws SerializationException { + verifySerDe(value); + } + + @Test + public void complexUdtTests() throws SerializationException { + UserDefinedType type1 = + new UserDefinedTypeBuilder("ks", "udt1").withField("a", INT).withField("b", TEXT).build(); + verifySerDe(type1.newValue(1, "2")); + + TupleType secondNested = tupleOf(BIGINT, listOf(BIGINT)); + TupleType firstNested = tupleOf(TEXT, secondNested); + + UserDefinedType type2 = + new UserDefinedTypeBuilder("ks", "udt2") + .withField("a", INT) + .withField("b", TEXT) + .withField("c", type1) + .withField("mylist", listOf(BIGINT)) + .withField("mytuple_withlist", firstNested) + .build(); + + verifySerDe( + type2.newValue( + 1, + "2", + type1.newValue(3, "4"), + ImmutableList.of(5L), + firstNested.newValue("6", secondNested.newValue(7L, ImmutableList.of(8L))))); + + UserDefinedType type3 = + new UserDefinedTypeBuilder("ks", "udt3") + .withField("a", listOf(INT)) + .withField("b", setOf(FLOAT)) + .withField("c", mapOf(TEXT, BIGINT)) + .withField("d", listOf(listOf(DOUBLE))) + .withField("e", setOf(setOf(FLOAT))) + .withField("f", listOf(tupleOf(INT, TEXT))) + .build(); + + verifySerDe( + type3.newValue( + ImmutableList.of(1), + ImmutableSet.of(2.1f), + ImmutableMap.of("3", 4L), + ImmutableList.of(ImmutableList.of(5.1d, 6.1d), ImmutableList.of(7.1d)), + ImmutableSet.of(ImmutableSet.of(8.1f), ImmutableSet.of(9.1f)), + ImmutableList.of(tupleOf(INT, TEXT).newValue(10, "11")))); + } + + @Test + public 
void complexTypesAndGeoTests() throws SerializationException { + + TupleType tuple = tupleOf(DseDataTypes.POINT, DseDataTypes.LINE_STRING, DseDataTypes.POLYGON); + tuple.attach(context); + + verifySerDe( + tuple.newValue( + Point.fromCoordinates(3.3, 4.4), + LineString.fromPoints( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(3, 3)), + Polygon.fromPoints( + Point.fromCoordinates(3, 4), + Point.fromCoordinates(5, 4), + Point.fromCoordinates(6, 6)))); + + UserDefinedType udt = + new UserDefinedTypeBuilder("ks", "udt1") + .withField("a", DseDataTypes.POINT) + .withField("b", DseDataTypes.LINE_STRING) + .withField("c", DseDataTypes.POLYGON) + .build(); + udt.attach(context); + + verifySerDe( + udt.newValue( + Point.fromCoordinates(3.3, 4.4), + LineString.fromPoints( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(3, 3)), + Polygon.fromPoints( + Point.fromCoordinates(3, 4), + Point.fromCoordinates(5, 4), + Point.fromCoordinates(6, 6)))); + } + // TODO add predicate tests + + private void verifySerDe(Object input) throws SerializationException { + ByteBuf result = graphBinaryModule.serialize(input); + Object deserialized = graphBinaryModule.deserialize(result); + result.release(); + assertThat(deserialized).isEqualTo(input); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java index 73a24707436..fd758fe2dd2 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphDataTypeITBase.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java @@ -44,7 +44,7 @@ import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) -public abstract class GraphDataTypeITBase { +public abstract class LegacyGraphDataTypeITBase { private static final boolean IS_DSE50 = CcmBridge.VERSION.compareTo(Version.parse("5.1")) < 0; private static final Set TYPES_REQUIRING_DSE51 = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java new file mode 100644 index 00000000000..312ee48402a --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java @@ -0,0 +1,276 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.oss.driver.api.core.type.DataTypes.*; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.data.geometry.LineString; +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.data.geometry.Polygon; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.dse.driver.api.core.type.DseDataTypes; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.dse.driver.internal.core.graph.GraphProtocol; +import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.TupleType; +import 
com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public abstract class NativeGraphDataTypeITBase { + + private static CustomCcmRule ccmRule = + CustomCcmRule.builder() + .withDseWorkloads("graph") + .withDseConfiguration( + "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") + .withDseConfiguration("graph.max_query_params", 32) + .build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule) + .withCreateGraph() + .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()) + .withNativeEngine() + .build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + protected DseSession session() { + return sessionRule.session(); + } + + protected String graphName() { + return sessionRule.getGraphName(); + } + + @Test + public void should_create_and_retrieve_correct_data_with_types() { + DseSession session = session(); + + // use CQL to create type for now because DSP-17567 is not in yet, so this is more stable + session.execute( + String.format( + "CREATE TYPE %s.udt_graphbinary(simple text, complex tuple, missing text)", + graphName())); + + 
session.execute( + String.format( + "CREATE TYPE %s.udt_graphbinarygeo(point 'PointType', line 'LineStringType', poly 'PolygonType')", + graphName())); + + ImmutableMap.Builder properties = + ImmutableMap.builder() + .put("Ascii", "test") + .put("Bigint", 5L) + .put("Boolean", true) + .put("Date", LocalDate.of(2007, 7, 7)) + .put("Decimal", BigDecimal.valueOf(2.3)) + .put("Double", 4.5d) + .put("Float", 4.8f) + .put("Int", 45) + .put("Smallint", (short) 1) + .put("Text", "test") + .put("Time", LocalTime.now()) + .put("Timeuuid", Uuids.timeBased()) + .put("Timestamp", Instant.now()) + .put("Uuid", java.util.UUID.randomUUID()) + .put("Varint", BigInteger.valueOf(3234)) + .put("Blob", ByteBuffer.wrap(new byte[] {1, 2, 3})) + .put("Tinyint", (byte) 38) + .put("listOf(Int)", Arrays.asList(2, 3, 4)) + .put("setOf(Int)", Sets.newHashSet(2, 3, 4)) + .put("mapOf(Int, Text)", ImmutableMap.of(2, "two", 4, "four")) + .put("Duration", CqlDuration.newInstance(1, 2, 3)) + .put("LineString", Geo.lineString(1, 2, 3, 4, 5, 6)) + .put("Point", Geo.point(3, 4)) + .put("Polygon", Geo.polygon(Geo.point(3, 4), Geo.point(5, 4), Geo.point(6, 6))) + .put("tupleOf(Int, Text)", tupleOf(INT, TEXT).newValue(5, "Bar")) + .put( + "typeOf('udt_graphbinary')", + session + .getMetadata() + .getKeyspace(graphName()) + .get() + .getUserDefinedType("udt_graphbinary") + .get() + .newValue( + "some text", tupleOf(INT, TEXT).newValue(5, "Bar"), "some missing text")) + .put( + "typeOf('udt_graphbinarygeo')", + session + .getMetadata() + .getKeyspace(graphName()) + .get() + .getUserDefinedType("udt_graphbinarygeo") + .get() + .newValue( + Point.fromCoordinates(3.3, 4.4), + LineString.fromPoints( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(3, 3)), + Polygon.fromPoints( + Point.fromCoordinates(3, 4), + Point.fromCoordinates(5, 4), + Point.fromCoordinates(6, 6)))); + + TupleType tuple = tupleOf(DseDataTypes.POINT, DseDataTypes.LINE_STRING, DseDataTypes.POLYGON); + 
tuple.attach(session.getContext()); + + properties.put( + "tupleOf(Point, LineString, Polygon)", + tuple.newValue( + Point.fromCoordinates(3.3, 4.4), + LineString.fromPoints( + Point.fromCoordinates(1, 1), + Point.fromCoordinates(2, 2), + Point.fromCoordinates(3, 3)), + Polygon.fromPoints( + Point.fromCoordinates(3, 4), + Point.fromCoordinates(5, 4), + Point.fromCoordinates(6, 6)))); + + int vertexID = 1; + String vertexLabel = "graphBinaryAllTypes"; + + runTest(properties.build(), vertexLabel, vertexID); + } + + @Test + public void should_insert_and_retrieve_nested_UDTS_and_tuples() { + DseSession session = session(); + + // use CQL to create type for now because DSP-17567 is not in yet, so this is more stable + session.execute( + String.format("CREATE TYPE %s.udt1(" + "a int" + ", b text" + ")", graphName())); + + session.execute( + String.format( + "CREATE TYPE %s.udt2(" + + "a int" + + ", b text" + + ", c frozen" + + ", mylist list" + + ", mytuple_withlist tuple>>>" + + ")", + graphName())); + + session.execute( + String.format( + "CREATE TYPE %s.udt3(" + + "a list" + + ", b set" + + ", c map" + + ", d list>>" + + ", e set>>" + + ", f list>>" + + ")", + graphName())); + + UserDefinedType udt1 = + session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt1").get(); + UdtValue udtValue1 = udt1.newValue(1, "2"); + + UserDefinedType udt2 = + session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt2").get(); + TupleType secondNested = tupleOf(BIGINT, listOf(BIGINT)); + TupleType firstNested = tupleOf(TEXT, secondNested); + UdtValue udtValue2 = + udt2.newValue( + 1, + "2", + udt1.newValue(3, "4"), + ImmutableList.of(5L), + firstNested.newValue("6", secondNested.newValue(7L, ImmutableList.of(8L)))); + + UserDefinedType udt3 = + session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt3").get(); + UdtValue udtValue3 = + udt3.newValue( + ImmutableList.of(1), + ImmutableSet.of(2.1f), + ImmutableMap.of("3", 4L), + 
ImmutableList.of(ImmutableList.of(5.1d, 6.1d), ImmutableList.of(7.1d)), + ImmutableSet.of(ImmutableSet.of(8.1f), ImmutableSet.of(9.1f)), + ImmutableList.of(tupleOf(INT, TEXT).newValue(10, "11"))); + + Map properties = + ImmutableMap.builder() + .put("frozen(typeOf('udt1'))", udtValue1) + .put("frozen(typeOf('udt2'))", udtValue2) + .put("frozen(typeOf('udt3'))", udtValue3) + .build(); + + int vertexID = 1; + String vertexLabel = "graphBinaryNestedTypes"; + + runTest(properties, vertexLabel, vertexID); + } + + private void runTest(Map properties, String vertexLabel, int vertexID) { + // setup schema + session().execute(createVertexLabelStatement(properties, vertexLabel)); + + // execute insert query and read query + Map results = insertVertexThenReadProperties(properties, vertexID, vertexLabel); + + // test valid properties are returned + properties.forEach( + (k, v) -> assertThat(((List) results.get(formatPropertyName(k))).get(0)).isEqualTo(v)); + } + + private static GraphStatement createVertexLabelStatement( + Map properties, String vertexLabel) { + StringBuilder ddl = + new StringBuilder("schema.vertexLabel(vertexLabel).ifNotExists().partitionBy('id', Int)"); + + for (Map.Entry entry : properties.entrySet()) { + String typeDefinition = entry.getKey(); + String propName = formatPropertyName(typeDefinition); + + ddl.append(String.format(".property('%s', %s)", propName, typeDefinition)); + } + ddl.append(".create()"); + + return ScriptGraphStatement.newInstance(ddl.toString()) + .setQueryParam("vertexLabel", vertexLabel); + } + + protected abstract Map insertVertexThenReadProperties( + Map properties, int vertexID, String vertexLabel); + + protected static String formatPropertyName(String originalName) { + return String.format( + "prop%s", + originalName.replace("(", "").replace(")", "").replace(", ", "").replace("'", "")); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java index 0c3b3e5cb66..0d211e53ab9 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java @@ -16,7 +16,7 @@ package com.datastax.dse.driver.api.core.graph.remote; import com.datastax.dse.driver.api.core.graph.DseGraph; -import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,7 +31,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") -public class GraphDataTypeRemoteIT extends GraphDataTypeITBase { +public class LegacyGraphDataTypeRemoteIT extends LegacyGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java new file mode 100644 index 00000000000..a0004002a06 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +public class NativeGraphDataTypeRemoteIT extends NativeGraphDataTypeITBase { + + private final GraphTraversalSource g = + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session()).build()); + + @Override + public Map insertVertexThenReadProperties( + Map properties, int vertexID, String vertexLabel) { + GraphTraversal traversal = g.addV(vertexLabel).property("id", vertexID); + + for (Map.Entry entry : properties.entrySet()) { + String typeDefinition = entry.getKey(); + String propName = formatPropertyName(typeDefinition); + Object value = entry.getValue(); + traversal = traversal.property(propName, value); + } + + // insert vertex + traversal.iterate(); + + // query properties + return g.V().has(vertexLabel, "id", vertexID).valueMap().next(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java index 088a43ee805..424df6a841f 
100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java @@ -17,7 +17,7 @@ import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,7 +31,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for fluent API support") -public class GraphDataTypeFluentIT extends GraphDataTypeITBase { +public class LegacyGraphDataTypeFluentIT extends LegacyGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java index 22c51e1cd56..67055c4eb96 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.api.core.graph.statement; -import com.datastax.dse.driver.api.core.graph.GraphDataTypeITBase; +import 
com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -29,7 +29,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.4", description = "DSE 5.0.4 required for script API with GraphSON 2") -public class GraphDataTypeScriptIT extends GraphDataTypeITBase { +public class LegacyGraphDataTypeScriptIT extends LegacyGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java new file mode 100644 index 00000000000..5311bcb8666 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; + +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.structure.Vertex; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +public class NativeGraphDataTypeFluentIT extends NativeGraphDataTypeITBase { + + @Override + public Map insertVertexThenReadProperties( + Map properties, int vertexID, String vertexLabel) { + GraphTraversal traversal = g.addV(vertexLabel).property("id", vertexID); + + for (Map.Entry entry : properties.entrySet()) { + String typeDefinition = entry.getKey(); + String propName = formatPropertyName(typeDefinition); + Object value = entry.getValue(); + traversal = traversal.property(propName, value); + } + + session().execute(FluentGraphStatement.newInstance(traversal)); + + return session() + .execute( + FluentGraphStatement.newInstance(g.V().has(vertexLabel, "id", vertexID).valueMap())) + .one() + .asMap(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java new file mode 100644 index 00000000000..24104905f21 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatementBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.Map; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +public class NativeGraphDataTypeScriptIT extends NativeGraphDataTypeITBase { + + @Override + protected Map insertVertexThenReadProperties( + Map properties, int vertexID, String vertexLabel) { + StringBuilder insert = new StringBuilder("g.addV(vertexLabel).property('id', vertexID)"); + + ScriptGraphStatementBuilder statementBuilder = + new ScriptGraphStatementBuilder() + .setQueryParam("vertexID", vertexID) + .setQueryParam("vertexLabel", vertexLabel); + + for (Map.Entry entry : properties.entrySet()) { + String typeDefinition = entry.getKey(); + String propName = formatPropertyName(typeDefinition); + Object value = entry.getValue(); + + insert.append(String.format(".property('%s', %s)", propName, propName)); + statementBuilder = statementBuilder.setQueryParam(propName, value); + } + + session().execute(statementBuilder.setScript(insert.toString()).build()); + + return session() + .execute( + ScriptGraphStatement.newInstance("g.V().has(vertexLabel, 'id', vertexID).valueMap()") + .setQueryParam("vertexID", vertexID) + .setQueryParam("vertexLabel", vertexLabel)) + .one() + .asMap(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java new file mode 100644 index 00000000000..8c529b5c652 --- /dev/null +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@DseRequirement(min = "6.8") +public class KeyspaceGraphMetadataIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Test + public void should_expose_graph_engine_if_set() { + DseSession session = SESSION_RULE.session(); + session.execute( + "CREATE KEYSPACE keyspace_metadata_it_graph_engine " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1} " + + "AND graph_engine = 'Tinker'"); + Metadata metadata = session.getMetadata(); + assertThat(metadata.getKeyspace("keyspace_metadata_it_graph_engine")) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + 
.hasValue("Tinker")); + } + + @Test + public void should_expose_empty_graph_engine_if_not_set() { + // The default keyspace created by CcmRule has no graph engine + Metadata metadata = SESSION_RULE.session().getMetadata(); + assertThat(metadata.getKeyspace(SESSION_RULE.keyspace())) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()).isEmpty()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java new file mode 100644 index 00000000000..3b50695d21d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java @@ -0,0 +1,119 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@DseRequirement(min = "6.8") +public class TableGraphMetadataIT { + + private static 
final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @BeforeClass + public static void createTables() { + DseSession session = SESSION_RULE.session(); + + session.execute("CREATE TABLE person (name text PRIMARY KEY) " + "WITH VERTEX LABEL"); + session.execute( + "CREATE TABLE software (company text, name text, version int, " + + "PRIMARY KEY ((company, name), version)) " + + "WITH VERTEX LABEL soft"); + session.execute( + "CREATE TABLE contributors (contributor text, company_name text, software_name text, " + + "software_version int, " + + "PRIMARY KEY(contributor, company_name, software_name, software_version)) " + + "WITH EDGE LABEL contrib " + + "FROM person(contributor) " + + "TO soft((company_name, software_name), software_version)"); + } + + @Test + public void should_expose_vertex_and_edge_metadata() { + DseSession session = SESSION_RULE.session(); + Metadata metadata = session.getMetadata(); + assertThat(metadata.getKeyspace(SESSION_RULE.keyspace())) + .hasValueSatisfying( + keyspaceMetadata -> { + assertThat(keyspaceMetadata.getTable("person")) + .hasValueSatisfying( + person -> { + DseTableMetadata dsePerson = (DseTableMetadata) person; + assertThat(dsePerson.getVertex()) + .hasValueSatisfying( + vertex -> + assertThat(vertex.getLabelName()) + .isEqualTo(CqlIdentifier.fromInternal("person"))); + assertThat(dsePerson.getEdge()).isEmpty(); + }); + + assertThat(keyspaceMetadata.getTable("software")) + .hasValueSatisfying( + software -> { + DseTableMetadata dseSoftware = (DseTableMetadata) software; + assertThat(dseSoftware.getVertex()) + .hasValueSatisfying( + vertex -> + assertThat(vertex.getLabelName()) + .isEqualTo(CqlIdentifier.fromInternal("soft"))); + assertThat(dseSoftware.getEdge()).isEmpty(); + }); + + 
assertThat(keyspaceMetadata.getTable("contributors")) + .hasValueSatisfying( + contributors -> { + DseTableMetadata dseContributors = (DseTableMetadata) contributors; + assertThat(dseContributors.getVertex()).isEmpty(); + assertThat(dseContributors.getEdge()) + .hasValueSatisfying( + edge -> { + assertThat(edge.getLabelName()) + .isEqualTo(CqlIdentifier.fromInternal("contrib")); + + assertThat(edge.getFromTable().asInternal()).isEqualTo("person"); + assertThat(edge.getFromLabel()) + .isEqualTo(CqlIdentifier.fromInternal("person")); + assertThat(edge.getFromPartitionKeyColumns()) + .containsExactly(CqlIdentifier.fromInternal("contributor")); + assertThat(edge.getFromClusteringColumns()).isEmpty(); + + assertThat(edge.getToTable().asInternal()).isEqualTo("software"); + assertThat(edge.getToLabel()) + .isEqualTo(CqlIdentifier.fromInternal("soft")); + assertThat(edge.getToPartitionKeyColumns()) + .containsExactly( + CqlIdentifier.fromInternal("company_name"), + CqlIdentifier.fromInternal("software_name")); + assertThat(edge.getToClusteringColumns()) + .containsExactly( + CqlIdentifier.fromInternal("software_version")); + }); + }); + }); + } +} diff --git a/pom.xml b/pom.xml index 2a2f06bf94e..b9ff7f2b728 100644 --- a/pom.xml +++ b/pom.xml @@ -73,6 +73,8 @@ 1.9.12 4.0.2 2.0.0-M19 + + 20180130 2.22.2 false ${skipTests} @@ -156,6 +158,11 @@ tinkergraph-gremlin ${tinkerpop.version} + + org.apache.tinkerpop + gremlin-driver + ${tinkerpop.version} + org.reactivestreams reactive-streams diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java index 1cbf4f3a84c..2b9d0447374 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java @@ -40,6 +40,9 @@ public SessionRule 
build() { final String graphName; final DriverConfigLoader actualLoader; + + Supplier actualSupplier; + if (createGraph) { graphName = "dsedrivertests_" + GRAPH_NAME_INDEX.getAndIncrement(); @@ -54,25 +57,42 @@ public SessionRule build() { assertThat(loader).isInstanceOf(DefaultDriverConfigLoader.class); } Supplier originalSupplier = ((DefaultDriverConfigLoader) loader).getConfigSupplier(); - Supplier actualSupplier = + actualSupplier = () -> originalSupplier .get() .withValue( DseDriverOption.GRAPH_NAME.getPath(), ConfigValueFactory.fromAnyRef(graphName)); - actualLoader = new DefaultDriverConfigLoader(actualSupplier); } else { graphName = null; - actualLoader = loader; + if (loader == null) { + loader = new DefaultDriverConfigLoader(); + } + + actualSupplier = ((DefaultDriverConfigLoader) loader).getConfigSupplier(); } + actualLoader = + new DefaultDriverConfigLoader( + () -> + graphProtocol != null + ? actualSupplier + .get() + .withValue( + DseDriverOption.GRAPH_SUB_PROTOCOL.getPath(), + ConfigValueFactory.fromAnyRef(graphProtocol)) + // will use the protocol from the config file (in application.conf if + // defined or in reference.conf) + : actualSupplier.get()); + return new SessionRule<>( cassandraResource, createKeyspace, nodeStateListener, schemaChangeListener, actualLoader, - graphName); + graphName, + isCoreGraph); } } From cf184324ba3ac673fde74b967ac2bc8aa4c3508e Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 12 Aug 2019 13:07:47 +0200 Subject: [PATCH 291/979] JAVA-2235: fix tests use coreEngine and use coreEngine instead of nativeEngine (#256) --- .../driver/api/core/graph/NativeGraphDataTypeITBase.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java index 312ee48402a..15aaa241ee3 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java @@ -36,7 +36,6 @@ import java.time.LocalDate; import java.time.LocalTime; import java.util.Arrays; -import java.util.List; import java.util.Map; import org.junit.ClassRule; import org.junit.Test; @@ -59,7 +58,7 @@ public abstract class NativeGraphDataTypeITBase { new DseSessionRuleBuilder(ccmRule) .withCreateGraph() .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()) - .withNativeEngine() + .withCoreEngine() .build(); @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @@ -244,8 +243,7 @@ private void runTest(Map properties, String vertexLabel, int ver Map results = insertVertexThenReadProperties(properties, vertexID, vertexLabel); // test valid properties are returned - properties.forEach( - (k, v) -> assertThat(((List) results.get(formatPropertyName(k))).get(0)).isEqualTo(v)); + properties.forEach((k, v) -> assertThat(results.get(formatPropertyName(k))).isEqualTo(v)); } private static GraphStatement createVertexLabelStatement( From b1546729fabb839e97908b7194db61d403ebfd19 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Wed, 14 Aug 2019 11:57:44 +0200 Subject: [PATCH 292/979] Ngdg 2.x "Attempting to use a Ccm rule while another is in use. 
This is disallowed" fix (#261) --- .../datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java | 2 +- .../driver/api/core/graph/NativeGraphDataTypeITBase.java | 3 --- .../api/core/graph/remote/NativeGraphDataTypeRemoteIT.java | 6 ++++++ .../core/graph/statement/NativeGraphDataTypeFluentIT.java | 6 ++++++ .../core/graph/statement/NativeGraphDataTypeScriptIT.java | 6 ++++++ 5 files changed, 19 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index d26821f6fbe..e13864b980f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -33,7 +33,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") +@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph", max = "6.8.0") public class GraphTimeoutsIT { public static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java index 15aaa241ee3..bee0f0e583b 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java @@ -28,7 +28,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.math.BigDecimal; import java.math.BigInteger; import 
java.nio.ByteBuffer; @@ -41,9 +40,7 @@ import org.junit.Test; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import org.junit.runner.RunWith; -@RunWith(DataProviderRunner.class) public abstract class NativeGraphDataTypeITBase { private static CustomCcmRule ccmRule = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java index a0004002a06..45a5999d8da 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java @@ -9,12 +9,18 @@ import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@RunWith(DataProviderRunner.class) +@Category(IsolatedTests.class) public class NativeGraphDataTypeRemoteIT extends NativeGraphDataTypeITBase { private final GraphTraversalSource g = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java index 
5311bcb8666..17df83f8b29 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java @@ -11,11 +11,17 @@ import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@RunWith(DataProviderRunner.class) +@Category(IsolatedTests.class) public class NativeGraphDataTypeFluentIT extends NativeGraphDataTypeITBase { @Override diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java index 24104905f21..cf2f804176e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java @@ -10,9 +10,15 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatementBuilder; import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; 
+import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@RunWith(DataProviderRunner.class) +@Category(IsolatedTests.class) public class NativeGraphDataTypeScriptIT extends NativeGraphDataTypeITBase { @Override From a530b319e4daf7f76e14befe547d6489ec2190c2 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 19 Aug 2019 09:34:25 +0200 Subject: [PATCH 293/979] fix GraphTimeoutsIt test (#264) --- .../dse/driver/api/core/graph/GraphTimeoutsIT.java | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index e13864b980f..d2f8c9ed063 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -22,6 +22,7 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -33,7 +34,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph", max = "6.8.0") +@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") public class GraphTimeoutsIT { public static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); @@ -76,8 +77,13 @@ public void 
should_have_driver_wait_indefinitely_by_default_and_server_return_ti .setExecutionProfile(drivertest1)); fail("The request should have timed out"); } catch (InvalidQueryException e) { - assertThat(e.toString()) - .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); + if (ccmRule.getCcmBridge().getDseVersion().get().compareTo(Version.parse("6.8.0")) >= 0) { + assertThat(e.toString()) + .contains("evaluation exceeded", "threshold of ", desiredTimeout + "ms"); + } else { + assertThat(e.toString()) + .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); + } } } From 69a692fce29056d575f45f8cbebb20247de6fa09 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Aug 2019 13:36:24 +0200 Subject: [PATCH 294/979] Set version to 4.5.0-ngdg-SNAPSHOT --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- 9 files changed, 15 insertions(+), 15 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 583eaa833cd..0efdf08ca03 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 951cd506ca1..eb7c2948b0c 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 
+21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index eff9b0601d9..682b429061f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index cd52fe398b6..5b9c60a83a4 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-distribution diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index df3e8637545..0d8a009c7ca 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 2f0e5a4c276..8f41bbbb688 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 63def336d15..05277a7f60f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index b9ff7f2b728..1741f771c54 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native 
protocol versions 3 and above. diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c846bbc5466..581e30b5af1 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-query-builder bundle From 392bc80339508d57366fabc5bf5d9a778e5a7c82 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 30 Aug 2019 13:10:44 +0200 Subject: [PATCH 295/979] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) (#259) --- changelog/README.md | 2 + .../api/core/config/DseDriverOption.java | 2 + .../api/core/graph/PagingEnabledOptions.java | 13 + .../core/ContinuousRequestHandlerBase.java | 1231 +++++++++++++++++ .../ContinuousCqlRequestAsyncProcessor.java | 3 +- .../ContinuousCqlRequestHandler.java | 1168 +--------------- .../DefaultContinuousAsyncResultSet.java | 2 +- .../graph/ContinuousAsyncGraphResultSet.java | 126 ++ .../graph/ContinuousGraphRequestHandler.java | 127 ++ .../graph/DefaultAsyncGraphResultSet.java | 59 +- .../internal/core/graph/GraphConversions.java | 86 +- .../core/graph/GraphPagingSupportChecker.java | 83 ++ .../graph/GraphRequestAsyncProcessor.java | 22 +- .../core/graph/GraphResultIterator.java | 68 + .../internal/core/graph/GraphResultSets.java | 2 +- .../internal/core/graph/GraphSONUtils.java | 4 +- .../core/graph/MultiPageGraphResultSet.java | 97 ++ .../core/graph/SinglePageGraphResultSet.java | 2 +- .../core/context/DefaultDriverContext.java | 4 +- core/src/main/resources/reference.conf | 18 + ...tinuousCqlRequestHandlerReprepareTest.java | 4 +- .../ContinuousCqlRequestHandlerRetryTest.java | 14 +- .../ContinuousCqlRequestHandlerTest.java | 14 +- .../graph/GraphPagingSupportCheckerTest.java | 172 +++ .../core/graph/GraphRequestHandlerTest.java | 20 +- .../core/graph/GraphResultSetTestBase.java | 75 + .../core/graph/GraphResultSetsTest.java | 86 ++ .../binary/GraphBinaryDataTypesTest.java | 5 +- 
.../core/graph/statement/GraphPagingIT.java | 379 +++++ .../graph/statement/GraphTraversalIT.java | 11 +- .../session/CqlSessionRuleBuilder.java | 7 +- 31 files changed, 2653 insertions(+), 1253 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java diff --git a/changelog/README.md b/changelog/README.md index b717aa7d211..82bd4c9fdd4 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -3,6 +3,8 @@ ### NGDG (in progress) +- [new feature] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) +- [new feature] JAVA-1898: Expose new table-level graph metadata ### 4.5.0 (in progress) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index 
b8b46e1f699..f6a8b94236d 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -173,6 +173,8 @@ public enum DseDriverOption implements DriverOption { *

      Value type: boolean */ MONITOR_REPORTING_ENABLED("advanced.monitor-reporting.enabled"), + + GRAPH_PAGING_ENABLED("advanced.graph.paging-enabled"), ; private final String path; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java new file mode 100644 index 00000000000..762f229623c --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java @@ -0,0 +1,13 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +public enum PagingEnabledOptions { + ENABLED, + DISABLED, + AUTO +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java new file mode 100644 index 00000000000..8d4f09e6878 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java @@ -0,0 +1,1231 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.dse.driver.internal.core.cql.DseConversions; +import com.datastax.dse.driver.internal.core.cql.continuous.DefaultContinuousAsyncResultSet; +import com.datastax.dse.protocol.internal.request.Revise; +import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.RequestThrottlingException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.connection.FrameTooLongException; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; +import com.datastax.oss.driver.api.core.servererrors.ProtocolError; +import 
com.datastax.oss.driver.api.core.servererrors.QueryValidationException; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; +import com.datastax.oss.driver.internal.core.channel.DriverChannel; +import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RepreparePayload; +import com.datastax.oss.driver.internal.core.util.Loggers; +import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Prepare; +import com.datastax.oss.protocol.internal.response.Error; +import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.error.Unprepared; +import com.datastax.oss.protocol.internal.response.result.Rows; +import 
com.datastax.oss.protocol.internal.response.result.Void; +import com.datastax.oss.protocol.internal.util.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.netty.handler.codec.EncoderException; +import io.netty.util.Timeout; +import io.netty.util.Timer; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.CancellationException; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantLock; +import net.jcip.annotations.GuardedBy; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Handles a request that supports multiple response messages (a.k.a. continuous paging request). 
+ */ +@ThreadSafe +public abstract class ContinuousRequestHandlerBase< + StatementT extends Request, ResultSetT, ExecutionInfoT> + implements ResponseCallback, GenericFutureListener>, Throttled { + + private static final Logger LOG = LoggerFactory.getLogger(ContinuousRequestHandlerBase.class); + + private final String logPrefix; + protected final StatementT statement; + protected final DefaultSession session; + protected final InternalDriverContext context; + protected final DriverExecutionProfile executionProfile; + private final Queue queryPlan; + private final RetryPolicy retryPolicy; + protected final RequestThrottler throttler; + private final int maxEnqueuedPages; + private final int maxPages; + private final boolean protocolBackpressureAvailable; + private final boolean isIdempotent; + private final Duration timeoutFirstPage; + private final Duration timeoutOtherPages; + private final Timer timer; + private final SessionMetricUpdater sessionMetricUpdater; + + // The errors on the nodes that were already tried. + // We don't use a map because nodes can appear multiple times. + protected final List> errors = new CopyOnWriteArrayList<>(); + + // Coordinates concurrent accesses between the client and I/O threads + private final ReentrantLock lock = new ReentrantLock(); + + // The page queue, storing responses that we have received and have not been consumed by the + // client yet. + @GuardedBy("lock") + private final Queue queue; + + // If the client requests a page and we can't serve it immediately (empty queue), then we create + // this future and have the client wait on it. Otherwise this field is null. + @GuardedBy("lock") + private CompletableFuture pendingResult; + + // How many pages were requested. This is the total number of pages requested from the beginning. 
+ // It will be zero if the protocol does not support numPagesRequested (DSE_V1) + @GuardedBy("lock") + private int numPagesRequested; + + // An integer that represents the state of the continuous paging request: + // - if positive, it is the sequence number of the next expected page; + // - if negative, it is a terminal state, identified by the constants below. + @GuardedBy("lock") + private int state = 1; + + private static final int STATE_FINISHED = -1; + private static final int STATE_FAILED = -2; + + // Set when the execution starts, and is never modified after. + private volatile long startTimeNanos; + + // These are set when the first page arrives, and are never modified after. + protected volatile ColumnDefinitions columnDefinitions; + + // These change over time as different nodes are tried; + // they can only be null before the first request is sent. + protected volatile Node node; + private volatile DriverChannel channel; + private volatile int streamId; + // Set each time a new request/response cycle starts. + private volatile long messageStartTimeNanos; + private volatile Timeout timeout; + + // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for + // the first attempt, 1 for the first retry, etc.). 
+ private volatile int retryCount; + private Class resultSetClass; + + public ContinuousRequestHandlerBase( + @NonNull StatementT statement, + @NonNull DefaultSession session, + @NonNull InternalDriverContext context, + @NonNull String sessionLogPrefix, + @NonNull Class resultSetClass) { + this.resultSetClass = resultSetClass; + + ProtocolVersion protocolVersion = context.getProtocolVersion(); + if (!context + .getProtocolVersionRegistry() + .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { + throw new IllegalStateException( + "Cannot execute continuous paging requests with protocol version " + protocolVersion); + } + this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); + LOG.trace("[{}] Creating new continuous handler for request {}", logPrefix, statement); + this.statement = statement; + this.session = session; + this.context = context; + this.executionProfile = Conversions.resolveExecutionProfile(this.statement, this.context); + this.queryPlan = + statement.getNode() != null + ? new QueryPlan(statement.getNode()) + : context + .getLoadBalancingPolicyWrapper() + .newQueryPlan(statement, executionProfile.getName(), session); + this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); + Boolean idempotent = statement.isIdempotent(); + this.isIdempotent = + idempotent == null + ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) + : idempotent; + this.timeoutFirstPage = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); + this.timeoutOtherPages = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); + this.timer = context.getNettyOptions().getTimer(); + this.maxEnqueuedPages = + executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + this.queue = new ArrayDeque<>(maxEnqueuedPages); + this.maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + this.protocolBackpressureAvailable = + protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); + this.numPagesRequested = protocolBackpressureAvailable ? maxEnqueuedPages : 0; + this.throttler = context.getRequestThrottler(); + this.sessionMetricUpdater = session.getMetricUpdater(); + this.startTimeNanos = System.nanoTime(); + } + + @NonNull + protected abstract Message getMessage(); + + protected abstract boolean isTracingEnabled(); + + @NonNull + protected abstract Map createPayload(); + + @NonNull + protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfoT executionInfo); + + protected abstract int pageNumber(@NonNull ResultSetT resultSet); + + @NonNull + protected abstract ExecutionInfoT createExecutionInfo( + @NonNull Result result, @Nullable Frame response); + + @NonNull + protected abstract ResultSetT createResultSet( + @NonNull Rows rows, @NonNull ExecutionInfoT executionInfo) throws IOException; + + // MAIN LIFECYCLE + + @Override + public void onStreamIdAssigned(int streamId) { + LOG.trace("[{}] Assigned streamId {} on node {}", logPrefix, streamId, node); + this.streamId = streamId; + } + + @Override + public boolean isLastResponse(@NonNull Frame responseFrame) { + Message message = responseFrame.message; + if (message instanceof Rows) { + Rows rows = (Rows) message; + DseRowsMetadata metadata = (DseRowsMetadata) 
rows.getMetadata(); + return metadata.isLastContinuousPage; + } else { + return message instanceof Error; + } + } + + @Override + public void onThrottleReady(boolean wasDelayed) { + if (wasDelayed) { + session + .getMetricUpdater() + .updateTimer( + DefaultSessionMetric.THROTTLING_DELAY, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); + } + sendRequest(null); + } + + public CompletionStage handle() { + return dequeueOrCreatePending(); + } + + /** + * Sends the initial request to the next available node. + * + * @param node if not null, it will be attempted first before the rest of the query plan. It + * happens only when we retry on the same host. + */ + private void sendRequest(@Nullable Node node) { + channel = null; + if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { + while ((node = queryPlan.poll()) != null) { + channel = session.getChannel(node, logPrefix); + if (channel != null) { + break; + } + } + } + if (channel == null || node == null) { + // We've reached the end of the query plan without finding any node to write to; abort the + // continuous paging session. + lock.lock(); + try { + abort(AllNodesFailedException.fromErrors(errors), false); + } finally { + lock.unlock(); + } + } else { + this.node = node; + streamId = -1; + messageStartTimeNanos = System.nanoTime(); + channel.write(getMessage(), isTracingEnabled(), createPayload(), this).addListener(this); + } + } + + /** + * Invoked when the write from {@link #sendRequest(Node)} completes. + * + * @param future The future representing the outcome of the write operation. 
+ */ + @Override + public void operationComplete(@NonNull Future future) { + if (!future.isSuccess()) { + Throwable error = future.cause(); + if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { + trackNodeError(node, error.getCause()); + lock.lock(); + try { + abort(error.getCause(), false); + } finally { + lock.unlock(); + } + } else { + LOG.trace( + "[{}] Failed to send request on {}, trying next node (cause: {})", + logPrefix, + channel, + error); + ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); + recordError(node, error); + trackNodeError(node, error.getCause()); + sendRequest(null); + } + } else { + LOG.trace("[{}] Request sent on {}", logPrefix, channel); + timeout = scheduleTimeout(1); + } + } + + /** + * Invoked when a continuous paging response is received, either a successful or failed one. + * + *

      Delegates further processing to appropriate methods: {@link #processResultResponse(Result, + * Frame)} if the response was successful, or {@link #processErrorResponse(Error)} if it wasn't. + * + * @param response the received {@link Frame}. + */ + @Override + public void onResponse(@NonNull Frame response) { + stopNodeMessageTimer(); + cancelTimeout(); + lock.lock(); + try { + if (state < 0) { + LOG.trace("[{}] Got result but the request has been cancelled, ignoring", logPrefix); + return; + } + try { + Message responseMessage = response.message; + if (responseMessage instanceof Result) { + LOG.trace("[{}] Got result", logPrefix); + processResultResponse((Result) responseMessage, response); + } else if (responseMessage instanceof Error) { + LOG.trace("[{}] Got error response", logPrefix); + processErrorResponse((Error) responseMessage); + } else { + IllegalStateException error = + new IllegalStateException("Unexpected response " + responseMessage); + trackNodeError(node, error); + abort(error, false); + } + } catch (Throwable t) { + trackNodeError(node, t); + abort(t, false); + } + } finally { + lock.unlock(); + } + } + + /** + * Invoked when a continuous paging request hits an unexpected error. + * + *

      Delegates further processing to to the retry policy ({@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the error encountered, usually a network problem. + */ + @Override + public void onFailure(@NonNull Throwable error) { + cancelTimeout(); + LOG.trace(String.format("[%s] Request failure", logPrefix), error); + RetryDecision decision; + if (!isIdempotent || error instanceof FrameTooLongException) { + decision = RetryDecision.RETHROW; + } else { + decision = retryPolicy.onRequestAborted(statement, error, retryCount); + } + updateErrorMetrics( + ((DefaultNode) node).getMetricUpdater(), + decision, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED); + lock.lock(); + try { + processRetryDecision(decision, error); + } finally { + lock.unlock(); + } + } + + @Override + public void onThrottleFailure(@NonNull RequestThrottlingException error) { + session + .getMetricUpdater() + .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); + lock.lock(); + try { + abort(error, false); + } finally { + lock.unlock(); + } + } + + // PROCESSING METHODS + + /** + * Processes a new result response, creating the corresponding {@link ResultSetT} object and then + * enqueuing it or serving it directly to the user if he was waiting for it. + * + * @param result the result to process. It is normally a {@link Rows} object, but may be a {@link + * Void} object if the retry policy decided to ignore an error. + * @param frame the {@link Frame} (used to create the {@link ExecutionInfo} the first time). 
+ */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { + assert lock.isHeldByCurrentThread(); + try { + ExecutionInfoT executionInfo = createExecutionInfo(result, frame); + if (result instanceof Rows) { + DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); + if (columnDefinitions == null) { + // Contrary to ROWS responses from regular queries, + // the first page always includes metadata so we use this + // regardless of whether or not the query was from a prepared statement. + columnDefinitions = Conversions.toColumnDefinitions(rowsMetadata, context); + } + int pageNumber = rowsMetadata.continuousPageNumber; + int currentPage = state; + if (pageNumber != currentPage) { + abort( + new IllegalStateException( + String.format("Received page %d but was expecting %d", pageNumber, currentPage)), + false); + } else { + int pageSize = ((Rows) result).getData().size(); + ResultSetT resultSet = createResultSet((Rows) result, executionInfo); + if (rowsMetadata.isLastContinuousPage) { + LOG.trace("[{}] Received last page ({} - {} rows)", logPrefix, pageNumber, pageSize); + state = STATE_FINISHED; + reenableAutoReadIfNeeded(); + enqueueOrCompletePending(resultSet); + stopGlobalRequestTimer(); + } else { + LOG.trace("[{}] Received page {} ({} rows)", logPrefix, pageNumber, pageSize); + if (currentPage > 0) { + state = currentPage + 1; + } + enqueueOrCompletePending(resultSet); + } + } + } else { + // Void responses happen only when the retry decision is ignore. 
+ assert result instanceof Void; + ResultSetT resultSet = createEmptyResultSet(executionInfo); + LOG.trace( + "[{}] Continuous paging interrupted by retry policy decision to ignore error", + logPrefix); + state = STATE_FINISHED; + reenableAutoReadIfNeeded(); + enqueueOrCompletePending(resultSet); + stopGlobalRequestTimer(); + } + } catch (Throwable error) { + abort(error, false); + } + } + + /** + * Processes an unsuccessful response. + * + *

      Depending on the error, may trigger: + * + *

        + *
      1. a re-prepare cycle, see {@link #processUnprepared(Unprepared)}; + *
      2. an immediate retry on the next host, bypassing the retry policy, if the host was + * bootstrapping; + *
      3. an immediate abortion if the error is unrecoverable; + *
      4. further processing if the error is recoverable, see {@link + * #processRecoverableError(CoordinatorException)} + *
      + * + * @param errorMessage the error message received. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processErrorResponse(@NonNull Error errorMessage) { + assert lock.isHeldByCurrentThread(); + if (errorMessage instanceof Unprepared) { + processUnprepared((Unprepared) errorMessage); + } else { + CoordinatorException error = DseConversions.toThrowable(node, errorMessage, context); + if (error instanceof BootstrappingException) { + LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); + recordError(node, error); + trackNodeError(node, error); + sendRequest(null); + } else if (error instanceof QueryValidationException + || error instanceof FunctionFailureException + || error instanceof ProtocolError + || state > 1) { + // we only process recoverable errors for the first page, + // errors on subsequent pages will always trigger an immediate abortion + LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); + trackNodeError(node, error); + abort(error, true); + } else { + processRecoverableError(error); + } + } + } + + /** + * Processes a recoverable error. + * + *

      In most cases, delegates to the retry policy and its decision, see {@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the recoverable error. + */ + private void processRecoverableError(@NonNull CoordinatorException error) { + assert lock.isHeldByCurrentThread(); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + RetryDecision decision; + if (error instanceof ReadTimeoutException) { + ReadTimeoutException readTimeout = (ReadTimeoutException) error; + decision = + retryPolicy.onReadTimeout( + statement, + readTimeout.getConsistencyLevel(), + readTimeout.getBlockFor(), + readTimeout.getReceived(), + readTimeout.wasDataPresent(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.READ_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); + } else if (error instanceof WriteTimeoutException) { + WriteTimeoutException writeTimeout = (WriteTimeoutException) error; + if (isIdempotent) { + decision = + retryPolicy.onWriteTimeout( + statement, + writeTimeout.getConsistencyLevel(), + writeTimeout.getWriteType(), + writeTimeout.getBlockFor(), + writeTimeout.getReceived(), + retryCount); + } else { + decision = RetryDecision.RETHROW; + } + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.WRITE_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); + } else if (error instanceof UnavailableException) { + UnavailableException unavailable = (UnavailableException) error; + decision = + retryPolicy.onUnavailable( + statement, + unavailable.getConsistencyLevel(), + unavailable.getRequired(), + unavailable.getAlive(), + retryCount); + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.UNAVAILABLES, + DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, + DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); + } else { + decision = + isIdempotent + ? 
retryPolicy.onErrorResponse(statement, error, retryCount) + : RetryDecision.RETHROW; + updateErrorMetrics( + metricUpdater, + decision, + DefaultNodeMetric.OTHER_ERRORS, + DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, + DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); + } + processRetryDecision(decision, error); + } + + /** + * Processes an {@link Unprepared} error by re-preparing then retrying on the same host. + * + * @param errorMessage the unprepared error message. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void processUnprepared(@NonNull Unprepared errorMessage) { + assert lock.isHeldByCurrentThread(); + ByteBuffer idToReprepare = ByteBuffer.wrap(errorMessage.id); + LOG.trace( + "[{}] Statement {} is not prepared on {}, re-preparing", + logPrefix, + Bytes.toHexString(idToReprepare), + node); + RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); + if (repreparePayload == null) { + throw new IllegalStateException( + String.format( + "Tried to execute unprepared query %s but we don't have the data to re-prepare it", + Bytes.toHexString(idToReprepare))); + } + Prepare prepare = repreparePayload.toMessage(); + Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); + ThrottledAdminRequestHandler.prepare( + channel, + true, + prepare, + repreparePayload.customPayload, + timeout, + throttler, + sessionMetricUpdater, + logPrefix) + .start() + .whenComplete( + (repreparedId, exception) -> { + // If we run into an unrecoverable error, surface it to the client instead of retrying + Throwable fatalError = null; + if (exception == null) { + if (!repreparedId.equals(idToReprepare)) { + IllegalStateException illegalStateException = + new IllegalStateException( + String.format( + "ID mismatch while trying to reprepare (expected %s, got %s). " + + "This prepared statement won't work anymore. " + + "This usually happens when you run a 'USE...' 
query after " + + "the statement was prepared.", + Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); + trackNodeError(node, illegalStateException); + fatalError = illegalStateException; + } else { + LOG.trace( + "[{}] Re-prepare successful, retrying on the same node ({})", + logPrefix, + node); + sendRequest(node); + } + } else { + if (exception instanceof UnexpectedResponseException) { + Message prepareErrorMessage = ((UnexpectedResponseException) exception).message; + if (prepareErrorMessage instanceof Error) { + CoordinatorException prepareError = + DseConversions.toThrowable(node, (Error) prepareErrorMessage, context); + if (prepareError instanceof QueryValidationException + || prepareError instanceof FunctionFailureException + || prepareError instanceof ProtocolError) { + LOG.trace("[{}] Unrecoverable error on re-prepare, rethrowing", logPrefix); + trackNodeError(node, prepareError); + fatalError = prepareError; + } + } + } else if (exception instanceof RequestThrottlingException) { + trackNodeError(node, exception); + fatalError = exception; + } + if (fatalError == null) { + LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); + recordError(node, exception); + trackNodeError(node, exception); + sendRequest(null); + } + } + if (fatalError != null) { + lock.lock(); + try { + abort(fatalError, true); + } finally { + lock.unlock(); + } + } + }); + } + + /** + * Processes the retry decision by triggering a retry, aborting or ignoring; also records the + * failures for further access. + * + * @param decision the decision to process. + * @param error the original error. 
+ */ + @SuppressWarnings({"NonAtomicOperationOnVolatileField", "NonAtomicVolatileUpdate"}) + private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { + assert lock.isHeldByCurrentThread(); + LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); + switch (decision) { + case RETRY_SAME: + recordError(node, error); + trackNodeError(node, error); + retryCount++; + sendRequest(node); + break; + case RETRY_NEXT: + recordError(node, error); + trackNodeError(node, error); + retryCount++; + sendRequest(null); + break; + case RETHROW: + trackNodeError(node, error); + abort(error, true); + break; + case IGNORE: + processResultResponse(Void.INSTANCE, null); + break; + } + } + + // PAGE HANDLING + + /** + * Enqueues a response or, if the client was already waiting for it, completes the pending future. + * + *

      Guarded by {@link #lock}. + * + * @param pageOrError the next page, or an error. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void enqueueOrCompletePending(@NonNull Object pageOrError) { + assert lock.isHeldByCurrentThread(); + if (pendingResult != null) { + if (LOG.isTraceEnabled()) { + LOG.trace( + "[{}] Client was waiting on empty queue, completing with {}", + logPrefix, + asTraceString(pageOrError)); + } + CompletableFuture tmp = pendingResult; + // null out pendingResult before completing it because its completion + // may trigger a call to fetchNextPage -> dequeueOrCreatePending, + // which expects pendingResult to be null. + pendingResult = null; + completeResultSetFuture(tmp, pageOrError); + } else { + if (LOG.isTraceEnabled()) { + LOG.trace("[{}] Enqueuing {}", logPrefix, asTraceString(pageOrError)); + } + queue.add(pageOrError); + // Backpressure without protocol support: if the queue grows too large, + // disable auto-read so that the channel eventually becomes + // non-writable on the server side (causing it to back off for a while) + if (!protocolBackpressureAvailable && queue.size() == maxEnqueuedPages && state > 0) { + LOG.trace( + "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, queue.size()); + channel.config().setAutoRead(false); + } + } + } + + /** + * Dequeue a response or, if the queue is empty, create the future that will get notified of the + * next response, when it arrives. + * + *

      Called from user code, see {@link ContinuousAsyncResultSet#fetchNextPage()}. + * + * @return the next page's future; never null. + */ + @NonNull + public CompletableFuture dequeueOrCreatePending() { + lock.lock(); + try { + // If the client was already waiting for a page, there's no way it can call this method again + // (this is guaranteed by our public API because in order to ask for the next page, + // you need the reference to the previous page). + assert pendingResult == null; + + Object head = queue.poll(); + if (!protocolBackpressureAvailable && head != null && queue.size() == maxEnqueuedPages - 1) { + LOG.trace( + "[{}] Back to {} queued response pages, re-enabling auto-read", + logPrefix, + queue.size()); + channel.config().setAutoRead(true); + } + maybeRequestMore(); + if (head != null) { + if (state == STATE_FAILED && !(head instanceof Throwable)) { + LOG.trace( + "[{}] Client requested next page on cancelled queue, discarding page and returning cancelled future", + logPrefix); + return cancelledResultSetFuture(); + } else { + if (LOG.isTraceEnabled()) { + LOG.trace( + "[{}] Client requested next page on non-empty queue, returning immediate future of {}", + logPrefix, + asTraceString(head)); + } + return immediateResultSetFuture(head); + } + } else { + if (state == STATE_FAILED) { + LOG.trace( + "[{}] Client requested next page on cancelled empty queue, returning cancelled future", + logPrefix); + return cancelledResultSetFuture(); + } else { + LOG.trace( + "[{}] Client requested next page but queue is empty, installing future", logPrefix); + pendingResult = createResultSetFuture(); + // Only schedule a timeout if we're past the first page (the first page's timeout is + // handled in sendRequest). + if (state > 1) { + timeout = scheduleTimeout(state); + // Note: each new timeout is cancelled when the next response arrives, see + // onResponse(Frame). 
+ } + return pendingResult; + } + } + } finally { + lock.unlock(); + } + } + + /** + * If the total number of results in the queue and in-flight (requested - received) is less than + * half the queue size, then request more pages, unless the {@link #state} is failed, we're still + * waiting for the first page (so maybe still throttled or in the middle of a retry), or we don't + * support backpressure at the protocol level. + */ + @SuppressWarnings("GuardedBy") + private void maybeRequestMore() { + assert lock.isHeldByCurrentThread(); + if (state < 2 || streamId == -1 || !protocolBackpressureAvailable) { + return; + } + // if we have already requested more than the client needs, then no need to request some more + if (maxPages > 0 && numPagesRequested >= maxPages) { + return; + } + // the pages received so far, which is the state minus one + int received = state - 1; + int requested = numPagesRequested; + // the pages that fit in the queue, which is the queue free space minus the requests in flight + int freeSpace = maxEnqueuedPages - queue.size(); + int inFlight = requested - received; + int numPagesFittingInQueue = freeSpace - inFlight; + if (numPagesFittingInQueue >= maxEnqueuedPages / 2) { + LOG.trace("[{}] Requesting more {} pages", logPrefix, numPagesFittingInQueue); + numPagesRequested = requested + numPagesFittingInQueue; + sendMorePagesRequest(numPagesFittingInQueue); + } + } + + /** + * Sends a request for more pages (a.k.a. backpressure request). + * + * @param nextPages the number of extra pages to request. 
+ */ + @SuppressWarnings("GuardedBy") + private void sendMorePagesRequest(int nextPages) { + assert lock.isHeldByCurrentThread(); + assert channel != null : "expected valid connection in order to request more pages"; + assert protocolBackpressureAvailable; + assert streamId != -1; + + LOG.trace("[{}] Sending request for more pages", logPrefix); + ThrottledAdminRequestHandler.query( + channel, + true, + Revise.requestMoreContinuousPages(streamId, nextPages), + statement.getCustomPayload(), + timeoutOtherPages, + throttler, + session.getMetricUpdater(), + logPrefix, + "request " + nextPages + " more pages for id " + streamId) + .start() + .handle( + (result, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, "[{}] Error requesting more pages, aborting.", logPrefix, error); + lock.lock(); + try { + // Set fromServer to false because we want the callback to still cancel the + // session if possible or else the server will wait on a timeout. + abort(error, false); + } finally { + lock.unlock(); + } + } + return null; + }); + } + + // TIMEOUT HANDLING + + private Timeout scheduleTimeout(int expectedPage) { + if (expectedPage < 0) { + return null; + } + Duration timeout = expectedPage == 1 ? timeoutFirstPage : timeoutOtherPages; + if (timeout.toNanos() <= 0) { + return null; + } + LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); + return timer.newTimeout( + timeout1 -> { + lock.lock(); + try { + if (state == expectedPage) { + abort( + new DriverTimeoutException( + String.format("Timed out waiting for page %d", expectedPage)), + false); + } else { + // Ignore timeout if the request has moved on in the interim. + LOG.trace( + "[{}] Timeout fired for page {} but query already at state {}, skipping", + logPrefix, + expectedPage, + state); + } + } finally { + lock.unlock(); + } + }, + timeout.toNanos(), + TimeUnit.NANOSECONDS); + } + + /** Cancels the current timeout, if non null. 
*/ + private void cancelTimeout() { + Timeout timeout = this.timeout; + if (timeout != null) { + LOG.trace("[{}] Cancelling timeout", logPrefix); + timeout.cancel(); + } + } + + // CANCELLATION + + /** + * Cancels the continuous paging request. + * + *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#cancel()}, or from a + * driver I/O thread. + */ + public void cancel() { + lock.lock(); + try { + if (state < 0) { + return; + } else { + LOG.trace( + "[{}] Cancelling continuous paging session with state {} on node {}", + logPrefix, + state, + node); + state = STATE_FAILED; + if (pendingResult != null) { + pendingResult.cancel(true); + } + // the rest can be done without holding the lock, see below + } + } finally { + lock.unlock(); + } + if (channel != null) { + if (!channel.closeFuture().isDone()) { + this.channel.cancel(this); + } + sendCancelRequest(); + } + reenableAutoReadIfNeeded(); + } + + private void sendCancelRequest() { + LOG.trace("[{}] Sending cancel request", logPrefix); + ThrottledAdminRequestHandler.query( + channel, + true, + Revise.cancelContinuousPaging(streamId), + statement.getCustomPayload(), + timeoutOtherPages, + throttler, + session.getMetricUpdater(), + logPrefix, + "cancel request") + .start() + .handle( + (result, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, + "[{}] Error sending cancel request. 
" + + "This is not critical (the request will eventually time out server-side).", + logPrefix, + error); + } else { + LOG.trace("[{}] Continuous paging session cancelled successfully", logPrefix); + } + return null; + }); + } + + // TERMINATION + + private void reenableAutoReadIfNeeded() { + // Make sure we don't leave the channel unreadable + LOG.trace("[{}] Re-enabling auto-read", logPrefix); + if (!protocolBackpressureAvailable) { + channel.config().setAutoRead(true); + } + } + + // ERROR HANDLING + + private void recordError(@NonNull Node node, @NonNull Throwable error) { + errors.add(new AbstractMap.SimpleEntry<>(node, error)); + } + + private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { + long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; + context + .getRequestTracker() + .onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); + } + + /** + * Aborts the continuous paging session due to an error that can be either from the server or the + * client. + * + * @param error the error that causes the abortion. + * @param fromServer whether the error was triggered by the coordinator or by the driver. + */ + @SuppressWarnings("GuardedBy") // this method is only called with the lock held + private void abort(@NonNull Throwable error, boolean fromServer) { + assert lock.isHeldByCurrentThread(); + LOG.trace( + "[{}] Aborting due to {} ({})", + logPrefix, + error.getClass().getSimpleName(), + error.getMessage()); + if (channel == null) { + // This only happens when sending the initial request, if no host was available + // or if the iterator returned by the LBP threw an exception. + // In either case the write was not even attempted, and + // we set the state right now. 
+ enqueueOrCompletePending(error); + state = STATE_FAILED; + } else if (state > 0) { + enqueueOrCompletePending(error); + if (fromServer) { + // We can safely assume the server won't send any more responses, + // so set the state and call release() right now. + state = STATE_FAILED; + reenableAutoReadIfNeeded(); + } else { + // attempt to cancel first, i.e. ask server to stop sending responses, + // and only then release. + cancel(); + } + } + stopGlobalRequestTimer(); + } + + // METRICS + + private void stopNodeMessageTimer() { + ((DefaultNode) node) + .getMetricUpdater() + .updateTimer( + DefaultNodeMetric.CQL_MESSAGES, + executionProfile.getName(), + System.nanoTime() - messageStartTimeNanos, + TimeUnit.NANOSECONDS); + } + + private void stopGlobalRequestTimer() { + session + .getMetricUpdater() + .updateTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); + } + + private void updateErrorMetrics( + @NonNull NodeMetricUpdater metricUpdater, + @NonNull RetryDecision decision, + @NonNull DefaultNodeMetric error, + @NonNull DefaultNodeMetric retriesOnError, + @NonNull DefaultNodeMetric ignoresOnError) { + metricUpdater.incrementCounter(error, executionProfile.getName()); + switch (decision) { + case RETRY_SAME: + case RETRY_NEXT: + metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); + metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); + break; + case IGNORE: + metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); + metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); + break; + case RETHROW: + // nothing do do + } + } + + // UTILITY METHODS + + @NonNull + private CompletableFuture createResultSetFuture() { + CompletableFuture future = new CompletableFuture<>(); + future.whenComplete( + (rs, t) -> { + if (t instanceof CancellationException) { + // if the future 
has been canceled by the user, propagate the cancellation + cancel(); + } + }); + return future; + } + + @NonNull + private CompletableFuture immediateResultSetFuture(@NonNull Object pageOrError) { + CompletableFuture future = createResultSetFuture(); + completeResultSetFuture(future, pageOrError); + return future; + } + + @NonNull + private CompletableFuture cancelledResultSetFuture() { + return immediateResultSetFuture( + new CancellationException( + "Can't get more results because the continuous query has failed already. " + + "Most likely this is because the query was cancelled")); + } + + private void completeResultSetFuture( + @NonNull CompletableFuture future, @NonNull Object pageOrError) { + long now = System.nanoTime(); + long totalLatencyNanos = now - startTimeNanos; + long nodeLatencyNanos = now - messageStartTimeNanos; + if (resultSetClass.isInstance(pageOrError)) { + if (future.complete(resultSetClass.cast(pageOrError))) { + throttler.signalSuccess(this); + context + .getRequestTracker() + .onNodeSuccess(statement, nodeLatencyNanos, executionProfile, node, logPrefix); + context + .getRequestTracker() + .onSuccess(statement, totalLatencyNanos, executionProfile, node, logPrefix); + } + } else { + Throwable error = (Throwable) pageOrError; + if (future.completeExceptionally(error)) { + context + .getRequestTracker() + .onError(statement, error, totalLatencyNanos, executionProfile, node, logPrefix); + if (error instanceof DriverTimeoutException) { + throttler.signalTimeout(this); + session + .getMetricUpdater() + .incrementCounter( + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + } else if (!(error instanceof RequestThrottlingException)) { + throttler.signalError(this, error); + } + } + } + } + + @NonNull + private String asTraceString(@NonNull Object pageOrError) { + return resultSetClass.isInstance(pageOrError) + ? 
"page " + pageNumber(resultSetClass.cast(pageOrError)) + : ((Exception) pageOrError).getClass().getSimpleName(); + } + + @VisibleForTesting + public int getState() { + lock.lock(); + try { + return state; + } finally { + lock.unlock(); + } + } + + @VisibleForTesting + public CompletableFuture getPendingResult() { + lock.lock(); + try { + return pendingResult; + } finally { + lock.unlock(); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java index eea0b331e73..a1edf1da2e2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java @@ -44,7 +44,8 @@ public CompletionStage process( DefaultSession session, InternalDriverContext context, String sessionLogPrefix) { - return new ContinuousCqlRequestHandler(request, session, context, sessionLogPrefix).handle(); + return new ContinuousCqlRequestHandler(request, session, context, sessionLogPrefix) + .dequeueOrCreatePending(); } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 34e3b7f4666..ddedcbd0227 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -15,1138 +15,80 @@ */ package com.datastax.dse.driver.internal.core.cql.continuous; -import com.datastax.dse.driver.api.core.DseProtocolVersion; -import com.datastax.dse.driver.api.core.config.DseDriverOption; import 
com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; -import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; -import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.dse.driver.internal.core.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.cql.DseConversions; -import com.datastax.dse.protocol.internal.request.Revise; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; -import com.datastax.oss.driver.api.core.AllNodesFailedException; -import com.datastax.oss.driver.api.core.DriverTimeoutException; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.RequestThrottlingException; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfig; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.connection.FrameTooLongException; -import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; -import com.datastax.oss.driver.api.core.retry.RetryPolicy; -import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; -import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; -import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; -import com.datastax.oss.driver.api.core.servererrors.ProtocolError; -import com.datastax.oss.driver.api.core.servererrors.QueryValidationException; -import 
com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; -import com.datastax.oss.driver.api.core.servererrors.UnavailableException; -import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; -import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; -import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; -import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.cql.DefaultRow; -import com.datastax.oss.driver.internal.core.metadata.DefaultNode; -import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; -import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.session.DefaultSession; -import com.datastax.oss.driver.internal.core.session.RepreparePayload; import com.datastax.oss.driver.internal.core.util.CountingIterator; -import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.request.Prepare; -import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Result; -import com.datastax.oss.protocol.internal.response.error.Unprepared; import 
com.datastax.oss.protocol.internal.response.result.Rows; -import com.datastax.oss.protocol.internal.response.result.Void; -import com.datastax.oss.protocol.internal.util.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import io.netty.handler.codec.EncoderException; -import io.netty.util.Timeout; -import io.netty.util.Timer; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; import java.nio.ByteBuffer; -import java.time.Duration; -import java.util.AbstractMap; -import java.util.ArrayDeque; import java.util.List; import java.util.Map; import java.util.Queue; -import java.util.concurrent.CancellationException; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.CompletionStage; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.ReentrantLock; -import net.jcip.annotations.GuardedBy; import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Handles a request that supports multiple response messages (a.k.a. continuous paging request). 
*/ @ThreadSafe public class ContinuousCqlRequestHandler + extends ContinuousRequestHandlerBase implements ResponseCallback, GenericFutureListener>, Throttled { - private static final Logger LOG = LoggerFactory.getLogger(ContinuousCqlRequestHandler.class); - - private final String logPrefix; - private final Statement statement; - private final DefaultSession session; - private final InternalDriverContext context; - private final DriverExecutionProfile executionProfile; - private final Queue queryPlan; - private final RetryPolicy retryPolicy; - private final RequestThrottler throttler; - private final int maxEnqueuedPages; - private final int maxPages; - private final boolean protocolBackpressureAvailable; - private final boolean isIdempotent; private final Message message; - private final Duration timeoutFirstPage; - private final Duration timeoutOtherPages; - private final Timer timer; - private final SessionMetricUpdater sessionMetricUpdater; - - // The errors on the nodes that were already tried. - // We don't use a map because nodes can appear multiple times. - private final List> errors = new CopyOnWriteArrayList<>(); - - // Coordinates concurrent accesses between the client and I/O threads - private final ReentrantLock lock = new ReentrantLock(); - - // The page queue, storing responses that we have received and have not been consumed by the - // client yet. - @GuardedBy("lock") - private final Queue queue; - - // If the client requests a page and we can't serve it immediately (empty queue), then we create - // this future and have the client wait on it. Otherwise this field is null. - @GuardedBy("lock") - @VisibleForTesting - CompletableFuture pendingResult; - - // How many pages were requested. This is the total number of pages requested from the beginning. 
- // It will be zero if the protocol does not support numPagesRequested (DSE_V1) - @GuardedBy("lock") - private int numPagesRequested; - - // An integer that represents the state of the continuous paging request: - // - if positive, it is the sequence number of the next expected page; - // - if negative, it is a terminal state, identified by the constants below. - @GuardedBy("lock") - @VisibleForTesting - int state = 1; - private static final int STATE_FINISHED = -1; - private static final int STATE_FAILED = -2; - - // Set when the execution starts, and is never modified after. - private volatile long startTimeNanos; - - // These are set when the first page arrives, and are never modified after. - private volatile ColumnDefinitions columnDefinitions; - - // These change over time as different nodes are tried; - // they can only be null before the first request is sent. - private volatile Node node; - private volatile DriverChannel channel; - private volatile int streamId; - // Set each time a new request/response cycle starts. - private volatile long messageStartTimeNanos; - private volatile Timeout timeout; - - // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for - // the first attempt, 1 for the first retry, etc.). 
- private volatile int retryCount; - - public ContinuousCqlRequestHandler( + ContinuousCqlRequestHandler( @NonNull Statement statement, @NonNull DefaultSession session, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix) { - ProtocolVersion protocolVersion = context.getProtocolVersion(); - if (!context - .getProtocolVersionRegistry() - .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { - throw new IllegalStateException( - "Cannot execute continuous paging requests with protocol version " + protocolVersion); - } - this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); - LOG.trace("[{}] Creating new continuous handler for request {}", logPrefix, statement); - this.statement = statement; - this.session = session; - this.context = context; - if (statement.getExecutionProfile() != null) { - this.executionProfile = statement.getExecutionProfile(); - } else { - DriverConfig config = context.getConfig(); - String profileName = statement.getExecutionProfileName(); - this.executionProfile = - (profileName == null || profileName.isEmpty()) - ? config.getDefaultProfile() - : config.getProfile(profileName); - } - this.queryPlan = - statement.getNode() != null - ? new QueryPlan(statement.getNode()) - : context - .getLoadBalancingPolicyWrapper() - .newQueryPlan(statement, executionProfile.getName(), session); - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); - Boolean idempotent = statement.isIdempotent(); - this.isIdempotent = - (idempotent == null) - ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) - : idempotent; - this.timeoutFirstPage = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); - this.timeoutOtherPages = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); - this.timer = context.getNettyOptions().getTimer(); - this.maxEnqueuedPages = - executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); - this.queue = new ArrayDeque<>(maxEnqueuedPages); - this.maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); - this.protocolBackpressureAvailable = - protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); - this.numPagesRequested = protocolBackpressureAvailable ? maxEnqueuedPages : 0; + super(statement, session, context, sessionLogPrefix, ContinuousAsyncResultSet.class); this.message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); - this.throttler = context.getRequestThrottler(); - this.throttler.register(this); - this.sessionMetricUpdater = session.getMetricUpdater(); - this.startTimeNanos = System.nanoTime(); - } - - // MAIN LIFECYCLE - - @Override - public void onStreamIdAssigned(int streamId) { - LOG.trace("[{}] Assigned streamId {} on node {}", logPrefix, streamId, node); - this.streamId = streamId; - } - - @Override - public boolean isLastResponse(@NonNull Frame responseFrame) { - Message message = responseFrame.message; - if (message instanceof Rows) { - Rows rows = (Rows) message; - DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); - return metadata.isLastContinuousPage; - } else { - return message instanceof Error; - } - } - - @Override - public void onThrottleReady(boolean wasDelayed) { - if (wasDelayed) { - session - .getMetricUpdater() - .updateTimer( - DefaultSessionMetric.THROTTLING_DELAY, - executionProfile.getName(), - System.nanoTime() - startTimeNanos, - TimeUnit.NANOSECONDS); - } - 
sendRequest(null); - } - - public CompletionStage handle() { - return dequeueOrCreatePending(); - } - - /** - * Sends the initial request to the next available node. - * - * @param node if not null, it will be attempted first before the rest of the query plan. It - * happens only when we retry on the same host. - */ - private void sendRequest(@Nullable Node node) { - channel = null; - if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { - while ((node = queryPlan.poll()) != null) { - channel = session.getChannel(node, logPrefix); - if (channel != null) { - break; - } - } - } - if (channel == null || node == null) { - // We've reached the end of the query plan without finding any node to write to; abort the - // continuous paging session. - lock.lock(); - try { - abort(AllNodesFailedException.fromErrors(errors), false); - } finally { - lock.unlock(); - } - } else { - this.node = node; - streamId = -1; - messageStartTimeNanos = System.nanoTime(); - channel.write(message, false, statement.getCustomPayload(), this).addListener(this); - } - } - - /** - * Invoked when the write from {@link #sendRequest(Node)} completes. - * - * @param future The future representing the outcome of the write operation. 
- */ - @Override - public void operationComplete(@NonNull Future future) { - if (!future.isSuccess()) { - Throwable error = future.cause(); - if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { - trackNodeError(node, error.getCause()); - lock.lock(); - try { - abort(error.getCause(), false); - } finally { - lock.unlock(); - } - } else { - LOG.trace( - "[{}] Failed to send request on {}, trying next node (cause: {})", - logPrefix, - channel, - error); - ((DefaultNode) node) - .getMetricUpdater() - .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); - recordError(node, error); - trackNodeError(node, error.getCause()); - sendRequest(null); - } - } else { - LOG.trace("[{}] Request sent on {}", logPrefix, channel); - timeout = scheduleTimeout(1); - } + throttler.register(this); } - /** - * Invoked when a continuous paging response is received, either a successful or failed one. - * - *

      Delegates further processing to appropriate methods: {@link #processResultResponse(Result, - * Frame)} if the response was successful, or {@link #processErrorResponse(Error)} if it wasn't. - * - * @param response the received {@link Frame}. - */ + @NonNull @Override - public void onResponse(@NonNull Frame response) { - stopNodeMessageTimer(); - cancelTimeout(); - lock.lock(); - try { - if (state < 0) { - LOG.trace("[{}] Got result but the request has been cancelled, ignoring", logPrefix); - return; - } - try { - Message responseMessage = response.message; - if (responseMessage instanceof Result) { - LOG.trace("[{}] Got result", logPrefix); - processResultResponse((Result) responseMessage, response); - } else if (responseMessage instanceof Error) { - LOG.trace("[{}] Got error response", logPrefix); - processErrorResponse((Error) responseMessage); - } else { - IllegalStateException error = - new IllegalStateException("Unexpected response " + responseMessage); - trackNodeError(node, error); - abort(error, false); - } - } catch (Throwable t) { - trackNodeError(node, t); - abort(t, false); - } - } finally { - lock.unlock(); - } + protected Message getMessage() { + return message; } - /** - * Invoked when a continuous paging request hits an unexpected error. - * - *

      Delegates further processing to to the retry policy ({@link - * #processRetryDecision(RetryDecision, Throwable)}. - * - * @param error the error encountered, usually a network problem. - */ @Override - public void onFailure(@NonNull Throwable error) { - cancelTimeout(); - LOG.trace(String.format("[%s] Request failure", logPrefix), error); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; - } else { - decision = retryPolicy.onRequestAborted(statement, error, retryCount); - } - updateErrorMetrics( - ((DefaultNode) node).getMetricUpdater(), - decision, - DefaultNodeMetric.ABORTED_REQUESTS, - DefaultNodeMetric.RETRIES_ON_ABORTED, - DefaultNodeMetric.IGNORES_ON_ABORTED); - lock.lock(); - try { - processRetryDecision(decision, error); - } finally { - lock.unlock(); - } + protected boolean isTracingEnabled() { + return false; } + @NonNull @Override - public void onThrottleFailure(@NonNull RequestThrottlingException error) { - session - .getMetricUpdater() - .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); - lock.lock(); - try { - abort(error, false); - } finally { - lock.unlock(); - } + protected Map createPayload() { + return statement.getCustomPayload(); } - // PROCESSING METHODS - - /** - * Processes a new result response, creating the corresponding {@link ContinuousAsyncResultSet} - * object and then enqueuing it or serving it directly to the user if he was waiting for it. - * - * @param result the result to process. It is normally a {@link Rows} object, but may be a {@link - * Void} object if the retry policy decided to ignore an error. - * @param frame the {@link Frame} (used to create the {@link ExecutionInfo} the first time). 
- */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { - assert lock.isHeldByCurrentThread(); - try { - ExecutionInfo executionInfo = createExecutionInfo(result, frame); - if (result instanceof Rows) { - DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); - if (columnDefinitions == null) { - // Contrary to ROWS responses from regular queries, - // the first page always includes metadata so we use this - // regardless of whether or not the query was from a prepared statement. - columnDefinitions = Conversions.toColumnDefinitions(rowsMetadata, context); - } - int pageNumber = rowsMetadata.continuousPageNumber; - int currentPage = state; - if (pageNumber != currentPage) { - abort( - new IllegalStateException( - String.format("Received page %d but was expecting %d", pageNumber, currentPage)), - false); - } else { - DefaultContinuousAsyncResultSet resultSet = createResultSet((Rows) result, executionInfo); - if (rowsMetadata.isLastContinuousPage) { - LOG.trace( - "[{}] Received last page ({} - {} rows)", - logPrefix, - pageNumber, - resultSet.remaining()); - state = STATE_FINISHED; - reenableAutoReadIfNeeded(); - enqueueOrCompletePending(resultSet); - stopGlobalRequestTimer(); - } else { - LOG.trace( - "[{}] Received page {} ({} rows)", logPrefix, pageNumber, resultSet.remaining()); - if (currentPage > 0) { - state = currentPage + 1; - } - enqueueOrCompletePending(resultSet); - } - } - } else { - // Void responses happen only when the retry decision is ignore. 
- assert result instanceof Void; - ContinuousAsyncResultSet resultSet = DefaultContinuousAsyncResultSet.empty(executionInfo); - LOG.trace( - "[{}] Continuous paging interrupted by retry policy decision to ignore error", - logPrefix); - state = STATE_FINISHED; - reenableAutoReadIfNeeded(); - enqueueOrCompletePending(resultSet); - stopGlobalRequestTimer(); - } - } catch (Throwable error) { - abort(error, false); - } - } - - /** - * Processes an unsuccessful response. - * - *

      Depending on the error, may trigger: - * - *

        - *
      1. a re-prepare cycle, see {@link #processUnprepared(Unprepared)}; - *
      2. an immediate retry on the next host, bypassing the retry policy, if the host was - * bootstrapping; - *
      3. an immediate abortion if the error is unrecoverable; - *
      4. further processing if the error is recoverable, see {@link - * #processRecoverableError(CoordinatorException)} - *
      - * - * @param errorMessage the error message received. - */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processErrorResponse(@NonNull Error errorMessage) { - assert lock.isHeldByCurrentThread(); - if (errorMessage instanceof Unprepared) { - processUnprepared((Unprepared) errorMessage); - } else { - CoordinatorException error = DseConversions.toThrowable(node, errorMessage, context); - if (error instanceof BootstrappingException) { - LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); - recordError(node, error); - trackNodeError(node, error); - sendRequest(null); - } else if (error instanceof QueryValidationException - || error instanceof FunctionFailureException - || error instanceof ProtocolError - || state > 1) { - // we only process recoverable errors for the first page, - // errors on subsequent pages will always trigger an immediate abortion - LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); - NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); - metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); - trackNodeError(node, error); - abort(error, true); - } else { - processRecoverableError(error); - } - } - } - - /** - * Processes a recoverable error. - * - *

      In most cases, delegates to the retry policy and its decision, see {@link - * #processRetryDecision(RetryDecision, Throwable)}. - * - * @param error the recoverable error. - */ - private void processRecoverableError(@NonNull CoordinatorException error) { - assert lock.isHeldByCurrentThread(); - NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); - RetryDecision decision; - if (error instanceof ReadTimeoutException) { - ReadTimeoutException readTimeout = (ReadTimeoutException) error; - decision = - retryPolicy.onReadTimeout( - statement, - readTimeout.getConsistencyLevel(), - readTimeout.getBlockFor(), - readTimeout.getReceived(), - readTimeout.wasDataPresent(), - retryCount); - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.READ_TIMEOUTS, - DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, - DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); - } else if (error instanceof WriteTimeoutException) { - WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - if (isIdempotent) { - decision = - retryPolicy.onWriteTimeout( - statement, - writeTimeout.getConsistencyLevel(), - writeTimeout.getWriteType(), - writeTimeout.getBlockFor(), - writeTimeout.getReceived(), - retryCount); - } else { - decision = RetryDecision.RETHROW; - } - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.WRITE_TIMEOUTS, - DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, - DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); - } else if (error instanceof UnavailableException) { - UnavailableException unavailable = (UnavailableException) error; - decision = - retryPolicy.onUnavailable( - statement, - unavailable.getConsistencyLevel(), - unavailable.getRequired(), - unavailable.getAlive(), - retryCount); - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.UNAVAILABLES, - DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, - DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); - } else { - decision = - isIdempotent - ? 
retryPolicy.onErrorResponse(statement, error, retryCount) - : RetryDecision.RETHROW; - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.OTHER_ERRORS, - DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, - DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); - } - processRetryDecision(decision, error); - } - - /** - * Processes an {@link Unprepared} error by re-preparing then retrying on the same host. - * - * @param errorMessage the unprepared error message. - */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processUnprepared(@NonNull Unprepared errorMessage) { - assert lock.isHeldByCurrentThread(); - ByteBuffer idToReprepare = ByteBuffer.wrap(errorMessage.id); - LOG.trace( - "[{}] Statement {} is not prepared on {}, re-preparing", - logPrefix, - Bytes.toHexString(idToReprepare), - node); - RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); - if (repreparePayload == null) { - throw new IllegalStateException( - String.format( - "Tried to execute unprepared query %s but we don't have the data to re-prepare it", - Bytes.toHexString(idToReprepare))); - } - Prepare prepare = repreparePayload.toMessage(); - Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); - ThrottledAdminRequestHandler.prepare( - channel, - true, - prepare, - repreparePayload.customPayload, - timeout, - throttler, - sessionMetricUpdater, - logPrefix) - .start() - .whenComplete( - (repreparedId, exception) -> { - // If we run into an unrecoverable error, surface it to the client instead of retrying - Throwable fatalError = null; - if (exception == null) { - if (!repreparedId.equals(idToReprepare)) { - IllegalStateException illegalStateException = - new IllegalStateException( - String.format( - "ID mismatch while trying to reprepare (expected %s, got %s). " - + "This prepared statement won't work anymore. " - + "This usually happens when you run a 'USE...' 
query after " - + "the statement was prepared.", - Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); - trackNodeError(node, illegalStateException); - fatalError = illegalStateException; - } else { - LOG.trace( - "[{}] Re-prepare successful, retrying on the same node ({})", - logPrefix, - node); - sendRequest(node); - } - } else { - if (exception instanceof UnexpectedResponseException) { - Message prepareErrorMessage = ((UnexpectedResponseException) exception).message; - if (prepareErrorMessage instanceof Error) { - CoordinatorException prepareError = - DseConversions.toThrowable(node, (Error) prepareErrorMessage, context); - if (prepareError instanceof QueryValidationException - || prepareError instanceof FunctionFailureException - || prepareError instanceof ProtocolError) { - LOG.trace("[{}] Unrecoverable error on re-prepare, rethrowing", logPrefix); - trackNodeError(node, prepareError); - fatalError = prepareError; - } - } - } else if (exception instanceof RequestThrottlingException) { - trackNodeError(node, exception); - fatalError = exception; - } - if (fatalError == null) { - LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); - recordError(node, exception); - trackNodeError(node, exception); - sendRequest(null); - } - } - if (fatalError != null) { - lock.lock(); - try { - abort(fatalError, true); - } finally { - lock.unlock(); - } - } - }); - } - - /** - * Processes the retry decision by triggering a retry, aborting or ignoring; also records the - * failures for further access. - * - * @param decision the decision to process. - * @param error the original error. 
- */ - @SuppressWarnings({"NonAtomicOperationOnVolatileField", "NonAtomicVolatileUpdate"}) - private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { - assert lock.isHeldByCurrentThread(); - LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); - switch (decision) { - case RETRY_SAME: - recordError(node, error); - trackNodeError(node, error); - retryCount++; - sendRequest(node); - break; - case RETRY_NEXT: - recordError(node, error); - trackNodeError(node, error); - retryCount++; - sendRequest(null); - break; - case RETHROW: - trackNodeError(node, error); - abort(error, true); - break; - case IGNORE: - processResultResponse(Void.INSTANCE, null); - break; - } - } - - // PAGE HANDLING - - /** - * Enqueues a response or, if the client was already waiting for it, completes the pending future. - * - *

      Guarded by {@link #lock}. - * - * @param pageOrError the next page, or an error. - */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void enqueueOrCompletePending(@NonNull Object pageOrError) { - assert lock.isHeldByCurrentThread(); - if (pendingResult != null) { - if (LOG.isTraceEnabled()) { - LOG.trace( - "[{}] Client was waiting on empty queue, completing with {}", - logPrefix, - asTraceString(pageOrError)); - } - CompletableFuture tmp = pendingResult; - // null out pendingResult before completing it because its completion - // may trigger a call to fetchNextPage -> dequeueOrCreatePending, - // which expects pendingResult to be null. - pendingResult = null; - completeResultSetFuture(tmp, pageOrError); - } else { - if (LOG.isTraceEnabled()) { - LOG.trace("[{}] Enqueuing {}", logPrefix, asTraceString(pageOrError)); - } - queue.add(pageOrError); - // Backpressure without protocol support: if the queue grows too large, - // disable auto-read so that the channel eventually becomes - // non-writable on the server side (causing it to back off for a while) - if (!protocolBackpressureAvailable && queue.size() == maxEnqueuedPages && state > 0) { - LOG.trace( - "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, queue.size()); - channel.config().setAutoRead(false); - } - } - } - - /** - * Dequeue a response or, if the queue is empty, create the future that will get notified of the - * next response, when it arrives. - * - *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#fetchNextPage()}. - * - * @return the next page's future; never null. - */ @NonNull - protected CompletableFuture dequeueOrCreatePending() { - lock.lock(); - try { - // If the client was already waiting for a page, there's no way it can call this method again - // (this is guaranteed by our public API because in order to ask for the next page, - // you need the reference to the previous page). - assert pendingResult == null; - - Object head = queue.poll(); - if (!protocolBackpressureAvailable && head != null && queue.size() == maxEnqueuedPages - 1) { - LOG.trace( - "[{}] Back to {} queued response pages, re-enabling auto-read", - logPrefix, - queue.size()); - channel.config().setAutoRead(true); - } - maybeRequestMore(); - if (head != null) { - if (state == STATE_FAILED && !(head instanceof Throwable)) { - LOG.trace( - "[{}] Client requested next page on cancelled queue, discarding page and returning cancelled future", - logPrefix); - return cancelledResultSetFuture(); - } else { - if (LOG.isTraceEnabled()) { - LOG.trace( - "[{}] Client requested next page on non-empty queue, returning immediate future of {}", - logPrefix, - asTraceString(head)); - } - return immediateResultSetFuture(head); - } - } else { - if (state == STATE_FAILED) { - LOG.trace( - "[{}] Client requested next page on cancelled empty queue, returning cancelled future", - logPrefix); - return cancelledResultSetFuture(); - } else { - LOG.trace( - "[{}] Client requested next page but queue is empty, installing future", logPrefix); - pendingResult = createResultSetFuture(); - // Only schedule a timeout if we're past the first page (the first page's timeout is - // handled in sendRequest). - if (state > 1) { - timeout = scheduleTimeout(state); - // Note: each new timeout is cancelled when the next response arrives, see - // onResponse(Frame). 
- } - return pendingResult; - } - } - } finally { - lock.unlock(); - } - } - - /** - * If the total number of results in the queue and in-flight (requested - received) is less than - * half the queue size, then request more pages, unless the {@link #state} is failed, we're still - * waiting for the first page (so maybe still throttled or in the middle of a retry), or we don't - * support backpressure at the protocol level. - */ - @SuppressWarnings("GuardedBy") - private void maybeRequestMore() { - assert lock.isHeldByCurrentThread(); - if (state < 2 || streamId == -1 || !protocolBackpressureAvailable) { - return; - } - // if we have already requested more than the client needs, then no need to request some more - if (maxPages > 0 && numPagesRequested >= maxPages) { - return; - } - // the pages received so far, which is the state minus one - int received = state - 1; - int requested = numPagesRequested; - // the pages that fit in the queue, which is the queue free space minus the requests in flight - int freeSpace = maxEnqueuedPages - queue.size(); - int inFlight = requested - received; - int numPagesFittingInQueue = freeSpace - inFlight; - if (numPagesFittingInQueue >= maxEnqueuedPages / 2) { - LOG.trace("[{}] Requesting more {} pages", logPrefix, numPagesFittingInQueue); - numPagesRequested = requested + numPagesFittingInQueue; - sendMorePagesRequest(numPagesFittingInQueue); - } - } - - /** - * Sends a request for more pages (a.k.a. backpressure request). - * - * @param nextPages the number of extra pages to request. 
- */ - @SuppressWarnings("GuardedBy") - private void sendMorePagesRequest(int nextPages) { - assert lock.isHeldByCurrentThread(); - assert channel != null : "expected valid connection in order to request more pages"; - assert protocolBackpressureAvailable; - assert streamId != -1; - - LOG.trace("[{}] Sending request for more pages", logPrefix); - ThrottledAdminRequestHandler.query( - channel, - true, - Revise.requestMoreContinuousPages(streamId, nextPages), - statement.getCustomPayload(), - timeoutOtherPages, - throttler, - session.getMetricUpdater(), - logPrefix, - "request " + nextPages + " more pages for id " + streamId) - .start() - .handle( - (result, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, "[{}] Error requesting more pages, aborting.", logPrefix, error); - lock.lock(); - try { - // Set fromServer to false because we want the callback to still cancel the - // session if possible or else the server will wait on a timeout. - abort(error, false); - } finally { - lock.unlock(); - } - } - return null; - }); - } - - // TIMEOUT HANDLING - - private Timeout scheduleTimeout(int expectedPage) { - if (expectedPage < 0) { - return null; - } - Duration timeout = (expectedPage == 1) ? timeoutFirstPage : timeoutOtherPages; - if (timeout.toNanos() <= 0) { - return null; - } - LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); - return timer.newTimeout( - (timeout1) -> { - lock.lock(); - try { - if (state == expectedPage) { - abort( - new DriverTimeoutException( - String.format("Timed out waiting for page %d", expectedPage)), - false); - } else { - // Ignore timeout if the request has moved on in the interim. - LOG.trace( - "[{}] Timeout fired for page {} but query already at state {}, skipping", - logPrefix, - expectedPage, - state); - } - } finally { - lock.unlock(); - } - }, - timeout.toNanos(), - TimeUnit.NANOSECONDS); - } - - /** Cancels the current timeout, if non null. 
*/ - private void cancelTimeout() { - Timeout timeout = this.timeout; - if (timeout != null) { - LOG.trace("[{}] Cancelling timeout", logPrefix); - timeout.cancel(); - } - } - - // CANCELLATION - - /** - * Cancels the continuous paging request. - * - *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#cancel()}, or from a - * driver I/O thread. - */ - void cancel() { - lock.lock(); - try { - if (state < 0) { - return; - } else { - LOG.trace( - "[{}] Cancelling continuous paging session with state {} on node {}", - logPrefix, - state, - node); - state = STATE_FAILED; - if (pendingResult != null) { - pendingResult.cancel(true); - } - // the rest can be done without holding the lock, see below - } - } finally { - lock.unlock(); - } - if (channel != null) { - if (!channel.closeFuture().isDone()) { - this.channel.cancel(this); - } - sendCancelRequest(); - } - reenableAutoReadIfNeeded(); - } - - private void sendCancelRequest() { - LOG.trace("[{}] Sending cancel request", logPrefix); - ThrottledAdminRequestHandler.query( - channel, - true, - Revise.cancelContinuousPaging(streamId), - statement.getCustomPayload(), - timeoutOtherPages, - throttler, - session.getMetricUpdater(), - logPrefix, - "cancel request") - .start() - .handle( - (result, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, - "[{}] Error sending cancel request. 
" - + "This is not critical (the request will eventually time out server-side).", - logPrefix, - error); - } else { - LOG.trace("[{}] Continuous paging session cancelled successfully", logPrefix); - } - return null; - }); - } - - // TERMINATION - - private void reenableAutoReadIfNeeded() { - // Make sure we don't leave the channel unreadable - LOG.trace("[{}] Re-enabling auto-read", logPrefix); - if (!protocolBackpressureAvailable) { - channel.config().setAutoRead(true); - } - } - - // ERROR HANDLING - - private void recordError(@NonNull Node node, @NonNull Throwable error) { - errors.add(new AbstractMap.SimpleEntry<>(node, error)); - } - - private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { - long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; - context - .getRequestTracker() - .onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); - } - - /** - * Aborts the continuous paging session due to an error that can be either from the server or the - * client. - * - * @param error the error that causes the abortion. - * @param fromServer whether the error was triggered by the coordinator or by the driver. - */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void abort(@NonNull Throwable error, boolean fromServer) { - assert lock.isHeldByCurrentThread(); - LOG.trace( - "[{}] Aborting due to {} ({})", - logPrefix, - error.getClass().getSimpleName(), - error.getMessage()); - if (channel == null) { - // This only happens when sending the initial request, if no host was available - // or if the iterator returned by the LBP threw an exception. - // In either case the write was not even attempted, and - // we set the state right now. 
- enqueueOrCompletePending(error); - state = STATE_FAILED; - } else if (state > 0) { - enqueueOrCompletePending(error); - if (fromServer) { - // We can safely assume the server won't send any more responses, - // so set the state and call release() right now. - state = STATE_FAILED; - reenableAutoReadIfNeeded(); - } else { - // attempt to cancel first, i.e. ask server to stop sending responses, - // and only then release. - cancel(); - } - } - stopGlobalRequestTimer(); - } - - // METRICS - - private void stopNodeMessageTimer() { - ((DefaultNode) node) - .getMetricUpdater() - .updateTimer( - DefaultNodeMetric.CQL_MESSAGES, - executionProfile.getName(), - System.nanoTime() - messageStartTimeNanos, - TimeUnit.NANOSECONDS); - } - - private void stopGlobalRequestTimer() { - session - .getMetricUpdater() - .updateTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, - executionProfile.getName(), - System.nanoTime() - startTimeNanos, - TimeUnit.NANOSECONDS); - } - - private void updateErrorMetrics( - @NonNull NodeMetricUpdater metricUpdater, - @NonNull RetryDecision decision, - @NonNull DefaultNodeMetric error, - @NonNull DefaultNodeMetric retriesOnError, - @NonNull DefaultNodeMetric ignoresOnError) { - metricUpdater.incrementCounter(error, executionProfile.getName()); - switch (decision) { - case RETRY_SAME: - case RETRY_NEXT: - metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); - metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); - break; - case IGNORE: - metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); - metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); - break; - case RETHROW: - // nothing do do - } + @Override + protected ContinuousAsyncResultSet createEmptyResultSet(@NonNull ExecutionInfo executionInfo) { + return DefaultContinuousAsyncResultSet.empty(executionInfo); } - // UTILITY METHODS - @NonNull - private DefaultExecutionInfo 
createExecutionInfo( + @Override + protected DefaultExecutionInfo createExecutionInfo( @NonNull Result result, @Nullable Frame response) { ByteBuffer pagingState = result instanceof Rows ? ((Rows) result).getMetadata().pagingState : null; @@ -1165,7 +107,8 @@ private DefaultExecutionInfo createExecutionInfo( } @NonNull - private DefaultContinuousAsyncResultSet createResultSet( + @Override + protected DefaultContinuousAsyncResultSet createResultSet( @NonNull Rows rows, @NonNull ExecutionInfo executionInfo) { Queue> data = rows.getData(); CountingIterator iterator = @@ -1188,73 +131,8 @@ protected Row computeNext() { this); } - @NonNull - private CompletableFuture createResultSetFuture() { - CompletableFuture future = new CompletableFuture<>(); - future.whenComplete( - (rs, t) -> { - if (t instanceof CancellationException) { - // if the future has been canceled by the user, propagate the cancellation - cancel(); - } - }); - return future; - } - - @NonNull - private CompletableFuture immediateResultSetFuture( - @NonNull Object pageOrError) { - CompletableFuture future = createResultSetFuture(); - completeResultSetFuture(future, pageOrError); - return future; - } - - @NonNull - private CompletableFuture cancelledResultSetFuture() { - return immediateResultSetFuture( - new CancellationException( - "Can't get more results because the continuous query has failed already. 
" - + "Most likely this is because the query was cancelled")); - } - - private void completeResultSetFuture( - @NonNull CompletableFuture future, @NonNull Object pageOrError) { - long now = System.nanoTime(); - long totalLatencyNanos = now - startTimeNanos; - long nodeLatencyNanos = now - messageStartTimeNanos; - if (pageOrError instanceof ContinuousAsyncResultSet) { - if (future.complete((ContinuousAsyncResultSet) pageOrError)) { - throttler.signalSuccess(this); - context - .getRequestTracker() - .onNodeSuccess(statement, nodeLatencyNanos, executionProfile, node, logPrefix); - context - .getRequestTracker() - .onSuccess(statement, totalLatencyNanos, executionProfile, node, logPrefix); - } - } else { - Throwable error = (Throwable) pageOrError; - if (future.completeExceptionally(error)) { - context - .getRequestTracker() - .onError(statement, error, totalLatencyNanos, executionProfile, node, logPrefix); - if (error instanceof DriverTimeoutException) { - throttler.signalTimeout(this); - session - .getMetricUpdater() - .incrementCounter( - DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); - } else if (!(error instanceof RequestThrottlingException)) { - throttler.signalError(this, error); - } - } - } - } - - @NonNull - private static String asTraceString(@NonNull Object pageOrError) { - return (pageOrError instanceof ContinuousAsyncResultSet) - ? 
"page " + ((ContinuousAsyncResultSet) pageOrError).pageNumber() - : ((Exception) pageOrError).getClass().getSimpleName(); + @Override + protected int pageNumber(@NonNull ContinuousAsyncResultSet resultSet) { + return resultSet.pageNumber(); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java index 10a5bfda2f2..c7ffe7551af 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/DefaultContinuousAsyncResultSet.java @@ -107,7 +107,7 @@ public void cancel() { handler.cancel(); } - static ContinuousAsyncResultSet empty(ExecutionInfo executionInfo) { + public static ContinuousAsyncResultSet empty(ExecutionInfo executionInfo) { return new ContinuousAsyncResultSet() { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java new file mode 100644 index 00000000000..e0816018203 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java @@ -0,0 +1,126 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.Queue; +import java.util.concurrent.CompletionStage; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe // wraps a mutable queue +public class ContinuousAsyncGraphResultSet implements AsyncGraphResultSet { + + private final CountingIterator iterator; + private final int pageNumber; + private final boolean hasMorePages; + private final GraphExecutionInfo executionInfo; + private final ContinuousGraphRequestHandler continuousGraphRequestHandler; + private final Iterable currentPage; + + public ContinuousAsyncGraphResultSet( + GraphExecutionInfo executionInfo, + Queue data, + int pageNumber, + boolean hasMorePages, + ContinuousGraphRequestHandler continuousGraphRequestHandler, + GraphProtocol graphProtocol) { + + this.iterator = new GraphResultIterator(data, graphProtocol); + this.pageNumber = pageNumber; + this.hasMorePages = hasMorePages; + this.executionInfo = executionInfo; + this.continuousGraphRequestHandler = continuousGraphRequestHandler; + this.currentPage = () -> iterator; + } + + @NonNull + @Override + public GraphExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @Override + public int remaining() { + return iterator.remaining(); + } + + @NonNull + @Override + public Iterable currentPage() { + return currentPage; + } + + @Override + public boolean hasMorePages() { + return hasMorePages; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws IllegalStateException { + if (!hasMorePages()) { + throw 
new IllegalStateException( + "Can't call fetchNextPage() on the last page (use hasMorePages() to check)"); + } + return continuousGraphRequestHandler.dequeueOrCreatePending(); + } + + @Override + public void cancel() { + continuousGraphRequestHandler.cancel(); + } + + /** Returns the current page's number. Pages are numbered starting from 1. */ + public int pageNumber() { + return pageNumber; + } + + static AsyncGraphResultSet empty(GraphExecutionInfo executionInfo) { + + return new AsyncGraphResultSet() { + + @NonNull + @Override + public GraphExecutionInfo getExecutionInfo() { + return executionInfo; + } + + @NonNull + @Override + public Iterable currentPage() { + return Collections.emptyList(); + } + + @Override + public int remaining() { + return 0; + } + + @Override + public boolean hasMorePages() { + return false; + } + + @NonNull + @Override + public CompletionStage fetchNextPage() throws IllegalStateException { + throw new IllegalStateException( + "Can't call fetchNextPage() on the last page (use hasMorePages() to check)"); + } + + @Override + public void cancel() { + // noop + } + }; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java new file mode 100644 index 00000000000..fea1f4c21d2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -0,0 +1,127 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.internal.core.ContinuousRequestHandlerBase; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.result.Rows; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayDeque; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import net.jcip.annotations.ThreadSafe; + +/** + * Handles a request that supports multiple response messages (a.k.a. continuous paging request). 
+ */ +@ThreadSafe +public class ContinuousGraphRequestHandler + extends ContinuousRequestHandlerBase, AsyncGraphResultSet, GraphExecutionInfo> + implements ResponseCallback, GenericFutureListener>, Throttled { + private final Message message; + private final GraphProtocol subProtocol; + private final GraphBinaryModule graphBinaryModule; + + ContinuousGraphRequestHandler( + @NonNull GraphStatement statement, + @NonNull DefaultSession session, + @NonNull InternalDriverContext context, + @NonNull String sessionLogPrefix, + @NonNull GraphBinaryModule graphBinaryModule) { + super(statement, session, context, sessionLogPrefix, AsyncGraphResultSet.class); + this.graphBinaryModule = graphBinaryModule; + this.subProtocol = GraphConversions.inferSubProtocol(statement, executionProfile); + this.message = + GraphConversions.createContinuousMessageFromGraphStatement( + statement, subProtocol, executionProfile, this.context, graphBinaryModule); + throttler.register(this); + } + + // MAIN LIFECYCLE + + @NonNull + @Override + protected Message getMessage() { + return message; + } + + @Override + protected boolean isTracingEnabled() { + return this.statement.isTracing(); + } + + @NonNull + @Override + protected Map createPayload() { + return GraphConversions.createCustomPayload( + statement, subProtocol, executionProfile, context, graphBinaryModule); + } + + @NonNull + @Override + protected AsyncGraphResultSet createEmptyResultSet(@NonNull GraphExecutionInfo executionInfo) { + return ContinuousAsyncGraphResultSet.empty(executionInfo); + } + + @NonNull + @Override + protected DefaultGraphExecutionInfo createExecutionInfo( + @NonNull Result result, @Nullable Frame response) { + return new DefaultGraphExecutionInfo(statement, node, 0, 0, errors, response); + } + + @NonNull + @Override + protected ContinuousAsyncGraphResultSet createResultSet( + @NonNull Rows rows, @NonNull GraphExecutionInfo executionInfo) throws IOException { + + Queue graphNodes = new ArrayDeque<>(); + for (List row 
: rows.getData()) { + if (subProtocol.isGraphBinary()) { + graphNodes.offer(GraphConversions.createGraphBinaryGraphNode(row, this.graphBinaryModule)); + } else { + graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); + } + } + + DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); + return new ContinuousAsyncGraphResultSet( + executionInfo, + graphNodes, + metadata.continuousPageNumber, + !metadata.isLastContinuousPage, + this, + subProtocol); + } + + @Override + protected int pageNumber(@NonNull AsyncGraphResultSet resultSet) { + if (resultSet instanceof ContinuousAsyncGraphResultSet) { + return ((ContinuousAsyncGraphResultSet) resultSet).pageNumber(); + } else { // otherwise the AsyncGraphResultSet is not a Continuous Paging Query + return 1; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java index c4160201e44..8a7ce76075c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DefaultAsyncGraphResultSet.java @@ -19,12 +19,10 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.util.CountingIterator; -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Queue; import java.util.concurrent.CompletionStage; import net.jcip.annotations.NotThreadSafe; -import org.apache.tinkerpop.gremlin.process.traversal.Traverser; @NotThreadSafe // wraps a mutable queue public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { @@ -32,14 +30,12 @@ public class DefaultAsyncGraphResultSet implements AsyncGraphResultSet { private final ExecutionInfo executionInfo; private 
final CountingIterator iterator; private final Iterable currentPage; - private final GraphProtocol graphProtocol; public DefaultAsyncGraphResultSet( ExecutionInfo executionInfo, Queue data, GraphProtocol graphProtocol) { this.executionInfo = executionInfo; - this.iterator = new GraphResultIterator(data); + this.iterator = new GraphResultIterator(data, graphProtocol); this.currentPage = () -> iterator; - this.graphProtocol = graphProtocol; } @NonNull @@ -68,14 +64,12 @@ public Iterable currentPage() { @Override public boolean hasMorePages() { - // hard-coded until DSE graph supports paging return false; } @NonNull @Override public CompletionStage fetchNextPage() throws IllegalStateException { - // hard-coded until DSE graph supports paging throw new IllegalStateException( "No next page. Use #hasMorePages before calling this method to avoid this error."); } @@ -84,55 +78,4 @@ public CompletionStage fetchNextPage() throws IllegalStateE public void cancel() { // nothing to do } - - private class GraphResultIterator extends CountingIterator { - - private final Queue data; - - // Sometimes a traversal can yield the same result multiple times consecutively. To avoid - // duplicating the data, DSE graph sends it only once with a counter indicating how many times - // it's repeated. - private long repeat = 0; - private GraphNode lastGraphNode = null; - - private GraphResultIterator(Queue data) { - super(data.size()); - this.data = data; - } - - @Override - protected GraphNode computeNext() { - if (repeat > 1) { - repeat -= 1; - // Note that we don't make a defensive copy, we assume the client won't mutate the node - return lastGraphNode; - } - - GraphNode container = data.poll(); - if (container == null) { - return endOfData(); - } - - if (graphProtocol.isGraphBinary()) { - // results are contained in a Traverser object and not a Map if the protocol - // is GraphBinary - Preconditions.checkState( - container.as(Object.class) instanceof Traverser, - "Graph protocol error. 
Received object should be a Traverser but it is not."); - Traverser t = container.as(Traverser.class); - this.repeat = t.bulk(); - this.lastGraphNode = new ObjectGraphNode(t.get()); - return lastGraphNode; - } else { - // The repeat counter is called "bulk" in the JSON payload - GraphNode b = container.getByKey("bulk"); - if (b != null) { - this.repeat = b.asLong(); - } - - lastGraphNode = container.getByKey("result"); - return lastGraphNode; - } - } - } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 99526532387..0b994167c82 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -22,6 +22,7 @@ import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.request.RawBytesQuery; +import com.datastax.dse.protocol.internal.request.query.ContinuousPagingOptions; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; @@ -29,6 +30,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; @@ -70,7 +72,7 @@ public class GraphConversions extends Conversions { @VisibleForTesting static final byte[] 
EMPTY_STRING_QUERY = "".getBytes(UTF_8); - static GraphProtocol inferSubProtocol( + public static GraphProtocol inferSubProtocol( GraphStatement statement, DriverExecutionProfile config) { String graphProtocol = statement.getSubProtocol(); if (graphProtocol == null) { @@ -88,6 +90,74 @@ static GraphProtocol inferSubProtocol( return GraphProtocol.fromString(graphProtocol); } + public static Message createContinuousMessageFromGraphStatement( + GraphStatement statement, + GraphProtocol subProtocol, + DriverExecutionProfile config, + InternalDriverContext context, + GraphBinaryModule graphBinaryModule) { + + final List encodedQueryParams; + if (!(statement instanceof ScriptGraphStatement) + || ((ScriptGraphStatement) statement).getQueryParams().isEmpty()) { + encodedQueryParams = Collections.emptyList(); + } else { + try { + Map queryParams = ((ScriptGraphStatement) statement).getQueryParams(); + if (subProtocol.isGraphBinary()) { + ByteBuf graphBinaryParams = graphBinaryModule.serialize(queryParams); + encodedQueryParams = + Collections.singletonList(ByteBufUtil.toByteBuffer(graphBinaryParams)); + graphBinaryParams.release(); + } else { + encodedQueryParams = + Collections.singletonList( + GraphSONUtils.serializeToByteBuffer(queryParams, subProtocol)); + } + } catch (IOException e) { + throw new UncheckedIOException( + "Couldn't serialize parameters for GraphStatement: " + statement, e); + } + } + + int consistencyLevel = + DefaultConsistencyLevel.valueOf(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + .getProtocolCode(); + + long timestamp = statement.getTimestamp(); + if (timestamp == Long.MIN_VALUE) { + timestamp = context.getTimestampGenerator().next(); + } + + int pageSize = config.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE); + boolean pageSizeInBytes = config.getBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES); + int maxPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + int maxPagesPerSecond = 
config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND); + int maxEnqueuedPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + ContinuousPagingOptions options = + new ContinuousPagingOptions(maxPages, maxPagesPerSecond, maxEnqueuedPages); + + DseQueryOptions queryOptions = + new DseQueryOptions( + consistencyLevel, + encodedQueryParams, + Collections.emptyMap(), // ignored by the DSE Graph server + true, // also ignored + pageSize, + null, + ProtocolConstants.ConsistencyLevel.LOCAL_SERIAL, // also ignored + timestamp, + null, // also ignored + pageSizeInBytes, + options); + + if (statement instanceof ScriptGraphStatement) { + return new Query(((ScriptGraphStatement) statement).getScript(), queryOptions); + } else { + return new RawBytesQuery(getQueryBytes(statement, subProtocol), queryOptions); + } + } + static Message createMessageFromGraphStatement( GraphStatement statement, GraphProtocol subProtocol, @@ -96,7 +166,7 @@ static Message createMessageFromGraphStatement( GraphBinaryModule graphBinaryModule) { final List encodedQueryParams; - if ((!(statement instanceof ScriptGraphStatement)) + if (!(statement instanceof ScriptGraphStatement) || ((ScriptGraphStatement) statement).getQueryParams().isEmpty()) { encodedQueryParams = Collections.emptyList(); } else { @@ -188,11 +258,11 @@ private static byte[] getQueryBytes(GraphStatement statement, GraphProtocol g } } - static Map createCustomPayload( + public static Map createCustomPayload( GraphStatement statement, GraphProtocol subProtocol, DriverExecutionProfile config, - DseDriverContext context, + InternalDriverContext context, GraphBinaryModule graphBinaryModule) { ProtocolVersion protocolVersion = context.getProtocolVersion(); @@ -207,7 +277,7 @@ static Map createCustomPayload( // Don't override anything that's already provided at the statement level if (!statementOptions.containsKey(GRAPH_LANG_OPTION_KEY)) { graphLanguage = - (statement instanceof ScriptGraphStatement) ? 
LANGUAGE_GROOVY : LANGUAGE_BYTECODE; + statement instanceof ScriptGraphStatement ? LANGUAGE_GROOVY : LANGUAGE_BYTECODE; payload.put(GRAPH_LANG_OPTION_KEY, TypeCodecs.TEXT.encode(graphLanguage, protocolVersion)); } else { graphLanguage = @@ -264,7 +334,7 @@ static Map createCustomPayload( if (!statementOptions.containsKey(GRAPH_READ_CONSISTENCY_LEVEL_OPTION_KEY)) { ConsistencyLevel readCl = statement.getReadConsistencyLevel(); String readClString = - (readCl != null) + readCl != null ? readCl.name() : config.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); if (readClString != null) { @@ -277,7 +347,7 @@ static Map createCustomPayload( if (!statementOptions.containsKey(GRAPH_WRITE_CONSISTENCY_LEVEL_OPTION_KEY)) { ConsistencyLevel writeCl = statement.getWriteConsistencyLevel(); String writeClString = - (writeCl != null) + writeCl != null ? writeCl.name() : config.getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null); if (writeClString != null) { @@ -311,7 +381,7 @@ private static boolean isSystemQuery(GraphStatement statement, DriverExecutio return config.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); } - static GraphNode createGraphBinaryGraphNode( + public static GraphNode createGraphBinaryGraphNode( List data, GraphBinaryModule graphBinaryModule) throws IOException { // there should be only one column in the given row Preconditions.checkArgument(data.size() == 1, "Invalid row given to deserialize"); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java new file mode 100644 index 00000000000..bad78003336 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import java.util.Collection; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class GraphPagingSupportChecker { + private static final Logger LOG = LoggerFactory.getLogger(GraphPagingSupportChecker.class); + static final Version GRAPH_PAGING_MIN_DSE_VERSION = Version.parse("6.8.0"); + + private volatile Boolean contextGraphPagingEnabled; + + // Graph paging is available if + // 1) continuous paging is generally available and + // 2) all hosts are running DSE 6.8+ + // The computation below will be done only once when the session is initialized; if other hosts + // join the cluster later and are not running DSE 6.8, the user has to manually disable graph + // paging. 
+ boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { + DriverExecutionProfile driverExecutionProfile = + Conversions.resolveExecutionProfile(graphStatement, context); + LOG.trace( + "GRAPH_PAGING_ENABLED: {}", + driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); + + PagingEnabledOptions pagingEnabledOptions = + PagingEnabledOptions.valueOf( + driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); + if (pagingEnabledOptions == PagingEnabledOptions.DISABLED) { + return false; + } else if (pagingEnabledOptions == PagingEnabledOptions.ENABLED) { + return true; + } else { + return isContextGraphPagingEnabled(context); + } + } + + private boolean isContextGraphPagingEnabled(InternalDriverContext context) { + if (contextGraphPagingEnabled == null) { + ProtocolVersion protocolVersion = context.getProtocolVersion(); + + if (!context + .getProtocolVersionRegistry() + .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { + contextGraphPagingEnabled = false; + return contextGraphPagingEnabled; + } + + Collection nodes = context.getMetadataManager().getMetadata().getNodes().values(); + + for (Node node : nodes) { + Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (dseVersion == null + || dseVersion.compareTo(Objects.requireNonNull(GRAPH_PAGING_MIN_DSE_VERSION)) < 0) { + contextGraphPagingEnabled = false; + return contextGraphPagingEnabled; + } + } + contextGraphPagingEnabled = true; + return contextGraphPagingEnabled; + } else { + return contextGraphPagingEnabled; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 36c9db99801..2703b880180 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -17,14 +17,15 @@ import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.CompletionStage; import net.jcip.annotations.ThreadSafe; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; @@ -36,16 +37,20 @@ public class GraphRequestAsyncProcessor implements RequestProcessor, CompletionStage> { private final GraphBinaryModule graphBinaryModule; + private final GraphPagingSupportChecker graphPagingSupportChecker; - public GraphRequestAsyncProcessor(DseDriverContext context) { + public GraphRequestAsyncProcessor( + DefaultDriverContext context, GraphPagingSupportChecker graphPagingSupportChecker) { TypeSerializerRegistry typeSerializerRegistry = GraphBinaryModule.createDseTypeSerializerRegistry(context); this.graphBinaryModule = new GraphBinaryModule( new GraphBinaryReader(typeSerializerRegistry), new GraphBinaryWriter(typeSerializerRegistry)); + this.graphPagingSupportChecker = graphPagingSupportChecker; } + @NonNull public GraphBinaryModule getGraphBinaryModule() { return graphBinaryModule; } @@ -61,9 +66,16 @@ public CompletionStage process( DefaultSession session, 
InternalDriverContext context, String sessionLogPrefix) { - return new GraphRequestHandler( - request, session, context, sessionLogPrefix, getGraphBinaryModule()) - .handle(); + + if (graphPagingSupportChecker.isPagingEnabled(request, context)) { + return new ContinuousGraphRequestHandler( + request, session, context, sessionLogPrefix, getGraphBinaryModule()) + .handle(); + } else { + return new GraphRequestHandler( + request, session, context, sessionLogPrefix, getGraphBinaryModule()) + .handle(); + } } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java new file mode 100644 index 00000000000..f0557a71c80 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.util.Queue; +import net.jcip.annotations.NotThreadSafe; +import org.apache.tinkerpop.gremlin.process.traversal.Traverser; + +@NotThreadSafe // wraps a mutable queue +class GraphResultIterator extends CountingIterator { + + private final Queue data; + private final GraphProtocol graphProtocol; + + // Sometimes a traversal can yield the same result multiple times consecutively. To avoid + // duplicating the data, DSE graph sends it only once with a counter indicating how many times + // it's repeated. 
+ private long repeat = 0; + private GraphNode lastGraphNode = null; + + GraphResultIterator(Queue data, GraphProtocol graphProtocol) { + super(data.size()); + this.data = data; + this.graphProtocol = graphProtocol; + } + + @Override + protected GraphNode computeNext() { + if (repeat > 1) { + repeat -= 1; + // Note that we don't make a defensive copy, we assume the client won't mutate the node + return lastGraphNode; + } + + GraphNode container = data.poll(); + if (container == null) { + return endOfData(); + } + + if (graphProtocol.isGraphBinary()) { + // results are contained in a Traverser object and not a Map if the protocol + // is GraphBinary + Preconditions.checkState( + container.as(Object.class) instanceof Traverser, + "Graph protocol error. Received object should be a Traverser but it is not."); + Traverser t = container.as(Traverser.class); + this.repeat = t.bulk(); + this.lastGraphNode = new ObjectGraphNode(t.get()); + return lastGraphNode; + } else { + // The repeat counter is called "bulk" in the JSON payload + GraphNode b = container.getByKey("bulk"); + if (b != null) { + this.repeat = b.asLong(); + } + + lastGraphNode = container.getByKey("result"); + return lastGraphNode; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java index 63cdf327f35..2a2def58896 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultSets.java @@ -22,7 +22,7 @@ public class GraphResultSets { public static GraphResultSet toSync(AsyncGraphResultSet firstPage) { if (firstPage.hasMorePages()) { - throw new UnsupportedOperationException("TODO implement multi-page results"); + return new MultiPageGraphResultSet(firstPage); } else { return new SinglePageGraphResultSet(firstPage); } diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java index fdf3d4dccb1..c0685d9e6a6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -39,7 +39,7 @@ import org.apache.tinkerpop.shaded.jackson.core.Version; import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; -class GraphSONUtils { +public class GraphSONUtils { private static final LoadingCache OBJECT_MAPPERS = CacheBuilder.newBuilder() @@ -123,7 +123,7 @@ static byte[] serializeToBytes(Object object, GraphProtocol graphSubProtocol) th } } - static GraphNode createGraphNode(List data, GraphProtocol graphSubProtocol) + public static GraphNode createGraphNode(List data, GraphProtocol graphSubProtocol) throws IOException { try { ObjectMapper mapper = OBJECT_MAPPERS.get(graphSubProtocol); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java new file mode 100644 index 00000000000..e4ffe156c97 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java @@ -0,0 +1,97 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +public class MultiPageGraphResultSet implements GraphResultSet { + private final RowIterator iterator; + private final List executionInfos = new ArrayList<>(); + + public MultiPageGraphResultSet(AsyncGraphResultSet firstPage) { + iterator = new RowIterator(firstPage); + executionInfos.add(firstPage.getExecutionInfo()); + } + + @Override + public void cancel() { + iterator.cancel(); + } + + @NonNull + @Override + public GraphExecutionInfo getExecutionInfo() { + return executionInfos.get(executionInfos.size() - 1); + } + + /** + * The execution information for all the queries that have been performed so far to assemble this + * iterable. + * + *

      This will have multiple elements if the query is paged, since the driver performs blocking + * background queries to fetch additional pages transparently as the result set is being iterated. + */ + @NonNull + public List getExecutionInfos() { + return executionInfos; + } + + @NonNull + @Override + public Iterator iterator() { + return iterator; + } + + public class RowIterator extends CountingIterator { + private AsyncGraphResultSet currentPage; + private Iterator currentRows; + private boolean cancelled = false; + + private RowIterator(AsyncGraphResultSet firstPage) { + super(firstPage.remaining()); + currentPage = firstPage; + currentRows = firstPage.currentPage().iterator(); + } + + @Override + protected GraphNode computeNext() { + maybeMoveToNextPage(); + return currentRows.hasNext() ? currentRows.next() : endOfData(); + } + + private void maybeMoveToNextPage() { + if (!cancelled && !currentRows.hasNext() && currentPage.hasMorePages()) { + BlockingOperation.checkNotDriverThread(); + AsyncGraphResultSet nextPage = + CompletableFutures.getUninterruptibly(currentPage.fetchNextPage()); + currentPage = nextPage; + remaining += currentPage.remaining(); + currentRows = nextPage.currentPage().iterator(); + executionInfos.add(nextPage.getExecutionInfo()); + } + } + + private void cancel() { + currentPage.cancel(); + cancelled = true; + } + + public boolean isCancelled() { + return cancelled; + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java index c945821ff92..09710c51bbf 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/SinglePageGraphResultSet.java @@ -54,6 +54,6 @@ public Iterator iterator() { @Override public void cancel() { - // nothing to do + onlyPage.cancel(); } } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 994763980d7..620fc8118d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -22,6 +22,7 @@ import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphPagingSupportChecker; import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; @@ -525,7 +526,8 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { // graph requests (sync and async) if (DependencyCheck.TINKERPOP.isPresent()) { - GraphRequestAsyncProcessor graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(); + GraphRequestAsyncProcessor graphRequestAsyncProcessor = + new GraphRequestAsyncProcessor(this, new GraphPagingSupportChecker()); GraphRequestSyncProcessor graphRequestSyncProcessor = new GraphRequestSyncProcessor(graphRequestAsyncProcessor); processors.add(graphRequestAsyncProcessor); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 5ad53cdaf92..f4decad2f37 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1015,6 +1015,24 @@ datastax-java-driver { # Modifiable at runtime: yes, the new value will be used for requests issued after the change. 
# Overridable in a profile: yes // sub-protocol = "graphson-2.0" + + # + # Whether or not Graph paging should be enabled or disabled for all queries. + # + #

      If AUTO is set, the driver will decide whether or not to enable Graph paging + # based on the protocol version in use and the DSE version of all hosts. For this reason it is + # usually not necessary to call this method. + # + #

      IMPORTANT: Paging for DSE Graph is only available in DSE 6.8 and higher, and requires + # protocol version DSE_V1 or higher and graphs created with the Native engine; enabling paging + # for clusters and graphs that do not meet this requirement may result in query failures. + # + # set whether or not to enable Graph paging, or AUTO to let the driver decide. + # + # Supported values are: ENABLED, DISABLED, AUTO + #/ + paging-enabled = "AUTO" + paging-options = ${datastax-java-driver.advanced.continuous-paging} } # Continuous paging (DataStax Enterprise only) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java index 989665a5efe..634c6eda5d2 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java @@ -134,7 +134,7 @@ public void should_abort_when_prepare_fails_with_unrecoverable_error(DseProtocol verify(harness.getChannel(node1)).write(any(Query.class), anyBoolean(), anyMap(), any()); verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); assertThat(page1Future) .hasFailedWithThrowableThat() .isInstanceOf(SyntaxError.class) @@ -173,7 +173,7 @@ public void should_try_next_node_when_prepare_fails_with_recoverable_error( // should have tried the next host verify(harness.getChannel(node2)).write(any(Query.class), anyBoolean(), anyMap(), any()); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); assertThatStage(page1Future) .isSuccess( rs -> { diff --git 
a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java index 27aabce3e30..4ed9e48e9af 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java @@ -90,7 +90,7 @@ public void should_always_try_next_node_if_bootstrapping( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); assertThatStage(resultSetFuture) .isSuccess( @@ -139,7 +139,7 @@ public void should_always_rethrow_query_validation_error( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); assertThatStage(resultSetFuture) .isFailed( @@ -190,7 +190,7 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); assertThatStage(resultSetFuture) .isSuccess( @@ -249,7 +249,7 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); assertThatStage(resultSetFuture) .isSuccess( @@ -307,7 +307,7 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), 
harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); assertThatStage(resultSetFuture) .isSuccess( @@ -364,7 +364,7 @@ public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); assertThatStage(resultSetFuture) .isFailed( @@ -421,7 +421,7 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); assertThatStage(resultSetFuture) .isFailed( diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java index fca7af05da1..1e9ef1471cf 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java @@ -116,7 +116,7 @@ public void should_complete_multi_page_result(DseProtocolVersion version) { UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); CompletionStage page1Future = handler.handle(); - assertThat(handler.pendingResult).isNotNull(); + assertThat(handler.getPendingResult()).isNotNull(); node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); assertThatStage(page1Future) @@ -138,9 +138,9 @@ public void should_complete_multi_page_result(DseProtocolVersion version) { }); ContinuousAsyncResultSet page1 
= CompletableFutures.getCompleted(page1Future); - assertThat(handler.pendingResult).isNull(); + assertThat(handler.getPendingResult()).isNull(); CompletionStage page2Future = page1.fetchNextPage(); - assertThat(handler.pendingResult).isNotNull(); + assertThat(handler.getPendingResult()).isNotNull(); node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(2, true))); assertThatStage(page2Future) @@ -311,7 +311,7 @@ public void should_cancel_future_if_session_cancelled(DseProtocolVersion version ContinuousAsyncResultSet page1 = CompletableFutures.getUninterruptibly(page1Future); page1.cancel(); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); assertThat(page1.fetchNextPage()).isCancelled(); } } @@ -332,7 +332,7 @@ public void should_cancel_session_if_future_cancelled(DseProtocolVersion version page1Future.toCompletableFuture().cancel(true); // this should be ignored node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); } } @@ -355,7 +355,7 @@ public void should_not_cancel_session_if_future_cancelled_but_already_done( // to late page1Future.toCompletableFuture().cancel(true); - assertThat(handler.state).isEqualTo(-1); + assertThat(handler.getState()).isEqualTo(-1); } } @@ -372,7 +372,7 @@ public void should_send_cancel_request_if_dse_v2() { CompletionStage page1Future = handler.handle(); page1Future.toCompletableFuture().cancel(true); - assertThat(handler.state).isEqualTo(-2); + assertThat(handler.getState()).isEqualTo(-2); verify(node1Behavior.getChannel()) .write(argThat(this::isCancelRequest), anyBoolean(), anyMap(), any()); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java new file mode 100644 index 
00000000000..64861bf2821 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java @@ -0,0 +1,172 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.AUTO; +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.DISABLED; +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.ENABLED; +import static com.datastax.dse.driver.internal.core.graph.GraphPagingSupportChecker.GRAPH_PAGING_MIN_DSE_VERSION; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import 
com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class GraphPagingSupportCheckerTest { + + @UseDataProvider("pagingEnabled") + @Test + public void should_check_if_paging_is_supported( + boolean protocolWithPagingSupport, + PagingEnabledOptions statementGraphPagingEnabled, + PagingEnabledOptions contextGraphPagingEnabled, + List nodeDseVersions, + boolean expected) { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(protocolWithPagingSupport); + statementGraphPagingEnabled(graphStatement, statementGraphPagingEnabled); + contextGraphPagingEnabled(context, contextGraphPagingEnabled); + addNodeWithDseVersion(context, nodeDseVersions); + + // when + boolean pagingEnabled = + new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(expected); + } + + @Test + public void should_not_support_paging_when_statement_profile_not_present() { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(true); + contextGraphPagingEnabled(context, DISABLED); + addNodeWithDseVersion(context, Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION)); + + // when + boolean pagingEnabled = + new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(false); + } + + @Test + public void + should_support_paging_when_statement_profile_not_present_but_context_profile_has_paging_enabled() { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(true); + 
contextGraphPagingEnabled(context, ENABLED); + addNodeWithDseVersion(context, Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION)); + + // when + boolean pagingEnabled = + new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(true); + } + + @DataProvider() + public static Object[][] pagingEnabled() { + List listWithGraphPagingNode = Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION); + List listWithoutGraphPagingNode = Collections.singletonList(Version.parse("6.7.0")); + List listWithNull = Collections.singletonList(null); + List listWithTwoNodesOneNotSupporting = + Arrays.asList(Version.parse("6.7.0"), GRAPH_PAGING_MIN_DSE_VERSION); + + return new Object[][] { + {false, ENABLED, ENABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithoutGraphPagingNode, true}, + {true, ENABLED, DISABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithNull, true}, + {true, ENABLED, ENABLED, listWithTwoNodesOneNotSupporting, true}, + {true, DISABLED, ENABLED, listWithGraphPagingNode, false}, + {true, DISABLED, AUTO, listWithGraphPagingNode, false}, + {true, DISABLED, DISABLED, listWithGraphPagingNode, false}, + {true, AUTO, AUTO, listWithGraphPagingNode, true}, + {true, AUTO, DISABLED, listWithGraphPagingNode, true}, + {false, AUTO, AUTO, listWithGraphPagingNode, false}, + {true, AUTO, AUTO, listWithTwoNodesOneNotSupporting, false}, + {true, AUTO, AUTO, listWithNull, false}, + }; + } + + private void addNodeWithDseVersion(InternalDriverContext context, List dseVersions) { + MetadataManager manager = mock(MetadataManager.class); + when(context.getMetadataManager()).thenReturn(manager); + Metadata metadata = mock(Metadata.class); + when(manager.getMetadata()).thenReturn(metadata); + Map nodes = new HashMap<>(); + for (Version v : dseVersions) { + Node node = mock(Node.class); + Map extras = new HashMap<>(); + 
extras.put(DseNodeProperties.DSE_VERSION, v); + when(node.getExtras()).thenReturn(extras); + nodes.put(UUID.randomUUID(), node); + } + when(metadata.getNodes()).thenReturn(nodes); + } + + private void contextGraphPagingEnabled( + InternalDriverContext context, PagingEnabledOptions option) { + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) + .thenReturn(option.name()); + DriverConfig config = mock(DriverConfig.class); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(driverExecutionProfile); + } + + private InternalDriverContext protocolWithPagingSupport(boolean pagingSupport) { + InternalDriverContext context = mock(InternalDriverContext.class); + when(context.getProtocolVersion()).thenReturn(DefaultProtocolVersion.V4); + ProtocolVersionRegistry protocolVersionRegistry = mock(ProtocolVersionRegistry.class); + when(protocolVersionRegistry.supports( + DefaultProtocolVersion.V4, DseProtocolFeature.CONTINUOUS_PAGING)) + .thenReturn(pagingSupport); + when(context.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); + return context; + } + + private void statementGraphPagingEnabled( + GraphStatement graphStatement, PagingEnabledOptions option) { + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) + .thenReturn(option.name()); + when(graphStatement.getExecutionProfile()).thenReturn(driverExecutionProfile); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index db17e6191f5..1e18842aa75 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -72,6 +72,7 @@ import java.nio.ByteBuffer; import java.time.Duration; import java.time.LocalDateTime; +import java.time.ZoneOffset; import java.util.ArrayDeque; import java.util.Collections; import java.util.List; @@ -187,7 +188,10 @@ public void should_create_query_message_from_batch_statement(GraphProtocol graph ImmutableList.of( // randomly testing some complex data types. Complete suite of data types test is in // GraphBinaryDataTypesTest - DseGraph.g.addV("person").property("p1", 2.3f).property("p2", LocalDateTime.now()), + DseGraph.g + .addV("person") + .property("p1", 2.3f) + .property("p2", LocalDateTime.now(ZoneOffset.UTC)), DseGraph.g .addV("software") .property("p3", new BigInteger("123456789123456789123456789123456789")) @@ -421,7 +425,11 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); GraphBinaryModule module = createGraphBinaryModule(mockContext); - GraphRequestAsyncProcessor p = Mockito.spy(new GraphRequestAsyncProcessor(mockContext)); + GraphPagingSupportChecker graphPagingSupportChecker = mock(GraphPagingSupportChecker.class); + when(graphPagingSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); + + GraphRequestAsyncProcessor p = + Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphPagingSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); Vertex v = @@ -481,7 +489,8 @@ public void should_invoke_request_tracker() throws IOException { DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); GraphBinaryModule module = createGraphBinaryModule(mockContext); - GraphRequestAsyncProcessor p = Mockito.spy(new GraphRequestAsyncProcessor(mockContext)); + GraphRequestAsyncProcessor p = + Mockito.spy(new GraphRequestAsyncProcessor(mockContext, new GraphPagingSupportChecker())); 
when(p.getGraphBinaryModule()).thenReturn(module); Vertex v = @@ -506,9 +515,12 @@ node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, v, module))) when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphPagingSupportChecker graphPagingSupportChecker = mock(GraphPagingSupportChecker.class); + when(graphPagingSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); GraphResultSet grs = new GraphRequestSyncProcessor( - new GraphRequestAsyncProcessor((DseDriverContext) harness.getContext())) + new GraphRequestAsyncProcessor( + (DseDriverContext) harness.getContext(), graphPagingSupportChecker)) .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); List nodes = grs.all(); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java new file mode 100644 index 00000000000..e082a013f85 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java @@ -0,0 +1,75 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Queue; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +public abstract class GraphResultSetTestBase { + + /** Mocks an async result set where column 0 has type INT, with rows with the provided data. */ + protected AsyncGraphResultSet mockPage(boolean nextPage, Integer... data) { + AsyncGraphResultSet page = mock(AsyncGraphResultSet.class); + + GraphExecutionInfo executionInfo = mock(GraphExecutionInfo.class); + when(page.getExecutionInfo()).thenReturn(executionInfo); + + if (nextPage) { + when(page.hasMorePages()).thenReturn(true); + when(page.fetchNextPage()).thenReturn(spy(new CompletableFuture<>())); + } else { + when(page.hasMorePages()).thenReturn(false); + when(page.fetchNextPage()).thenThrow(new IllegalStateException()); + } + + // Emulate DefaultAsyncResultSet's internals (this is a bit sketchy, maybe it would be better + // to use real DefaultAsyncResultSet instances) + Queue queue = Lists.newLinkedList(Arrays.asList(data)); + CountingIterator iterator = + new CountingIterator(queue.size()) { + @Override + protected GraphNode computeNext() { + Integer index = queue.poll(); + return (index == null) ? 
endOfData() : mockRow(index); + } + }; + when(page.currentPage()).thenReturn(() -> iterator); + when(page.remaining()).thenAnswer(invocation -> iterator.remaining()); + + return page; + } + + private GraphNode mockRow(int index) { + GraphNode row = mock(GraphNode.class); + when(row.asInt()).thenReturn(index); + return row; + } + + protected static void complete( + CompletionStage stage, AsyncGraphResultSet result) { + stage.toCompletableFuture().complete(result); + } + + protected void assertNextRow(Iterator iterator, int expectedValue) { + assertThat(iterator.hasNext()).isTrue(); + GraphNode row = iterator.next(); + assertThat(row.asInt()).isEqualTo(expectedValue); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java new file mode 100644 index 00000000000..3ed28a386d8 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import java.util.Iterator; +import org.junit.Test; + +public class GraphResultSetsTest extends GraphResultSetTestBase { + + @Test + public void should_create_result_set_from_single_page() { + // Given + AsyncGraphResultSet page1 = mockPage(false, 0, 1, 2); + + // When + GraphResultSet resultSet = GraphResultSets.toSync(page1); + + // Then + assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); + + Iterator iterator = resultSet.iterator(); + + assertNextRow(iterator, 0); + assertNextRow(iterator, 1); + assertNextRow(iterator, 2); + + assertThat(iterator.hasNext()).isFalse(); + } + + @Test + public void should_create_result_set_from_multiple_pages() { + // Given + AsyncGraphResultSet page1 = mockPage(true, 0, 1, 2); + AsyncGraphResultSet page2 = mockPage(true, 3, 4, 5); + AsyncGraphResultSet page3 = mockPage(false, 6, 7, 8); + + complete(page1.fetchNextPage(), page2); + complete(page2.fetchNextPage(), page3); + + // When + GraphResultSet resultSet = GraphResultSets.toSync(page1); + + // Then + assertThat(resultSet.iterator().hasNext()).isTrue(); + + assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) + .containsExactly(page1.getExecutionInfo()); + + Iterator iterator = resultSet.iterator(); + + assertNextRow(iterator, 0); + assertNextRow(iterator, 1); + assertNextRow(iterator, 2); + + assertThat(iterator.hasNext()).isTrue(); + // This should have triggered the fetch of page2 + 
assertThat(resultSet.getExecutionInfo()).isEqualTo(page2.getExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) + .containsExactly(page1.getExecutionInfo(), page2.getExecutionInfo()); + + assertNextRow(iterator, 3); + assertNextRow(iterator, 4); + assertNextRow(iterator, 5); + + assertThat(iterator.hasNext()).isTrue(); + // This should have triggered the fetch of page3 + assertThat(resultSet.getExecutionInfo()).isEqualTo(page3.getExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) + .containsExactly( + page1.getExecutionInfo(), page2.getExecutionInfo(), page3.getExecutionInfo()); + + assertNextRow(iterator, 6); + assertNextRow(iterator, 7); + assertNextRow(iterator, 8); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java index c191d2d697b..80b20d86afe 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -44,6 +44,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.time.ZoneOffset; import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; @@ -92,8 +93,8 @@ public static Object[][] datatypes() throws UnknownHostException { {23f}, {(short) 23}, {InetAddress.getLocalHost()}, - {LocalDate.now()}, - {LocalTime.now()}, + {LocalDate.now(ZoneOffset.UTC)}, + {LocalTime.now(ZoneOffset.UTC)}, {CqlDuration.newInstance(10, 10, 10000)}, {java.util.UUID.randomUUID()}, {Instant.now()}, diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java new file mode 100644 index 00000000000..a420d8e50a3 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java @@ -0,0 +1,379 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES; +import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE; +import static com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase.Options; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.dse.driver.internal.core.graph.MultiPageGraphResultSet; +import 
com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.internal.core.util.CountingIterator; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.net.SocketAddress; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; + +@DseRequirement(min = "6.8.0", description = "Graph paging requires DSE 6.8+") +@RunWith(DataProviderRunner.class) +public class GraphPagingIT { + + private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static DseSessionRule sessionRule = + new DseSessionRuleBuilder(ccmRule) + .withCreateGraph() + .withCoreEngine() + .withGraphProtocol(GRAPH_BINARY_1_0.toInternalCode()) + .build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @BeforeClass + public static void setupSchema() { + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance( + "schema.vertexLabel('person')" + + ".ifNotExists()" // required otherwise we get a weird table already exists + // error + + ".partitionBy('pk', Int)" + + ".clusterBy('cc', Int)" + + ".property('name', Text)" + + ".create();") + .setGraphName(sessionRule.getGraphName())); + for (int i = 1; i <= 100; i++) { + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance( + String.format( + "g.addV('person').property('pk',0).property('cc',%d).property('name', '%s');", + i, "user" + i)) + .setGraphName(sessionRule.getGraphName())); + } + } + + 
@UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void synchronous_paging_with_options(Options options) { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.ENABLED); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + GraphResultSet result = + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + List nodes = result.all(); + + assertThat(((CountingIterator) result.iterator()).remaining()).isZero(); + assertThat(nodes).hasSize(options.expectedRows); + for (int i = 1; i <= nodes.size(); i++) { + GraphNode node = nodes.get(i - 1); + assertThat(node.asString()).isEqualTo("user" + i); + } + assertThat(result.getExecutionInfo()).isNotNull(); + assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + .isEqualTo(firstCcmNode()); + assertIfMultiPage(result, options.expectedPages); + } + + @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void synchronous_paging_with_options_when_auto(Options options) { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.AUTO); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + GraphResultSet result = + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + List nodes = result.all(); + + assertThat(((CountingIterator) result.iterator()).remaining()).isZero(); + assertThat(nodes).hasSize(options.expectedRows); + for (int i = 1; i <= nodes.size(); i++) { + 
GraphNode node = nodes.get(i - 1); + assertThat(node.asString()).isEqualTo("user" + i); + } + assertThat(result.getExecutionInfo()).isNotNull(); + assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + .isEqualTo(firstCcmNode()); + + assertIfMultiPage(result, options.expectedPages); + } + + private void assertIfMultiPage(GraphResultSet result, int expectedPages) { + if (result instanceof MultiPageGraphResultSet) { + assertThat(((MultiPageGraphResultSet) result).getExecutionInfos()).hasSize(expectedPages); + assertThat(result.getExecutionInfo()) + .isSameAs(((MultiPageGraphResultSet) result).getExecutionInfos().get(expectedPages - 1)); + } + } + + @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void synchronous_options_with_paging_disabled_should_fallback_to_single_page( + Options options) { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.DISABLED); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + GraphResultSet result = + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + List nodes = result.all(); + + assertThat(((CountingIterator) result.iterator()).remaining()).isZero(); + assertThat(nodes).hasSize(100); + for (int i = 1; i <= nodes.size(); i++) { + GraphNode node = nodes.get(i - 1); + assertThat(node.asString()).isEqualTo("user" + i); + } + assertThat(result.getExecutionInfo()).isNotNull(); + assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + .isEqualTo(firstCcmNode()); + } + + @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void asynchronous_paging_with_options(Options options) + throws ExecutionException, 
InterruptedException { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.ENABLED); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + CompletionStage result = + sessionRule + .session() + .executeAsync( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + checkAsyncResult(result, options, 0, 1, new ArrayList<>()); + } + + @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void asynchronous_paging_with_options_when_auto(Options options) + throws ExecutionException, InterruptedException { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.AUTO); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + CompletionStage result = + sessionRule + .session() + .executeAsync( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + checkAsyncResult(result, options, 0, 1, new ArrayList<>()); + } + + @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") + @Test + public void asynchronous_options_with_paging_disabled_should_fallback_to_single_page( + Options options) throws ExecutionException, InterruptedException { + // given + DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.DISABLED); + + if (options.sizeInBytes) { + // Page sizes in bytes are not supported with graph queries + return; + } + + // when + CompletionStage result = + sessionRule + .session() + .executeAsync( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + 
.setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + + // then + AsyncGraphResultSet asyncGraphResultSet = result.toCompletableFuture().get(); + for (int i = 1; i <= 100; i++, asyncGraphResultSet.remaining()) { + GraphNode node = asyncGraphResultSet.one(); + assertThat(node.asString()).isEqualTo("user" + i); + } + assertThat(asyncGraphResultSet.remaining()).isEqualTo(0); + } + + private DriverExecutionProfile enablePaging( + Options options, PagingEnabledOptions pagingEnabledOptions) { + DriverExecutionProfile profile = options.asProfile(sessionRule.session()); + profile = profile.withString(DseDriverOption.GRAPH_PAGING_ENABLED, pagingEnabledOptions.name()); + return profile; + } + + private void checkAsyncResult( + CompletionStage future, + Options options, + int rowsFetched, + int pageNumber, + List graphExecutionInfos) + throws ExecutionException, InterruptedException { + AsyncGraphResultSet result = future.toCompletableFuture().get(); + int remaining = result.remaining(); + rowsFetched += remaining; + assertThat(remaining).isLessThanOrEqualTo(options.pageSize); + + if (options.expectedRows == rowsFetched) { + assertThat(result.hasMorePages()).isFalse(); + } else { + assertThat(result.hasMorePages()).isTrue(); + } + + int first = (pageNumber - 1) * options.pageSize + 1; + int last = (pageNumber - 1) * options.pageSize + remaining; + + for (int i = first; i <= last; i++, remaining--) { + GraphNode node = result.one(); + assertThat(node.asString()).isEqualTo("user" + i); + assertThat(result.remaining()).isEqualTo(remaining - 1); + } + + assertThat(result.remaining()).isZero(); + assertThat(result.getExecutionInfo()).isNotNull(); + assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + .isEqualTo(firstCcmNode()); + + graphExecutionInfos.add(result.getExecutionInfo()); + + assertThat(graphExecutionInfos).hasSize(pageNumber); + 
assertThat(result.getExecutionInfo()).isSameAs(graphExecutionInfos.get(pageNumber - 1)); + if (pageNumber == options.expectedPages) { + assertThat(result.hasMorePages()).isFalse(); + assertThat(options.expectedRows).isEqualTo(rowsFetched); + assertThat(options.expectedPages).isEqualTo(pageNumber); + } else { + assertThat(result.hasMorePages()).isTrue(); + checkAsyncResult( + result.fetchNextPage(), options, rowsFetched, pageNumber + 1, graphExecutionInfos); + } + } + + @Test + public void should_cancel_result_set() { + // given + DriverExecutionProfile profile = enablePaging(); + profile = profile.withInt(CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1); + profile = profile.withInt(CONTINUOUS_PAGING_PAGE_SIZE, 10); + + // when + GraphStatement statement = + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile); + MultiPageGraphResultSet results = + (MultiPageGraphResultSet) sessionRule.session().execute(statement); + + assertThat(((MultiPageGraphResultSet.RowIterator) results.iterator()).isCancelled()).isFalse(); + assertThat(((CountingIterator) results.iterator()).remaining()).isEqualTo(10); + results.cancel(); + + assertThat(((MultiPageGraphResultSet.RowIterator) results.iterator()).isCancelled()).isTrue(); + assertThat(((CountingIterator) results.iterator()).remaining()).isEqualTo(10); + for (int i = 0; i < 10; i++) { + results.one(); + } + } + + private DriverExecutionProfile enablePaging() { + DriverExecutionProfile profile = + sessionRule.session().getContext().getConfig().getDefaultProfile(); + profile = + profile.withString( + DseDriverOption.GRAPH_PAGING_ENABLED, PagingEnabledOptions.ENABLED.name()); + return profile; + } + + private SocketAddress firstCcmNode() { + return ccmRule.getContactPoints().iterator().next().resolve(); + } +} diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java index 4f97c6c5a65..e81334ce79f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java @@ -36,8 +36,6 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CompletionStage; @@ -487,11 +485,10 @@ public void should_return_correct_results_when_bulked() { List results = rs.all().stream().map(GraphNode::asString).sorted().collect(Collectors.toList()); - List expected = - Arrays.asList("knows", "created", "created", "knows", "created", "created"); - Collections.sort(expected); - - assertThat(results).isEqualTo(expected); + assertThat(results) + .hasSize(6) + .containsSequence("created", "created", "created", "created") + .containsSequence("knows", "knows"); } @Test diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java index 2b9d0447374..21c859e4723 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java @@ -18,6 +18,8 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; 
+import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; @@ -63,7 +65,10 @@ public SessionRule build() { .get() .withValue( DseDriverOption.GRAPH_NAME.getPath(), - ConfigValueFactory.fromAnyRef(graphName)); + ConfigValueFactory.fromAnyRef(graphName)) + .withValue( + DseDriverOption.GRAPH_PAGING_ENABLED.getPath(), + ConfigValueFactory.fromAnyRef(PagingEnabledOptions.DISABLED.name())); } else { graphName = null; if (loader == null) { From 3c8901a2c6dcf39c19f3ccfc76302520cb658a25 Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Tue, 24 Sep 2019 10:00:03 +0200 Subject: [PATCH 296/979] Update Tinkerpop to 3.4.3 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 1741f771c54..96b84a186d7 100644 --- a/pom.xml +++ b/pom.xml @@ -48,7 +48,7 @@ 4.1.45.Final 1.7.26 1.2.1 - 3.3.3 + 3.4.3-20190731-199be4b5 1.0.2 2.10.0 2.10.0 From c77d277d6ad664f241d214c7b7dfe86cfb718f2b Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Tue, 24 Sep 2019 13:01:21 +0200 Subject: [PATCH 297/979] Fix integration tests affected by reverting DSP-19741 --- .../api/core/graph/remote/NativeGraphDataTypeRemoteIT.java | 4 +++- .../api/core/graph/statement/NativeGraphDataTypeFluentIT.java | 4 +++- .../api/core/graph/statement/NativeGraphDataTypeScriptIT.java | 3 ++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java index 45a5999d8da..9230fc4d6ee 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java @@ -6,6 +6,8 @@ */ package com.datastax.dse.driver.api.core.graph.remote; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; + import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -42,6 +44,6 @@ public Map insertVertexThenReadProperties( traversal.iterate(); // query properties - return g.V().has(vertexLabel, "id", vertexID).valueMap().next(); + return g.V().has(vertexLabel, "id", vertexID).valueMap().by(unfold()).next(); } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java index 17df83f8b29..2f29da2dc78 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java @@ -7,6 +7,7 @@ package com.datastax.dse.driver.api.core.graph.statement; import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; @@ -40,7 +41,8 @@ public Map insertVertexThenReadProperties( return session() .execute( - FluentGraphStatement.newInstance(g.V().has(vertexLabel, "id", vertexID).valueMap())) + FluentGraphStatement.newInstance( + g.V().has(vertexLabel, "id", vertexID).valueMap().by(unfold()))) .one() .asMap(); } diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java index cf2f804176e..9ac31524b8f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java @@ -44,7 +44,8 @@ protected Map insertVertexThenReadProperties( return session() .execute( - ScriptGraphStatement.newInstance("g.V().has(vertexLabel, 'id', vertexID).valueMap()") + ScriptGraphStatement.newInstance( + "g.V().has(vertexLabel, 'id', vertexID).valueMap().by(unfold())") .setQueryParam("vertexID", vertexID) .setQueryParam("vertexLabel", vertexLabel)) .one() From 5bc58e65ba3427e3de048e6ff6d397ba0de80dcf Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Tue, 24 Sep 2019 10:00:28 -0500 Subject: [PATCH 298/979] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core (#275) --- changelog/README.md | 2 ++ .../internal/core/graph/GraphConversions.java | 2 +- ...e.java => ClassicGraphDataTypeITBase.java} | 2 +- ...Base.java => CoreGraphDataTypeITBase.java} | 2 +- ...java => ClassicGraphDataTypeRemoteIT.java} | 4 +-- ...IT.java => CoreGraphDataTypeRemoteIT.java} | 6 ++-- ...java => ClassicGraphDataTypeFluentIT.java} | 4 +-- ...java => ClassicGraphDataTypeScriptIT.java} | 4 +-- ...IT.java => CoreGraphDataTypeFluentIT.java} | 6 ++-- ...IT.java => CoreGraphDataTypeScriptIT.java} | 6 ++-- .../schema/KeyspaceGraphMetadataIT.java | 28 +++++++++++++++++++ 11 files changed, 48 insertions(+), 18 deletions(-) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/{LegacyGraphDataTypeITBase.java => ClassicGraphDataTypeITBase.java} (99%) rename 
integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/{NativeGraphDataTypeITBase.java => CoreGraphDataTypeITBase.java} (99%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/{LegacyGraphDataTypeRemoteIT.java => ClassicGraphDataTypeRemoteIT.java} (94%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/{NativeGraphDataTypeRemoteIT.java => CoreGraphDataTypeRemoteIT.java} (91%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{LegacyGraphDataTypeFluentIT.java => ClassicGraphDataTypeFluentIT.java} (94%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{LegacyGraphDataTypeScriptIT.java => ClassicGraphDataTypeScriptIT.java} (94%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{NativeGraphDataTypeFluentIT.java => CoreGraphDataTypeFluentIT.java} (91%) rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{NativeGraphDataTypeScriptIT.java => CoreGraphDataTypeScriptIT.java} (91%) diff --git a/changelog/README.md b/changelog/README.md index 82bd4c9fdd4..71e4b3ce716 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -3,6 +3,8 @@ ### NGDG (in progress) + +- [improvement] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core - [new feature] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) - [new feature] JAVA-1898: Expose new table-level graph metadata diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 0b994167c82..eee9f65efb5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -79,7 +79,7 @@ public static GraphProtocol 
inferSubProtocol( graphProtocol = config.getString( DseDriverOption.GRAPH_SUB_PROTOCOL, - // TODO pick graphson-3.0 if the target graph uses the native engine + // TODO pick graphson-3.0 if the target graph uses the core engine "graphson-2.0"); } // should not be null because we call config.getString() with a default value diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java index fd758fe2dd2..4f30a51fec8 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/LegacyGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java @@ -44,7 +44,7 @@ import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) -public abstract class LegacyGraphDataTypeITBase { +public abstract class ClassicGraphDataTypeITBase { private static final boolean IS_DSE50 = CcmBridge.VERSION.compareTo(Version.parse("5.1")) < 0; private static final Set TYPES_REQUIRING_DSE51 = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java index bee0f0e583b..63266c0303d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/NativeGraphDataTypeITBase.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java @@ -41,7 +41,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -public abstract class NativeGraphDataTypeITBase { +public abstract class CoreGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java index 0d211e53ab9..9b608c2f1fc 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/LegacyGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java @@ -15,8 +15,8 @@ */ package com.datastax.dse.driver.api.core.graph.remote; +import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; -import com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,7 +31,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") -public class LegacyGraphDataTypeRemoteIT extends LegacyGraphDataTypeITBase { +public class ClassicGraphDataTypeRemoteIT extends ClassicGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java similarity index 91% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java index 9230fc4d6ee..9d404f99693 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/NativeGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java @@ -8,8 +8,8 @@ import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; +import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; -import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.categories.IsolatedTests; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -20,10 +20,10 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) @Category(IsolatedTests.class) -public class NativeGraphDataTypeRemoteIT extends NativeGraphDataTypeITBase { +public class CoreGraphDataTypeRemoteIT extends CoreGraphDataTypeITBase { private final GraphTraversalSource g = DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session()).build()); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java index 424df6a841f..64a23c1c82e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java @@ -15,9 +15,9 @@ */ package com.datastax.dse.driver.api.core.graph.statement; +import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,7 +31,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for fluent API support") -public class LegacyGraphDataTypeFluentIT extends LegacyGraphDataTypeITBase { +public class ClassicGraphDataTypeFluentIT extends ClassicGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java similarity index 94% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java rename to 
integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java index 67055c4eb96..b7e27404822 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/LegacyGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.api.core.graph.statement; -import com.datastax.dse.driver.api.core.graph.LegacyGraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -29,7 +29,7 @@ import org.junit.rules.TestRule; @DseRequirement(min = "5.0.4", description = "DSE 5.0.4 required for script API with GraphSON 2") -public class LegacyGraphDataTypeScriptIT extends LegacyGraphDataTypeITBase { +public class ClassicGraphDataTypeScriptIT extends ClassicGraphDataTypeITBase { private static CustomCcmRule ccmRule = CustomCcmRule.builder() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java similarity index 91% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java index 2f29da2dc78..032debec46f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java @@ -9,8 +9,8 @@ import static 
com.datastax.dse.driver.api.core.graph.DseGraph.g; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; +import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.categories.IsolatedTests; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -20,10 +20,10 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) @Category(IsolatedTests.class) -public class NativeGraphDataTypeFluentIT extends NativeGraphDataTypeITBase { +public class CoreGraphDataTypeFluentIT extends CoreGraphDataTypeITBase { @Override public Map insertVertexThenReadProperties( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java similarity index 91% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java index 9ac31524b8f..b0db0565a35 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/NativeGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java @@ -6,7 +6,7 @@ */ package com.datastax.dse.driver.api.core.graph.statement; -import 
com.datastax.dse.driver.api.core.graph.NativeGraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatementBuilder; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -16,10 +16,10 @@ import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Native graph support") +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) @Category(IsolatedTests.class) -public class NativeGraphDataTypeScriptIT extends NativeGraphDataTypeITBase { +public class CoreGraphDataTypeScriptIT extends CoreGraphDataTypeITBase { @Override protected Map insertVertexThenReadProperties( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java index 8c529b5c652..aceba7e2077 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -7,6 +7,7 @@ package com.datastax.dse.driver.api.core.metadata.schema; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; @@ -48,6 +49,33 @@ public void should_expose_graph_engine_if_set() { .hasValue("Tinker")); } + @Test + public void should_not_allow_classic_graph_engine_to_be_specified_on_keyspace() { + DseSession session = SESSION_RULE.session(); + assertThatThrownBy( + () -> + 
session.execute( + "CREATE KEYSPACE keyspace_metadata_it_graph_engine_classic " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1} " + + "AND graph_engine = 'Classic'")) + .hasMessageContaining("Invalid/unknown graph engine name 'Classic'"); + } + + @Test + public void should_expose_core_graph_engine_if_set() { + DseSession session = SESSION_RULE.session(); + session.execute( + "CREATE KEYSPACE keyspace_metadata_it_graph_engine_core " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1} " + + "AND graph_engine = 'Core'"); + Metadata metadata = session.getMetadata(); + assertThat(metadata.getKeyspace("keyspace_metadata_it_graph_engine_core")) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + .hasValue("Core")); + } + @Test public void should_expose_empty_graph_engine_if_not_set() { // The default keyspace created by CcmRule has no graph engine From 38961f3330fa87ddf8aa7449f6773fae87204d3e Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 19 Sep 2019 16:15:48 -0700 Subject: [PATCH 299/979] JAVA-2098: Add filter predicates for collections --- changelog/README.md | 1 + .../core/graph/predicates/CqlCollection.java | 70 ++++++ .../core/graph/CqlCollectionPredicate.java | 64 +++++ .../core/graph/binary/GraphBinaryModule.java | 3 + .../core/graph/binary/PairSerializer.java | 34 +++ .../graph/predicates/CqlCollectionTest.java | 77 +++++++ .../api/core/graph/CqlCollectionIT.java | 218 ++++++++++++++++++ 7 files changed, 467 insertions(+) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java create mode 100644 
core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java diff --git a/changelog/README.md b/changelog/README.md index 71e4b3ce716..560d17a63c6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [new feature] JAVA-2098: Add filter predicates for collections - [improvement] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core - [new feature] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) - [new feature] JAVA-1898: Expose new table-level graph metadata diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java new file mode 100644 index 00000000000..9c05732fbdd --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import com.datastax.dse.driver.internal.core.graph.CqlCollectionPredicate; +import java.util.Collection; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.javatuples.Pair; + +/** + * Predicates that can be used on CQL collections (lists, sets and maps). + * + *

      Note: CQL collection predicates are only available when using the binary subprotocol. + */ +public class CqlCollection { + + /** + * Checks if the target collection contains the given value. + * + * @param value the value to look for; cannot be {@code null}. + * @return a predicate to apply in a {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal}. + */ + @SuppressWarnings("unchecked") + public static , V> P contains(V value) { + return new P(CqlCollectionPredicate.contains, value); + } + + /** + * Checks if the target map contains the given key. + * + * @param key the key to look for; cannot be {@code null}. + * @return a predicate to apply in a {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal}. + */ + @SuppressWarnings("unchecked") + public static , K> P containsKey(K key) { + return new P(CqlCollectionPredicate.containsKey, key); + } + + /** + * Checks if the target map contains the given value. + * + * @param value the value to look for; cannot be {@code null}. + * @return a predicate to apply in a {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal}. + */ + @SuppressWarnings("unchecked") + public static , V> P containsValue(V value) { + return new P(CqlCollectionPredicate.containsValue, value); + } + + /** + * Checks if the target map contains the given entry. + * + * @param key the key to look for; cannot be {@code null}. + * @param value the value to look for; cannot be {@code null}. + * @return a predicate to apply in a {@link + * org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal}. 
+ */ + @SuppressWarnings("unchecked") + public static , K, V> P entryEq(K key, V value) { + return new P(CqlCollectionPredicate.entryEq, new Pair<>(key, value)); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java new file mode 100644 index 00000000000..3fbc1a4d6a4 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import org.javatuples.Pair; + +/** Predicates that can be used on CQL Collections. 
*/ +public enum CqlCollectionPredicate implements DsePredicate { + contains { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + Preconditions.checkArgument(value instanceof Collection); + return ((Collection) value).contains(condition); + } + }, + + containsKey { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + Preconditions.checkArgument(value instanceof Map); + return ((Map) value).containsKey(condition); + } + }, + + containsValue { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + Preconditions.checkArgument(value instanceof Map); + return ((Map) value).containsValue(condition); + } + }, + + entryEq { + @Override + public boolean test(Object value, Object condition) { + preEvaluate(condition); + Preconditions.checkArgument(condition instanceof Pair); + Preconditions.checkArgument(value instanceof Map); + Pair pair = (Pair) condition; + Map map = (Map) value; + return Objects.equals(map.get(pair.getValue0()), pair.getValue1()); + } + }; + + @Override + public boolean isValidCondition(Object condition) { + if (condition instanceof Pair) { + Pair pair = (Pair) condition; + return pair.getValue0() != null && pair.getValue1() != null; + } + return condition != null; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java index 696a81ad0e2..eca38a7c65e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java @@ -22,6 +22,7 @@ import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; import 
org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.javatuples.Pair; public class GraphBinaryModule { public static final UnpooledByteBufAllocator ALLOCATOR = new UnpooledByteBufAllocator(false); @@ -34,6 +35,7 @@ public class GraphBinaryModule { static final String GRAPH_BINARY_EDIT_DISTANCE_TYPE_NAME = "driver.dse.search.EditDistance"; static final String GRAPH_BINARY_TUPLE_VALUE_TYPE_NAME = "driver.core.TupleValue"; static final String GRAPH_BINARY_UDT_VALUE_TYPE_NAME = "driver.core.UDTValue"; + static final String GRAPH_BINARY_PAIR_TYPE_NAME = "org.javatuples.Pair"; private final GraphBinaryReader reader; private final GraphBinaryWriter writer; @@ -54,6 +56,7 @@ public static TypeSerializerRegistry createDseTypeSerializerRegistry( .addCustomType(EditDistance.class, new EditDistanceSerializer()) .addCustomType(TupleValue.class, new TupleValueSerializer(driverContext)) .addCustomType(UdtValue.class, new UdtValueSerializer(driverContext)) + .addCustomType(Pair.class, new PairSerializer()) .create(); } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java new file mode 100644 index 00000000000..3d1d8b4855f --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java @@ -0,0 +1,34 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.binary; + +import io.netty.buffer.ByteBuf; +import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.javatuples.Pair; + +public class PairSerializer extends AbstractDynamicGraphBinaryCustomSerializer { + + @Override + public String getTypeName() { + return GraphBinaryModule.GRAPH_BINARY_PAIR_TYPE_NAME; + } + + @Override + protected Pair readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) + throws SerializationException { + return new Pair<>(context.read(buffer), context.read(buffer)); + } + + @Override + protected void writeDynamicCustomValue(Pair value, ByteBuf buffer, GraphBinaryWriter context) + throws SerializationException { + context.write(value.getValue0(), buffer); + context.write(value.getValue1(), buffer); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java new file mode 100644 index 00000000000..fc22513448f --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.predicates; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.P; +import org.junit.Test; + +public class CqlCollectionTest { + + @Test + public void should_evaluate_contains() { + P> contains = CqlCollection.contains("foo"); + assertThat(contains.test(new HashSet<>())).isFalse(); + assertThat(contains.test(new ArrayList<>())).isFalse(); + assertThat(contains.test(Sets.newHashSet("foo"))).isTrue(); + assertThat(contains.test(Lists.newArrayList("foo"))).isTrue(); + assertThat(contains.test(Sets.newHashSet("bar"))).isFalse(); + assertThat(contains.test(Lists.newArrayList("bar"))).isFalse(); + assertThatThrownBy(() -> contains.test(null)).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> CqlCollection.contains(null).test(Sets.newHashSet("foo"))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_evaluate_containsKey() { + P> containsKey = CqlCollection.containsKey("foo"); + assertThat(containsKey.test(new HashMap<>())).isFalse(); + assertThat(containsKey.test(new LinkedHashMap<>())).isFalse(); + assertThat(containsKey.test(ImmutableMap.of("foo", "bar"))).isTrue(); + assertThat(containsKey.test(ImmutableMap.of("bar", "foo"))).isFalse(); + assertThatThrownBy(() -> containsKey.test(null)).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> 
CqlCollection.containsKey(null).test(ImmutableMap.of("foo", "bar"))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_evaluate_containsValue() { + P> containsValue = CqlCollection.containsValue("foo"); + assertThat(containsValue.test(new HashMap<>())).isFalse(); + assertThat(containsValue.test(new LinkedHashMap<>())).isFalse(); + assertThat(containsValue.test(ImmutableMap.of("bar", "foo"))).isTrue(); + assertThat(containsValue.test(ImmutableMap.of("foo", "bar"))).isFalse(); + assertThatThrownBy(() -> containsValue.test(null)).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> CqlCollection.containsValue(null).test(ImmutableMap.of("foo", "bar"))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_evaluate_entryEq() { + P> entryEq = CqlCollection.entryEq("foo", "bar"); + assertThat(entryEq.test(new HashMap<>())).isFalse(); + assertThat(entryEq.test(new LinkedHashMap<>())).isFalse(); + assertThat(entryEq.test(ImmutableMap.of("foo", "bar"))).isTrue(); + assertThat(entryEq.test(ImmutableMap.of("bar", "foo"))).isFalse(); + assertThatThrownBy(() -> entryEq.test(null)).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> CqlCollection.entryEq(null, "foo").test(ImmutableMap.of("foo", "bar"))) + .isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> CqlCollection.entryEq("foo", null).test(ImmutableMap.of("foo", "bar"))) + .isInstanceOf(IllegalArgumentException.class); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java new file mode 100644 index 00000000000..1c713b5c6fe --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java @@ -0,0 +1,218 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.predicates.CqlCollection.contains; +import static com.datastax.dse.driver.api.core.graph.predicates.CqlCollection.containsKey; +import static com.datastax.dse.driver.api.core.graph.predicates.CqlCollection.entryEq; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.predicates.CqlCollection; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8", description = "DSE 6.8.0 required for collection predicates support") +public class CqlCollectionIT { + + private static final CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withDseWorkloads("graph").build(); + + private static DseSessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE) + .withCreateGraph() + .withCoreEngine() + .withGraphProtocol("graph-binary-1.0") + .build(); + + @ClassRule public static TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = + 
DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + + @BeforeClass + public static void setup() { + for (String setupQuery : createSchema()) { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + } + + private static final Collection createSchema() { + return ImmutableList.of( + "schema.vertexLabel('software').ifNotExists().partitionBy('name', Varchar)" + + ".property('myList', listOf(Varchar))" + + ".property('mySet', setOf(Varchar))" + + ".property('myMapKeys', mapOf(Varchar, Int))" + + ".property('myMapValues', mapOf(Int, Varchar))" + + ".property('myMapEntries', mapOf(Int, Varchar))" + + ".property('myFrozenList', frozen(listOf(Varchar)))" + + ".property('myFrozenSet', frozen(setOf(Float)))" + + ".property('myFrozenMap', frozen(mapOf(Int, Varchar)))" + + ".create()", + "schema.vertexLabel('software').secondaryIndex('by_myList').ifNotExists().by('myList').create();" + + "schema.vertexLabel('software').secondaryIndex('by_mySet').ifNotExists().by('mySet').create();" + + "schema.vertexLabel('software').secondaryIndex('by_myMapKeys').ifNotExists().by('myMapKeys').indexKeys().create();" + + "schema.vertexLabel('software').secondaryIndex('by_myMapValues').ifNotExists().by('myMapValues').indexValues().create();" + + "schema.vertexLabel('software').secondaryIndex('by_myMapEntries').ifNotExists().by('myMapEntries').indexEntries().create();" + + "schema.vertexLabel('software').secondaryIndex('by_myFrozenList').ifNotExists().by('myFrozenList').indexFull().create();" + + "schema.vertexLabel('software').secondaryIndex('by_myFrozenSet').ifNotExists().by('myFrozenSet').indexFull().create();" + + "schema.vertexLabel('software').secondaryIndex('by_myFrozenMap').ifNotExists().by('myFrozenMap').indexFull().create()"); + } + + @Test + public void should_apply_contains_predicate_to_non_frozen_list() { + DseSession session = SESSION_RULE.session(); + + List myList1 = 
com.google.common.collect.ImmutableList.of("apple", "banana"); + List myList2 = com.google.common.collect.ImmutableList.of("cranberry", "orange"); + + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse list 1") + .property("myList", myList1))); + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse list 2") + .property("myList", myList2))); + + assertThat(g.V().has("software", "myList", contains("apple")).values("myList").toList()) + .hasSize(1) + .contains(myList1) + .doesNotContain(myList2); + assertThat(g.V().has("software", "myList", contains("strawberry")).toList()).isEmpty(); + } + + @Test + public void should_apply_contains_predicate_to_non_frozen_set() { + DseSession session = SESSION_RULE.session(); + + Set mySet1 = ImmutableSet.of("apple", "banana"); + Set mySet2 = ImmutableSet.of("cranberry", "orange"); + + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g.addV("software").property("name", "dse set 1").property("mySet", mySet1))); + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g.addV("software").property("name", "dse set 2").property("mySet", mySet2))); + + assertThat(g.V().has("software", "mySet", contains("apple")).values("mySet").toList()) + .hasSize(1) + .contains(mySet1) + .doesNotContain(mySet2); + assertThat(g.V().has("software", "mySet", contains("strawberry")).toList()).isEmpty(); + } + + @Test + public void should_apply_containsKey_predicate_to_non_frozen_map() { + DseSession session = SESSION_RULE.session(); + + Map myMap1 = ImmutableMap.builder().put("id1", 1).build(); + Map myMap2 = ImmutableMap.builder().put("id2", 2).build(); + + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse map containsKey 1") + .property("myMapKeys", myMap1))); + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + 
.property("name", "dse map containsKey 2") + .property("myMapKeys", myMap2))); + + assertThat(g.V().has("software", "myMapKeys", containsKey("id1")).values("myMapKeys").toList()) + .hasSize(1) + .contains(myMap1) + .doesNotContain(myMap2); + assertThat(g.V().has("software", "myMapKeys", containsKey("id3")).toList()).isEmpty(); + } + + @Test + public void should_apply_containsValue_predicate_to_non_frozen_map() { + DseSession session = SESSION_RULE.session(); + + Map myMap1 = ImmutableMap.builder().put(11, "abc").build(); + Map myMap2 = ImmutableMap.builder().put(22, "def").build(); + + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse map containsValue 1") + .property("myMapValues", myMap1))); + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse map containsValue 2") + .property("myMapValues", myMap2))); + assertThat( + g.V() + .has("software", "myMapValues", CqlCollection.containsValue("abc")) + .values("myMapValues") + .toList()) + .hasSize(1) + .contains(myMap1) + .doesNotContain(myMap2); + assertThat(g.V().has("software", "myMapValues", CqlCollection.containsValue("xyz")).toList()) + .isEmpty(); + } + + @Test + public void should_apply_entryEq_predicate_to_non_frozen_map() { + DseSession session = SESSION_RULE.session(); + + Map myMap1 = ImmutableMap.builder().put(11, "abc").build(); + Map myMap2 = ImmutableMap.builder().put(22, "def").build(); + + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse map entryEq 1") + .property("myMapEntries", myMap1))); + session.execute( + FluentGraphStatement.newInstance( + DseGraph.g + .addV("software") + .property("name", "dse map entryEq 2") + .property("myMapEntries", myMap2))); + assertThat( + g.V() + .has("software", "myMapEntries", entryEq(11, "abc")) + .values("myMapEntries") + .toList()) + .hasSize(1) + .contains(myMap1) + 
.doesNotContain(myMap2); + assertThat(g.V().has("software", "myMapEntries", entryEq(11, "xyz")).toList()).isEmpty(); + assertThat(g.V().has("software", "myMapEntries", entryEq(33, "abc")).toList()).isEmpty(); + assertThat(g.V().has("software", "myMapEntries", entryEq(33, "xyz")).toList()).isEmpty(); + } +} From 659a67b1396e00d21ae1a4571b570e91aec17a78 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 26 Sep 2019 14:42:39 -0700 Subject: [PATCH 300/979] Add missing Tinkerpop dependency for shaded JAR --- core-shaded/pom.xml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index eb7c2948b0c..9744f9e3cf1 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -96,6 +96,10 @@ org.apache.tinkerpop gremlin-core + + org.apache.tinkerpop + gremlin-driver + org.apache.tinkerpop tinkergraph-gremlin From bf13784f8d7cb5378a5d969dc46a926ad20a391c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduard=20Tudenh=C3=B6fner?= Date: Tue, 1 Oct 2019 10:41:06 +0200 Subject: [PATCH 301/979] JAVA-2467: Introduce shallowEquals() to DefaultDseKeyspaceMetadata (#277) This is so that the `graph_engine` is being considered when a shallow comparison of Keyspaces happens. This is required so that eventually a `KeyspaceChangeEvent` is fired from `SchemaRefresh.compute(..)`. 
--- .../schema/DefaultDseKeyspaceMetadata.java | 15 + .../refresh/GraphSchemaRefreshTest.java | 403 ++++++++++++++++++ .../schema/KeyspaceGraphMetadataIT.java | 24 +- 3 files changed, 440 insertions(+), 2 deletions(-) create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java index c6fd89e26bb..21ff8f4c6bc 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java @@ -160,4 +160,19 @@ public int hashCode() { functions, aggregates); } + + @Override + public boolean shallowEquals(Object other) { + if (other == this) { + return true; + } else if (other instanceof DseKeyspaceMetadata) { + DseKeyspaceMetadata that = (DseKeyspaceMetadata) other; + return Objects.equals(this.name, that.getName()) + && this.durableWrites == that.isDurableWrites() + && Objects.equals(this.graphEngine, that.getGraphEngine().orElse(null)) + && Objects.equals(this.replication, that.getReplication()); + } else { + return false; + } + } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java new file mode 100644 index 00000000000..84a33c926ad --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java @@ -0,0 +1,403 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph.schema.refresh; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseEdgeMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseKeyspaceMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseTableMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseVertexMetadata; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultMetadata; +import com.datastax.oss.driver.internal.core.metadata.MetadataRefresh; +import com.datastax.oss.driver.internal.core.metadata.schema.DefaultColumnMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.events.KeyspaceChangeEvent; +import com.datastax.oss.driver.internal.core.metadata.schema.events.TableChangeEvent; +import com.datastax.oss.driver.internal.core.metadata.schema.refresh.SchemaRefresh; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Map; +import org.junit.Before; +import org.junit.Test; +import 
org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class GraphSchemaRefreshTest { + + private static final DefaultDseTableMetadata OLD_TABLE = + newTable( + CqlIdentifier.fromInternal("ks_with_engine"), + CqlIdentifier.fromInternal("tbl"), + null, + null); + private static final DefaultDseKeyspaceMetadata OLD_KS1 = newKeyspace("ks1", null); + private static final DefaultDseKeyspaceMetadata KS_WITH_ENGINE = + newKeyspace( + CqlIdentifier.fromInternal("ks_with_engine"), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), OLD_TABLE)); + + @Mock private InternalDriverContext context; + private DefaultMetadata oldMetadata; + + @Before + public void setup() { + oldMetadata = + DefaultMetadata.EMPTY.withSchema( + ImmutableMap.of(OLD_KS1.getName(), OLD_KS1, KS_WITH_ENGINE.getName(), KS_WITH_ENGINE), + false, + context); + } + + @Test + public void should_detect_created_keyspace_without_graph_engine() { + DefaultDseKeyspaceMetadata ks2 = newKeyspace("ks2", null); + SchemaRefresh refresh = + new SchemaRefresh( + ImmutableMap.of( + OLD_KS1.getName(), + OLD_KS1, + KS_WITH_ENGINE.getName(), + KS_WITH_ENGINE, + ks2.getName(), + ks2)); + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(3); + assertThat(result.events).containsExactly(KeyspaceChangeEvent.created(ks2)); + } + + @Test + public void should_detect_created_keyspace_with_graph_engine() { + DefaultDseKeyspaceMetadata ks2 = newKeyspace("ks2", "Core"); + SchemaRefresh refresh = + new SchemaRefresh( + ImmutableMap.of( + OLD_KS1.getName(), + OLD_KS1, + KS_WITH_ENGINE.getName(), + KS_WITH_ENGINE, + ks2.getName(), + ks2)); + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(3); + assertThat(result.events).containsExactly(KeyspaceChangeEvent.created(ks2)); + } 
+ + @Test + public void should_detect_top_level_graph_engine_update_in_keyspace() { + // Change only one top-level option (graph_engine) + DefaultDseKeyspaceMetadata newKs1 = newKeyspace("ks1", "Core"); + SchemaRefresh refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), KS_WITH_ENGINE, OLD_KS1.getName(), newKs1)); + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events).containsExactly(KeyspaceChangeEvent.updated(OLD_KS1, newKs1)); + } + + @Test + public void should_detect_adding_and_renaming_and_removing_vertex_label() { + DefaultDseTableMetadata newTable = + newTable( + KS_WITH_ENGINE.getName(), + CqlIdentifier.fromInternal("tbl"), + new DefaultDseVertexMetadata(CqlIdentifier.fromInternal("someLabel")), + null); + DefaultDseKeyspaceMetadata ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), newTable)); + SchemaRefresh refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); + assertThat(result.newMetadata.getKeyspaces().get(KS_WITH_ENGINE.getName())).isNotNull(); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getVertex()) + .isNotNull(); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getVertex() + .get() + .getLabelName() + .asInternal()) + .isEqualTo("someLabel"); + + // now rename the vertex label + newTable = + newTable( + KS_WITH_ENGINE.getName(), + CqlIdentifier.fromInternal("tbl"), + new 
DefaultDseVertexMetadata(CqlIdentifier.fromInternal("someNewLabel")), + null); + ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), newTable)); + refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getVertex() + .get() + .getLabelName() + .asInternal()) + .isEqualTo("someNewLabel"); + + // now remove the vertex label from the table + DefaultMetadata metadataWithVertexLabel = result.newMetadata; + DefaultDseTableMetadata tableWithRemovedLabel = + newTable(KS_WITH_ENGINE.getName(), CqlIdentifier.fromInternal("tbl"), null, null); + ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), tableWithRemovedLabel)); + refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + result = refresh.compute(metadataWithVertexLabel, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events) + .containsExactly(TableChangeEvent.updated(newTable, tableWithRemovedLabel)); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getVertex() + .isPresent()) + .isFalse(); + } + + @Test + public void should_detect_adding_and_renaming_and_removing_edge_label() { + DefaultDseTableMetadata newTable = + newTable( + KS_WITH_ENGINE.getName(), + CqlIdentifier.fromInternal("tbl"), + null, + newEdgeMetadata( + CqlIdentifier.fromInternal("created"), + CqlIdentifier.fromInternal("person"), + 
CqlIdentifier.fromInternal("software"))); + DefaultDseKeyspaceMetadata ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), newTable)); + SchemaRefresh refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); + assertThat(result.newMetadata.getKeyspaces().get(KS_WITH_ENGINE.getName())).isNotNull(); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getVertex()) + .isNotNull(); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getEdge() + .get() + .getLabelName() + .asInternal()) + .isEqualTo("created"); + + // now rename the edge label + newTable = + newTable( + KS_WITH_ENGINE.getName(), + CqlIdentifier.fromInternal("tbl"), + null, + newEdgeMetadata( + CqlIdentifier.fromInternal("CHANGED"), + CqlIdentifier.fromInternal("person"), + CqlIdentifier.fromInternal("software"))); + ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), newTable)); + refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + result = refresh.compute(oldMetadata, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getEdge() + .get() + .getLabelName() + .asInternal()) + .isEqualTo("CHANGED"); + + // now remove the 
edge label from the table + DefaultMetadata metadataWithEdgeLabel = result.newMetadata; + DefaultDseTableMetadata tableWithRemovedLabel = + newTable(KS_WITH_ENGINE.getName(), CqlIdentifier.fromInternal("tbl"), null, null); + ks = + newKeyspace( + KS_WITH_ENGINE.getName(), + "Core", + ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), tableWithRemovedLabel)); + refresh = + new SchemaRefresh( + ImmutableMap.of(KS_WITH_ENGINE.getName(), ks, OLD_KS1.getName(), OLD_KS1)); + result = refresh.compute(metadataWithEdgeLabel, false, context); + assertThat(result.newMetadata.getKeyspaces()).hasSize(2); + assertThat(result.events) + .containsExactly(TableChangeEvent.updated(newTable, tableWithRemovedLabel)); + assertThat( + ((DseTableMetadata) + result + .newMetadata + .getKeyspaces() + .get(KS_WITH_ENGINE.getName()) + .getTable("tbl") + .get()) + .getEdge() + .isPresent()) + .isFalse(); + } + + private static DefaultDseKeyspaceMetadata newKeyspace(String name, String graphEngine) { + return new DefaultDseKeyspaceMetadata( + CqlIdentifier.fromInternal(name), + false, + false, + graphEngine, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap()); + } + + private static DefaultDseKeyspaceMetadata newKeyspace( + CqlIdentifier name, String graphEngine, @NonNull Map tables) { + return new DefaultDseKeyspaceMetadata( + name, + false, + false, + graphEngine, + Collections.emptyMap(), + Collections.emptyMap(), + tables, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap()); + } + + private static DefaultDseTableMetadata newTable( + @NonNull CqlIdentifier keyspace, + @NonNull CqlIdentifier name, + @Nullable DseVertexMetadata vertex, + @Nullable DseEdgeMetadata edge) { + ImmutableList cols = + ImmutableList.of( + new DefaultColumnMetadata( + keyspace, + CqlIdentifier.fromInternal("parent"), + CqlIdentifier.fromInternal("id"), + DataTypes.INT, + false)); + return 
new DefaultDseTableMetadata( + keyspace, + name, + null, + false, + false, + cols, + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + Collections.emptyMap(), + vertex, + edge); + } + + private static DefaultDseEdgeMetadata newEdgeMetadata( + @NonNull CqlIdentifier labelName, + @NonNull CqlIdentifier fromTable, + @NonNull CqlIdentifier toTable) { + return new DefaultDseEdgeMetadata( + labelName, + fromTable, + fromTable, + Collections.emptyList(), + Collections.emptyList(), + toTable, + toTable, + Collections.emptyList(), + Collections.emptyList()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java index aceba7e2077..4daab6fc467 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -40,13 +40,33 @@ public void should_expose_graph_engine_if_set() { session.execute( "CREATE KEYSPACE keyspace_metadata_it_graph_engine " + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1} " - + "AND graph_engine = 'Tinker'"); + + "AND graph_engine = 'Core'"); Metadata metadata = session.getMetadata(); assertThat(metadata.getKeyspace("keyspace_metadata_it_graph_engine")) .hasValueSatisfying( keyspaceMetadata -> assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) - .hasValue("Tinker")); + .hasValue("Core")); + } + + @Test + public void should_expose_graph_engine_if_keyspace_altered() { + DseSession session = SESSION_RULE.session(); + session.execute( + "CREATE KEYSPACE keyspace_metadata_it_graph_engine_alter " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + 
assertThat(session.getMetadata().getKeyspace("keyspace_metadata_it_graph_engine_alter")) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()).isEmpty()); + + session.execute( + "ALTER KEYSPACE keyspace_metadata_it_graph_engine_alter WITH graph_engine = 'Core'"); + assertThat(session.getMetadata().getKeyspace("keyspace_metadata_it_graph_engine_alter")) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + .hasValue("Core")); } @Test From 2cbe521da2731e008719122268b0e75305da0a83 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 25 Sep 2019 11:56:46 -0500 Subject: [PATCH 302/979] JAVA-2282: Remove GraphSON3 support --- changelog/README.md | 1 + .../internal/core/graph/GraphProtocol.java | 4 +- .../internal/core/graph/GraphSON3SerdeTP.java | 437 ------------------ .../internal/core/graph/GraphSONUtils.java | 12 - core/src/main/resources/reference.conf | 6 + .../internal/core/graph/GraphNodeTest.java | 28 +- .../core/graph/GraphProtocolTest.java | 19 +- .../core/graph/GraphRequestHandlerTest.java | 3 +- 8 files changed, 23 insertions(+), 487 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java diff --git a/changelog/README.md b/changelog/README.md index 560d17a63c6..aca17137d7b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2282: Remove GraphSON3 support - [new feature] JAVA-2098: Add filter predicates for collections - [improvement] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core - [new feature] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java index eb090dcb90a..51e4078652f 100644 --- 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java @@ -15,8 +15,8 @@ public enum GraphProtocol { GRAPHSON_1_0("graphson-1.0"), GRAPHSON_2_0("graphson-2.0"), - GRAPHSON_3_0("graphson-3.0"), - GRAPH_BINARY_1_0("graph-binary-1.0"); + GRAPH_BINARY_1_0("graph-binary-1.0"), + ; private static final Map BY_CODE; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java deleted file mode 100644 index 7d44ca195c9..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSON3SerdeTP.java +++ /dev/null @@ -1,437 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.internal.core.graph; - -import com.datastax.dse.driver.api.core.data.geometry.Geometry; -import com.datastax.dse.driver.api.core.data.geometry.LineString; -import com.datastax.dse.driver.api.core.data.geometry.Point; -import com.datastax.dse.driver.api.core.data.geometry.Polygon; -import com.datastax.dse.driver.api.core.graph.predicates.Geo; -import com.datastax.dse.driver.api.core.graph.predicates.Search; -import com.datastax.dse.driver.internal.core.data.geometry.DefaultLineString; -import com.datastax.dse.driver.internal.core.data.geometry.DefaultPoint; -import com.datastax.dse.driver.internal.core.data.geometry.DefaultPolygon; -import com.datastax.dse.driver.internal.core.data.geometry.Distance; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import org.apache.tinkerpop.gremlin.process.traversal.P; -import org.apache.tinkerpop.gremlin.process.traversal.util.AndP; -import org.apache.tinkerpop.gremlin.process.traversal.util.ConnectiveP; -import org.apache.tinkerpop.gremlin.process.traversal.util.OrP; -import org.apache.tinkerpop.gremlin.structure.io.graphson.AbstractObjectDeserializer; -import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens; -import org.apache.tinkerpop.gremlin.structure.io.graphson.TinkerPopJacksonModule; -import org.apache.tinkerpop.shaded.jackson.core.JsonGenerator; -import org.apache.tinkerpop.shaded.jackson.core.JsonParser; -import org.apache.tinkerpop.shaded.jackson.databind.DeserializationContext; -import org.apache.tinkerpop.shaded.jackson.databind.SerializerProvider; -import org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer; -import org.apache.tinkerpop.shaded.jackson.databind.jsontype.TypeSerializer; -import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule; -import 
org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdScalarSerializer; -import org.apache.tinkerpop.shaded.jackson.databind.ser.std.StdSerializer; - -public class GraphSON3SerdeTP { - /** - * A Jackson Module to use for TinkerPop serialization/deserialization. It extends {@link - * TinkerPopJacksonModule} because of the specific typing format used in GraphSON. - */ - public static class DseGraphModule extends TinkerPopJacksonModule { - - private static final long serialVersionUID = 1L; - - public DseGraphModule() { - super("dse-driver-2.0"); - addSerializer( - DefaultPoint.class, new GraphSON3SerdeTP.DseGraphModule.PointGeometrySerializer()); - addSerializer( - DefaultLineString.class, - new GraphSON3SerdeTP.DseGraphModule.LineStringGeometrySerializer()); - addSerializer( - DefaultPolygon.class, new GraphSON3SerdeTP.DseGraphModule.PolygonGeometrySerializer()); - addSerializer( - Distance.class, new GraphSON3SerdeTP.DseGraphModule.DistanceGeometrySerializer()); - // override TinkerPop's P predicates because of DSE's Search and Geo predicates - addSerializer(P.class, new GraphSON3SerdeTP.DseGraphModule.DsePJacksonSerializer()); - addSerializer( - EditDistance.class, new GraphSON3SerdeTP.DseGraphModule.EditDistanceSerializer()); - - addDeserializer( - DefaultLineString.class, - new GraphSON3SerdeTP.DseGraphModule.LineStringGeometryDeserializer()); - addDeserializer( - DefaultPoint.class, new GraphSON3SerdeTP.DseGraphModule.PointGeometryDeserializer()); - addDeserializer( - DefaultPolygon.class, new GraphSON3SerdeTP.DseGraphModule.PolygonGeometryDeserializer()); - addDeserializer( - Distance.class, new GraphSON3SerdeTP.DseGraphModule.DistanceGeometryDeserializer()); - // override TinkerPop's P predicates because of DSE's Search and Geo predicates - addDeserializer(P.class, new GraphSON3SerdeTP.DseGraphModule.DsePJacksonDeserializer()); - } - - @SuppressWarnings("rawtypes") - @Override - public Map getTypeDefinitions() { - Map definitions = new HashMap<>(); - 
definitions.put(DefaultLineString.class, "LineString"); - definitions.put(DefaultPoint.class, "Point"); - definitions.put(DefaultPolygon.class, "Polygon"); - definitions.put(byte[].class, "Blob"); - definitions.put(Distance.class, "Distance"); - definitions.put(P.class, "P"); - return definitions; - } - - @Override - public String getTypeNamespace() { - return "dse"; - } - - abstract static class AbstractGeometryJacksonDeserializer - extends StdDeserializer { - - private static final long serialVersionUID = 1L; - - AbstractGeometryJacksonDeserializer(final Class clazz) { - super(clazz); - } - - public abstract T parse(final String val); - - @Override - public T deserialize( - final JsonParser jsonParser, final DeserializationContext deserializationContext) - throws IOException { - return parse(jsonParser.getText()); - } - } - - abstract static class AbstractGeometryJacksonSerializer - extends StdScalarSerializer { - - private static final long serialVersionUID = 1L; - - AbstractGeometryJacksonSerializer(final Class clazz) { - super(clazz); - } - - @Override - public void serialize( - final T value, final JsonGenerator gen, final SerializerProvider serializerProvider) - throws IOException { - gen.writeString(value.asWellKnownText()); - } - } - - public static class LineStringGeometrySerializer - extends AbstractGeometryJacksonSerializer { - - private static final long serialVersionUID = 1L; - - LineStringGeometrySerializer() { - super(LineString.class); - } - } - - public static class LineStringGeometryDeserializer - extends AbstractGeometryJacksonDeserializer { - - private static final long serialVersionUID = 1L; - - LineStringGeometryDeserializer() { - super(DefaultLineString.class); - } - - @Override - public DefaultLineString parse(final String val) { - return (DefaultLineString) LineString.fromWellKnownText(val); - } - } - - public static class PolygonGeometrySerializer - extends AbstractGeometryJacksonSerializer { - - private static final long serialVersionUID 
= 1L; - - PolygonGeometrySerializer() { - super(Polygon.class); - } - } - - public static class PolygonGeometryDeserializer - extends AbstractGeometryJacksonDeserializer { - - private static final long serialVersionUID = 1L; - - PolygonGeometryDeserializer() { - super(DefaultPolygon.class); - } - - @Override - public DefaultPolygon parse(final String val) { - return (DefaultPolygon) Polygon.fromWellKnownText(val); - } - } - - public static class PointGeometrySerializer extends AbstractGeometryJacksonSerializer { - - private static final long serialVersionUID = 1L; - - PointGeometrySerializer() { - super(Point.class); - } - } - - public static class PointGeometryDeserializer - extends AbstractGeometryJacksonDeserializer { - - private static final long serialVersionUID = 1L; - - PointGeometryDeserializer() { - super(DefaultPoint.class); - } - - @Override - public DefaultPoint parse(final String val) { - return (DefaultPoint) Point.fromWellKnownText(val); - } - } - - public static class DistanceGeometrySerializer - extends AbstractGeometryJacksonSerializer { - - private static final long serialVersionUID = 1L; - - DistanceGeometrySerializer() { - super(Distance.class); - } - } - - public static class DistanceGeometryDeserializer - extends AbstractGeometryJacksonDeserializer { - - private static final long serialVersionUID = 1L; - - DistanceGeometryDeserializer() { - super(Distance.class); - } - - @Override - public Distance parse(final String val) { - return Distance.fromWellKnownText(val); - } - } - - @SuppressWarnings("rawtypes") - static final class DsePJacksonSerializer extends StdScalarSerializer

      { - - private static final long serialVersionUID = 1L; - - DsePJacksonSerializer() { - super(P.class); - } - - @Override - public void serialize( - final P p, final JsonGenerator jsonGenerator, final SerializerProvider serializerProvider) - throws IOException { - jsonGenerator.writeStartObject(); - jsonGenerator.writeStringField("predicateType", getPredicateType(p)); - jsonGenerator.writeStringField( - GraphSONTokens.PREDICATE, - p instanceof ConnectiveP - ? p instanceof AndP ? GraphSONTokens.AND : GraphSONTokens.OR - : p.getBiPredicate().toString()); - if (p instanceof ConnectiveP) { - jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); - for (final P predicate : ((ConnectiveP) p).getPredicates()) { - jsonGenerator.writeObject(predicate); - } - jsonGenerator.writeEndArray(); - } else { - if (p.getValue() instanceof Collection) { - jsonGenerator.writeArrayFieldStart(GraphSONTokens.VALUE); - for (final Object object : (Collection) p.getValue()) { - jsonGenerator.writeObject(object); - } - jsonGenerator.writeEndArray(); - } else { - jsonGenerator.writeObjectField(GraphSONTokens.VALUE, p.getValue()); - } - } - jsonGenerator.writeEndObject(); - } - - private String getPredicateType(P p) { - if (p.getBiPredicate() instanceof SearchPredicate) { - return Search.class.getSimpleName(); - } else if (p.getBiPredicate() instanceof GeoPredicate) { - return Geo.class.getSimpleName(); - } else { - return P.class.getSimpleName(); - } - } - } - - @SuppressWarnings({"unchecked", "rawtypes"}) - static final class DsePJacksonDeserializer extends AbstractObjectDeserializer

      { - - private static final long serialVersionUID = 1L; - - DsePJacksonDeserializer() { - super(P.class); - } - - @Override - public P createObject(final Map data) { - final String predicate = (String) data.get(GraphSONTokens.PREDICATE); - final String predicateType = (String) data.get("predicateType"); - final Object value = data.get(GraphSONTokens.VALUE); - if (predicate.equals(GraphSONTokens.AND) || predicate.equals(GraphSONTokens.OR)) { - return predicate.equals(GraphSONTokens.AND) - ? new AndP((List

      ) value) - : new OrP((List

      ) value); - } else { - try { - if (value instanceof Collection) { - if (predicate.equals("between")) { - return P.between(((List) value).get(0), ((List) value).get(1)); - } else if (predicateType.equals(P.class.getSimpleName()) - && predicate.equals("inside")) { - return P.between(((List) value).get(0), ((List) value).get(1)); - } else if (predicate.equals("outside")) { - return P.outside(((List) value).get(0), ((List) value).get(1)); - } else if (predicate.equals("within")) { - return P.within((Collection) value); - } else if (predicate.equals("without")) { - return P.without((Collection) value); - } else { - return (P) - P.class.getMethod(predicate, Collection.class).invoke(null, (Collection) value); - } - } else { - if (predicate.equals(SearchPredicate.prefix.name())) { - return Search.prefix((String) value); - } else if (predicate.equals(SearchPredicate.tokenPrefix.name())) { - return Search.tokenPrefix((String) value); - } else if (predicate.equals(SearchPredicate.regex.name())) { - return Search.regex((String) value); - } else if (predicate.equals(SearchPredicate.tokenRegex.name())) { - return Search.tokenRegex((String) value); - } else if (predicate.equals(SearchPredicate.token.name())) { - return Search.token((String) value); - } else if (predicate.equals(SearchPredicate.fuzzy.name())) { - Map arguments = (Map) value; - return Search.fuzzy( - (String) arguments.get("query"), (int) arguments.get("distance")); - } else if (predicate.equals(SearchPredicate.tokenFuzzy.name())) { - Map arguments = (Map) value; - return Search.tokenFuzzy( - (String) arguments.get("query"), (int) arguments.get("distance")); - } else if (predicate.equals(SearchPredicate.phrase.name())) { - Map arguments = (Map) value; - return Search.phrase( - (String) arguments.get("query"), (int) arguments.get("distance")); - } else if (predicateType.equals(Geo.class.getSimpleName()) - && predicate.equals(GeoPredicate.inside.name())) { - return Geo.inside( - ((Distance) value).getCenter(), 
- ((Distance) value).getRadius(), - Geo.Unit.DEGREES); - } else if (predicateType.equals(Geo.class.getSimpleName()) - && predicate.equals(GeoPredicate.insideCartesian.name())) { - return Geo.inside(((Distance) value).getCenter(), ((Distance) value).getRadius()); - } else { - return (P) P.class.getMethod(predicate, Object.class).invoke(null, value); - } - } - } catch (final Exception e) { - throw new IllegalStateException(e.getMessage(), e); - } - } - } - } - - public static class EditDistanceSerializer extends StdSerializer { - - private static final long serialVersionUID = 1L; - - EditDistanceSerializer() { - super(EditDistance.class); - } - - @Override - public void serialize( - EditDistance editDistance, JsonGenerator generator, SerializerProvider provider) - throws IOException { - generator.writeObject( - ImmutableMap.of("query", editDistance.query, "distance", editDistance.distance)); - } - - @Override - public void serializeWithType( - EditDistance editDistance, - JsonGenerator generator, - SerializerProvider provider, - TypeSerializer serializer) - throws IOException { - serialize(editDistance, generator, provider); - } - } - } - - public static class DriverObjectsModule extends SimpleModule { - - private static final long serialVersionUID = 1L; - - public DriverObjectsModule() { - super("datastax-driver-module"); - addSerializer(ObjectGraphNode.class, new ObjectGraphNodeGraphSON3Serializer()); - } - - static final class ObjectGraphNodeGraphSON3Serializer extends StdSerializer { - - private static final long serialVersionUID = 1L; - - protected ObjectGraphNodeGraphSON3Serializer() { - super(ObjectGraphNode.class); - } - - @Override - public void serialize( - ObjectGraphNode objectGraphNode, - JsonGenerator jsonGenerator, - SerializerProvider serializerProvider) - throws IOException { - jsonGenerator.writeObject(objectGraphNode.as(Object.class)); - } - - @Override - public void serializeWithType( - ObjectGraphNode objectGraphNode, - JsonGenerator 
jsonGenerator, - SerializerProvider serializerProvider, - TypeSerializer typeSerializer) - throws IOException { - serialize(objectGraphNode, jsonGenerator, serializerProvider); - } - } - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java index c0685d9e6a6..e9f4f87ca39 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSONUtils.java @@ -33,9 +33,7 @@ import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader; import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion; import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONXModuleV2d0; -import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONXModuleV3d0; import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV2d0; -import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0; import org.apache.tinkerpop.shaded.jackson.core.Version; import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper; @@ -85,15 +83,6 @@ public ObjectMapper load(@NonNull GraphProtocol graphSubProtocol) throws Excepti .addCustomModule(new GraphSON2SerdeTP.DriverObjectsModule()) .create() .createMapper(); - case GRAPHSON_3_0: - return GraphSONMapper.build() - .version(GraphSONVersion.V3_0) - .addCustomModule(GraphSONXModuleV3d0.build().create(false)) - .addRegistry(TinkerIoRegistryV3d0.instance()) - .addCustomModule(new GraphSON3SerdeTP.DseGraphModule()) - .addCustomModule(new GraphSON3SerdeTP.DriverObjectsModule()) - .create() - .createMapper(); default: throw new IllegalStateException( @@ -131,7 +120,6 @@ public static GraphNode createGraphNode(List data, GraphProtocol gra case GRAPHSON_1_0: return new LegacyGraphNode(mapper.readTree(Bytes.getArray(data.get(0))), mapper); case GRAPHSON_2_0: - case 
GRAPHSON_3_0: return new ObjectGraphNode(mapper.readValue(Bytes.getArray(data.get(0)), Object.class)); default: // Should already be caught when we lookup in the cache diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index f4decad2f37..698ec76727a 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1011,6 +1011,12 @@ datastax-java-driver { # This can also be overridden programmatically with GraphStatement.setSubProtocol(). If both are # specified, the programmatic value takes precedence, and this option is ignored. # + # Possible values with built-in support in the driver are: + # [ "graphson-1.0", "graphson-2.0", "graph-binary-1.0"] + # + # The default value for DSE 6.7 and lower is "graphson-2.0". For DSE 6.8 and higher, the default + # value is "graphson-binary-1.0" + # # Required: no # Modifiable at runtime: yes, the new value will be used for requests issued after the change. # Overridable in a profile: yes diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java index e8febdc90e8..c4aa1fabd24 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java @@ -17,7 +17,6 @@ import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_3_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -41,7 +40,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; 
-import java.util.Set; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; @@ -73,17 +71,6 @@ public void setup() { new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); } - @Test - public void should_create_graph_node_for_set_for_graphson_3_0() throws IOException { - // when - GraphNode graphNode = serdeAndCreateGraphNode(ImmutableSet.of("value"), GRAPHSON_3_0); - - // then - assertThat(graphNode.isSet()).isTrue(); - Set set = graphNode.asSet(); - assertThat(set).isEqualTo(ImmutableSet.of("value")); - } - @Test public void should_not_support_set_for_graphson_2_0() throws IOException { // when @@ -115,17 +102,6 @@ public void should_create_graph_node_for_list(GraphProtocol graphVersion) throws assertThat(result).isEqualTo(ImmutableList.of("value")); } - @Test - public void should_create_graph_node_for_map_for_graphson_3_0() throws IOException { - // when - GraphNode graphNode = serdeAndCreateGraphNode(ImmutableMap.of(12, 1234), GRAPHSON_3_0); - - // then - assertThat(graphNode.isMap()).isTrue(); - Map result = graphNode.asMap(); - assertThat(result).isEqualTo(ImmutableMap.of(12, 1234)); - } - @Test @UseDataProvider("allGraphProtocols") public void should_create_graph_node_for_map(GraphProtocol graphProtocol) throws IOException { @@ -306,7 +282,7 @@ private GraphNode serdeAndCreateGraphNode(Object inputValue, GraphProtocol graph @DataProvider public static Object[][] allGraphProtocols() { - return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPHSON_3_0}, {GRAPH_BINARY_1_0}}; + return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; } @DataProvider @@ -316,6 +292,6 @@ public static Object[][] graphson1_0and2_0() { @DataProvider public static Object[][] objectGraphNodeProtocols() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}, 
{GRAPH_BINARY_1_0}}; + return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java index 850cc8063b8..4bd33b4fa55 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java @@ -73,16 +73,21 @@ public void should_fail_if_graph_protocol_used_is_invalid() { assertThatThrownBy(() -> GraphProtocol.fromString("invalid")) .isInstanceOf(IllegalArgumentException.class) .hasMessage( - "Graph protocol used [\"invalid\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graphson-3.0\", \"graph-binary-1.0\"]"); + "Graph protocol used [\"invalid\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); + } + + @Test + public void should_fail_if_graph_protocol_used_is_graphson_3() { + assertThatThrownBy(() -> GraphProtocol.fromString("graphson-3.0")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Graph protocol used [\"graphson-3.0\"] unknown. 
Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); } @DataProvider public static Object[][] protocolObjects() { return new Object[][] { - {GraphProtocol.GRAPHSON_1_0}, - {GraphProtocol.GRAPHSON_2_0}, - {GraphProtocol.GRAPHSON_3_0}, - {GraphProtocol.GRAPH_BINARY_1_0} + {GraphProtocol.GRAPHSON_1_0}, {GraphProtocol.GRAPHSON_2_0}, {GraphProtocol.GRAPH_BINARY_1_0} }; } @@ -90,8 +95,6 @@ public static Object[][] protocolObjects() { public static Object[][] protocolStrings() { // putting manual strings here to be sure to be notified if a value in // GraphProtocol ever changes - return new Object[][] { - {"graphson-1.0"}, {"graphson-2.0"}, {"graphson-3.0"}, {"graph-binary-1.0"} - }; + return new Object[][] {{"graphson-1.0"}, {"graphson-2.0"}, {"graph-binary-1.0"}}; } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 1e18842aa75..cf1aa9ec7a1 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -16,7 +16,6 @@ package com.datastax.dse.driver.internal.core.graph; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_3_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.BIGINT; @@ -481,7 +480,7 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th @DataProvider public static Object[][] bytecodeEnabledGraphProtocols() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPHSON_3_0}, {GRAPH_BINARY_1_0}}; + return new Object[][] 
{{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; } @Test From bcde4b1c0cb4787989a2c36a3d9c354aee8a27e9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 8 Oct 2019 15:01:56 -0700 Subject: [PATCH 303/979] Upgrade OSS driver to 4.2.2 Also revert erroneous test changes that were based on an OSS 4.x preview build. --- .../context/DseStartupOptionsBuilderTest.java | 30 +++++-------------- 1 file changed, 8 insertions(+), 22 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index 0d255488c3d..d2cdd313369 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -39,13 +39,14 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; +import org.mockito.Mockito; @RunWith(DataProviderRunner.class) public class DseStartupOptionsBuilderTest { private DefaultDriverContext driverContext; - // Mocks for instantiating the DSE driver context + // Mocks for instantiating the default driver context @Mock private DriverConfigLoader configLoader; @Mock private DriverConfig driverConfig; @Mock private DriverExecutionProfile defaultProfile; @@ -81,9 +82,7 @@ private void assertDefaultStartupOptions(Startup startup) { } @Test - public void should_build_startup_options_with_no_compression_if_undefined() { - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("none"); + public void should_build_minimal_startup_options() { buildContext(null, null, null); Startup startup = new Startup(driverContext.getStartupOptions()); assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); @@ -104,19 +103,8 @@ public void should_build_startup_options_with_compression(String compression) { 
assertDefaultStartupOptions(startup); } - @Test - public void should_fail_to_build_startup_options_with_invalid_compression() { - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("foobar"); - buildContext(null, null, null); - assertThatIllegalArgumentException() - .isThrownBy(() -> new Startup(driverContext.getStartupOptions())); - } - @Test public void should_build_startup_options_with_client_id() { - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("none"); UUID customClientId = Uuids.random(); buildContext(customClientId, null, null); Startup startup = new Startup(driverContext.getStartupOptions()); @@ -131,8 +119,6 @@ public void should_build_startup_options_with_client_id() { @Test public void should_build_startup_options_with_application_version_and_name() { - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("none"); buildContext(null, "Custom_App_Name", "Custom_App_Version"); Startup startup = new Startup(driverContext.getStartupOptions()); // assert the app name and version are present @@ -147,7 +133,9 @@ public void should_build_startup_options_with_application_version_and_name() { @Test public void should_build_startup_options_with_all_options() { // mock config to specify "snappy" compression - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + Mockito.when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_COMPRESSION)) + .thenReturn(Boolean.TRUE); + Mockito.when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION)) .thenReturn("snappy"); UUID customClientId = Uuids.random(); @@ -164,12 +152,10 @@ public void should_build_startup_options_with_all_options() { @Test public void should_use_configuration_when_no_programmatic_values_provided() { - when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) + 
Mockito.when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) .thenReturn("Config_App_Name"); - when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) + Mockito.when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) .thenReturn("Config_App_Version"); - when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("none"); buildContext(null, null, null); Startup startup = new Startup(driverContext.getStartupOptions()); From 6be3329aba4ea5efde636ab173ff36418f92b479 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 14 Oct 2019 16:10:14 -0500 Subject: [PATCH 304/979] JAVA-2492: Parse edge metadata using internal identifiers --- changelog/README.md | 1 + .../schema/parsing/DseTableParser.java | 23 ++++- .../TableGraphMetadataCaseSensitiveIT.java | 92 +++++++++++++++++++ 3 files changed, 111 insertions(+), 5 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java diff --git a/changelog/README.md b/changelog/README.md index aca17137d7b..443ac3ed2ea 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [bug] JAVA-2492: Parse edge metadata using internal identifiers - [improvement] JAVA-2282: Remove GraphSON3 support - [new feature] JAVA-2098: Add filter predicates for collections - [improvement] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java index 17f86007b35..92d1dc84609 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -35,7 +35,6 @@ 
import com.datastax.oss.driver.api.core.type.MapType; import com.datastax.oss.driver.api.core.type.SetType; import com.datastax.oss.driver.api.core.type.UserDefinedType; -import com.datastax.oss.driver.internal.core.CqlIdentifiers; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameCompositeParser; @@ -48,10 +47,12 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.UUID; import net.jcip.annotations.ThreadSafe; @@ -389,12 +390,24 @@ private DseEdgeMetadata buildEdge( getLabel(row), fromTable, findVertexLabel(fromTable, keyspaceVertices, "incoming"), - CqlIdentifiers.wrap(row.getListOfString("from_partition_key_columns")), - CqlIdentifiers.wrap(row.getListOfString("from_clustering_columns")), + wrapInternal(row.getListOfString("from_partition_key_columns")), + wrapInternal(row.getListOfString("from_clustering_columns")), toTable, findVertexLabel(toTable, keyspaceVertices, "outgoing"), - CqlIdentifiers.wrap(row.getListOfString("to_partition_key_columns")), - CqlIdentifiers.wrap(row.getListOfString("to_clustering_columns"))); + wrapInternal(row.getListOfString("to_partition_key_columns")), + wrapInternal(row.getListOfString("to_clustering_columns"))); + } + + // TODO replace by CqlIdentifiers.wrapInternal() when this is rebased on OSS 4.3.0 + @NonNull + private List wrapInternal(@NonNull Iterable in) { + + Objects.requireNonNull(in, "Input Iterable must not be null"); + ImmutableList.Builder builder = 
ImmutableList.builder(); + for (String name : in) { + builder.add(CqlIdentifier.fromInternal(name)); + } + return builder.build(); } private CqlIdentifier getLabel(AdminRow row) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java new file mode 100644 index 00000000000..ea3e4c59cf4 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java @@ -0,0 +1,92 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +/** + * A regression test for a specific case of schema parsing for graphs built from tables containing + * case-sensitive column names in it's tables. See JAVA-2492 for more information. 
+ */ +@Category(ParallelizableTests.class) +@DseRequirement(min = "6.8") +public class TableGraphMetadataCaseSensitiveIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @BeforeClass + public static void createTables() { + DseSession session = SESSION_RULE.session(); + + session.execute( + "CREATE TABLE \"Person\" (\"Name\" varchar, \"Age\" int, PRIMARY KEY ((\"Name\"), \"Age\")) WITH VERTEX LABEL"); + session.execute( + "CREATE TABLE \"Software\" (\"Name\" varchar, \"Complexity\" int, PRIMARY KEY ((\"Name\"), \"Complexity\")) WITH VERTEX LABEL"); + session.execute( + "CREATE TABLE \"Created\"" + + " (\"PersonName\" varchar, \"SoftwareName\" varchar, \"PersonAge\" int, \"SoftwareComplexity\" int, weight int," + + " primary key ((\"PersonName\"), \"SoftwareName\", weight)) WITH EDGE LABEL\n" + + " FROM \"Person\"((\"PersonName\"),\"PersonAge\")" + + " TO \"Software\"((\"SoftwareName\"),\"SoftwareComplexity\");"); + } + + @Test + public void should_expose_case_sensitive_edge_metadata() { + DseSession session = SESSION_RULE.session(); + Metadata metadata = session.getMetadata(); + assertThat(metadata.getKeyspace(SESSION_RULE.keyspace())) + .hasValueSatisfying( + keyspaceMetadata -> + assertThat(keyspaceMetadata.getTable(CqlIdentifier.fromInternal("Created"))) + .hasValueSatisfying( + created -> { + DseTableMetadata dseCreated = (DseTableMetadata) created; + assertThat(dseCreated.getEdge()) + .hasValueSatisfying( + edge -> { + assertThat(edge.getFromPartitionKeyColumns()) + .isEqualTo( + ImmutableList.of( + CqlIdentifier.fromInternal("PersonName"))); + assertThat(edge.getToPartitionKeyColumns()) + .isEqualTo( + ImmutableList.of( + CqlIdentifier.fromInternal("SoftwareName"))); + assertThat(edge.getFromClusteringColumns()) + .isEqualTo( + 
ImmutableList.of( + CqlIdentifier.fromInternal("PersonAge"))); + assertThat(edge.getToClusteringColumns()) + .isEqualTo( + ImmutableList.of( + CqlIdentifier.fromInternal("SoftwareComplexity"))); + }); + })); + } +} From 158817bb37e206fca492c1d76a910bf9ceb4c1be Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 17 Oct 2019 15:12:56 -0500 Subject: [PATCH 305/979] JAVA-2507: Default timestamp in GraphStatementBuilderBase to Long.MIN_VALUE (#286) JAVA-2507: Default timestamp in GraphStatementBuilderBase to Long.MIN_VALUE --- changelog/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/changelog/README.md b/changelog/README.md index 443ac3ed2ea..3ab71a3e88b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [bug] JAVA-2507: Default timestamp in GraphStatementBuilderBase to Long.MIN_VALUE - [bug] JAVA-2492: Parse edge metadata using internal identifiers - [improvement] JAVA-2282: Remove GraphSON3 support - [new feature] JAVA-2098: Add filter predicates for collections From 705fa26055c1466aef3c9077ace506ae83c65931 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Oct 2019 09:55:01 +0300 Subject: [PATCH 306/979] Set version to 2.3.0-ngdg-SNAPSHOT --- changelog/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 3ab71a3e88b..443ac3ed2ea 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,7 +4,6 @@ ### NGDG (in progress) -- [bug] JAVA-2507: Default timestamp in GraphStatementBuilderBase to Long.MIN_VALUE - [bug] JAVA-2492: Parse edge metadata using internal identifiers - [improvement] JAVA-2282: Remove GraphSON3 support - [new feature] JAVA-2098: Add filter predicates for collections From 94babbdca225e7e8d41c975bfbbe665dbde29902 Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Mon, 21 Oct 2019 16:22:06 +0200 Subject: [PATCH 307/979] Update Tinkerpop version --- core/revapi.json | 18 ++++++++++++++++++ 
.../core/graph/SocialTraversalSourceDsl.java | 5 +++++ pom.xml | 3 +++ 3 files changed, 26 insertions(+) diff --git a/core/revapi.json b/core/revapi.json index ba5c7fae49a..2f4592ca824 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5013,6 +5013,24 @@ "old": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal> org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::valueMap(java.lang.String[])", "new": "method org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal> org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal::valueMap(java.lang.String[])", "justification": "JAVA-2235: GraphBinary support - TinkerPop upgrade from 3.3 to 3.4" + }, + { + "code": "java.class.externalClassExposedInAPI", + "new": "class org.apache.tinkerpop.gremlin.process.traversal.util.ImmutableExplanation", + "justification": "Upgrade to Tinkerpop 3.4.4" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", + "new": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", + "superClass": "org.apache.tinkerpop.gremlin.process.traversal.util.AbstractExplanation", + "justification": "Upgrade to Tinkerpop 3.4.4" + }, + { + "code": "java.class.defaultSerializationChanged", + "old": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", + "new": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", + "justification": "Upgrade to Tinkerpop 3.4.4" } ] } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java index d7651ac327e..33e1d78a04c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SocialTraversalSourceDsl.java @@ -15,6 +15,7 @@ */ package com.datastax.dse.driver.api.core.graph; +import org.apache.tinkerpop.gremlin.process.remote.RemoteConnection; import org.apache.tinkerpop.gremlin.process.traversal.P; import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.DefaultGraphTraversal; @@ -35,6 +36,10 @@ public SocialTraversalSourceDsl(final Graph graph) { super(graph); } + public SocialTraversalSourceDsl(RemoteConnection connection) { + super(connection); + } + public GraphTraversal persons(String... names) { GraphTraversalSource clone = this.clone(); diff --git a/pom.xml b/pom.xml index 96b84a186d7..8ff1ba2cb4a 100644 --- a/pom.xml +++ b/pom.xml @@ -46,6 +46,9 @@ 2.1.11 4.0.5 4.1.45.Final + 1.2.1 + 3.4.4-20190930-6c997860 + 3.4.4 1.7.26 1.2.1 3.4.3-20190731-199be4b5 From a3ba5db39fb80496066b8e27561fe18fb04d9850 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 11:06:00 +0300 Subject: [PATCH 308/979] Use CqlIdentifiers.wrapInternal to wrap collections of identifiers using CQL internal form --- .../schema/parsing/DseTableParser.java | 21 +++++-------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java index 92d1dc84609..7b6acfb9040 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -35,6 +35,7 @@ import com.datastax.oss.driver.api.core.type.MapType; import com.datastax.oss.driver.api.core.type.SetType; import com.datastax.oss.driver.api.core.type.UserDefinedType; +import 
com.datastax.oss.driver.internal.core.CqlIdentifiers; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameCompositeParser; @@ -390,24 +391,12 @@ private DseEdgeMetadata buildEdge( getLabel(row), fromTable, findVertexLabel(fromTable, keyspaceVertices, "incoming"), - wrapInternal(row.getListOfString("from_partition_key_columns")), - wrapInternal(row.getListOfString("from_clustering_columns")), + CqlIdentifiers.wrapInternal(row.getListOfString("from_partition_key_columns")), + CqlIdentifiers.wrapInternal(row.getListOfString("from_clustering_columns")), toTable, findVertexLabel(toTable, keyspaceVertices, "outgoing"), - wrapInternal(row.getListOfString("to_partition_key_columns")), - wrapInternal(row.getListOfString("to_clustering_columns"))); - } - - // TODO replace by CqlIdentifiers.wrapInternal() when this is rebased on OSS 4.3.0 - @NonNull - private List wrapInternal(@NonNull Iterable in) { - - Objects.requireNonNull(in, "Input Iterable must not be null"); - ImmutableList.Builder builder = ImmutableList.builder(); - for (String name : in) { - builder.add(CqlIdentifier.fromInternal(name)); - } - return builder.build(); + CqlIdentifiers.wrapInternal(row.getListOfString("to_partition_key_columns")), + CqlIdentifiers.wrapInternal(row.getListOfString("to_clustering_columns"))); } private CqlIdentifier getLabel(AdminRow row) { From cc882f2f0967bacc949a6583d6c30cd0142aad9e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 22 Oct 2019 18:49:38 +0300 Subject: [PATCH 309/979] Fix formatting issue --- .../internal/core/metadata/schema/parsing/DseTableParser.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java index 7b6acfb9040..27bd363a6d3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/parsing/DseTableParser.java @@ -48,12 +48,10 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.Multimap; -import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.UUID; import net.jcip.annotations.ThreadSafe; From e66c555e700d0b37da4972c3532ef551decd9fd4 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 23 Oct 2019 03:08:13 -0500 Subject: [PATCH 310/979] JAVA-2510: Fix GraphBinaryDataTypesTest Codec registry initialization (#292) --- changelog/README.md | 1 + .../core/graph/binary/GraphBinaryDataTypesTest.java | 11 +++++++---- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 443ac3ed2ea..e4ac952d455 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [bug] JAVA-2510: Fix GraphBinaryDataTypesTest Codec registry initialization - [bug] JAVA-2492: Parse edge metadata using internal identifiers - [improvement] JAVA-2282: Remove GraphSON3 support - [new feature] JAVA-2098: Add filter predicates for collections diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java index 80b20d86afe..dae67965844 100644 --- 
a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -26,7 +26,7 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.TupleType; import com.datastax.oss.driver.api.core.type.UserDefinedType; -import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.internal.core.type.UserDefinedTypeBuilder; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -62,9 +62,12 @@ public class GraphBinaryDataTypesTest { @Mock private DseDriverContext context; - private static final CodecRegistry CODEC_REGISTRY = - new DefaultCodecRegistry( - "testDseRegistry", DseTypeCodecs.POINT, DseTypeCodecs.LINE_STRING, DseTypeCodecs.POLYGON); + private static final MutableCodecRegistry CODEC_REGISTRY = + new DefaultCodecRegistry("testDseRegistry"); + + static { + CODEC_REGISTRY.register(DseTypeCodecs.POINT, DseTypeCodecs.LINE_STRING, DseTypeCodecs.POLYGON); + } @Before public void setup() { From 789514eb8c4a3098da021300010f9a565f452137 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 14 Nov 2019 14:37:46 -0600 Subject: [PATCH 311/979] JAVA-2477: Make TinkerPop compatible with shaded Netty (#299) JAVA-2477: Make TinkerPop compatible with shaded Netty --- .../internal/core/graph/GraphConversions.java | 38 +- .../graph/GraphRequestAsyncProcessor.java | 6 +- .../core/graph/TinkerpopBufferUtil.java | 21 ++ ...actDynamicGraphBinaryCustomSerializer.java | 30 +- ...ractSimpleGraphBinaryCustomSerializer.java | 35 +- .../binary/ComplexTypeSerializerUtil.java | 48 +-- .../graph/binary/CqlDurationSerializer.java | 16 +- 
.../core/graph/binary/DistanceSerializer.java | 16 +- .../graph/binary/EditDistanceSerializer.java | 16 +- .../core/graph/binary/GeometrySerializer.java | 20 +- .../core/graph/binary/GraphBinaryModule.java | 26 +- .../core/graph/binary/PairSerializer.java | 16 +- .../graph/binary/TupleValueSerializer.java | 15 +- .../core/graph/binary/UdtValueSerializer.java | 13 +- .../graph/binary/buffer/DseNettyBuffer.java | 268 +++++++++++++ .../binary/buffer/DseNettyBufferFactory.java | 123 ++++++ .../TinkerpopBufferPrimitiveCodec.java | 255 +++++++++++++ .../com/datastax/dse/driver/Assertions.java | 15 + .../dse/driver/TinkerpopBufferAssert.java | 29 ++ .../internal/core/graph/GraphNodeTest.java | 14 +- .../core/graph/GraphRequestHandlerTest.java | 16 +- .../binary/GraphBinaryDataTypesTest.java | 20 +- .../TinkerpopBufferPrimitiveCodecTest.java | 354 ++++++++++++++++++ pom.xml | 6 +- 24 files changed, 1249 insertions(+), 167 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java create mode 100644 core/src/test/java/com/datastax/dse/driver/Assertions.java create mode 100644 core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index eee9f65efb5..efffdb2f6e1 100644 --- 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -18,9 +18,14 @@ import static java.nio.charset.StandardCharsets.UTF_8; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.*; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; import com.datastax.dse.protocol.internal.request.RawBytesQuery; import com.datastax.dse.protocol.internal.request.query.ContinuousPagingOptions; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; @@ -45,9 +50,13 @@ import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.time.Duration; -import java.util.*; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.BufferFactory; /** * Utility class to move boilerplate out of {@link GraphRequestHandler}. 
@@ -70,6 +79,8 @@ public class GraphConversions extends Conversions { static final String LANGUAGE_GROOVY = "gremlin-groovy"; static final String LANGUAGE_BYTECODE = "bytecode-json"; + private static final BufferFactory FACTORY = new DseNettyBufferFactory(); + @VisibleForTesting static final byte[] EMPTY_STRING_QUERY = "".getBytes(UTF_8); public static GraphProtocol inferSubProtocol( @@ -105,9 +116,8 @@ public static Message createContinuousMessageFromGraphStatement( try { Map queryParams = ((ScriptGraphStatement) statement).getQueryParams(); if (subProtocol.isGraphBinary()) { - ByteBuf graphBinaryParams = graphBinaryModule.serialize(queryParams); - encodedQueryParams = - Collections.singletonList(ByteBufUtil.toByteBuffer(graphBinaryParams)); + Buffer graphBinaryParams = graphBinaryModule.serialize(queryParams); + encodedQueryParams = Collections.singletonList(graphBinaryParams.nioBuffer()); graphBinaryParams.release(); } else { encodedQueryParams = @@ -173,9 +183,8 @@ static Message createMessageFromGraphStatement( try { Map queryParams = ((ScriptGraphStatement) statement).getQueryParams(); if (subProtocol.isGraphBinary()) { - ByteBuf graphBinaryParams = graphBinaryModule.serialize(queryParams); - encodedQueryParams = - Collections.singletonList(ByteBufUtil.toByteBuffer(graphBinaryParams)); + Buffer graphBinaryParams = graphBinaryModule.serialize(queryParams); + encodedQueryParams = Collections.singletonList(graphBinaryParams.nioBuffer()); graphBinaryParams.release(); } else { encodedQueryParams = @@ -323,10 +332,10 @@ public static Map createCustomPayload( if (subProtocol.isGraphBinary() && graphLanguage.equals(LANGUAGE_BYTECODE)) { Object bytecodeQuery = bytecodeToSerialize(statement); try { - ByteBuf bytecodeByteBuf = graphBinaryModule.serialize(bytecodeQuery); - payload.put(GRAPH_BINARY_QUERY_OPTION_KEY, ByteBufUtil.toByteBuffer(bytecodeByteBuf)); + Buffer bytecodeByteBuf = graphBinaryModule.serialize(bytecodeQuery); + 
payload.put(GRAPH_BINARY_QUERY_OPTION_KEY, bytecodeByteBuf.nioBuffer()); bytecodeByteBuf.release(); - } catch (SerializationException e) { + } catch (IOException e) { throw new UncheckedIOException(e); } } @@ -386,10 +395,7 @@ public static GraphNode createGraphBinaryGraphNode( // there should be only one column in the given row Preconditions.checkArgument(data.size() == 1, "Invalid row given to deserialize"); - // TODO: avoid the conversion to ByteBuffer and use Netty ByteBuf directly from the driver since - // GraphBinary accepts ByteBufs. - // This would require fiddling with the DseFrameCodecs and the GraphRequestHandler - ByteBuf toDeserialize = ByteBufUtil.toByteBuf(data.get(0)); + Buffer toDeserialize = FACTORY.wrap(data.get(0)); Object deserializedObject = graphBinaryModule.deserialize(toDeserialize); toDeserialize.release(); assert deserializedObject instanceof Traverser diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 2703b880180..12266e16ce9 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -28,9 +28,9 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.concurrent.CompletionStage; import net.jcip.annotations.ThreadSafe; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; @ThreadSafe public class 
GraphRequestAsyncProcessor diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java new file mode 100644 index 00000000000..05092f8180d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java @@ -0,0 +1,21 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import java.nio.ByteBuffer; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; + +/** Mirror of {@link ByteBufUtil} for Tinkerpop Buffer's */ +public class TinkerpopBufferUtil { + + public static ByteBuffer readBytes(Buffer tinkerBuff, int size) { + ByteBuffer res = ByteBuffer.allocate(size); + tinkerBuff.readBytes(res); + res.flip(); + return res; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java index 954326fef9d..518837953b4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java @@ -6,45 +6,45 @@ */ package com.datastax.dse.driver.internal.core.graph.binary; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import 
org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; /** * Convenience class for dynamic types implemented as Custom types in GraphBinary. This class will * take care of handling {value_length} automatically for implementing classes. {@link - * #writeDynamicCustomValue(Object, ByteBuf, GraphBinaryWriter)} and {@link - * #readDynamicCustomValue(ByteBuf, GraphBinaryReader)} only need to handle writing the internal + * #writeDynamicCustomValue(Object, Buffer, GraphBinaryWriter)} and {@link + * #readDynamicCustomValue(Buffer, GraphBinaryReader)} only need to handle writing the internal * components of the custom type. * * @param the java type the implementing classes will encode and decode. */ public abstract class AbstractDynamicGraphBinaryCustomSerializer extends AbstractSimpleGraphBinaryCustomSerializer { - protected abstract void writeDynamicCustomValue( - T value, ByteBuf buffer, GraphBinaryWriter context) throws SerializationException; + protected abstract void writeDynamicCustomValue(T value, Buffer buffer, GraphBinaryWriter context) + throws IOException; - protected abstract T readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) - throws SerializationException; + protected abstract T readDynamicCustomValue(Buffer buffer, GraphBinaryReader context) + throws IOException; @Override - protected T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) - throws SerializationException { + protected T readCustomValue(int valueLength, Buffer buffer, GraphBinaryReader context) + throws IOException { int initialIndex = buffer.readerIndex(); // read actual custom value T read = readDynamicCustomValue(buffer, context); // make sure we didn't read more than what was input as {value_length} - checkValueSize((buffer.readerIndex() - initialIndex), valueLength); + checkValueSize(valueLength, (buffer.readerIndex() - initialIndex)); return read; } @Override 
- protected void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeCustomValue(T value, Buffer buffer, GraphBinaryWriter context) + throws IOException { // Store the current writer index final int valueLengthIndex = buffer.writerIndex(); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java index fc6bb8012cf..fe9457b964d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java @@ -7,12 +7,13 @@ package com.datastax.dse.driver.internal.core.graph.binary; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; -import io.netty.buffer.ByteBuf; +import java.io.IOException; import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.DataType; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.types.CustomTypeSerializer; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.DataType; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.types.CustomTypeSerializer; /** * A base custom type serializer for DSE types that handles most of the boiler plate code associated @@ -27,7 +28,7 @@ * DSE types. * *

      Implementing classes are still in charge of encoding {value_length}{value_bytes} in the {@link - * #readCustomValue(int, ByteBuf, GraphBinaryReader)} implementations. + * #readCustomValue(int, Buffer, GraphBinaryReader)} implementations. * *

      Implementing classes must override {@link CustomTypeSerializer#getTypeName()} with their own * type name. @@ -42,11 +43,11 @@ abstract class AbstractSimpleGraphBinaryCustomSerializer implements CustomTyp protected static final String INCORRECT_VALUE_LENGTH_ERROR_MESSAGE = "{value_length} read for this value does not correspond to the size of a '%s' value. [%s] bytes required but got [%s]"; - protected abstract T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) - throws SerializationException; + protected abstract T readCustomValue(int valueLength, Buffer buffer, GraphBinaryReader context) + throws IOException; - protected abstract void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException; + protected abstract void writeCustomValue(T value, Buffer buffer, GraphBinaryWriter context) + throws IOException; protected void checkValueSize(int lengthRequired, int lengthFound) { Preconditions.checkArgument( @@ -63,7 +64,7 @@ public DataType getDataType() { } @Override - public T read(ByteBuf buffer, GraphBinaryReader context) throws SerializationException { + public T read(Buffer buffer, GraphBinaryReader context) throws IOException { // the type serializer registry will take care of deserializing {custom_type_name} // read {custom_type_info_length} and verify it is 0. 
// See #write(T, ByteBuf, GraphBinaryWriter) for why it is set to 0 @@ -76,8 +77,8 @@ public T read(ByteBuf buffer, GraphBinaryReader context) throws SerializationExc } @Override - public T readValue(ByteBuf buffer, GraphBinaryReader context, boolean nullable) - throws SerializationException { + public T readValue(Buffer buffer, GraphBinaryReader context, boolean nullable) + throws IOException { if (nullable) { // read {value_flag} final byte valueFlag = buffer.readByte(); @@ -110,8 +111,8 @@ public T readValue(ByteBuf buffer, GraphBinaryReader context, boolean nullable) } @Override - public void write(final T value, final ByteBuf buffer, final GraphBinaryWriter context) - throws SerializationException { + public void write(final T value, final Buffer buffer, final GraphBinaryWriter context) + throws IOException { // the type serializer registry will take care of serializing {custom_type_name} // write "{custom_type_info_length}" to 0 because we don't need it for the DSE types context.writeValue(0, buffer, false); @@ -120,8 +121,8 @@ public void write(final T value, final ByteBuf buffer, final GraphBinaryWriter c @Override public void writeValue( - final T value, final ByteBuf buffer, final GraphBinaryWriter context, final boolean nullable) - throws SerializationException { + final T value, final Buffer buffer, final GraphBinaryWriter context, final boolean nullable) + throws IOException { if (value == null) { if (!nullable) { throw new SerializationException("Unexpected null value when nullable is false"); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java index 64ebf74d9a1..955bf9b3870 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java @@ -7,11 +7,18 @@ package com.datastax.dse.driver.internal.core.graph.binary; import com.datastax.dse.driver.internal.core.context.DseDriverContext; -import com.datastax.dse.driver.internal.core.graph.ByteBufUtil; +import com.datastax.dse.driver.internal.core.graph.TinkerpopBufferUtil; +import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; +import com.datastax.dse.driver.internal.core.protocol.TinkerpopBufferPrimitiveCodec; import com.datastax.oss.driver.api.core.data.GettableByIndex; import com.datastax.oss.driver.api.core.data.SettableByIndex; -import com.datastax.oss.driver.api.core.type.*; -import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; +import com.datastax.oss.driver.api.core.type.CustomType; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.ListType; +import com.datastax.oss.driver.api.core.type.MapType; +import com.datastax.oss.driver.api.core.type.SetType; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.internal.core.type.DataTypeHelper; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -19,30 +26,29 @@ import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.result.RawType; import edu.umd.cs.findbugs.annotations.Nullable; -import io.netty.buffer.ByteBuf; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.util.Objects; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; class ComplexTypeSerializerUtil { - private static final PrimitiveCodec protocolCodec = - new ByteBufPrimitiveCodec(GraphBinaryModule.ALLOCATOR); + private static final PrimitiveCodec 
codec = + new TinkerpopBufferPrimitiveCodec(new DseNettyBufferFactory()); - static void encodeTypeDefinition(DataType type, ByteBuf buffer, DseDriverContext driverContext) { + static void encodeTypeDefinition(DataType type, Buffer buffer, DseDriverContext driverContext) { RawType protocolType = toProtocolSpec(type); - protocolType.encode(buffer, protocolCodec, driverContext.getProtocolVersion().getCode()); + protocolType.encode(buffer, codec, driverContext.getProtocolVersion().getCode()); } - static DataType decodeTypeDefinition(ByteBuf buffer, DseDriverContext driverContext) { - RawType type = - RawType.decode(buffer, protocolCodec, driverContext.getProtocolVersion().getCode()); + static DataType decodeTypeDefinition(Buffer buffer, DseDriverContext driverContext) { + RawType type = RawType.decode(buffer, codec, driverContext.getProtocolVersion().getCode()); return DataTypeHelper.fromProtocolSpec(type, driverContext); } - /* Netty-based encoding of UDT values, based on the UdtCoded.encode() method, but using Netty buffers directly to avoid + /* Tinkerpop-based encoding of UDT values, based on the UdtCoded.encode() method, but using Tinkerpop buffers directly to avoid unnecessary NIO ByteBuffer copies. */ - static void encodeValue(@Nullable GettableByIndex value, ByteBuf nettyBuf) { + static void encodeValue(@Nullable GettableByIndex value, Buffer tinkerBuff) { if (value == null) { return; } @@ -50,24 +56,24 @@ static void encodeValue(@Nullable GettableByIndex value, ByteBuf nettyBuf) { for (int i = 0; i < value.size(); i++) { ByteBuffer fieldBuffer = value.getBytesUnsafe(i); if (fieldBuffer == null) { - nettyBuf.writeInt(-1); + tinkerBuff.writeInt(-1); } else { - nettyBuf.writeInt(fieldBuffer.remaining()); - nettyBuf.writeBytes(fieldBuffer.duplicate()); + tinkerBuff.writeInt(fieldBuffer.remaining()); + tinkerBuff.writeBytes(fieldBuffer.duplicate()); } } } - /* This method will move forward the netty buffer given in parameter based on the UDT value read. 
- Content of the method is roughly equivalent to UdtCodec.decode(), but using Netty buffers directly to avoid + /* This method will move forward the Tinkerpop buffer given in parameter based on the UDT value read. + Content of the method is roughly equivalent to UdtCodec.decode(), but using Tinkerpop buffers directly to avoid unnecessary NIO ByteBuffer copies. */ - static > T decodeValue(ByteBuf nettyBuf, T val, int size) { + static > T decodeValue(Buffer tinkerBuff, T val, int size) { try { for (int i = 0; i < size; i++) { - int fieldSize = nettyBuf.readInt(); + int fieldSize = tinkerBuff.readInt(); if (fieldSize >= 0) { // the reassignment is to shut down the error-prone warning about ignoring return values. - val = val.setBytesUnsafe(i, ByteBufUtil.readBytes(nettyBuf, fieldSize)); + val = val.setBytesUnsafe(i, TinkerpopBufferUtil.readBytes(tinkerBuff, fieldSize)); } } return val; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java index 1b8e595341a..a6fefc92b2d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java @@ -7,10 +7,10 @@ package com.datastax.dse.driver.internal.core.graph.binary; import com.datastax.oss.driver.api.core.data.CqlDuration; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public class 
CqlDurationSerializer extends AbstractSimpleGraphBinaryCustomSerializer { @@ -21,8 +21,8 @@ public String getTypeName() { @Override protected CqlDuration readCustomValue( - final int valueLength, final ByteBuf buffer, final GraphBinaryReader context) - throws SerializationException { + final int valueLength, final Buffer buffer, final GraphBinaryReader context) + throws IOException { checkValueSize(GraphBinaryUtils.sizeOfDuration(), valueLength); return CqlDuration.newInstance( context.readValue(buffer, Integer.class, false), @@ -31,8 +31,8 @@ protected CqlDuration readCustomValue( } @Override - protected void writeCustomValue(CqlDuration value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeCustomValue(CqlDuration value, Buffer buffer, GraphBinaryWriter context) + throws IOException { context.writeValue(GraphBinaryUtils.sizeOfDuration(), buffer, false); context.writeValue(value.getMonths(), buffer, false); context.writeValue(value.getDays(), buffer, false); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java index 0c02a135f05..420bec9a8f2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java @@ -8,10 +8,10 @@ import com.datastax.dse.driver.api.core.data.geometry.Point; import com.datastax.dse.driver.internal.core.data.geometry.Distance; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import 
org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public class DistanceSerializer extends AbstractSimpleGraphBinaryCustomSerializer { @Override @@ -20,16 +20,16 @@ public String getTypeName() { } @Override - protected Distance readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) - throws SerializationException { + protected Distance readCustomValue(int valueLength, Buffer buffer, GraphBinaryReader context) + throws IOException { Point p = context.readValue(buffer, Point.class, false); checkValueSize(GraphBinaryUtils.sizeOfDistance(p), valueLength); return new Distance(p, context.readValue(buffer, Double.class, false)); } @Override - protected void writeCustomValue(Distance value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeCustomValue(Distance value, Buffer buffer, GraphBinaryWriter context) + throws IOException { context.writeValue(GraphBinaryUtils.sizeOfDistance(value.getCenter()), buffer, false); context.writeValue(value.getCenter(), buffer, false); context.writeValue(value.getRadius(), buffer, false); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java index f3afe4e23f3..0d17308e8b6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java @@ -7,10 +7,10 @@ package com.datastax.dse.driver.internal.core.graph.binary; import com.datastax.dse.driver.internal.core.graph.EditDistance; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import 
org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public class EditDistanceSerializer extends AbstractSimpleGraphBinaryCustomSerializer { @@ -20,8 +20,8 @@ public String getTypeName() { } @Override - protected EditDistance readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) - throws SerializationException { + protected EditDistance readCustomValue(int valueLength, Buffer buffer, GraphBinaryReader context) + throws IOException { int distance = context.readValue(buffer, Integer.class, false); String query = context.readValue(buffer, String.class, false); checkValueSize(GraphBinaryUtils.sizeOfEditDistance(query), valueLength); @@ -30,8 +30,8 @@ protected EditDistance readCustomValue(int valueLength, ByteBuf buffer, GraphBin } @Override - protected void writeCustomValue(EditDistance value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeCustomValue(EditDistance value, Buffer buffer, GraphBinaryWriter context) + throws IOException { context.writeValue(GraphBinaryUtils.sizeOfEditDistance(value.query), buffer, false); context.writeValue(value.distance, buffer, false); context.writeValue(value.query, buffer, false); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java index 302ba40f3f9..d7428ab3f5f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java @@ -7,26 +7,26 @@ package com.datastax.dse.driver.internal.core.graph.binary; import 
com.datastax.dse.driver.api.core.data.geometry.Geometry; -import com.datastax.dse.driver.internal.core.graph.ByteBufUtil; -import io.netty.buffer.ByteBuf; +import com.datastax.dse.driver.internal.core.graph.TinkerpopBufferUtil; +import java.io.IOException; import java.nio.ByteBuffer; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public abstract class GeometrySerializer extends AbstractSimpleGraphBinaryCustomSerializer { public abstract T fromWellKnownBinary(ByteBuffer buffer); @Override - protected T readCustomValue(int valueLength, ByteBuf buffer, GraphBinaryReader context) - throws SerializationException { - return fromWellKnownBinary(ByteBufUtil.readBytes(buffer, valueLength)); + protected T readCustomValue(int valueLength, Buffer buffer, GraphBinaryReader context) + throws IOException { + return fromWellKnownBinary(TinkerpopBufferUtil.readBytes(buffer, valueLength)); } @Override - protected void writeCustomValue(T value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeCustomValue(T value, Buffer buffer, GraphBinaryWriter context) + throws IOException { ByteBuffer bb = value.asWellKnownBinary(); // writing the {value_length} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java index eca38a7c65e..828e90a3acd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java @@ -12,20 +12,23 @@ import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.data.geometry.Distance; import com.datastax.dse.driver.internal.core.graph.EditDistance; +import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; import com.datastax.oss.driver.api.core.data.CqlDuration; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import io.netty.buffer.ByteBuf; -import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.UnpooledByteBufAllocator; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.BufferFactory; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.javatuples.Pair; public class GraphBinaryModule { public static final UnpooledByteBufAllocator ALLOCATOR = new UnpooledByteBufAllocator(false); + private static final BufferFactory FACTORY = new DseNettyBufferFactory(); static final String GRAPH_BINARY_POINT_TYPE_NAME = "driver.dse.geometry.Point"; static final String GRAPH_BINARY_LINESTRING_TYPE_NAME = "driver.dse.geometry.LineString"; @@ -61,20 +64,15 @@ public static TypeSerializerRegistry createDseTypeSerializerRegistry( } @SuppressWarnings("TypeParameterUnusedInFormals") - public T deserialize(final ByteBuf buffer) throws 
SerializationException { + public T deserialize(final Buffer buffer) throws IOException { return reader.read(buffer); } - public ByteBuf serialize(final T value) throws SerializationException { - return serialize(value, ALLOCATOR); + public Buffer serialize(final T value) throws IOException { + return serialize(value, FACTORY.create(ALLOCATOR.heapBuffer())); } - public ByteBuf serialize(final T value, final ByteBufAllocator allocator) - throws SerializationException { - return serialize(value, allocator.heapBuffer()); - } - - public ByteBuf serialize(final T value, final ByteBuf buffer) throws SerializationException { + public Buffer serialize(final T value, final Buffer buffer) throws IOException { try { writer.write(value, buffer); return buffer; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java index 3d1d8b4855f..0afde1eca26 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java @@ -6,10 +6,10 @@ */ package com.datastax.dse.driver.internal.core.graph.binary; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; import org.javatuples.Pair; public class PairSerializer extends AbstractDynamicGraphBinaryCustomSerializer { @@ -20,14 +20,14 @@ public String getTypeName() { } @Override - protected Pair readDynamicCustomValue(ByteBuf buffer, 
GraphBinaryReader context) - throws SerializationException { + protected Pair readDynamicCustomValue(Buffer buffer, GraphBinaryReader context) + throws IOException { return new Pair<>(context.read(buffer), context.read(buffer)); } @Override - protected void writeDynamicCustomValue(Pair value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + protected void writeDynamicCustomValue(Pair value, Buffer buffer, GraphBinaryWriter context) + throws IOException { context.write(value.getValue0(), buffer); context.write(value.getValue1(), buffer); } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java index 368bd5baac3..2b5745d8ac5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java @@ -10,10 +10,10 @@ import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.TupleType; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public class TupleValueSerializer extends AbstractDynamicGraphBinaryCustomSerializer { @@ -29,7 +29,8 @@ public String getTypeName() { } @Override - public TupleValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) { + public TupleValue readDynamicCustomValue(Buffer buffer, 
GraphBinaryReader context) + throws IOException { // read the type first DataType type = ComplexTypeSerializerUtil.decodeTypeDefinition(buffer, driverContext); @@ -45,8 +46,8 @@ public TupleValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader conte } @Override - public void writeDynamicCustomValue(TupleValue value, ByteBuf buffer, GraphBinaryWriter context) - throws SerializationException { + public void writeDynamicCustomValue(TupleValue value, Buffer buffer, GraphBinaryWriter context) + throws IOException { // write type first in native protocol ComplexTypeSerializerUtil.encodeTypeDefinition(value.getType(), buffer, driverContext); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java index dc97cb19aa4..925af2fc27c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java @@ -10,9 +10,10 @@ import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.UserDefinedType; -import io.netty.buffer.ByteBuf; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; +import java.io.IOException; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; public class UdtValueSerializer extends AbstractDynamicGraphBinaryCustomSerializer { private final DseDriverContext driverContext; @@ -27,7 +28,8 @@ public String getTypeName() { } @Override - public UdtValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context) { + public UdtValue 
readDynamicCustomValue(Buffer buffer, GraphBinaryReader context) + throws IOException { // read type definition first DataType driverType = ComplexTypeSerializerUtil.decodeTypeDefinition(buffer, driverContext); @@ -43,7 +45,8 @@ public UdtValue readDynamicCustomValue(ByteBuf buffer, GraphBinaryReader context } @Override - public void writeDynamicCustomValue(UdtValue value, ByteBuf buffer, GraphBinaryWriter context) { + public void writeDynamicCustomValue(UdtValue value, Buffer buffer, GraphBinaryWriter context) + throws IOException { // write type first in native protocol format ComplexTypeSerializerUtil.encodeTypeDefinition(value.getType(), buffer, driverContext); // write value after diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java new file mode 100644 index 00000000000..876a9c3eefc --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java @@ -0,0 +1,268 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package com.datastax.dse.driver.internal.core.graph.binary.buffer; + +import io.netty.buffer.ByteBuf; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; + +/** + * Internal impl of Tinkerpop Buffers. We implement an internal type here to allow for this class to + * use shaded Netty types (without bringing all of Tinkerpop into the shaded JAR). The impl is based + * on the initial impl of {@link NettyBuffer} but we don't guarantee that this class will mirror + * changes to that class over time. + */ +final class DseNettyBuffer implements Buffer { + private final ByteBuf buffer; + + /** + * Creates a new instance. + * + * @param buffer The buffer to wrap. + */ + DseNettyBuffer(ByteBuf buffer) { + if (buffer == null) { + throw new IllegalArgumentException("buffer can't be null"); + } + + this.buffer = buffer; + } + + @Override + public int readableBytes() { + return this.buffer.readableBytes(); + } + + @Override + public int readerIndex() { + return this.buffer.readerIndex(); + } + + @Override + public Buffer readerIndex(final int readerIndex) { + this.buffer.readerIndex(readerIndex); + return this; + } + + @Override + public int writerIndex() { + return this.buffer.writerIndex(); + } + + @Override + public Buffer writerIndex(final int writerIndex) { + this.buffer.writerIndex(writerIndex); + return this; + } + + @Override + public Buffer markWriterIndex() { + this.buffer.markWriterIndex(); + return this; + } + + @Override + public Buffer resetWriterIndex() { + this.buffer.resetWriterIndex(); + return this; + } + + @Override + public int capacity() { + return this.buffer.capacity(); + } + + @Override + public boolean isDirect() { + return this.buffer.isDirect(); + } + + @Override + public boolean readBoolean() { + return this.buffer.readBoolean(); + } + + @Override + public byte readByte() { + return this.buffer.readByte(); + } + + @Override + public short readShort() { 
+ return this.buffer.readShort(); + } + + @Override + public int readInt() { + return this.buffer.readInt(); + } + + @Override + public long readLong() { + return this.buffer.readLong(); + } + + @Override + public float readFloat() { + return this.buffer.readFloat(); + } + + @Override + public double readDouble() { + return this.buffer.readDouble(); + } + + @Override + public Buffer readBytes(final byte[] destination) { + this.buffer.readBytes(destination); + return this; + } + + @Override + public Buffer readBytes(final byte[] destination, final int dstIndex, final int length) { + this.buffer.readBytes(destination, dstIndex, length); + return this; + } + + @Override + public Buffer readBytes(final ByteBuffer dst) { + this.buffer.readBytes(dst); + return this; + } + + @Override + public Buffer readBytes(final OutputStream out, final int length) throws IOException { + this.buffer.readBytes(out, length); + return this; + } + + @Override + public Buffer writeBoolean(final boolean value) { + this.buffer.writeBoolean(value); + return this; + } + + @Override + public Buffer writeByte(final int value) { + this.buffer.writeByte(value); + return this; + } + + @Override + public Buffer writeShort(final int value) { + this.buffer.writeShort(value); + return this; + } + + @Override + public Buffer writeInt(final int value) { + this.buffer.writeInt(value); + return this; + } + + @Override + public Buffer writeLong(final long value) { + this.buffer.writeLong(value); + return this; + } + + @Override + public Buffer writeFloat(final float value) { + this.buffer.writeFloat(value); + return this; + } + + @Override + public Buffer writeDouble(final double value) { + this.buffer.writeDouble(value); + return this; + } + + @Override + public Buffer writeBytes(final byte[] src) { + this.buffer.writeBytes(src); + return this; + } + + @Override + public Buffer writeBytes(final ByteBuffer src) { + this.buffer.writeBytes(src); + return this; + } + + @Override + public Buffer 
writeBytes(byte[] src, final int srcIndex, final int length) { + this.buffer.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public boolean release() { + return this.buffer.release(); + } + + @Override + public Buffer retain() { + this.buffer.retain(); + return this; + } + + @Override + public int referenceCount() { + return this.buffer.refCnt(); + } + + @Override + public ByteBuffer[] nioBuffers() { + return this.buffer.nioBuffers(); + } + + @Override + public ByteBuffer nioBuffer() { + return this.buffer.nioBuffer(); + } + + @Override + public ByteBuffer nioBuffer(final int index, final int length) { + return this.buffer.nioBuffer(index, length); + } + + @Override + public ByteBuffer[] nioBuffers(final int index, final int length) { + return this.buffer.nioBuffers(index, length); + } + + @Override + public int nioBufferCount() { + return this.buffer.nioBufferCount(); + } + + @Override + public Buffer getBytes(final int index, final byte[] dst) { + this.buffer.getBytes(index, dst); + return this; + } + + /** Returns the underlying buffer. */ + public ByteBuf getUnderlyingBuffer() { + return this.buffer; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java new file mode 100644 index 00000000000..c2abf21aedf --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package com.datastax.dse.driver.internal.core.graph.binary.buffer; + +import io.netty.buffer.*; +import java.nio.ByteBuffer; +import java.util.function.Supplier; +import org.apache.tinkerpop.gremlin.driver.ser.NettyBufferFactory; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.BufferFactory; + +/** + * Internal BufferFactory impl for creation of Tinkerpop buffers. We implement an internal type here + * to allow for this class to use shaded Netty types (without bringing all of Tinkerpop into the + * shaded JAR). The impl is based on the initial impl of {@link NettyBufferFactory} but we don't + * guarantee that this class will mirror changes to that class over time. 
+ */ +public class DseNettyBufferFactory implements BufferFactory { + + private static ByteBufAllocator DEFAULT_ALLOCATOR = new UnpooledByteBufAllocator(false); + + private final ByteBufAllocator allocator; + + public DseNettyBufferFactory() { + this.allocator = DEFAULT_ALLOCATOR; + } + + public DseNettyBufferFactory(ByteBufAllocator allocator) { + this.allocator = allocator; + } + + @Override + public Buffer create(final ByteBuf value) { + return new DseNettyBuffer(value); + } + + @Override + public Buffer wrap(final ByteBuffer value) { + return create(Unpooled.wrappedBuffer(value)); + } + + public Buffer heap() { + return create(allocator.heapBuffer()); + } + + public Buffer heap(int initialSize) { + return create(allocator.heapBuffer(initialSize)); + } + + public Buffer heap(int initialSize, int maxSize) { + return create(allocator.heapBuffer(initialSize, maxSize)); + } + + public Buffer io() { + return create(allocator.ioBuffer()); + } + + public Buffer io(int initialSize) { + return create(allocator.ioBuffer(initialSize)); + } + + public Buffer io(int initialSize, int maxSize) { + return create(allocator.ioBuffer(initialSize, maxSize)); + } + + public Buffer direct() { + return create(allocator.directBuffer()); + } + + public Buffer direct(int initialSize) { + return create(allocator.directBuffer(initialSize)); + } + + public Buffer direct(int initialSize, int maxSize) { + return create(allocator.directBuffer(initialSize, maxSize)); + } + + public Buffer composite(ByteBuf... components) { + + CompositeByteBuf buff = allocator.compositeBuffer(components.length); + buff.addComponents(components); + return create(buff); + } + + public Buffer composite(Buffer... 
components) { + ByteBuf[] nettyBufs = new ByteBuf[components.length]; + for (int i = 0; i < components.length; ++i) { + if (!(components[i] instanceof DseNettyBuffer)) { + throw new IllegalArgumentException("Can only concatenate DseNettyBuffer instances"); + } + nettyBufs[i] = ((DseNettyBuffer) components[i]).getUnderlyingBuffer(); + } + return composite(nettyBufs); + } + + public Buffer withBytes(int... bytes) { + return withBytes(this::heap, bytes); + } + + public Buffer withBytes(Supplier supplier, int... bytes) { + Buffer buff = supplier.get(); + for (int val : bytes) { + buff.writeByte(val); + } + return buff; + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java new file mode 100644 index 00000000000..75b98815954 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java @@ -0,0 +1,255 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.protocol; + +import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; +import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.protocol.internal.PrimitiveCodec; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; + +/** + * Minimal implementation of {@link PrimitiveCodec} for Tinkerpop {@link Buffer} instances. + * + *

      This approach represents a temporary design compromise. PrimitiveCodec is primarily used for + * handling data directly from Netty, a task satisfied by {@link ByteBufPrimitiveCodec}. But + * PrimitiveCodec is also used to implement graph serialization for some of the "dynamic" types + * (notably UDTs and tuples). Since we're converting graph serialization to use the new Tinkerpop + * Buffer API we need just enough of a PrimitiveCodec impl to satisfy the needs of graph + * serialization... and nothing more. + * + *

      A more explicit approach would be to change graph serialization to use a different interface, + * some kind of subset of PrimitiveCodec.... and then make PrimitiveCodec extend this interface. + * This is left as future work for now since it involves changes to the native-protocol lib(s). + */ +public class TinkerpopBufferPrimitiveCodec implements PrimitiveCodec { + + private final DseNettyBufferFactory factory; + + public TinkerpopBufferPrimitiveCodec(DseNettyBufferFactory factory) { + this.factory = factory; + } + + @Override + public Buffer allocate(int size) { + // Note: we use io() here to match up to what ByteBufPrimitiveCodec does, but be warned that + // ByteBufs created in this way don't support the array() method used elsewhere in this codec + // (readString() specifically). As such usage of this method to create Buffer instances is + // discouraged; we have a factory for that. + return this.factory.io(size, size); + } + + @Override + public void release(Buffer toRelease) { + toRelease.release(); + } + + @Override + public int sizeOf(Buffer toMeasure) { + return toMeasure.readableBytes(); + } + + // TODO + @Override + public Buffer concat(Buffer left, Buffer right) { + boolean leftReadable = left.readableBytes() > 0; + boolean rightReadable = right.readableBytes() > 0; + if (!(leftReadable || rightReadable)) { + return factory.heap(); + } + if (!leftReadable) { + return right; + } + if (!rightReadable) { + return left; + } + Buffer rv = factory.composite(left, right); + // c.readerIndex() is 0, which is the first readable byte in left + rv.writerIndex( + left.writerIndex() - left.readerIndex() + right.writerIndex() - right.readerIndex()); + return rv; + } + + @Override + public byte readByte(Buffer source) { + return source.readByte(); + } + + @Override + public int readInt(Buffer source) { + return source.readInt(); + } + + @Override + public InetAddress readInetAddr(Buffer source) { + int length = readByte(source) & 0xFF; + byte[] bytes = new 
byte[length]; + source.readBytes(bytes); + return newInetAddress(bytes); + } + + @Override + public long readLong(Buffer source) { + return source.readLong(); + } + + @Override + public int readUnsignedShort(Buffer source) { + return source.readShort() & 0xFFFF; + } + + @Override + public ByteBuffer readBytes(Buffer source) { + int length = readInt(source); + if (length < 0) return null; + return source.nioBuffer(source.readerIndex(), length); + } + + @Override + public byte[] readShortBytes(Buffer source) { + try { + int length = readUnsignedShort(source); + byte[] bytes = new byte[length]; + source.readBytes(bytes); + return bytes; + } catch (IndexOutOfBoundsException e) { + throw new IllegalArgumentException( + "Not enough bytes to read a byte array preceded by its 2 bytes length"); + } + } + + // Copy of PrimitiveCodec impl + @Override + public String readString(Buffer source) { + int length = readUnsignedShort(source); + return readString(source, length); + } + + @Override + public String readLongString(Buffer source) { + int length = readInt(source); + return readString(source, length); + } + + @Override + public void writeByte(byte b, Buffer dest) { + dest.writeByte(b); + } + + @Override + public void writeInt(int i, Buffer dest) { + dest.writeInt(i); + } + + @Override + public void writeInetAddr(InetAddress address, Buffer dest) { + byte[] bytes = address.getAddress(); + writeByte((byte) bytes.length, dest); + dest.writeBytes(bytes); + } + + @Override + public void writeLong(long l, Buffer dest) { + dest.writeLong(l); + } + + @Override + public void writeUnsignedShort(int i, Buffer dest) { + dest.writeShort(i); + } + + // Copy of PrimitiveCodec impl + @Override + public void writeString(String s, Buffer dest) { + + byte[] bytes = s.getBytes(Charsets.UTF_8); + writeUnsignedShort(bytes.length, dest); + dest.writeBytes(bytes); + } + + @Override + public void writeLongString(String s, Buffer dest) { + byte[] bytes = s.getBytes(Charsets.UTF_8); + 
writeInt(bytes.length, dest); + dest.writeBytes(bytes); + } + + @Override + public void writeBytes(ByteBuffer bytes, Buffer dest) { + if (bytes == null) { + writeInt(-1, dest); + } else { + writeInt(bytes.remaining(), dest); + dest.writeBytes(bytes.duplicate()); + } + } + + @Override + public void writeBytes(byte[] bytes, Buffer dest) { + if (bytes == null) { + writeInt(-1, dest); + } else { + writeInt(bytes.length, dest); + dest.writeBytes(bytes); + } + } + + @Override + public void writeShortBytes(byte[] bytes, Buffer dest) { + writeUnsignedShort(bytes.length, dest); + dest.writeBytes(bytes); + } + + // Based on PrimitiveCodec impl, although that method leverages some + // Netty built-ins which we have to do manually here + private static String readString(Buffer buff, int length) { + try { + + // Basically what io.netty.buffer.ByteBufUtil.decodeString() does minus some extra + // ByteBuf-specific ops + int offset; + byte[] bytes; + ByteBuffer byteBuff = buff.nioBuffer(); + if (byteBuff.hasArray()) { + + bytes = byteBuff.array(); + offset = byteBuff.arrayOffset(); + } else { + + bytes = new byte[length]; + byteBuff.get(bytes, 0, length); + offset = 0; + } + + String str = new String(bytes, offset, length, Charsets.UTF_8); + + // Ops against the NIO buffers don't impact the read/write indexes for he Buffer + // itself so we have to do that manually + buff.readerIndex(buff.readerIndex() + length); + return str; + } catch (IndexOutOfBoundsException e) { + throw new IllegalArgumentException( + "Not enough bytes to read an UTF-8 serialized string of size " + length, e); + } + } + + // TODO: Code below copied directly from ByteBufPrimitiveCodec, probably want to consolidate this + // somewhere + private static InetAddress newInetAddress(byte[] bytes) { + try { + return InetAddress.getByAddress(bytes); + } catch (UnknownHostException e) { + // Per the Javadoc, the only way this can happen is if the length is illegal + throw new IllegalArgumentException( + 
String.format("Invalid address length: %d (%s)", bytes.length, Arrays.toString(bytes))); + } + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/Assertions.java b/core/src/test/java/com/datastax/dse/driver/Assertions.java new file mode 100644 index 00000000000..a7a35a17638 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/Assertions.java @@ -0,0 +1,15 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import org.apache.tinkerpop.gremlin.structure.io.Buffer; + +public class Assertions extends org.assertj.core.api.Assertions { + public static TinkerpopBufferAssert assertThat(Buffer actual) { + return new TinkerpopBufferAssert(actual); + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java b/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java new file mode 100644 index 00000000000..0728e6f7cbd --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.protocol.internal.util.Bytes; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.assertj.core.api.AbstractAssert; + +public class TinkerpopBufferAssert extends AbstractAssert { + public TinkerpopBufferAssert(Buffer actual) { + super(actual, TinkerpopBufferAssert.class); + } + + public TinkerpopBufferAssert containsExactly(String hexString) { + + byte[] expectedBytes = Bytes.fromHexString(hexString).array(); + byte[] actualBytes = new byte[expectedBytes.length]; + actual.readBytes(actualBytes); + assertThat(actualBytes).containsExactly(expectedBytes); + assertThat(actual.readableBytes()).isEqualTo(0); + return this; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java index c4aa1fabd24..175a83d1102 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphNodeTest.java @@ -34,18 +34,18 @@ import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import io.netty.buffer.ByteBuf; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; import java.util.Map; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; import 
org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyPath; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedEdge; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedProperty; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; @@ -265,9 +265,9 @@ public void should_check_if_node_is_edge(GraphProtocol graphProtocol) throws IOE private GraphNode serdeAndCreateGraphNode(Object inputValue, GraphProtocol graphProtocol) throws IOException { if (graphProtocol.isGraphBinary()) { - ByteBuf nettyBuf = graphBinaryModule.serialize(new DefaultRemoteTraverser<>(inputValue, 0L)); - ByteBuffer nioBuffer = ByteBufUtil.toByteBuffer(nettyBuf); - nettyBuf.release(); + Buffer tinkerBuf = graphBinaryModule.serialize(new DefaultRemoteTraverser<>(inputValue, 0L)); + ByteBuffer nioBuffer = tinkerBuf.nioBuffer(); + tinkerBuf.release(); return new ObjectGraphNode( GraphConversions.createGraphBinaryGraphNode( ImmutableList.of(nioBuffer), graphBinaryModule) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index cf1aa9ec7a1..f699059eed1 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -17,9 +17,9 @@ import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; 
-import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.BIGINT; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.TEXT; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; @@ -65,7 +65,6 @@ import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import io.netty.buffer.ByteBuf; import java.io.IOException; import java.math.BigInteger; import java.nio.ByteBuffer; @@ -78,12 +77,13 @@ import java.util.Map; import java.util.Queue; import java.util.regex.Pattern; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.Before; @@ -226,9 +226,9 @@ private static ByteBuffer serialize( Object value, GraphProtocol graphProtocol, GraphBinaryModule graphBinaryModule) throws IOException { - ByteBuf nettyBuf = graphBinaryModule.serialize(value); - ByteBuffer nioBuffer = 
ByteBufUtil.toByteBuffer(nettyBuf); - nettyBuf.release(); + Buffer tinkerBuf = graphBinaryModule.serialize(value); + ByteBuffer nioBuffer = tinkerBuf.nioBuffer(); + tinkerBuf.release(); return graphProtocol.isGraphBinary() ? nioBuffer : GraphSONUtils.serializeToByteBuffer(value, graphProtocol); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java index dae67965844..cd2775339d0 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -35,7 +35,7 @@ import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import io.netty.buffer.ByteBuf; +import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.net.InetAddress; @@ -45,10 +45,10 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZoneOffset; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.driver.ser.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.driver.ser.binary.TypeSerializerRegistry; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -160,12 +160,12 @@ public static Object[][] datatypes() throws UnknownHostException { @Test 
@UseDataProvider("datatypes") - public void datatypesTest(Object value) throws SerializationException { + public void datatypesTest(Object value) throws IOException { verifySerDe(value); } @Test - public void complexUdtTests() throws SerializationException { + public void complexUdtTests() throws IOException { UserDefinedType type1 = new UserDefinedTypeBuilder("ks", "udt1").withField("a", INT).withField("b", TEXT).build(); verifySerDe(type1.newValue(1, "2")); @@ -211,7 +211,7 @@ public void complexUdtTests() throws SerializationException { } @Test - public void complexTypesAndGeoTests() throws SerializationException { + public void complexTypesAndGeoTests() throws IOException { TupleType tuple = tupleOf(DseDataTypes.POINT, DseDataTypes.LINE_STRING, DseDataTypes.POLYGON); tuple.attach(context); @@ -250,8 +250,8 @@ public void complexTypesAndGeoTests() throws SerializationException { } // TODO add predicate tests - private void verifySerDe(Object input) throws SerializationException { - ByteBuf result = graphBinaryModule.serialize(input); + private void verifySerDe(Object input) throws IOException { + Buffer result = graphBinaryModule.serialize(input); Object deserialized = graphBinaryModule.deserialize(result); result.release(); assertThat(deserialized).isEqualTo(input); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java new file mode 100644 index 00000000000..52368c72b83 --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java @@ -0,0 +1,354 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.protocol; + +import static com.datastax.dse.driver.Assertions.assertThat; + +import com.datastax.dse.driver.Assertions; +import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; +import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodecTest; +import com.datastax.oss.protocol.internal.util.Bytes; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.nio.ByteBuffer; +import java.util.function.Supplier; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; + +/** + * Note: like {@link ByteBufPrimitiveCodecTest} we don't test trivial methods that simply delegate + * to the underlying Buffer, nor default implementations inherited from {@link + * com.datastax.oss.protocol.internal.PrimitiveCodec}. 
+ */ +@RunWith(DataProviderRunner.class) +public class TinkerpopBufferPrimitiveCodecTest { + + private static final DseNettyBufferFactory factory = new DseNettyBufferFactory(); + private final TinkerpopBufferPrimitiveCodec codec = new TinkerpopBufferPrimitiveCodec(factory); + + @Rule public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void should_concatenate() { + Buffer left = factory.withBytes(0xca, 0xfe); + Buffer right = factory.withBytes(0xba, 0xbe); + assertThat(codec.concat(left, right)).containsExactly("0xcafebabe"); + } + + @Test + public void should_read_inet_v4() { + Buffer source = + factory.withBytes( + // length (as a byte) + 0x04, + // address + 0x7f, + 0x00, + 0x00, + 0x01, + // port (as an int) + 0x00, + 0x00, + 0x23, + 0x52); + InetSocketAddress inet = codec.readInet(source); + assertThat(inet.getAddress().getHostAddress()).isEqualTo("127.0.0.1"); + assertThat(inet.getPort()).isEqualTo(9042); + } + + @Test + public void should_read_inet_v6() { + Buffer lengthAndAddress = factory.heap(17); + lengthAndAddress.writeByte(16); + lengthAndAddress.writeLong(0); + lengthAndAddress.writeLong(1); + Buffer source = + codec.concat( + lengthAndAddress, + // port (as an int) + factory.withBytes(0x00, 0x00, 0x23, 0x52)); + InetSocketAddress inet = codec.readInet(source); + assertThat(inet.getAddress().getHostAddress()).isEqualTo("0:0:0:0:0:0:0:1"); + assertThat(inet.getPort()).isEqualTo(9042); + } + + @Test + public void should_fail_to_read_inet_if_length_invalid() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Invalid address length: 3 ([127, 0, 1])"); + Buffer source = + factory.withBytes( + // length (as a byte) + 0x03, + // address + 0x7f, + 0x00, + 0x01, + // port (as an int) + 0x00, + 0x00, + 0x23, + 0x52); + codec.readInet(source); + } + + @Test + public void should_read_inetaddr_v4() { + Buffer source = + factory.withBytes( + // length (as a byte) + 0x04, + // address 
+ 0x7f, + 0x00, + 0x00, + 0x01); + InetAddress inetAddr = codec.readInetAddr(source); + assertThat(inetAddr.getHostAddress()).isEqualTo("127.0.0.1"); + } + + @Test + public void should_read_inetaddr_v6() { + Buffer source = factory.heap(17); + source.writeByte(16); + source.writeLong(0); + source.writeLong(1); + InetAddress inetAddr = codec.readInetAddr(source); + assertThat(inetAddr.getHostAddress()).isEqualTo("0:0:0:0:0:0:0:1"); + } + + @Test + public void should_fail_to_read_inetaddr_if_length_invalid() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Invalid address length: 3 ([127, 0, 1])"); + + Buffer source = + factory.withBytes( + // length (as a byte) + 0x03, + // address + 0x7f, + 0x00, + 0x01); + codec.readInetAddr(source); + } + + @Test + public void should_read_bytes() { + Buffer source = + factory.withBytes( + // length (as an int) + 0x00, + 0x00, + 0x00, + 0x04, + // contents + 0xca, + 0xfe, + 0xba, + 0xbe); + ByteBuffer bytes = codec.readBytes(source); + assertThat(Bytes.toHexString(bytes)).isEqualTo("0xcafebabe"); + } + + @Test + public void should_read_null_bytes() { + Buffer source = factory.withBytes(0xFF, 0xFF, 0xFF, 0xFF); // -1 (as an int) + assertThat(codec.readBytes(source)).isNull(); + } + + @Test + public void should_read_short_bytes() { + Buffer source = + factory.withBytes( + // length (as an unsigned short) + 0x00, + 0x04, + // contents + 0xca, + 0xfe, + 0xba, + 0xbe); + assertThat(Bytes.toHexString(codec.readShortBytes(source))).isEqualTo("0xcafebabe"); + } + + @DataProvider + public static Object[][] bufferTypes() { + return new Object[][] { + {(Supplier) factory::heap}, + {(Supplier) factory::io}, + {(Supplier) factory::direct} + }; + } + + @Test + @UseDataProvider("bufferTypes") + public void should_read_string(Supplier supplier) { + Buffer source = + factory.withBytes( + supplier, + // length (as an unsigned short) + 0x00, + 0x05, + // UTF-8 contents + 0x68, + 0x65, + 0x6c, + 0x6c, + 
0x6f); + assertThat(codec.readString(source)).isEqualTo("hello"); + } + + @Test + public void should_fail_to_read_string_if_not_enough_characters() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage( + "Not enough bytes to read an UTF-8 serialized string of size 4"); + + Buffer source = factory.heap(); + source.writeShort(4); + + codec.readString(source); + } + + @Test + public void should_read_long_string() { + Buffer source = + factory.withBytes( + // length (as an int) + 0x00, + 0x00, + 0x00, + 0x05, + // UTF-8 contents + 0x68, + 0x65, + 0x6c, + 0x6c, + 0x6f); + assertThat(codec.readLongString(source)).isEqualTo("hello"); + } + + @Test + public void should_fail_to_read_long_string_if_not_enough_characters() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage( + "Not enough bytes to read an UTF-8 serialized string of size 4"); + Buffer source = factory.heap(4, 4); + source.writeInt(4); + + codec.readLongString(source); + } + + @Test + public void should_write_inet_v4() throws Exception { + Buffer dest = factory.heap(1 + 4 + 4); + InetSocketAddress inet = new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 9042); + codec.writeInet(inet, dest); + assertThat(dest) + .containsExactly( + "0x04" // size as a byte + + "7f000001" // address + + "00002352" // port + ); + } + + @Test + public void should_write_inet_v6() throws Exception { + Buffer dest = factory.heap(1 + 16 + 4); + InetSocketAddress inet = new InetSocketAddress(InetAddress.getByName("::1"), 9042); + codec.writeInet(inet, dest); + assertThat(dest) + .containsExactly( + "0x10" // size as a byte + + "00000000000000000000000000000001" // address + + "00002352" // port + ); + } + + @Test + public void should_write_inetaddr_v4() throws Exception { + Buffer dest = factory.heap(1 + 4); + InetAddress inetAddr = InetAddress.getByName("127.0.0.1"); + codec.writeInetAddr(inetAddr, dest); + assertThat(dest) + 
.containsExactly( + "0x04" // size as a byte + + "7f000001" // address + ); + } + + @Test + public void should_write_inetaddr_v6() throws Exception { + Buffer dest = factory.heap(1 + 16); + InetAddress inetAddr = InetAddress.getByName("::1"); + codec.writeInetAddr(inetAddr, dest); + Assertions.assertThat(dest) + .containsExactly( + "0x10" // size as a byte + + "00000000000000000000000000000001" // address + ); + } + + @Test + public void should_write_string() { + Buffer dest = factory.heap(); + codec.writeString("hello", dest); + assertThat(dest) + .containsExactly( + "0x0005" // size as an unsigned short + + "68656c6c6f" // UTF-8 contents + ); + } + + @Test + public void should_write_long_string() { + Buffer dest = factory.heap(9); + codec.writeLongString("hello", dest); + assertThat(dest) + .containsExactly( + "0x00000005" + + // size as an int + "68656c6c6f" // UTF-8 contents + ); + } + + @Test + public void should_write_bytes() { + Buffer dest = factory.heap(8); + codec.writeBytes(Bytes.fromHexString("0xcafebabe"), dest); + assertThat(dest) + .containsExactly( + "0x00000004" + + // size as an int + "cafebabe"); + } + + @Test + public void should_write_short_bytes() { + Buffer dest = factory.heap(6); + codec.writeShortBytes(new byte[] {(byte) 0xca, (byte) 0xfe, (byte) 0xba, (byte) 0xbe}, dest); + assertThat(dest) + .containsExactly( + "0x0004" + + // size as an unsigned short + "cafebabe"); + } + + @Test + public void should_write_null_bytes() { + Buffer dest = factory.heap(4); + codec.writeBytes((ByteBuffer) null, dest); + assertThat(dest).containsExactly("0xFFFFFFFF"); + } +} diff --git a/pom.xml b/pom.xml index 8ff1ba2cb4a..c420c62c804 100644 --- a/pom.xml +++ b/pom.xml @@ -47,8 +47,8 @@ 4.0.5 4.1.45.Final 1.2.1 - 3.4.4-20190930-6c997860 - 3.4.4 + 3.4.5-SNAPSHOT + 3.4.5 1.7.26 1.2.1 3.4.3-20190731-199be4b5 @@ -596,6 +596,8 @@ limitations under the License.]]> **/src/main/config/ide/** + 
src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java + src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java SLASHSTAR_STYLE From c5edf81831a1f48a035da0df333c50cc9397018d Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 14 Nov 2019 17:29:20 -0600 Subject: [PATCH 312/979] Rebase on latest 2.x --- core-shaded/pom.xml | 14 ++-- core/pom.xml | 11 +++- .../api/core/config/DseDriverOption.java | 5 ++ .../context/DseStartupOptionsBuilderTest.java | 29 ++++++-- .../binary/GraphBinaryDataTypesTest.java | 11 +++- integration-tests/pom.xml | 2 + .../oss/driver/osgi/OsgiShadedIT.java | 5 ++ .../driver/osgi/support/OsgiGraphTests.java | 66 ++++++++++++------- mapper-processor/pom.xml | 2 +- pom.xml | 3 + 10 files changed, 110 insertions(+), 38 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 9744f9e3cf1..92b82cb4f54 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -104,6 +104,10 @@ org.apache.tinkerpop tinkergraph-gremlin + + org.javatuples + javatuples + org.reactivestreams reactive-streams @@ -303,11 +307,11 @@ Note: dependencies marked as optional are by default included with optional resolution in the manifest; we only need to manually set the resolution to optional for dependencies declared as non-optional in the pom files. 
- -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, - !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*, * + --> + org.reactivestreams.*;resolution:=optional, + org.apache.tinkerpop.*;resolution:=optional, + org.javatuples.*;resolution:=optional, + * jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, * + --> + com.esri.core.geometry.*;resolution:=optional, + org.reactivestreams.*;resolution:=optional, + org.apache.tinkerpop.*;resolution:=optional, + org.javatuples.*;resolution:=optional, + * com.datastax.oss.driver.*.core.*, com.datastax.dse.driver.*.core.* diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index f6a8b94236d..e13444356cc 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -174,6 +174,11 @@ public enum DseDriverOption implements DriverOption { */ MONITOR_REPORTING_ENABLED("advanced.monitor-reporting.enabled"), + /** + * Whether to enable paging for Graph queries. + * + *

      Value type: boolean + */ GRAPH_PAGING_ENABLED("advanced.graph.paging-enabled"), ; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index d2cdd313369..a15324e8c95 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -46,7 +46,7 @@ public class DseStartupOptionsBuilderTest { private DefaultDriverContext driverContext; - // Mocks for instantiating the default driver context + // Mocks for instantiating the DSE driver context @Mock private DriverConfigLoader configLoader; @Mock private DriverConfig driverConfig; @Mock private DriverExecutionProfile defaultProfile; @@ -82,7 +82,9 @@ private void assertDefaultStartupOptions(Startup startup) { } @Test - public void should_build_minimal_startup_options() { + public void should_build_startup_options_with_no_compression_if_undefined() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); buildContext(null, null, null); Startup startup = new Startup(driverContext.getStartupOptions()); assertThat(startup.options).doesNotContainKey(Startup.COMPRESSION_KEY); @@ -103,8 +105,19 @@ public void should_build_startup_options_with_compression(String compression) { assertDefaultStartupOptions(startup); } + @Test + public void should_fail_to_build_startup_options_with_invalid_compression() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("foobar"); + buildContext(null, null, null); + assertThatIllegalArgumentException() + .isThrownBy(() -> new Startup(driverContext.getStartupOptions())); + } + @Test public void should_build_startup_options_with_client_id() { + 
when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); UUID customClientId = Uuids.random(); buildContext(customClientId, null, null); Startup startup = new Startup(driverContext.getStartupOptions()); @@ -119,6 +132,8 @@ public void should_build_startup_options_with_client_id() { @Test public void should_build_startup_options_with_application_version_and_name() { + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); buildContext(null, "Custom_App_Name", "Custom_App_Version"); Startup startup = new Startup(driverContext.getStartupOptions()); // assert the app name and version are present @@ -133,9 +148,7 @@ public void should_build_startup_options_with_application_version_and_name() { @Test public void should_build_startup_options_with_all_options() { // mock config to specify "snappy" compression - Mockito.when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_COMPRESSION)) - .thenReturn(Boolean.TRUE); - Mockito.when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION)) + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) .thenReturn("snappy"); UUID customClientId = Uuids.random(); @@ -152,10 +165,12 @@ public void should_build_startup_options_with_all_options() { @Test public void should_use_configuration_when_no_programmatic_values_provided() { - Mockito.when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) + when(defaultProfile.getString(DseDriverOption.APPLICATION_NAME, null)) .thenReturn("Config_App_Name"); - Mockito.when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) + when(defaultProfile.getString(DseDriverOption.APPLICATION_VERSION, null)) .thenReturn("Config_App_Version"); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); buildContext(null, null, null); Startup startup = new 
Startup(driverContext.getStartupOptions()); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java index cd2775339d0..b730ff198fe 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -6,7 +6,16 @@ */ package com.datastax.dse.driver.internal.core.graph.binary; -import static com.datastax.oss.driver.api.core.type.DataTypes.*; +import static com.datastax.oss.driver.api.core.type.DataTypes.BIGINT; +import static com.datastax.oss.driver.api.core.type.DataTypes.DOUBLE; +import static com.datastax.oss.driver.api.core.type.DataTypes.DURATION; +import static com.datastax.oss.driver.api.core.type.DataTypes.FLOAT; +import static com.datastax.oss.driver.api.core.type.DataTypes.INT; +import static com.datastax.oss.driver.api.core.type.DataTypes.TEXT; +import static com.datastax.oss.driver.api.core.type.DataTypes.listOf; +import static com.datastax.oss.driver.api.core.type.DataTypes.mapOf; +import static com.datastax.oss.driver.api.core.type.DataTypes.setOf; +import static com.datastax.oss.driver.api.core.type.DataTypes.tupleOf; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 0d8a009c7ca..6bd552977ff 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -232,6 +232,7 @@ ${reactive-streams.version} ${rxjava.version} ${tinkerpop.version} + ${tinkerpop.osgi.version} @@ -261,6 +262,7 @@ ${reactive-streams.version} ${rxjava.version} ${tinkerpop.version} + ${tinkerpop.osgi.version} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java index 37a018d1b7e..557466f2626 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java @@ -55,6 +55,11 @@ public Option[] config() { BundleOptions.tinkerpopBundles()); } + @Override + public Version getDseVersion() { + return CCM_RULE.getDseVersion().orElseThrow(IllegalStateException::new); + } + @Test public void should_connect_and_query_shaded_simple() { connectAndQuerySimple(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java index 87129e651ed..86587c021a4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java @@ -23,31 +23,30 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphResultSet; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; +import com.datastax.dse.driver.internal.core.graph.GraphProtocol; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; import java.util.List; -import org.apache.tinkerpop.gremlin.structure.Property; import org.apache.tinkerpop.gremlin.structure.Vertex; public interface OsgiGraphTests extends OsgiSimpleTests { - String CREATE_GRAPH = "system.graph('%s').ifNotExists().create()"; - - String GRAPH_SCHEMA = - "schema.propertyKey('name').Text().ifNotExists().create();" - + 
"schema.vertexLabel('person').properties('name').ifNotExists().create();"; - - String GRAPH_DATA = "g.addV('person').property('name', 'alice').next();"; - - String ALLOW_SCANS = "schema.config().option('graph.allow_scan').set('true');"; - @Override default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder() - .withString(DseDriverOption.GRAPH_NAME, "test_osgi_graph"); + return DseSessionBuilderInstantiator.configLoaderBuilder() + .withString(DseDriverOption.GRAPH_NAME, "test_osgi_graph") + .withString( + DseDriverOption.GRAPH_SUB_PROTOCOL, + getDseVersion().compareTo(Version.parse("6.8.0")) >= 0 + ? GraphProtocol.GRAPH_BINARY_1_0.toInternalCode() + : GraphProtocol.GRAPHSON_2_0.toInternalCode()); } + Version getDseVersion(); + /** * Ensures a session can be established and a query using DSE Graph can be made when running in an * OSGi container. @@ -58,22 +57,43 @@ default void connectAndQueryGraph() { // Test that Graph + Tinkerpop is available session.execute( - ScriptGraphStatement.newInstance(String.format(CREATE_GRAPH, "test_osgi_graph")) + ScriptGraphStatement.newInstance("system.graph('test_osgi_graph').ifNotExists().create()") .setSystemQuery(true)); - session.execute(ScriptGraphStatement.newInstance(GRAPH_SCHEMA)); - session.execute(ScriptGraphStatement.newInstance(GRAPH_DATA)); - session.execute(ScriptGraphStatement.newInstance(ALLOW_SCANS)); + + if (getDseVersion().compareTo(Version.parse("6.8.0")) >= 0) { + setUpCoreEngineGraph(session); + } else { + setUpClassicEngineGraph(session); + } GraphResultSet resultSet = - session.execute( - FluentGraphStatement.newInstance(g.V().hasLabel("person").has("name", "alice"))); + session.execute(FluentGraphStatement.newInstance(g.V().hasLabel("person"))); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); Vertex actual = results.get(0).asVertex(); - assertThat(actual.properties("name")) - .toIterable() - .extracting(Property::value) - 
.contains("alice"); + assertThat(actual.label()).isEqualTo("person"); } } + + default void setUpCoreEngineGraph(DseSession session) { + session.execute( + ScriptGraphStatement.newInstance( + "schema.vertexLabel('person').ifNotExists().partitionBy('pk', Int)" + + ".clusterBy('cc', Int).property('name', Text).create();")); + session.execute( + ScriptGraphStatement.newInstance( + "g.addV('person').property('pk',0).property('cc',0).property('name', 'alice');")); + } + + default void setUpClassicEngineGraph(DseSession session) { + session.execute( + ScriptGraphStatement.newInstance( + "schema.propertyKey('name').Text().ifNotExists().create();" + + "schema.vertexLabel('person').properties('name').ifNotExists().create();")); + session.execute( + ScriptGraphStatement.newInstance("g.addV('person').property('name', 'alice').next();")); + session.execute( + ScriptGraphStatement.newInstance( + "schema.config().option('graph.allow_scan').set('true');")); + } } diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 8f41bbbb688..603e5ff8190 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -179,4 +179,4 @@ - + \ No newline at end of file diff --git a/pom.xml b/pom.xml index c420c62c804..e93615d3f2b 100644 --- a/pom.xml +++ b/pom.xml @@ -185,6 +185,9 @@ com.github.spotbugs spotbugs-annotations 3.1.12 + org.javatuples + javatuples + 1.2 com.squareup From a5d0df18fcdd7c527c534c51f3f2a0528f0042a5 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 15 Nov 2019 10:24:10 -0600 Subject: [PATCH 313/979] Remove duplicate "dist" from Travis yaml --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9887e9e2a03..50c05e76856 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,3 @@ -dist: trusty language: java dist: trusty sudo: false From 2cc488745b356bf9f383ded0797fa3d50fc7b698 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 20 Nov 2019 16:02:00 +0100 Subject: [PATCH 314/979] Surround Version.parse 
with Objects.requireNonNull --- .../internal/core/graph/GraphPagingSupportChecker.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java index bad78003336..cac2bcbd8e5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java @@ -24,7 +24,8 @@ public class GraphPagingSupportChecker { private static final Logger LOG = LoggerFactory.getLogger(GraphPagingSupportChecker.class); - static final Version GRAPH_PAGING_MIN_DSE_VERSION = Version.parse("6.8.0"); + static final Version GRAPH_PAGING_MIN_DSE_VERSION = + Objects.requireNonNull(Version.parse("6.8.0")); private volatile Boolean contextGraphPagingEnabled; @@ -68,8 +69,7 @@ private boolean isContextGraphPagingEnabled(InternalDriverContext context) { for (Node node : nodes) { Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); - if (dseVersion == null - || dseVersion.compareTo(Objects.requireNonNull(GRAPH_PAGING_MIN_DSE_VERSION)) < 0) { + if (dseVersion == null || dseVersion.compareTo(GRAPH_PAGING_MIN_DSE_VERSION) < 0) { contextGraphPagingEnabled = false; return contextGraphPagingEnabled; } From 8f2c75b1d0d10d00493128d4442aaf1dba0df847 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 21 Nov 2019 04:12:31 -0600 Subject: [PATCH 315/979] Temporarily add Apache repo for Tinkerpop Snapshot artifacts (#304) --- pom.xml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/pom.xml b/pom.xml index e93615d3f2b..3d1a11bedfd 100644 --- a/pom.xml +++ b/pom.xml @@ -82,6 +82,28 @@ false ${skipTests} + + + + + Apache + Apache Artifactory for Tinkerpop snapshot releases + 
https://repository.apache.org/content/groups/snapshots + + warn + false + never + + + warn + true + always + + + + From 89e63c886bcfab26cafcaf4e4f719e197cafcb14 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 25 Nov 2019 12:30:56 +0100 Subject: [PATCH 316/979] JAVA-2496: Revisit timeouts for paged graph queries (#302) --- changelog/README.md | 1 + .../api/core/config/DseDriverOption.java | 27 +++ .../ContinuousCqlRequestHandler.java | 44 +++- .../ContinuousRequestHandlerBase.java | 108 ++++++---- .../graph/ContinuousGraphRequestHandler.java | 61 ++++-- .../internal/core/graph/GraphConversions.java | 15 +- .../core/graph/GraphRequestHandler.java | 2 +- core/src/main/resources/reference.conf | 71 ++++++- .../ContinuousGraphRequestHandlerTest.java | 198 ++++++++++++++++++ .../core/graph/GraphRequestHandlerTest.java | 87 +------- .../graph/GraphRequestHandlerTestHarness.java | 28 ++- .../internal/core/graph/GraphTestUtils.java | 151 +++++++++++++ .../graph/{statement => }/GraphPagingIT.java | 183 ++++++++++++---- .../src/test/resources/application.conf | 6 +- 14 files changed, 789 insertions(+), 193 deletions(-) rename core/src/main/java/com/datastax/dse/driver/internal/core/{ => cql/continuous}/ContinuousRequestHandlerBase.java (94%) create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/{statement => }/GraphPagingIT.java (68%) diff --git a/changelog/README.md b/changelog/README.md index e4ac952d455..90750ecfd74 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2496: Revisit timeouts for paged graph queries - [bug] JAVA-2510: Fix GraphBinaryDataTypesTest Codec registry initialization - [bug] JAVA-2492: Parse edge metadata using internal identifiers - 
[improvement] JAVA-2282: Remove GraphSON3 support diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index e13444356cc..5dab679316f 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -180,6 +180,33 @@ public enum DseDriverOption implements DriverOption { *

      Value type: boolean */ GRAPH_PAGING_ENABLED("advanced.graph.paging-enabled"), + + /** + * The page size for Graph continuous paging. + * + *

      Value type: int + */ + GRAPH_CONTINUOUS_PAGING_PAGE_SIZE("advanced.graph.paging-options.page-size"), + + /** + * The maximum number of Graph continuous pages to return. + * + *

      Value type: int + */ + GRAPH_CONTINUOUS_PAGING_MAX_PAGES("advanced.graph.paging-options.max-pages"), + /** + * The maximum number of Graph continuous pages per second. + * + *

      Value type: int + */ + GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND( + "advanced.graph.paging-options.max-pages-per-second"), + /** + * The maximum number of Graph continuous pages that can be stored in the local queue. + * + *

      Value type: int + */ + GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES("advanced.graph.paging-options.max-enqueued-pages"), ; private final String path; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index ddedcbd0227..ba26ea3d8e6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -15,8 +15,8 @@ */ package com.datastax.dse.driver.internal.core.cql.continuous; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; -import com.datastax.dse.driver.internal.core.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.cql.DseConversions; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; @@ -38,6 +38,7 @@ import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; import java.nio.ByteBuffer; +import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Queue; @@ -52,6 +53,10 @@ public class ContinuousCqlRequestHandler implements ResponseCallback, GenericFutureListener>, Throttled { private final Message message; + private final Duration firstPageTimeout; + private final Duration otherPagesTimeout; + private final int maxEnqueuedPages; + private final int maxPages; ContinuousCqlRequestHandler( @NonNull Statement statement, @@ -59,8 +64,43 @@ public class ContinuousCqlRequestHandler @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix) { super(statement, session, context, sessionLogPrefix, ContinuousAsyncResultSet.class); - this.message = 
DseConversions.toContinuousPagingMessage(statement, executionProfile, context); + message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); throttler.register(this); + firstPageTimeout = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); + otherPagesTimeout = + executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); + maxEnqueuedPages = + executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + } + + @NonNull + @Override + protected Duration getGlobalTimeout() { + return Duration.ZERO; + } + + @NonNull + @Override + protected Duration getPageTimeout(int pageNumber) { + return pageNumber == 1 ? firstPageTimeout : otherPagesTimeout; + } + + @NonNull + @Override + protected Duration getReviseRequestTimeout() { + return otherPagesTimeout; + } + + @Override + protected int getMaxEnqueuedPages() { + return maxEnqueuedPages; + } + + @Override + protected int getMaxPages() { + return maxPages; } @NonNull diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java similarity index 94% rename from core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java rename to core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index 8d4f09e6878..7227a85cf66 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -4,14 +4,13 @@ * This software can be used solely with DataStax Enterprise. 
Please consult the license at * http://www.datastax.com/terms/datastax-dse-driver-license-terms */ -package com.datastax.dse.driver.internal.core; +package com.datastax.dse.driver.internal.core.cql.continuous; import com.datastax.dse.driver.api.core.DseProtocolVersion; -import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; import com.datastax.dse.driver.internal.core.cql.DseConversions; -import com.datastax.dse.driver.internal.core.cql.continuous.DefaultContinuousAsyncResultSet; import com.datastax.dse.protocol.internal.request.Revise; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.AllNodesFailedException; @@ -98,7 +97,7 @@ public abstract class ContinuousRequestHandlerBase< private static final Logger LOG = LoggerFactory.getLogger(ContinuousRequestHandlerBase.class); - private final String logPrefix; + protected final String logPrefix; protected final StatementT statement; protected final DefaultSession session; protected final InternalDriverContext context; @@ -106,12 +105,8 @@ public abstract class ContinuousRequestHandlerBase< private final Queue queryPlan; private final RetryPolicy retryPolicy; protected final RequestThrottler throttler; - private final int maxEnqueuedPages; - private final int maxPages; private final boolean protocolBackpressureAvailable; private final boolean isIdempotent; - private final Duration timeoutFirstPage; - private final Duration timeoutOtherPages; private final Timer timer; private final SessionMetricUpdater sessionMetricUpdater; @@ -125,7 +120,7 @@ public abstract class ContinuousRequestHandlerBase< // The page queue, storing responses that we have received and have not been consumed by the // client yet. 
@GuardedBy("lock") - private final Queue queue; + private Queue queue; // If the client requests a page and we can't serve it immediately (empty queue), then we create // this future and have the client wait on it. Otherwise this field is null. @@ -150,7 +145,7 @@ public abstract class ContinuousRequestHandlerBase< private volatile long startTimeNanos; // These are set when the first page arrives, and are never modified after. - protected volatile ColumnDefinitions columnDefinitions; + volatile ColumnDefinitions columnDefinitions; // These change over time as different nodes are tried; // they can only be null before the first request is sent. @@ -159,7 +154,8 @@ public abstract class ContinuousRequestHandlerBase< private volatile int streamId; // Set each time a new request/response cycle starts. private volatile long messageStartTimeNanos; - private volatile Timeout timeout; + private volatile Timeout pageTimeout; + private volatile Timeout globalTimeout; // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for // the first attempt, 1 for the first retry, etc.). @@ -199,23 +195,28 @@ public ContinuousRequestHandlerBase( idempotent == null ? executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) : idempotent; - this.timeoutFirstPage = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); - this.timeoutOtherPages = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); this.timer = context.getNettyOptions().getTimer(); - this.maxEnqueuedPages = - executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); - this.queue = new ArrayDeque<>(maxEnqueuedPages); - this.maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + this.protocolBackpressureAvailable = protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); - this.numPagesRequested = protocolBackpressureAvailable ? 
maxEnqueuedPages : 0; this.throttler = context.getRequestThrottler(); this.sessionMetricUpdater = session.getMetricUpdater(); this.startTimeNanos = System.nanoTime(); } + @NonNull + protected abstract Duration getGlobalTimeout(); + + @NonNull + protected abstract Duration getPageTimeout(int pageNumber); + + @NonNull + protected abstract Duration getReviseRequestTimeout(); + + protected abstract int getMaxEnqueuedPages(); + + protected abstract int getMaxPages(); + @NonNull protected abstract Message getMessage(); @@ -268,6 +269,13 @@ public void onThrottleReady(boolean wasDelayed) { System.nanoTime() - startTimeNanos, TimeUnit.NANOSECONDS); } + lock.lock(); + try { + this.queue = new ArrayDeque<>(getMaxEnqueuedPages()); + this.numPagesRequested = protocolBackpressureAvailable ? getMaxEnqueuedPages() : 0; + } finally { + lock.unlock(); + } sendRequest(null); } @@ -340,7 +348,8 @@ public void operationComplete(@NonNull Future future) { } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); - timeout = scheduleTimeout(1); + pageTimeout = schedulePageTimeout(1); + globalTimeout = scheduleGlobalTimeout(); } } @@ -355,7 +364,7 @@ public void operationComplete(@NonNull Future future) { @Override public void onResponse(@NonNull Frame response) { stopNodeMessageTimer(); - cancelTimeout(); + cancelTimeout(pageTimeout); lock.lock(); try { if (state < 0) { @@ -395,7 +404,7 @@ public void onResponse(@NonNull Frame response) { */ @Override public void onFailure(@NonNull Throwable error) { - cancelTimeout(); + cancelTimeout(pageTimeout); LOG.trace(String.format("[%s] Request failure", logPrefix), error); RetryDecision decision; if (!isIdempotent || error instanceof FrameTooLongException) { @@ -469,6 +478,7 @@ private void processResultResponse(@NonNull Result result, @Nullable Frame frame reenableAutoReadIfNeeded(); enqueueOrCompletePending(resultSet); stopGlobalRequestTimer(); + cancelTimeout(globalTimeout); } else { LOG.trace("[{}] Received page {} ({} rows)", 
logPrefix, pageNumber, pageSize); if (currentPage > 0) { @@ -488,6 +498,7 @@ private void processResultResponse(@NonNull Result result, @Nullable Frame frame reenableAutoReadIfNeeded(); enqueueOrCompletePending(resultSet); stopGlobalRequestTimer(); + cancelTimeout(globalTimeout); } } catch (Throwable error) { abort(error, false); @@ -776,7 +787,7 @@ private void enqueueOrCompletePending(@NonNull Object pageOrError) { // Backpressure without protocol support: if the queue grows too large, // disable auto-read so that the channel eventually becomes // non-writable on the server side (causing it to back off for a while) - if (!protocolBackpressureAvailable && queue.size() == maxEnqueuedPages && state > 0) { + if (!protocolBackpressureAvailable && queue.size() == getMaxEnqueuedPages() && state > 0) { LOG.trace( "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, queue.size()); channel.config().setAutoRead(false); @@ -802,7 +813,9 @@ public CompletableFuture dequeueOrCreatePending() { assert pendingResult == null; Object head = queue.poll(); - if (!protocolBackpressureAvailable && head != null && queue.size() == maxEnqueuedPages - 1) { + if (!protocolBackpressureAvailable + && head != null + && queue.size() == getMaxEnqueuedPages() - 1) { LOG.trace( "[{}] Back to {} queued response pages, re-enabling auto-read", logPrefix, @@ -838,7 +851,7 @@ public CompletableFuture dequeueOrCreatePending() { // Only schedule a timeout if we're past the first page (the first page's timeout is // handled in sendRequest). if (state > 1) { - timeout = scheduleTimeout(state); + pageTimeout = schedulePageTimeout(state); // Note: each new timeout is cancelled when the next response arrives, see // onResponse(Frame). 
} @@ -863,17 +876,17 @@ private void maybeRequestMore() { return; } // if we have already requested more than the client needs, then no need to request some more - if (maxPages > 0 && numPagesRequested >= maxPages) { + if (getMaxPages() > 0 && numPagesRequested >= getMaxPages()) { return; } // the pages received so far, which is the state minus one int received = state - 1; int requested = numPagesRequested; // the pages that fit in the queue, which is the queue free space minus the requests in flight - int freeSpace = maxEnqueuedPages - queue.size(); + int freeSpace = getMaxEnqueuedPages() - queue.size(); int inFlight = requested - received; int numPagesFittingInQueue = freeSpace - inFlight; - if (numPagesFittingInQueue >= maxEnqueuedPages / 2) { + if (numPagesFittingInQueue >= getMaxEnqueuedPages() / 2) { LOG.trace("[{}] Requesting more {} pages", logPrefix, numPagesFittingInQueue); numPagesRequested = requested + numPagesFittingInQueue; sendMorePagesRequest(numPagesFittingInQueue); @@ -898,7 +911,7 @@ private void sendMorePagesRequest(int nextPages) { true, Revise.requestMoreContinuousPages(streamId, nextPages), statement.getCustomPayload(), - timeoutOtherPages, + getReviseRequestTimeout(), throttler, session.getMetricUpdater(), logPrefix, @@ -924,11 +937,11 @@ private void sendMorePagesRequest(int nextPages) { // TIMEOUT HANDLING - private Timeout scheduleTimeout(int expectedPage) { + private Timeout schedulePageTimeout(int expectedPage) { if (expectedPage < 0) { return null; } - Duration timeout = expectedPage == 1 ? timeoutFirstPage : timeoutOtherPages; + Duration timeout = getPageTimeout(expectedPage); if (timeout.toNanos() <= 0) { return null; } @@ -958,9 +971,27 @@ private Timeout scheduleTimeout(int expectedPage) { TimeUnit.NANOSECONDS); } - /** Cancels the current timeout, if non null. 
*/ - private void cancelTimeout() { - Timeout timeout = this.timeout; + private Timeout scheduleGlobalTimeout() { + Duration globalTimeout = getGlobalTimeout(); + if (globalTimeout.toNanos() <= 0) { + return null; + } + LOG.trace("[{}] Scheduling global timeout for pages in {}", logPrefix, globalTimeout); + return timer.newTimeout( + timeout1 -> { + lock.lock(); + try { + abort(new DriverTimeoutException("Query timed out after " + globalTimeout), false); + } finally { + lock.unlock(); + } + }, + globalTimeout.toNanos(), + TimeUnit.NANOSECONDS); + } + + /** Cancels the given timeout, if non null. */ + private void cancelTimeout(Timeout timeout) { if (timeout != null) { LOG.trace("[{}] Cancelling timeout", logPrefix); timeout.cancel(); @@ -1011,7 +1042,7 @@ private void sendCancelRequest() { true, Revise.cancelContinuousPaging(streamId), statement.getCustomPayload(), - timeoutOtherPages, + getReviseRequestTimeout(), throttler, session.getMetricUpdater(), logPrefix, @@ -1092,6 +1123,7 @@ private void abort(@NonNull Throwable error, boolean fromServer) { } } stopGlobalRequestTimer(); + cancelTimeout(globalTimeout); } // METRICS @@ -1210,7 +1242,7 @@ private String asTraceString(@NonNull Object pageOrError) { } @VisibleForTesting - public int getState() { + int getState() { lock.lock(); try { return state; @@ -1220,7 +1252,7 @@ public int getState() { } @VisibleForTesting - public CompletableFuture getPendingResult() { + CompletableFuture getPendingResult() { lock.lock(); try { return pendingResult; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index fea1f4c21d2..1c6ebea08d7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -6,17 +6,19 @@ */ 
package com.datastax.dse.driver.internal.core.graph; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.internal.core.ContinuousRequestHandlerBase; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.response.Result; @@ -27,6 +29,7 @@ import io.netty.util.concurrent.GenericFutureListener; import java.io.IOException; import java.nio.ByteBuffer; +import java.time.Duration; import java.util.ArrayDeque; import java.util.List; import java.util.Map; @@ -34,15 +37,20 @@ import net.jcip.annotations.ThreadSafe; /** - * Handles a request that supports multiple response messages (a.k.a. continuous paging request). + * Handles a Graph request that supports multiple response messages (a.k.a. continuous paging + * request). 
*/ @ThreadSafe public class ContinuousGraphRequestHandler extends ContinuousRequestHandlerBase, AsyncGraphResultSet, GraphExecutionInfo> implements ResponseCallback, GenericFutureListener>, Throttled { + private final Message message; private final GraphProtocol subProtocol; private final GraphBinaryModule graphBinaryModule; + private final Duration globalTimeout; + private final int maxEnqueuedPages; + private final int maxPages; ContinuousGraphRequestHandler( @NonNull GraphStatement statement, @@ -52,14 +60,47 @@ public class ContinuousGraphRequestHandler @NonNull GraphBinaryModule graphBinaryModule) { super(statement, session, context, sessionLogPrefix, AsyncGraphResultSet.class); this.graphBinaryModule = graphBinaryModule; - this.subProtocol = GraphConversions.inferSubProtocol(statement, executionProfile); - this.message = + subProtocol = GraphConversions.inferSubProtocol(statement, executionProfile); + message = GraphConversions.createContinuousMessageFromGraphStatement( - statement, subProtocol, executionProfile, this.context, graphBinaryModule); + statement, subProtocol, executionProfile, context, graphBinaryModule); throttler.register(this); + globalTimeout = + MoreObjects.firstNonNull( + statement.getTimeout(), + executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)); + maxEnqueuedPages = + executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + maxPages = executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES); + } + + @NonNull + @Override + protected Duration getGlobalTimeout() { + return globalTimeout; } - // MAIN LIFECYCLE + @NonNull + @Override + protected Duration getPageTimeout(int pageNumber) { + return Duration.ZERO; + } + + @NonNull + @Override + protected Duration getReviseRequestTimeout() { + return Duration.ZERO; + } + + @Override + protected int getMaxEnqueuedPages() { + return maxEnqueuedPages; + } + + @Override + protected int getMaxPages() { + return maxPages; + } 
@NonNull @Override @@ -69,7 +110,7 @@ protected Message getMessage() { @Override protected boolean isTracingEnabled() { - return this.statement.isTracing(); + return statement.isTracing(); } @NonNull @@ -118,10 +159,6 @@ protected ContinuousAsyncGraphResultSet createResultSet( @Override protected int pageNumber(@NonNull AsyncGraphResultSet resultSet) { - if (resultSet instanceof ContinuousAsyncGraphResultSet) { - return ((ContinuousAsyncGraphResultSet) resultSet).pageNumber(); - } else { // otherwise the AsyncGraphResultSet is not a Continuous Paging Query - return 1; - } + return ((ContinuousAsyncGraphResultSet) resultSet).pageNumber(); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index efffdb2f6e1..eb0d09d381c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -139,11 +139,12 @@ public static Message createContinuousMessageFromGraphStatement( timestamp = context.getTimestampGenerator().next(); } - int pageSize = config.getInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE); - boolean pageSizeInBytes = config.getBoolean(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES); - int maxPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); - int maxPagesPerSecond = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND); - int maxEnqueuedPages = config.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); + int pageSize = config.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE); + int maxPages = config.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES); + int maxPagesPerSecond = + config.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND); + int maxEnqueuedPages = + 
config.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); ContinuousPagingOptions options = new ContinuousPagingOptions(maxPages, maxPagesPerSecond, maxEnqueuedPages); @@ -158,7 +159,7 @@ public static Message createContinuousMessageFromGraphStatement( ProtocolConstants.ConsistencyLevel.LOCAL_SERIAL, // also ignored timestamp, null, // also ignored - pageSizeInBytes, + false, // graph CP does not support sizeInBytes options); if (statement instanceof ScriptGraphStatement) { @@ -369,7 +370,7 @@ public static Map createCustomPayload( if (!statementOptions.containsKey(GRAPH_TIMEOUT_OPTION_KEY)) { Duration timeout = statement.getTimeout(); if (timeout == null) { - timeout = config.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + timeout = config.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO); } if (timeout != null && !timeout.isZero()) { payload.put( diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 2cad4da2459..318f0d6aad3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -181,7 +181,7 @@ public GraphRequestHandler( Duration timeout = graphStatement.getTimeout(); if (timeout == null) { - timeout = executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + timeout = executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO); } this.timeoutFuture = scheduleTimeout(timeout); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 698ec76727a..b3ad559730b 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -329,7 +329,10 @@ datastax-java-driver { # This can also be overridden programmatically with GraphStatement.setTimeout(). 
If both are # specified, the programmatic value takes precedence, and this option is ignored. # - # Required: no (defaults to request.basic.consistency) + # If this value is left unset (default) or is explicitly set to zero, no timeout will be + # applied. + # + # Required: no (defaults to zero - no timeout) # Modifiable at runtime: yes, the new value will be used for requests issued after the change. # Overridable in a profile: yes // timeout = 10 seconds @@ -1027,18 +1030,68 @@ datastax-java-driver { # #

      If AUTO is set, the driver will decide whether or not to enable Graph paging # based on the protocol version in use and the DSE version of all hosts. For this reason it is - # usually not necessary to call this method. - # - #

      IMPORTANT: Paging for DSE Graph is only available in DSE 6.8 and higher, and requires - # protocol version DSE_V1 or higher and graphs created with the Native engine; enabling paging - # for clusters and graphs that do not meet this requirement may result in query failures. + # usually not necessary to change this setting. # - # set whether or not to enable Graph paging, or AUTO to let the driver decide. + #

      IMPORTANT: Paging for DSE Graph is only available in DSE 6.8 and higher, and + # requires protocol version DSE_V1 or higher and graphs created with the Native engine; enabling + # paging for clusters and graphs that do not meet this requirement may result in query failures. # # Supported values are: ENABLED, DISABLED, AUTO - #/ paging-enabled = "AUTO" - paging-options = ${datastax-java-driver.advanced.continuous-paging} + + + paging-options { + + # The page size. + # + # The value specified here can be interpreted in number of rows. + # Interpetation in number of bytes is not supported for graph continuous paging queries. + # + # It controls how many rows will be retrieved simultaneously in a single + # network roundtrip (the goal being to avoid loading too many results in memory at the same + # time). If there are more results, additional requests will be used to retrieve them (either + # automatically if you iterate with the sync API, or explicitly with the async API's + # fetchNextPage method). + # + # The default is the same as the driver's normal request page size, + # i.e., 5000 (rows). + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + page-size = ${datastax-java-driver.advanced.continuous-paging.page-size} + + # The maximum number of pages to return. + # + # The default is zero, which means retrieve all pages. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages = ${datastax-java-driver.advanced.continuous-paging.max-pages} + + # Returns the maximum number of pages per second. + # + # The default is zero, which means no limit. 
+ # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-pages-per-second = ${datastax-java-driver.advanced.continuous-paging.max-pages-per-second} + + # Returns the maximum number of pages per second. + # + # The default is zero, which means no limit. + # + # Required: yes + # Modifiable at runtime: yes, the new value will be used for continuous requests issued after + # the change + # Overridable in a profile: yes + max-enqueued-pages = ${datastax-java-driver.advanced.continuous-paging.max-enqueued-pages} + } } # Continuous paging (DataStax Enterprise only) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java new file mode 100644 index 00000000000..c7417621e5e --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -0,0 +1,198 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.defaultDseFrameOf; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.tenGraphRows; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.GraphRequestHandlerTestHarness.Builder; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; +import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.io.IOException; +import java.time.Duration; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import org.junit.Before; 
+import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class ContinuousGraphRequestHandlerTest { + + @Mock DseDriverContext mockContext; + @Mock DefaultNode node; + @Mock NodeMetricUpdater nodeMetricUpdater1; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + when(node.getMetricUpdater()).thenReturn(nodeMetricUpdater1); + } + + @Test + @UseDataProvider( + location = GraphRequestHandlerTest.class, + value = "bytecodeEnabledGraphProtocols") + public void should_return_paged_results(GraphProtocol graphProtocol) throws IOException { + + GraphBinaryModule module = createGraphBinaryModule(mockContext); + + Builder builder = + GraphRequestHandlerTestHarness.builder() + .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()); + PoolBehavior node1Behavior = builder.customBehavior(node); + + try (RequestHandlerTestHarness harness = builder.build()) { + + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); + + ContinuousGraphRequestHandler handler = + new ContinuousGraphRequestHandler( + graphStatement, harness.getSession(), harness.getContext(), "test", module); + + // send the initial request + CompletionStage page1Future = handler.handle(); + + node1Behavior.setResponseSuccess( + defaultDseFrameOf(tenGraphRows(graphProtocol, module, 1, false))); + + assertThatStage(page1Future) + .isSuccess( + page1 -> { + assertThat(page1.hasMorePages()).isTrue(); + assertThat(page1.currentPage()).hasSize(10).allMatch(GraphNode::isVertex); + GraphExecutionInfo executionInfo = page1.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + 
assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + }); + + AsyncGraphResultSet page1 = CompletableFutures.getCompleted(page1Future); + CompletionStage page2Future = page1.fetchNextPage(); + + node1Behavior.setResponseSuccess( + defaultDseFrameOf(tenGraphRows(graphProtocol, module, 2, true))); + + assertThatStage(page2Future) + .isSuccess( + page2 -> { + assertThat(page2.hasMorePages()).isFalse(); + assertThat(page2.currentPage()).hasSize(10).allMatch(GraphNode::isVertex); + GraphExecutionInfo executionInfo = page2.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node); + assertThat(executionInfo.getErrors()).isEmpty(); + assertThat(executionInfo.getIncomingPayload()).isEmpty(); + assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); + assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); + assertThat(executionInfo.getWarnings()).isEmpty(); + }); + } + } + + @Test + public void should_honor_default_timeout() throws Exception { + // given + GraphBinaryModule binaryModule = createGraphBinaryModule(mockContext); + Duration defaultTimeout = Duration.ofSeconds(1); + + RequestHandlerTestHarness.Builder builder = + GraphRequestHandlerTestHarness.builder().withGraphTimeout(defaultTimeout); + PoolBehavior node1Behavior = builder.customBehavior(node); + + try (RequestHandlerTestHarness harness = builder.build()) { + + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + + // when + ContinuousGraphRequestHandler handler = + new ContinuousGraphRequestHandler( + graphStatement, harness.getSession(), harness.getContext(), "test", binaryModule); + + // send the initial request + CompletionStage page1Future = handler.handle(); + + // acknowledge the write, will set the global timeout + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + + CapturedTimeout globalTimeout = harness.nextScheduledTimeout(); + 
assertThat(globalTimeout.getDelay(TimeUnit.NANOSECONDS)).isEqualTo(defaultTimeout.toNanos()); + + // will trigger the global timeout and complete it exceptionally + globalTimeout.task().run(globalTimeout); + assertThat(page1Future.toCompletableFuture()) + .hasFailedWithThrowableThat() + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Query timed out after " + defaultTimeout); + } + } + + @Test + public void should_honor_statement_timeout() throws Exception { + // given + GraphBinaryModule binaryModule = createGraphBinaryModule(mockContext); + Duration defaultTimeout = Duration.ofSeconds(1); + Duration statementTimeout = Duration.ofSeconds(2); + + RequestHandlerTestHarness.Builder builder = + GraphRequestHandlerTestHarness.builder().withGraphTimeout(defaultTimeout); + PoolBehavior node1Behavior = builder.customBehavior(node); + + try (RequestHandlerTestHarness harness = builder.build()) { + + GraphStatement graphStatement = + ScriptGraphStatement.newInstance("mockQuery").setTimeout(statementTimeout); + + // when + ContinuousGraphRequestHandler handler = + new ContinuousGraphRequestHandler( + graphStatement, harness.getSession(), harness.getContext(), "test", binaryModule); + + // send the initial request + CompletionStage page1Future = handler.handle(); + + // acknowledge the write, will set the global timeout + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + + CapturedTimeout globalTimeout = harness.nextScheduledTimeout(); + assertThat(globalTimeout.getDelay(TimeUnit.NANOSECONDS)) + .isEqualTo(statementTimeout.toNanos()); + + // will trigger the global timeout and complete it exceptionally + globalTimeout.task().run(globalTimeout); + assertThat(page1Future.toCompletableFuture()) + .hasFailedWithThrowableThat() + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Query timed out after " + statementTimeout); + } + } +} diff --git 
a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index f699059eed1..7ba65f24812 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -17,6 +17,10 @@ import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.defaultDseFrameOf; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.serialize; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.singleGraphRow; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.BIGINT; import static com.datastax.oss.driver.api.core.type.codec.TypeCodecs.TEXT; import static org.assertj.core.api.Assertions.assertThat; @@ -30,7 +34,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.data.geometry.Point; import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; @@ -54,14 +57,8 @@ import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.ProtocolConstants; import 
com.datastax.oss.protocol.internal.request.Query; -import com.datastax.oss.protocol.internal.response.result.ColumnSpec; -import com.datastax.oss.protocol.internal.response.result.DefaultRows; -import com.datastax.oss.protocol.internal.response.result.RawType; -import com.datastax.oss.protocol.internal.response.result.RowsMetadata; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; @@ -71,19 +68,11 @@ import java.time.Duration; import java.time.LocalDateTime; import java.time.ZoneOffset; -import java.util.ArrayDeque; -import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Queue; import java.util.regex.Pattern; -import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.io.Buffer; -import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; -import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; -import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.Before; @@ -108,11 +97,6 @@ public void setup() { when(node.getMetricUpdater()).thenReturn(nodeMetricUpdater1); } - GraphBinaryModule createGraphBinaryModule(DseDriverContext context) { - TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); - return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); - } - @Test @UseDataProvider("bytecodeEnabledGraphProtocols") public void should_create_query_message_from_script_statement(GraphProtocol 
graphProtocol) @@ -222,18 +206,6 @@ public void should_create_query_message_from_batch_statement(GraphProtocol graph module); } - private static ByteBuffer serialize( - Object value, GraphProtocol graphProtocol, GraphBinaryModule graphBinaryModule) - throws IOException { - - Buffer tinkerBuf = graphBinaryModule.serialize(value); - ByteBuffer nioBuffer = tinkerBuf.nioBuffer(); - tinkerBuf.release(); - return graphProtocol.isGraphBinary() - ? nioBuffer - : GraphSONUtils.serializeToByteBuffer(value, graphProtocol); - } - private void testQueryRequestAndPayloadContents( RawBytesQuery q, Map customPayload, @@ -320,7 +292,7 @@ public void should_create_payload_from_config_options() { Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null); Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_NAME, null); Mockito.verify(executionProfile).getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); - Mockito.verify(executionProfile).getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + Mockito.verify(executionProfile).getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO); Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); Mockito.verify(executionProfile).getString(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, null); @@ -372,7 +344,8 @@ public void should_create_payload_from_statement_options() { Mockito.verify(executionProfile, never()).getString(DseDriverOption.GRAPH_NAME, null); Mockito.verify(executionProfile, never()) .getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false); - Mockito.verify(executionProfile, never()).getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + Mockito.verify(executionProfile, never()) + .getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO); Mockito.verify(executionProfile, never()) .getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null); Mockito.verify(executionProfile, never()) @@ -473,8 +446,8 @@ public void 
should_return_results_for_statements(GraphProtocol graphProtocol) th if (!graphProtocol.isGraphBinary()) { // GraphBinary does not encode properties regardless of whether they are present in the // parent element or not :/ - assertThat(v.property("name").id()).isEqualTo(11); - assertThat(v.property("name").value()).isEqualTo("marko"); + assertThat(vRead.property("name").id()).isEqualTo(11); + assertThat(vRead.property("name").value()).isEqualTo("marko"); } } @@ -543,46 +516,4 @@ node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, v, module))) matches(LOG_PREFIX_PER_REQUEST)); verifyNoMoreInteractions(requestTracker); } - - private static Frame defaultDseFrameOf(Message responseMessage) { - return Frame.forResponse( - DseProtocolVersion.DSE_V2.getCode(), - 0, - null, - Frame.NO_PAYLOAD, - Collections.emptyList(), - responseMessage); - } - - // Returns a single row, with a single "message" column containing the value - // given in parameter serialized according to the protocol - private static Message singleGraphRow( - GraphProtocol graphProtocol, Object value, GraphBinaryModule module) throws IOException { - RowsMetadata metadata = - new RowsMetadata( - ImmutableList.of( - new ColumnSpec( - "ks", - "table", - "gremlin", - 0, - graphProtocol.isGraphBinary() - ? RawType.PRIMITIVES.get(ProtocolConstants.DataType.BLOB) - : RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), - null, - new int[] {}, - null); - Queue> data = new ArrayDeque<>(); - - data.add( - ImmutableList.of( - serialize( - graphProtocol.isGraphBinary() - // GraphBinary returns results directly inside a Traverser - ? 
new DefaultRemoteTraverser<>(value, 1) - : ImmutableMap.of("result", value), - graphProtocol, - module))); - return new DefaultRows(metadata, data); - } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index 65a2c0678f8..c3bc3d19e12 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -33,7 +33,8 @@ import java.time.Duration; import java.util.Optional; import javax.annotation.Nullable; -import org.mockito.*; +import org.mockito.ArgumentMatchers; +import org.mockito.Mock; /** * Provides the environment to test a request handler, where a query plan can be defined, and the @@ -50,11 +51,11 @@ public class GraphRequestHandlerTestHarness extends RequestHandlerTestHarness { @Mock EventLoop eventLoop; protected GraphRequestHandlerTestHarness( - Builder builder, @Nullable String graphProtocolForTestConfig) { + Builder builder, @Nullable String graphProtocolForTestConfig, Duration graphTimeout) { super(builder); // not mocked by RequestHandlerTestHarness, will be used when DseDriverOptions.GRAPH_TIMEOUT - // is not null in the config + // is not zero in the config when(eventLoopGroup.next()).thenReturn(eventLoop); // default graph options as in the reference.conf file @@ -63,9 +64,11 @@ protected GraphRequestHandlerTestHarness( .thenReturn("graphson-2.0"); when(defaultProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); when(defaultProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); + when(defaultProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)) + .thenReturn(graphTimeout); when(testProfile.getName()).thenReturn("test-graph"); - 
when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + when(testProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)) .thenReturn(Duration.ofMillis(2L)); when(testProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); @@ -87,8 +90,8 @@ protected GraphRequestHandlerTestHarness( when(config.getProfile("test-graph")).thenReturn(testProfile); when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); - when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) - .thenReturn(Duration.ofMillis(500L)); + when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)) + .thenReturn(Duration.ZERO); when(systemQueryExecutionProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) .thenReturn(DefaultConsistencyLevel.LOCAL_ONE.name()); when(systemQueryExecutionProfile.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)) @@ -100,7 +103,7 @@ protected GraphRequestHandlerTestHarness( when(systemQueryExecutionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)) .thenReturn(true); when(systemQueryExecutionProfile.getName()).thenReturn("graph-system-query"); - when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null)) + when(systemQueryExecutionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)) .thenReturn(Duration.ofMillis(2)); when(systemQueryExecutionProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)) .thenReturn(true); @@ -125,6 +128,7 @@ protected GraphRequestHandlerTestHarness( when(dseDriverContext.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); when(dseDriverContext.getTimestampGenerator()).thenReturn(timestampGenerator); when(dseDriverContext.getProtocolVersion()).thenReturn(DseProtocolVersion.DSE_V2); + when(dseDriverContext.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); when(dseDriverContext.getConsistencyLevelRegistry()) 
.thenReturn(new DefaultConsistencyLevelRegistry()); when(dseDriverContext.getWriteTypeRegistry()).thenReturn(new DefaultWriteTypeRegistry()); @@ -144,7 +148,8 @@ public static GraphRequestHandlerTestHarness.Builder builder() { public static class Builder extends RequestHandlerTestHarness.Builder { - String graphProtocolForTestConfig; + private String graphProtocolForTestConfig; + private Duration graphTimeout = Duration.ZERO; public Builder withGraphProtocolForTestConfig(String protocol) { this.graphProtocolForTestConfig = protocol; @@ -153,7 +158,12 @@ public Builder withGraphProtocolForTestConfig(String protocol) { @Override public GraphRequestHandlerTestHarness build() { - return new GraphRequestHandlerTestHarness(this, graphProtocolForTestConfig); + return new GraphRequestHandlerTestHarness(this, graphProtocolForTestConfig, graphTimeout); + } + + public RequestHandlerTestHarness.Builder withGraphTimeout(Duration globalTimeout) { + this.graphTimeout = globalTimeout; + return this; } } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java new file mode 100644 index 00000000000..90bb9b9561f --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -0,0 +1,151 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.result.ColumnSpec; +import com.datastax.oss.protocol.internal.response.result.DefaultRows; +import com.datastax.oss.protocol.internal.response.result.RawType; +import com.datastax.oss.protocol.internal.response.result.Rows; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.List; +import java.util.Queue; +import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.apache.tinkerpop.gremlin.structure.io.Buffer; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; +import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryWriter; +import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; +import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; + +public class GraphTestUtils { + public static ByteBuffer serialize( + Object value, GraphProtocol graphProtocol, GraphBinaryModule graphBinaryModule) + throws IOException { + + Buffer tinkerBuf = 
graphBinaryModule.serialize(value); + ByteBuffer nioBuffer = tinkerBuf.nioBuffer(); + tinkerBuf.release(); + return graphProtocol.isGraphBinary() + ? nioBuffer + : GraphSONUtils.serializeToByteBuffer(value, graphProtocol); + } + + public static Frame defaultDseFrameOf(Message responseMessage) { + return Frame.forResponse( + DseProtocolVersion.DSE_V2.getCode(), + 0, + null, + Frame.NO_PAYLOAD, + Collections.emptyList(), + responseMessage); + } + + public static Message singleGraphRow( + GraphProtocol graphProtocol, Object value, GraphBinaryModule module) throws IOException { + return singleGraphRow(graphProtocol, value, module, 1, true); + } + + // Returns a single row, with a single "message" column containing the value + // given in parameter serialized according to the protocol + public static Message singleGraphRow( + GraphProtocol graphProtocol, Object value, GraphBinaryModule module, int page, boolean isLast) + throws IOException { + DseRowsMetadata metadata = + new DseRowsMetadata( + ImmutableList.of( + new ColumnSpec( + "ks", + "table", + "gremlin", + 0, + graphProtocol.isGraphBinary() + ? RawType.PRIMITIVES.get(ProtocolConstants.DataType.BLOB) + : RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + null, + new int[] {}, + null, + page, + isLast); + Queue> data = new ArrayDeque<>(); + + data.add( + ImmutableList.of( + serialize( + graphProtocol.isGraphBinary() + // GraphBinary returns results directly inside a Traverser + ? new DefaultRemoteTraverser<>(value, 1) + : ImmutableMap.of("result", value), + graphProtocol, + module))); + return new DefaultRows(metadata, data); + } + + // Returns 10 rows, each with a vertex + public static Rows tenGraphRows( + GraphProtocol graphProtocol, GraphBinaryModule module, int page, boolean last) + throws IOException { + DseRowsMetadata metadata = + new DseRowsMetadata( + ImmutableList.of( + new ColumnSpec( + "ks", + "table", + "gremlin", + 0, + graphProtocol.isGraphBinary() + ? 
RawType.PRIMITIVES.get(ProtocolConstants.DataType.BLOB) + : RawType.PRIMITIVES.get(ProtocolConstants.DataType.VARCHAR))), + null, + new int[] {}, + null, + page, + last); + Queue> data = new ArrayDeque<>(); + int start = (page - 1) * 10; + for (int i = start; i < start + 10; i++) { + Vertex v = + DetachedVertex.build() + .setId("vertex" + i) + .setLabel("person") + .addProperty( + DetachedVertexProperty.build() + .setId("property" + i) + .setLabel("name") + .setValue("user" + i) + .create()) + .create(); + data.add( + ImmutableList.of( + serialize( + graphProtocol.isGraphBinary() + // GraphBinary returns results directly inside a Traverser + ? new DefaultRemoteTraverser<>(v, 1) + : ImmutableMap.of("result", v), + graphProtocol, + module))); + } + return new DefaultRows(metadata, data); + } + + public static GraphBinaryModule createGraphBinaryModule(DseDriverContext context) { + TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); + return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java similarity index 68% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index a420d8e50a3..ec1d6219979 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -4,26 +4,19 @@ * This software can be used solely with DataStax Enterprise. 
Please consult the license at * http://www.datastax.com/terms/datastax-dse-driver-license-terms */ -package com.datastax.dse.driver.api.core.graph.statement; +package com.datastax.dse.driver.api.core.graph; -import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES; -import static com.datastax.dse.driver.api.core.config.DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE; import static com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase.Options; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; +import static org.assertj.core.api.Assertions.fail; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; -import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; -import com.datastax.dse.driver.api.core.graph.GraphNode; -import com.datastax.dse.driver.api.core.graph.GraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.dse.driver.internal.core.graph.GraphProtocol; import com.datastax.dse.driver.internal.core.graph.MultiPageGraphResultSet; +import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; @@ -31,6 +24,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import 
com.tngtech.java.junit.dataprovider.UseDataProvider; import java.net.SocketAddress; +import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionStage; @@ -52,7 +46,7 @@ public class GraphPagingIT { new DseSessionRuleBuilder(ccmRule) .withCreateGraph() .withCoreEngine() - .withGraphProtocol(GRAPH_BINARY_1_0.toInternalCode()) + .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()) .build(); @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @@ -64,8 +58,6 @@ public static void setupSchema() { .execute( ScriptGraphStatement.newInstance( "schema.vertexLabel('person')" - + ".ifNotExists()" // required otherwise we get a weird table already exists - // error + ".partitionBy('pk', Int)" + ".clusterBy('cc', Int)" + ".property('name', Text)" @@ -87,7 +79,7 @@ public static void setupSchema() { @Test public void synchronous_paging_with_options(Options options) { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.ENABLED); + DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.ENABLED); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -123,7 +115,7 @@ public void synchronous_paging_with_options(Options options) { @Test public void synchronous_paging_with_options_when_auto(Options options) { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.AUTO); + DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.AUTO); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -169,7 +161,7 @@ private void assertIfMultiPage(GraphResultSet result, int expectedPages) { public void synchronous_options_with_paging_disabled_should_fallback_to_single_page( Options options) { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.DISABLED); + 
DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.DISABLED); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -205,7 +197,7 @@ public void synchronous_options_with_paging_disabled_should_fallback_to_single_p public void asynchronous_paging_with_options(Options options) throws ExecutionException, InterruptedException { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.ENABLED); + DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.ENABLED); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -231,7 +223,7 @@ public void asynchronous_paging_with_options(Options options) public void asynchronous_paging_with_options_when_auto(Options options) throws ExecutionException, InterruptedException { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.AUTO); + DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.AUTO); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -257,7 +249,7 @@ public void asynchronous_paging_with_options_when_auto(Options options) public void asynchronous_options_with_paging_disabled_should_fallback_to_single_page( Options options) throws ExecutionException, InterruptedException { // given - DriverExecutionProfile profile = enablePaging(options, PagingEnabledOptions.DISABLED); + DriverExecutionProfile profile = enableGraphPaging(options, PagingEnabledOptions.DISABLED); if (options.sizeInBytes) { // Page sizes in bytes are not supported with graph queries @@ -283,13 +275,6 @@ public void asynchronous_options_with_paging_disabled_should_fallback_to_single_ assertThat(asyncGraphResultSet.remaining()).isEqualTo(0); } - private DriverExecutionProfile enablePaging( - Options options, PagingEnabledOptions pagingEnabledOptions) { - DriverExecutionProfile profile = 
options.asProfile(sessionRule.session()); - profile = profile.withString(DseDriverOption.GRAPH_PAGING_ENABLED, pagingEnabledOptions.name()); - return profile; - } - private void checkAsyncResult( CompletionStage future, Options options, @@ -340,9 +325,10 @@ private void checkAsyncResult( @Test public void should_cancel_result_set() { // given - DriverExecutionProfile profile = enablePaging(); - profile = profile.withInt(CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1); - profile = profile.withInt(CONTINUOUS_PAGING_PAGE_SIZE, 10); + DriverExecutionProfile profile = + enableGraphPaging() + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, 10); // when GraphStatement statement = @@ -364,13 +350,138 @@ public void should_cancel_result_set() { } } - private DriverExecutionProfile enablePaging() { + @Test + public void should_trigger_global_timeout_sync_from_config() { + // given + Duration timeout = Duration.ofMillis(100); + DriverExecutionProfile profile = + enableGraphPaging().withDuration(DseDriverOption.GRAPH_TIMEOUT, timeout); + + // when + try { + ccmRule.getCcmBridge().pause(1); + try { + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + fail("Expecting DriverTimeoutException"); + } catch (DriverTimeoutException e) { + assertThat(e).hasMessage("Query timed out after " + timeout); + } + } finally { + ccmRule.getCcmBridge().resume(1); + } + } + + @Test + public void should_trigger_global_timeout_sync_from_statement() { + // given + Duration timeout = Duration.ofMillis(100); + + // when + try { + ccmRule.getCcmBridge().pause(1); + try { + sessionRule + .session() + .execute( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + 
.setTimeout(timeout)); + fail("Expecting DriverTimeoutException"); + } catch (DriverTimeoutException e) { + assertThat(e).hasMessage("Query timed out after " + timeout); + } + } finally { + ccmRule.getCcmBridge().resume(1); + } + } + + @Test + public void should_trigger_global_timeout_async() throws InterruptedException { + // given + Duration timeout = Duration.ofMillis(100); + DriverExecutionProfile profile = + enableGraphPaging().withDuration(DseDriverOption.GRAPH_TIMEOUT, timeout); + + // when + try { + ccmRule.getCcmBridge().pause(1); + CompletionStage result = + sessionRule + .session() + .executeAsync( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + result.toCompletableFuture().get(); + fail("Expecting DriverTimeoutException"); + } catch (ExecutionException e) { + assertThat(e.getCause()).hasMessage("Query timed out after " + timeout); + } finally { + ccmRule.getCcmBridge().resume(1); + } + } + + @Test + public void should_trigger_global_timeout_async_after_first_page() throws InterruptedException { + // given + Duration timeout = Duration.ofSeconds(1); DriverExecutionProfile profile = - sessionRule.session().getContext().getConfig().getDefaultProfile(); - profile = - profile.withString( - DseDriverOption.GRAPH_PAGING_ENABLED, PagingEnabledOptions.ENABLED.name()); - return profile; + enableGraphPaging() + .withDuration(DseDriverOption.GRAPH_TIMEOUT, timeout) + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, 10); + + // when + try { + CompletionStage firstPageFuture = + sessionRule + .session() + .executeAsync( + ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") + .setGraphName(sessionRule.getGraphName()) + .setTraversalSource("g") + .setExecutionProfile(profile)); + AsyncGraphResultSet firstPage = 
firstPageFuture.toCompletableFuture().get(); + ccmRule.getCcmBridge().pause(1); + CompletionStage secondPageFuture = firstPage.fetchNextPage(); + secondPageFuture.toCompletableFuture().get(); + fail("Expecting DriverTimeoutException"); + } catch (ExecutionException e) { + assertThat(e.getCause()).hasMessage("Query timed out after " + timeout); + } finally { + ccmRule.getCcmBridge().resume(1); + } + } + + private DriverExecutionProfile enableGraphPaging() { + return sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DseDriverOption.GRAPH_PAGING_ENABLED, PagingEnabledOptions.ENABLED.name()); + } + + private DriverExecutionProfile enableGraphPaging( + Options options, PagingEnabledOptions pagingEnabledOptions) { + return sessionRule + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, options.pageSize) + .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES, options.maxPages) + .withInt( + DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, options.maxPagesPerSecond) + .withString(DseDriverOption.GRAPH_PAGING_ENABLED, pagingEnabledOptions.name()); } private SocketAddress firstCcmNode() { diff --git a/integration-tests/src/test/resources/application.conf b/integration-tests/src/test/resources/application.conf index 5e59d6c8f79..45c1366bd45 100644 --- a/integration-tests/src/test/resources/application.conf +++ b/integration-tests/src/test/resources/application.conf @@ -9,8 +9,8 @@ datastax-java-driver { local-datacenter = dc1 } - # Raise a bit for graph queries request.timeout = 10 seconds + graph.timeout = 10 seconds } advanced { connection { @@ -26,6 +26,10 @@ datastax-java-driver { graph { name = "demo" } + continuous-paging.timeout { + first-page = 10 seconds + other-pages = 10 seconds + } metrics { // Raise histogram bounds because the tests execute DDL queries with a higher timeout session.cql_requests.highest_latency = 30 seconds 
From d5c0d848a75619f6e45686d27dbeab155500a336 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 25 Nov 2019 12:21:53 +0100 Subject: [PATCH 317/979] Make fields private and final --- .../core/graph/statement/GraphTraversalMetaPropertiesIT.java | 4 ++-- .../core/graph/statement/GraphTraversalMultiPropertiesIT.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java index 110177efb1d..1486d3fedb6 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java @@ -49,7 +49,7 @@ public class GraphTraversalMetaPropertiesIT { @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); /** Builds a simple schema that provides for a vertex with a property with sub properties. 
*/ - public static String metaProps = + private static final String META_PROPS = MAKE_STRICT + ALLOW_SCANS + "schema.propertyKey('sub_prop').Text().create()\n" @@ -65,7 +65,7 @@ public class GraphTraversalMetaPropertiesIT { */ @Test public void should_parse_meta_properties() { - sessionRule.session().execute(ScriptGraphStatement.newInstance(metaProps)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(META_PROPS)); GraphResultSet result = sessionRule diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java index 2d2e76576da..deb07050411 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -46,7 +46,7 @@ public class GraphTraversalMultiPropertiesIT { @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); /** Builds a simple schema that provides for a vertex with a multi-cardinality property. */ - public static final String multiProps = + private static final String MULTI_PROPS = MAKE_STRICT + ALLOW_SCANS + "schema.propertyKey('multi_prop').Text().multiple().create()\n" @@ -62,7 +62,7 @@ public class GraphTraversalMultiPropertiesIT { @Test public void should_parse_multiple_cardinality_properties() { // given a schema that defines multiple cardinality properties. 
- sessionRule.session().execute(ScriptGraphStatement.newInstance(multiProps)); + sessionRule.session().execute(ScriptGraphStatement.newInstance(MULTI_PROPS)); // when adding a vertex with a multiple cardinality property GraphResultSet result = From d02b5b4c13ff84246513b6191bcdf1c28e44c60e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 25 Nov 2019 12:22:33 +0100 Subject: [PATCH 318/979] Harmonize durations used in tests --- .../api/core/graph/GraphTimeoutsIT.java | 58 ++++++++----------- 1 file changed, 25 insertions(+), 33 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index d2f8c9ed063..7e5c668368d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -22,7 +22,6 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DriverTimeoutException; -import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -46,7 +45,7 @@ public class GraphTimeoutsIT { @Test public void should_have_driver_wait_indefinitely_by_default_and_server_return_timeout_response() { - long desiredTimeout = 2500L; + Duration serverTimeout = Duration.ofSeconds(1); DriverExecutionProfile drivertest1 = sessionRule @@ -57,14 +56,13 @@ public void should_have_driver_wait_indefinitely_by_default_and_server_return_ti .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest1"); // We could have done with the server's default but it's 30 secs so the test would have taken at - // 
least - // that time. So we simulate a server timeout change. + // least that time. So we simulate a server timeout change. sessionRule .session() .execute( newInstance( "graph.schema().config().option(\"graph.traversal_sources.drivertest1.evaluation_timeout\").set('" - + desiredTimeout + + serverTimeout.toMillis() + " ms')") .setExecutionProfile(drivertest1)); @@ -77,20 +75,19 @@ public void should_have_driver_wait_indefinitely_by_default_and_server_return_ti .setExecutionProfile(drivertest1)); fail("The request should have timed out"); } catch (InvalidQueryException e) { - if (ccmRule.getCcmBridge().getDseVersion().get().compareTo(Version.parse("6.8.0")) >= 0) { - assertThat(e.toString()) - .contains("evaluation exceeded", "threshold of ", desiredTimeout + "ms"); - } else { - assertThat(e.toString()) - .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); - } + assertThat(e) + .hasMessageContainingAll( + "evaluation exceeded", + "threshold of ", + Long.toString(serverTimeout.toMillis()), + "ms"); } } @Test public void should_not_take_into_account_request_timeout_if_more_than_server_timeout() { - long desiredTimeout = 1000L; - int clientTimeout = 32000; + Duration serverTimeout = Duration.ofSeconds(1); + Duration clientTimeout = Duration.ofSeconds(10); DriverExecutionProfile drivertest2 = sessionRule @@ -99,14 +96,14 @@ public void should_not_take_into_account_request_timeout_if_more_than_server_tim .getConfig() .getDefaultProfile() .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest2") - .withDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ofMillis(clientTimeout)); + .withDuration(DseDriverOption.GRAPH_TIMEOUT, clientTimeout); sessionRule .session() .execute( newInstance( "graph.schema().config().option(\"graph.traversal_sources.drivertest2.evaluation_timeout\").set('" - + desiredTimeout + + serverTimeout.toMillis() + " ms')") .setExecutionProfile(drivertest2)); @@ -119,15 +116,19 @@ public void 
should_not_take_into_account_request_timeout_if_more_than_server_tim .setExecutionProfile(drivertest2)); fail("The request should have timed out"); } catch (InvalidQueryException e) { - assertThat(e.toString()) - .contains("evaluation exceeded", "threshold of ", Long.toString(desiredTimeout), "ms"); + assertThat(e) + .hasMessageContainingAll( + "evaluation exceeded", + "threshold of ", + Long.toString(serverTimeout.toMillis()), + "ms"); } } @Test public void should_take_into_account_request_timeout_if_less_than_server_timeout() { - long serverTimeout = 10000L; - int desiredTimeout = 1000; + Duration serverTimeout = Duration.ofSeconds(10); + Duration clientTimeout = Duration.ofSeconds(1); DriverExecutionProfile drivertest3 = sessionRule @@ -146,7 +147,7 @@ public void should_take_into_account_request_timeout_if_less_than_server_timeout .execute( ScriptGraphStatement.newInstance( "graph.schema().config().option(\"graph.traversal_sources.drivertest3.evaluation_timeout\").set('" - + serverTimeout + + serverTimeout.toMillis() + " ms')") .setExecutionProfile(drivertest3)); @@ -159,19 +160,10 @@ public void should_take_into_account_request_timeout_if_less_than_server_timeout ScriptGraphStatement.newInstance( "java.util.concurrent.TimeUnit.MILLISECONDS.sleep(35000L);1+1") .setExecutionProfile( - drivertest3.withDuration( - DseDriverOption.GRAPH_TIMEOUT, Duration.ofMillis(desiredTimeout)))); + drivertest3.withDuration(DseDriverOption.GRAPH_TIMEOUT, clientTimeout))); fail("The request should have timed out"); - } catch (Exception e) { - // Since server timeout == client timeout, locally concurrency is likely to happen. - // We cannot know for sure if it will be a Client timeout error, or a Server timeout, and - // during tests, both happened and not deterministically. 
- if (e instanceof InvalidQueryException) { - assertThat(e.toString()) - .contains("evaluation exceeded", "threshold of ", desiredTimeout + " ms"); - } else { - assertThat(e).isInstanceOf(DriverTimeoutException.class); - } + } catch (DriverTimeoutException e) { + assertThat(e).hasMessage("Query timed out after " + clientTimeout); } } } From 473f262174a748623caa8a1e07eb71b256130410 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 25 Nov 2019 13:37:51 +0100 Subject: [PATCH 319/979] Qualify static methods with correct receiver --- .../core/graph/GraphRequestHandlerTest.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 7ba65f24812..be85abea213 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -52,6 +52,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; @@ -112,7 +113,7 @@ public void should_create_query_message_from_script_statement(GraphProtocol grap // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Message m = GraphConversions.createMessageFromGraphStatement( @@ -141,7 +142,7 @@ public void 
should_create_query_message_from_fluent_statement(GraphProtocol grap // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Message m = GraphConversions.createMessageFromGraphStatement( @@ -186,7 +187,7 @@ public void should_create_query_message_from_batch_statement(GraphProtocol graph // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Message m = GraphConversions.createMessageFromGraphStatement( @@ -237,7 +238,7 @@ public void should_set_correct_query_options_from_graph_statement() throws IOExc // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Message m = GraphConversions.createMessageFromGraphStatement( graphStatement, subProtocol, executionProfile, harness.getContext(), module); @@ -282,7 +283,7 @@ public void should_create_payload_from_config_options() { // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Map requestPayload = GraphConversions.createCustomPayload( @@ -332,7 +333,7 @@ public void should_create_payload_from_statement_options() { // when DriverExecutionProfile executionProfile = - GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Map requestPayload = GraphConversions.createCustomPayload( @@ -380,7 +381,7 @@ public void should_not_set_graph_name_on_system_queries() { // when DriverExecutionProfile executionProfile = - 
GraphConversions.resolveExecutionProfile(graphStatement, harness.getContext()); + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); Map requestPayload = GraphConversions.createCustomPayload( From a5ed1f71ce3e1d79099d7ed702cb626aac09c931 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 26 Nov 2019 08:49:59 -0600 Subject: [PATCH 320/979] JAVA-2477: Changing imported Tinkerpop classes back to stock DSE driver license (#306) JAVA-2477: Changing imported Tinkerpop classes back to stock DSE driver license --- .../graph/binary/buffer/DseNettyBuffer.java | 18 +++--------------- .../binary/buffer/DseNettyBufferFactory.java | 18 +++--------------- pom.xml | 2 -- 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java index 876a9c3eefc..cd6759db870 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java @@ -1,20 +1,8 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Copyright DataStax, Inc. * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms */ package com.datastax.dse.driver.internal.core.graph.binary.buffer; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java index c2abf21aedf..a482893b7ac 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java @@ -1,20 +1,8 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Copyright DataStax, Inc. * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms */ package com.datastax.dse.driver.internal.core.graph.binary.buffer; diff --git a/pom.xml b/pom.xml index 3d1a11bedfd..772e744ac34 100644 --- a/pom.xml +++ b/pom.xml @@ -621,8 +621,6 @@ limitations under the License.]]> **/src/main/config/ide/** - src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java - src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java SLASHSTAR_STYLE From 7f568935122fbc7798a73e3d22d4436b92c1dba7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:07:45 +0100 Subject: [PATCH 321/979] Do not force graph paging to DISABLED --- .../driver/api/testinfra/session/CqlSessionRuleBuilder.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java index 21c859e4723..d24bece6ee2 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; @@ -65,10 +64,7 @@ public SessionRule build() { .get() .withValue( DseDriverOption.GRAPH_NAME.getPath(), - ConfigValueFactory.fromAnyRef(graphName)) - .withValue( - DseDriverOption.GRAPH_PAGING_ENABLED.getPath(), - 
ConfigValueFactory.fromAnyRef(PagingEnabledOptions.DISABLED.name())); + ConfigValueFactory.fromAnyRef(graphName)); } else { graphName = null; if (loader == null) { From 0aa5c23109f2e49718dec93558459d55fa3d14d4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:08:23 +0100 Subject: [PATCH 322/979] Fix flaky test --- .../driver/api/core/graph/GraphTimeoutsIT.java | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index 7e5c668368d..aec7b385433 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -139,9 +139,8 @@ public void should_take_into_account_request_timeout_if_less_than_server_timeout .withString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, "drivertest3"); // We could have done with the server's default but it's 30 secs so the test would have taken at - // least - // that time. Also, we don't want to rely on server's default. So we simulate a server timeout - // change. + // least that time. Also, we don't want to rely on server's default. So we simulate a server + // timeout change. sessionRule .session() .execute( @@ -162,8 +161,18 @@ public void should_take_into_account_request_timeout_if_less_than_server_timeout .setExecutionProfile( drivertest3.withDuration(DseDriverOption.GRAPH_TIMEOUT, clientTimeout))); fail("The request should have timed out"); + // Since the driver sends its timeout in the request payload, server timeout will be equal to + // client timeout for this request. We cannot know for sure if it will be a client timeout + // error, or a server timeout, and during tests, both happened and not deterministically. 
} catch (DriverTimeoutException e) { assertThat(e).hasMessage("Query timed out after " + clientTimeout); + } catch (InvalidQueryException e) { + assertThat(e) + .hasMessageContainingAll( + "evaluation exceeded", + "threshold of ", + Long.toString(clientTimeout.toMillis()), + " ms"); } } } From 0a4b945d943ecac5c6eb94255b4c0e1cb7678760 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:09:16 +0100 Subject: [PATCH 323/979] Simplify graph test fixtures --- .../core/graph/GraphRequestHandlerTest.java | 30 ++----------------- .../internal/core/graph/GraphTestUtils.java | 26 ++++++++-------- 2 files changed, 16 insertions(+), 40 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index be85abea213..876e345a4b4 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -74,8 +74,6 @@ import java.util.regex.Pattern; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; -import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -405,18 +403,6 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphPagingSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); - Vertex v = - DetachedVertex.build() - .setId(1) - .setLabel("person") - .addProperty( - DetachedVertexProperty.build() - .setId(11) - .setLabel("name") - .setValue("marko") - .create()) - .create(); - RequestHandlerTestHarness harness 
= GraphRequestHandlerTestHarness.builder() .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()) @@ -426,7 +412,7 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th // Since that's not possible in the RequestHandlerTestHarness API at the moment, we // have to use another DseDriverContext and GraphBinaryModule here, // instead of reusing the one in the harness' DriverContext - .withResponse(node, defaultDseFrameOf(singleGraphRow(graphProtocol, v, module))) + .withResponse(node, defaultDseFrameOf(singleGraphRow(graphProtocol, module))) .build(); GraphStatement graphStatement = @@ -466,22 +452,10 @@ public void should_invoke_request_tracker() throws IOException { Mockito.spy(new GraphRequestAsyncProcessor(mockContext, new GraphPagingSupportChecker())); when(p.getGraphBinaryModule()).thenReturn(module); - Vertex v = - DetachedVertex.build() - .setId(1) - .setLabel("person") - .addProperty( - DetachedVertexProperty.build() - .setId(11) - .setLabel("name") - .setValue("marko") - .create()) - .create(); - RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() .withResponse( - node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, v, module))) + node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, module))) .build(); RequestTracker requestTracker = mock(RequestTracker.class); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java index 90bb9b9561f..7e0604649c9 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -57,16 +57,19 @@ public static Frame defaultDseFrameOf(Message responseMessage) { responseMessage); } - public static Message singleGraphRow( - GraphProtocol graphProtocol, Object value, GraphBinaryModule module) throws 
IOException { - return singleGraphRow(graphProtocol, value, module, 1, true); - } - - // Returns a single row, with a single "message" column containing the value - // given in parameter serialized according to the protocol - public static Message singleGraphRow( - GraphProtocol graphProtocol, Object value, GraphBinaryModule module, int page, boolean isLast) + public static Message singleGraphRow(GraphProtocol graphProtocol, GraphBinaryModule module) throws IOException { + Vertex value = + DetachedVertex.build() + .setId(1) + .setLabel("person") + .addProperty( + DetachedVertexProperty.build() + .setId(11) + .setLabel("name") + .setValue("marko") + .create()) + .create(); DseRowsMetadata metadata = new DseRowsMetadata( ImmutableList.of( @@ -81,10 +84,9 @@ public static Message singleGraphRow( null, new int[] {}, null, - page, - isLast); + 1, + true); Queue> data = new ArrayDeque<>(); - data.add( ImmutableList.of( serialize( From c945b68541f859cfc056b7eb632f6dc7bf8cf7f1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:09:54 +0100 Subject: [PATCH 324/979] Add missing nullability annotation --- .../internal/core/graph/GraphStatementBuilderBaseTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java index a4e7be3ecb9..41c0e722781 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java @@ -21,6 +21,7 @@ import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphStatementBuilderBase; +import edu.umd.cs.findbugs.annotations.NonNull; import org.junit.Test; public class GraphStatementBuilderBaseTest { @@ -28,9 +29,9 @@ 
public class GraphStatementBuilderBaseTest { private static class MockGraphStatementBuilder extends GraphStatementBuilderBase { + @NonNull @Override public FluentGraphStatement build() { - FluentGraphStatement rv = mock(FluentGraphStatement.class); when(rv.getTimestamp()).thenReturn(this.timestamp); return rv; From 6999f6c67f9d8e71dafba902841bc97db7837502 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:10:31 +0100 Subject: [PATCH 325/979] Override methods for covariant return types --- .../graph/GraphRequestHandlerTestHarness.java | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index c3bc3d19e12..240ec595af6 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -21,14 +21,17 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.servererrors.DefaultWriteTypeRegistry; import com.datastax.oss.driver.internal.core.session.throttling.PassThroughRequestThrottler; import 
com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; +import com.datastax.oss.protocol.internal.Frame; import io.netty.channel.EventLoop; import java.time.Duration; import java.util.Optional; @@ -156,6 +159,44 @@ public Builder withGraphProtocolForTestConfig(String protocol) { return this; } + @Override + public GraphRequestHandlerTestHarness.Builder withEmptyPool(Node node) { + super.withEmptyPool(node); + return this; + } + + @Override + public GraphRequestHandlerTestHarness.Builder withWriteFailure(Node node, Throwable cause) { + super.withWriteFailure(node, cause); + return this; + } + + @Override + public GraphRequestHandlerTestHarness.Builder withResponseFailure(Node node, Throwable cause) { + super.withResponseFailure(node, cause); + return this; + } + + @Override + public GraphRequestHandlerTestHarness.Builder withResponse(Node node, Frame response) { + super.withResponse(node, response); + return this; + } + + @Override + public GraphRequestHandlerTestHarness.Builder withDefaultIdempotence( + boolean defaultIdempotence) { + super.withDefaultIdempotence(defaultIdempotence); + return this; + } + + @Override + public GraphRequestHandlerTestHarness.Builder withProtocolVersion( + ProtocolVersion protocolVersion) { + super.withProtocolVersion(protocolVersion); + return this; + } + @Override public GraphRequestHandlerTestHarness build() { return new GraphRequestHandlerTestHarness(this, graphProtocolForTestConfig, graphTimeout); From 1745a436c2740d2a83e11afebbcf7c08bbaa0b02 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 26 Nov 2019 19:25:56 +0100 Subject: [PATCH 326/979] Surround log with if block --- .../internal/core/graph/GraphPagingSupportChecker.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java index 
cac2bcbd8e5..5f479c98426 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java @@ -35,16 +35,15 @@ public class GraphPagingSupportChecker { // The computation below will be done only once when the session is initialized; if other hosts // join the cluster later and are not running DSE 6.8, the user has to manually disable graph // paging. - boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { + public boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { DriverExecutionProfile driverExecutionProfile = Conversions.resolveExecutionProfile(graphStatement, context); - LOG.trace( - "GRAPH_PAGING_ENABLED: {}", - driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); - PagingEnabledOptions pagingEnabledOptions = PagingEnabledOptions.valueOf( driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); + if (LOG.isTraceEnabled()) { + LOG.trace("GRAPH_PAGING_ENABLED: {}", pagingEnabledOptions); + } if (pagingEnabledOptions == PagingEnabledOptions.DISABLED) { return false; } else if (pagingEnabledOptions == PagingEnabledOptions.ENABLED) { From c3d31340f3480a553f1918be9d054bb925432aed Mon Sep 17 00:00:00 2001 From: tomekl007 Date: Thu, 28 Nov 2019 11:49:37 +0100 Subject: [PATCH 327/979] remove whitespace in assertion of should_take_into_account_request_timeout_if_less_than_server_timeout test --- .../com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index aec7b385433..4b8ec8d2d19 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -172,7 +172,7 @@ public void should_take_into_account_request_timeout_if_less_than_server_timeout "evaluation exceeded", "threshold of ", Long.toString(clientTimeout.toMillis()), - " ms"); + "ms"); } } } From 26383efe10b56cfdd8429182d095e7e6fc07f4c1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 28 Nov 2019 12:38:55 +0100 Subject: [PATCH 328/979] Fix wrong value type for GRAPH_PAGING_ENABLED --- .../datastax/dse/driver/api/core/config/DseDriverOption.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index 5dab679316f..dbe0ac3a943 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -177,7 +177,7 @@ public enum DseDriverOption implements DriverOption { /** * Whether to enable paging for Graph queries. * - *

      Value type: boolean + *

      Value type: {@link String} */ GRAPH_PAGING_ENABLED("advanced.graph.paging-enabled"), From 68cbf4f76e05ffc9a419bbb95ea0222c2586d0b1 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 29 Nov 2019 03:37:54 -0600 Subject: [PATCH 329/979] JAVA-1579: Change default result format to latest Graph format (#281) --- changelog/README.md | 1 + .../graph/ContinuousGraphRequestHandler.java | 5 +- .../internal/core/graph/GraphConversions.java | 19 - .../core/graph/GraphPagingSupportChecker.java | 82 ----- .../graph/GraphRequestAsyncProcessor.java | 20 +- .../core/graph/GraphRequestHandler.java | 6 +- .../core/graph/GraphSupportChecker.java | 123 +++++++ core/src/main/resources/reference.conf | 5 +- .../ContinuousGraphRequestHandlerTest.java | 37 +- .../graph/GraphPagingSupportCheckerTest.java | 172 --------- .../core/graph/GraphProtocolTest.java | 100 ------ .../core/graph/GraphRequestHandlerTest.java | 103 ++++-- .../graph/GraphRequestHandlerTestHarness.java | 39 ++- .../core/graph/GraphSupportCheckerTest.java | 325 ++++++++++++++++++ .../internal/core/graph/GraphTestUtil.java | 69 ++++ .../core/graph/CoreGraphDataTypeITBase.java | 32 +- .../driver/api/core/graph/GraphPagingIT.java | 89 +++-- .../api/core/graph/GraphTestSupport.java | 54 +++ .../remote/ClassicGraphDataTypeRemoteIT.java | 25 +- .../remote/CoreGraphDataTypeRemoteIT.java | 28 +- .../GraphTraversalMetaPropertiesRemoteIT.java | 16 +- ...GraphTraversalMultiPropertiesRemoteIT.java | 16 +- .../graph/remote/GraphTraversalRemoteIT.java | 24 +- .../ClassicGraphDataTypeFluentIT.java | 25 +- .../ClassicGraphDataTypeScriptIT.java | 25 +- .../statement/CoreGraphDataTypeFluentIT.java | 28 +- .../statement/CoreGraphDataTypeScriptIT.java | 28 +- .../statement/GraphTraversalBatchIT.java | 26 +- .../graph/statement/GraphTraversalIT.java | 54 +-- .../GraphTraversalMetaPropertiesIT.java | 14 +- .../GraphTraversalMultiPropertiesIT.java | 14 +- manual/core/dse/graph/options/README.md | 9 +- pom.xml | 21 -- 33 files changed, 
984 insertions(+), 650 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java diff --git a/changelog/README.md b/changelog/README.md index 90750ecfd74..320a05d5269 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-1579: Change default result format to latest GraphSON format - [improvement] JAVA-2496: Revisit timeouts for paged graph queries - [bug] JAVA-2510: Fix GraphBinaryDataTypesTest Codec registry initialization - [bug] JAVA-2492: Parse edge metadata using internal identifiers diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index 1c6ebea08d7..e5423026ba5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -57,10 +57,11 @@ public class ContinuousGraphRequestHandler @NonNull DefaultSession session, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix, - @NonNull GraphBinaryModule graphBinaryModule) { + @NonNull GraphBinaryModule 
graphBinaryModule, + @NonNull GraphSupportChecker graphSupportChecker) { super(statement, session, context, sessionLogPrefix, AsyncGraphResultSet.class); this.graphBinaryModule = graphBinaryModule; - subProtocol = GraphConversions.inferSubProtocol(statement, executionProfile); + subProtocol = graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); message = GraphConversions.createContinuousMessageFromGraphStatement( statement, subProtocol, executionProfile, context, graphBinaryModule); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index eb0d09d381c..a0344e9dd05 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -53,7 +53,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Objects; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; import org.apache.tinkerpop.gremlin.structure.io.Buffer; import org.apache.tinkerpop.gremlin.structure.io.BufferFactory; @@ -83,24 +82,6 @@ public class GraphConversions extends Conversions { @VisibleForTesting static final byte[] EMPTY_STRING_QUERY = "".getBytes(UTF_8); - public static GraphProtocol inferSubProtocol( - GraphStatement statement, DriverExecutionProfile config) { - String graphProtocol = statement.getSubProtocol(); - if (graphProtocol == null) { - graphProtocol = - config.getString( - DseDriverOption.GRAPH_SUB_PROTOCOL, - // TODO pick graphson-3.0 if the target graph uses the core engine - "graphson-2.0"); - } - // should not be null because we call config.getString() with a default value - Objects.requireNonNull( - graphProtocol, - "Could not determine the graph protocol for the query. 
This is a bug, please report."); - - return GraphProtocol.fromString(graphProtocol); - } - public static Message createContinuousMessageFromGraphStatement( GraphStatement statement, GraphProtocol subProtocol, diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java deleted file mode 100644 index 5f479c98426..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportChecker.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms - */ -package com.datastax.dse.driver.internal.core.graph; - -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.dse.driver.internal.core.DseProtocolFeature; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.Conversions; -import java.util.Collection; -import java.util.Objects; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class GraphPagingSupportChecker { - private static final Logger LOG = LoggerFactory.getLogger(GraphPagingSupportChecker.class); - static final Version GRAPH_PAGING_MIN_DSE_VERSION = - Objects.requireNonNull(Version.parse("6.8.0")); - - private volatile Boolean contextGraphPagingEnabled; - - // Graph paging 
is available if - // 1) continuous paging is generally available and - // 2) all hosts are running DSE 6.8+ - // The computation below will be done only once when the session is initialized; if other hosts - // join the cluster later and are not running DSE 6.8, the user has to manually disable graph - // paging. - public boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { - DriverExecutionProfile driverExecutionProfile = - Conversions.resolveExecutionProfile(graphStatement, context); - PagingEnabledOptions pagingEnabledOptions = - PagingEnabledOptions.valueOf( - driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); - if (LOG.isTraceEnabled()) { - LOG.trace("GRAPH_PAGING_ENABLED: {}", pagingEnabledOptions); - } - if (pagingEnabledOptions == PagingEnabledOptions.DISABLED) { - return false; - } else if (pagingEnabledOptions == PagingEnabledOptions.ENABLED) { - return true; - } else { - return isContextGraphPagingEnabled(context); - } - } - - private boolean isContextGraphPagingEnabled(InternalDriverContext context) { - if (contextGraphPagingEnabled == null) { - ProtocolVersion protocolVersion = context.getProtocolVersion(); - - if (!context - .getProtocolVersionRegistry() - .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { - contextGraphPagingEnabled = false; - return contextGraphPagingEnabled; - } - - Collection nodes = context.getMetadataManager().getMetadata().getNodes().values(); - - for (Node node : nodes) { - Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); - if (dseVersion == null || dseVersion.compareTo(GRAPH_PAGING_MIN_DSE_VERSION) < 0) { - contextGraphPagingEnabled = false; - return contextGraphPagingEnabled; - } - } - contextGraphPagingEnabled = true; - return contextGraphPagingEnabled; - } else { - return contextGraphPagingEnabled; - } - } -} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 12266e16ce9..14363bc8159 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -37,7 +37,7 @@ public class GraphRequestAsyncProcessor implements RequestProcessor, CompletionStage> { private final GraphBinaryModule graphBinaryModule; - private final GraphPagingSupportChecker graphPagingSupportChecker; + private final GraphSupportChecker graphSupportChecker; public GraphRequestAsyncProcessor( DefaultDriverContext context, GraphPagingSupportChecker graphPagingSupportChecker) { @@ -47,7 +47,7 @@ public GraphRequestAsyncProcessor( new GraphBinaryModule( new GraphBinaryReader(typeSerializerRegistry), new GraphBinaryWriter(typeSerializerRegistry)); - this.graphPagingSupportChecker = graphPagingSupportChecker; + this.graphSupportChecker = graphSupportChecker; } @NonNull @@ -67,13 +67,23 @@ public CompletionStage process( InternalDriverContext context, String sessionLogPrefix) { - if (graphPagingSupportChecker.isPagingEnabled(request, context)) { + if (graphSupportChecker.isPagingEnabled(request, context)) { return new ContinuousGraphRequestHandler( - request, session, context, sessionLogPrefix, getGraphBinaryModule()) + request, + session, + context, + sessionLogPrefix, + getGraphBinaryModule(), + graphSupportChecker) .handle(); } else { return new GraphRequestHandler( - request, session, context, sessionLogPrefix, getGraphBinaryModule()) + request, + session, + context, + sessionLogPrefix, + getGraphBinaryModule(), + graphSupportChecker) .handle(); } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 318f0d6aad3..24cf982bfbe 100644 --- 
a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -140,7 +140,8 @@ public GraphRequestHandler( @NonNull DefaultSession dseSession, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix, - @NonNull GraphBinaryModule graphBinaryModule) { + @NonNull GraphBinaryModule graphBinaryModule, + @NonNull GraphSupportChecker graphSupportChecker) { this.startTimeNanos = System.nanoTime(); this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); Preconditions.checkArgument( @@ -195,7 +196,8 @@ public GraphRequestHandler( this.inFlightCallbacks = new CopyOnWriteArrayList<>(); this.graphBinaryModule = graphBinaryModule; - this.subProtocol = GraphConversions.inferSubProtocol(this.graphStatement, executionProfile); + this.subProtocol = + graphSupportChecker.inferGraphProtocol(this.graphStatement, executionProfile, this.context); LOG.debug("[{}], Graph protocol used for query: {}", logPrefix, subProtocol); this.message = diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java new file mode 100644 index 00000000000..b418f24949d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java @@ -0,0 +1,123 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collection; +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class GraphSupportChecker { + private static final Logger LOG = LoggerFactory.getLogger(GraphSupportChecker.class); + static final Version MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING = + Objects.requireNonNull(Version.parse("6.8.0")); + + private volatile Boolean contextGraphPagingEnabled; + private volatile Boolean isDse68OrAbove; + + // Graph paging is available if + // 1) continuous paging is generally available and + // 2) all hosts are running DSE 6.8+ + // The computation below will be done only once when the session is initialized; if other hosts + // join the cluster later and are not running DSE 6.8, the user has to manually disable graph + // paging. 
+ boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { + DriverExecutionProfile driverExecutionProfile = + Conversions.resolveExecutionProfile(graphStatement, context); + PagingEnabledOptions pagingEnabledOptions = + PagingEnabledOptions.valueOf( + driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)); + if (LOG.isTraceEnabled()) { + LOG.trace("GRAPH_PAGING_ENABLED: {}", pagingEnabledOptions); + } + if (pagingEnabledOptions == PagingEnabledOptions.DISABLED) { + return false; + } else if (pagingEnabledOptions == PagingEnabledOptions.ENABLED) { + return true; + } else { + return isContextGraphPagingEnabled(context); + } + } + + private boolean isContextGraphPagingEnabled(InternalDriverContext context) { + if (contextGraphPagingEnabled == null) { + ProtocolVersion protocolVersion = context.getProtocolVersion(); + if (!context + .getProtocolVersionRegistry() + .supports(protocolVersion, DseProtocolFeature.CONTINUOUS_PAGING)) { + contextGraphPagingEnabled = false; + } else { + if (isDse68OrAbove == null) { + isDse68OrAbove = checkIsDse68OrAbove(context); + } + contextGraphPagingEnabled = isDse68OrAbove; + } + } + return contextGraphPagingEnabled; + } + + /** + * Determines the default {@link GraphProtocol} for the given context. When a statement is + * executed, if the Graph protocol is not explicitly set on the statement (via {@link + * GraphStatement#setSubProtocol(java.lang.String)}), or is not explicitly set in the config (see + * dse-reference.conf), the default Graph protocol used is determined by the DSE version to which + * the driver is connected. For DSE versions 6.7.x and lower, the default Graph protocol is {@link + * GraphProtocol#GRAPHSON_2_0}. For DSE versions 6.8.0 and higher, the default Graph protocol is + * {@link GraphProtocol#GRAPH_BINARY_1_0}. + * + * @return The default GraphProtocol to used based on the provided context. 
+ */ + GraphProtocol getDefaultGraphProtocol(@NonNull InternalDriverContext context) { + if (isDse68OrAbove == null) { + isDse68OrAbove = checkIsDse68OrAbove(context); + } + // if the DSE version can't be determined, default to GraphSON 2.0 + return isDse68OrAbove ? GraphProtocol.GRAPH_BINARY_1_0 : GraphProtocol.GRAPHSON_2_0; + } + + private boolean checkIsDse68OrAbove(@NonNull InternalDriverContext context) { + Collection nodes = context.getMetadataManager().getMetadata().getNodes().values(); + + for (Node node : nodes) { + Version dseVersion = (Version) node.getExtras().get(DseNodeProperties.DSE_VERSION); + if (dseVersion == null || dseVersion.compareTo(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING) < 0) { + return false; + } + } + return true; + } + + GraphProtocol inferGraphProtocol( + GraphStatement statement, DriverExecutionProfile config, InternalDriverContext context) { + String graphProtocol = statement.getSubProtocol(); + if (graphProtocol == null) { + // use the protocol specified in configuration, otherwise get the default from the context + graphProtocol = + (config.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)) + ? config.getString(DseDriverOption.GRAPH_SUB_PROTOCOL) + : getDefaultGraphProtocol(context).toInternalCode(); + } + // should not be null because we call config.getString() with a default value + Objects.requireNonNull( + graphProtocol, + "Could not determine the graph protocol for the query. This is a bug, please report."); + + return GraphProtocol.fromString(graphProtocol); + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index b3ad559730b..18af688a482 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1017,8 +1017,9 @@ datastax-java-driver { # Possible values with built-in support in the driver are: # [ "graphson-1.0", "graphson-2.0", "graph-binary-1.0"] # - # The default value for DSE 6.7 and lower is "graphson-2.0". 
For DSE 6.8 and higher, the default - # value is "graphson-binary-1.0" + # IMPORTANT: The default value for the Graph sub-protocol is based only on the DSE + # version. If the version is DSE 6.7 and lower, "graphson-2.0" will be the default. For DSE 6.8 + # and higher, the default value is "graphson-binary-1.0". # # Required: no # Modifiable at runtime: yes, the new value will be used for requests issued after the change. diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index c7417621e5e..e7bb70b182f 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -13,6 +13,7 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; @@ -22,6 +23,7 @@ import com.datastax.dse.driver.internal.core.graph.GraphRequestHandlerTestHarness.Builder; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.internal.core.cql.PoolBehavior; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; @@ -54,9 +56,7 @@ public void setup() { } @Test - @UseDataProvider( - location = GraphRequestHandlerTest.class, - value = "bytecodeEnabledGraphProtocols") + @UseDataProvider(location = 
GraphRequestHandlerTest.class, value = "supportedGraphProtocols") public void should_return_paged_results(GraphProtocol graphProtocol) throws IOException { GraphBinaryModule module = createGraphBinaryModule(mockContext); @@ -73,7 +73,12 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx ContinuousGraphRequestHandler handler = new ContinuousGraphRequestHandler( - graphStatement, harness.getSession(), harness.getContext(), "test", module); + graphStatement, + harness.getSession(), + harness.getContext(), + "test", + module, + new GraphSupportChecker()); // send the initial request CompletionStage page1Future = handler.handle(); @@ -129,12 +134,22 @@ public void should_honor_default_timeout() throws Exception { try (RequestHandlerTestHarness harness = builder.build()) { + DriverExecutionProfile profile = harness.getContext().getConfig().getDefaultProfile(); + when(profile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(true); + when(profile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) + .thenReturn(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); // when ContinuousGraphRequestHandler handler = new ContinuousGraphRequestHandler( - graphStatement, harness.getSession(), harness.getContext(), "test", binaryModule); + graphStatement, + harness.getSession(), + harness.getContext(), + "test", + binaryModule, + new GraphSupportChecker()); // send the initial request CompletionStage page1Future = handler.handle(); @@ -168,13 +183,23 @@ public void should_honor_statement_timeout() throws Exception { try (RequestHandlerTestHarness harness = builder.build()) { + DriverExecutionProfile profile = harness.getContext().getConfig().getDefaultProfile(); + when(profile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(true); + when(profile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) + .thenReturn(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); + 
GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setTimeout(statementTimeout); // when ContinuousGraphRequestHandler handler = new ContinuousGraphRequestHandler( - graphStatement, harness.getSession(), harness.getContext(), "test", binaryModule); + graphStatement, + harness.getSession(), + harness.getContext(), + "test", + binaryModule, + new GraphSupportChecker()); // send the initial request CompletionStage page1Future = handler.handle(); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java deleted file mode 100644 index 64861bf2821..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphPagingSupportCheckerTest.java +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms - */ -package com.datastax.dse.driver.internal.core.graph; - -import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.AUTO; -import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.DISABLED; -import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.ENABLED; -import static com.datastax.dse.driver.internal.core.graph.GraphPagingSupportChecker.GRAPH_PAGING_MIN_DSE_VERSION; -import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.dse.driver.internal.core.DseProtocolFeature; 
-import com.datastax.oss.driver.api.core.DefaultProtocolVersion; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.config.DriverConfig; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.MetadataManager; -import com.tngtech.java.junit.dataprovider.DataProvider; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import com.tngtech.java.junit.dataprovider.UseDataProvider; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import org.junit.Test; -import org.junit.runner.RunWith; - -@RunWith(DataProviderRunner.class) -public class GraphPagingSupportCheckerTest { - - @UseDataProvider("pagingEnabled") - @Test - public void should_check_if_paging_is_supported( - boolean protocolWithPagingSupport, - PagingEnabledOptions statementGraphPagingEnabled, - PagingEnabledOptions contextGraphPagingEnabled, - List nodeDseVersions, - boolean expected) { - // given - GraphStatement graphStatement = mock(GraphStatement.class); - InternalDriverContext context = protocolWithPagingSupport(protocolWithPagingSupport); - statementGraphPagingEnabled(graphStatement, statementGraphPagingEnabled); - contextGraphPagingEnabled(context, contextGraphPagingEnabled); - addNodeWithDseVersion(context, nodeDseVersions); - - // when - boolean pagingEnabled = - new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); - - // then - assertThat(pagingEnabled).isEqualTo(expected); - } - - @Test - public void should_not_support_paging_when_statement_profile_not_present() { - // given - GraphStatement 
graphStatement = mock(GraphStatement.class); - InternalDriverContext context = protocolWithPagingSupport(true); - contextGraphPagingEnabled(context, DISABLED); - addNodeWithDseVersion(context, Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION)); - - // when - boolean pagingEnabled = - new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); - - // then - assertThat(pagingEnabled).isEqualTo(false); - } - - @Test - public void - should_support_paging_when_statement_profile_not_present_but_context_profile_has_paging_enabled() { - // given - GraphStatement graphStatement = mock(GraphStatement.class); - InternalDriverContext context = protocolWithPagingSupport(true); - contextGraphPagingEnabled(context, ENABLED); - addNodeWithDseVersion(context, Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION)); - - // when - boolean pagingEnabled = - new GraphPagingSupportChecker().isPagingEnabled(graphStatement, context); - - // then - assertThat(pagingEnabled).isEqualTo(true); - } - - @DataProvider() - public static Object[][] pagingEnabled() { - List listWithGraphPagingNode = Collections.singletonList(GRAPH_PAGING_MIN_DSE_VERSION); - List listWithoutGraphPagingNode = Collections.singletonList(Version.parse("6.7.0")); - List listWithNull = Collections.singletonList(null); - List listWithTwoNodesOneNotSupporting = - Arrays.asList(Version.parse("6.7.0"), GRAPH_PAGING_MIN_DSE_VERSION); - - return new Object[][] { - {false, ENABLED, ENABLED, listWithGraphPagingNode, true}, - {true, ENABLED, ENABLED, listWithoutGraphPagingNode, true}, - {true, ENABLED, DISABLED, listWithGraphPagingNode, true}, - {true, ENABLED, ENABLED, listWithGraphPagingNode, true}, - {true, ENABLED, ENABLED, listWithNull, true}, - {true, ENABLED, ENABLED, listWithTwoNodesOneNotSupporting, true}, - {true, DISABLED, ENABLED, listWithGraphPagingNode, false}, - {true, DISABLED, AUTO, listWithGraphPagingNode, false}, - {true, DISABLED, DISABLED, listWithGraphPagingNode, false}, - {true, AUTO, 
AUTO, listWithGraphPagingNode, true}, - {true, AUTO, DISABLED, listWithGraphPagingNode, true}, - {false, AUTO, AUTO, listWithGraphPagingNode, false}, - {true, AUTO, AUTO, listWithTwoNodesOneNotSupporting, false}, - {true, AUTO, AUTO, listWithNull, false}, - }; - } - - private void addNodeWithDseVersion(InternalDriverContext context, List dseVersions) { - MetadataManager manager = mock(MetadataManager.class); - when(context.getMetadataManager()).thenReturn(manager); - Metadata metadata = mock(Metadata.class); - when(manager.getMetadata()).thenReturn(metadata); - Map nodes = new HashMap<>(); - for (Version v : dseVersions) { - Node node = mock(Node.class); - Map extras = new HashMap<>(); - extras.put(DseNodeProperties.DSE_VERSION, v); - when(node.getExtras()).thenReturn(extras); - nodes.put(UUID.randomUUID(), node); - } - when(metadata.getNodes()).thenReturn(nodes); - } - - private void contextGraphPagingEnabled( - InternalDriverContext context, PagingEnabledOptions option) { - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) - .thenReturn(option.name()); - DriverConfig config = mock(DriverConfig.class); - when(context.getConfig()).thenReturn(config); - when(config.getDefaultProfile()).thenReturn(driverExecutionProfile); - } - - private InternalDriverContext protocolWithPagingSupport(boolean pagingSupport) { - InternalDriverContext context = mock(InternalDriverContext.class); - when(context.getProtocolVersion()).thenReturn(DefaultProtocolVersion.V4); - ProtocolVersionRegistry protocolVersionRegistry = mock(ProtocolVersionRegistry.class); - when(protocolVersionRegistry.supports( - DefaultProtocolVersion.V4, DseProtocolFeature.CONTINUOUS_PAGING)) - .thenReturn(pagingSupport); - when(context.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); - return context; - } - - private void statementGraphPagingEnabled( - GraphStatement graphStatement, 
PagingEnabledOptions option) { - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) - .thenReturn(option.name()); - when(graphStatement.getExecutionProfile()).thenReturn(driverExecutionProfile); - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java deleted file mode 100644 index 4bd33b4fa55..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphProtocolTest.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms - */ -package com.datastax.dse.driver.internal.core.graph; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; -import static org.mockito.Mockito.when; - -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.tngtech.java.junit.dataprovider.DataProvider; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import com.tngtech.java.junit.dataprovider.UseDataProvider; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.ArgumentMatchers; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnit; -import org.mockito.junit.MockitoRule; - -@RunWith(DataProviderRunner.class) -public class GraphProtocolTest { - - @Mock DriverExecutionProfile executionProfile; - - @Mock GraphStatement graphStatement; - - @Rule public MockitoRule mockitoRule = MockitoJUnit.rule(); - - @Test - 
@UseDataProvider("protocolObjects") - public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProtocol) { - when(graphStatement.getSubProtocol()).thenReturn(graphProtocol.toInternalCode()); - - GraphProtocol inferredProtocol = - GraphConversions.inferSubProtocol(graphStatement, executionProfile); - - assertThat(inferredProtocol).isEqualTo(graphProtocol); - Mockito.verifyZeroInteractions(executionProfile); - } - - @Test - @UseDataProvider("protocolStrings") - public void should_pickup_graph_protocol_and_parse_from_string_config(String stringConfig) { - when(executionProfile.getString( - ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.any())) - .thenReturn(stringConfig); - - GraphProtocol inferredProtocol = - GraphConversions.inferSubProtocol(graphStatement, executionProfile); - assertThat(inferredProtocol.toInternalCode()).isEqualTo(stringConfig); - } - - @Test - public void should_use_graphson2_as_default_protocol_when_parsing() { - when(executionProfile.getString( - ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.anyString())) - .thenAnswer(i -> i.getArguments()[1]); - GraphProtocol inferredProtocol = - GraphConversions.inferSubProtocol(graphStatement, executionProfile); - assertThat(inferredProtocol).isEqualTo(GraphProtocol.GRAPHSON_2_0); - } - - @Test - public void should_fail_if_graph_protocol_used_is_invalid() { - assertThatThrownBy(() -> GraphProtocol.fromString("invalid")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Graph protocol used [\"invalid\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); - } - - @Test - public void should_fail_if_graph_protocol_used_is_graphson_3() { - assertThatThrownBy(() -> GraphProtocol.fromString("graphson-3.0")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage( - "Graph protocol used [\"graphson-3.0\"] unknown. 
Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); - } - - @DataProvider - public static Object[][] protocolObjects() { - return new Object[][] { - {GraphProtocol.GRAPHSON_1_0}, {GraphProtocol.GRAPHSON_2_0}, {GraphProtocol.GRAPH_BINARY_1_0} - }; - } - - @DataProvider - public static Object[][] protocolStrings() { - // putting manual strings here to be sure to be notified if a value in - // GraphProtocol ever changes - return new Object[][] {{"graphson-1.0"}, {"graphson-2.0"}, {"graph-binary-1.0"}}; - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 876e345a4b4..381f2aa5502 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -15,6 +15,7 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; @@ -48,6 +49,7 @@ import com.datastax.dse.protocol.internal.request.RawBytesQuery; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.tracker.RequestTracker; @@ -97,7 +99,7 @@ public void setup() { } @Test - @UseDataProvider("bytecodeEnabledGraphProtocols") + 
@UseDataProvider("supportedGraphProtocols") public void should_create_query_message_from_script_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -127,7 +129,7 @@ public void should_create_query_message_from_script_statement(GraphProtocol grap } @Test - @UseDataProvider("bytecodeEnabledGraphProtocols") + @UseDataProvider("supportedGraphProtocols") public void should_create_query_message_from_fluent_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -161,7 +163,7 @@ public void should_create_query_message_from_fluent_statement(GraphProtocol grap } @Test - @UseDataProvider("bytecodeEnabledGraphProtocols") + @UseDataProvider("supportedGraphProtocols") public void should_create_query_message_from_batch_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -225,12 +227,13 @@ private void testQueryRequestAndPayloadContents( } @Test - public void should_set_correct_query_options_from_graph_statement() throws IOException { + @UseDataProvider("supportedGraphProtocols") + public void should_set_correct_query_options_from_graph_statement(GraphProtocol subProtocol) + throws IOException { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setQueryParam("name", "value"); - GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); @@ -253,9 +256,7 @@ public void should_set_correct_query_options_from_graph_statement() throws IOExc assertThat(options.defaultTimestamp).isEqualTo(-9223372036854775808L); assertThat(options.positionalValues) .isEqualTo( - ImmutableList.of( - GraphSONUtils.serializeToByteBuffer( - ImmutableMap.of("name", "value"), subProtocol))); + ImmutableList.of(serialize(ImmutableMap.of("name", "value"), subProtocol, module))); m = 
GraphConversions.createMessageFromGraphStatement( @@ -270,12 +271,12 @@ public void should_set_correct_query_options_from_graph_statement() throws IOExc } @Test - public void should_create_payload_from_config_options() { + @UseDataProvider("supportedGraphProtocols") + public void should_create_payload_from_config_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); - GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); @@ -313,7 +314,8 @@ public void should_create_payload_from_config_options() { } @Test - public void should_create_payload_from_statement_options() { + @UseDataProvider("supportedGraphProtocols") + public void should_create_payload_from_statement_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = @@ -325,7 +327,6 @@ public void should_create_payload_from_statement_options() { .setWriteConsistencyLevel(DefaultConsistencyLevel.THREE) .setSystemQuery(false) .build(); - GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); @@ -368,12 +369,12 @@ public void should_create_payload_from_statement_options() { } @Test - public void should_not_set_graph_name_on_system_queries() { + @UseDataProvider("supportedGraphProtocols") + public void should_not_set_graph_name_on_system_queries(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setSystemQuery(true); - GraphProtocol subProtocol = GraphProtocol.GRAPHSON_2_0; 
GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); @@ -391,21 +392,24 @@ public void should_not_set_graph_name_on_system_queries() { } @Test - @UseDataProvider("bytecodeEnabledGraphProtocols") - public void should_return_results_for_statements(GraphProtocol graphProtocol) throws IOException { - DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); + @UseDataProvider("supportedGraphProtocolsWithDseVersions") + public void should_return_results_for_statements(GraphProtocol graphProtocol, Version dseVersion) + throws IOException { + DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); GraphBinaryModule module = createGraphBinaryModule(mockContext); - GraphPagingSupportChecker graphPagingSupportChecker = mock(GraphPagingSupportChecker.class); - when(graphPagingSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); + GraphSupportChecker graphSupportChecker = mock(GraphSupportChecker.class); + when(graphSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); + when(graphSupportChecker.inferGraphProtocol(any(), any(), any())).thenReturn(graphProtocol); GraphRequestAsyncProcessor p = - Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphPagingSupportChecker)); + Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()) + .withDseVersionInMetadata(dseVersion) // ideally we would be able to provide a function here to // produce results instead of a static predefined response. // Function to which we would pass the harness instance or a (mocked)DriverContext. 
@@ -424,10 +428,10 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th List nodes = grs.all(); assertThat(nodes.size()).isEqualTo(1); - GraphNode node = nodes.get(0); - assertThat(node.isVertex()).isTrue(); + GraphNode graphNode = nodes.get(0); + assertThat(graphNode.isVertex()).isTrue(); - Vertex vRead = node.asVertex(); + Vertex vRead = graphNode.asVertex(); assertThat(vRead.label()).isEqualTo("person"); assertThat(vRead.id()).isEqualTo(1); if (!graphProtocol.isGraphBinary()) { @@ -439,35 +443,60 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol) th } @DataProvider - public static Object[][] bytecodeEnabledGraphProtocols() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; + public static Object[][] supportedGraphProtocols() { + return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}, {GRAPHSON_1_0}}; + } + + @DataProvider + public static Object[][] supportedGraphProtocolsWithDseVersions() { + return new Object[][] { + {GRAPHSON_1_0, GraphTestUtil.DSE_6_7_0}, + {GRAPHSON_1_0, GraphTestUtil.DSE_6_8_0}, + {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, + {GRAPHSON_2_0, GraphTestUtil.DSE_6_8_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_7_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, + }; + } + + @DataProvider + public static Object[][] dseVersionsWithDefaultGraphProtocol() { + return new Object[][] { + {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, + }; } @Test - public void should_invoke_request_tracker() throws IOException { - DseDriverContext mockContext = Mockito.mock(DseDriverContext.class); + @UseDataProvider("dseVersionsWithDefaultGraphProtocol") + public void should_invoke_request_tracker(GraphProtocol defaultProtocol, Version dseVersion) + throws IOException { + DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); GraphBinaryModule module = createGraphBinaryModule(mockContext); + GraphSupportChecker graphSupportChecker = 
mock(GraphSupportChecker.class); + when(graphSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); + when(graphSupportChecker.inferGraphProtocol(any(), any(), any())).thenReturn(defaultProtocol); + GraphRequestAsyncProcessor p = - Mockito.spy(new GraphRequestAsyncProcessor(mockContext, new GraphPagingSupportChecker())); + Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() - .withResponse( - node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, module))) + .withDseVersionInMetadata(dseVersion) + .withResponse(node, defaultDseFrameOf(singleGraphRow(defaultProtocol, module))) .build(); RequestTracker requestTracker = mock(RequestTracker.class); when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); - GraphPagingSupportChecker graphPagingSupportChecker = mock(GraphPagingSupportChecker.class); - when(graphPagingSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); + GraphResultSet grs = new GraphRequestSyncProcessor( new GraphRequestAsyncProcessor( - (DseDriverContext) harness.getContext(), graphPagingSupportChecker)) + (DseDriverContext) harness.getContext(), graphSupportChecker)) .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); List nodes = grs.all(); @@ -479,8 +508,12 @@ node, defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPHSON_2_0, module))) Vertex actual = graphNode.asVertex(); assertThat(actual.label()).isEqualTo("person"); assertThat(actual.id()).isEqualTo(1); - assertThat(actual.property("name").id()).isEqualTo(11); - assertThat(actual.property("name").value()).isEqualTo("marko"); + if (!defaultProtocol.isGraphBinary()) { + // GraphBinary does not encode properties regardless of whether they are present in the + // parent element or not :/ 
+ assertThat(actual.property("name").id()).isEqualTo(11); + assertThat(actual.property("name").value()).isEqualTo("marko"); + } verify(requestTracker) .onSuccess( diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index 240ec595af6..f52d636181a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -22,6 +22,7 @@ import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; @@ -34,7 +35,6 @@ import com.datastax.oss.protocol.internal.Frame; import io.netty.channel.EventLoop; import java.time.Duration; -import java.util.Optional; import javax.annotation.Nullable; import org.mockito.ArgumentMatchers; import org.mockito.Mock; @@ -54,7 +54,10 @@ public class GraphRequestHandlerTestHarness extends RequestHandlerTestHarness { @Mock EventLoop eventLoop; protected GraphRequestHandlerTestHarness( - Builder builder, @Nullable String graphProtocolForTestConfig, Duration graphTimeout) { + Builder builder, + @Nullable String graphProtocolForTestConfig, + Duration graphTimeout, + @Nullable Version dseVersionForTestMetadata) { super(builder); // not mocked by RequestHandlerTestHarness, will be used when DseDriverOptions.GRAPH_TIMEOUT @@ -63,8 +66,7 @@ protected GraphRequestHandlerTestHarness( // default graph options as in the reference.conf file 
when(defaultProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)).thenReturn("g"); - when(defaultProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL, "graphson-2.0")) - .thenReturn("graphson-2.0"); + when(defaultProfile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(Boolean.FALSE); when(defaultProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); when(defaultProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); when(defaultProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)) @@ -81,9 +83,13 @@ protected GraphRequestHandlerTestHarness( when(testProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE)).thenReturn(false); when(testProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES)).thenReturn(true); when(testProfile.getString(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, null)).thenReturn("a"); - when(testProfile.getString( - ArgumentMatchers.eq(DseDriverOption.GRAPH_SUB_PROTOCOL), ArgumentMatchers.anyString())) - .thenReturn(Optional.ofNullable(graphProtocolForTestConfig).orElse("graphson-2.0")); + when(testProfile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)) + .thenReturn(graphProtocolForTestConfig != null); + // only mock the config if graphProtocolForTestConfig is not null + if (graphProtocolForTestConfig != null) { + when(testProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) + .thenReturn(graphProtocolForTestConfig); + } when(testProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); when(testProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); when(testProfile.getString(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, null)) @@ -138,6 +144,10 @@ protected GraphRequestHandlerTestHarness( when(dseDriverContext.getRequestThrottler()) .thenReturn(new PassThroughRequestThrottler(dseDriverContext)); when(dseDriverContext.getRequestTracker()).thenReturn(new NoopRequestTracker(dseDriverContext)); + // if 
DSE Version is specified for test metadata, then we need to mock that up on the context + if (dseVersionForTestMetadata != null) { + GraphTestUtil.mockContext(dseDriverContext, true, dseVersionForTestMetadata); + } } @Override @@ -153,6 +163,7 @@ public static class Builder extends RequestHandlerTestHarness.Builder { private String graphProtocolForTestConfig; private Duration graphTimeout = Duration.ZERO; + private Version dseVersionForTestMetadata; public Builder withGraphProtocolForTestConfig(String protocol) { this.graphProtocolForTestConfig = protocol; @@ -197,14 +208,20 @@ public GraphRequestHandlerTestHarness.Builder withProtocolVersion( return this; } - @Override - public GraphRequestHandlerTestHarness build() { - return new GraphRequestHandlerTestHarness(this, graphProtocolForTestConfig, graphTimeout); + public Builder withDseVersionInMetadata(Version dseVersion) { + this.dseVersionForTestMetadata = dseVersion; + return this; } - public RequestHandlerTestHarness.Builder withGraphTimeout(Duration globalTimeout) { + public Builder withGraphTimeout(Duration globalTimeout) { this.graphTimeout = globalTimeout; return this; } + + @Override + public GraphRequestHandlerTestHarness build() { + return new GraphRequestHandlerTestHarness( + this, graphProtocolForTestConfig, graphTimeout, dseVersionForTestMetadata); + } } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java new file mode 100644 index 00000000000..0ee43fcb64a --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java @@ -0,0 +1,325 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.AUTO; +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.DISABLED; +import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.ENABLED; +import static com.datastax.dse.driver.internal.core.graph.GraphSupportChecker.MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.DseProtocolFeature; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import 
com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnit; +import org.mockito.junit.MockitoRule; + +@RunWith(DataProviderRunner.class) +public class GraphSupportCheckerTest { + + @Mock DriverExecutionProfile executionProfile; + + @Mock GraphStatement graphStatement; + + @Rule public MockitoRule mockitoRule = MockitoJUnit.rule(); + + @UseDataProvider("pagingEnabled") + @Test + public void should_check_if_paging_is_supported( + boolean protocolWithPagingSupport, + PagingEnabledOptions statementGraphPagingEnabled, + PagingEnabledOptions contextGraphPagingEnabled, + List nodeDseVersions, + boolean expected) { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(protocolWithPagingSupport); + statementGraphPagingEnabled(graphStatement, statementGraphPagingEnabled); + contextGraphPagingEnabled(context, contextGraphPagingEnabled); + addNodeWithDseVersion(context, nodeDseVersions); + + // when + boolean pagingEnabled = new GraphSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(expected); + } + + @Test + public void should_not_support_paging_when_statement_profile_not_present() { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(true); + contextGraphPagingEnabled(context, DISABLED); + addNodeWithDseVersion( + context, Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING)); + + // when + boolean pagingEnabled = new GraphSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(false); + } + + @Test + 
public void + should_support_paging_when_statement_profile_not_present_but_context_profile_has_paging_enabled() { + // given + GraphStatement graphStatement = mock(GraphStatement.class); + InternalDriverContext context = protocolWithPagingSupport(true); + contextGraphPagingEnabled(context, ENABLED); + addNodeWithDseVersion( + context, Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING)); + + // when + boolean pagingEnabled = new GraphSupportChecker().isPagingEnabled(graphStatement, context); + + // then + assertThat(pagingEnabled).isEqualTo(true); + } + + @DataProvider() + public static Object[][] pagingEnabled() { + List listWithGraphPagingNode = + Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING); + List listWithoutGraphPagingNode = Collections.singletonList(Version.parse("6.7.0")); + List listWithNull = Collections.singletonList(null); + List listWithTwoNodesOneNotSupporting = + Arrays.asList(Version.parse("6.7.0"), MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING); + + return new Object[][] { + {false, ENABLED, ENABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithoutGraphPagingNode, true}, + {true, ENABLED, DISABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithGraphPagingNode, true}, + {true, ENABLED, ENABLED, listWithNull, true}, + {true, ENABLED, ENABLED, listWithTwoNodesOneNotSupporting, true}, + {true, DISABLED, ENABLED, listWithGraphPagingNode, false}, + {true, DISABLED, AUTO, listWithGraphPagingNode, false}, + {true, DISABLED, DISABLED, listWithGraphPagingNode, false}, + {true, AUTO, AUTO, listWithGraphPagingNode, true}, + {true, AUTO, DISABLED, listWithGraphPagingNode, true}, + {false, AUTO, AUTO, listWithGraphPagingNode, false}, + {true, AUTO, AUTO, listWithTwoNodesOneNotSupporting, false}, + {true, AUTO, AUTO, listWithNull, false}, + }; + } + + private void addNodeWithDseVersion(InternalDriverContext context, List dseVersions) { + MetadataManager manager = mock(MetadataManager.class); 
+ when(context.getMetadataManager()).thenReturn(manager); + Metadata metadata = mock(Metadata.class); + when(manager.getMetadata()).thenReturn(metadata); + Map nodes = new HashMap<>(); + for (Version v : dseVersions) { + Node node = mock(Node.class); + Map extras = new HashMap<>(); + extras.put(DseNodeProperties.DSE_VERSION, v); + when(node.getExtras()).thenReturn(extras); + nodes.put(UUID.randomUUID(), node); + } + when(metadata.getNodes()).thenReturn(nodes); + } + + private void contextGraphPagingEnabled( + InternalDriverContext context, PagingEnabledOptions option) { + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) + .thenReturn(option.name()); + DriverConfig config = mock(DriverConfig.class); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(driverExecutionProfile); + } + + private InternalDriverContext protocolWithPagingSupport(boolean pagingSupport) { + InternalDriverContext context = mock(InternalDriverContext.class); + when(context.getProtocolVersion()).thenReturn(DefaultProtocolVersion.V4); + ProtocolVersionRegistry protocolVersionRegistry = mock(ProtocolVersionRegistry.class); + when(protocolVersionRegistry.supports( + DefaultProtocolVersion.V4, DseProtocolFeature.CONTINUOUS_PAGING)) + .thenReturn(pagingSupport); + when(context.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); + return context; + } + + private void statementGraphPagingEnabled( + GraphStatement graphStatement, PagingEnabledOptions option) { + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + when(driverExecutionProfile.getString(DseDriverOption.GRAPH_PAGING_ENABLED)) + .thenReturn(option.name()); + when(graphStatement.getExecutionProfile()).thenReturn(driverExecutionProfile); + } + + @Test + @UseDataProvider("dseVersions") + public void 
should_determine_default_graph_protocol_from_dse_version( + Version[] dseVersions, GraphProtocol expectedProtocol) { + // mock up the metadata for the context + // using 'true' here will treat null test Versions as no DSE_VERSION info in the metadata + DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersions); + GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(mockContext); + assertThat(graphProtocol).isEqualTo(expectedProtocol); + } + + @Test + @UseDataProvider("dseVersions") + public void should_determine_default_graph_protocol_from_dse_version_with_null_versions( + Version[] dseVersions, GraphProtocol expectedProtocol) { + // mock up the metadata for the context + // using 'false' here will treat null test Versions as explicit NULL info for DSE_VERSION + DseDriverContext mockContext = GraphTestUtil.mockContext(false, dseVersions); + GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(mockContext); + assertThat(graphProtocol).isEqualTo(expectedProtocol); + } + + @DataProvider + public static Object[][] dseVersions() { + return new Object[][] { + {new Version[] {Version.parse("5.0.3")}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {Version.parse("6.0.1")}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {Version.parse("6.7.4")}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {Version.parse("6.8.0")}, GraphProtocol.GRAPH_BINARY_1_0}, + {new Version[] {Version.parse("7.0.0")}, GraphProtocol.GRAPH_BINARY_1_0}, + {new Version[] {Version.parse("5.0.3"), Version.parse("6.8.0")}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {Version.parse("6.7.4"), Version.parse("6.8.0")}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {Version.parse("6.8.0"), Version.parse("6.7.4")}, GraphProtocol.GRAPHSON_2_0}, + { + new Version[] {Version.parse("6.8.0"), Version.parse("7.0.0")}, + GraphProtocol.GRAPH_BINARY_1_0 + }, + {new Version[] {Version.parse("6.7.4"), Version.parse("6.7.4")}, GraphProtocol.GRAPHSON_2_0}, 
+ { + new Version[] {Version.parse("6.8.0"), Version.parse("6.8.0")}, + GraphProtocol.GRAPH_BINARY_1_0 + }, + {null, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {null}, GraphProtocol.GRAPHSON_2_0}, + {new Version[] {null, Version.parse("6.8.0")}, GraphProtocol.GRAPHSON_2_0}, + { + new Version[] {Version.parse("6.8.0"), Version.parse("7.0.0"), null}, + GraphProtocol.GRAPHSON_2_0 + }, + }; + } + + @Test + @UseDataProvider("protocolObjects") + public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProtocol) { + when(graphStatement.getSubProtocol()).thenReturn(graphProtocol.toInternalCode()); + + GraphProtocol inferredProtocol = + new GraphSupportChecker() + .inferGraphProtocol( + graphStatement, executionProfile, mock(InternalDriverContext.class)); + + assertThat(inferredProtocol).isEqualTo(graphProtocol); + verifyZeroInteractions(executionProfile); + } + + @Test + @UseDataProvider("protocolStrings") + public void should_pickup_graph_protocol_and_parse_from_string_config( + String stringConfig, Version dseVersion) { + when(executionProfile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(Boolean.TRUE); + when(executionProfile.getString(eq(DseDriverOption.GRAPH_SUB_PROTOCOL))) + .thenReturn(stringConfig); + + GraphProtocol inferredProtocol = + new GraphSupportChecker() + .inferGraphProtocol( + graphStatement, executionProfile, GraphTestUtil.mockContext(true, dseVersion)); + assertThat(inferredProtocol.toInternalCode()).isEqualTo(stringConfig); + } + + @Test + @UseDataProvider("dseVersions6") + public void should_use_correct_default_protocol_when_parsing(Version dseVersion) { + GraphProtocol inferredProtocol = + new GraphSupportChecker() + .inferGraphProtocol( + graphStatement, executionProfile, GraphTestUtil.mockContext(true, dseVersion)); + // For DSE 6.8 and newer, the default should be GraphSON binary + // for DSE older than 6.8, the default should be GraphSON2 + assertThat(inferredProtocol) + .isEqualTo( + 
(dseVersion.compareTo(GraphTestUtil.DSE_6_8_0) < 0) + ? GraphProtocol.GRAPHSON_2_0 + : GraphProtocol.GRAPH_BINARY_1_0); + } + + @Test + public void should_fail_if_graph_protocol_used_is_invalid() { + assertThatThrownBy(() -> GraphProtocol.fromString("invalid")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Graph protocol used [\"invalid\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); + } + + @Test + public void should_fail_if_graph_protocol_used_is_graphson_3() { + assertThatThrownBy(() -> GraphProtocol.fromString("graphson-3.0")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage( + "Graph protocol used [\"graphson-3.0\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); + } + + @DataProvider + public static Object[][] protocolObjects() { + return new Object[][] { + {GraphProtocol.GRAPHSON_1_0}, {GraphProtocol.GRAPHSON_2_0}, {GraphProtocol.GRAPH_BINARY_1_0} + }; + } + + @DataProvider + public static Object[][] protocolStrings() { + // putting manual strings here to be sure to be notified if a value in + // GraphProtocol ever changes + return new Object[][] { + {"graphson-1.0", GraphTestUtil.DSE_6_7_0}, + {"graphson-1.0", GraphTestUtil.DSE_6_8_0}, + {"graphson-2.0", GraphTestUtil.DSE_6_7_0}, + {"graphson-2.0", GraphTestUtil.DSE_6_8_0}, + {"graph-binary-1.0", GraphTestUtil.DSE_6_7_0}, + {"graph-binary-1.0", GraphTestUtil.DSE_6_8_0}, + }; + } + + @DataProvider + public static Object[][] dseVersions6() { + return new Object[][] {{GraphTestUtil.DSE_6_7_0}, {GraphTestUtil.DSE_6_8_0}}; + } +} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java new file mode 100644 index 00000000000..95d63c57f5d --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java @@ -0,0 +1,69 @@ +/* + * 
Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; + +/** Utility for common Graph Unit testing. */ +class GraphTestUtil { + + // Default GraphSON sub protocol version differs based on DSE version, so test with a version less + // than DSE 6.8 as well as DSE 6.8. + static final Version DSE_6_7_0 = Objects.requireNonNull(Version.parse("6.7.0")); + static final Version DSE_6_8_0 = Objects.requireNonNull(Version.parse("6.8.0")); + + static DseDriverContext mockContext(boolean treatNullAsMissing, Version... dseVersions) { + DseDriverContext mockContext = mock(DseDriverContext.class); + return mockContext(mockContext, treatNullAsMissing, dseVersions); + } + + static DseDriverContext mockContext( + DseDriverContext context, boolean treatNullAsMissing, Version... dseVersions) { + // mock bits of the context + MetadataManager metadataManager = mock(MetadataManager.class); + Metadata metadata = mock(Metadata.class); + Map nodeMap = new HashMap<>((dseVersions != null) ? 
dseVersions.length : 1); + if (dseVersions == null) { + Node node = mock(Node.class); + Map nodeExtras = new HashMap<>(1); + if (!treatNullAsMissing) { + // put an explicit null in for DSE_VERSION + nodeExtras.put(DseNodeProperties.DSE_VERSION, null); + } + nodeMap.put(UUID.randomUUID(), node); + when(node.getExtras()).thenReturn(nodeExtras); + } else { + for (Version dseVersion : dseVersions) { + // create a node with DSE version in its extra data + Node node = mock(Node.class); + Map nodeExtras = new HashMap<>(1); + if (dseVersion != null || !treatNullAsMissing) { + nodeExtras.put(DseNodeProperties.DSE_VERSION, dseVersion); + } + nodeMap.put(UUID.randomUUID(), node); + when(node.getExtras()).thenReturn(nodeExtras); + } + } + // return mocked data when requested + when(metadata.getNodes()).thenReturn(nodeMap); + when(metadataManager.getMetadata()).thenReturn(metadata); + when(context.getMetadataManager()).thenReturn(metadataManager); + return context; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java index 63266c0303d..1d4c75518bd 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java @@ -15,15 +15,11 @@ import com.datastax.dse.driver.api.core.data.geometry.Polygon; import com.datastax.dse.driver.api.core.graph.predicates.Geo; import com.datastax.dse.driver.api.core.type.DseDataTypes; -import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; -import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; -import com.datastax.dse.driver.internal.core.graph.GraphProtocol; import com.datastax.oss.driver.api.core.data.CqlDuration; import com.datastax.oss.driver.api.core.data.UdtValue; import 
com.datastax.oss.driver.api.core.type.TupleType; import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.core.uuid.Uuids; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -36,37 +32,13 @@ import java.time.LocalTime; import java.util.Arrays; import java.util.Map; -import org.junit.ClassRule; import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; public abstract class CoreGraphDataTypeITBase { - private static CustomCcmRule ccmRule = - CustomCcmRule.builder() - .withDseWorkloads("graph") - .withDseConfiguration( - "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") - .withDseConfiguration("graph.max_query_params", 32) - .build(); - - private static DseSessionRule sessionRule = - new DseSessionRuleBuilder(ccmRule) - .withCreateGraph() - .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()) - .withCoreEngine() - .build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - - protected DseSession session() { - return sessionRule.session(); - } + protected abstract DseSession session(); - protected String graphName() { - return sessionRule.getGraphName(); - } + protected abstract String graphName(); @Test public void should_create_and_retrieve_correct_data_with_types() { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index ec1d6219979..3e83397d556 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -13,8 +13,6 @@ import 
com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; -import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; -import com.datastax.dse.driver.internal.core.graph.GraphProtocol; import com.datastax.dse.driver.internal.core.graph.MultiPageGraphResultSet; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -40,20 +38,17 @@ @RunWith(DataProviderRunner.class) public class GraphPagingIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static DseSessionRule sessionRule = - new DseSessionRuleBuilder(ccmRule) - .withCreateGraph() - .withCoreEngine() - .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()) - .build(); + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance( @@ -62,16 +57,16 @@ public static void setupSchema() { + ".clusterBy('cc', Int)" + ".property('name', Text)" + ".create();") - .setGraphName(sessionRule.getGraphName())); + .setGraphName(SESSION_RULE.getGraphName())); for (int i = 1; i <= 100; i++) { - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance( String.format( "g.addV('person').property('pk',0).property('cc',%d).property('name', '%s');", i, "user" + i)) - .setGraphName(sessionRule.getGraphName())); + 
.setGraphName(SESSION_RULE.getGraphName())); } } @@ -88,11 +83,11 @@ public void synchronous_paging_with_options(Options options) { // when GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); @@ -124,11 +119,11 @@ public void synchronous_paging_with_options_when_auto(Options options) { // when GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); @@ -170,11 +165,11 @@ public void synchronous_options_with_paging_disabled_should_fallback_to_single_p // when GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); @@ -206,11 +201,11 @@ public void asynchronous_paging_with_options(Options options) // when CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); @@ -232,11 +227,11 @@ public void asynchronous_paging_with_options_when_auto(Options options) // when CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) 
.setTraversalSource("g") .setExecutionProfile(profile)); @@ -258,11 +253,11 @@ public void asynchronous_options_with_paging_disabled_should_fallback_to_single_ // when CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); @@ -333,11 +328,11 @@ public void should_cancel_result_set() { // when GraphStatement statement = ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile); MultiPageGraphResultSet results = - (MultiPageGraphResultSet) sessionRule.session().execute(statement); + (MultiPageGraphResultSet) SESSION_RULE.session().execute(statement); assertThat(((MultiPageGraphResultSet.RowIterator) results.iterator()).isCancelled()).isFalse(); assertThat(((CountingIterator) results.iterator()).remaining()).isEqualTo(10); @@ -359,13 +354,13 @@ public void should_trigger_global_timeout_sync_from_config() { // when try { - ccmRule.getCcmBridge().pause(1); + CCM_RULE.getCcmBridge().pause(1); try { - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); fail("Expecting DriverTimeoutException"); @@ -373,7 +368,7 @@ public void should_trigger_global_timeout_sync_from_config() { assertThat(e).hasMessage("Query timed out after " + timeout); } } finally { - ccmRule.getCcmBridge().resume(1); + CCM_RULE.getCcmBridge().resume(1); } } @@ -384,13 +379,13 @@ public void should_trigger_global_timeout_sync_from_statement() { // when try { - 
ccmRule.getCcmBridge().pause(1); + CCM_RULE.getCcmBridge().pause(1); try { - sessionRule + SESSION_RULE .session() .execute( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setTimeout(timeout)); fail("Expecting DriverTimeoutException"); @@ -398,7 +393,7 @@ public void should_trigger_global_timeout_sync_from_statement() { assertThat(e).hasMessage("Query timed out after " + timeout); } } finally { - ccmRule.getCcmBridge().resume(1); + CCM_RULE.getCcmBridge().resume(1); } } @@ -411,13 +406,13 @@ public void should_trigger_global_timeout_async() throws InterruptedException { // when try { - ccmRule.getCcmBridge().pause(1); + CCM_RULE.getCcmBridge().pause(1); CompletionStage result = - sessionRule + SESSION_RULE .session() .executeAsync( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); result.toCompletableFuture().get(); @@ -425,7 +420,7 @@ public void should_trigger_global_timeout_async() throws InterruptedException { } catch (ExecutionException e) { assertThat(e.getCause()).hasMessage("Query timed out after " + timeout); } finally { - ccmRule.getCcmBridge().resume(1); + CCM_RULE.getCcmBridge().resume(1); } } @@ -442,27 +437,27 @@ public void should_trigger_global_timeout_async_after_first_page() throws Interr // when try { CompletionStage firstPageFuture = - sessionRule + SESSION_RULE .session() .executeAsync( ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(sessionRule.getGraphName()) + .setGraphName(SESSION_RULE.getGraphName()) .setTraversalSource("g") .setExecutionProfile(profile)); AsyncGraphResultSet firstPage = firstPageFuture.toCompletableFuture().get(); - ccmRule.getCcmBridge().pause(1); + 
CCM_RULE.getCcmBridge().pause(1); CompletionStage secondPageFuture = firstPage.fetchNextPage(); secondPageFuture.toCompletableFuture().get(); fail("Expecting DriverTimeoutException"); } catch (ExecutionException e) { assertThat(e.getCause()).hasMessage("Query timed out after " + timeout); } finally { - ccmRule.getCcmBridge().resume(1); + CCM_RULE.getCcmBridge().resume(1); } } private DriverExecutionProfile enableGraphPaging() { - return sessionRule + return SESSION_RULE .session() .getContext() .getConfig() @@ -472,7 +467,7 @@ private DriverExecutionProfile enableGraphPaging() { private DriverExecutionProfile enableGraphPaging( Options options, PagingEnabledOptions pagingEnabledOptions) { - return sessionRule + return SESSION_RULE .session() .getContext() .getConfig() @@ -485,6 +480,6 @@ private DriverExecutionProfile enableGraphPaging( } private SocketAddress firstCcmNode() { - return ccmRule.getContactPoints().iterator().next().resolve(); + return CCM_RULE.getContactPoints().iterator().next().resolve(); } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java new file mode 100644 index 00000000000..38f5d9d5f54 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java @@ -0,0 +1,54 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.dse.driver.internal.core.graph.GraphProtocol; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; + +/** Utility for creating commonly used Rule builders for tests. 
*/ +public class GraphTestSupport { + + /** CCM Rule builder for Graph Data Type tests. */ + public static final CustomCcmRule.Builder CCM_BUILDER_WITH_GRAPH = + CustomCcmRule.builder() + .withDseWorkloads("graph") + .withDseConfiguration("graph.max_query_params", 32) + .withDseConfiguration( + "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false"); + + /** CCM Rule builder for general Graph workload tests. */ + public static final CustomCcmRule.Builder GRAPH_CCM_RULE_BUILDER = + CustomCcmRule.builder().withDseWorkloads("graph"); + + /** + * Creates a session rule builder for Classic Graph workloads with the default Graph protocol. The + * default GraphProtocol for Classic Graph: GraphSON 2.0. + * + * @param ccmRule CustomCcmRule configured for Graph workloads + * @return A Session rule builder configured for Classic Graph workloads + */ + public static DseSessionRuleBuilder getClassicGraphSessionBuilder(CustomCcmRule ccmRule) { + return new DseSessionRuleBuilder(ccmRule) + .withCreateGraph() + .withGraphProtocol(GraphProtocol.GRAPHSON_2_0.toInternalCode()); + } + + /** + * Creates a session rule builder for Core Graph workloads with the default Graph protocol. The + * default GraphProtocol for Core Graph: Graph Binary 1.0. 
+ * + * @param ccmRule CustomCcmRule configured for Graph workloads + * @return A Session rule builder configured for Core Graph workloads + */ + public static DseSessionRuleBuilder getCoreGraphSessionBuilder(CustomCcmRule ccmRule) { + return new DseSessionRuleBuilder(ccmRule) + .withCreateGraph() + .withCoreEngine() + .withGraphProtocol(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java index 9b608c2f1fc..b525e725d4e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java @@ -17,6 +17,7 @@ import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -33,22 +34,22 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class ClassicGraphDataTypeRemoteIT extends ClassicGraphDataTypeITBase { - private static CustomCcmRule ccmRule = - CustomCcmRule.builder() - .withDseWorkloads("graph") - .withDseConfiguration( - "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") - .build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + 
GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); } @Override @@ -57,7 +58,7 @@ public CqlSession session() { } private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); @Override public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java index 9d404f99693..7aa6a36df05 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java @@ -8,23 +8,45 @@ import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; +import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import 
com.datastax.dse.driver.api.testinfra.session.DseSessionRule; import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.junit.experimental.categories.Category; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) -@Category(IsolatedTests.class) public class CoreGraphDataTypeRemoteIT extends CoreGraphDataTypeITBase { + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected String graphName() { + return SESSION_RULE.getGraphName(); + } + private final GraphTraversalSource g = DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session()).build()); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java index ff15591e59c..e5526341e69 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java 
+++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java @@ -20,6 +20,7 @@ import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -38,18 +39,19 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMetaPropertiesRemoteIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); /** Builds a simple schema that provides for a vertex with a property with sub properties. */ - public static String metaProps = + public static final String META_PROPS = MAKE_STRICT + ALLOW_SCANS + "schema.propertyKey('sub_prop').Text().create()\n" @@ -66,7 +68,7 @@ public class GraphTraversalMetaPropertiesRemoteIT { @Test public void should_parse_meta_properties() { // given a schema that defines meta properties. 
- sessionRule.session().execute(ScriptGraphStatement.newInstance(metaProps)); + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(META_PROPS)); // when adding a vertex with that meta property Vertex v = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java index 74e36057011..690d5443f10 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -20,6 +20,7 @@ import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -37,18 +38,19 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMultiPropertiesRemoteIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private final GraphTraversalSource g = - 
DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); /** Builds a simple schema that provides for a vertex with a multi-cardinality property. */ - public static final String multiProps = + public static final String MULTI_PROPS = MAKE_STRICT + ALLOW_SCANS + "schema.propertyKey('multi_prop').Text().multiple().create()\n" @@ -64,7 +66,7 @@ public class GraphTraversalMultiPropertiesRemoteIT { @Test public void should_parse_multiple_cardinality_properties() { // given a schema that defines multiple cardinality properties. - sessionRule.session().execute(ScriptGraphStatement.newInstance(multiProps)); + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(MULTI_PROPS)); // when adding a vertex with a multiple cardinality property Vertex v = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java index 2164ed58d5c..13ea575fe21 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.fail; import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; @@ -56,24 +57,29 @@ @DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") public class GraphTraversalRemoteIT { - private static CustomCcmRule ccmRule = 
CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule + SESSION_RULE .session() .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); } private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); /** * Ensures that a previously returned {@link Vertex}'s {@link Vertex#id()} can be used as an input @@ -472,7 +478,7 @@ public void should_allow_use_of_dsl() throws Exception { SocialTraversalSource gSocial = EmptyGraph.instance() .traversal(SocialTraversalSource.class) - .withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); List vertices = gSocial.persons("marko").knows("vadas").toList(); assertThat(vertices.size()).isEqualTo(1); assertThat(vertices.get(0)) diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java index 64a23c1c82e..6786532f5f7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java @@ -18,6 +18,7 @@ import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -33,22 +34,22 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for fluent API support") public class ClassicGraphDataTypeFluentIT extends ClassicGraphDataTypeITBase { - private static CustomCcmRule ccmRule = - CustomCcmRule.builder() - .withDseWorkloads("graph") - .withDseConfiguration( - "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") - .build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - 
sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); } @Override @@ -58,7 +59,7 @@ public CqlSession session() { @Override public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { - return sessionRule + return SESSION_RULE .session() .execute( FluentGraphStatement.newInstance( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java index b7e27404822..9bc710f3231 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.graph.statement; import com.datastax.dse.driver.api.core.graph.ClassicGraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,22 +32,22 @@ @DseRequirement(min = "5.0.4", description = "DSE 5.0.4 required for script API with GraphSON 2") public class ClassicGraphDataTypeScriptIT extends ClassicGraphDataTypeITBase { - private static CustomCcmRule ccmRule = - CustomCcmRule.builder() - .withDseWorkloads("graph") - .withDseConfiguration( - "graph.gremlin_server.scriptEngines.gremlin-groovy.config.sandbox_enabled", "false") 
- .build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); } @Override @@ -56,7 +57,7 @@ public CqlSession session() { @Override public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { - return sessionRule + return SESSION_RULE .session() .execute( ScriptGraphStatement.builder("g.addV(labelP).property(nameP, valueP)") diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java index 032debec46f..93b1ab692b4 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java @@ -9,22 +9,44 @@ import static com.datastax.dse.driver.api.core.graph.DseGraph.g; import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; +import 
com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.junit.experimental.categories.Category; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) -@Category(IsolatedTests.class) public class CoreGraphDataTypeFluentIT extends CoreGraphDataTypeITBase { + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected String graphName() { + return SESSION_RULE.getGraphName(); + } + @Override public Map insertVertexThenReadProperties( Map properties, int vertexID, String vertexLabel) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java index 
b0db0565a35..4a9bb125095 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java @@ -6,21 +6,43 @@ */ package com.datastax.dse.driver.api.core.graph.statement; +import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.graph.CoreGraphDataTypeITBase; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatementBuilder; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; -import org.junit.experimental.categories.Category; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import org.junit.runner.RunWith; @DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) -@Category(IsolatedTests.class) public class CoreGraphDataTypeScriptIT extends CoreGraphDataTypeITBase { + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected String graphName() { + return SESSION_RULE.getGraphName(); + } + @Override protected Map insertVertexThenReadProperties( Map 
properties, int vertexID, String vertexLabel) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java index e3fa4b91256..aee7f5c0d32 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java @@ -23,6 +23,7 @@ import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; @@ -42,17 +43,20 @@ @DseRequirement(min = "6.0") public class GraphTraversalBatchIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - sessionRule + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE .session() 
.execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_NOT_STRICT)); } @@ -80,10 +84,10 @@ public void should_allow_vertex_and_edge_insertions_in_batch() { assertThat(batch.size()).isEqualTo(2); assertThat(batch2.size()).isEqualTo(3); - sessionRule.session().execute(batch2); + SESSION_RULE.session().execute(batch2); assertThat( - sessionRule + SESSION_RULE .session() .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1"))) .one() @@ -91,7 +95,7 @@ public void should_allow_vertex_and_edge_insertions_in_batch() { .hasProperty("age", 1); assertThat( - sessionRule + SESSION_RULE .session() .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch2"))) .one() @@ -99,7 +103,7 @@ public void should_allow_vertex_and_edge_insertions_in_batch() { .hasProperty("age", 2); assertThat( - sessionRule + SESSION_RULE .session() .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1").bothE())) .one() @@ -115,7 +119,7 @@ public void should_fail_if_no_bytecode_in_batch() { BatchGraphStatement.builder().addTraversals(ImmutableList.of()).build(); assertThat(batch.size()).isEqualTo(0); try { - sessionRule.session().execute(batch); + SESSION_RULE.session().execute(batch); fail( "Should have thrown InvalidQueryException because batch does not contain any traversals."); } catch (InvalidQueryException e) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java index e81334ce79f..c6cc5a71262 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java @@ -26,6 +26,7 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphResultSet; import 
com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; @@ -58,20 +59,25 @@ @DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") public class GraphTraversalIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @BeforeClass public static void setupSchema() { - sessionRule + SESSION_RULE .session() .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); - sessionRule.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); } /** @@ -85,7 +91,7 @@ public static void setupSchema() { @Test public void should_use_vertex_id_as_parameter() { GraphResultSet resultSet = - sessionRule.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + SESSION_RULE.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); List results = 
resultSet.all(); @@ -93,7 +99,7 @@ public void should_use_vertex_id_as_parameter() { Vertex marko = results.get(0).asVertex(); assertThat(marko).hasProperty("name", "marko"); - resultSet = sessionRule.session().execute(newInstance(g.V(marko.id()))); + resultSet = SESSION_RULE.session().execute(newInstance(g.V(marko.id()))); results = resultSet.all(); assertThat(results.size()).isEqualTo(1); @@ -113,7 +119,7 @@ public void should_use_vertex_id_as_parameter() { @Test public void should_use_edge_id_as_parameter() { GraphResultSet resultSet = - sessionRule.session().execute(newInstance(g.E().has("weight", 0.2f))); + SESSION_RULE.session().execute(newInstance(g.E().has("weight", 0.2f))); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); @@ -121,7 +127,7 @@ public void should_use_edge_id_as_parameter() { Edge created = results.get(0).asEdge(); assertThat(created).hasProperty("weight", 0.2f).hasInVLabel("software").hasOutVLabel("person"); - resultSet = sessionRule.session().execute(newInstance(g.E(created.id()).inV())); + resultSet = SESSION_RULE.session().execute(newInstance(g.E(created.id()).inV())); results = resultSet.all(); assertThat(results.size()).isEqualTo(1); Vertex lop = results.get(0).asVertex(); @@ -140,7 +146,7 @@ public void should_use_edge_id_as_parameter() { @Test public void should_deserialize_vertex_id_as_map() { GraphResultSet resultSet = - sessionRule.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + SESSION_RULE.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); @@ -173,7 +179,7 @@ public void should_handle_result_object_of_mixed_types() { // find all software vertices and select name, language, and find all vertices that created such // software. 
GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -230,7 +236,7 @@ public void should_handle_result_object_of_mixed_types() { */ @Test public void should_return_zero_results() { - GraphResultSet rs = sessionRule.session().execute(newInstance(g.V().hasLabel("notALabel"))); + GraphResultSet rs = SESSION_RULE.session().execute(newInstance(g.V().hasLabel("notALabel"))); assertThat(rs.all().size()).isZero(); } @@ -245,7 +251,7 @@ public void should_return_zero_results_graphson_2() { GraphStatement simpleGraphStatement = ScriptGraphStatement.newInstance("g.V().hasLabel('notALabel')"); - GraphResultSet rs = sessionRule.session().execute(simpleGraphStatement); + GraphResultSet rs = SESSION_RULE.session().execute(simpleGraphStatement); assertThat(rs.one()).isNull(); } @@ -266,7 +272,7 @@ public void should_return_zero_results_graphson_2() { public void should_handle_lambdas() { // Find all people marko knows and the software they created. GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -292,7 +298,7 @@ public void should_handle_lambdas() { @Test public void should_resolve_path_with_some_labels() { GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -332,7 +338,7 @@ public void should_resolve_path_with_some_labels() { @Test public void should_resolve_path_with_labels() { GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -373,7 +379,7 @@ public void should_resolve_path_with_labels() { @Test public void should_resolve_path_without_labels() { GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -408,7 +414,7 @@ public void should_parse_tree() { // Get a tree structure showing the paths from mark to people he knows to software they've // created. 
GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute( newInstance( @@ -437,7 +443,7 @@ public void should_parse_tree() { @Test public void should_handle_subgraph() { GraphResultSet rs = - sessionRule + SESSION_RULE .session() .execute(newInstance(g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph"))); @@ -462,7 +468,7 @@ public void should_allow_use_of_dsl() throws Exception { GraphStatement gs = newInstance(gSocial.persons("marko").knows("vadas")); - GraphResultSet rs = sessionRule.session().execute(gs); + GraphResultSet rs = SESSION_RULE.session().execute(gs); List results = rs.all(); assertThat(results.size()).isEqualTo(1); @@ -480,7 +486,7 @@ public void should_allow_use_of_dsl() throws Exception { */ @Test public void should_return_correct_results_when_bulked() { - GraphResultSet rs = sessionRule.session().execute(newInstance(g.E().label().barrier())); + GraphResultSet rs = SESSION_RULE.session().execute(newInstance(g.E().label().barrier())); List results = rs.all().stream().map(GraphNode::asString).sorted().collect(Collectors.toList()); @@ -496,7 +502,7 @@ public void should_handle_asynchronous_execution() { StringBuilder names = new StringBuilder(); CompletionStage future = - sessionRule + SESSION_RULE .session() .executeAsync(FluentGraphStatement.newInstance(g.V().hasLabel("person"))); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java index 1486d3fedb6..ea3ee972c24 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java @@ -24,6 +24,7 @@ import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import 
com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -41,12 +42,13 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMetaPropertiesIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); /** Builds a simple schema that provides for a vertex with a property with sub properties. 
*/ private static final String META_PROPS = @@ -65,10 +67,10 @@ public class GraphTraversalMetaPropertiesIT { */ @Test public void should_parse_meta_properties() { - sessionRule.session().execute(ScriptGraphStatement.newInstance(META_PROPS)); + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(META_PROPS)); GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( newInstance( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java index deb07050411..947eb59b04d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -22,6 +22,7 @@ import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.testinfra.DseRequirement; @@ -38,12 +39,13 @@ @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMultiPropertiesIT { - private static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); + private static final SessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - @ClassRule public static TestRule chain = 
RuleChain.outerRule(ccmRule).around(sessionRule); + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); /** Builds a simple schema that provides for a vertex with a multi-cardinality property. */ private static final String MULTI_PROPS = @@ -62,11 +64,11 @@ public class GraphTraversalMultiPropertiesIT { @Test public void should_parse_multiple_cardinality_properties() { // given a schema that defines multiple cardinality properties. - sessionRule.session().execute(ScriptGraphStatement.newInstance(MULTI_PROPS)); + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(MULTI_PROPS)); // when adding a vertex with a multiple cardinality property GraphResultSet result = - sessionRule + SESSION_RULE .session() .execute( newInstance( diff --git a/manual/core/dse/graph/options/README.md b/manual/core/dse/graph/options/README.md index 82957905018..ad439448aa0 100644 --- a/manual/core/dse/graph/options/README.md +++ b/manual/core/dse/graph/options/README.md @@ -150,4 +150,11 @@ result.one().asVertex(); ``` If you run into that situation, force the sub-protocol to `graphson-1.0` for script statements -(that's not necessary for fluent statements). \ No newline at end of file +(that's not necessary for fluent statements). + +Currently, if the Graph sub-protocol version is not specified on a given GraphStatement, and it's +not explicitly set through `advanced.graph.sub-protocol` in configuration, the version of DSE to +which the driver is connected will determine the default sub-protocol version used by the driver. +For DSE 6.8.0 and later, the driver will pick "graph-binary-1.0" as the default sub-protocol +version. For DSE 6.7.x and older (or in cases where the driver can't determine the DSE version), the +driver will pick "graphson-2.0" as the default sub-protocol version. 
\ No newline at end of file diff --git a/pom.xml b/pom.xml index 772e744ac34..7ac3397cc82 100644 --- a/pom.xml +++ b/pom.xml @@ -83,27 +83,6 @@ ${skipTests} - - - - Apache - Apache Artifactory for Tinkerpop snapshot releases - https://repository.apache.org/content/groups/snapshots - - warn - false - never - - - warn - true - always - - - - From 43dca1ff054911de7b924d841c85ac0096240e8d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 29 Nov 2019 11:13:58 +0100 Subject: [PATCH 330/979] Move graph data providers to GraphTestUtils --- .../ContinuousGraphRequestHandlerTest.java | 2 +- .../core/graph/GraphRequestHandlerTest.java | 49 +++++-------------- .../internal/core/graph/GraphTestUtils.java | 30 ++++++++++++ 3 files changed, 42 insertions(+), 39 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index e7bb70b182f..6e9cc0a68d5 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -56,7 +56,7 @@ public void setup() { } @Test - @UseDataProvider(location = GraphRequestHandlerTest.class, value = "supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_return_paged_results(GraphProtocol graphProtocol) throws IOException { GraphBinaryModule module = createGraphBinaryModule(mockContext); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 381f2aa5502..39f9e83cdeb 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -15,9 +15,6 @@ */ package com.datastax.dse.driver.internal.core.graph; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.defaultDseFrameOf; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.serialize; @@ -62,7 +59,6 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Query; -import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.io.IOException; @@ -99,7 +95,7 @@ public void setup() { } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_create_query_message_from_script_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -129,7 +125,7 @@ public void should_create_query_message_from_script_statement(GraphProtocol grap } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_create_query_message_from_fluent_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -163,7 +159,7 @@ public void should_create_query_message_from_fluent_statement(GraphProtocol grap } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = 
GraphTestUtils.class, value = "supportedGraphProtocols") public void should_create_query_message_from_batch_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -227,7 +223,7 @@ private void testQueryRequestAndPayloadContents( } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_set_correct_query_options_from_graph_statement(GraphProtocol subProtocol) throws IOException { // initialization @@ -271,7 +267,7 @@ public void should_set_correct_query_options_from_graph_statement(GraphProtocol } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_create_payload_from_config_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); @@ -314,7 +310,7 @@ public void should_create_payload_from_config_options(GraphProtocol subProtocol) } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_create_payload_from_statement_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); @@ -369,7 +365,7 @@ public void should_create_payload_from_statement_options(GraphProtocol subProtoc } @Test - @UseDataProvider("supportedGraphProtocols") + @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") public void should_not_set_graph_name_on_system_queries(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); @@ -392,7 +388,9 @@ public void should_not_set_graph_name_on_system_queries(GraphProtocol subProtoco } @Test - 
@UseDataProvider("supportedGraphProtocolsWithDseVersions") + @UseDataProvider( + location = GraphTestUtils.class, + value = "supportedGraphProtocolsWithDseVersions") public void should_return_results_for_statements(GraphProtocol graphProtocol, Version dseVersion) throws IOException { DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); @@ -442,33 +440,8 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol, Ve } } - @DataProvider - public static Object[][] supportedGraphProtocols() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}, {GRAPHSON_1_0}}; - } - - @DataProvider - public static Object[][] supportedGraphProtocolsWithDseVersions() { - return new Object[][] { - {GRAPHSON_1_0, GraphTestUtil.DSE_6_7_0}, - {GRAPHSON_1_0, GraphTestUtil.DSE_6_8_0}, - {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, - {GRAPHSON_2_0, GraphTestUtil.DSE_6_8_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_7_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, - }; - } - - @DataProvider - public static Object[][] dseVersionsWithDefaultGraphProtocol() { - return new Object[][] { - {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, - }; - } - @Test - @UseDataProvider("dseVersionsWithDefaultGraphProtocol") + @UseDataProvider(location = GraphTestUtils.class, value = "dseVersionsWithDefaultGraphProtocol") public void should_invoke_request_tracker(GraphProtocol defaultProtocol, Version dseVersion) throws IOException { DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java index 7e0604649c9..1d73fdeb435 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -6,6 +6,10 @@ */ package 
com.datastax.dse.driver.internal.core.graph; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; + import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; @@ -19,6 +23,7 @@ import com.datastax.oss.protocol.internal.response.result.DefaultRows; import com.datastax.oss.protocol.internal.response.result.RawType; import com.datastax.oss.protocol.internal.response.result.Rows; +import com.tngtech.java.junit.dataprovider.DataProvider; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayDeque; @@ -150,4 +155,29 @@ public static GraphBinaryModule createGraphBinaryModule(DseDriverContext context TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); } + + @DataProvider + public static Object[][] supportedGraphProtocols() { + return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}, {GRAPHSON_1_0}}; + } + + @DataProvider + public static Object[][] supportedGraphProtocolsWithDseVersions() { + return new Object[][] { + {GRAPHSON_1_0, GraphTestUtil.DSE_6_7_0}, + {GRAPHSON_1_0, GraphTestUtil.DSE_6_8_0}, + {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, + {GRAPHSON_2_0, GraphTestUtil.DSE_6_8_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_7_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, + }; + } + + @DataProvider + public static Object[][] dseVersionsWithDefaultGraphProtocol() { + return new Object[][] { + {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, + {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, + }; + } } From fd6ee70c2843aa0a0ef810c79c770efce2355609 Mon Sep 17 00:00:00 
2001 From: Alexandre Dutra Date: Fri, 29 Nov 2019 15:35:33 +0100 Subject: [PATCH 331/979] Use GraphRequestHandlerTestHarness.Builder instead of Builder --- .../graph/GraphRequestHandlerTestHarness.java | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index f52d636181a..be8759e8b4f 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -165,11 +165,21 @@ public static class Builder extends RequestHandlerTestHarness.Builder { private Duration graphTimeout = Duration.ZERO; private Version dseVersionForTestMetadata; - public Builder withGraphProtocolForTestConfig(String protocol) { + public GraphRequestHandlerTestHarness.Builder withGraphProtocolForTestConfig(String protocol) { this.graphProtocolForTestConfig = protocol; return this; } + public GraphRequestHandlerTestHarness.Builder withDseVersionInMetadata(Version dseVersion) { + this.dseVersionForTestMetadata = dseVersion; + return this; + } + + public GraphRequestHandlerTestHarness.Builder withGraphTimeout(Duration globalTimeout) { + this.graphTimeout = globalTimeout; + return this; + } + @Override public GraphRequestHandlerTestHarness.Builder withEmptyPool(Node node) { super.withEmptyPool(node); @@ -208,16 +218,6 @@ public GraphRequestHandlerTestHarness.Builder withProtocolVersion( return this; } - public Builder withDseVersionInMetadata(Version dseVersion) { - this.dseVersionForTestMetadata = dseVersion; - return this; - } - - public Builder withGraphTimeout(Duration globalTimeout) { - this.graphTimeout = globalTimeout; - return this; - } - @Override public GraphRequestHandlerTestHarness build() { return new 
GraphRequestHandlerTestHarness( From aee298903b7df41e18452565a2ef5630d51b4d1d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 29 Nov 2019 15:50:53 +0100 Subject: [PATCH 332/979] withGraphProtocolForTestConfig should take arg of type GraphProtocol --- .../core/graph/ContinuousGraphRequestHandlerTest.java | 3 +-- .../internal/core/graph/GraphRequestHandlerTest.java | 2 +- .../core/graph/GraphRequestHandlerTestHarness.java | 9 +++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index 6e9cc0a68d5..e4266e0b894 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -62,8 +62,7 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx GraphBinaryModule module = createGraphBinaryModule(mockContext); Builder builder = - GraphRequestHandlerTestHarness.builder() - .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()); + GraphRequestHandlerTestHarness.builder().withGraphProtocolForTestConfig(graphProtocol); PoolBehavior node1Behavior = builder.customBehavior(node); try (RequestHandlerTestHarness harness = builder.build()) { diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 39f9e83cdeb..5b176e654b8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -406,7 +406,7 @@ public void should_return_results_for_statements(GraphProtocol 
graphProtocol, Ve RequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder() - .withGraphProtocolForTestConfig(graphProtocol.toInternalCode()) + .withGraphProtocolForTestConfig(graphProtocol) .withDseVersionInMetadata(dseVersion) // ideally we would be able to provide a function here to // produce results instead of a static predefined response. diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index be8759e8b4f..377506e61a6 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -55,7 +55,7 @@ public class GraphRequestHandlerTestHarness extends RequestHandlerTestHarness { protected GraphRequestHandlerTestHarness( Builder builder, - @Nullable String graphProtocolForTestConfig, + @Nullable GraphProtocol graphProtocolForTestConfig, Duration graphTimeout, @Nullable Version dseVersionForTestMetadata) { super(builder); @@ -88,7 +88,7 @@ protected GraphRequestHandlerTestHarness( // only mock the config if graphProtocolForTestConfig is not null if (graphProtocolForTestConfig != null) { when(testProfile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) - .thenReturn(graphProtocolForTestConfig); + .thenReturn(graphProtocolForTestConfig.toInternalCode()); } when(testProfile.getBoolean(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, false)).thenReturn(false); when(testProfile.getString(DseDriverOption.GRAPH_NAME, null)).thenReturn("mockGraph"); @@ -161,11 +161,12 @@ public static GraphRequestHandlerTestHarness.Builder builder() { public static class Builder extends RequestHandlerTestHarness.Builder { - private String graphProtocolForTestConfig; + private GraphProtocol graphProtocolForTestConfig; private Duration graphTimeout = Duration.ZERO; private 
Version dseVersionForTestMetadata; - public GraphRequestHandlerTestHarness.Builder withGraphProtocolForTestConfig(String protocol) { + public GraphRequestHandlerTestHarness.Builder withGraphProtocolForTestConfig( + GraphProtocol protocol) { this.graphProtocolForTestConfig = protocol; return this; } From 011b5c7f369b6c8691ff35eb4af49c897021c461 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 29 Nov 2019 17:11:10 +0100 Subject: [PATCH 333/979] Organize data providers and test fixtures --- .../dse/driver/DseTestDataProviders.java | 9 ++ .../datastax/dse/driver/DseTestFixtures.java | 47 ++++++++ .../ContinuousGraphRequestHandlerTest.java | 3 +- .../core/graph/GraphRequestHandlerTest.java | 114 +++++++++++------- .../graph/GraphRequestHandlerTestHarness.java | 4 +- .../core/graph/GraphSupportCheckerTest.java | 93 +++++++------- .../internal/core/graph/GraphTestUtil.java | 69 ----------- .../internal/core/graph/GraphTestUtils.java | 31 +---- 8 files changed, 178 insertions(+), 192 deletions(-) delete mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java index 3cb82defa1c..92712c5ca88 100644 --- a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java +++ b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java @@ -15,6 +15,10 @@ */ package com.datastax.dse.driver; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; + import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -39,6 +43,11 @@ public static 
Object[][] allDseAndOssProtocolVersions() { return concat(DefaultProtocolVersion.values(), DseProtocolVersion.values()); } + @DataProvider + public static Object[][] supportedGraphProtocols() { + return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; + } + @NonNull private static Object[][] concat(Object[]... values) { return Stream.of(values) diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java index 38893df3739..1df2357abac 100644 --- a/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java +++ b/core/src/test/java/com/datastax/dse/driver/DseTestFixtures.java @@ -15,7 +15,16 @@ */ package com.datastax.dse.driver; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.metadata.MetadataManager; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.result.ColumnSpec; @@ -25,8 +34,11 @@ import com.datastax.oss.protocol.internal.util.Bytes; import java.nio.ByteBuffer; import java.util.ArrayDeque; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Queue; +import java.util.UUID; public class DseTestFixtures { @@ -73,4 +85,39 @@ public static Rows tenDseRows(int page, boolean last) { } return new DefaultRows(metadata, data); } + + public static DseDriverContext mockNodesInMetadataWithVersions( + DseDriverContext mockContext, boolean 
treatNullAsMissing, Version... dseVersions) { + + // mock bits of the context + MetadataManager metadataManager = mock(MetadataManager.class); + Metadata metadata = mock(Metadata.class); + Map nodeMap = new HashMap<>((dseVersions != null) ? dseVersions.length : 1); + if (dseVersions == null) { + Node node = mock(Node.class); + Map nodeExtras = new HashMap<>(1); + if (!treatNullAsMissing) { + // put an explicit null in for DSE_VERSION + nodeExtras.put(DseNodeProperties.DSE_VERSION, null); + } + nodeMap.put(UUID.randomUUID(), node); + when(node.getExtras()).thenReturn(nodeExtras); + } else { + for (Version dseVersion : dseVersions) { + // create a node with DSE version in its extra data + Node node = mock(Node.class); + Map nodeExtras = new HashMap<>(1); + if (dseVersion != null || !treatNullAsMissing) { + nodeExtras.put(DseNodeProperties.DSE_VERSION, dseVersion); + } + nodeMap.put(UUID.randomUUID(), node); + when(node.getExtras()).thenReturn(nodeExtras); + } + } + // return mocked data when requested + when(metadata.getNodes()).thenReturn(nodeMap); + when(metadataManager.getMetadata()).thenReturn(metadata); + when(mockContext.getMetadataManager()).thenReturn(metadataManager); + return mockContext; + } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index e4266e0b894..a793a0f983e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -13,6 +13,7 @@ import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; import 
com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; @@ -56,7 +57,7 @@ public void setup() { } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_return_paged_results(GraphProtocol graphProtocol) throws IOException { GraphBinaryModule module = createGraphBinaryModule(mockContext); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 5b176e654b8..95f4a47d611 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -15,6 +15,9 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; +import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.defaultDseFrameOf; import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.serialize; @@ -32,6 +35,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.data.geometry.Point; import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; @@ -41,7 +45,7 @@ import com.datastax.dse.driver.api.core.graph.GraphResultSet; 
import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.internal.core.graph.GraphRequestHandlerTestHarness.Builder; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.request.RawBytesQuery; import com.datastax.dse.protocol.internal.request.query.DseQueryOptions; @@ -52,13 +56,14 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.cql.Conversions; -import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Query; +import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.io.IOException; @@ -95,7 +100,7 @@ public void setup() { } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_create_query_message_from_script_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -125,7 +130,7 @@ public void should_create_query_message_from_script_statement(GraphProtocol grap } @Test - @UseDataProvider(location = GraphTestUtils.class, value = 
"supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_create_query_message_from_fluent_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -159,7 +164,7 @@ public void should_create_query_message_from_fluent_statement(GraphProtocol grap } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_create_query_message_from_batch_statement(GraphProtocol graphProtocol) throws IOException { // initialization @@ -223,7 +228,7 @@ private void testQueryRequestAndPayloadContents( } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_set_correct_query_options_from_graph_statement(GraphProtocol subProtocol) throws IOException { // initialization @@ -267,7 +272,7 @@ public void should_set_correct_query_options_from_graph_statement(GraphProtocol } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_create_payload_from_config_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); @@ -310,7 +315,7 @@ public void should_create_payload_from_config_options(GraphProtocol subProtocol) } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_create_payload_from_statement_options(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = 
GraphRequestHandlerTestHarness.builder().build(); @@ -365,7 +370,7 @@ public void should_create_payload_from_statement_options(GraphProtocol subProtoc } @Test - @UseDataProvider(location = GraphTestUtils.class, value = "supportedGraphProtocols") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_not_set_graph_name_on_system_queries(GraphProtocol subProtocol) { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); @@ -388,35 +393,35 @@ public void should_not_set_graph_name_on_system_queries(GraphProtocol subProtoco } @Test - @UseDataProvider( - location = GraphTestUtils.class, - value = "supportedGraphProtocolsWithDseVersions") + @UseDataProvider("supportedGraphProtocolsWithDseVersions") public void should_return_results_for_statements(GraphProtocol graphProtocol, Version dseVersion) throws IOException { - DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); - GraphBinaryModule module = createGraphBinaryModule(mockContext); + + Builder builder = + GraphRequestHandlerTestHarness.builder() + .withGraphProtocolForTestConfig(graphProtocol) + .withDseVersionInMetadata(dseVersion); + PoolBehavior node1Behavior = builder.customBehavior(node); + GraphRequestHandlerTestHarness harness = builder.build(); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + + // ideally we would be able to provide a function here to + // produce results instead of a static predefined response. + // Function to which we would pass the harness instance or a (mocked)DriverContext. 
+ // Since that's not possible in the RequestHandlerTestHarness API at the moment, we + // have to use another DseDriverContext and GraphBinaryModule here, + // instead of reusing the one in the harness' DriverContext + node1Behavior.setResponseSuccess(defaultDseFrameOf(singleGraphRow(graphProtocol, module))); GraphSupportChecker graphSupportChecker = mock(GraphSupportChecker.class); when(graphSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); when(graphSupportChecker.inferGraphProtocol(any(), any(), any())).thenReturn(graphProtocol); GraphRequestAsyncProcessor p = - Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphSupportChecker)); + Mockito.spy(new GraphRequestAsyncProcessor(harness.getContext(), graphSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); - RequestHandlerTestHarness harness = - GraphRequestHandlerTestHarness.builder() - .withGraphProtocolForTestConfig(graphProtocol) - .withDseVersionInMetadata(dseVersion) - // ideally we would be able to provide a function here to - // produce results instead of a static predefined response. - // Function to which we would pass the harness instance or a (mocked)DriverContext. 
- // Since that's not possible in the RequestHandlerTestHarness API at the moment, we - // have to use another DseDriverContext and GraphBinaryModule here, - // instead of reusing the one in the harness' DriverContext - .withResponse(node, defaultDseFrameOf(singleGraphRow(graphProtocol, module))) - .build(); - GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); GraphResultSet grs = @@ -440,36 +445,49 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol, Ve } } + @DataProvider + public static Object[][] supportedGraphProtocolsWithDseVersions() { + return new Object[][] { + {GRAPHSON_1_0, Version.parse("6.7.0")}, + {GRAPHSON_1_0, Version.parse("6.8.0")}, + {GRAPHSON_2_0, Version.parse("6.7.0")}, + {GRAPHSON_2_0, Version.parse("6.8.0")}, + {GRAPH_BINARY_1_0, Version.parse("6.7.0")}, + {GRAPH_BINARY_1_0, Version.parse("6.8.0")}, + }; + } + @Test - @UseDataProvider(location = GraphTestUtils.class, value = "dseVersionsWithDefaultGraphProtocol") - public void should_invoke_request_tracker(GraphProtocol defaultProtocol, Version dseVersion) + @UseDataProvider("dseVersionsWithDefaultGraphProtocol") + public void should_invoke_request_tracker(GraphProtocol graphProtocol, Version dseVersion) throws IOException { - DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersion); - GraphBinaryModule module = createGraphBinaryModule(mockContext); + Builder builder = + GraphRequestHandlerTestHarness.builder() + .withGraphProtocolForTestConfig(graphProtocol) + .withDseVersionInMetadata(dseVersion); + PoolBehavior node1Behavior = builder.customBehavior(node); + GraphRequestHandlerTestHarness harness = builder.build(); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); GraphSupportChecker graphSupportChecker = mock(GraphSupportChecker.class); when(graphSupportChecker.isPagingEnabled(any(), any())).thenReturn(false); - 
when(graphSupportChecker.inferGraphProtocol(any(), any(), any())).thenReturn(defaultProtocol); + when(graphSupportChecker.inferGraphProtocol(any(), any(), any())).thenReturn(graphProtocol); GraphRequestAsyncProcessor p = - Mockito.spy(new GraphRequestAsyncProcessor(mockContext, graphSupportChecker)); + Mockito.spy(new GraphRequestAsyncProcessor(harness.getContext(), graphSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); - RequestHandlerTestHarness harness = - GraphRequestHandlerTestHarness.builder() - .withDseVersionInMetadata(dseVersion) - .withResponse(node, defaultDseFrameOf(singleGraphRow(defaultProtocol, module))) - .build(); - RequestTracker requestTracker = mock(RequestTracker.class); when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + node1Behavior.setResponseSuccess(defaultDseFrameOf(singleGraphRow(graphProtocol, module))); + GraphResultSet grs = new GraphRequestSyncProcessor( - new GraphRequestAsyncProcessor( - (DseDriverContext) harness.getContext(), graphSupportChecker)) + new GraphRequestAsyncProcessor(harness.getContext(), graphSupportChecker)) .process(graphStatement, harness.getSession(), harness.getContext(), "test-graph"); List nodes = grs.all(); @@ -481,7 +499,7 @@ public void should_invoke_request_tracker(GraphProtocol defaultProtocol, Version Vertex actual = graphNode.asVertex(); assertThat(actual.label()).isEqualTo("person"); assertThat(actual.id()).isEqualTo(1); - if (!defaultProtocol.isGraphBinary()) { + if (!graphProtocol.isGraphBinary()) { // GraphBinary does not encode properties regardless of whether they are present in the // parent element or not :/ assertThat(actual.property("name").id()).isEqualTo(11); @@ -497,4 +515,14 @@ public void should_invoke_request_tracker(GraphProtocol defaultProtocol, Version matches(LOG_PREFIX_PER_REQUEST)); verifyNoMoreInteractions(requestTracker); } + + @DataProvider + public static 
Object[][] dseVersionsWithDefaultGraphProtocol() { + // Default GraphSON sub protocol version differs based on DSE version, so test with a version + // less than DSE 6.8 as well as DSE 6.8. + return new Object[][] { + {GRAPHSON_2_0, Version.parse("6.7.0")}, + {GRAPH_BINARY_1_0, Version.parse("6.8.0")}, + }; + } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java index 377506e61a6..c8814d17b65 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTestHarness.java @@ -17,6 +17,7 @@ import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseTestFixtures; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.context.DseDriverContext; @@ -146,7 +147,8 @@ protected GraphRequestHandlerTestHarness( when(dseDriverContext.getRequestTracker()).thenReturn(new NoopRequestTracker(dseDriverContext)); // if DSE Version is specified for test metadata, then we need to mock that up on the context if (dseVersionForTestMetadata != null) { - GraphTestUtil.mockContext(dseDriverContext, true, dseVersionForTestMetadata); + DseTestFixtures.mockNodesInMetadataWithVersions( + dseDriverContext, true, dseVersionForTestMetadata); } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java index 0ee43fcb64a..4625bb8cf90 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java @@ 
-6,6 +6,7 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static com.datastax.dse.driver.DseTestFixtures.mockNodesInMetadataWithVersions; import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.AUTO; import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.DISABLED; import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.ENABLED; @@ -13,17 +14,18 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.PagingEnabledOptions; import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.dse.driver.internal.core.DseProtocolFeature; import com.datastax.dse.driver.internal.core.context.DseDriverContext; -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -57,7 +59,7 @@ public class GraphSupportCheckerTest { @Rule public MockitoRule mockitoRule = MockitoJUnit.rule(); - @UseDataProvider("pagingEnabled") + @UseDataProvider("graphPagingEnabledAndDseVersions") @Test public void should_check_if_paging_is_supported( boolean protocolWithPagingSupport, @@ -113,7 +115,7 @@ public void should_not_support_paging_when_statement_profile_not_present() { } @DataProvider() - public static Object[][] 
pagingEnabled() { + public static Object[][] graphPagingEnabledAndDseVersions() { List listWithGraphPagingNode = Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING); List listWithoutGraphPagingNode = Collections.singletonList(Version.parse("6.7.0")); @@ -167,10 +169,10 @@ private void contextGraphPagingEnabled( private InternalDriverContext protocolWithPagingSupport(boolean pagingSupport) { InternalDriverContext context = mock(InternalDriverContext.class); - when(context.getProtocolVersion()).thenReturn(DefaultProtocolVersion.V4); + when(context.getProtocolVersion()).thenReturn(DseProtocolVersion.DSE_V2); ProtocolVersionRegistry protocolVersionRegistry = mock(ProtocolVersionRegistry.class); when(protocolVersionRegistry.supports( - DefaultProtocolVersion.V4, DseProtocolFeature.CONTINUOUS_PAGING)) + DseProtocolVersion.DSE_V2, DseProtocolFeature.CONTINUOUS_PAGING)) .thenReturn(pagingSupport); when(context.getProtocolVersionRegistry()).thenReturn(protocolVersionRegistry); return context; @@ -185,29 +187,31 @@ private void statementGraphPagingEnabled( } @Test - @UseDataProvider("dseVersions") + @UseDataProvider("dseVersionsAndGraphProtocols") public void should_determine_default_graph_protocol_from_dse_version( Version[] dseVersions, GraphProtocol expectedProtocol) { // mock up the metadata for the context // using 'true' here will treat null test Versions as no DSE_VERSION info in the metadata - DseDriverContext mockContext = GraphTestUtil.mockContext(true, dseVersions); - GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(mockContext); + DseDriverContext context = + mockNodesInMetadataWithVersions(mock(DseDriverContext.class), true, dseVersions); + GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(context); assertThat(graphProtocol).isEqualTo(expectedProtocol); } @Test - @UseDataProvider("dseVersions") + @UseDataProvider("dseVersionsAndGraphProtocols") public void 
should_determine_default_graph_protocol_from_dse_version_with_null_versions( Version[] dseVersions, GraphProtocol expectedProtocol) { // mock up the metadata for the context // using 'false' here will treat null test Versions as explicit NULL info for DSE_VERSION - DseDriverContext mockContext = GraphTestUtil.mockContext(false, dseVersions); - GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(mockContext); + DseDriverContext context = + mockNodesInMetadataWithVersions(mock(DseDriverContext.class), false, dseVersions); + GraphProtocol graphProtocol = new GraphSupportChecker().getDefaultGraphProtocol(context); assertThat(graphProtocol).isEqualTo(expectedProtocol); } @DataProvider - public static Object[][] dseVersions() { + public static Object[][] dseVersionsAndGraphProtocols() { return new Object[][] { {new Version[] {Version.parse("5.0.3")}, GraphProtocol.GRAPHSON_2_0}, {new Version[] {Version.parse("6.0.1")}, GraphProtocol.GRAPHSON_2_0}, @@ -237,7 +241,7 @@ public static Object[][] dseVersions() { } @Test - @UseDataProvider("protocolObjects") + @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProtocol) { when(graphStatement.getSubProtocol()).thenReturn(graphProtocol.toInternalCode()); @@ -251,36 +255,55 @@ public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProto } @Test - @UseDataProvider("protocolStrings") + @UseDataProvider("graphProtocolStringsAndDseVersions") public void should_pickup_graph_protocol_and_parse_from_string_config( String stringConfig, Version dseVersion) { when(executionProfile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(Boolean.TRUE); when(executionProfile.getString(eq(DseDriverOption.GRAPH_SUB_PROTOCOL))) .thenReturn(stringConfig); + DseDriverContext context = + mockNodesInMetadataWithVersions(mock(DseDriverContext.class), true, dseVersion); GraphProtocol 
inferredProtocol = - new GraphSupportChecker() - .inferGraphProtocol( - graphStatement, executionProfile, GraphTestUtil.mockContext(true, dseVersion)); + new GraphSupportChecker().inferGraphProtocol(graphStatement, executionProfile, context); assertThat(inferredProtocol.toInternalCode()).isEqualTo(stringConfig); } + @DataProvider + public static Object[][] graphProtocolStringsAndDseVersions() { + // putting manual strings here to be sure to be notified if a value in + // GraphProtocol ever changes + return new Object[][] { + {"graphson-1.0", Version.parse("6.7.0")}, + {"graphson-1.0", Version.parse("6.8.0")}, + {"graphson-2.0", Version.parse("6.7.0")}, + {"graphson-2.0", Version.parse("6.8.0")}, + {"graph-binary-1.0", Version.parse("6.7.0")}, + {"graph-binary-1.0", Version.parse("6.8.0")}, + }; + } + @Test @UseDataProvider("dseVersions6") public void should_use_correct_default_protocol_when_parsing(Version dseVersion) { + DseDriverContext context = + mockNodesInMetadataWithVersions(mock(DseDriverContext.class), true, dseVersion); GraphProtocol inferredProtocol = - new GraphSupportChecker() - .inferGraphProtocol( - graphStatement, executionProfile, GraphTestUtil.mockContext(true, dseVersion)); + new GraphSupportChecker().inferGraphProtocol(graphStatement, executionProfile, context); // For DSE 6.8 and newer, the default should be GraphSON binary // for DSE older than 6.8, the default should be GraphSON2 assertThat(inferredProtocol) .isEqualTo( - (dseVersion.compareTo(GraphTestUtil.DSE_6_8_0) < 0) + (dseVersion.compareTo(Version.parse("6.8.0")) < 0) ? 
GraphProtocol.GRAPHSON_2_0 : GraphProtocol.GRAPH_BINARY_1_0); } + @DataProvider + public static Object[][] dseVersions6() { + return new Object[][] {{Version.parse("6.7.0")}, {Version.parse("6.8.0")}}; + } + @Test public void should_fail_if_graph_protocol_used_is_invalid() { assertThatThrownBy(() -> GraphProtocol.fromString("invalid")) @@ -296,30 +319,4 @@ public void should_fail_if_graph_protocol_used_is_graphson_3() { .hasMessage( "Graph protocol used [\"graphson-3.0\"] unknown. Possible values are: [ \"graphson-1.0\", \"graphson-2.0\", \"graph-binary-1.0\"]"); } - - @DataProvider - public static Object[][] protocolObjects() { - return new Object[][] { - {GraphProtocol.GRAPHSON_1_0}, {GraphProtocol.GRAPHSON_2_0}, {GraphProtocol.GRAPH_BINARY_1_0} - }; - } - - @DataProvider - public static Object[][] protocolStrings() { - // putting manual strings here to be sure to be notified if a value in - // GraphProtocol ever changes - return new Object[][] { - {"graphson-1.0", GraphTestUtil.DSE_6_7_0}, - {"graphson-1.0", GraphTestUtil.DSE_6_8_0}, - {"graphson-2.0", GraphTestUtil.DSE_6_7_0}, - {"graphson-2.0", GraphTestUtil.DSE_6_8_0}, - {"graph-binary-1.0", GraphTestUtil.DSE_6_7_0}, - {"graph-binary-1.0", GraphTestUtil.DSE_6_8_0}, - }; - } - - @DataProvider - public static Object[][] dseVersions6() { - return new Object[][] {{GraphTestUtil.DSE_6_7_0}, {GraphTestUtil.DSE_6_8_0}}; - } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java deleted file mode 100644 index 95d63c57f5d..00000000000 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtil.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms - */ -package com.datastax.dse.driver.internal.core.graph; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.metadata.MetadataManager; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; - -/** Utility for common Graph Unit testing. */ -class GraphTestUtil { - - // Default GraphSON sub protocol version differs based on DSE version, so test with a version less - // than DSE 6.8 as well as DSE 6.8. - static final Version DSE_6_7_0 = Objects.requireNonNull(Version.parse("6.7.0")); - static final Version DSE_6_8_0 = Objects.requireNonNull(Version.parse("6.8.0")); - - static DseDriverContext mockContext(boolean treatNullAsMissing, Version... dseVersions) { - DseDriverContext mockContext = mock(DseDriverContext.class); - return mockContext(mockContext, treatNullAsMissing, dseVersions); - } - - static DseDriverContext mockContext( - DseDriverContext context, boolean treatNullAsMissing, Version... dseVersions) { - // mock bits of the context - MetadataManager metadataManager = mock(MetadataManager.class); - Metadata metadata = mock(Metadata.class); - Map nodeMap = new HashMap<>((dseVersions != null) ? 
dseVersions.length : 1); - if (dseVersions == null) { - Node node = mock(Node.class); - Map nodeExtras = new HashMap<>(1); - if (!treatNullAsMissing) { - // put an explicit null in for DSE_VERSION - nodeExtras.put(DseNodeProperties.DSE_VERSION, null); - } - nodeMap.put(UUID.randomUUID(), node); - when(node.getExtras()).thenReturn(nodeExtras); - } else { - for (Version dseVersion : dseVersions) { - // create a node with DSE version in its extra data - Node node = mock(Node.class); - Map nodeExtras = new HashMap<>(1); - if (dseVersion != null || !treatNullAsMissing) { - nodeExtras.put(DseNodeProperties.DSE_VERSION, dseVersion); - } - nodeMap.put(UUID.randomUUID(), node); - when(node.getExtras()).thenReturn(nodeExtras); - } - } - // return mocked data when requested - when(metadata.getNodes()).thenReturn(nodeMap); - when(metadataManager.getMetadata()).thenReturn(metadata); - when(context.getMetadataManager()).thenReturn(metadataManager); - return context; - } -} diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java index 1d73fdeb435..abff143a73b 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -6,10 +6,6 @@ */ package com.datastax.dse.driver.internal.core.graph; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_1_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPHSON_2_0; -import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; - import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; @@ -23,7 +19,6 @@ import 
com.datastax.oss.protocol.internal.response.result.DefaultRows; import com.datastax.oss.protocol.internal.response.result.RawType; import com.datastax.oss.protocol.internal.response.result.Rows; -import com.tngtech.java.junit.dataprovider.DataProvider; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayDeque; @@ -40,6 +35,7 @@ import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; public class GraphTestUtils { + public static ByteBuffer serialize( Object value, GraphProtocol graphProtocol, GraphBinaryModule graphBinaryModule) throws IOException { @@ -155,29 +151,4 @@ public static GraphBinaryModule createGraphBinaryModule(DseDriverContext context TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); } - - @DataProvider - public static Object[][] supportedGraphProtocols() { - return new Object[][] {{GRAPHSON_2_0}, {GRAPH_BINARY_1_0}, {GRAPHSON_1_0}}; - } - - @DataProvider - public static Object[][] supportedGraphProtocolsWithDseVersions() { - return new Object[][] { - {GRAPHSON_1_0, GraphTestUtil.DSE_6_7_0}, - {GRAPHSON_1_0, GraphTestUtil.DSE_6_8_0}, - {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, - {GRAPHSON_2_0, GraphTestUtil.DSE_6_8_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_7_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, - }; - } - - @DataProvider - public static Object[][] dseVersionsWithDefaultGraphProtocol() { - return new Object[][] { - {GRAPHSON_2_0, GraphTestUtil.DSE_6_7_0}, - {GRAPH_BINARY_1_0, GraphTestUtil.DSE_6_8_0}, - }; - } } From bce94b49ce6de142c1a22855ce27a2c8707a64e9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 29 Nov 2019 17:56:56 +0100 Subject: [PATCH 334/979] Enhance javadocs of GraphSupportChecker and make main methods public --- .../core/graph/GraphSupportChecker.java | 109 ++++++++++++------ .../core/graph/GraphSupportCheckerTest.java | 
12 +- 2 files changed, 80 insertions(+), 41 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java index b418f24949d..2f5a3cd049e 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; import java.util.Objects; @@ -24,20 +25,39 @@ import org.slf4j.LoggerFactory; public class GraphSupportChecker { + private static final Logger LOG = LoggerFactory.getLogger(GraphSupportChecker.class); - static final Version MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING = + + /** + * The minimum DSE version supporting both graph paging and the GraphBinary sub-protocol is DSE + * 6.8. + */ + private static final Version MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING = Objects.requireNonNull(Version.parse("6.8.0")); private volatile Boolean contextGraphPagingEnabled; private volatile Boolean isDse68OrAbove; - // Graph paging is available if - // 1) continuous paging is generally available and - // 2) all hosts are running DSE 6.8+ - // The computation below will be done only once when the session is initialized; if other hosts - // join the cluster later and are not running DSE 6.8, the user has to manually disable graph - // paging. - boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext context) { + /** + * Checks whether graph paging is available. + * + *

      Graph paging is available if: + * + *

        + *
      1. Continuous paging is generally available (this implies protocol version {@link + * com.datastax.dse.driver.api.core.DseProtocolVersion#DSE_V1 DSE_V1} or higher); + *
      2. Graph paging is set to ENABLED or AUTO in the configuration + * with {@link DseDriverOption#GRAPH_PAGING_ENABLED}; + *
      3. If graph paging is set to AUTO, then a check will be performed to verify + * that all hosts are running DSE 6.8+; if that is the case, then graph paging will be + * assumed to be available. + *
      + * + * Note that the hosts check will be done only once, then memoized; if other hosts join the + * cluster later and do not support graph paging, the user has to manually disable graph paging. + */ + public boolean isPagingEnabled( + @NonNull GraphStatement graphStatement, @NonNull InternalDriverContext context) { DriverExecutionProfile driverExecutionProfile = Conversions.resolveExecutionProfile(graphStatement, context); PagingEnabledOptions pagingEnabledOptions = @@ -55,6 +75,52 @@ boolean isPagingEnabled(GraphStatement graphStatement, InternalDriverContext } } + /** + * Infers the {@link GraphProtocol} to use to execute the given statement. + * + *

      The graph protocol is computed as follows: + * + *

        + *
      1. If the statement declares the protocol to use with {@link + * GraphStatement#getSubProtocol()}, then that protocol is returned. + *
      2. If the driver configuration explicitly defines the protocol to use (see {@link + * DseDriverOption#GRAPH_SUB_PROTOCOL} and dse-reference.conf), then that protocol is + * returned. + *
      3. Otherwise, the graph protocol to use is determined by the DSE version of hosts in the + * cluster. If any host has DSE version 6.7.x or lower, the default graph protocol is {@link + * GraphProtocol#GRAPHSON_2_0}. If all hosts have DSE version 6.8.0 or higher, the default + * graph protocol is {@link GraphProtocol#GRAPH_BINARY_1_0}. + *
      + * + * Note that the hosts check will be done only once, then memoized; if other hosts join the and do + * not support the computed graph protocol, the user has to manually set the graph protocol to + * use. + * + *

      Also note that GRAPH_BINARY_1_0 can only be used with "core" graph engines; if + * you are targeting a "classic" graph engine instead, the user has to manually set the graph + * protocol to something else. + */ + @NonNull + public GraphProtocol inferGraphProtocol( + @NonNull GraphStatement statement, + @NonNull DriverExecutionProfile config, + @NonNull InternalDriverContext context) { + String graphProtocol = statement.getSubProtocol(); + if (graphProtocol == null) { + // use the protocol specified in configuration, otherwise get the default from the context + graphProtocol = + (config.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)) + ? config.getString(DseDriverOption.GRAPH_SUB_PROTOCOL) + : getDefaultGraphProtocol(context).toInternalCode(); + } + // should not be null because we call config.getString() with a default value + Objects.requireNonNull( + graphProtocol, + "Could not determine the graph protocol for the query. This is a bug, please report."); + + return GraphProtocol.fromString(graphProtocol); + } + private boolean isContextGraphPagingEnabled(InternalDriverContext context) { if (contextGraphPagingEnabled == null) { ProtocolVersion protocolVersion = context.getProtocolVersion(); @@ -73,16 +139,11 @@ private boolean isContextGraphPagingEnabled(InternalDriverContext context) { } /** - * Determines the default {@link GraphProtocol} for the given context. When a statement is - * executed, if the Graph protocol is not explicitly set on the statement (via {@link - * GraphStatement#setSubProtocol(java.lang.String)}), or is not explicitly set in the config (see - * dse-reference.conf), the default Graph protocol used is determined by the DSE version to which - * the driver is connected. For DSE versions 6.7.x and lower, the default Graph protocol is {@link - * GraphProtocol#GRAPHSON_2_0}. For DSE versions 6.8.0 and higher, the default Graph protocol is - * {@link GraphProtocol#GRAPH_BINARY_1_0}. 
+ * Determines the default {@link GraphProtocol} for the given context. * * @return The default GraphProtocol to used based on the provided context. */ + @VisibleForTesting GraphProtocol getDefaultGraphProtocol(@NonNull InternalDriverContext context) { if (isDse68OrAbove == null) { isDse68OrAbove = checkIsDse68OrAbove(context); @@ -102,22 +163,4 @@ private boolean checkIsDse68OrAbove(@NonNull InternalDriverContext context) { } return true; } - - GraphProtocol inferGraphProtocol( - GraphStatement statement, DriverExecutionProfile config, InternalDriverContext context) { - String graphProtocol = statement.getSubProtocol(); - if (graphProtocol == null) { - // use the protocol specified in configuration, otherwise get the default from the context - graphProtocol = - (config.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)) - ? config.getString(DseDriverOption.GRAPH_SUB_PROTOCOL) - : getDefaultGraphProtocol(context).toInternalCode(); - } - // should not be null because we call config.getString() with a default value - Objects.requireNonNull( - graphProtocol, - "Could not determine the graph protocol for the query. 
This is a bug, please report."); - - return GraphProtocol.fromString(graphProtocol); - } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java index 4625bb8cf90..5654a0d688e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java @@ -10,7 +10,6 @@ import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.AUTO; import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.DISABLED; import static com.datastax.dse.driver.api.core.graph.PagingEnabledOptions.ENABLED; -import static com.datastax.dse.driver.internal.core.graph.GraphSupportChecker.MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; import static org.mockito.ArgumentMatchers.eq; @@ -87,8 +86,7 @@ public void should_not_support_paging_when_statement_profile_not_present() { GraphStatement graphStatement = mock(GraphStatement.class); InternalDriverContext context = protocolWithPagingSupport(true); contextGraphPagingEnabled(context, DISABLED); - addNodeWithDseVersion( - context, Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING)); + addNodeWithDseVersion(context, Collections.singletonList(Version.parse("6.8.0"))); // when boolean pagingEnabled = new GraphSupportChecker().isPagingEnabled(graphStatement, context); @@ -104,8 +102,7 @@ public void should_not_support_paging_when_statement_profile_not_present() { GraphStatement graphStatement = mock(GraphStatement.class); InternalDriverContext context = protocolWithPagingSupport(true); contextGraphPagingEnabled(context, ENABLED); - addNodeWithDseVersion( - context, 
Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING)); + addNodeWithDseVersion(context, Collections.singletonList(Version.parse("6.8.0"))); // when boolean pagingEnabled = new GraphSupportChecker().isPagingEnabled(graphStatement, context); @@ -116,12 +113,11 @@ public void should_not_support_paging_when_statement_profile_not_present() { @DataProvider() public static Object[][] graphPagingEnabledAndDseVersions() { - List listWithGraphPagingNode = - Collections.singletonList(MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING); + List listWithGraphPagingNode = Collections.singletonList(Version.parse("6.8.0")); List listWithoutGraphPagingNode = Collections.singletonList(Version.parse("6.7.0")); List listWithNull = Collections.singletonList(null); List listWithTwoNodesOneNotSupporting = - Arrays.asList(Version.parse("6.7.0"), MIN_DSE_VERSION_GRAPH_BINARY_AND_PAGING); + Arrays.asList(Version.parse("6.7.0"), Version.parse("6.8.0")); return new Object[][] { {false, ENABLED, ENABLED, listWithGraphPagingNode, true}, From 1bfe33f40409d53ab5064ed8fb28122ea53b26da Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 7 Nov 2019 12:20:01 -0600 Subject: [PATCH 335/979] JAVA-2472: Enable speculative executions for paged Graph Queries --- changelog/README.md | 1 + .../ContinuousCqlRequestAsyncProcessor.java | 3 +- .../ContinuousCqlRequestHandler.java | 31 +- .../ContinuousRequestHandlerBase.java | 1988 ++++++++++------- .../graph/ContinuousGraphRequestHandler.java | 37 +- .../dse/driver/DseTestDataProviders.java | 34 + ...tinuousCqlRequestHandlerReprepareTest.java | 4 +- .../ContinuousCqlRequestHandlerRetryTest.java | 195 +- .../ContinuousCqlRequestHandlerTest.java | 27 +- ...equestHandlerSpeculativeExecutionTest.java | 520 +++++ .../graph/GraphSpeculativeExecutionIT.java | 91 + 11 files changed, 1987 insertions(+), 944 deletions(-) create mode 100644 core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java 
create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java diff --git a/changelog/README.md b/changelog/README.md index 320a05d5269..36e032d4540 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2472: Enable speculative executions for paged graph queries - [improvement] JAVA-1579: Change default result format to latest GraphSON format - [improvement] JAVA-2496: Revisit timeouts for paged graph queries - [bug] JAVA-2510: Fix GraphBinaryDataTypesTest Codec registry initialization diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java index a1edf1da2e2..eea0b331e73 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestAsyncProcessor.java @@ -44,8 +44,7 @@ public CompletionStage process( DefaultSession session, InternalDriverContext context, String sessionLogPrefix) { - return new ContinuousCqlRequestHandler(request, session, context, sessionLogPrefix) - .dequeueOrCreatePending(); + return new ContinuousCqlRequestHandler(request, session, context, sessionLogPrefix).handle(); } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index ba26ea3d8e6..776c4a6c476 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -19,11 +19,11 @@ import 
com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; import com.datastax.dse.driver.internal.core.cql.DseConversions; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.cql.DefaultRow; @@ -35,8 +35,6 @@ import com.datastax.oss.protocol.internal.response.result.Rows; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import io.netty.util.concurrent.Future; -import io.netty.util.concurrent.GenericFutureListener; import java.nio.ByteBuffer; import java.time.Duration; import java.util.List; @@ -49,8 +47,7 @@ */ @ThreadSafe public class ContinuousCqlRequestHandler - extends ContinuousRequestHandlerBase - implements ResponseCallback, GenericFutureListener>, Throttled { + extends ContinuousRequestHandlerBase { private final Message message; private final Duration firstPageTimeout; @@ -63,9 +60,8 @@ public class ContinuousCqlRequestHandler @NonNull DefaultSession session, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix) { - super(statement, session, context, sessionLogPrefix, ContinuousAsyncResultSet.class); + super(statement, session, context, sessionLogPrefix, false); message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); - throttler.register(this); firstPageTimeout = 
executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); otherPagesTimeout = @@ -77,19 +73,19 @@ public class ContinuousCqlRequestHandler @NonNull @Override - protected Duration getGlobalTimeout() { + protected Duration getGlobalTimeoutDuration() { return Duration.ZERO; } @NonNull @Override - protected Duration getPageTimeout(int pageNumber) { + protected Duration getPageTimeoutDuration(int pageNumber) { return pageNumber == 1 ? firstPageTimeout : otherPagesTimeout; } @NonNull @Override - protected Duration getReviseRequestTimeout() { + protected Duration getReviseRequestTimeoutDuration() { return otherPagesTimeout; } @@ -129,14 +125,17 @@ protected ContinuousAsyncResultSet createEmptyResultSet(@NonNull ExecutionInfo e @NonNull @Override protected DefaultExecutionInfo createExecutionInfo( - @NonNull Result result, @Nullable Frame response) { + @NonNull Node node, + @Nullable Result result, + @Nullable Frame response, + int successfulExecutionIndex) { ByteBuffer pagingState = result instanceof Rows ? 
((Rows) result).getMetadata().pagingState : null; return new DefaultExecutionInfo( statement, node, - 0, - 0, + startedSpeculativeExecutionsCount.get(), + successfulExecutionIndex, errors, pagingState, response, @@ -149,7 +148,9 @@ protected DefaultExecutionInfo createExecutionInfo( @NonNull @Override protected DefaultContinuousAsyncResultSet createResultSet( - @NonNull Rows rows, @NonNull ExecutionInfo executionInfo) { + @NonNull Rows rows, + @NonNull ExecutionInfo executionInfo, + @NonNull ColumnDefinitions columnDefinitions) { Queue> data = rows.getData(); CountingIterator iterator = new CountingIterator(data.size()) { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index 7227a85cf66..e18fe123c7b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -14,6 +14,8 @@ import com.datastax.dse.protocol.internal.request.Revise; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.RequestThrottlingException; @@ -38,6 +40,8 @@ import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import 
com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; import com.datastax.oss.driver.internal.core.channel.DriverChannel; @@ -49,6 +53,8 @@ import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.session.RepreparePayload; +import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; +import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; @@ -81,6 +87,8 @@ import java.util.concurrent.CompletionStage; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; import net.jcip.annotations.GuardedBy; import net.jcip.annotations.ThreadSafe; @@ -93,83 +101,105 @@ @ThreadSafe public abstract class ContinuousRequestHandlerBase< StatementT extends Request, ResultSetT, ExecutionInfoT> - implements ResponseCallback, GenericFutureListener>, Throttled { + implements Throttled { private static final Logger LOG = LoggerFactory.getLogger(ContinuousRequestHandlerBase.class); - protected final String logPrefix; + private final String logPrefix; protected final StatementT statement; protected final DefaultSession session; + private final CqlIdentifier keyspace; protected final InternalDriverContext context; protected final DriverExecutionProfile executionProfile; private final Queue queryPlan; private final RetryPolicy retryPolicy; - protected final RequestThrottler 
throttler; + private final RequestThrottler throttler; private final boolean protocolBackpressureAvailable; private final boolean isIdempotent; private final Timer timer; private final SessionMetricUpdater sessionMetricUpdater; + private final boolean specExecEnabled; + private final SpeculativeExecutionPolicy speculativeExecutionPolicy; + private final List scheduledExecutions; - // The errors on the nodes that were already tried. - // We don't use a map because nodes can appear multiple times. + /** + * The errors on the nodes that were already tried. We don't use a map because nodes can appear + * multiple times. + */ protected final List> errors = new CopyOnWriteArrayList<>(); - // Coordinates concurrent accesses between the client and I/O threads + /** + * Represents the global state of the continuous paging request. This future is not exposed to + * clients, it is used internally to track completion and cancellation. + */ + private final CompletableFuture doneFuture = new CompletableFuture<>(); + + /** + * The list of in-flight executions, one per node. Executions may be triggered by speculative + * executions or retries. An execution is added to this list when the write operation completes. + * It is removed from this list when the callback has done reading responses. + */ + private final List inFlightCallbacks = new CopyOnWriteArrayList<>(); + + /** + * How many speculative executions are currently running (including the initial execution). We + * track this in order to know when to fail the request if all executions have reached the end of + * the query plan. + */ + private final AtomicInteger activeExecutionsCount = new AtomicInteger(0); + + /** + * How many speculative executions have started (excluding the initial execution), whether they + * have completed or not. We track this in order to fill execution info objects with this + * information. 
+ */ + protected final AtomicInteger startedSpeculativeExecutionsCount = new AtomicInteger(0); + + /** + * Coordinates concurrent accesses between the client and I/O threads that wish to enqueue and + * dequeue items from the page queue. + */ private final ReentrantLock lock = new ReentrantLock(); - // The page queue, storing responses that we have received and have not been consumed by the - // client yet. + /** + * The page queue, storing pages that we have received and have not been consumed by the client + * yet. It can also store errors, when the operation completed exceptionally. + */ @GuardedBy("lock") private Queue queue; - // If the client requests a page and we can't serve it immediately (empty queue), then we create - // this future and have the client wait on it. Otherwise this field is null. + /** + * If the client requests a page and we can't serve it immediately (empty queue), then we create + * this future and have the client wait on it. Otherwise this field is null. + */ @GuardedBy("lock") private CompletableFuture pendingResult; - // How many pages were requested. This is the total number of pages requested from the beginning. - // It will be zero if the protocol does not support numPagesRequested (DSE_V1) + /** + * How many pages were requested. This is the total number of pages requested from the beginning. + * It will be zero if the protocol does not support numPagesRequested (DSE_V1) + */ @GuardedBy("lock") private int numPagesRequested; - // An integer that represents the state of the continuous paging request: - // - if positive, it is the sequence number of the next expected page; - // - if negative, it is a terminal state, identified by the constants below. + /** + * The node that has been chosen to deliver results. In case of speculative executions, this is + * the first node that replies with either a result or an error. 
+ */ @GuardedBy("lock") - private int state = 1; - - private static final int STATE_FINISHED = -1; - private static final int STATE_FAILED = -2; + private NodeResponseCallback chosenExecution; // Set when the execution starts, and is never modified after. private volatile long startTimeNanos; - // These are set when the first page arrives, and are never modified after. - volatile ColumnDefinitions columnDefinitions; - - // These change over time as different nodes are tried; - // they can only be null before the first request is sent. - protected volatile Node node; - private volatile DriverChannel channel; - private volatile int streamId; - // Set each time a new request/response cycle starts. - private volatile long messageStartTimeNanos; - private volatile Timeout pageTimeout; private volatile Timeout globalTimeout; - // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for - // the first attempt, 1 for the first retry, etc.). - private volatile int retryCount; - private Class resultSetClass; - public ContinuousRequestHandlerBase( @NonNull StatementT statement, @NonNull DefaultSession session, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix, - @NonNull Class resultSetClass) { - this.resultSetClass = resultSetClass; - + boolean specExecEnabled) { ProtocolVersion protocolVersion = context.getProtocolVersion(); if (!context .getProtocolVersionRegistry() @@ -181,6 +211,7 @@ public ContinuousRequestHandlerBase( LOG.trace("[{}] Creating new continuous handler for request {}", logPrefix, statement); this.statement = statement; this.session = session; + this.keyspace = session.getKeyspace().orElse(null); this.context = context; this.executionProfile = Conversions.resolveExecutionProfile(this.statement, this.context); this.queryPlan = @@ -196,22 +227,34 @@ public ContinuousRequestHandlerBase( ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) : idempotent; this.timer = context.getNettyOptions().getTimer(); - this.protocolBackpressureAvailable = protocolVersion.getCode() >= DseProtocolVersion.DSE_V2.getCode(); this.throttler = context.getRequestThrottler(); this.sessionMetricUpdater = session.getMetricUpdater(); - this.startTimeNanos = System.nanoTime(); + this.specExecEnabled = specExecEnabled && isIdempotent; + this.speculativeExecutionPolicy = + this.specExecEnabled + ? context.getSpeculativeExecutionPolicy(executionProfile.getName()) + : null; + this.scheduledExecutions = this.specExecEnabled ? new CopyOnWriteArrayList<>() : null; } + /** @return The global timeout, or {@link Duration#ZERO} to disable it. */ @NonNull - protected abstract Duration getGlobalTimeout(); + protected abstract Duration getGlobalTimeoutDuration(); + /** + * @return The timeout for page pageNumber, or {@link Duration#ZERO} to disable it. + */ @NonNull - protected abstract Duration getPageTimeout(int pageNumber); + protected abstract Duration getPageTimeoutDuration(int pageNumber); + /** + * @return The timeout for REVISE requests (cancellation and backpressure), or {@link + * Duration#ZERO} to disable it. + */ @NonNull - protected abstract Duration getReviseRequestTimeout(); + protected abstract Duration getReviseRequestTimeoutDuration(); protected abstract int getMaxEnqueuedPages(); @@ -226,61 +269,65 @@ public ContinuousRequestHandlerBase( protected abstract Map createPayload(); @NonNull - protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfoT executionInfo); - - protected abstract int pageNumber(@NonNull ResultSetT resultSet); + protected abstract ResultSetT createResultSet( + @NonNull Rows rows, + @NonNull ExecutionInfoT executionInfo, + @NonNull ColumnDefinitions columnDefinitions) + throws IOException; + /** @return An empty result set; used only when the retry policy decides to ignore the error. 
*/ @NonNull - protected abstract ExecutionInfoT createExecutionInfo( - @NonNull Result result, @Nullable Frame response); + protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfoT executionInfo); @NonNull - protected abstract ResultSetT createResultSet( - @NonNull Rows rows, @NonNull ExecutionInfoT executionInfo) throws IOException; + protected abstract ExecutionInfoT createExecutionInfo( + @NonNull Node node, + @Nullable Result result, + @Nullable Frame response, + int successfulExecutionIndex); - // MAIN LIFECYCLE + protected abstract int pageNumber(@NonNull ResultSetT resultSet); - @Override - public void onStreamIdAssigned(int streamId) { - LOG.trace("[{}] Assigned streamId {} on node {}", logPrefix, streamId, node); - this.streamId = streamId; + public CompletionStage handle() { + startTimeNanos = System.nanoTime(); + lock.lock(); + try { + this.queue = new ArrayDeque<>(getMaxEnqueuedPages()); + this.numPagesRequested = protocolBackpressureAvailable ? getMaxEnqueuedPages() : 0; + } finally { + lock.unlock(); + } + scheduleGlobalTimeout(); + // must be done last since it may trigger an immediate call to #onThrottleReady + throttler.register(this); + return dequeueOrCreatePending(); } @Override - public boolean isLastResponse(@NonNull Frame responseFrame) { - Message message = responseFrame.message; - if (message instanceof Rows) { - Rows rows = (Rows) message; - DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); - return metadata.isLastContinuousPage; - } else { - return message instanceof Error; + public void onThrottleReady(boolean wasDelayed) { + if (wasDelayed + // avoid call to nanoTime() if metric is disabled: + && sessionMetricUpdater.isEnabled( + DefaultSessionMetric.THROTTLING_DELAY, executionProfile.getName())) { + sessionMetricUpdater.updateTimer( + DefaultSessionMetric.THROTTLING_DELAY, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); } + 
activeExecutionsCount.incrementAndGet(); + sendRequest(null, 0, 0, specExecEnabled); } @Override - public void onThrottleReady(boolean wasDelayed) { - if (wasDelayed) { + public void onThrottleFailure(@NonNull RequestThrottlingException error) { + if (sessionMetricUpdater.isEnabled( + DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName())) { session .getMetricUpdater() - .updateTimer( - DefaultSessionMetric.THROTTLING_DELAY, - executionProfile.getName(), - System.nanoTime() - startTimeNanos, - TimeUnit.NANOSECONDS); + .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); } - lock.lock(); - try { - this.queue = new ArrayDeque<>(getMaxEnqueuedPages()); - this.numPagesRequested = protocolBackpressureAvailable ? getMaxEnqueuedPages() : 0; - } finally { - lock.unlock(); - } - sendRequest(null); - } - - public CompletionStage handle() { - return dequeueOrCreatePending(); + setFailed(null, error); } /** @@ -288,9 +335,18 @@ public CompletionStage handle() { * * @param node if not null, it will be attempted first before the rest of the query plan. It * happens only when we retry on the same host. + * @param currentExecutionIndex 0 for the initial execution, 1 for the first speculative one, etc. 
+ * @param retryCount the number of times that the retry policy was invoked for this execution + * already (note that some internal retries don't go through the policy, and therefore don't + * increment this counter) + * @param scheduleSpeculativeExecution whether to schedule the next speculative execution */ - private void sendRequest(@Nullable Node node) { - channel = null; + private void sendRequest( + @Nullable Node node, + int currentExecutionIndex, + int retryCount, + boolean scheduleSpeculativeExecution) { + DriverChannel channel = null; if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { while ((node = queryPlan.poll()) != null) { channel = session.getChannel(node, logPrefix); @@ -299,79 +355,231 @@ private void sendRequest(@Nullable Node node) { } } } - if (channel == null || node == null) { + if (channel == null) { // We've reached the end of the query plan without finding any node to write to; abort the // continuous paging session. - lock.lock(); - try { - abort(AllNodesFailedException.fromErrors(errors), false); - } finally { - lock.unlock(); + if (activeExecutionsCount.decrementAndGet() == 0) { + AllNodesFailedException error = AllNodesFailedException.fromErrors(errors); + setFailed(null, error); } } else { - this.node = node; - streamId = -1; - messageStartTimeNanos = System.nanoTime(); - channel.write(getMessage(), isTracingEnabled(), createPayload(), this).addListener(this); + NodeResponseCallback nodeResponseCallback = + new NodeResponseCallback( + node, + channel, + currentExecutionIndex, + retryCount, + scheduleSpeculativeExecution, + logPrefix); + channel + .write(getMessage(), isTracingEnabled(), createPayload(), nodeResponseCallback) + .addListener(nodeResponseCallback); } } /** - * Invoked when the write from {@link #sendRequest(Node)} completes. + * Handles the interaction with a single node in the query plan. * - * @param future The future representing the outcome of the write operation. + *

      An instance of this class is created each time we (re)try a node. */ - @Override - public void operationComplete(@NonNull Future future) { - if (!future.isSuccess()) { - Throwable error = future.cause(); - if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { - trackNodeError(node, error.getCause()); - lock.lock(); - try { - abort(error.getCause(), false); - } finally { - lock.unlock(); + private class NodeResponseCallback + implements ResponseCallback, GenericFutureListener> { + + private final long nodeStartTimeNanos = System.nanoTime(); + private final Node node; + private final DriverChannel channel; + // The identifier of the current execution (0 for the initial execution, 1 for the first + // speculative execution, etc.) + private final int executionIndex; + // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for + // the first attempt of each execution). + private final int retryCount; + private final boolean scheduleSpeculativeExecution; + private final String logPrefix; + private final AtomicBoolean cancelled = new AtomicBoolean(false); + + // These are volatile because they can be accessed outside the event loop, mostly inside + // dequeueOrCreatePending and maybeRequestMore, to check whether we need to set a new page + // timeout or to determine if we need to send a new backpressure request. 
+ private volatile int streamId = -1; + private volatile int currentPage = 1; + private volatile Timeout pageTimeout; + + private ColumnDefinitions columnDefinitions; + + NodeResponseCallback( + Node node, + DriverChannel channel, + int executionIndex, + int retryCount, + boolean scheduleSpeculativeExecution, + String logPrefix) { + this.node = node; + this.channel = channel; + this.executionIndex = executionIndex; + this.retryCount = retryCount; + this.scheduleSpeculativeExecution = scheduleSpeculativeExecution; + this.logPrefix = logPrefix + "|" + executionIndex; + } + + @Override + public void onStreamIdAssigned(int streamId) { + LOG.trace("[{}] Assigned streamId {} on node {}", logPrefix, streamId, node); + this.streamId = streamId; + } + + @Override + public boolean isLastResponse(@NonNull Frame responseFrame) { + Message message = responseFrame.message; + if (message instanceof Rows) { + Rows rows = (Rows) message; + DseRowsMetadata metadata = (DseRowsMetadata) rows.getMetadata(); + return metadata.isLastContinuousPage; + } else { + return message instanceof Error; + } + } + + /** + * Invoked when the write of a request completes. + * + * @param future The future representing the outcome of the write operation. 
+ */ + @Override + public void operationComplete(@NonNull Future future) { + if (!future.isSuccess()) { + Throwable error = future.cause(); + if (error instanceof EncoderException + && error.getCause() instanceof FrameTooLongException) { + trackNodeError(error.getCause()); + handleNodeFailure(error.getCause()); + } else { + LOG.trace( + "[{}] Failed to send request on {}, trying next node (cause: {})", + logPrefix, + channel, + error); + ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); + recordError(node, error); + trackNodeError(error); + sendRequest(null, executionIndex, retryCount, scheduleSpeculativeExecution); } } else { - LOG.trace( - "[{}] Failed to send request on {}, trying next node (cause: {})", - logPrefix, - channel, - error); - ((DefaultNode) node) - .getMetricUpdater() - .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); - recordError(node, error); - trackNodeError(node, error.getCause()); - sendRequest(null); + LOG.trace("[{}] Request sent on {}", logPrefix, channel); + if (doneFuture.isDone()) { + cancelExecution(); + } else { + inFlightCallbacks.add(this); + if (scheduleSpeculativeExecution && currentPage == 1) { + int nextExecution = executionIndex + 1; + // Note that `node` is the first node of the execution, it might not be the "slow" one + // if there were retries, but in practice retries are rare. 
+ long nextDelay = + speculativeExecutionPolicy.nextExecution(node, keyspace, statement, nextExecution); + if (nextDelay >= 0) { + scheduleSpeculativeExecution(nextExecution, nextDelay); + } else { + LOG.trace( + "[{}] Speculative execution policy returned {}, no next execution", + logPrefix, + nextDelay); + } + } + schedulePageTimeout(); + } } - } else { - LOG.trace("[{}] Request sent on {}", logPrefix, channel); - pageTimeout = schedulePageTimeout(1); - globalTimeout = scheduleGlobalTimeout(); } - } - /** - * Invoked when a continuous paging response is received, either a successful or failed one. - * - *

      Delegates further processing to appropriate methods: {@link #processResultResponse(Result, - * Frame)} if the response was successful, or {@link #processErrorResponse(Error)} if it wasn't. - * - * @param response the received {@link Frame}. - */ - @Override - public void onResponse(@NonNull Frame response) { - stopNodeMessageTimer(); - cancelTimeout(pageTimeout); - lock.lock(); - try { - if (state < 0) { - LOG.trace("[{}] Got result but the request has been cancelled, ignoring", logPrefix); + private void scheduleSpeculativeExecution(int nextExecutionIndex, long delay) { + LOG.trace( + "[{}] Scheduling speculative execution {} in {} ms", + logPrefix, + nextExecutionIndex, + delay); + try { + scheduledExecutions.add( + timer.newTimeout( + (Timeout timeout) -> { + if (!doneFuture.isDone()) { + LOG.trace( + "[{}] Starting speculative execution {}", logPrefix, nextExecutionIndex); + activeExecutionsCount.incrementAndGet(); + startedSpeculativeExecutionsCount.incrementAndGet(); + incrementNodeSpecExecMetric(); + sendRequest(null, nextExecutionIndex, 0, true); + } + }, + delay, + TimeUnit.MILLISECONDS)); + } catch (IllegalStateException e) { + logTimeoutSchedulingError(e); + } + } + + private void schedulePageTimeout() { + int expectedPage = currentPage; + if (expectedPage < 0) { + pageTimeout = null; + return; + } + Duration timeout = getPageTimeoutDuration(expectedPage); + if (timeout.toNanos() <= 0) { + pageTimeout = null; + return; + } + LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); + try { + pageTimeout = + timer.newTimeout( + timeout1 -> { + if (currentPage == expectedPage) { + LOG.trace( + "[{}] Timeout fired for page {}, cancelling execution", + logPrefix, + currentPage); + handleNodeFailure( + new DriverTimeoutException( + String.format("Timed out waiting for page %d", expectedPage))); + } else { + // Ignore timeout if the request has moved on in the interim. 
+ LOG.trace( + "[{}] Timeout fired for page {} but query already at state {}, skipping", + logPrefix, + expectedPage, + currentPage); + } + }, + timeout.toNanos(), + TimeUnit.NANOSECONDS); + } catch (IllegalStateException e) { + logTimeoutSchedulingError(e); + } + } + + /** + * Invoked when a continuous paging response is received, either a successful or failed one. + * + *

      Delegates further processing to appropriate methods: {@link #processResultResponse(Result, + * Frame)} if the response was successful, or {@link #processErrorResponse(Error)} if it wasn't. + * + * @param response the received {@link Frame}. + */ + @Override + public void onResponse(@NonNull Frame response) { + stopNodeMessageTimer(); + cancelPageTimeout(); + if (doneFuture.isDone()) { + LOG.trace( + "[{}] Got result but the request has been cancelled or completed by another execution, ignoring", + logPrefix); + // cancel to make sure the server will stop sending pages + cancelExecution(); return; } try { + logServerWarnings(response.warnings); Message responseMessage = response.message; if (responseMessage instanceof Result) { LOG.trace("[{}] Got result", logPrefix); @@ -382,380 +590,607 @@ public void onResponse(@NonNull Frame response) { } else { IllegalStateException error = new IllegalStateException("Unexpected response " + responseMessage); - trackNodeError(node, error); - abort(error, false); + trackNodeError(error); + handleNodeFailure(error); } } catch (Throwable t) { - trackNodeError(node, t); - abort(t, false); + trackNodeError(t); + handleNodeFailure(t); } - } finally { - lock.unlock(); } - } - /** - * Invoked when a continuous paging request hits an unexpected error. - * - *

      Delegates further processing to to the retry policy ({@link - * #processRetryDecision(RetryDecision, Throwable)}. - * - * @param error the error encountered, usually a network problem. - */ - @Override - public void onFailure(@NonNull Throwable error) { - cancelTimeout(pageTimeout); - LOG.trace(String.format("[%s] Request failure", logPrefix), error); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; - } else { - decision = retryPolicy.onRequestAborted(statement, error, retryCount); - } - updateErrorMetrics( - ((DefaultNode) node).getMetricUpdater(), - decision, - DefaultNodeMetric.ABORTED_REQUESTS, - DefaultNodeMetric.RETRIES_ON_ABORTED, - DefaultNodeMetric.IGNORES_ON_ABORTED); - lock.lock(); - try { + /** + * Invoked when a continuous paging request hits an unexpected error. + * + *

      Delegates further processing to to the retry policy ({@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the error encountered, usually a network problem. + */ + @Override + public void onFailure(@NonNull Throwable error) { + // do not update node metrics + cancelPageTimeout(); + if (doneFuture.isDone()) { + cancelExecution(); + return; + } + LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); + RetryDecision decision; + if (!isIdempotent || error instanceof FrameTooLongException) { + decision = RetryDecision.RETHROW; + } else { + decision = retryPolicy.onRequestAborted(statement, error, retryCount); + } + updateNodeErrorMetrics( + decision, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED); processRetryDecision(decision, error); - } finally { - lock.unlock(); - } - } - - @Override - public void onThrottleFailure(@NonNull RequestThrottlingException error) { - session - .getMetricUpdater() - .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); - lock.lock(); - try { - abort(error, false); - } finally { - lock.unlock(); } - } - - // PROCESSING METHODS - /** - * Processes a new result response, creating the corresponding {@link ResultSetT} object and then - * enqueuing it or serving it directly to the user if he was waiting for it. - * - * @param result the result to process. It is normally a {@link Rows} object, but may be a {@link - * Void} object if the retry policy decided to ignore an error. - * @param frame the {@link Frame} (used to create the {@link ExecutionInfo} the first time). 
- */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { - assert lock.isHeldByCurrentThread(); - try { - ExecutionInfoT executionInfo = createExecutionInfo(result, frame); - if (result instanceof Rows) { - DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); - if (columnDefinitions == null) { - // Contrary to ROWS responses from regular queries, - // the first page always includes metadata so we use this - // regardless of whether or not the query was from a prepared statement. - columnDefinitions = Conversions.toColumnDefinitions(rowsMetadata, context); - } - int pageNumber = rowsMetadata.continuousPageNumber; - int currentPage = state; - if (pageNumber != currentPage) { - abort( - new IllegalStateException( - String.format("Received page %d but was expecting %d", pageNumber, currentPage)), - false); - } else { - int pageSize = ((Rows) result).getData().size(); - ResultSetT resultSet = createResultSet((Rows) result, executionInfo); - if (rowsMetadata.isLastContinuousPage) { - LOG.trace("[{}] Received last page ({} - {} rows)", logPrefix, pageNumber, pageSize); - state = STATE_FINISHED; - reenableAutoReadIfNeeded(); - enqueueOrCompletePending(resultSet); - stopGlobalRequestTimer(); - cancelTimeout(globalTimeout); - } else { - LOG.trace("[{}] Received page {} ({} rows)", logPrefix, pageNumber, pageSize); - if (currentPage > 0) { - state = currentPage + 1; + /** + * Processes a new result response, creating the corresponding {@link ResultSetT} object and + * then enqueuing it or serving it directly to the user if he was waiting for it. + * + * @param result the result to process. It is normally a {@link Rows} object, but may be a + * {@link Void} object if the retry policy decided to ignore an error. + * @param frame the {@link Frame} (used to create the {@link ExecutionInfo} the first time). 
+ */ + private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { + try { + if (setChosenExecution(this)) { + ExecutionInfoT executionInfo = createExecutionInfo(node, result, frame, executionIndex); + if (result instanceof Rows) { + DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); + int pageNumber = rowsMetadata.continuousPageNumber; + int currentPage = this.currentPage; + if (pageNumber != currentPage) { + IllegalStateException error = + new IllegalStateException( + String.format( + "Received page %d but was expecting %d", pageNumber, currentPage)); + handleNodeFailure(error); + } else { + if (columnDefinitions == null) { + // Contrary to ROWS responses from regular queries, + // the first page always includes metadata so we use this + // regardless of whether or not the query was from a prepared statement. + columnDefinitions = Conversions.toColumnDefinitions(rowsMetadata, context); + } + ResultSetT resultSet = + createResultSet((Rows) result, executionInfo, columnDefinitions); + if (rowsMetadata.isLastContinuousPage) { + int pageSize = ((Rows) result).getData().size(); + LOG.trace( + "[{}] Received last page ({} - {} rows)", logPrefix, pageNumber, pageSize); + stopExecution(); + setCompleted(this); + } else { + int pageSize = ((Rows) result).getData().size(); + LOG.trace("[{}] Received page {} ({} rows)", logPrefix, pageNumber, pageSize); + this.currentPage = currentPage + 1; + } + enqueueOrCompletePending(resultSet); + trackNodeSuccess(); } + } else { + // Void responses happen only when the retry decision is ignore. 
+ assert result instanceof Void; + LOG.trace( + "[{}] Continuous paging interrupted by retry policy decision to ignore error", + logPrefix); + ResultSetT resultSet = createEmptyResultSet(executionInfo); + stopExecution(); + setCompleted(this); enqueueOrCompletePending(resultSet); + trackNodeSuccess(); } + } else { + LOG.trace( + "[{}] Discarding response from execution {} because another execution was chosen", + logPrefix, + executionIndex); + cancelExecution(); } - } else { - // Void responses happen only when the retry decision is ignore. - assert result instanceof Void; - ResultSetT resultSet = createEmptyResultSet(executionInfo); - LOG.trace( - "[{}] Continuous paging interrupted by retry policy decision to ignore error", - logPrefix); - state = STATE_FINISHED; - reenableAutoReadIfNeeded(); - enqueueOrCompletePending(resultSet); - stopGlobalRequestTimer(); - cancelTimeout(globalTimeout); + } catch (Throwable error) { + trackNodeError(error); + handleNodeFailure(error); } - } catch (Throwable error) { - abort(error, false); } - } - /** - * Processes an unsuccessful response. - * - *

      Depending on the error, may trigger: - * - *

        - *
      1. a re-prepare cycle, see {@link #processUnprepared(Unprepared)}; - *
      2. an immediate retry on the next host, bypassing the retry policy, if the host was - * bootstrapping; - *
      3. an immediate abortion if the error is unrecoverable; - *
      4. further processing if the error is recoverable, see {@link - * #processRecoverableError(CoordinatorException)} - *
      - * - * @param errorMessage the error message received. - */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processErrorResponse(@NonNull Error errorMessage) { - assert lock.isHeldByCurrentThread(); - if (errorMessage instanceof Unprepared) { - processUnprepared((Unprepared) errorMessage); - } else { - CoordinatorException error = DseConversions.toThrowable(node, errorMessage, context); - if (error instanceof BootstrappingException) { - LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); - recordError(node, error); - trackNodeError(node, error); - sendRequest(null); - } else if (error instanceof QueryValidationException - || error instanceof FunctionFailureException - || error instanceof ProtocolError - || state > 1) { - // we only process recoverable errors for the first page, - // errors on subsequent pages will always trigger an immediate abortion - LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); - NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); - metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); - trackNodeError(node, error); - abort(error, true); + /** + * Processes an unsuccessful response. + * + *

      Depending on the error, may trigger: + * + *

        + *
      1. a re-prepare cycle, see {@link #processUnprepared(Unprepared)}; + *
      2. an immediate retry on the next host, bypassing the retry policy, if the host was + * bootstrapping; + *
      3. an immediate abortion if the error is unrecoverable; + *
      4. further processing if the error is recoverable, see {@link + * #processRecoverableError(CoordinatorException)} + *
      + * + * @param errorMessage the error message received. + */ + private void processErrorResponse(@NonNull Error errorMessage) { + if (errorMessage instanceof Unprepared) { + processUnprepared((Unprepared) errorMessage); } else { - processRecoverableError(error); + CoordinatorException error = DseConversions.toThrowable(node, errorMessage, context); + if (error instanceof BootstrappingException) { + LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); + recordError(node, error); + trackNodeError(error); + sendRequest(null, executionIndex, retryCount, false); + } else if (error instanceof QueryValidationException + || error instanceof FunctionFailureException + || error instanceof ProtocolError + || currentPage > 1) { + // we only process recoverable errors for the first page, + // errors on subsequent pages will always trigger an immediate abortion + LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + metricUpdater.incrementCounter( + DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); + trackNodeError(error); + handleNodeFailure(error); + } else { + processRecoverableError(error); + } } } - } - /** - * Processes a recoverable error. - * - *

      In most cases, delegates to the retry policy and its decision, see {@link - * #processRetryDecision(RetryDecision, Throwable)}. - * - * @param error the recoverable error. - */ - private void processRecoverableError(@NonNull CoordinatorException error) { - assert lock.isHeldByCurrentThread(); - NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); - RetryDecision decision; - if (error instanceof ReadTimeoutException) { - ReadTimeoutException readTimeout = (ReadTimeoutException) error; - decision = - retryPolicy.onReadTimeout( - statement, - readTimeout.getConsistencyLevel(), - readTimeout.getBlockFor(), - readTimeout.getReceived(), - readTimeout.wasDataPresent(), - retryCount); - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.READ_TIMEOUTS, - DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, - DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); - } else if (error instanceof WriteTimeoutException) { - WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - if (isIdempotent) { + /** + * Processes a recoverable error. + * + *

      In most cases, delegates to the retry policy and its decision, see {@link + * #processRetryDecision(RetryDecision, Throwable)}. + * + * @param error the recoverable error. + */ + private void processRecoverableError(@NonNull CoordinatorException error) { + RetryDecision decision; + if (error instanceof ReadTimeoutException) { + ReadTimeoutException readTimeout = (ReadTimeoutException) error; + decision = + retryPolicy.onReadTimeout( + statement, + readTimeout.getConsistencyLevel(), + readTimeout.getBlockFor(), + readTimeout.getReceived(), + readTimeout.wasDataPresent(), + retryCount); + updateNodeErrorMetrics( + decision, + DefaultNodeMetric.READ_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); + } else if (error instanceof WriteTimeoutException) { + WriteTimeoutException writeTimeout = (WriteTimeoutException) error; + if (isIdempotent) { + decision = + retryPolicy.onWriteTimeout( + statement, + writeTimeout.getConsistencyLevel(), + writeTimeout.getWriteType(), + writeTimeout.getBlockFor(), + writeTimeout.getReceived(), + retryCount); + } else { + decision = RetryDecision.RETHROW; + } + updateNodeErrorMetrics( + decision, + DefaultNodeMetric.WRITE_TIMEOUTS, + DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, + DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); + } else if (error instanceof UnavailableException) { + UnavailableException unavailable = (UnavailableException) error; decision = - retryPolicy.onWriteTimeout( + retryPolicy.onUnavailable( statement, - writeTimeout.getConsistencyLevel(), - writeTimeout.getWriteType(), - writeTimeout.getBlockFor(), - writeTimeout.getReceived(), + unavailable.getConsistencyLevel(), + unavailable.getRequired(), + unavailable.getAlive(), retryCount); + updateNodeErrorMetrics( + decision, + DefaultNodeMetric.UNAVAILABLES, + DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, + DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { - decision = RetryDecision.RETHROW; + decision = + isIdempotent + 
? retryPolicy.onErrorResponse(statement, error, retryCount) + : RetryDecision.RETHROW; + updateNodeErrorMetrics( + decision, + DefaultNodeMetric.OTHER_ERRORS, + DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, + DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); } - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.WRITE_TIMEOUTS, - DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, - DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); - } else if (error instanceof UnavailableException) { - UnavailableException unavailable = (UnavailableException) error; - decision = - retryPolicy.onUnavailable( - statement, - unavailable.getConsistencyLevel(), - unavailable.getRequired(), - unavailable.getAlive(), - retryCount); - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.UNAVAILABLES, - DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, - DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); - } else { - decision = - isIdempotent - ? retryPolicy.onErrorResponse(statement, error, retryCount) - : RetryDecision.RETHROW; - updateErrorMetrics( - metricUpdater, - decision, - DefaultNodeMetric.OTHER_ERRORS, - DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, - DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); + processRetryDecision(decision, error); } - processRetryDecision(decision, error); - } - /** - * Processes an {@link Unprepared} error by re-preparing then retrying on the same host. - * - * @param errorMessage the unprepared error message. 
- */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void processUnprepared(@NonNull Unprepared errorMessage) { - assert lock.isHeldByCurrentThread(); - ByteBuffer idToReprepare = ByteBuffer.wrap(errorMessage.id); - LOG.trace( - "[{}] Statement {} is not prepared on {}, re-preparing", - logPrefix, - Bytes.toHexString(idToReprepare), - node); - RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); - if (repreparePayload == null) { - throw new IllegalStateException( - String.format( - "Tried to execute unprepared query %s but we don't have the data to re-prepare it", - Bytes.toHexString(idToReprepare))); - } - Prepare prepare = repreparePayload.toMessage(); - Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); - ThrottledAdminRequestHandler.prepare( - channel, - true, - prepare, - repreparePayload.customPayload, - timeout, - throttler, - sessionMetricUpdater, - logPrefix) - .start() - .whenComplete( - (repreparedId, exception) -> { - // If we run into an unrecoverable error, surface it to the client instead of retrying - Throwable fatalError = null; - if (exception == null) { - if (!repreparedId.equals(idToReprepare)) { - IllegalStateException illegalStateException = - new IllegalStateException( - String.format( - "ID mismatch while trying to reprepare (expected %s, got %s). " - + "This prepared statement won't work anymore. " - + "This usually happens when you run a 'USE...' query after " - + "the statement was prepared.", - Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); - trackNodeError(node, illegalStateException); - fatalError = illegalStateException; + /** + * Processes an {@link Unprepared} error by re-preparing then retrying on the same host. + * + * @param errorMessage the unprepared error message. 
+ */ + private void processUnprepared(@NonNull Unprepared errorMessage) { + ByteBuffer idToReprepare = ByteBuffer.wrap(errorMessage.id); + LOG.trace( + "[{}] Statement {} is not prepared on {}, re-preparing", + logPrefix, + Bytes.toHexString(idToReprepare), + node); + RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); + if (repreparePayload == null) { + throw new IllegalStateException( + String.format( + "Tried to execute unprepared query %s but we don't have the data to re-prepare it", + Bytes.toHexString(idToReprepare))); + } + Prepare prepare = repreparePayload.toMessage(); + Duration timeout = executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); + ThrottledAdminRequestHandler.prepare( + channel, + true, + prepare, + repreparePayload.customPayload, + timeout, + throttler, + sessionMetricUpdater, + logPrefix) + .start() + .whenComplete( + (repreparedId, exception) -> { + // If we run into an unrecoverable error, surface it to the client instead of + // retrying + Throwable fatalError = null; + if (exception == null) { + if (!repreparedId.equals(idToReprepare)) { + fatalError = + new IllegalStateException( + String.format( + "ID mismatch while trying to reprepare (expected %s, got %s). " + + "This prepared statement won't work anymore. " + + "This usually happens when you run a 'USE...' 
query after " + + "the statement was prepared.", + Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); + trackNodeError(fatalError); + } else { + LOG.trace( + "[{}] Re-prepare successful, retrying on the same node ({})", + logPrefix, + node); + stopExecution(); + sendRequest(node, executionIndex, retryCount, false); + } } else { - LOG.trace( - "[{}] Re-prepare successful, retrying on the same node ({})", - logPrefix, - node); - sendRequest(node); - } - } else { - if (exception instanceof UnexpectedResponseException) { - Message prepareErrorMessage = ((UnexpectedResponseException) exception).message; - if (prepareErrorMessage instanceof Error) { - CoordinatorException prepareError = - DseConversions.toThrowable(node, (Error) prepareErrorMessage, context); - if (prepareError instanceof QueryValidationException - || prepareError instanceof FunctionFailureException - || prepareError instanceof ProtocolError) { - LOG.trace("[{}] Unrecoverable error on re-prepare, rethrowing", logPrefix); - trackNodeError(node, prepareError); - fatalError = prepareError; + if (exception instanceof UnexpectedResponseException) { + Message prepareErrorMessage = ((UnexpectedResponseException) exception).message; + if (prepareErrorMessage instanceof Error) { + CoordinatorException prepareError = + DseConversions.toThrowable(node, (Error) prepareErrorMessage, context); + if (prepareError instanceof QueryValidationException + || prepareError instanceof FunctionFailureException + || prepareError instanceof ProtocolError) { + LOG.trace("[{}] Unrecoverable error on re-prepare, rethrowing", logPrefix); + trackNodeError(prepareError); + fatalError = prepareError; + } } + } else if (exception instanceof RequestThrottlingException) { + trackNodeError(exception); + fatalError = exception; + } + if (fatalError == null) { + LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); + recordError(node, exception); + trackNodeError(exception); + stopExecution(); + sendRequest(null, 
executionIndex, retryCount, false); + } else { + handleNodeFailure(fatalError); } - } else if (exception instanceof RequestThrottlingException) { - trackNodeError(node, exception); - fatalError = exception; } - if (fatalError == null) { - LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); - recordError(node, exception); - trackNodeError(node, exception); - sendRequest(null); + }); + } + + /** + * Processes the retry decision by triggering a retry, aborting or ignoring; also records the + * failures for further access. + * + * @param decision the decision to process. + * @param error the original error. + */ + private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { + LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); + switch (decision) { + case RETRY_SAME: + recordError(node, error); + trackNodeError(error); + stopExecution(); + sendRequest(node, executionIndex, retryCount + 1, false); + break; + case RETRY_NEXT: + recordError(node, error); + trackNodeError(error); + stopExecution(); + sendRequest(null, executionIndex, retryCount + 1, false); + break; + case RETHROW: + trackNodeError(error); + handleNodeFailure(error); + break; + case IGNORE: + processResultResponse(Void.INSTANCE, null); + break; + } + } + + private void logServerWarnings(List warnings) { + // log the warnings if they have NOT been disabled + if (warnings != null + && !warnings.isEmpty() + && executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) + && LOG.isWarnEnabled()) { + // use the RequestLogFormatter to format the query + StringBuilder statementString = new StringBuilder(); + context + .getRequestLogFormatter() + .appendRequest( + statement, + executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_QUERY_LENGTH, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_QUERY_LENGTH), + executionProfile.getBoolean( + DefaultDriverOption.REQUEST_LOGGER_VALUES, + RequestLogger.DEFAULT_REQUEST_LOGGER_SHOW_VALUES), + 
executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_VALUES, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_VALUES), + executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_VALUE_LENGTH, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_VALUE_LENGTH), + statementString); + // log each warning separately + warnings.forEach( + (warning) -> + LOG.warn( + "Query '{}' generated server side warning(s): {}", statementString, warning)); + } + } + + private void stopNodeMessageTimer() { + NodeMetricUpdater nodeMetricUpdater = ((DefaultNode) node).getMetricUpdater(); + if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.CQL_MESSAGES, executionProfile.getName())) { + nodeMetricUpdater.updateTimer( + DefaultNodeMetric.CQL_MESSAGES, + executionProfile.getName(), + System.nanoTime() - nodeStartTimeNanos, + TimeUnit.NANOSECONDS); + } + } + + private void incrementNodeSpecExecMetric() { + NodeMetricUpdater nodeMetricUpdater = ((DefaultNode) node).getMetricUpdater(); + if (nodeMetricUpdater.isEnabled( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName())) { + nodeMetricUpdater.incrementCounter( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName()); + } + } + + private void updateNodeErrorMetrics( + @NonNull RetryDecision decision, + @NonNull DefaultNodeMetric error, + @NonNull DefaultNodeMetric retriesOnError, + @NonNull DefaultNodeMetric ignoresOnError) { + NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); + metricUpdater.incrementCounter(error, executionProfile.getName()); + switch (decision) { + case RETRY_SAME: + case RETRY_NEXT: + metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); + metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); + break; + case IGNORE: + metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); + metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); + break; + case RETHROW: 
+ // nothing do do + } + } + + private void trackNodeSuccess() { + RequestTracker requestTracker = context.getRequestTracker(); + if (!(requestTracker instanceof NoopRequestTracker)) { + long latencyNanos = System.nanoTime() - nodeStartTimeNanos; + requestTracker.onNodeSuccess(statement, latencyNanos, executionProfile, node, logPrefix); + } + } + + private void trackNodeError(@NonNull Throwable error) { + RequestTracker requestTracker = context.getRequestTracker(); + if (!(requestTracker instanceof NoopRequestTracker)) { + long latencyNanos = System.nanoTime() - nodeStartTimeNanos; + requestTracker.onNodeError( + statement, error, latencyNanos, executionProfile, node, logPrefix); + } + } + + private void cancelPageTimeout() { + if (pageTimeout != null) { + LOG.trace("[{}] Cancelling page timeout", logPrefix); + pageTimeout.cancel(); + } + } + + /** + * Cancels this execution (see below) and tries to define itself as the chosen execution, in + * which case, it will also fail the entire operation. + */ + private void handleNodeFailure(Throwable error) { + cancelExecution(); + if (setChosenExecution(this)) { + setFailed(this, error); + } else { + LOG.trace( + "[{}] Discarding error from execution {} because another execution was chosen", + logPrefix, + executionIndex); + } + } + + /** + * Cancels this execution. It will only actually cancel once, other invocations are no-ops. A + * cancellation consists of: cancelling this callback, sending a cancel request to the server, + * and stopping the execution (final cleanup). + * + *

      Cancellation can happen when the execution fails, when the users cancels a future, or when + * a global timeout is fired. + */ + private void cancelExecution() { + if (cancelled.compareAndSet(false, true)) { + try { + LOG.trace("[{}] Cancelling execution", logPrefix); + if (!channel.closeFuture().isDone()) { + channel.cancel(this); + } + sendCancelRequest(); + stopExecution(); + } catch (Throwable t) { + Loggers.warnWithException( + LOG, "[{}] Error cancelling execution {}", logPrefix, executionIndex, t); + } + } + } + + /** + * Stops the execution, that is, removes this callback from the in-flight list and re-enables + * autoread. In general one should call {@link #cancelExecution()}, unless it is certain that + * the server has stopped sending responses and the channel is in autoread. + */ + private void stopExecution() { + inFlightCallbacks.remove(this); + enableAutoReadIfNeeded(); + } + + private void enableAutoReadIfNeeded() { + // Make sure we don't leave the channel unreadable + if (!protocolBackpressureAvailable) { + LOG.trace("[{}] Re-enabling auto-read", logPrefix); + channel.config().setAutoRead(true); + } + } + + private void disableAutoReadIfNeeded() { + if (!protocolBackpressureAvailable) { + LOG.trace("[{}] Disabling auto-read", logPrefix); + channel.config().setAutoRead(false); + } + } + + private void sendCancelRequest() { + LOG.trace("[{}] Sending cancel request", logPrefix); + // Note: In DSE_V1, the cancellation message is called CANCEL, and in DSE_V2, it's + // called REVISE_REQUEST, but their structure is identical which is why we don't need to + // distinguish which protocol is being used. 
+ ThrottledAdminRequestHandler.query( + channel, + true, + Revise.cancelContinuousPaging(streamId), + statement.getCustomPayload(), + getReviseRequestTimeoutDuration(), + throttler, + session.getMetricUpdater(), + logPrefix, + "cancel request") + .start() + .whenComplete( + (result, error) -> { + if (error != null) { + LOG.debug( + "[{}] Error sending cancel request: {}. " + + "This is not critical (the request will eventually time out server-side).", + logPrefix, + error); + } else { + LOG.trace("[{}] Cancel request sent successfully", logPrefix); } - } - if (fatalError != null) { - lock.lock(); - try { - abort(fatalError, true); - } finally { - lock.unlock(); + }); + } + + /** + * Sends a request for more pages (a.k.a. backpressure request). This method should only be + * invoked if protocol backpressure is available. + * + * @param nextPages the number of extra pages to request. + */ + private void sendMorePagesRequest(int nextPages) { + assert protocolBackpressureAvailable + : "REVISE_REQUEST messages with revision type 2 require DSE_V2 or higher"; + LOG.trace("[{}] Sending request for more pages", logPrefix); + ThrottledAdminRequestHandler.query( + channel, + true, + Revise.requestMoreContinuousPages(streamId, nextPages), + statement.getCustomPayload(), + getReviseRequestTimeoutDuration(), + throttler, + session.getMetricUpdater(), + logPrefix, + "request " + nextPages + " more pages for id " + streamId) + .start() + .whenComplete( + (result, error) -> { + if (error != null) { + Loggers.warnWithException( + LOG, "[{}] Error requesting more pages, aborting.", logPrefix, error); + handleNodeFailure(error); } - } - }); + }); + } } /** - * Processes the retry decision by triggering a retry, aborting or ignoring; also records the - * failures for further access. + * Sets the given node callback as the callback that has been chosen to deliver results. * - * @param decision the decision to process. - * @param error the original error. + *

      Note that this method can be called many times, but only the first invocation will actually + * set the chosen callback. All subsequent invocations will return true if the given callback is + * the same as the previously chosen one. This means that once the chosen callback is set, it + * cannot change anymore. + * + * @param execution The node callback to set. + * @return true if the given node callback is now (or was already) the chosen one. */ - @SuppressWarnings({"NonAtomicOperationOnVolatileField", "NonAtomicVolatileUpdate"}) - private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { - assert lock.isHeldByCurrentThread(); - LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); - switch (decision) { - case RETRY_SAME: - recordError(node, error); - trackNodeError(node, error); - retryCount++; - sendRequest(node); - break; - case RETRY_NEXT: - recordError(node, error); - trackNodeError(node, error); - retryCount++; - sendRequest(null); - break; - case RETHROW: - trackNodeError(node, error); - abort(error, true); - break; - case IGNORE: - processResultResponse(Void.INSTANCE, null); - break; + private boolean setChosenExecution(@NonNull NodeResponseCallback execution) { + boolean isChosen; + boolean wasSet = false; + lock.lock(); + try { + if (chosenExecution == null) { + chosenExecution = execution; + wasSet = true; + } + isChosen = chosenExecution == execution; + } finally { + lock.unlock(); } + if (wasSet) { + // cancel all other pending executions, except the chosen one + cancelScheduledTasks(execution); + } + return isChosen; } - // PAGE HANDLING - /** * Enqueues a response or, if the client was already waiting for it, completes the pending future. * @@ -763,35 +1198,43 @@ private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Thro * * @param pageOrError the next page, or an error. 
*/ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held private void enqueueOrCompletePending(@NonNull Object pageOrError) { - assert lock.isHeldByCurrentThread(); - if (pendingResult != null) { - if (LOG.isTraceEnabled()) { - LOG.trace( - "[{}] Client was waiting on empty queue, completing with {}", - logPrefix, - asTraceString(pageOrError)); - } - CompletableFuture tmp = pendingResult; - // null out pendingResult before completing it because its completion - // may trigger a call to fetchNextPage -> dequeueOrCreatePending, - // which expects pendingResult to be null. - pendingResult = null; - completeResultSetFuture(tmp, pageOrError); - } else { - if (LOG.isTraceEnabled()) { - LOG.trace("[{}] Enqueuing {}", logPrefix, asTraceString(pageOrError)); - } - queue.add(pageOrError); - // Backpressure without protocol support: if the queue grows too large, - // disable auto-read so that the channel eventually becomes - // non-writable on the server side (causing it to back off for a while) - if (!protocolBackpressureAvailable && queue.size() == getMaxEnqueuedPages() && state > 0) { - LOG.trace( - "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, queue.size()); - channel.config().setAutoRead(false); + lock.lock(); + try { + if (pendingResult != null) { + if (LOG.isTraceEnabled()) { + LOG.trace( + "[{}] Client was waiting on empty queue, completing with {}", + logPrefix, + asTraceString(pageOrError)); + } + CompletableFuture tmp = pendingResult; + // null out pendingResult before completing it because its completion + // may trigger a call to fetchNextPage -> dequeueOrCreatePending, + // which expects pendingResult to be null. 
+ pendingResult = null; + completeResultSetFuture(tmp, pageOrError); + } else { + if (LOG.isTraceEnabled()) { + LOG.trace("[{}] Enqueuing {}", logPrefix, asTraceString(pageOrError)); + } + queue.add(pageOrError); + // Backpressure without protocol support: if the queue grows too large, + // disable auto-read so that the channel eventually becomes + // non-writable on the server side (causing it to back off for a while) + if (!protocolBackpressureAvailable + && chosenExecution != null + && queue.size() == getMaxEnqueuedPages() + && !doneFuture.isDone()) { + LOG.trace( + "[{}] Exceeded {} queued response pages, disabling auto-read", + logPrefix, + queue.size()); + chosenExecution.disableAutoReadIfNeeded(); + } } + } finally { + lock.unlock(); } } @@ -811,52 +1254,43 @@ public CompletableFuture dequeueOrCreatePending() { // (this is guaranteed by our public API because in order to ask for the next page, // you need the reference to the previous page). assert pendingResult == null; - - Object head = queue.poll(); - if (!protocolBackpressureAvailable - && head != null - && queue.size() == getMaxEnqueuedPages() - 1) { + if (doneFuture.isCancelled()) { LOG.trace( - "[{}] Back to {} queued response pages, re-enabling auto-read", - logPrefix, - queue.size()); - channel.config().setAutoRead(true); + "[{}] Client requested next page on cancelled operation, returning cancelled future", + logPrefix); + return newCancelledResultSetFuture(); } + Object head = queue.poll(); maybeRequestMore(); - if (head != null) { - if (state == STATE_FAILED && !(head instanceof Throwable)) { - LOG.trace( - "[{}] Client requested next page on cancelled queue, discarding page and returning cancelled future", - logPrefix); - return cancelledResultSetFuture(); - } else { - if (LOG.isTraceEnabled()) { - LOG.trace( - "[{}] Client requested next page on non-empty queue, returning immediate future of {}", - logPrefix, - asTraceString(head)); - } - return immediateResultSetFuture(head); + if (head == 
null) { + LOG.trace( + "[{}] Client requested next page but queue is empty, installing future", logPrefix); + pendingResult = newPendingResultSetFuture(); + // Only schedule a timeout if we're past the first page (the first page's timeout is + // handled in NodeResponseCallback.operationComplete). + if (chosenExecution != null && chosenExecution.currentPage > 1) { + chosenExecution.schedulePageTimeout(); + // Note: each new page timeout is cancelled when the next response arrives, see + // onResponse(Frame). } + return pendingResult; } else { - if (state == STATE_FAILED) { + if (!protocolBackpressureAvailable + && chosenExecution != null + && queue.size() == getMaxEnqueuedPages() - 1) { LOG.trace( - "[{}] Client requested next page on cancelled empty queue, returning cancelled future", - logPrefix); - return cancelledResultSetFuture(); - } else { + "[{}] Back to {} queued response pages, re-enabling auto-read", + logPrefix, + queue.size()); + chosenExecution.enableAutoReadIfNeeded(); + } + if (LOG.isTraceEnabled()) { LOG.trace( - "[{}] Client requested next page but queue is empty, installing future", logPrefix); - pendingResult = createResultSetFuture(); - // Only schedule a timeout if we're past the first page (the first page's timeout is - // handled in sendRequest). - if (state > 1) { - pageTimeout = schedulePageTimeout(state); - // Note: each new timeout is cancelled when the next response arrives, see - // onResponse(Frame). 
- } - return pendingResult; + "[{}] Client requested next page on non-empty queue, returning immediate future of {}", + logPrefix, + asTraceString(head)); } + return newCompletedResultSetFuture(head); } } finally { lock.unlock(); @@ -865,22 +1299,26 @@ public CompletableFuture dequeueOrCreatePending() { /** * If the total number of results in the queue and in-flight (requested - received) is less than - * half the queue size, then request more pages, unless the {@link #state} is failed, we're still - * waiting for the first page (so maybe still throttled or in the middle of a retry), or we don't - * support backpressure at the protocol level. + * half the queue size, then request more pages, unless the {@link #doneFuture} is failed, we're + * still waiting for the first page (so maybe still throttled or in the middle of a retry), or we + * don't support backpressure at the protocol level. */ @SuppressWarnings("GuardedBy") private void maybeRequestMore() { assert lock.isHeldByCurrentThread(); - if (state < 2 || streamId == -1 || !protocolBackpressureAvailable) { + if (doneFuture.isDone() + || chosenExecution == null + || chosenExecution.streamId == -1 + || chosenExecution.currentPage == 1 + || !protocolBackpressureAvailable) { return; } // if we have already requested more than the client needs, then no need to request some more if (getMaxPages() > 0 && numPagesRequested >= getMaxPages()) { return; } - // the pages received so far, which is the state minus one - int received = state - 1; + // the pages received so far, which is the current page minus one + int received = chosenExecution.currentPage - 1; int requested = numPagesRequested; // the pages that fit in the queue, which is the queue free space minus the requests in flight int freeSpace = getMaxEnqueuedPages() - queue.size(); @@ -889,291 +1327,79 @@ private void maybeRequestMore() { if (numPagesFittingInQueue >= getMaxEnqueuedPages() / 2) { LOG.trace("[{}] Requesting more {} pages", logPrefix, 
numPagesFittingInQueue); numPagesRequested = requested + numPagesFittingInQueue; - sendMorePagesRequest(numPagesFittingInQueue); + chosenExecution.sendMorePagesRequest(numPagesFittingInQueue); } } - /** - * Sends a request for more pages (a.k.a. backpressure request). - * - * @param nextPages the number of extra pages to request. - */ - @SuppressWarnings("GuardedBy") - private void sendMorePagesRequest(int nextPages) { - assert lock.isHeldByCurrentThread(); - assert channel != null : "expected valid connection in order to request more pages"; - assert protocolBackpressureAvailable; - assert streamId != -1; - - LOG.trace("[{}] Sending request for more pages", logPrefix); - ThrottledAdminRequestHandler.query( - channel, - true, - Revise.requestMoreContinuousPages(streamId, nextPages), - statement.getCustomPayload(), - getReviseRequestTimeout(), - throttler, - session.getMetricUpdater(), - logPrefix, - "request " + nextPages + " more pages for id " + streamId) - .start() - .handle( - (result, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, "[{}] Error requesting more pages, aborting.", logPrefix, error); - lock.lock(); - try { - // Set fromServer to false because we want the callback to still cancel the - // session if possible or else the server will wait on a timeout. 
- abort(error, false); - } finally { - lock.unlock(); - } - } - return null; - }); - } - - // TIMEOUT HANDLING - - private Timeout schedulePageTimeout(int expectedPage) { - if (expectedPage < 0) { - return null; - } - Duration timeout = getPageTimeout(expectedPage); - if (timeout.toNanos() <= 0) { - return null; - } - LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); - return timer.newTimeout( - timeout1 -> { - lock.lock(); - try { - if (state == expectedPage) { - abort( - new DriverTimeoutException( - String.format("Timed out waiting for page %d", expectedPage)), - false); - } else { - // Ignore timeout if the request has moved on in the interim. - LOG.trace( - "[{}] Timeout fired for page {} but query already at state {}, skipping", - logPrefix, - expectedPage, - state); - } - } finally { - lock.unlock(); - } - }, - timeout.toNanos(), - TimeUnit.NANOSECONDS); - } - - private Timeout scheduleGlobalTimeout() { - Duration globalTimeout = getGlobalTimeout(); + private void scheduleGlobalTimeout() { + Duration globalTimeout = getGlobalTimeoutDuration(); if (globalTimeout.toNanos() <= 0) { - return null; + return; } LOG.trace("[{}] Scheduling global timeout for pages in {}", logPrefix, globalTimeout); - return timer.newTimeout( - timeout1 -> { - lock.lock(); - try { - abort(new DriverTimeoutException("Query timed out after " + globalTimeout), false); - } finally { - lock.unlock(); - } - }, - globalTimeout.toNanos(), - TimeUnit.NANOSECONDS); + try { + this.globalTimeout = + timer.newTimeout( + timeout -> { + DriverTimeoutException error = + new DriverTimeoutException("Query timed out after " + globalTimeout); + setFailed(null, error); + }, + globalTimeout.toNanos(), + TimeUnit.NANOSECONDS); + } catch (IllegalStateException e) { + logTimeoutSchedulingError(e); + } } - /** Cancels the given timeout, if non null. 
*/ - private void cancelTimeout(Timeout timeout) { - if (timeout != null) { - LOG.trace("[{}] Cancelling timeout", logPrefix); - timeout.cancel(); + private void cancelGlobalTimeout() { + if (globalTimeout != null) { + globalTimeout.cancel(); } } - // CANCELLATION - /** - * Cancels the continuous paging request. + * Cancel all pending and scheduled executions, except the one passed as an argument to the + * method. * - *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#cancel()}, or from a - * driver I/O thread. + * @param toIgnore An optional execution to ignore (will not be cancelled). */ - public void cancel() { - lock.lock(); - try { - if (state < 0) { - return; - } else { - LOG.trace( - "[{}] Cancelling continuous paging session with state {} on node {}", - logPrefix, - state, - node); - state = STATE_FAILED; - if (pendingResult != null) { - pendingResult.cancel(true); - } - // the rest can be done without holding the lock, see below + private void cancelScheduledTasks(@Nullable NodeResponseCallback toIgnore) { + if (scheduledExecutions != null) { + for (Timeout scheduledExecution : scheduledExecutions) { + scheduledExecution.cancel(); } - } finally { - lock.unlock(); } - if (channel != null) { - if (!channel.closeFuture().isDone()) { - this.channel.cancel(this); + for (NodeResponseCallback callback : inFlightCallbacks) { + if (toIgnore == null || toIgnore != callback) { + callback.cancelExecution(); } - sendCancelRequest(); } - reenableAutoReadIfNeeded(); - } - - private void sendCancelRequest() { - LOG.trace("[{}] Sending cancel request", logPrefix); - ThrottledAdminRequestHandler.query( - channel, - true, - Revise.cancelContinuousPaging(streamId), - statement.getCustomPayload(), - getReviseRequestTimeout(), - throttler, - session.getMetricUpdater(), - logPrefix, - "cancel request") - .start() - .handle( - (result, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, - "[{}] Error sending cancel request. 
" - + "This is not critical (the request will eventually time out server-side).", - logPrefix, - error); - } else { - LOG.trace("[{}] Continuous paging session cancelled successfully", logPrefix); - } - return null; - }); - } - - // TERMINATION - - private void reenableAutoReadIfNeeded() { - // Make sure we don't leave the channel unreadable - LOG.trace("[{}] Re-enabling auto-read", logPrefix); - if (!protocolBackpressureAvailable) { - channel.config().setAutoRead(true); - } - } - - // ERROR HANDLING - - private void recordError(@NonNull Node node, @NonNull Throwable error) { - errors.add(new AbstractMap.SimpleEntry<>(node, error)); - } - - private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { - long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; - context - .getRequestTracker() - .onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); } /** - * Aborts the continuous paging session due to an error that can be either from the server or the - * client. + * Cancels the continuous paging request. * - * @param error the error that causes the abortion. - * @param fromServer whether the error was triggered by the coordinator or by the driver. + *

      Called from user code, see {@link DefaultContinuousAsyncResultSet#cancel()}. */ - @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void abort(@NonNull Throwable error, boolean fromServer) { - assert lock.isHeldByCurrentThread(); - LOG.trace( - "[{}] Aborting due to {} ({})", - logPrefix, - error.getClass().getSimpleName(), - error.getMessage()); - if (channel == null) { - // This only happens when sending the initial request, if no host was available - // or if the iterator returned by the LBP threw an exception. - // In either case the write was not even attempted, and - // we set the state right now. - enqueueOrCompletePending(error); - state = STATE_FAILED; - } else if (state > 0) { - enqueueOrCompletePending(error); - if (fromServer) { - // We can safely assume the server won't send any more responses, - // so set the state and call release() right now. - state = STATE_FAILED; - reenableAutoReadIfNeeded(); - } else { - // attempt to cancel first, i.e. ask server to stop sending responses, - // and only then release. 
- cancel(); + public void cancel() { + if (doneFuture.cancel(true)) { + lock.lock(); + try { + LOG.trace("[{}] Cancelling continuous paging session", logPrefix); + if (pendingResult != null) { + pendingResult.cancel(true); + } + } finally { + lock.unlock(); } - } - stopGlobalRequestTimer(); - cancelTimeout(globalTimeout); - } - - // METRICS - - private void stopNodeMessageTimer() { - ((DefaultNode) node) - .getMetricUpdater() - .updateTimer( - DefaultNodeMetric.CQL_MESSAGES, - executionProfile.getName(), - System.nanoTime() - messageStartTimeNanos, - TimeUnit.NANOSECONDS); - } - - private void stopGlobalRequestTimer() { - session - .getMetricUpdater() - .updateTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, - executionProfile.getName(), - System.nanoTime() - startTimeNanos, - TimeUnit.NANOSECONDS); - } - - private void updateErrorMetrics( - @NonNull NodeMetricUpdater metricUpdater, - @NonNull RetryDecision decision, - @NonNull DefaultNodeMetric error, - @NonNull DefaultNodeMetric retriesOnError, - @NonNull DefaultNodeMetric ignoresOnError) { - metricUpdater.incrementCounter(error, executionProfile.getName()); - switch (decision) { - case RETRY_SAME: - case RETRY_NEXT: - metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); - metricUpdater.incrementCounter(retriesOnError, executionProfile.getName()); - break; - case IGNORE: - metricUpdater.incrementCounter(DefaultNodeMetric.IGNORES, executionProfile.getName()); - metricUpdater.incrementCounter(ignoresOnError, executionProfile.getName()); - break; - case RETHROW: - // nothing do do + cancelGlobalTimeout(); + cancelScheduledTasks(null); } } - // UTILITY METHODS - @NonNull - private CompletableFuture createResultSetFuture() { + private CompletableFuture newPendingResultSetFuture() { CompletableFuture future = new CompletableFuture<>(); future.whenComplete( (rs, t) -> { @@ -1186,72 +1412,146 @@ private CompletableFuture createResultSetFuture() { } @NonNull - private CompletableFuture 
immediateResultSetFuture(@NonNull Object pageOrError) { - CompletableFuture future = createResultSetFuture(); + private CompletableFuture newCompletedResultSetFuture(@NonNull Object pageOrError) { + CompletableFuture future = newPendingResultSetFuture(); completeResultSetFuture(future, pageOrError); return future; } @NonNull - private CompletableFuture cancelledResultSetFuture() { - return immediateResultSetFuture( + private CompletableFuture newCancelledResultSetFuture() { + return newCompletedResultSetFuture( new CancellationException( "Can't get more results because the continuous query has failed already. " + "Most likely this is because the query was cancelled")); } + @SuppressWarnings("unchecked") private void completeResultSetFuture( @NonNull CompletableFuture future, @NonNull Object pageOrError) { - long now = System.nanoTime(); - long totalLatencyNanos = now - startTimeNanos; - long nodeLatencyNanos = now - messageStartTimeNanos; - if (resultSetClass.isInstance(pageOrError)) { - if (future.complete(resultSetClass.cast(pageOrError))) { - throttler.signalSuccess(this); - context - .getRequestTracker() - .onNodeSuccess(statement, nodeLatencyNanos, executionProfile, node, logPrefix); - context - .getRequestTracker() - .onSuccess(statement, totalLatencyNanos, executionProfile, node, logPrefix); - } - } else { + if (pageOrError instanceof Throwable) { Throwable error = (Throwable) pageOrError; - if (future.completeExceptionally(error)) { - context - .getRequestTracker() - .onError(statement, error, totalLatencyNanos, executionProfile, node, logPrefix); - if (error instanceof DriverTimeoutException) { - throttler.signalTimeout(this); - session - .getMetricUpdater() - .incrementCounter( - DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); - } else if (!(error instanceof RequestThrottlingException)) { - throttler.signalError(this, error); + future.completeExceptionally(error); + } else { + future.complete((ResultSetT) pageOrError); + } + } + + 
/** + * Called from the chosen execution when it completes successfully. + * + * @param callback The callback that completed the operation, that is, the chosen execution. + */ + private void setCompleted(@NonNull NodeResponseCallback callback) { + if (doneFuture.complete(null)) { + cancelGlobalTimeout(); + throttler.signalSuccess(this); + RequestTracker requestTracker = context.getRequestTracker(); + boolean requestTrackerEnabled = !(requestTracker instanceof NoopRequestTracker); + boolean metricEnabled = + sessionMetricUpdater.isEnabled( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, executionProfile.getName()); + if (requestTrackerEnabled || metricEnabled) { + long now = System.nanoTime(); + long totalLatencyNanos = now - startTimeNanos; + if (requestTrackerEnabled) { + requestTracker.onSuccess( + statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); + } + if (metricEnabled) { + sessionMetricUpdater.updateTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + executionProfile.getName(), + totalLatencyNanos, + TimeUnit.NANOSECONDS); } } } } + /** + * Called when the operation encounters an error and must abort. Can happen in the following + * cases: + * + *

        + *
      1. The throttler failed; + *
      2. All nodes tried failed; + *
      3. The global timeout was fired; + *
      4. The chosen node callback failed. + *
      + * + * @param callback The callback that signals the error, if present (in which case the method is + * being called from the chosen execution), or null if none (in which case the method is being + * called because of cancellation or timeout). + */ + private void setFailed(@Nullable NodeResponseCallback callback, @NonNull Throwable error) { + if (doneFuture.completeExceptionally(error)) { + cancelGlobalTimeout(); + // Must be called here in case we are failing because the global timeout fired + cancelScheduledTasks(null); + if (callback != null && error instanceof DriverException) { + ExecutionInfoT executionInfo = + createExecutionInfo(callback.node, null, null, callback.executionIndex); + // FIXME cannot set ExecutionInfo for Graph here + if (executionInfo instanceof ExecutionInfo) { + ((DriverException) error).setExecutionInfo((ExecutionInfo) executionInfo); + } + } + enqueueOrCompletePending(error); + RequestTracker requestTracker = context.getRequestTracker(); + if (!(requestTracker instanceof NoopRequestTracker)) { + long now = System.nanoTime(); + long totalLatencyNanos = now - startTimeNanos; + requestTracker.onError( + statement, + error, + totalLatencyNanos, + executionProfile, + callback == null ? 
null : callback.node, + logPrefix); + } + if (error instanceof DriverTimeoutException) { + throttler.signalTimeout(this); + if (sessionMetricUpdater.isEnabled( + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName())) { + sessionMetricUpdater.incrementCounter( + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + } + } else if (!(error instanceof RequestThrottlingException)) { + throttler.signalError(this, error); + } + } + } + + private void recordError(@NonNull Node node, @NonNull Throwable error) { + errors.add(new AbstractMap.SimpleEntry<>(node, error)); + } + + private void logTimeoutSchedulingError(IllegalStateException timeoutError) { + // If we're racing with session shutdown, the timer might be stopped already. We don't want + // to schedule more executions anyway, so swallow the error. + if (!"cannot be started once stopped".equals(timeoutError.getMessage())) { + Loggers.warnWithException( + LOG, "[{}] Error while scheduling timeout", logPrefix, timeoutError); + } + } + + @SuppressWarnings("unchecked") @NonNull private String asTraceString(@NonNull Object pageOrError) { - return resultSetClass.isInstance(pageOrError) - ? "page " + pageNumber(resultSetClass.cast(pageOrError)) - : ((Exception) pageOrError).getClass().getSimpleName(); + return pageOrError instanceof Throwable + ? 
((Exception) pageOrError).getClass().getSimpleName() + : "page " + pageNumber((ResultSetT) pageOrError); } @VisibleForTesting - int getState() { - lock.lock(); - try { - return state; - } finally { - lock.unlock(); - } + @NonNull + CompletableFuture getDoneFuture() { + return doneFuture; } @VisibleForTesting + @Nullable CompletableFuture getPendingResult() { lock.lock(); try { @@ -1260,4 +1560,10 @@ CompletableFuture getPendingResult() { lock.unlock(); } } + + @VisibleForTesting + @Nullable + public Timeout getGlobalTimeout() { + return globalTimeout; + } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index e5423026ba5..776f8e96d2a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -14,8 +14,8 @@ import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; -import com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.internal.core.channel.ResponseCallback; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; @@ -25,8 +25,6 @@ import com.datastax.oss.protocol.internal.response.result.Rows; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import io.netty.util.concurrent.Future; -import 
io.netty.util.concurrent.GenericFutureListener; import java.io.IOException; import java.nio.ByteBuffer; import java.time.Duration; @@ -42,8 +40,8 @@ */ @ThreadSafe public class ContinuousGraphRequestHandler - extends ContinuousRequestHandlerBase, AsyncGraphResultSet, GraphExecutionInfo> - implements ResponseCallback, GenericFutureListener>, Throttled { + extends ContinuousRequestHandlerBase< + GraphStatement, AsyncGraphResultSet, GraphExecutionInfo> { private final Message message; private final GraphProtocol subProtocol; @@ -59,13 +57,12 @@ public class ContinuousGraphRequestHandler @NonNull String sessionLogPrefix, @NonNull GraphBinaryModule graphBinaryModule, @NonNull GraphSupportChecker graphSupportChecker) { - super(statement, session, context, sessionLogPrefix, AsyncGraphResultSet.class); + super(statement, session, context, sessionLogPrefix, true); this.graphBinaryModule = graphBinaryModule; subProtocol = graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); message = GraphConversions.createContinuousMessageFromGraphStatement( statement, subProtocol, executionProfile, context, graphBinaryModule); - throttler.register(this); globalTimeout = MoreObjects.firstNonNull( statement.getTimeout(), @@ -77,19 +74,19 @@ public class ContinuousGraphRequestHandler @NonNull @Override - protected Duration getGlobalTimeout() { + protected Duration getGlobalTimeoutDuration() { return globalTimeout; } @NonNull @Override - protected Duration getPageTimeout(int pageNumber) { + protected Duration getPageTimeoutDuration(int pageNumber) { return Duration.ZERO; } @NonNull @Override - protected Duration getReviseRequestTimeout() { + protected Duration getReviseRequestTimeoutDuration() { return Duration.ZERO; } @@ -130,14 +127,26 @@ protected AsyncGraphResultSet createEmptyResultSet(@NonNull GraphExecutionInfo e @NonNull @Override protected DefaultGraphExecutionInfo createExecutionInfo( - @NonNull Result result, @Nullable Frame response) { - return new 
DefaultGraphExecutionInfo(statement, node, 0, 0, errors, response); + @NonNull Node node, + @Nullable Result result, + @Nullable Frame response, + int successfulExecutionIndex) { + return new DefaultGraphExecutionInfo( + statement, + node, + startedSpeculativeExecutionsCount.get(), + successfulExecutionIndex, + errors, + response); } @NonNull @Override protected ContinuousAsyncGraphResultSet createResultSet( - @NonNull Rows rows, @NonNull GraphExecutionInfo executionInfo) throws IOException { + @NonNull Rows rows, + @NonNull GraphExecutionInfo executionInfo, + @NonNull final ColumnDefinitions columnDefinitions) + throws IOException { Queue graphNodes = new ArrayDeque<>(); for (List row : rows.getData()) { diff --git a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java index 92712c5ca88..682a32407e3 100644 --- a/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java +++ b/core/src/test/java/com/datastax/dse/driver/DseTestDataProviders.java @@ -20,6 +20,7 @@ import static com.datastax.dse.driver.internal.core.graph.GraphProtocol.GRAPH_BINARY_1_0; import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.tngtech.java.junit.dataprovider.DataProvider; import edu.umd.cs.findbugs.annotations.NonNull; @@ -28,6 +29,13 @@ public class DseTestDataProviders { + private static final ScriptGraphStatement UNDEFINED_IDEMPOTENCE_STATEMENT = + ScriptGraphStatement.newInstance("undefined idempotence"); + private static final ScriptGraphStatement IDEMPOTENT_STATEMENT = + ScriptGraphStatement.builder("idempotent").setIdempotence(true).build(); + private static final ScriptGraphStatement NON_IDEMPOTENT_STATEMENT = + ScriptGraphStatement.builder("non idempotent").setIdempotence(false).build(); + @DataProvider public static Object[][] 
allDseProtocolVersions() { return concat(DseProtocolVersion.values()); @@ -48,6 +56,32 @@ public static Object[][] supportedGraphProtocols() { return new Object[][] {{GRAPHSON_1_0}, {GRAPHSON_2_0}, {GRAPH_BINARY_1_0}}; } + /** + * The combination of the default idempotence option and statement setting that produce an + * idempotent statement. + */ + @DataProvider + public static Object[][] idempotentGraphConfig() { + return new Object[][] { + new Object[] {true, UNDEFINED_IDEMPOTENCE_STATEMENT}, + new Object[] {false, IDEMPOTENT_STATEMENT}, + new Object[] {true, IDEMPOTENT_STATEMENT}, + }; + } + + /** + * The combination of the default idempotence option and statement setting that produce a non + * idempotent statement. + */ + @DataProvider + public static Object[][] nonIdempotentGraphConfig() { + return new Object[][] { + new Object[] {false, UNDEFINED_IDEMPOTENCE_STATEMENT}, + new Object[] {true, NON_IDEMPOTENT_STATEMENT}, + new Object[] {false, NON_IDEMPOTENT_STATEMENT}, + }; + } + @NonNull private static Object[][] concat(Object[]... 
values) { return Stream.of(values) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java index 634c6eda5d2..71770f9a09f 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java @@ -134,7 +134,7 @@ public void should_abort_when_prepare_fails_with_unrecoverable_error(DseProtocol verify(harness.getChannel(node1)).write(any(Query.class), anyBoolean(), anyMap(), any()); verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCompletedExceptionally(); assertThat(page1Future) .hasFailedWithThrowableThat() .isInstanceOf(SyntaxError.class) @@ -173,7 +173,7 @@ public void should_try_next_node_when_prepare_fails_with_recoverable_error( // should have tried the next host verify(harness.getChannel(node2)).write(any(Query.class), anyBoolean(), anyMap(), any()); - assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); assertThatStage(page1Future) .isSuccess( rs -> { diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java index 4ed9e48e9af..6bbe0827472 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java @@ -23,7 +23,9 @@ import static 
org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import com.datastax.dse.driver.DseTestFixtures; import com.datastax.dse.driver.api.core.DseProtocolVersion; @@ -90,7 +92,7 @@ public void should_always_try_next_node_if_bootstrapping( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); assertThatStage(resultSetFuture) .isSuccess( @@ -111,7 +113,7 @@ public void should_always_try_next_node_if_bootstrapping( assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); assertThat(executionInfo.getWarnings()).isEmpty(); - Mockito.verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); + verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); }); } } @@ -139,7 +141,7 @@ public void should_always_rethrow_query_validation_error( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCompletedExceptionally(); assertThatStage(resultSetFuture) .isFailed( @@ -147,17 +149,19 @@ public void should_always_rethrow_query_validation_error( assertThat(error) .isInstanceOf(InvalidQueryException.class) .hasMessage("mock message"); - Mockito.verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); + verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(DefaultNodeMetric.OTHER_ERRORS), anyString()); - 
Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) .updateTimer( eq(DefaultNodeMetric.CQL_MESSAGES), anyString(), anyLong(), eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -190,7 +194,7 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); assertThatStage(resultSetFuture) .isSuccess( @@ -204,19 +208,23 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( assertThat(executionInfo.getErrors()).hasSize(1); assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.errorMetric), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(DefaultNodeMetric.RETRIES), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.retryMetric), anyString()); - Mockito.verify(nodeMetricUpdater1, atMost(1)) - .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), - anyString(), - anyLong(), - eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + if (!failureScenario.isResponseFailure()) { + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -249,7 +257,7 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( statement, 
harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); assertThatStage(resultSetFuture) .isSuccess( @@ -263,19 +271,32 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( assertThat(executionInfo.getErrors()).hasSize(1); assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.errorMetric), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(DefaultNodeMetric.RETRIES), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.retryMetric), anyString()); - Mockito.verify(nodeMetricUpdater1, atMost(2)) - .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), - anyString(), - anyLong(), - eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + if (failureScenario.isResponseFailure()) { + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } else { + verify(nodeMetricUpdater1, times(2)) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1, times(2)) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -307,7 +328,7 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); 
assertThatStage(resultSetFuture) .isSuccess( @@ -319,19 +340,23 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( assertThat(executionInfo.getCoordinator()).isEqualTo(node1); assertThat(executionInfo.getErrors()).hasSize(0); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.errorMetric), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(DefaultNodeMetric.IGNORES), anyString()); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.ignoreMetric), anyString()); - Mockito.verify(nodeMetricUpdater1, atMost(1)) - .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), - anyString(), - anyLong(), - eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + if (!failureScenario.isResponseFailure()) { + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -364,22 +389,26 @@ public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCompletedExceptionally(); assertThatStage(resultSetFuture) .isFailed( error -> { assertThat(error).isInstanceOf(failureScenario.expectedExceptionClass); - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.errorMetric), anyString()); - Mockito.verify(nodeMetricUpdater1, atMost(1)) - .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), - anyString(), - anyLong(), - eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); 
+ if (!failureScenario.isResponseFailure()) { + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -421,7 +450,7 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re statement, harness.getSession(), harness.getContext(), "test"); CompletionStage resultSetFuture = handler.handle(); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCompletedExceptionally(); assertThatStage(resultSetFuture) .isFailed( @@ -429,19 +458,22 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re assertThat(error).isInstanceOf(failureScenario.expectedExceptionClass); // When non idempotent, the policy is bypassed completely: if (!shouldCallRetryPolicy) { - Mockito.verifyNoMoreInteractions( - harness.getContext().getRetryPolicy(anyString())); + verifyNoMoreInteractions(harness.getContext().getRetryPolicy(anyString())); } - Mockito.verify(nodeMetricUpdater1) + verify(nodeMetricUpdater1) .incrementCounter(eq(failureScenario.errorMetric), anyString()); - Mockito.verify(nodeMetricUpdater1, atMost(1)) - .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), - anyString(), - anyLong(), - eq(TimeUnit.NANOSECONDS)); - Mockito.verifyNoMoreInteractions(nodeMetricUpdater1); + if (!failureScenario.isResponseFailure()) { + verify(nodeMetricUpdater1) + .isEnabled(eq(DefaultNodeMetric.CQL_MESSAGES), anyString()); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + anyString(), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + } + verifyNoMoreInteractions(nodeMetricUpdater1); }); } } @@ -470,6 +502,22 @@ private abstract static class FailureScenario { abstract void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node); abstract void 
mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision); + + abstract boolean isResponseFailure(); + + @Override + public String toString() { + return "FailureScenario{" + + "expectedExceptionClass=" + + expectedExceptionClass + + ", errorMetric=" + + errorMetric + + ", retryMetric=" + + retryMetric + + ", ignoreMetric=" + + ignoreMetric + + '}'; + } } @DataProvider @@ -501,6 +549,11 @@ public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) eq(0))) .thenReturn(decision); } + + @Override + boolean isResponseFailure() { + return false; + } }, new FailureScenario( WriteTimeoutException.class, @@ -532,6 +585,11 @@ public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) eq(0))) .thenReturn(decision); } + + @Override + boolean isResponseFailure() { + return false; + } }, new FailureScenario( UnavailableException.class, @@ -558,6 +616,11 @@ public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) eq(0))) .thenReturn(decision); } + + @Override + boolean isResponseFailure() { + return false; + } }, new FailureScenario( ServerError.class, @@ -579,6 +642,11 @@ public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) any(SimpleStatement.class), any(ServerError.class), eq(0))) .thenReturn(decision); } + + @Override + boolean isResponseFailure() { + return false; + } }, new FailureScenario( HeartbeatException.class, @@ -597,6 +665,11 @@ public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) any(SimpleStatement.class), any(HeartbeatException.class), eq(0))) .thenReturn(decision); } + + @Override + boolean isResponseFailure() { + return true; + } }); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java index 1e9ef1471cf..da126ed47c9 100644 --- 
a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java @@ -22,6 +22,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.awaitility.Awaitility.await; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyLong; @@ -66,7 +67,8 @@ public class ContinuousCqlRequestHandlerTest extends ContinuousCqlRequestHandlerTestBase { - private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test\\|\\d*"); + private static final Pattern LOG_PREFIX_PER_QUERY = Pattern.compile("test\\|\\d+"); + private static final Pattern LOG_PREFIX_PER_EXECUTION = Pattern.compile("test\\|\\d+\\|\\d+"); @Test @UseDataProvider(value = "allDseProtocolVersions", location = DseTestDataProviders.class) @@ -311,7 +313,7 @@ public void should_cancel_future_if_session_cancelled(DseProtocolVersion version ContinuousAsyncResultSet page1 = CompletableFutures.getUninterruptibly(page1Future); page1.cancel(); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCancelled(); assertThat(page1.fetchNextPage()).isCancelled(); } } @@ -332,7 +334,7 @@ public void should_cancel_session_if_future_cancelled(DseProtocolVersion version page1Future.toCompletableFuture().cancel(true); // this should be ignored node1Behavior.setResponseSuccess(defaultFrameOf(DseTestFixtures.tenDseRows(1, false))); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCancelled(); } } @@ -355,12 +357,12 @@ public void should_not_cancel_session_if_future_cancelled_but_already_done( // to late page1Future.toCompletableFuture().cancel(true); - 
assertThat(handler.getState()).isEqualTo(-1); + assertThat(handler.getDoneFuture()).isCompleted(); } } @Test - public void should_send_cancel_request_if_dse_v2() { + public void should_send_cancel_request_if_dse_v2() throws InterruptedException { RequestHandlerTestHarness.Builder builder = continuousHarnessBuilder().withProtocolVersion(DSE_V2); PoolBehavior node1Behavior = builder.customBehavior(node1); @@ -371,8 +373,15 @@ public void should_send_cancel_request_if_dse_v2() { UNDEFINED_IDEMPOTENCE_STATEMENT, harness.getSession(), harness.getContext(), "test"); CompletionStage page1Future = handler.handle(); + // will trigger the population of inFlightCallbacks with node 1's execution + node1Behavior.setWriteSuccess(); + + // FIXME remove when JAVA-2552 is ready + // wait until the write is acknowledged and a page timeout is set + await().until(() -> harness.nextScheduledTimeout() != null); + page1Future.toCompletableFuture().cancel(true); - assertThat(handler.getState()).isEqualTo(-2); + assertThat(handler.getDoneFuture()).isCancelled(); verify(node1Behavior.getChannel()) .write(argThat(this::isCancelRequest), anyBoolean(), anyMap(), any()); } @@ -497,21 +506,21 @@ public void should_invoke_request_tracker(DseProtocolVersion version) { anyLong(), any(DriverExecutionProfile.class), eq(node1), - matches(LOG_PREFIX_PER_REQUEST)); + matches(LOG_PREFIX_PER_EXECUTION)); verify(requestTracker) .onNodeSuccess( eq(UNDEFINED_IDEMPOTENCE_STATEMENT), anyLong(), any(DriverExecutionProfile.class), eq(node2), - matches(LOG_PREFIX_PER_REQUEST)); + matches(LOG_PREFIX_PER_EXECUTION)); verify(requestTracker) .onSuccess( eq(UNDEFINED_IDEMPOTENCE_STATEMENT), anyLong(), any(DriverExecutionProfile.class), eq(node2), - matches(LOG_PREFIX_PER_REQUEST)); + matches(LOG_PREFIX_PER_QUERY)); verifyNoMoreInteractions(requestTracker); }); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java new file mode 100644 index 00000000000..e4a194b1fda --- /dev/null +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java @@ -0,0 +1,520 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.internal.core.graph; + +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.createGraphBinaryModule; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.defaultDseFrameOf; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.singleGraphRow; +import static com.datastax.oss.driver.Assertions.assertThat; +import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.DseTestDataProviders; +import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import 
com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import com.datastax.oss.driver.internal.core.cql.PoolBehavior; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.response.Error; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import java.util.Map; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +/** + * These tests are almost exact copies of {@link + * com.datastax.oss.driver.internal.core.cql.CqlRequestHandlerSpeculativeExecutionTest}. 
+ */ +@RunWith(DataProviderRunner.class) +public class ContinuousGraphRequestHandlerSpeculativeExecutionTest { + + @Mock DefaultNode node1; + @Mock DefaultNode node2; + @Mock DefaultNode node3; + + @Mock NodeMetricUpdater nodeMetricUpdater1; + @Mock NodeMetricUpdater nodeMetricUpdater2; + @Mock NodeMetricUpdater nodeMetricUpdater3; + + @Mock GraphSupportChecker graphSupportChecker; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + when(node1.getMetricUpdater()).thenReturn(nodeMetricUpdater1); + when(node2.getMetricUpdater()).thenReturn(nodeMetricUpdater2); + when(node3.getMetricUpdater()).thenReturn(nodeMetricUpdater3); + when(nodeMetricUpdater1.isEnabled(any(NodeMetric.class), anyString())).thenReturn(true); + when(nodeMetricUpdater2.isEnabled(any(NodeMetric.class), anyString())).thenReturn(true); + when(nodeMetricUpdater3.isEnabled(any(NodeMetric.class), anyString())).thenReturn(true); + when(graphSupportChecker.inferGraphProtocol(any(), any(), any())) + .thenReturn(GraphProtocol.GRAPH_BINARY_1_0); + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "nonIdempotentGraphConfig") + public void should_not_schedule_speculative_executions_if_not_idempotent( + boolean defaultIdempotence, GraphStatement statement) { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + + node1Behavior.verifyWrite(); + 
node1Behavior.setWriteSuccess(); + + // should not schedule any timeout + assertThat(harness.nextScheduledTimeout()).isNull(); + + verifyNoMoreInteractions(speculativeExecutionPolicy); + verifyNoMoreInteractions(nodeMetricUpdater1); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_schedule_speculative_executions( + boolean defaultIdempotence, GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + PoolBehavior node2Behavior = harnessBuilder.customBehavior(node2); + PoolBehavior node3Behavior = harnessBuilder.customBehavior(node3); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + long secondExecutionDelay = 200L; + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(2))) + .thenReturn(secondExecutionDelay); + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(3))) + .thenReturn(-1L); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + + CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + 
verifyNoMoreInteractions(nodeMetricUpdater1); + speculativeExecution1.task().run(speculativeExecution1); + verify(nodeMetricUpdater1) + .incrementCounter( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, DriverExecutionProfile.DEFAULT_NAME); + node2Behavior.verifyWrite(); + node2Behavior.setWriteSuccess(); + + CapturedTimeout speculativeExecution2 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution2.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(secondExecutionDelay); + verifyNoMoreInteractions(nodeMetricUpdater2); + speculativeExecution2.task().run(speculativeExecution2); + verify(nodeMetricUpdater2) + .incrementCounter( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, DriverExecutionProfile.DEFAULT_NAME); + node3Behavior.verifyWrite(); + node3Behavior.setWriteSuccess(); + + // No more scheduled tasks since the policy returns 0 on the third call. + assertThat(harness.nextScheduledTimeout()).isNull(); + + // Note that we don't need to complete any response, the test is just about checking that + // executions are started. 
+ } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_not_start_execution_if_result_complete( + boolean defaultIdempotence, GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder() + .withGraphTimeout(Duration.ofSeconds(10)) + .withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + PoolBehavior node2Behavior = harnessBuilder.customBehavior(node2); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + ContinuousGraphRequestHandler requestHandler = + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker); + CompletionStage resultSetFuture = requestHandler.handle(); + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + + // The first timeout scheduled should be the global timeout + CapturedTimeout globalTimeout = harness.nextScheduledTimeout(); + assertThat(globalTimeout.getDelay(TimeUnit.SECONDS)).isEqualTo(10); + + // Check that the first execution was scheduled but don't run it yet + CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + + // Complete the request from the initial execution + node1Behavior.setResponseSuccess( + defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPH_BINARY_1_0, module))); + 
assertThatStage(resultSetFuture).isSuccess(); + + // Pending speculative executions should have been cancelled. However we don't check + // firstExecutionTask directly because the request handler's onResponse can sometimes be + // invoked before operationComplete (this is very unlikely in practice, but happens in our + // Travis CI build). When that happens, the speculative execution is not recorded yet when + // cancelScheduledTasks runs. + // So check the timeout future instead, since it's cancelled in the same method. + assertThat(requestHandler.getGlobalTimeout()).isNotNull(); + assertThat(requestHandler.getGlobalTimeout().isCancelled()).isTrue(); + + // The fact that we missed the speculative execution is not a problem; even if it starts, it + // will eventually find out that the result is already complete and cancel itself: + speculativeExecution1.task().run(speculativeExecution1); + node2Behavior.verifyNoWrite(); + + verify(nodeMetricUpdater1) + .isEnabled(DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); + verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + eq(DriverExecutionProfile.DEFAULT_NAME), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + verifyNoMoreInteractions(nodeMetricUpdater1); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_fail_if_no_nodes(boolean defaultIdempotence, GraphStatement statement) { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + // No configured behaviors => will yield an empty query plan + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + when(speculativeExecutionPolicy.nextExecution( + 
any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + CompletionStage resultSetFuture = + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + + assertThatStage(resultSetFuture) + .isFailed(error -> assertThat(error).isInstanceOf(NoNodeAvailableException.class)); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_fail_if_no_more_nodes_and_initial_execution_is_last( + boolean defaultIdempotence, GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + harnessBuilder.withResponse( + node2, + defaultDseFrameOf(new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + CompletionStage resultSetFuture = + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + // do not simulate a response from node1 yet + + // Run the next scheduled task to start the speculative execution. 
node2 will reply with a + // BOOTSTRAPPING error, causing a RETRY_NEXT; but the query plan is now empty so the + // speculative execution stops. + // next scheduled timeout should be the first speculative execution. Get it and run it. + CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + speculativeExecution1.task().run(speculativeExecution1); + + // node1 now replies with the same response, that triggers a RETRY_NEXT + node1Behavior.setResponseSuccess( + defaultDseFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + // But again the query plan is empty so that should fail the request + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(AllNodesFailedException.class); + Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + assertThat(nodeErrors).containsOnlyKeys(node1, node2); + assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + }); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( + boolean defaultIdempotence, GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + PoolBehavior node2Behavior = harnessBuilder.customBehavior(node2); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + 
when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + CompletionStage resultSetFuture = + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + // do not simulate a response from node1 yet + + // next scheduled timeout should be the first speculative execution. Get it and run it. + CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + speculativeExecution1.task().run(speculativeExecution1); + + // node1 now replies with a BOOTSTRAPPING error that triggers a RETRY_NEXT + // but the query plan is empty so the initial execution stops + node1Behavior.setResponseSuccess( + defaultDseFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + // Same thing with node2, so the speculative execution should reach the end of the query plan + // and fail the request + node2Behavior.setResponseSuccess( + defaultDseFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(AllNodesFailedException.class); + Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + assertThat(nodeErrors).containsOnlyKeys(node1, node2); + assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + }); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_retry_in_speculative_executions( + boolean defaultIdempotence, 
GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + PoolBehavior node2Behavior = harnessBuilder.customBehavior(node2); + PoolBehavior node3Behavior = harnessBuilder.customBehavior(node3); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + CompletionStage resultSetFuture = + new ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + // do not simulate a response from node1. The request will stay hanging for the rest of this + // test + + // next scheduled timeout should be the first speculative execution. Get it and run it. 
+ CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + speculativeExecution1.task().run(speculativeExecution1); + + node2Behavior.verifyWrite(); + node2Behavior.setWriteSuccess(); + + // node2 replies with a response that triggers a RETRY_NEXT + node2Behavior.setResponseSuccess( + defaultDseFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + node3Behavior.setResponseSuccess( + defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPH_BINARY_1_0, module))); + + // The second execution should move to node3 and complete the request + assertThatStage(resultSetFuture).isSuccess(); + + // The request to node1 was still in flight, it should have been cancelled + node1Behavior.verifyCancellation(); + } + } + + @Test + @UseDataProvider(location = DseTestDataProviders.class, value = "idempotentGraphConfig") + public void should_stop_retrying_other_executions_if_result_complete( + boolean defaultIdempotence, GraphStatement statement) throws Exception { + GraphRequestHandlerTestHarness.Builder harnessBuilder = + GraphRequestHandlerTestHarness.builder().withDefaultIdempotence(defaultIdempotence); + PoolBehavior node1Behavior = harnessBuilder.customBehavior(node1); + PoolBehavior node2Behavior = harnessBuilder.customBehavior(node2); + PoolBehavior node3Behavior = harnessBuilder.customBehavior(node3); + + try (GraphRequestHandlerTestHarness harness = harnessBuilder.build()) { + SpeculativeExecutionPolicy speculativeExecutionPolicy = + harness.getContext().getSpeculativeExecutionPolicy(DriverExecutionProfile.DEFAULT_NAME); + long firstExecutionDelay = 100L; + when(speculativeExecutionPolicy.nextExecution( + any(Node.class), eq(null), eq(statement), eq(1))) + .thenReturn(firstExecutionDelay); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + CompletionStage resultSetFuture = + new 
ContinuousGraphRequestHandler( + statement, + harness.getSession(), + harness.getContext(), + "test", + module, + graphSupportChecker) + .handle(); + node1Behavior.verifyWrite(); + node1Behavior.setWriteSuccess(); + + // next scheduled timeout should be the first speculative execution. Get it and run it. + CapturedTimeout speculativeExecution1 = harness.nextScheduledTimeout(); + assertThat(speculativeExecution1.getDelay(TimeUnit.MILLISECONDS)) + .isEqualTo(firstExecutionDelay); + speculativeExecution1.task().run(speculativeExecution1); + + node2Behavior.verifyWrite(); + node2Behavior.setWriteSuccess(); + + // Complete the request from the initial execution + node1Behavior.setResponseSuccess( + defaultDseFrameOf(singleGraphRow(GraphProtocol.GRAPH_BINARY_1_0, module))); + assertThatStage(resultSetFuture).isSuccess(); + + // node2 replies with a response that would trigger a RETRY_NEXT if the request was still + // running + node2Behavior.setResponseSuccess( + defaultDseFrameOf( + new Error(ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING, "mock message"))); + + // The speculative execution should not move to node3 because it is stopped + node3Behavior.verifyNoWrite(); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java new file mode 100644 index 00000000000..30267dc52d7 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java @@ -0,0 +1,91 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.internal.core.specex.ConstantSpeculativeExecutionPolicy; +import com.datastax.oss.driver.internal.core.specex.NoSpeculativeExecutionPolicy; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8 required for graph paging") +@RunWith(DataProviderRunner.class) +public class GraphSpeculativeExecutionIT { + + @ClassRule + public static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); + + @Test + @UseDataProvider("idempotenceAndSpecExecs") + public void should_use_speculative_executions_when_enabled( + boolean defaultIdempotence, + Boolean statementIdempotence, + Class speculativeExecutionClass, + boolean expectSpeculativeExecutions) { + + try (DseSession session = + DseSession.builder() + .addContactEndPoints(ccmRule.getContactPoints()) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withBoolean( + DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, defaultIdempotence) + .withInt(DefaultDriverOption.SPECULATIVE_EXECUTION_MAX, 10) + .withClass( + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS, + 
speculativeExecutionClass) + .withDuration( + DefaultDriverOption.SPECULATIVE_EXECUTION_DELAY, Duration.ofMillis(10)) + .withString(DseDriverOption.GRAPH_PAGING_ENABLED, "ENABLED") + .build()) + .build()) { + + GraphStatement statement = + ScriptGraphStatement.newInstance( + "java.util.concurrent.TimeUnit.MILLISECONDS.sleep(1000L);") + .setIdempotent(statementIdempotence); + + GraphResultSet result = session.execute(statement); + int speculativeExecutionCount = result.getExecutionInfo().getSpeculativeExecutionCount(); + if (expectSpeculativeExecutions) { + assertThat(speculativeExecutionCount).isGreaterThan(0); + } else { + assertThat(speculativeExecutionCount).isEqualTo(0); + } + } + } + + @DataProvider + public static Object[][] idempotenceAndSpecExecs() { + return new Object[][] { + new Object[] {false, false, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {false, true, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {false, null, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {true, false, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {true, true, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {true, null, NoSpeculativeExecutionPolicy.class, false}, + new Object[] {false, false, ConstantSpeculativeExecutionPolicy.class, false}, + new Object[] {false, true, ConstantSpeculativeExecutionPolicy.class, true}, + new Object[] {false, null, ConstantSpeculativeExecutionPolicy.class, false}, + new Object[] {true, false, ConstantSpeculativeExecutionPolicy.class, false}, + new Object[] {true, true, ConstantSpeculativeExecutionPolicy.class, true}, + new Object[] {true, null, ConstantSpeculativeExecutionPolicy.class, true}, + }; + } +} From d1d32a6edb612346aa466f5514ccef94a85d80c1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 28 Nov 2019 16:46:25 +0100 Subject: [PATCH 336/979] JAVA-2465: Avoid requesting 0 page when executing continuous paging queries --- changelog/README.md | 1 + 
.../core/cql/continuous/ContinuousRequestHandlerBase.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 36e032d4540..c556e8654da 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [bug] JAVA-2465: Avoid requesting 0 page when executing continuous paging queries - [improvement] JAVA-2472: Enable speculative executions for paged graph queries - [improvement] JAVA-1579: Change default result format to latest GraphSON format - [improvement] JAVA-2496: Revisit timeouts for paged graph queries diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index e18fe123c7b..ea1df979c3f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -1324,7 +1324,7 @@ private void maybeRequestMore() { int freeSpace = getMaxEnqueuedPages() - queue.size(); int inFlight = requested - received; int numPagesFittingInQueue = freeSpace - inFlight; - if (numPagesFittingInQueue >= getMaxEnqueuedPages() / 2) { + if (numPagesFittingInQueue > 0 && numPagesFittingInQueue >= getMaxEnqueuedPages() / 2) { LOG.trace("[{}] Requesting more {} pages", logPrefix, numPagesFittingInQueue); numPagesRequested = requested + numPagesFittingInQueue; chosenExecution.sendMorePagesRequest(numPagesFittingInQueue); From 2357a43f121283efbf66ed1b4f5db8931bd7e234 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 20 Nov 2019 11:11:44 -0800 Subject: [PATCH 337/979] JAVA-2508: Preserve backward compatibility in schema metadata types --- changelog/README.md | 1 + core/revapi.json | 40 ---- .../schema/DseGraphKeyspaceMetadata.java | 63 ++++++ 
.../schema/DseGraphTableMetadata.java | 146 ++++++++++++++ .../metadata/schema/DseKeyspaceMetadata.java | 187 +----------------- .../metadata/schema/DseTableMetadata.java | 143 +------------- .../schema/DefaultDseKeyspaceMetadata.java | 12 +- .../schema/DefaultDseTableMetadata.java | 8 +- .../refresh/GraphSchemaRefreshTest.java | 18 +- .../schema/KeyspaceGraphMetadataIT.java | 12 +- .../TableGraphMetadataCaseSensitiveIT.java | 2 +- .../metadata/schema/TableGraphMetadataIT.java | 7 +- 12 files changed, 253 insertions(+), 386 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java diff --git a/changelog/README.md b/changelog/README.md index c556e8654da..733fa90364c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [bug] JAVA-2508: Preserve backward compatibility in schema metadata types - [bug] JAVA-2465: Avoid requesting 0 page when executing continuous paging queries - [improvement] JAVA-2472: Enable speculative executions for paged graph queries - [improvement] JAVA-1579: Change default result format to latest GraphSON format diff --git a/core/revapi.json b/core/revapi.json index 2f4592ca824..98951ab6cf1 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4860,46 +4860,6 @@ "newSuperType": "com.datastax.oss.driver.api.core.session.SessionBuilder, SessionT>", "justification": "JAVA-2411: Type parameters were wrong but it is unlikely that implementors would notice that in subclasses" }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getAggregates()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Map 
com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getFunctions()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getGraphEngine()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getTables()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata::getViews()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getEdge()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Map com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getIndexes()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, - { - "code": "java.method.addedToInterface", - "new": "method java.util.Optional com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata::getVertex()", - "justification": "JAVA-1898: Expose new table-level graph metadata" - }, { "code": "java.method.removed", "old": "method org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversal org.apache.tinkerpop.gremlin.process.remote.RemoteConnection::submit(org.apache.tinkerpop.gremlin.process.traversal.Bytecode) throws org.apache.tinkerpop.gremlin.process.remote.RemoteConnectionException", diff --git 
a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java new file mode 100644 index 00000000000..631e73726d0 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; + +/** + * Specialized keyspace metadata, that handles the graph-specific properties introduced in DSE 6.8. + * + *
<p>
      This type only exists to avoid breaking binary compatibility. When the driver is connected to + * a DSE cluster, all the {@link KeyspaceMetadata} instances it returns can be safely downcast to + * this interface. + */ +public interface DseGraphKeyspaceMetadata extends DseKeyspaceMetadata { + + /** The graph engine that will be used to interpret this keyspace. */ + @NonNull + Optional getGraphEngine(); + + @NonNull + @Override + default String describe(boolean pretty) { + ScriptBuilder builder = new ScriptBuilder(pretty); + if (isVirtual()) { + builder.append("/* VIRTUAL "); + } else { + builder.append("CREATE "); + } + builder + .append("KEYSPACE ") + .append(getName()) + .append(" WITH replication = { 'class' : '") + .append(getReplication().get("class")) + .append("'"); + for (Map.Entry entry : getReplication().entrySet()) { + if (!entry.getKey().equals("class")) { + builder + .append(", '") + .append(entry.getKey()) + .append("': '") + .append(entry.getValue()) + .append("'"); + } + } + builder.append(" } AND durable_writes = ").append(Boolean.toString(isDurableWrites())); + getGraphEngine() + .ifPresent( + graphEngine -> builder.append(" AND graph_engine ='").append(graphEngine).append("'")); + builder.append(";"); + if (isVirtual()) { + builder.append(" */"); + } + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java new file mode 100644 index 00000000000..bae274486c5 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java @@ -0,0 +1,146 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.metadata.schema; + +import com.datastax.dse.driver.internal.core.metadata.schema.ScriptHelper; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; +import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Optional; + +/** + * Specialized table metadata, that handles the graph-specific properties introduced in DSE 6.8. + * + *
<p>
      This type only exists to avoid breaking binary compatibility. When the driver is connected to + * a DSE cluster, all the {@link TableMetadata} instances it returns can be safely downcast to this + * interface. + */ +public interface DseGraphTableMetadata extends DseTableMetadata { + /** + * The vertex metadata if this table represents a vertex in graph, otherwise empty. + * + *
<p>
      This is mutually exclusive with {@link #getEdge()}. + */ + @NonNull + Optional getVertex(); + + /** + * The edge metadata if this table represents an edge in graph, otherwise empty. + * + *
<p>
      This is mutually exclusive with {@link #getVertex()}. + */ + @NonNull + Optional getEdge(); + + @NonNull + @Override + default String describe(boolean pretty) { + ScriptBuilder builder = new ScriptBuilder(pretty); + if (isVirtual()) { + builder.append("/* VIRTUAL "); + } else { + builder.append("CREATE "); + } + + builder + .append("TABLE ") + .append(getKeyspace()) + .append(".") + .append(getName()) + .append(" (") + .newLine() + .increaseIndent(); + + for (ColumnMetadata column : getColumns().values()) { + builder.append(column.getName()).append(" ").append(column.getType().asCql(true, pretty)); + if (column.isStatic()) { + builder.append(" static"); + } + builder.append(",").newLine(); + } + + // PK + builder.append("PRIMARY KEY ("); + if (getPartitionKey().size() == 1) { // PRIMARY KEY (k + builder.append(getPartitionKey().get(0).getName()); + } else { // PRIMARY KEY ((k1, k2) + builder.append("("); + boolean first = true; + for (ColumnMetadata pkColumn : getPartitionKey()) { + if (first) { + first = false; + } else { + builder.append(", "); + } + builder.append(pkColumn.getName()); + } + builder.append(")"); + } + // PRIMARY KEY (, cc1, cc2, cc3) + for (ColumnMetadata clusteringColumn : getClusteringColumns().keySet()) { + builder.append(", ").append(clusteringColumn.getName()); + } + builder.append(")"); + + builder.newLine().decreaseIndent().append(")"); + + builder.increaseIndent(); + if (isCompactStorage()) { + builder.andWith().append("COMPACT STORAGE"); + } + if (getClusteringColumns().containsValue(ClusteringOrder.DESC)) { + builder.andWith().append("CLUSTERING ORDER BY ("); + boolean first = true; + for (Map.Entry entry : + getClusteringColumns().entrySet()) { + if (first) { + first = false; + } else { + builder.append(", "); + } + builder.append(entry.getKey().getName()).append(" ").append(entry.getValue().name()); + } + builder.append(")"); + } + getVertex() + .ifPresent( + vertex -> { + builder.andWith().append("VERTEX LABEL").append(" 
").append(vertex.getLabelName()); + }); + getEdge() + .ifPresent( + edge -> { + builder.andWith().append("EDGE LABEL").append(" ").append(edge.getLabelName()); + ScriptHelper.appendEdgeSide( + builder, + edge.getFromTable(), + edge.getFromLabel(), + edge.getFromPartitionKeyColumns(), + edge.getFromClusteringColumns(), + "FROM"); + ScriptHelper.appendEdgeSide( + builder, + edge.getToTable(), + edge.getToLabel(), + edge.getToPartitionKeyColumns(), + edge.getToClusteringColumns(), + "TO"); + }); + Map options = getOptions(); + RelationParser.appendOptions(options, builder); + builder.append(";"); + if (isVirtual()) { + builder.append(" */"); + } + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java index 41252eef244..3098bba1aa7 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseKeyspaceMetadata.java @@ -15,194 +15,19 @@ */ package com.datastax.dse.driver.api.core.metadata.schema; -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; -import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; -import com.datastax.oss.driver.api.core.metadata.schema.FunctionSignature; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; -import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Map; -import 
java.util.Optional; /** * Specialized keyspace metadata for DSE. * - *
<p>It has the following differences with {@link KeyspaceMetadata}: + * <p>Notes: * * <ul> - *   <li>new method {@link #getGraphEngine()}; - *   <li>all sub-elements are specialized for DSE (e.g. {@link #getTables()} returns {@link - * DseTableMetadata} instances). + *   <li>this type can always be safely downcast to {@link DseGraphKeyspaceMetadata} (the only + * reason the two interfaces are separate is for backward compatibility). + *   <li>all returned elements can be cast to their DSE counterparts, for example {@link + * TableMetadata} to {@link DseTableMetadata}. * </ul>
      */ -public interface DseKeyspaceMetadata extends KeyspaceMetadata { - - @NonNull - @Override - Map getTables(); - - @NonNull - @Override - default Optional getTable(@NonNull CqlIdentifier tableId) { - return Optional.ofNullable(getTables().get(tableId)); - } - - @NonNull - @Override - default Optional getTable(@NonNull String tableName) { - return getTable(CqlIdentifier.fromCql(tableName)); - } - - @NonNull - @Override - Map getViews(); - - @NonNull - @Override - default Map getViewsOnTable(@NonNull CqlIdentifier tableId) { - ImmutableMap.Builder builder = ImmutableMap.builder(); - for (ViewMetadata view : getViews().values()) { - if (view.getBaseTable().equals(tableId)) { - builder.put(view.getName(), view); - } - } - return builder.build(); - } - - @NonNull - @Override - default Optional getView(@NonNull CqlIdentifier viewId) { - return Optional.ofNullable(getViews().get(viewId)); - } - - @NonNull - @Override - default Optional getView(@NonNull String viewName) { - return getView(CqlIdentifier.fromCql(viewName)); - } - - @NonNull - @Override - Map getFunctions(); - - @NonNull - @Override - default Optional getFunction(@NonNull FunctionSignature functionSignature) { - return Optional.ofNullable(getFunctions().get(functionSignature)); - } - - @NonNull - @Override - default Optional getFunction( - @NonNull CqlIdentifier functionId, @NonNull Iterable parameterTypes) { - return Optional.ofNullable( - getFunctions().get(new FunctionSignature(functionId, parameterTypes))); - } - - @NonNull - @Override - default Optional getFunction( - @NonNull String functionName, @NonNull Iterable parameterTypes) { - return getFunction(CqlIdentifier.fromCql(functionName), parameterTypes); - } - - @NonNull - @Override - default Optional getFunction( - @NonNull CqlIdentifier functionId, @NonNull DataType... 
parameterTypes) { - return Optional.ofNullable( - getFunctions().get(new FunctionSignature(functionId, parameterTypes))); - } - - @NonNull - @Override - default Optional getFunction( - @NonNull String functionName, @NonNull DataType... parameterTypes) { - return getFunction(CqlIdentifier.fromCql(functionName), parameterTypes); - } - - @NonNull - @Override - Map getAggregates(); - - @NonNull - @Override - default Optional getAggregate(@NonNull FunctionSignature aggregateSignature) { - return Optional.ofNullable(getAggregates().get(aggregateSignature)); - } - - @NonNull - @Override - default Optional getAggregate( - @NonNull CqlIdentifier aggregateId, @NonNull Iterable parameterTypes) { - return Optional.ofNullable( - getAggregates().get(new FunctionSignature(aggregateId, parameterTypes))); - } - - @NonNull - @Override - default Optional getAggregate( - @NonNull String aggregateName, @NonNull Iterable parameterTypes) { - return getAggregate(CqlIdentifier.fromCql(aggregateName), parameterTypes); - } - - @NonNull - @Override - default Optional getAggregate( - @NonNull CqlIdentifier aggregateId, @NonNull DataType... parameterTypes) { - return Optional.ofNullable( - getAggregates().get(new FunctionSignature(aggregateId, parameterTypes))); - } - - @NonNull - @Override - default Optional getAggregate( - @NonNull String aggregateName, @NonNull DataType... parameterTypes) { - return getAggregate(CqlIdentifier.fromCql(aggregateName), parameterTypes); - } - - /** The graph engine that will be used to interpret this keyspace. 
*/ - @NonNull - Optional getGraphEngine(); - - @NonNull - @Override - default String describe(boolean pretty) { - ScriptBuilder builder = new ScriptBuilder(pretty); - if (isVirtual()) { - builder.append("/* VIRTUAL "); - } else { - builder.append("CREATE "); - } - builder - .append("KEYSPACE ") - .append(getName()) - .append(" WITH replication = { 'class' : '") - .append(getReplication().get("class")) - .append("'"); - for (Map.Entry entry : getReplication().entrySet()) { - if (!entry.getKey().equals("class")) { - builder - .append(", '") - .append(entry.getKey()) - .append("': '") - .append(entry.getValue()) - .append("'"); - } - } - builder.append(" } AND durable_writes = ").append(Boolean.toString(isDurableWrites())); - getGraphEngine() - .ifPresent( - graphEngine -> builder.append(" AND graph_engine ='").append(graphEngine).append("'")); - builder.append(";"); - if (isVirtual()) { - builder.append(" */"); - } - return builder.build(); - } -} +public interface DseKeyspaceMetadata extends KeyspaceMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java index c1c961999ca..25d3cea0c02 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseTableMetadata.java @@ -15,151 +15,20 @@ */ package com.datastax.dse.driver.api.core.metadata.schema; -import com.datastax.dse.driver.internal.core.metadata.schema.ScriptHelper; -import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; -import 
com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; -import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.Map; -import java.util.Optional; /** * Specialized table metadata for DSE. * - *
<p>It has the following differences with {@link TableMetadata}: + * <p>Notes: * * <ul> - *   <li>new methods {@link #getVertex()} and {@link #getEdge()}; - *   <li>all sub-elements are specialized for DSE (e.g. {@link #getIndexes()} returns {@link - * DseIndexMetadata} instances). + *   <li>this type can always be safely downcast to {@link DseGraphTableMetadata} (the only reason + * the two interfaces are separate is for backward compatibility). + *   <li>all returned {@link ColumnMetadata} can be cast to {@link DseColumnMetadata}, and all + * {@link IndexMetadata} to {@link DseIndexMetadata}. * </ul>
      */ -public interface DseTableMetadata extends DseRelationMetadata, TableMetadata { - - @NonNull - @Override - Map getIndexes(); - - /** - * The vertex metadata if this table represents a vertex in graph, otherwise empty. - * - *
<p>
      This is mutually exclusive with {@link #getEdge()}. - */ - @NonNull - Optional getVertex(); - - /** - * The edge metadata if this table represents an edge in graph, otherwise empty. - * - *
<p>
      This is mutually exclusive with {@link #getVertex()}. - */ - @NonNull - Optional getEdge(); - - @NonNull - @Override - default String describe(boolean pretty) { - ScriptBuilder builder = new ScriptBuilder(pretty); - if (isVirtual()) { - builder.append("/* VIRTUAL "); - } else { - builder.append("CREATE "); - } - - builder - .append("TABLE ") - .append(getKeyspace()) - .append(".") - .append(getName()) - .append(" (") - .newLine() - .increaseIndent(); - - for (ColumnMetadata column : getColumns().values()) { - builder.append(column.getName()).append(" ").append(column.getType().asCql(true, pretty)); - if (column.isStatic()) { - builder.append(" static"); - } - builder.append(",").newLine(); - } - - // PK - builder.append("PRIMARY KEY ("); - if (getPartitionKey().size() == 1) { // PRIMARY KEY (k - builder.append(getPartitionKey().get(0).getName()); - } else { // PRIMARY KEY ((k1, k2) - builder.append("("); - boolean first = true; - for (ColumnMetadata pkColumn : getPartitionKey()) { - if (first) { - first = false; - } else { - builder.append(", "); - } - builder.append(pkColumn.getName()); - } - builder.append(")"); - } - // PRIMARY KEY (, cc1, cc2, cc3) - for (ColumnMetadata clusteringColumn : getClusteringColumns().keySet()) { - builder.append(", ").append(clusteringColumn.getName()); - } - builder.append(")"); - - builder.newLine().decreaseIndent().append(")"); - - builder.increaseIndent(); - if (isCompactStorage()) { - builder.andWith().append("COMPACT STORAGE"); - } - if (getClusteringColumns().containsValue(ClusteringOrder.DESC)) { - builder.andWith().append("CLUSTERING ORDER BY ("); - boolean first = true; - for (Map.Entry entry : - getClusteringColumns().entrySet()) { - if (first) { - first = false; - } else { - builder.append(", "); - } - builder.append(entry.getKey().getName()).append(" ").append(entry.getValue().name()); - } - builder.append(")"); - } - getVertex() - .ifPresent( - vertex -> { - builder.andWith().append("VERTEX LABEL").append(" 
").append(vertex.getLabelName()); - }); - getEdge() - .ifPresent( - edge -> { - builder.andWith().append("EDGE LABEL").append(" ").append(edge.getLabelName()); - ScriptHelper.appendEdgeSide( - builder, - edge.getFromTable(), - edge.getFromLabel(), - edge.getFromPartitionKeyColumns(), - edge.getFromClusteringColumns(), - "FROM"); - ScriptHelper.appendEdgeSide( - builder, - edge.getToTable(), - edge.getToLabel(), - edge.getToPartitionKeyColumns(), - edge.getToClusteringColumns(), - "TO"); - }); - Map options = getOptions(); - RelationParser.appendOptions(options, builder); - builder.append(";"); - if (isVirtual()) { - builder.append(" */"); - } - return builder.build(); - } -} +public interface DseTableMetadata extends DseRelationMetadata, TableMetadata {} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java index 21ff8f4c6bc..381a666310b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java @@ -15,7 +15,7 @@ */ package com.datastax.dse.driver.internal.core.metadata.schema; -import com.datastax.dse.driver.api.core.metadata.schema.DseKeyspaceMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseGraphKeyspaceMetadata; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; @@ -31,7 +31,7 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultDseKeyspaceMetadata implements DseKeyspaceMetadata { +public class DefaultDseKeyspaceMetadata implements DseGraphKeyspaceMetadata { @NonNull private final CqlIdentifier name; private final boolean durableWrites; 
@@ -129,8 +129,8 @@ public Map getAggregates() { public boolean equals(Object other) { if (other == this) { return true; - } else if (other instanceof DseKeyspaceMetadata) { - DseKeyspaceMetadata that = (DseKeyspaceMetadata) other; + } else if (other instanceof DseGraphKeyspaceMetadata) { + DseGraphKeyspaceMetadata that = (DseGraphKeyspaceMetadata) other; return Objects.equals(this.name, that.getName()) && this.durableWrites == that.isDurableWrites() && this.virtual == that.isVirtual() @@ -165,8 +165,8 @@ public int hashCode() { public boolean shallowEquals(Object other) { if (other == this) { return true; - } else if (other instanceof DseKeyspaceMetadata) { - DseKeyspaceMetadata that = (DseKeyspaceMetadata) other; + } else if (other instanceof DseGraphKeyspaceMetadata) { + DseGraphKeyspaceMetadata that = (DseGraphKeyspaceMetadata) other; return Objects.equals(this.name, that.getName()) && this.durableWrites == that.isDurableWrites() && Objects.equals(this.graphEngine, that.getGraphEngine().orElse(null)) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java index 92b3c14a939..93ad64b8186 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java @@ -16,7 +16,7 @@ package com.datastax.dse.driver.internal.core.metadata.schema; import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; -import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; import com.datastax.oss.driver.api.core.CqlIdentifier; import 
com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; @@ -32,7 +32,7 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultDseTableMetadata implements DseTableMetadata { +public class DefaultDseTableMetadata implements DseGraphTableMetadata { @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier name; @@ -149,8 +149,8 @@ public Optional getEdge() { public boolean equals(Object other) { if (other == this) { return true; - } else if (other instanceof DseTableMetadata) { - DseTableMetadata that = (DseTableMetadata) other; + } else if (other instanceof DseGraphTableMetadata) { + DseGraphTableMetadata that = (DseGraphTableMetadata) other; return Objects.equals(this.keyspace, that.getKeyspace()) && Objects.equals(this.name, that.getName()) && Objects.equals(this.id, that.getId().orElse(null)) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java index 84a33c926ad..ff93f6ff9c5 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java @@ -9,7 +9,7 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; -import com.datastax.dse.driver.api.core.metadata.schema.DseTableMetadata; +import com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseVertexMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseEdgeMetadata; import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseKeyspaceMetadata; @@ -133,7 +133,7 @@ public void should_detect_adding_and_renaming_and_removing_vertex_label() { 
assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); assertThat(result.newMetadata.getKeyspaces().get(KS_WITH_ENGINE.getName())).isNotNull(); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -143,7 +143,7 @@ public void should_detect_adding_and_renaming_and_removing_vertex_label() { .getVertex()) .isNotNull(); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -175,7 +175,7 @@ public void should_detect_adding_and_renaming_and_removing_vertex_label() { assertThat(result.newMetadata.getKeyspaces()).hasSize(2); assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -205,7 +205,7 @@ public void should_detect_adding_and_renaming_and_removing_vertex_label() { assertThat(result.events) .containsExactly(TableChangeEvent.updated(newTable, tableWithRemovedLabel)); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -241,7 +241,7 @@ public void should_detect_adding_and_renaming_and_removing_edge_label() { assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); assertThat(result.newMetadata.getKeyspaces().get(KS_WITH_ENGINE.getName())).isNotNull(); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -251,7 +251,7 @@ public void should_detect_adding_and_renaming_and_removing_edge_label() { .getVertex()) .isNotNull(); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -286,7 +286,7 @@ public void should_detect_adding_and_renaming_and_removing_edge_label() { assertThat(result.newMetadata.getKeyspaces()).hasSize(2); assertThat(result.events).containsExactly(TableChangeEvent.updated(OLD_TABLE, newTable)); assertThat( - ((DseTableMetadata) 
+ ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() @@ -316,7 +316,7 @@ public void should_detect_adding_and_renaming_and_removing_edge_label() { assertThat(result.events) .containsExactly(TableChangeEvent.updated(newTable, tableWithRemovedLabel)); assertThat( - ((DseTableMetadata) + ((DseGraphTableMetadata) result .newMetadata .getKeyspaces() diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java index 4daab6fc467..9e35103e03a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -45,7 +45,7 @@ public void should_expose_graph_engine_if_set() { assertThat(metadata.getKeyspace("keyspace_metadata_it_graph_engine")) .hasValueSatisfying( keyspaceMetadata -> - assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + assertThat(((DseGraphKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) .hasValue("Core")); } @@ -58,14 +58,15 @@ public void should_expose_graph_engine_if_keyspace_altered() { assertThat(session.getMetadata().getKeyspace("keyspace_metadata_it_graph_engine_alter")) .hasValueSatisfying( keyspaceMetadata -> - assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()).isEmpty()); + assertThat(((DseGraphKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + .isEmpty()); session.execute( "ALTER KEYSPACE keyspace_metadata_it_graph_engine_alter WITH graph_engine = 'Core'"); assertThat(session.getMetadata().getKeyspace("keyspace_metadata_it_graph_engine_alter")) .hasValueSatisfying( keyspaceMetadata -> - assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + assertThat(((DseGraphKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) 
.hasValue("Core")); } @@ -92,7 +93,7 @@ public void should_expose_core_graph_engine_if_set() { assertThat(metadata.getKeyspace("keyspace_metadata_it_graph_engine_core")) .hasValueSatisfying( keyspaceMetadata -> - assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + assertThat(((DseGraphKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) .hasValue("Core")); } @@ -103,6 +104,7 @@ public void should_expose_empty_graph_engine_if_not_set() { assertThat(metadata.getKeyspace(SESSION_RULE.keyspace())) .hasValueSatisfying( keyspaceMetadata -> - assertThat(((DseKeyspaceMetadata) keyspaceMetadata).getGraphEngine()).isEmpty()); + assertThat(((DseGraphKeyspaceMetadata) keyspaceMetadata).getGraphEngine()) + .isEmpty()); } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java index ea3e4c59cf4..e5e50e7b408 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java @@ -66,7 +66,7 @@ public void should_expose_case_sensitive_edge_metadata() { assertThat(keyspaceMetadata.getTable(CqlIdentifier.fromInternal("Created"))) .hasValueSatisfying( created -> { - DseTableMetadata dseCreated = (DseTableMetadata) created; + DseGraphTableMetadata dseCreated = (DseGraphTableMetadata) created; assertThat(dseCreated.getEdge()) .hasValueSatisfying( edge -> { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java index 3b50695d21d..86c9d9a6ede 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java @@ -63,7 +63,7 @@ public void should_expose_vertex_and_edge_metadata() { assertThat(keyspaceMetadata.getTable("person")) .hasValueSatisfying( person -> { - DseTableMetadata dsePerson = (DseTableMetadata) person; + DseGraphTableMetadata dsePerson = (DseGraphTableMetadata) person; assertThat(dsePerson.getVertex()) .hasValueSatisfying( vertex -> @@ -75,7 +75,7 @@ public void should_expose_vertex_and_edge_metadata() { assertThat(keyspaceMetadata.getTable("software")) .hasValueSatisfying( software -> { - DseTableMetadata dseSoftware = (DseTableMetadata) software; + DseGraphTableMetadata dseSoftware = (DseGraphTableMetadata) software; assertThat(dseSoftware.getVertex()) .hasValueSatisfying( vertex -> @@ -87,7 +87,8 @@ public void should_expose_vertex_and_edge_metadata() { assertThat(keyspaceMetadata.getTable("contributors")) .hasValueSatisfying( contributors -> { - DseTableMetadata dseContributors = (DseTableMetadata) contributors; + DseGraphTableMetadata dseContributors = + (DseGraphTableMetadata) contributors; assertThat(dseContributors.getVertex()).isEmpty(); assertThat(dseContributors.getEdge()) .hasValueSatisfying( From abe376df67309774f600ba53ac3e6958f8c9b428 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 4 Dec 2019 12:12:14 +0100 Subject: [PATCH 338/979] JAVA-2558: Revisit GraphRequestHandler (#310) --- changelog/README.md | 1 + .../internal/core/graph/GraphConversions.java | 3 +- .../graph/GraphRequestAsyncProcessor.java | 9 +- .../core/graph/GraphRequestHandler.java | 731 +++++++++++------- .../core/graph/GraphRequestSyncProcessor.java | 9 +- .../core/graph/GraphRequestHandlerTest.java | 40 +- 6 files changed, 518 insertions(+), 275 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 733fa90364c..da114417983 
100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2558: Revisit GraphRequestHandler - [bug] JAVA-2508: Preserve backward compatibility in schema metadata types - [bug] JAVA-2465: Avoid requesting 0 page when executing continuous paging queries - [improvement] JAVA-2472: Enable speculative executions for paged graph queries diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index a0344e9dd05..6637804caaa 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -23,7 +23,6 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; import com.datastax.dse.protocol.internal.request.RawBytesQuery; @@ -154,7 +153,7 @@ static Message createMessageFromGraphStatement( GraphStatement statement, GraphProtocol subProtocol, DriverExecutionProfile config, - DseDriverContext context, + InternalDriverContext context, GraphBinaryModule graphBinaryModule) { final List encodedQueryParams; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java index 14363bc8159..ea97aab5e2b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestAsyncProcessor.java @@ -16,7 +16,10 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.type.reflect.GenericType; @@ -57,7 +60,11 @@ public GraphBinaryModule getGraphBinaryModule() { @Override public boolean canProcess(Request request, GenericType resultType) { - return request instanceof GraphStatement && resultType.equals(GraphStatement.ASYNC); + return (request instanceof ScriptGraphStatement + || request instanceof FluentGraphStatement + || request instanceof BatchGraphStatement + || request instanceof BytecodeGraphStatement) + && resultType.equals(GraphStatement.ASYNC); } @Override diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 24cf982bfbe..cf7d6d37a21 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -17,12 +17,9 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; -import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphNode; import 
com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; -import com.datastax.dse.driver.internal.core.context.DseDriverContext; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.DriverTimeoutException; @@ -47,27 +44,39 @@ import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; +import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.session.RepreparePayload; +import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; +import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import 
com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.request.Prepare; import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Result; +import com.datastax.oss.protocol.internal.response.error.Unprepared; import com.datastax.oss.protocol.internal.response.result.Rows; import com.datastax.oss.protocol.internal.response.result.Void; +import com.datastax.oss.protocol.internal.util.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.handler.codec.EncoderException; -import io.netty.util.concurrent.EventExecutor; +import io.netty.util.Timeout; +import io.netty.util.Timer; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; -import io.netty.util.concurrent.ScheduledFuture; import java.nio.ByteBuffer; import java.time.Duration; import java.util.AbstractMap; @@ -90,22 +99,20 @@ public class GraphRequestHandler implements Throttled { private static final Logger LOG = LoggerFactory.getLogger(GraphRequestHandler.class); + private static final long NANOTIME_NOT_MEASURED_YET = -1; + private static final int NO_SUCCESSFUL_EXECUTION = -1; + private final long startTimeNanos; private final String logPrefix; - + private final GraphStatement statement; private final DefaultSession session; - - private final DseDriverContext context; - private Queue queryPlan; + private final InternalDriverContext context; private final DriverExecutionProfile executionProfile; - - private final GraphStatement graphStatement; - private final boolean isIdempotent; protected final CompletableFuture result; private final Message message; + private final Timer timer; private final GraphProtocol subProtocol; - private final EventExecutor scheduler; /** * How many speculative 
executions are currently running (including the initial execution). We @@ -121,22 +128,24 @@ public class GraphRequestHandler implements Throttled { */ private final AtomicInteger startedSpeculativeExecutionsCount; - private final SpeculativeExecutionPolicy speculativeExecutionPolicy; - - private final ScheduledFuture timeoutFuture; - private final List> scheduledExecutions; - private final List inFlightCallbacks; + private final Duration timeout; + private final Timeout scheduledTimeout; + private final List scheduledExecutions; + private final List inFlightCallbacks; private final RetryPolicy retryPolicy; + private final SpeculativeExecutionPolicy speculativeExecutionPolicy; private final RequestThrottler throttler; + private final RequestTracker requestTracker; + private final SessionMetricUpdater sessionMetricUpdater; private final Map queryCustomPayload; private final GraphBinaryModule graphBinaryModule; // The errors on the nodes that were already tried (lazily initialized on the first error). - // We don'traversals use a map because nodes can appear multiple times. + // We don't use a map because nodes can appear multiple times. 
private volatile List> errors; - public GraphRequestHandler( - @NonNull GraphStatement graphStatement, + GraphRequestHandler( + @NonNull GraphStatement statement, @NonNull DefaultSession dseSession, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix, @@ -144,23 +153,15 @@ public GraphRequestHandler( @NonNull GraphSupportChecker graphSupportChecker) { this.startTimeNanos = System.nanoTime(); this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); - Preconditions.checkArgument( - graphStatement instanceof ScriptGraphStatement - || graphStatement instanceof FluentGraphStatement - || graphStatement instanceof BatchGraphStatement - || graphStatement instanceof BytecodeGraphStatement, - "Unknown graph statement type: " + graphStatement.getClass()); - - LOG.trace("[{}] Creating new Graph request handler for request {}", logPrefix, graphStatement); - this.graphStatement = graphStatement; + LOG.trace("[{}] Creating new Graph request handler for request {}", logPrefix, statement); + this.statement = statement; this.session = dseSession; - - Preconditions.checkArgument(context instanceof DseDriverContext); - this.context = ((DseDriverContext) context); - - this.executionProfile = - GraphConversions.resolveExecutionProfile(this.graphStatement, this.context); - Boolean statementIsIdempotent = graphStatement.isIdempotent(); + this.context = context; + this.executionProfile = GraphConversions.resolveExecutionProfile(this.statement, this.context); + this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); + this.speculativeExecutionPolicy = + context.getSpeculativeExecutionPolicy(executionProfile.getName()); + Boolean statementIsIdempotent = statement.isIdempotent(); this.isIdempotent = (statementIsIdempotent == null) ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) @@ -177,44 +178,31 @@ public GraphRequestHandler( } return null; }); - - this.scheduler = context.getNettyOptions().ioEventLoopGroup().next(); - - Duration timeout = graphStatement.getTimeout(); - if (timeout == null) { - timeout = executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO); - } - this.timeoutFuture = scheduleTimeout(timeout); - - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); - this.speculativeExecutionPolicy = - context.getSpeculativeExecutionPolicy(executionProfile.getName()); - this.activeExecutionsCount = new AtomicInteger(1); - this.startedSpeculativeExecutionsCount = new AtomicInteger(0); - this.scheduledExecutions = isIdempotent ? new CopyOnWriteArrayList<>() : null; - - this.inFlightCallbacks = new CopyOnWriteArrayList<>(); this.graphBinaryModule = graphBinaryModule; - this.subProtocol = - graphSupportChecker.inferGraphProtocol(this.graphStatement, executionProfile, this.context); + graphSupportChecker.inferGraphProtocol(this.statement, executionProfile, this.context); LOG.debug("[{}], Graph protocol used for query: {}", logPrefix, subProtocol); - this.message = GraphConversions.createMessageFromGraphStatement( - this.graphStatement, - subProtocol, - executionProfile, - this.context, - this.graphBinaryModule); + this.statement, subProtocol, executionProfile, this.context, this.graphBinaryModule); + this.timer = context.getNettyOptions().getTimer(); + this.timeout = + statement.getTimeout() != null + ? statement.getTimeout() + : executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); + this.scheduledTimeout = scheduleTimeout(timeout); + + this.activeExecutionsCount = new AtomicInteger(1); + this.startedSpeculativeExecutionsCount = new AtomicInteger(0); + this.scheduledExecutions = isIdempotent ? 
new CopyOnWriteArrayList<>() : null; + this.inFlightCallbacks = new CopyOnWriteArrayList<>(); this.queryCustomPayload = GraphConversions.createCustomPayload( - this.graphStatement, - subProtocol, - executionProfile, - this.context, - this.graphBinaryModule); + this.statement, subProtocol, executionProfile, this.context, this.graphBinaryModule); + + this.requestTracker = context.getRequestTracker(); + this.sessionMetricUpdater = session.getMetricUpdater(); this.throttler = context.getRequestThrottler(); this.throttler.register(this); @@ -222,62 +210,73 @@ public GraphRequestHandler( @Override public void onThrottleReady(boolean wasDelayed) { - if (wasDelayed) { - session - .getMetricUpdater() - .updateTimer( - DefaultSessionMetric.THROTTLING_DELAY, - executionProfile.getName(), - System.nanoTime() - startTimeNanos, - TimeUnit.NANOSECONDS); + if (wasDelayed + // avoid call to nanoTime() if metric is disabled: + && sessionMetricUpdater.isEnabled( + DefaultSessionMetric.THROTTLING_DELAY, executionProfile.getName())) { + sessionMetricUpdater.updateTimer( + DefaultSessionMetric.THROTTLING_DELAY, + executionProfile.getName(), + System.nanoTime() - startTimeNanos, + TimeUnit.NANOSECONDS); } - // compute query plan only when the throttling is done. - // TODO thread safety? - this.queryPlan = - context - .getLoadBalancingPolicyWrapper() - .newQueryPlan(graphStatement, executionProfile.getName(), session); - sendRequest(null, 0, 0, true); + Queue queryPlan = + statement.getNode() != null + ? 
new QueryPlan(statement.getNode()) + : context + .getLoadBalancingPolicyWrapper() + .newQueryPlan(statement, executionProfile.getName(), session); + sendRequest(null, queryPlan, 0, 0, true); } public CompletionStage handle() { return result; } - @Override - public void onThrottleFailure(@NonNull RequestThrottlingException error) { - session - .getMetricUpdater() - .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); - setFinalError(error, null); - } - - private ScheduledFuture scheduleTimeout(Duration timeout) { - if (timeout != null && timeout.toNanos() > 0) { - return scheduler.schedule( - () -> setFinalError(new DriverTimeoutException("Query timed out after " + timeout), null), - timeout.toNanos(), - TimeUnit.NANOSECONDS); - } else { - return null; + private Timeout scheduleTimeout(Duration timeoutDuration) { + if (timeoutDuration != null && timeoutDuration.toNanos() > 0) { + try { + return this.timer.newTimeout( + (Timeout timeout1) -> + setFinalError( + new DriverTimeoutException("Query timed out after " + timeoutDuration), + null, + NO_SUCCESSFUL_EXECUTION), + timeoutDuration.toNanos(), + TimeUnit.NANOSECONDS); + } catch (IllegalStateException e) { + // If we raced with session shutdown the timer might be closed already, rethrow with a more + // explicit message + result.completeExceptionally( + "cannot be started once stopped".equals(e.getMessage()) + ? new IllegalStateException("Session is closed") + : e); + } } + return null; } /** * Sends the request to the next available node. * - * @param node if not null, it will be attempted first before the rest of the query plan. + * @param retriedNode if not null, it will be attempted first before the rest of the query plan. + * @param queryPlan the list of nodes to try (shared with all other executions) * @param currentExecutionIndex 0 for the initial execution, 1 for the first speculative one, etc. 
* @param retryCount the number of times that the retry policy was invoked for this execution - * already (note that some internal retries don'traversals go through the policy, and - * therefore don'traversals increment this counter) + * already (note that some internal retries don't go through the policy, and therefore don't + * increment this counter) * @param scheduleNextExecution whether to schedule the next speculative execution */ private void sendRequest( - Node node, int currentExecutionIndex, int retryCount, boolean scheduleNextExecution) { + Node retriedNode, + Queue queryPlan, + int currentExecutionIndex, + int retryCount, + boolean scheduleNextExecution) { if (result.isDone()) { return; } + Node node = retriedNode; DriverChannel channel = null; if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { while (!result.isDone() && (node = queryPlan.poll()) != null) { @@ -291,74 +290,204 @@ private void sendRequest( // We've reached the end of the query plan without finding any node to write to if (!result.isDone() && activeExecutionsCount.decrementAndGet() == 0) { // We're the last execution so fail the result - setFinalError(AllNodesFailedException.fromErrors(this.errors), null); + setFinalError( + AllNodesFailedException.fromErrors(this.errors), null, NO_SUCCESSFUL_EXECUTION); } } else { - PerRequestCallback perRequestCallback = - new PerRequestCallback( - node, channel, currentExecutionIndex, retryCount, scheduleNextExecution, logPrefix); - + NodeResponseCallback nodeResponseCallback = + new NodeResponseCallback( + node, + queryPlan, + channel, + currentExecutionIndex, + retryCount, + scheduleNextExecution, + logPrefix); channel - .write(message, graphStatement.isTracing(), queryCustomPayload, perRequestCallback) - .addListener(perRequestCallback); + .write(message, statement.isTracing(), queryCustomPayload, nodeResponseCallback) + .addListener(nodeResponseCallback); } } + private void recordError(Node node, Throwable error) { + // 
Use a local variable to do only a single single volatile read in the nominal case + List> errorsSnapshot = this.errors; + if (errorsSnapshot == null) { + synchronized (GraphRequestHandler.this) { + errorsSnapshot = this.errors; + if (errorsSnapshot == null) { + this.errors = errorsSnapshot = new CopyOnWriteArrayList<>(); + } + } + } + errorsSnapshot.add(new AbstractMap.SimpleEntry<>(node, error)); + } + private void cancelScheduledTasks() { - if (this.timeoutFuture != null) { - this.timeoutFuture.cancel(false); + if (this.scheduledTimeout != null) { + this.scheduledTimeout.cancel(); } if (scheduledExecutions != null) { - for (ScheduledFuture future : scheduledExecutions) { - future.cancel(false); + for (Timeout scheduledExecution : scheduledExecutions) { + scheduledExecution.cancel(); } } - for (PerRequestCallback callback : inFlightCallbacks) { + for (NodeResponseCallback callback : inFlightCallbacks) { callback.cancel(); } } - private void setFinalError(Throwable error, Node node) { + private void setFinalResult( + Result resultMessage, Frame responseFrame, NodeResponseCallback callback) { + try { + ExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); + + Queue graphNodes = new ArrayDeque<>(); + for (List row : ((Rows) resultMessage).getData()) { + if (subProtocol.isGraphBinary()) { + graphNodes.offer( + GraphConversions.createGraphBinaryGraphNode( + row, GraphRequestHandler.this.graphBinaryModule)); + } else { + graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); + } + } + + DefaultAsyncGraphResultSet resultSet = + new DefaultAsyncGraphResultSet(executionInfo, graphNodes, subProtocol); + if (result.complete(resultSet)) { + cancelScheduledTasks(); + throttler.signalSuccess(this); + + // Only call nanoTime() if we're actually going to use it + long completionTimeNanos = NANOTIME_NOT_MEASURED_YET, + totalLatencyNanos = NANOTIME_NOT_MEASURED_YET; + if (!(requestTracker instanceof NoopRequestTracker)) { + completionTimeNanos = 
System.nanoTime(); + totalLatencyNanos = completionTimeNanos - startTimeNanos; + long nodeLatencyNanos = completionTimeNanos - callback.nodeStartTimeNanos; + requestTracker.onNodeSuccess( + statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); + requestTracker.onSuccess( + statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); + } + if (sessionMetricUpdater.isEnabled( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, executionProfile.getName())) { + if (completionTimeNanos == NANOTIME_NOT_MEASURED_YET) { + completionTimeNanos = System.nanoTime(); + totalLatencyNanos = completionTimeNanos - startTimeNanos; + } + sessionMetricUpdater.updateTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + executionProfile.getName(), + totalLatencyNanos, + TimeUnit.NANOSECONDS); + } + } + // log the warnings if they have NOT been disabled + if (!executionInfo.getWarnings().isEmpty() + && executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) + && LOG.isWarnEnabled()) { + logServerWarnings(executionInfo.getWarnings()); + } + } catch (Throwable error) { + setFinalError(error, callback.node, NO_SUCCESSFUL_EXECUTION); + } + } + + private void logServerWarnings(List warnings) { + // use the RequestLogFormatter to format the query + StringBuilder statementString = new StringBuilder(); + context + .getRequestLogFormatter() + .appendRequest( + statement, + executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_QUERY_LENGTH, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_QUERY_LENGTH), + executionProfile.getBoolean( + DefaultDriverOption.REQUEST_LOGGER_VALUES, + RequestLogger.DEFAULT_REQUEST_LOGGER_SHOW_VALUES), + executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_VALUES, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_VALUES), + executionProfile.getInt( + DefaultDriverOption.REQUEST_LOGGER_MAX_VALUE_LENGTH, + RequestLogger.DEFAULT_REQUEST_LOGGER_MAX_VALUE_LENGTH), + statementString); + // log each warning separately + 
warnings.forEach( + (warning) -> + LOG.warn("Query '{}' generated server side warning(s): {}", statementString, warning)); + } + + private ExecutionInfo buildExecutionInfo(NodeResponseCallback callback, Frame responseFrame) { + return new DefaultExecutionInfo( + statement, + callback.node, + startedSpeculativeExecutionsCount.get(), + callback.execution, + errors, + null, + responseFrame, + true, + session, + context, + executionProfile); + } + + @Override + public void onThrottleFailure(@NonNull RequestThrottlingException error) { + sessionMetricUpdater.incrementCounter( + DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); + setFinalError(error, null, NO_SUCCESSFUL_EXECUTION); + } + + private void setFinalError(Throwable error, Node node, int execution) { + // FIXME JAVA-2556 + // if (error instanceof DriverException) { + // ((DriverException) error) + // .setExecutionInfo( + // new DefaultExecutionInfo( + // graphStatement, + // node, + // startedSpeculativeExecutionsCount.get(), + // execution, + // errors, + // null, + // null, + // true, + // session, + // context, + // executionProfile)); + // } if (result.completeExceptionally(error)) { cancelScheduledTasks(); - long latencyNanos = System.nanoTime() - startTimeNanos; - context - .getRequestTracker() - .onError(graphStatement, error, latencyNanos, executionProfile, node, logPrefix); + if (!(requestTracker instanceof NoopRequestTracker)) { + long latencyNanos = System.nanoTime() - startTimeNanos; + requestTracker.onError(statement, error, latencyNanos, executionProfile, node, logPrefix); + } if (error instanceof DriverTimeoutException) { throttler.signalTimeout(this); - session - .getMetricUpdater() - .incrementCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + sessionMetricUpdater.incrementCounter( + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); } else if (!(error instanceof RequestThrottlingException)) { throttler.signalError(this, 
error); } } } - private void recordError(Node node, Throwable error) { - // Use a local variable to do only a single single volatile read in the nominal case - List> errorsSnapshot = this.errors; - if (errorsSnapshot == null) { - synchronized (GraphRequestHandler.this) { - errorsSnapshot = this.errors; - if (errorsSnapshot == null) { - this.errors = errorsSnapshot = new CopyOnWriteArrayList<>(); - } - } - } - errorsSnapshot.add(new AbstractMap.SimpleEntry<>(node, error)); - } - /** * Handles the interaction with a single node in the query plan. * *

      An instance of this class is created each time we (re)try a node. */ - private class PerRequestCallback + private class NodeResponseCallback implements ResponseCallback, GenericFutureListener> { - private final long start = System.nanoTime(); + + private final long nodeStartTimeNanos = System.nanoTime(); private final Node node; + private final Queue queryPlan; private final DriverChannel channel; // The identifier of the current execution (0 for the initial execution, 1 for the first // speculative execution, etc.) @@ -369,14 +498,16 @@ private class PerRequestCallback private final boolean scheduleNextExecution; private final String logPrefix; - PerRequestCallback( + private NodeResponseCallback( Node node, + Queue queryPlan, DriverChannel channel, int execution, int retryCount, boolean scheduleNextExecution, String logPrefix) { this.node = node; + this.queryPlan = queryPlan; this.channel = channel; this.execution = execution; this.retryCount = retryCount; @@ -384,36 +515,15 @@ private class PerRequestCallback this.logPrefix = logPrefix + "|" + execution; } - @Override - public void onFailure(Throwable error) { - inFlightCallbacks.remove(this); - if (result.isDone()) { - return; - } - LOG.trace("[{}] Request failure, processing: {}", logPrefix, error.toString()); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; - } else { - decision = retryPolicy.onRequestAborted(graphStatement, error, retryCount); - } - processRetryDecision(decision, error); - updateErrorMetrics( - ((DefaultNode) node).getMetricUpdater(), - decision, - DefaultNodeMetric.ABORTED_REQUESTS, - DefaultNodeMetric.RETRIES_ON_ABORTED, - DefaultNodeMetric.IGNORES_ON_ABORTED); - } - // this gets invoked once the write completes. 
@Override - public void operationComplete(Future voidFuture) { - if (!voidFuture.isSuccess()) { - Throwable error = voidFuture.cause(); + public void operationComplete(Future future) { + if (!future.isSuccess()) { + Throwable error = future.cause(); if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { - setFinalError(error.getCause(), node); + trackNodeError(node, error.getCause(), NANOTIME_NOT_MEASURED_YET); + setFinalError(error.getCause(), node, execution); } else { LOG.trace( "[{}] Failed to send request on {}, trying next node (cause: {})", @@ -421,51 +531,27 @@ public void operationComplete(Future voidFuture) { channel, error); recordError(node, error); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); ((DefaultNode) node) .getMetricUpdater() .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); - sendRequest(null, execution, retryCount, scheduleNextExecution); // try next node + sendRequest( + null, queryPlan, execution, retryCount, scheduleNextExecution); // try next node } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); if (result.isDone()) { // If the handler completed since the last time we checked, cancel directly because we - // don'traversals know if cancelScheduledTasks() has run yet + // don't know if cancelScheduledTasks() has run yet cancel(); } else { inFlightCallbacks.add(this); if (scheduleNextExecution && isIdempotent) { int nextExecution = execution + 1; - // Note that `node` is the first node of the execution, it might not be the "slow" one - // if there were retries, but in practice retries are rare. 
long nextDelay = - speculativeExecutionPolicy.nextExecution(node, null, graphStatement, nextExecution); + speculativeExecutionPolicy.nextExecution(node, null, statement, nextExecution); if (nextDelay >= 0) { - LOG.trace( - "[{}] Scheduling speculative execution {} in {} ms", - logPrefix, - nextExecution, - nextDelay); - scheduledExecutions.add( - scheduler.schedule( - () -> { - if (!result.isDone()) { - LOG.trace( - "[{}] Starting speculative execution {}", - GraphRequestHandler.this.logPrefix, - nextExecution); - activeExecutionsCount.incrementAndGet(); - startedSpeculativeExecutionsCount.incrementAndGet(); - ((DefaultNode) node) - .getMetricUpdater() - .incrementCounter( - DefaultNodeMetric.SPECULATIVE_EXECUTIONS, - executionProfile.getName()); - sendRequest(null, nextExecution, 0, true); - } - }, - nextDelay, - TimeUnit.MILLISECONDS)); + scheduleSpeculativeExecution(nextExecution, nextDelay); } else { LOG.trace( "[{}] Speculative execution policy returned {}, no next execution", @@ -477,15 +563,53 @@ public void operationComplete(Future voidFuture) { } } + private void scheduleSpeculativeExecution(int index, long delay) { + LOG.trace("[{}] Scheduling speculative execution {} in {} ms", logPrefix, index, delay); + try { + scheduledExecutions.add( + timer.newTimeout( + (Timeout timeout1) -> { + if (!result.isDone()) { + LOG.trace( + "[{}] Starting speculative execution {}", + GraphRequestHandler.this.logPrefix, + index); + activeExecutionsCount.incrementAndGet(); + startedSpeculativeExecutionsCount.incrementAndGet(); + // Note that `node` is the first node of the execution, it might not be the + // "slow" one if there were retries, but in practice retries are rare. 
+ ((DefaultNode) node) + .getMetricUpdater() + .incrementCounter( + DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName()); + sendRequest(null, queryPlan, index, 0, true); + } + }, + delay, + TimeUnit.MILLISECONDS)); + } catch (IllegalStateException e) { + // If we're racing with session shutdown, the timer might be stopped already. We don't want + // to schedule more executions anyway, so swallow the error. + if (!"cannot be started once stopped".equals(e.getMessage())) { + Loggers.warnWithException( + LOG, "[{}] Error while scheduling speculative execution", logPrefix, e); + } + } + } + @Override public void onResponse(Frame responseFrame) { - ((DefaultNode) node) - .getMetricUpdater() - .updateTimer( - DefaultNodeMetric.CQL_MESSAGES, - executionProfile.getName(), - System.nanoTime() - start, - TimeUnit.NANOSECONDS); + long nodeResponseTimeNanos = NANOTIME_NOT_MEASURED_YET; + NodeMetricUpdater nodeMetricUpdater = ((DefaultNode) node).getMetricUpdater(); + if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.CQL_MESSAGES, executionProfile.getName())) { + nodeResponseTimeNanos = System.nanoTime(); + long nodeLatency = System.nanoTime() - nodeStartTimeNanos; + nodeMetricUpdater.updateTimer( + DefaultNodeMetric.CQL_MESSAGES, + executionProfile.getName(), + nodeLatency, + TimeUnit.NANOSECONDS); + } inFlightCallbacks.remove(this); if (result.isDone()) { return; @@ -499,86 +623,116 @@ public void onResponse(Frame responseFrame) { LOG.trace("[{}] Got error response, processing", logPrefix); processErrorResponse((Error) responseMessage); } else { - setFinalError(new IllegalStateException("Unexpected response " + responseMessage), node); + trackNodeError( + node, + new IllegalStateException("Unexpected response " + responseMessage), + nodeResponseTimeNanos); + setFinalError( + new IllegalStateException("Unexpected response " + responseMessage), node, execution); } } catch (Throwable t) { - setFinalError(t, node); + trackNodeError(node, t, 
nodeResponseTimeNanos); + setFinalError(t, node, execution); } } - private void setFinalResult( - Result resultMessage, Frame responseFrame, PerRequestCallback callback) { - try { - ExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); - - Queue graphNodes = new ArrayDeque<>(); - for (List row : ((Rows) resultMessage).getData()) { - if (subProtocol.isGraphBinary()) { - graphNodes.offer( - GraphConversions.createGraphBinaryGraphNode( - row, GraphRequestHandler.this.graphBinaryModule)); - } else { - graphNodes.offer(GraphSONUtils.createGraphNode(row, subProtocol)); - } - } - - DefaultAsyncGraphResultSet resultSet = - new DefaultAsyncGraphResultSet(executionInfo, graphNodes, subProtocol); - if (result.complete(resultSet)) { - cancelScheduledTasks(); - throttler.signalSuccess(GraphRequestHandler.this); - long latencyNanos = System.nanoTime() - startTimeNanos; - context - .getRequestTracker() - .onSuccess(graphStatement, latencyNanos, executionProfile, callback.node, logPrefix); - session - .getMetricUpdater() - .updateTimer( - DefaultSessionMetric.CQL_REQUESTS, - executionProfile.getName(), - latencyNanos, - TimeUnit.NANOSECONDS); + private void processErrorResponse(Error errorMessage) { + if (errorMessage.code == ProtocolConstants.ErrorCode.UNPREPARED) { + ByteBuffer idToReprepare = ByteBuffer.wrap(((Unprepared) errorMessage).id); + LOG.trace( + "[{}] Statement {} is not prepared on {}, repreparing", + logPrefix, + Bytes.toHexString(idToReprepare), + node); + RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); + if (repreparePayload == null) { + throw new IllegalStateException( + String.format( + "Tried to execute unprepared query %s but we don't have the data to reprepare it", + Bytes.toHexString(idToReprepare))); } - } catch (Throwable error) { - setFinalError(error, callback.node); + Prepare reprepareMessage = repreparePayload.toMessage(); + ThrottledAdminRequestHandler reprepareHandler = + 
ThrottledAdminRequestHandler.prepare( + channel, + reprepareMessage, + repreparePayload.customPayload, + timeout, + throttler, + sessionMetricUpdater, + logPrefix); + reprepareHandler + .start() + .handle( + (repreparedId, exception) -> { + if (exception != null) { + // If the error is not recoverable, surface it to the client instead of retrying + if (exception instanceof UnexpectedResponseException) { + Message prepareErrorMessage = + ((UnexpectedResponseException) exception).message; + if (prepareErrorMessage instanceof Error) { + CoordinatorException prepareError = + Conversions.toThrowable(node, (Error) prepareErrorMessage, context); + if (prepareError instanceof QueryValidationException + || prepareError instanceof FunctionFailureException + || prepareError instanceof ProtocolError) { + LOG.trace("[{}] Unrecoverable error on reprepare, rethrowing", logPrefix); + trackNodeError(node, prepareError, NANOTIME_NOT_MEASURED_YET); + setFinalError(prepareError, node, execution); + return null; + } + } + } else if (exception instanceof RequestThrottlingException) { + trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); + setFinalError(exception, node, execution); + return null; + } + recordError(node, exception); + trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); + LOG.trace("[{}] Reprepare failed, trying next node", logPrefix); + sendRequest(null, queryPlan, execution, retryCount, false); + } else { + if (!repreparedId.equals(idToReprepare)) { + IllegalStateException illegalStateException = + new IllegalStateException( + String.format( + "ID mismatch while trying to reprepare (expected %s, got %s). " + + "This prepared statement won't work anymore. " + + "This usually happens when you run a 'USE...' 
query after " + + "the statement was prepared.", + Bytes.toHexString(idToReprepare), + Bytes.toHexString(repreparedId))); + trackNodeError(node, illegalStateException, NANOTIME_NOT_MEASURED_YET); + setFinalError(illegalStateException, node, execution); + } + LOG.trace("[{}] Reprepare sucessful, retrying", logPrefix); + sendRequest(node, queryPlan, execution, retryCount, false); + } + return null; + }); + return; } - } - - private ExecutionInfo buildExecutionInfo(PerRequestCallback callback, Frame responseFrame) { - return new DefaultExecutionInfo( - graphStatement, - callback.node, - startedSpeculativeExecutionsCount.get(), - callback.execution, - errors, - null, - responseFrame, - true, - session, - context, - executionProfile); - } - - private void processErrorResponse(Error errorMessage) { - CoordinatorException error = GraphConversions.toThrowable(node, errorMessage, context); + CoordinatorException error = Conversions.toThrowable(node, errorMessage, context); NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); if (error instanceof BootstrappingException) { LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); recordError(node, error); - sendRequest(null, execution, retryCount, false); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); + sendRequest(null, queryPlan, execution, retryCount, false); } else if (error instanceof QueryValidationException || error instanceof FunctionFailureException || error instanceof ProtocolError) { LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); - setFinalError(error, node); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); + setFinalError(error, node, execution); } else { RetryDecision decision; if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; decision = retryPolicy.onReadTimeout( - graphStatement, + 
statement, readTimeout.getConsistencyLevel(), readTimeout.getBlockFor(), readTimeout.getReceived(), @@ -595,7 +749,7 @@ private void processErrorResponse(Error errorMessage) { decision = isIdempotent ? retryPolicy.onWriteTimeout( - graphStatement, + statement, writeTimeout.getConsistencyLevel(), writeTimeout.getWriteType(), writeTimeout.getBlockFor(), @@ -612,7 +766,7 @@ private void processErrorResponse(Error errorMessage) { UnavailableException unavailable = (UnavailableException) error; decision = retryPolicy.onUnavailable( - graphStatement, + statement, unavailable.getConsistencyLevel(), unavailable.getRequired(), unavailable.getAlive(), @@ -626,7 +780,7 @@ private void processErrorResponse(Error errorMessage) { } else { decision = isIdempotent - ? retryPolicy.onErrorResponse(graphStatement, error, retryCount) + ? retryPolicy.onErrorResponse(statement, error, retryCount) : RetryDecision.RETHROW; updateErrorMetrics( metricUpdater, @@ -644,14 +798,17 @@ private void processRetryDecision(RetryDecision decision, Throwable error) { switch (decision) { case RETRY_SAME: recordError(node, error); - sendRequest(node, execution, retryCount + 1, false); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); + sendRequest(node, queryPlan, execution, retryCount + 1, false); break; case RETRY_NEXT: recordError(node, error); - sendRequest(null, execution, retryCount + 1, false); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); + sendRequest(null, queryPlan, execution, retryCount + 1, false); break; case RETHROW: - setFinalError(error, node); + trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); + setFinalError(error, node, execution); break; case IGNORE: setFinalResult(Void.INSTANCE, null, this); @@ -681,6 +838,28 @@ private void updateErrorMetrics( } } + @Override + public void onFailure(Throwable error) { + inFlightCallbacks.remove(this); + if (result.isDone()) { + return; + } + LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); + 
RetryDecision decision; + if (!isIdempotent || error instanceof FrameTooLongException) { + decision = RetryDecision.RETHROW; + } else { + decision = retryPolicy.onRequestAborted(statement, error, retryCount); + } + processRetryDecision(decision, error); + updateErrorMetrics( + ((DefaultNode) node).getMetricUpdater(), + decision, + DefaultNodeMetric.ABORTED_REQUESTS, + DefaultNodeMetric.RETRIES_ON_ABORTED, + DefaultNodeMetric.IGNORES_ON_ABORTED); + } + void cancel() { try { if (!channel.closeFuture().isDone()) { @@ -691,6 +870,22 @@ void cancel() { } } + /** + * @param nodeResponseTimeNanos the time we received the response, if it's already been + * measured. If {@link #NANOTIME_NOT_MEASURED_YET}, it hasn't and we need to measure it now + * (this is to avoid unnecessary calls to System.nanoTime) + */ + private void trackNodeError(Node node, Throwable error, long nodeResponseTimeNanos) { + if (requestTracker instanceof NoopRequestTracker) { + return; + } + if (nodeResponseTimeNanos == NANOTIME_NOT_MEASURED_YET) { + nodeResponseTimeNanos = System.nanoTime(); + } + long latencyNanos = nodeResponseTimeNanos - this.nodeStartTimeNanos; + requestTracker.onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); + } + @Override public String toString() { return logPrefix; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java index 196baa1a42b..5447b6657c6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestSyncProcessor.java @@ -16,8 +16,11 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import 
com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -39,7 +42,11 @@ public GraphRequestSyncProcessor(GraphRequestAsyncProcessor asyncProcessor) { @Override public boolean canProcess(Request request, GenericType resultType) { - return request instanceof GraphStatement && resultType.equals(GraphStatement.SYNC); + return (request instanceof ScriptGraphStatement + || request instanceof FluentGraphStatement + || request instanceof BatchGraphStatement + || request instanceof BytecodeGraphStatement) + && resultType.equals(GraphStatement.SYNC); } @Override diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 95f4a47d611..8c80c219b10 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -35,6 +35,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.data.geometry.Point; @@ -53,6 +54,7 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import 
com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.cql.Conversions; @@ -74,6 +76,7 @@ import java.time.ZoneOffset; import java.util.List; import java.util.Map; +import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -87,7 +90,7 @@ @RunWith(DataProviderRunner.class) public class GraphRequestHandlerTest { - private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test-graph\\|\\d*\\|\\d*"); + private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("test-graph\\|\\d+"); @Mock DefaultNode node; @@ -459,8 +462,12 @@ public static Object[][] supportedGraphProtocolsWithDseVersions() { @Test @UseDataProvider("dseVersionsWithDefaultGraphProtocol") - public void should_invoke_request_tracker(GraphProtocol graphProtocol, Version dseVersion) - throws IOException { + public void should_invoke_request_tracker_and_update_metrics( + GraphProtocol graphProtocol, Version dseVersion) throws IOException { + when(nodeMetricUpdater1.isEnabled( + DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn(true); + Builder builder = GraphRequestHandlerTestHarness.builder() .withGraphProtocolForTestConfig(graphProtocol) @@ -513,7 +520,34 @@ public void should_invoke_request_tracker(GraphProtocol graphProtocol, Version d any(DriverExecutionProfile.class), eq(node), matches(LOG_PREFIX_PER_REQUEST)); + verify(requestTracker) + .onNodeSuccess( + eq(graphStatement), + anyLong(), + any(DriverExecutionProfile.class), + eq(node), + matches(LOG_PREFIX_PER_REQUEST)); verifyNoMoreInteractions(requestTracker); + + verify(nodeMetricUpdater1) + .isEnabled(DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); + 
verify(nodeMetricUpdater1) + .updateTimer( + eq(DefaultNodeMetric.CQL_MESSAGES), + eq(DriverExecutionProfile.DEFAULT_NAME), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + verifyNoMoreInteractions(nodeMetricUpdater1); + + verify(harness.getSession().getMetricUpdater()) + .isEnabled(DseSessionMetric.CONTINUOUS_CQL_REQUESTS, DriverExecutionProfile.DEFAULT_NAME); + verify(harness.getSession().getMetricUpdater()) + .updateTimer( + eq(DseSessionMetric.CONTINUOUS_CQL_REQUESTS), + eq(DriverExecutionProfile.DEFAULT_NAME), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + verifyNoMoreInteractions(harness.getSession().getMetricUpdater()); } @DataProvider From e3717524e3683e42cbb262ed5cab5defdb97ad67 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Thu, 5 Dec 2019 12:18:30 +0100 Subject: [PATCH 339/979] JAVA-2238: Make all Graph integration tests use GraphBinary (#311) --- .../internal/core/graph/GraphTestUtils.java | 24 ++ .../graph/ClassicGraphGeoSearchIndexIT.java | 144 +++++++ .../graph/ClassicGraphTextSearchIndexIT.java | 139 +++++++ .../core/graph/CoreGraphGeoSearchIndexIT.java | 112 ++++++ .../graph/CoreGraphTextSearchIndexIT.java | 118 ++++++ .../api/core/graph/GraphGeoSearchIndexIT.java | 266 ------------- .../core/graph/GraphGeoSearchIndexITBase.java | 169 +++++++++ ...T.java => GraphTextSearchIndexITBase.java} | 143 +------ .../api/core/graph/SampleGraphScripts.java | 29 +- .../remote/ClassicGraphTraversalRemoteIT.java | 79 ++++ .../remote/CoreGraphTraversalRemoteIT.java | 71 ++++ ...GraphTraversalMultiPropertiesRemoteIT.java | 1 + ...T.java => GraphTraversalRemoteITBase.java} | 321 +++++++++++----- .../ClassicGraphTraversalBatchIT.java | 65 ++++ .../statement/ClassicGraphTraversalIT.java | 78 ++++ .../statement/CoreGraphTraversalBatchIT.java | 60 +++ .../graph/statement/CoreGraphTraversalIT.java | 69 ++++ .../statement/GraphTraversalBatchIT.java | 132 ------- .../statement/GraphTraversalBatchITBase.java | 150 ++++++++ ...ersalIT.java => GraphTraversalITBase.java} | 350 
++++++++++++------ .../GraphTraversalMultiPropertiesIT.java | 1 + 21 files changed, 1799 insertions(+), 722 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/{GraphTextSearchIndexIT.java => GraphTextSearchIndexITBase.java} (60%) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/{GraphTraversalRemoteIT.java => GraphTraversalRemoteITBase.java} (60%) create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java delete mode 100644 
integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java create mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java rename integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/{GraphTraversalIT.java => GraphTraversalITBase.java} (60%) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java index abff143a73b..fd44246833b 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -6,6 +6,8 @@ */ package com.datastax.dse.driver.internal.core.graph; +import static org.assertj.core.api.Assertions.assertThat; + import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.internal.core.context.DseDriverContext; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; @@ -24,8 +26,11 @@ import java.util.ArrayDeque; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Queue; import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.T; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.io.Buffer; import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; @@ -33,6 +38,7 @@ import org.apache.tinkerpop.gremlin.structure.io.binary.TypeSerializerRegistry; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertex; import org.apache.tinkerpop.gremlin.structure.util.detached.DetachedVertexProperty; +import org.assertj.core.api.InstanceOfAssertFactories; public class GraphTestUtils { @@ 
-151,4 +157,22 @@ public static GraphBinaryModule createGraphBinaryModule(DseDriverContext context TypeSerializerRegistry registry = GraphBinaryModule.createDseTypeSerializerRegistry(context); return new GraphBinaryModule(new GraphBinaryReader(registry), new GraphBinaryWriter(registry)); } + + public static void assertThatContainsProperties( + Map properties, Object... propsToMatch) { + for (int i = 0; i < propsToMatch.length; i += 2) { + assertThat(properties).containsEntry(propsToMatch[i], propsToMatch[i + 1]); + } + } + + public static void assertThatContainsLabel( + Map properties, Direction direction, String label) { + assertThat(properties) + .hasEntrySatisfying( + direction, + value -> + assertThat(value) + .asInstanceOf(InstanceOfAssertFactories.MAP) + .containsEntry(T.label, label)); + } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java new file mode 100644 index 00000000000..29307722804 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java @@ -0,0 +1,144 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +public class ClassicGraphGeoSearchIndexIT extends GraphGeoSearchIndexITBase { + private static CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + + @Override + protected boolean isGraphBinary() { + return false; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return g; + } + + @BeforeClass + public static void setup() { + for (String setupQuery : geoIndices()) { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + CCM_RULE.getCcmBridge().reloadCore(1, SESSION_RULE.getGraphName(), "user_p", true); + } + 
+ /** + * A schema representing an address book with search enabled on name, description, and + * coordinates. + */ + public static Collection geoIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = new StringBuilder("schema.propertyKey('full_name').Text().create()\n"); + StringBuilder propertyKeys = new StringBuilder(); + StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); + StringBuilder indices = new StringBuilder(); + StringBuilder vertex0 = + new StringBuilder("g.addV('user').property('full_name', 'Paul Thomas Joe')"); + StringBuilder vertex1 = + new StringBuilder("g.addV('user').property('full_name', 'George Bill Steve')"); + String vertex2 = "g.addV('user').property('full_name', 'James Paul Joe')"; + StringBuilder vertex3 = new StringBuilder("g.addV('user').property('full_name', 'Jill Alice')"); + + ArrayList propertyNames = new ArrayList<>(); + propertyNames.add("'full_name'"); + + for (String indexType : indexTypes) { + + propertyKeys.append( + String.format( + "schema.propertyKey('pointPropWithBounds_%s')." 
+ + "Point().withBounds(0.000000, 0.000000, 100.000000, 100.000000).create()\n", + indexType)); + + propertyKeys.append( + String.format( + "schema.propertyKey('pointPropWithGeoBounds_%s').Point().withGeoBounds().create()\n", + indexType)); + + propertyNames.add("'pointPropWithBounds_" + indexType + "'"); + propertyNames.add("'pointPropWithGeoBounds_" + indexType + "'"); + + if (indexType.equals("search")) { + + indices.append( + String.format( + "schema.vertexLabel('user').index('search').search().by('pointPropWithBounds_%s').withError(0.00001, 0.0).by('pointPropWithGeoBounds_%s').withError(0.00001, 0.0).add()\n", + indexType, indexType)); + } else { + + indices.append( + String.format( + "schema.vertexLabel('user').index('by_pointPropWithBounds_%s').%s().by('pointPropWithBounds_%s').add()\n", + indexType, indexType, indexType)); + + indices.append( + String.format( + "schema.vertexLabel('user').index('by_pointPropWithGeoBounds_%s').%s().by('pointPropWithGeoBounds_%s').add()\n", + indexType, indexType, indexType)); + } + + vertex0.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(40.0001 40)').property('pointPropWithGeoBounds_%s', 'POINT(40.0001 40)')", + indexType, indexType)); + vertex1.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(40 40)').property('pointPropWithGeoBounds_%s', 'POINT(40 40)')", + indexType, indexType)); + vertex3.append( + String.format( + ".property('pointPropWithBounds_%s', 'POINT(30 30)').property('pointPropWithGeoBounds_%s', 'POINT(30 30)')", + indexType, indexType)); + } + + vertexLabel.append(Joiner.on(", ").join(propertyNames)); + vertexLabel.append(").create()\n"); + + schema.append(propertyKeys).append(vertexLabel).append(indices); + + return Lists.newArrayList( + SampleGraphScripts.MAKE_STRICT, + schema.toString(), + vertex0.toString(), + vertex1.toString(), + vertex2, + vertex3.toString()); + } +} diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java new file mode 100644 index 00000000000..c9675c54504 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java @@ -0,0 +1,139 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRuleBuilder; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +public class ClassicGraphTextSearchIndexIT extends GraphTextSearchIndexITBase { + private static CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule SESSION_RULE = + new DseSessionRuleBuilder(CCM_RULE).withCreateGraph().build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = + 
AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + + /** + * A schema representing an address book with 3 properties (full_name_*, description_*, alias_*) + * created for each type of index (search, secondary, materialized). + */ + public static Collection textIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = new StringBuilder(); + StringBuilder propertyKeys = new StringBuilder(); + StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); + StringBuilder indices = new StringBuilder(); + StringBuilder vertex0 = new StringBuilder("g.addV('user')"); + StringBuilder vertex1 = new StringBuilder("g.addV('user')"); + StringBuilder vertex2 = new StringBuilder("g.addV('user')"); + StringBuilder vertex3 = new StringBuilder("g.addV('user')"); + + ArrayList propertyNames = new ArrayList<>(); + for (String indexType : indexTypes) { + propertyKeys.append( + String.format( + "schema.propertyKey('full_name_%s').Text().create()\n" + + "schema.propertyKey('description_%s').Text().create()\n" + + "schema.propertyKey('alias_%s').Text().create()\n", + indexType, indexType, indexType)); + + propertyNames.add("'full_name_" + indexType + "'"); + propertyNames.add("'description_" + indexType + "'"); + propertyNames.add("'alias_" + indexType + "'"); + + if (indexType.equals("search")) { + indices.append( + "schema.vertexLabel('user').index('search').search().by('full_name_search').asString().by('description_search').asText().by('alias_search').asString().add()\n"); + } else { + indices.append( + String.format( + "schema.vertexLabel('user').index('by_full_name_%s').%s().by('full_name_%s').add()\n", + indexType, indexType, indexType)); + indices.append( + String.format( + 
"schema.vertexLabel('user').index('by_description_%s').%s().by('description_%s').add()\n", + indexType, indexType, indexType)); + indices.append( + String.format( + "schema.vertexLabel('user').index('by_alias_name_%s').%s().by('alias_%s').add()\n", + indexType, indexType, indexType)); + } + + vertex0.append( + String.format( + ".property('full_name_%s', 'Paul Thomas Joe').property('description_%s', 'Lives by the hospital').property('alias_%s', 'mario')", + indexType, indexType, indexType)); + vertex1.append( + String.format( + ".property('full_name_%s', 'George Bill Steve').property('description_%s', 'A cold dude').property('alias_%s', 'wario')", + indexType, indexType, indexType)); + vertex2.append( + String.format( + ".property('full_name_%s', 'James Paul Joe').property('description_%s', 'Likes to hang out').property('alias_%s', 'bowser')", + indexType, indexType, indexType)); + vertex3.append( + String.format( + ".property('full_name_%s', 'Jill Alice').property('description_%s', 'Enjoys a very nice cold coca cola').property('alias_%s', 'peach')", + indexType, indexType, indexType)); + } + + vertexLabel.append(Joiner.on(", ").join(propertyNames)); + vertexLabel.append(").create()\n"); + + schema.append(propertyKeys).append(vertexLabel).append(indices); + + return Lists.newArrayList( + SampleGraphScripts.MAKE_STRICT, + schema.toString(), + vertex0.toString(), + vertex1.toString(), + vertex2.toString(), + vertex3.toString()); + } + + @BeforeClass + public static void setup() { + for (String setupQuery : textIndices()) { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + CCM_RULE.getCcmBridge().reloadCore(1, SESSION_RULE.getGraphName(), "user_p", true); + } + + @Override + protected boolean isGraphBinary() { + return false; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return g; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java new file mode 100644 index 00000000000..192878f5ff6 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java @@ -0,0 +1,112 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +public class CoreGraphGeoSearchIndexIT extends GraphGeoSearchIndexITBase { + + private static CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + private final GraphTraversalSource g = + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()) + .with("allow-filtering"); + + @Override + protected boolean isGraphBinary() { + return true; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return g; + } + + @BeforeClass 
+ public static void setup() { + for (String setupQuery : geoIndices()) { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + CCM_RULE.getCcmBridge().reloadCore(1, SESSION_RULE.getGraphName(), "user", true); + } + + /** + * A schema representing an address book with search enabled on name, description, and + * coordinates. + */ + public static Collection geoIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = + new StringBuilder("schema.vertexLabel('user').partitionBy('full_name', Text)"); + StringBuilder propertyKeys = new StringBuilder(); + StringBuilder indices = new StringBuilder(); + StringBuilder vertex0 = + new StringBuilder("g.addV('user').property('full_name', 'Paul Thomas Joe')"); + StringBuilder vertex1 = + new StringBuilder("g.addV('user').property('full_name', 'George Bill Steve')"); + String vertex2 = "g.addV('user').property('full_name', 'James Paul Joe')"; + StringBuilder vertex3 = new StringBuilder("g.addV('user').property('full_name', 'Jill Alice')"); + + for (String indexType : indexTypes) { + propertyKeys.append(String.format(".property('pointPropWithBounds_%s', Point)\n", indexType)); + + propertyKeys.append( + String.format(".property('pointPropWithGeoBounds_%s', Point)\n", indexType)); + + if (indexType.equals("search")) { + indices.append( + String.format( + "schema.vertexLabel('user').searchIndex().by('pointPropWithBounds_%s').by('pointPropWithGeoBounds_%s').create()\n", + indexType, indexType)); + + } else { + throw new UnsupportedOperationException("IndexType other than search is not supported."); + } + + vertex0.append( + String.format( + ".property('pointPropWithBounds_%s', point(40.0001,40)).property('pointPropWithGeoBounds_%s', point(40.0001,40))", + indexType, indexType)); + vertex1.append( + 
String.format( + ".property('pointPropWithBounds_%s', point(40,40)).property('pointPropWithGeoBounds_%s', point(40,40))", + indexType, indexType)); + vertex3.append( + String.format( + ".property('pointPropWithBounds_%s', point(30,30)).property('pointPropWithGeoBounds_%s', point(30,30))", + indexType, indexType)); + } + + schema.append(propertyKeys).append(".create();\n").append(indices); + + return Lists.newArrayList( + schema.toString(), vertex0.toString(), vertex1.toString(), vertex2, vertex3.toString()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java new file mode 100644 index 00000000000..8db300730af --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java @@ -0,0 +1,118 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import java.util.Collection; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +public class CoreGraphTextSearchIndexIT extends GraphTextSearchIndexITBase { + + private static CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + + private static DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()) + .with("allow-filtering"); + + @Override + protected boolean isGraphBinary() { + return true; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return g; + } + + /** + * A schema representing an address book with 3 properties (full_name_*, description_*, alias_*) + * created for each type of index (search, secondary, materialized). 
+ */ + public static Collection textIndices() { + Object[][] providerIndexTypes = indexTypes(); + String[] indexTypes = new String[providerIndexTypes.length]; + for (int i = 0; i < providerIndexTypes.length; i++) { + indexTypes[i] = (String) providerIndexTypes[i][0]; + } + + StringBuilder schema = new StringBuilder("schema.vertexLabel('user')"); + StringBuilder propertyKeys = new StringBuilder(); + StringBuilder indices = new StringBuilder(); + StringBuilder vertex0 = new StringBuilder("g.addV('user')"); + StringBuilder vertex1 = new StringBuilder("g.addV('user')"); + StringBuilder vertex2 = new StringBuilder("g.addV('user')"); + StringBuilder vertex3 = new StringBuilder("g.addV('user')"); + + for (String indexType : indexTypes) { + propertyKeys.append( + String.format( + ".partitionBy('full_name_%s', Text)" + + ".property('description_%s', Text)" + + ".property('alias_%s', Text)\n", + indexType, indexType, indexType)); + + if (indexType.equals("search")) { + indices.append( + "schema.vertexLabel('user').searchIndex().by('full_name_search').asString().by('description_search').asText().by('alias_search').asString().create()\n"); + } else { + throw new UnsupportedOperationException("IndexType other than search is not supported."); + } + + vertex0.append( + String.format( + ".property('full_name_%s', 'Paul Thomas Joe').property('description_%s', 'Lives by the hospital').property('alias_%s', 'mario')", + indexType, indexType, indexType)); + vertex1.append( + String.format( + ".property('full_name_%s', 'George Bill Steve').property('description_%s', 'A cold dude').property('alias_%s', 'wario')", + indexType, indexType, indexType)); + vertex2.append( + String.format( + ".property('full_name_%s', 'James Paul Joe').property('description_%s', 'Likes to hang out').property('alias_%s', 'bowser')", + indexType, indexType, indexType)); + vertex3.append( + String.format( + ".property('full_name_%s', 'Jill Alice').property('description_%s', 'Enjoys a very nice cold coca 
cola').property('alias_%s', 'peach')", + indexType, indexType, indexType)); + } + + schema.append(propertyKeys).append(".create();\n").append(indices); + + return Lists.newArrayList( + schema.toString(), + vertex0.toString(), + vertex1.toString(), + vertex2.toString(), + vertex3.toString()); + } + + @BeforeClass + public static void setup() { + for (String setupQuery : textIndices()) { + System.out.println("Executing: " + setupQuery); + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); + } + + CCM_RULE.getCcmBridge().reloadCore(1, SESSION_RULE.getGraphName(), "user", true); + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java deleted file mode 100644 index 51d6c0ea16f..00000000000 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexIT.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.api.core.graph; - -import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; -import static org.assertj.core.api.Assertions.fail; - -import com.datastax.dse.driver.api.core.data.geometry.Point; -import com.datastax.dse.driver.api.core.graph.predicates.Geo; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.shaded.guava.common.base.Joiner; -import com.datastax.oss.driver.shaded.guava.common.collect.Lists; -import com.tngtech.java.junit.dataprovider.DataProvider; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import com.tngtech.java.junit.dataprovider.UseDataProvider; -import java.util.ArrayList; -import java.util.Collection; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; -import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; -import org.junit.runner.RunWith; - -@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") -@RunWith(DataProviderRunner.class) -public class GraphGeoSearchIndexIT { - - private static CustomCcmRule ccmRule = - CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); - - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - - private final GraphTraversalSource g = - 
DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); - - /** - * A schema representing an address book with search enabled on name, description, and - * coordinates. - */ - public static Collection geoIndices() { - Object[][] providerIndexTypes = indexTypes(); - String[] indexTypes = new String[providerIndexTypes.length]; - for (int i = 0; i < providerIndexTypes.length; i++) { - indexTypes[i] = (String) providerIndexTypes[i][0]; - } - - StringBuilder schema = new StringBuilder("schema.propertyKey('full_name').Text().create()\n"); - StringBuilder propertyKeys = new StringBuilder(""); - StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); - StringBuilder indices = new StringBuilder(""); - StringBuilder vertex0 = - new StringBuilder("g.addV('user').property('full_name', 'Paul Thomas Joe')"); - StringBuilder vertex1 = - new StringBuilder("g.addV('user').property('full_name', 'George Bill Steve')"); - String vertex2 = "g.addV('user').property('full_name', 'James Paul Joe')"; - StringBuilder vertex3 = new StringBuilder("g.addV('user').property('full_name', 'Jill Alice')"); - - ArrayList propertyNames = new ArrayList(); - propertyNames.add("'full_name'"); - - for (String indexType : indexTypes) { - - propertyKeys.append( - String.format( - "schema.propertyKey('pointPropWithBounds_%s').%s.create()\n", - indexType, geoTypeWithBounds("Point()", 0, 0, 100, 100))); - - propertyKeys.append( - String.format( - "schema.propertyKey('pointPropWithGeoBounds_%s').%s.create()\n", - indexType, geoType("Point()"))); - - propertyNames.add("'pointPropWithBounds_" + indexType + "'"); - propertyNames.add("'pointPropWithGeoBounds_" + indexType + "'"); - - if (indexType.equals("search")) { - - indices.append( - String.format( - "schema.vertexLabel('user').index('search').search().by('pointPropWithBounds_%s').withError(0.00001, 0.0).by('pointPropWithGeoBounds_%s').withError(0.00001, 0.0).add()\n", - indexType, indexType)); 
- } else { - - indices.append( - String.format( - "schema.vertexLabel('user').index('by_pointPropWithBounds_%s').%s().by('pointPropWithBounds_%s').add()\n", - indexType, indexType, indexType)); - - indices.append( - String.format( - "schema.vertexLabel('user').index('by_pointPropWithGeoBounds_%s').%s().by('pointPropWithGeoBounds_%s').add()\n", - indexType, indexType, indexType)); - } - - vertex0.append( - String.format( - ".property('pointPropWithBounds_%s', 'POINT(40.0001 40)').property('pointPropWithGeoBounds_%s', 'POINT(40.0001 40)')", - indexType, indexType)); - vertex1.append( - String.format( - ".property('pointPropWithBounds_%s', 'POINT(40 40)').property('pointPropWithGeoBounds_%s', 'POINT(40 40)')", - indexType, indexType)); - vertex3.append( - String.format( - ".property('pointPropWithBounds_%s', 'POINT(30 30)').property('pointPropWithGeoBounds_%s', 'POINT(30 30)')", - indexType, indexType)); - } - - vertexLabel.append(Joiner.on(", ").join(propertyNames)); - vertexLabel.append(").create()\n"); - - schema.append(propertyKeys).append(vertexLabel).append(indices); - - return Lists.newArrayList( - SampleGraphScripts.MAKE_STRICT, - schema.toString(), - vertex0.toString(), - vertex1.toString(), - vertex2, - vertex3.toString()); - } - - private static String geoTypeWithBounds( - String baseName, - double lowerLimitX, - double lowerLimitY, - double higherLimitX, - double higherLimitY) { - return baseName - + String.format( - ".withBounds(%f, %f, %f, %f)", lowerLimitX, lowerLimitY, higherLimitX, higherLimitY); - } - - private static String geoType(String baseName) { - return baseName + ".withGeoBounds()"; - } - - @BeforeClass - public static void setup() { - for (String setupQuery : geoIndices()) { - sessionRule.session().execute(ScriptGraphStatement.newInstance(setupQuery)); - } - - ccmRule.getCcmBridge().reloadCore(1, sessionRule.getGraphName(), "user_p", true); - } - - @DataProvider - public static Object[][] indexTypes() { - return new Object[][] {{"search"} - 
- // for some reason, materialized and secondary indices have decided not to work - // I get an exception saying "there is no index for this query, here is the defined - // indices: " and the list contains the indices that are needed. Mysterious. - // There may be something to do with differences in the CCMBridge adapter of the new - // driver, some changes make materialized views and secondary indices to be not - // considered for graph: - // - // , {"materialized"} - // , {"secondary"} - }; - } - - @UseDataProvider("indexTypes") - @Test - public void search_by_distance_cartesian(String indexType) { - // in cartesian geometry, the distance between POINT(30 30) and POINT(40 40) is exactly - // 14.142135623730951 - // any point further than that should be detected outside of the range. - // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the - // range. - GraphTraversal traversal = - g.V() - .has( - "user", - "pointPropWithBounds_" + indexType, - Geo.inside(Point.fromCoordinates((double) 30, (double) 30), 14.142135623730951)) - .values("full_name"); - assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); - } - - @UseDataProvider("indexTypes") - @Test - public void search_by_distance_geodetic(String indexType) { - // in geodetic geometry, the distance between POINT(30 30) and POINT(40 40) is exactly - // 12.908258700131379 - // any point further than that should be detected outside of the range. - // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the - // range. 
- GraphTraversal traversal = - g.V() - .has( - "user", - "pointPropWithGeoBounds_" + indexType, - Geo.inside( - Point.fromCoordinates((double) 30, (double) 30), - 12.908258700131379, - Geo.Unit.DEGREES)) - .values("full_name"); - assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); - } - - @Test - public void - should_fail_if_geodetic_predicate_used_against_cartesian_property_with_search_index() { - try { - GraphTraversal traversal = - g.V() - .has( - "user", - "pointPropWithBounds_search", - Geo.inside( - Point.fromCoordinates((double) 30, (double) 30), - 12.908258700131379, - Geo.Unit.DEGREES)) - .values("full_name"); - traversal.toList(); - fail("Should have failed executing the traversal because the property type is incorrect"); - } catch (InvalidQueryException e) { - assertThat(e.getMessage()) - .contains("Distance units cannot be used in queries against non-geodetic points."); - } - } - - @Test - public void - should_fail_if_cartesian_predicate_used_against_geodetic_property_with_search_index() { - try { - GraphTraversal traversal = - g.V() - .has( - "user", - "pointPropWithGeoBounds_search", - Geo.inside(Point.fromCoordinates((double) 30, (double) 30), 14.142135623730951)) - .values("full_name"); - traversal.toList(); - fail("Should have failed executing the traversal because the property type is incorrect"); - } catch (InvalidQueryException e) { - assertThat(e.getMessage()) - .contains("Distance units are required for queries against geodetic points."); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java new file mode 100644 index 00000000000..166f930e5f9 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java @@ -0,0 +1,169 @@ +/* + * Copyright DataStax, Inc. 
+ * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.dse.driver.api.core.graph.predicates.Geo; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.Vertex; +import org.assertj.core.api.Assumptions; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public abstract class GraphGeoSearchIndexITBase { + + protected abstract boolean isGraphBinary(); + + protected abstract GraphTraversalSource graphTraversalSource(); + + @DataProvider + public static Object[][] indexTypes() { + return new Object[][] {{"search"} + + // FIXME for some reason, materialized and secondary indices have decided not to work + // I get an exception saying "there is no index for this query, here is the defined + // indices: " and the list contains the indices that are needed. Mysterious. 
+ // There may be something to do with differences in the CCMBridge adapter of the new + // driver, some changes make materialized views and secondary indices to be not + // considered for graph: + // + // , {"materialized"} + // , {"secondary"} + }; + } + + @UseDataProvider("indexTypes") + @Test + public void search_by_distance_cartesian_graphson(String indexType) { + // cartesian is not supported by graph_binary + Assumptions.assumeThat(isGraphBinary()).isFalse(); + // in cartesian geometry, the distance between POINT(30 30) and POINT(40 40) is exactly + // 14.142135623730951 + // any point further than that should be detected outside of the range. + // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the + // range for classic. + + GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithBounds_" + indexType, + Geo.inside(Point.fromCoordinates(30, 30), 14.142135623730951)) + .values("full_name"); + assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); + } + + @UseDataProvider("indexTypes") + @Test + public void search_by_distance_geodetic(String indexType) { + // in geodetic geometry, the distance between POINT(30 30) and POINT(40 40) is exactly + // 12.908258700131379 + // any point further than that should be detected outside of the range. + // the vertex "Paul Thomas Joe" is at POINT(40.0001 40), and shouldn't be detected inside the + // range. 
+ GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithGeoBounds_" + indexType, + Geo.inside(Point.fromCoordinates(30, 30), 12.908258700131379, Geo.Unit.DEGREES)) + .values("full_name"); + assertThat(traversal.toList()).containsOnly("George Bill Steve", "Jill Alice"); + } + + @Test + public void + should_fail_if_geodetic_predicate_used_against_cartesian_property_with_search_index() { + + // for graph_binary cartesian properties are not supported, thus it does not fail + if (isGraphBinary()) { + assertThatCode( + () -> { + GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithBounds_search", + Geo.inside( + Point.fromCoordinates(30, 30), + 12.908258700131379, + Geo.Unit.DEGREES)) + .values("full_name"); + traversal.toList(); + }) + .doesNotThrowAnyException(); + } else { + try { + GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithBounds_search", + Geo.inside(Point.fromCoordinates(30, 30), 12.908258700131379, Geo.Unit.DEGREES)) + .values("full_name"); + traversal.toList(); + fail("Should have failed executing the traversal because the property type is incorrect"); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains("Distance units cannot be used in queries against non-geodetic points."); + } + } + } + + @Test + public void + should_fail_if_cartesian_predicate_used_against_geodetic_property_with_search_index() { + + if (isGraphBinary()) { + try { + GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithGeoBounds_search", + Geo.inside(Point.fromCoordinates(30, 30), 14.142135623730951)) + .values("full_name"); + traversal.toList(); + fail("Should have failed executing the traversal because the property type is incorrect"); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains("Predicate 'insideCartesian' is not supported on property"); + } + } else { + try { + 
GraphTraversal traversal = + graphTraversalSource() + .V() + .has( + "user", + "pointPropWithGeoBounds_search", + Geo.inside(Point.fromCoordinates(30, 30), 14.142135623730951)) + .values("full_name"); + traversal.toList(); + fail("Should have failed executing the traversal because the property type is incorrect"); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains("Distance units are required for queries against geodetic points."); + } + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java similarity index 60% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java index 5f2df44cd39..9a8b3d2eedc 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java @@ -18,139 +18,28 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.graph.predicates.Search; -import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.shaded.guava.common.base.Joiner; -import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import java.util.ArrayList; -import java.util.Collection; import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.junit.BeforeClass; -import org.junit.ClassRule; import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") @RunWith(DataProviderRunner.class) -public class GraphTextSearchIndexIT { +public abstract class GraphTextSearchIndexITBase { - private static CustomCcmRule ccmRule = - CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); + protected abstract boolean isGraphBinary(); - private static SessionRule sessionRule = - SessionRule.builder(ccmRule).withCreateGraph().build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - - private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(sessionRule.session()).build()); - - /** - * A schema representing an address book with 3 properties (full_name_*, description_*, alias_*) - * created for each type of index (search, secondary, materialized). 
- */ - public static Collection textIndices() { - Object[][] providerIndexTypes = indexTypes(); - String[] indexTypes = new String[providerIndexTypes.length]; - for (int i = 0; i < providerIndexTypes.length; i++) { - indexTypes[i] = (String) providerIndexTypes[i][0]; - } - - StringBuilder schema = new StringBuilder(""); - StringBuilder propertyKeys = new StringBuilder(""); - StringBuilder vertexLabel = new StringBuilder("schema.vertexLabel('user').properties("); - StringBuilder indices = new StringBuilder(""); - StringBuilder vertex0 = new StringBuilder("g.addV('user')"); - StringBuilder vertex1 = new StringBuilder("g.addV('user')"); - StringBuilder vertex2 = new StringBuilder("g.addV('user')"); - StringBuilder vertex3 = new StringBuilder("g.addV('user')"); - - ArrayList propertyNames = new ArrayList(); - for (String indexType : indexTypes) { - propertyKeys.append( - String.format( - "schema.propertyKey('full_name_%s').Text().create()\n" - + "schema.propertyKey('description_%s').Text().create()\n" - + "schema.propertyKey('alias_%s').Text().create()\n", - indexType, indexType, indexType)); - - propertyNames.add("'full_name_" + indexType + "'"); - propertyNames.add("'description_" + indexType + "'"); - propertyNames.add("'alias_" + indexType + "'"); - - if (indexType.equals("search")) { - indices.append( - "schema.vertexLabel('user').index('search').search().by('full_name_search').asString().by('description_search').asText().by('alias_search').asString().add()\n"); - } else { - indices.append( - String.format( - "schema.vertexLabel('user').index('by_full_name_%s').%s().by('full_name_%s').add()\n", - indexType, indexType, indexType)); - indices.append( - String.format( - "schema.vertexLabel('user').index('by_description_%s').%s().by('description_%s').add()\n", - indexType, indexType, indexType)); - indices.append( - String.format( - "schema.vertexLabel('user').index('by_alias_name_%s').%s().by('alias_%s').add()\n", - indexType, indexType, indexType)); - } - - 
vertex0.append( - String.format( - ".property('full_name_%s', 'Paul Thomas Joe').property('description_%s', 'Lives by the hospital').property('alias_%s', 'mario')", - indexType, indexType, indexType)); - vertex1.append( - String.format( - ".property('full_name_%s', 'George Bill Steve').property('description_%s', 'A cold dude').property('alias_%s', 'wario')", - indexType, indexType, indexType)); - vertex2.append( - String.format( - ".property('full_name_%s', 'James Paul Joe').property('description_%s', 'Likes to hang out').property('alias_%s', 'bowser')", - indexType, indexType, indexType)); - vertex3.append( - String.format( - ".property('full_name_%s', 'Jill Alice').property('description_%s', 'Enjoys a very nice cold coca cola').property('alias_%s', 'peach')", - indexType, indexType, indexType)); - } - - vertexLabel.append(Joiner.on(", ").join(propertyNames)); - vertexLabel.append(").create()\n"); - - schema.append(propertyKeys).append(vertexLabel).append(indices); - - return Lists.newArrayList( - SampleGraphScripts.MAKE_STRICT, - schema.toString(), - vertex0.toString(), - vertex1.toString(), - vertex2.toString(), - vertex3.toString()); - } - - @BeforeClass - public static void setup() { - for (String setupQuery : textIndices()) { - sessionRule.session().execute(ScriptGraphStatement.newInstance(setupQuery)); - } - - ccmRule.getCcmBridge().reloadCore(1, sessionRule.getGraphName(), "user_p", true); - } + protected abstract GraphTraversalSource graphTraversalSource(); @DataProvider public static Object[][] indexTypes() { return new Object[][] {{"search"} - // for some reason, materialized and secondary indices have decided not to work + // FIXME for some reason, materialized and secondary indices have decided not to work // I get an exception saying "there is no index for this query, here is the defined // indices: " and the list contains the indices that are needed. Mysterious. 
// There may be something to do with differences in the CCMBridge adapter of the new @@ -175,7 +64,8 @@ public static Object[][] indexTypes() { public void search_by_prefix_search(String indexType) { // Only one user with full_name starting with Paul. GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "full_name_" + indexType, Search.prefix("Paul")) .values("full_name_" + indexType); assertThat(traversal.toList()).containsOnly("Paul Thomas Joe"); @@ -194,7 +84,8 @@ public void search_by_prefix_search(String indexType) { public void search_by_regex(String indexType) { // Only two people with names containing pattern for Paul. GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "full_name_" + indexType, Search.regex(".*Paul.*")) .values("full_name_" + indexType); assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "James Paul Joe"); @@ -215,7 +106,8 @@ public void search_by_regex(String indexType) { public void search_by_fuzzy(String indexType) { // Alias matches 'awrio' fuzzy GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "alias_" + indexType, Search.fuzzy("awrio", 1)) .values("full_name_" + indexType); // Should not match 'Paul Thomas Joe' since alias is 'mario', which is at distance 2 of 'awrio' @@ -238,7 +130,8 @@ public void search_by_fuzzy(String indexType) { public void search_by_token(String indexType) { // Description containing token 'cold' GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "description_" + indexType, Search.token("cold")) .values("full_name_" + indexType); assertThat(traversal.toList()).containsOnly("Jill Alice", "George Bill Steve"); @@ -255,7 +148,8 @@ public void search_by_token(String indexType) { public void search_by_token_prefix(String indexType) { // Description containing a token starting with h GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "description_" + indexType, 
Search.tokenPrefix("h")) .values("full_name_" + indexType); assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "James Paul Joe"); @@ -273,7 +167,8 @@ public void search_by_token_prefix(String indexType) { public void search_by_token_regex(String indexType) { // Description containing nice or hospital GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "description_" + indexType, Search.tokenRegex("(nice|hospital)")) .values("full_name_" + indexType); assertThat(traversal.toList()).containsOnly("Paul Thomas Joe", "Jill Alice"); @@ -294,7 +189,8 @@ public void search_by_token_regex(String indexType) { public void search_by_token_fuzzy(String indexType) { // Description containing 'lives' fuzzy GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "description_" + indexType, Search.tokenFuzzy("lieks", 1)) .values("full_name_" + indexType); // Should not match 'Paul Thomas Joe' since description contains 'Lives' which is at distance of @@ -318,7 +214,8 @@ public void search_by_token_fuzzy(String indexType) { public void search_by_phrase(String indexType) { // Full name contains phrase "Paul Joe" GraphTraversal traversal = - g.V() + graphTraversalSource() + .V() .has("user", "description_" + indexType, Search.phrase("a cold", 2)) .values("full_name_" + indexType); // Should match 'George Bill Steve' since 'A cold dude' is at distance of 0 for 'a cold'. 
diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java index 530a5e38ddd..b5b66217410 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/SampleGraphScripts.java @@ -26,7 +26,7 @@ public class SampleGraphScripts { public static final String ALLOW_SCANS = "schema.config().option('graph.allow_scan').set('true');\n"; - public static final String MODERN_SCHEMA = + private static final String CLASSIC_SCHEMA = "schema.propertyKey('name').Text().ifNotExists().create();\n" + "schema.propertyKey('age').Int().ifNotExists().create();\n" + "schema.propertyKey('lang').Text().ifNotExists().create();\n" @@ -36,18 +36,27 @@ public class SampleGraphScripts { + "schema.edgeLabel('created').properties('weight').connection('person', 'software').ifNotExists().create();\n" + "schema.edgeLabel('knows').properties('weight').connection('person', 'person').ifNotExists().create();\n"; - public static String MODERN_GRAPH = - MODERN_SCHEMA - + "marko = g.addV('person').property('name', 'marko').property('age', 29).next();\n" + private static final String INSERT_DATA = + "marko = g.addV('person').property('name', 'marko').property('age', 29).next();\n" + "vadas = g.addV('person').property('name', 'vadas').property('age', 27).next();\n" + "josh = g.addV('person').property('name', 'josh').property('age', 32).next();\n" + "peter = g.addV('person').property('name', 'peter').property('age', 35).next();\n" + "lop = g.addV('software').property('name', 'lop').property('lang', 'java').next();\n" + "ripple = g.addV('software').property('name', 'ripple').property('lang', 'java').next();\n" - + "g.addE('knows').from(marko).to(vadas).property('weight', 0.5f).next();\n" - + "g.addE('knows').from(marko).to(josh).property('weight', 
1.0f).next();\n" - + "g.addE('created').from(marko).to(lop).property('weight', 0.4f).next();\n" - + "g.addE('created').from(josh).to(ripple).property('weight', 1.0f).next();\n" - + "g.addE('created').from(josh).to(lop).property('weight', 0.4f).next();\n" - + "g.addE('created').from(peter).to(lop).property('weight', 0.2f);"; + + "g.V().has('name', 'marko').as('marko').V().has('name', 'vadas').as('vadas').addE('knows').from('marko').property('weight', 0.5f).next();\n" + + "g.V().has('name', 'marko').as('marko').V().has('name', 'josh').as('josh').addE('knows').from('marko').property('weight', 1.0f).next();\n" + + "g.V().has('name', 'marko').as('marko').V().has('name', 'lop').as('lop').addE('created').from('marko').property('weight', 0.4f).next();\n" + + "g.V().has('name', 'josh').as('josh').V().has('name', 'ripple').as('ripple').addE('created').from('josh').property('weight', 1.0f).next();\n" + + "g.V().has('name', 'josh').as('josh').V().has('name', 'lop').as('lop').addE('created').from('josh').property('weight', 0.4f).next();\n" + + "g.V().has('name', 'peter').as('peter').V().has('name', 'lop').as('lop').addE('created').from('peter').property('weight', 0.2f);"; + + public static String CLASSIC_GRAPH = CLASSIC_SCHEMA + INSERT_DATA; + + private static final String CORE_SCHEMA = + "schema.vertexLabel('person').ifNotExists().partitionBy('name', Text).property('age', Int).create();\n" + + "schema.vertexLabel('software').ifNotExists().partitionBy('name', Text).property('lang', Text).create();\n" + + "schema.edgeLabel('created').ifNotExists().from('person').to('software').property('weight', Float).create();\n" + + "schema.edgeLabel('knows').ifNotExists().from('person').to('person').property('weight', Float).create();\n"; + + public static String CORE_GRAPH = CORE_SCHEMA + INSERT_DATA; } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java new file mode 100644 index 00000000000..7297bf2cf43 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java @@ -0,0 +1,79 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement( + min = "5.0.9", + description = "DSE 5.0.9 required for inserting edges and vertices script.") +public class ClassicGraphTraversalRemoteIT extends GraphTraversalRemoteITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.CLASSIC_GRAPH)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return false; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(session()).build()); + } + + @Override + protected SocialTraversalSource socialTraversalSource() { + return EmptyGraph.instance() + .traversal(SocialTraversalSource.class) + .withRemote(DseGraph.remoteConnectionBuilder(session()).build()); + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java new file mode 100644 index 00000000000..5f7a4759e91 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.remote; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.DseGraph; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8", description = "DSE 6.8 required for Core graph support") +public class CoreGraphTraversalRemoteIT extends GraphTraversalRemoteITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.CORE_GRAPH)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return true; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return 
AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(session()).build()) + .with("allow-filtering"); + } + + @Override + protected SocialTraversalSource socialTraversalSource() { + return EmptyGraph.instance() + .traversal(SocialTraversalSource.class) + .withRemote(DseGraph.remoteConnectionBuilder(session()).build()) + .with("allow-filtering"); + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java index 690d5443f10..1cc614dec5e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -35,6 +35,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +// INFO: multi props are not supported in Core @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMultiPropertiesRemoteIT { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java similarity index 60% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java index 13ea575fe21..31fff66d1a7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java @@ -16,19 +16,20 @@ package com.datastax.dse.driver.api.core.graph.remote; import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsLabel; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsProperties; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.fail; -import com.datastax.dse.driver.api.core.graph.DseGraph; -import com.datastax.dse.driver.api.core.graph.GraphTestSupport; -import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.Assertions; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.dse.driver.api.core.graph.TinkerPathAssert; +import com.datastax.dse.driver.api.core.graph.__; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -41,45 +42,25 @@ import org.apache.tinkerpop.gremlin.process.traversal.Traversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.apache.tinkerpop.gremlin.structure.Direction; import 
org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; -import org.assertj.core.api.Assertions; -import org.junit.BeforeClass; -import org.junit.ClassRule; +import org.assertj.core.api.Assumptions; import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; - -@DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") -public class GraphTraversalRemoteIT { - - private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - - private static final SessionRule SESSION_RULE = - GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - - @ClassRule - public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - - @BeforeClass - public static void setupSchema() { - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - } - private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); +public abstract class GraphTraversalRemoteITBase { + + protected abstract CqlSession session(); + + protected abstract boolean isGraphBinary(); + + protected abstract GraphTraversalSource graphTraversalSource(); + + protected abstract SocialTraversalSource socialTraversalSource(); + + protected abstract CustomCcmRule ccmRule(); /** * Ensures that a previously returned {@link Vertex}'s {@link Vertex#id()} can be used as an input @@ -91,9 +72,18 @@ public static void setupSchema() { */ @Test public void should_use_vertex_id_as_parameter() { + GraphTraversalSource g = 
graphTraversalSource(); + // given an existing vertex Vertex marko = g.V().hasLabel("person").has("name", "marko").next(); - assertThat(marko).hasProperty("name", "marko"); + if (isGraphBinary()) { + Map properties = + g.V().hasLabel("person").has("name", "marko").elementMap("name").next(); + + assertThat(properties).containsEntry("name", "marko"); + } else { + assertThat(marko).hasProperty("name", "marko"); + } // then should be able to retrieve that same vertex by id. assertThat(g.V(marko.id()).next()).isEqualTo(marko); @@ -109,20 +99,47 @@ public void should_use_vertex_id_as_parameter() { */ @Test public void should_use_edge_is_as_parameter() { + GraphTraversalSource g = graphTraversalSource(); + // given an existing edge Edge created = g.E().has("weight", 0.2f).next(); - assertThat(created).hasProperty("weight", 0.2f).hasInVLabel("software").hasOutVLabel("person"); + if (isGraphBinary()) { + List> properties = + g.E().has("weight").elementMap("weight", "software", "person").toList(); + + assertThat(properties) + .anySatisfy( + props -> { + assertThatContainsProperties(props, "weight", 0.2f); + assertThatContainsLabel(props, Direction.IN, "software"); + assertThatContainsLabel(props, Direction.OUT, "person"); + }); + + } else { + assertThat(created) + .hasProperty("weight", 0.2f) + .hasInVLabel("software") + .hasOutVLabel("person"); + } // should be able to retrieve incoming and outgoing vertices by edge id - Vertex in = g.E(created.id()).inV().next(); - Vertex out = g.E(created.id()).outV().next(); + if (isGraphBinary()) { + Map inProperties = g.E(created.id()).inV().elementMap("name", "lang").next(); + Map outProperties = g.E(created.id()).outV().elementMap("name").next(); + assertThatContainsProperties(inProperties, "name", "lop", "lang", "java"); + assertThatContainsProperties(outProperties, "name", "peter"); + + } else { + Vertex in = g.E(created.id()).inV().next(); + Vertex out = g.E(created.id()).outV().next(); - // should resolve to lop - 
assertThat(in).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); + // should resolve to lop + assertThat(in).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); - // should resolve to marko, josh and peter whom created lop. - assertThat(out).hasLabel("person").hasProperty("name", "peter"); + // should resolve to marko, josh and peter whom created lop. + assertThat(out).hasLabel("person").hasProperty("name", "peter"); + } } /** @@ -133,19 +150,25 @@ public void should_use_edge_is_as_parameter() { */ @Test public void should_deserialize_vertex_id_as_map() { + GraphTraversalSource g = graphTraversalSource(); // given an existing vertex Vertex marko = g.V().hasLabel("person").has("name", "marko").next(); // then id should be a map with expected values. - // Note: this is pretty dependent on DSE Graphs underlying id structure which may vary in the - // future. - @SuppressWarnings("unchecked") - Map id = (Map) marko.id(); - assertThat(id) - .hasSize(3) - .containsEntry("~label", "person") - .containsKey("community_id") - .containsKey("member_id"); + // Note: this is pretty dependent on DSE Graphs underlying id structure which may vary in + // the future. + if (isGraphBinary()) { + assertThat(((String) marko.id())).contains("marko"); + assertThat(marko.label()).isEqualTo("person"); + } else { + @SuppressWarnings("unchecked") + Map id = (Map) marko.id(); + assertThat(id) + .hasSize(3) + .containsEntry("~label", "person") + .containsKey("community_id") + .containsKey("member_id"); + } } /** @@ -161,8 +184,9 @@ public void should_deserialize_vertex_id_as_map() { */ @Test public void should_handle_result_object_of_mixed_types() { - // find all software vertices and select name, language, and find all vertices that created such - // software. + GraphTraversalSource g = graphTraversalSource(); + // find all software vertices and select name, language, and find all vertices that created + // such software. 
List> results = g.V() .hasLabel("software") @@ -184,14 +208,24 @@ public void should_handle_result_object_of_mixed_types() { @SuppressWarnings("unchecked") List vertices = (List) result.get("c"); if (result.get("a").equals("lop")) { - // lop, 'c' should contain marko, josh, peter. - assertThat(vertices) - .extracting(vertex -> vertex.property("name").value()) - .containsOnly("marko", "josh", "peter"); + if (isGraphBinary()) { + // should contain three vertices + assertThat((vertices).size()).isEqualTo(3); + } else { + // lop, 'c' should contain marko, josh, peter. + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("marko", "josh", "peter"); + } } else { - assertThat(vertices) - .extracting(vertex -> vertex.property("name").value()) - .containsOnly("josh"); + if (isGraphBinary()) { + // has only one label + assertThat((vertices).size()).isEqualTo(1); + } else { + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("josh"); + } } } } @@ -203,7 +237,10 @@ public void should_handle_result_object_of_mixed_types() { * the edges that connect them. */ @Test - public void should_handle_subgraph() { + public void should_handle_subgraph_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + + GraphTraversalSource g = graphTraversalSource(); // retrieve a subgraph on the knows relationship, this omits the created edges. Graph graph = (Graph) g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph").next(); @@ -212,6 +249,24 @@ public void should_handle_subgraph() { assertThat(graph.vertices()).toIterable().hasSize(3); } + /** + * Ensures that a traversal that returns a sub graph can be retrieved. + * + *

      The subgraph is all members in a knows relationship, thus is all people who marko knows and + * the edges that connect them. + */ + @Test + public void should_handle_subgraph_graph_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + + GraphTraversalSource g = graphTraversalSource(); + // retrieve a subgraph on the knows relationship, this omits the created edges. + String graph = (String) g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph").next(); + + // there should only be 2 edges (since there are are only 2 knows relationships) and 3 vertices + assertThat(graph).contains("vertices:3").contains("edges:2"); + } + /** * Ensures a traversal that yields no results is properly retrieved and is empty. * @@ -219,7 +274,13 @@ public void should_handle_subgraph() { */ @Test public void should_return_zero_results() { - assertThat(g.V().hasLabel("notALabel").toList()).isEmpty(); + if (isGraphBinary()) { + assertThatThrownBy(() -> graphTraversalSource().V().hasLabel("notALabel").toList()) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("Unknown vertex label 'notALabel'"); + } else { + assertThat(graphTraversalSource().V().hasLabel("notALabel").toList()).isEmpty(); + } } /** @@ -236,7 +297,14 @@ public void should_parse_tree() { // created. @SuppressWarnings("unchecked") Tree tree = - g.V().hasLabel("person").out("knows").out("created").tree().by("name").next(); + graphTraversalSource() + .V() + .hasLabel("person") + .out("knows") + .out("created") + .tree() + .by("name") + .next(); // Marko knows josh who created lop and ripple. assertThat(tree).tree("marko").tree("josh").tree("lop").isLeaf(); @@ -261,7 +329,8 @@ public void should_parse_tree() { public void should_handle_lambdas() { // Find all people marko knows and the software they created. 
List software = - g.V() + graphTraversalSource() + .V() .hasLabel("person") .filter(__.has("name", "marko")) .out("knows") @@ -286,13 +355,15 @@ public void should_handle_lambdas() { */ @Test public void should_handle_tryNext() { - GraphTraversal traversal = g.V().hasLabel("person").has("name", "marko"); + GraphTraversal traversal = + graphTraversalSource().V().hasLabel("person").has("name", "marko"); // value present Optional v0 = traversal.tryNext(); assertThat(v0.isPresent()).isTrue(); - //noinspection OptionalGetWithoutIsPresent - assertThat(v0.get()).hasProperty("name", "marko"); + if (!isGraphBinary()) { + assertThat(v0.get()).hasProperty("name", "marko"); + } // value absent as there was only 1 matching vertex. Optional v1 = traversal.tryNext(); @@ -309,8 +380,10 @@ public void should_handle_tryNext() { * @test_category dse:graph */ @Test - public void should_handle_streaming() { - GraphTraversal traversal = g.V().hasLabel("person"); + public void should_handle_streaming_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + + GraphTraversal traversal = graphTraversalSource().V().hasLabel("person"); // retrieve all person vertices to stream, and filter on client side all persons under age 30 // and map to their name. List under30 = @@ -326,6 +399,36 @@ public void should_handle_streaming() { assertThat(traversal.toStream().collect(Collectors.toList())).isEmpty(); } + /** + * Validates that {@link GraphTraversal#toStream()} appropriately creates a stream from the + * underlying iterator on the traversal, and then an attempt to call toStream again yields no + * results. + * + *

      This is more of a test of Tinkerpop than the protocol between the client and DSE graph. + * + * @test_category dse:graph + */ + @Test + public void should_handle_streaming_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + + GraphTraversal> traversal = + graphTraversalSource().V().hasLabel("person").elementMap("age", "name"); + // retrieve all person vertices to stream, and filter on client side all persons under age 30 + // and map to their name. + List under30 = + traversal + .toStream() + .filter(v -> (Integer) v.get("age") < 30) + .map(v -> (String) v.get("name")) + .collect(Collectors.toList()); + + assertThat(under30).containsOnly("marko", "vadas"); + + // attempt to get a stream again, which should be empty. + assertThat(traversal.toStream().collect(Collectors.toList())).isEmpty(); + } + /** * Validates that when traversing a path and labeling some of the elements during the traversal * that the output elements are properly labeled. @@ -336,7 +439,8 @@ public void should_handle_streaming() { public void should_resolve_path_with_some_labels() { // given a traversal where some objects have labels. List paths = - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .as("a") @@ -372,7 +476,8 @@ public void should_resolve_path_with_some_labels() { public void should_resolve_path_with_labels() { // given a traversal where all objects have labels. List paths = - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .as("a") @@ -411,7 +516,8 @@ public void should_resolve_path_with_labels() { public void should_resolve_path_without_labels() { // given a traversal where no objects have labels. 
List paths = - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .outE("knows") @@ -430,10 +536,13 @@ public void should_resolve_path_without_labels() { } @Test - public void should_handle_asynchronous_execution() { + public void should_handle_asynchronous_execution_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + StringBuilder names = new StringBuilder(); - CompletableFuture> future = g.V().hasLabel("person").promise(Traversal::toList); + CompletableFuture> future = + graphTraversalSource().V().hasLabel("person").promise(Traversal::toList); try { // dumb processing to make sure the completable future works correctly and correct results are // returned @@ -448,6 +557,25 @@ public void should_handle_asynchronous_execution() { assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); } + @Test + public void should_handle_asynchronous_execution_graph_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + + StringBuilder names = new StringBuilder(); + + CompletableFuture> future = + graphTraversalSource().V().hasLabel("person").promise(Traversal::toList); + try { + // dumb processing to make sure the completable future works correctly and correct results are + // returned + future.thenAccept(vertices -> vertices.forEach(vertex -> names.append(vertex.id()))).get(); + } catch (InterruptedException | ExecutionException e) { + fail("Shouldn't have thrown an exception waiting for the result to complete"); + } + + assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); + } + /** * Validates that if a traversal is made that encounters an error on the server side that the * exception is set on the future. 
@@ -457,7 +585,8 @@ public void should_handle_asynchronous_execution() { @Test @DseRequirement(min = "5.1.0") public void should_fail_future_returned_from_promise_on_query_error() throws Exception { - CompletableFuture future = g.V("invalidid").peerPressure().promise(Traversal::next); + CompletableFuture future = + graphTraversalSource().V("invalidid").peerPressure().promise(Traversal::next); try { future.get(); @@ -474,12 +603,10 @@ public void should_fail_future_returned_from_promise_on_query_error() throws Exc * @test_category dse:graph */ @Test - public void should_allow_use_of_dsl() throws Exception { - SocialTraversalSource gSocial = - EmptyGraph.instance() - .traversal(SocialTraversalSource.class) - .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); - List vertices = gSocial.persons("marko").knows("vadas").toList(); + public void should_allow_use_of_dsl_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + + List vertices = socialTraversalSource().persons("marko").knows("vadas").toList(); assertThat(vertices.size()).isEqualTo(1); assertThat(vertices.get(0)) .hasProperty("name", "marko") @@ -487,6 +614,24 @@ public void should_allow_use_of_dsl() throws Exception { .hasLabel("person"); } + /** + * A simple smoke test to ensure that a user can supply a custom {@link GraphTraversalSource} for + * use with DSLs. + * + * @test_category dse:graph + */ + @Test + public void should_allow_use_of_dsl_graph_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + + List> vertices = + socialTraversalSource().persons("marko").knows("vadas").elementMap("name", "age").toList(); + assertThat(vertices.size()).isEqualTo(1); + + assertThatContainsProperties(vertices.get(0), "name", "marko", "age", 29); + assertThat(vertices.get(0).values()).contains("person"); + } + /** * Ensures that traversals with barriers (which return results bulked) contain the correct amount * of end results. 
@@ -495,8 +640,12 @@ public void should_allow_use_of_dsl() throws Exception { */ @Test public void should_return_correct_results_when_bulked() { + Optional dseVersion = ccmRule().getCcmBridge().getDseVersion(); + Assumptions.assumeThat( + dseVersion.isPresent() && dseVersion.get().compareTo(Version.parse("5.1.2")) > 0) + .isTrue(); - List results = g.E().label().barrier().toList(); + List results = graphTraversalSource().E().label().barrier().toList(); Collections.sort(results); List expected = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java new file mode 100644 index 00000000000..bfcb54a535d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java @@ -0,0 +1,65 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.0", description = "DSE 6.0 required for BatchGraphStatement.") +public class ClassicGraphTraversalBatchIT extends GraphTraversalBatchITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = EmptyGraph.instance().traversal(); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_NOT_STRICT)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return false; + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } + + @Override + 
protected GraphTraversalSource graphTraversalSource() { + return g; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java new file mode 100644 index 00000000000..b69412a6a07 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java @@ -0,0 +1,78 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement( + min = "5.0.9", + description = "DSE 5.0.9 required for inserting edges and vertices script.") +public class ClassicGraphTraversalIT extends GraphTraversalITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); + + 
@ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource graphTraversalSource = EmptyGraph.instance().traversal(); + private final SocialTraversalSource socialTraversal = + EmptyGraph.instance().traversal(SocialTraversalSource.class); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.CLASSIC_GRAPH)); + SESSION_RULE + .session() + .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return false; + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return graphTraversalSource; + } + + @Override + protected SocialTraversalSource socialTraversalSource() { + return socialTraversal; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java new file mode 100644 index 00000000000..11cbd453b9c --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. 
Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +public class CoreGraphTraversalBatchIT extends GraphTraversalBatchITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final GraphTraversalSource g = EmptyGraph.instance().traversal().with("allow-filtering"); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.CORE_GRAPH)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return true; + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return g; + } +} diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java new file mode 100644 index 00000000000..f45cb5b06f1 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.GraphTestSupport; +import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +public class CoreGraphTraversalIT extends GraphTraversalITBase { + + private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); + + private static final DseSessionRule SESSION_RULE = + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private final 
GraphTraversalSource graphTraversalSource = + EmptyGraph.instance().traversal().with("allow-filtering"); + private final SocialTraversalSource socialTraversalSource = + EmptyGraph.instance().traversal(SocialTraversalSource.class).with("allow-filtering"); + + @BeforeClass + public static void setupSchema() { + SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(SampleGraphScripts.CORE_GRAPH)); + } + + @Override + protected DseSession session() { + return SESSION_RULE.session(); + } + + @Override + protected boolean isGraphBinary() { + return true; + } + + @Override + protected CustomCcmRule ccmRule() { + return CCM_RULE; + } + + @Override + protected GraphTraversalSource graphTraversalSource() { + return graphTraversalSource; + } + + @Override + protected SocialTraversalSource socialTraversalSource() { + return socialTraversalSource; + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java deleted file mode 100644 index aee7f5c0d32..00000000000 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchIT.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.api.core.graph.statement; - -import static com.datastax.dse.driver.api.core.graph.DseGraph.g; -import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; -import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addE; -import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addV; -import static org.assertj.core.api.Assertions.fail; - -import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; -import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphTestSupport; -import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; -import org.apache.tinkerpop.gremlin.structure.Edge; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; - -@DseRequirement(min = "6.0") -public class GraphTraversalBatchIT { - - private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - - private static final SessionRule SESSION_RULE = - GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - - @ClassRule - public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - - @BeforeClass - public static void setupSchema() { - SESSION_RULE - .session() - 
.execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_NOT_STRICT)); - } - - @Test - public void should_allow_vertex_and_edge_insertions_in_batch() { - BatchGraphStatement batch = - BatchGraphStatement.builder() - .addTraversals( - ImmutableList.of( - addV("person").property("name", "batch1").property("age", 1), - addV("person").property("name", "batch2").property("age", 2))) - .build(); - - BatchGraphStatement batch2 = - BatchGraphStatement.builder() - .addTraversals(batch) - .addTraversal( - addE("knows") - .from(__.V().has("name", "batch1")) - .to(__.V().has("name", "batch2")) - .property("weight", 2.3f)) - .build(); - - assertThat(batch.size()).isEqualTo(2); - assertThat(batch2.size()).isEqualTo(3); - - SESSION_RULE.session().execute(batch2); - - assertThat( - SESSION_RULE - .session() - .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1"))) - .one() - .asVertex()) - .hasProperty("age", 1); - - assertThat( - SESSION_RULE - .session() - .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch2"))) - .one() - .asVertex()) - .hasProperty("age", 2); - - assertThat( - SESSION_RULE - .session() - .execute(FluentGraphStatement.newInstance(g.V().has("name", "batch1").bothE())) - .one() - .asEdge()) - .hasProperty("weight", 2.3f) - .hasOutVLabel("person") - .hasInVLabel("person"); - } - - @Test - public void should_fail_if_no_bytecode_in_batch() { - BatchGraphStatement batch = - BatchGraphStatement.builder().addTraversals(ImmutableList.of()).build(); - assertThat(batch.size()).isEqualTo(0); - try { - SESSION_RULE.session().execute(batch); - fail( - "Should have thrown InvalidQueryException because batch does not contain any traversals."); - } catch (InvalidQueryException e) { - assertThat(e.getMessage()) - .contains( - "Could not read the traversal from the request sent.", - "The batch statement sent does not contain any 
traversal."); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java new file mode 100644 index 00000000000..ffcae3faf58 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java @@ -0,0 +1,150 @@ +/* + * Copyright DataStax, Inc. + * + * This software can be used solely with DataStax Enterprise. Please consult the license at + * http://www.datastax.com/terms/datastax-dse-driver-license-terms + */ +package com.datastax.dse.driver.api.core.graph.statement; + +import static com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsLabel; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsProperties; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addE; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.addV; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.dse.driver.api.core.DseSession; +import com.datastax.dse.driver.api.core.graph.BatchGraphStatement; +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; +import org.apache.tinkerpop.gremlin.structure.Direction; +import org.apache.tinkerpop.gremlin.structure.Edge; +import org.junit.Test; + +public abstract class 
GraphTraversalBatchITBase { + + protected abstract DseSession session(); + + protected abstract boolean isGraphBinary(); + + protected abstract CustomCcmRule ccmRule(); + + protected abstract GraphTraversalSource graphTraversalSource(); + + @Test + public void should_allow_vertex_and_edge_insertions_in_batch() { + BatchGraphStatement batch = + BatchGraphStatement.builder() + .addTraversals( + ImmutableList.of( + addV("person").property("name", "batch1").property("age", 1), + addV("person").property("name", "batch2").property("age", 2))) + .build(); + + BatchGraphStatement batch2 = + BatchGraphStatement.builder() + .addTraversals(batch) + .addTraversal( + addE("knows") + .from(__.V().has("name", "batch1")) + .to(__.V().has("name", "batch2")) + .property("weight", 2.3f)) + .build(); + + assertThat(batch.size()).isEqualTo(2); + assertThat(batch2.size()).isEqualTo(3); + + session().execute(batch2); + + if (isGraphBinary()) { + Map properties = + session() + .execute( + FluentGraphStatement.newInstance( + graphTraversalSource().V().has("name", "batch1").elementMap("age"))) + .one() + .asMap(); + + assertThatContainsProperties(properties, "age", 1); + + properties = + session() + .execute( + FluentGraphStatement.newInstance( + graphTraversalSource().V().has("name", "batch2").elementMap("age"))) + .one() + .asMap(); + + assertThatContainsProperties(properties, "age", 2); + + properties = + session() + .execute( + FluentGraphStatement.newInstance( + graphTraversalSource() + .V() + .has("name", "batch1") + .bothE() + .elementMap("weight", "person"))) + .one() + .asMap(); + + assertThatContainsProperties(properties, "weight", 2.3f); + assertThatContainsLabel(properties, Direction.IN, "person"); + assertThatContainsLabel(properties, Direction.OUT, "person"); + + } else { + + assertThat( + session() + .execute( + FluentGraphStatement.newInstance( + graphTraversalSource().V().has("name", "batch1"))) + .one() + .asVertex()) + .hasProperty("age", 1); + + assertThat( + session() + 
.execute( + FluentGraphStatement.newInstance( + graphTraversalSource().V().has("name", "batch2"))) + .one() + .asVertex()) + .hasProperty("age", 2); + + assertThat( + session() + .execute( + FluentGraphStatement.newInstance( + graphTraversalSource().V().has("name", "batch1").bothE())) + .one() + .asEdge()) + .hasProperty("weight", 2.3f) + .hasOutVLabel("person") + .hasInVLabel("person"); + } + } + + @Test + public void should_fail_if_no_bytecode_in_batch() { + BatchGraphStatement batch = + BatchGraphStatement.builder().addTraversals(ImmutableList.of()).build(); + assertThat(batch.size()).isEqualTo(0); + try { + session().execute(batch); + fail( + "Should have thrown InvalidQueryException because batch does not contain any traversals."); + } catch (InvalidQueryException e) { + assertThat(e.getMessage()) + .contains( + "Could not read the traversal from the request sent.", + "The batch statement sent does not contain any traversal."); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java similarity index 60% rename from integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java rename to integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java index c6cc5a71262..5ed2a31830b 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java @@ -15,10 +15,12 @@ */ package com.datastax.dse.driver.api.core.graph.statement; -import static com.datastax.dse.driver.api.core.graph.DseGraph.g; import static com.datastax.dse.driver.api.core.graph.FluentGraphStatement.newInstance; import static 
com.datastax.dse.driver.api.core.graph.TinkerGraphAssertions.assertThat; import static com.datastax.dse.driver.api.core.graph.TinkerPathAssert.validatePathObjects; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsLabel; +import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.assertThatContainsProperties; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.fail; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; @@ -26,15 +28,13 @@ import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphTestSupport; -import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.List; @@ -43,42 +43,29 @@ import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import org.apache.tinkerpop.gremlin.process.traversal.Path; +import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__; import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree; +import org.apache.tinkerpop.gremlin.structure.Direction; import org.apache.tinkerpop.gremlin.structure.Edge; import org.apache.tinkerpop.gremlin.structure.Graph; import org.apache.tinkerpop.gremlin.structure.Vertex; -import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; -import org.junit.BeforeClass; -import org.junit.ClassRule; +import org.assertj.core.api.Assertions; +import org.assertj.core.api.Assumptions; import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; - -@DseRequirement(min = "6.0", description = "DSE 6 required for MODERN_GRAPH script (?)") -public class GraphTraversalIT { - - private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); - - private static final SessionRule SESSION_RULE = - GraphTestSupport.getClassicGraphSessionBuilder(CCM_RULE).build(); - - @ClassRule - public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - - @BeforeClass - public static void setupSchema() { - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MODERN_GRAPH)); - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.MAKE_STRICT)); - SESSION_RULE - .session() - .execute(ScriptGraphStatement.newInstance(SampleGraphScripts.ALLOW_SCANS)); - } + +public abstract class GraphTraversalITBase { + + protected abstract CqlSession session(); + + protected abstract boolean isGraphBinary(); + + protected abstract CustomCcmRule ccmRule(); + + protected abstract GraphTraversalSource graphTraversalSource(); + + protected abstract SocialTraversalSource socialTraversalSource(); /** * Ensures that a previously returned {@link Vertex}'s {@link Vertex#id()} can be used as an input @@ -90,16 +77,23 @@ public static void setupSchema() { */ @Test public void 
should_use_vertex_id_as_parameter() { - GraphResultSet resultSet = - SESSION_RULE.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + GraphTraversal query = + graphTraversalSource().V().hasLabel("person").has("name", "marko"); + GraphResultSet resultSet = session().execute(newInstance(query)); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); Vertex marko = results.get(0).asVertex(); - assertThat(marko).hasProperty("name", "marko"); + if (isGraphBinary()) { + Map properties = + session().execute(newInstance(query.elementMap("name"))).one().asMap(); + assertThatContainsProperties(properties, "name", "marko"); + } else { + assertThat(marko).hasProperty("name", "marko"); + } - resultSet = SESSION_RULE.session().execute(newInstance(g.V(marko.id()))); + resultSet = session().execute(newInstance(graphTraversalSource().V(marko.id()))); results = resultSet.all(); assertThat(results.size()).isEqualTo(1); @@ -118,49 +112,80 @@ public void should_use_vertex_id_as_parameter() { */ @Test public void should_use_edge_id_as_parameter() { - GraphResultSet resultSet = - SESSION_RULE.session().execute(newInstance(g.E().has("weight", 0.2f))); + GraphTraversal query = graphTraversalSource().E().has("weight", 0.2f); + GraphResultSet resultSet = session().execute(newInstance(query)); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); Edge created = results.get(0).asEdge(); - assertThat(created).hasProperty("weight", 0.2f).hasInVLabel("software").hasOutVLabel("person"); - - resultSet = SESSION_RULE.session().execute(newInstance(g.E(created.id()).inV())); - results = resultSet.all(); - assertThat(results.size()).isEqualTo(1); - Vertex lop = results.get(0).asVertex(); + if (isGraphBinary()) { + Map properties = + session() + .execute(newInstance(query.elementMap("weight", "software", "person"))) + .one() + .asMap(); + + assertThatContainsProperties(properties, "weight", 0.2f); + assertThatContainsLabel(properties, 
Direction.IN, "software"); + assertThatContainsLabel(properties, Direction.OUT, "person"); + } else { + assertThat(created) + .hasProperty("weight", 0.2f) + .hasInVLabel("software") + .hasOutVLabel("person"); + } - assertThat(lop).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); + if (isGraphBinary()) { + Map inProperties = + session() + .execute( + newInstance( + graphTraversalSource().E(created.id()).inV().elementMap("name", "lang"))) + .one() + .asMap(); + assertThatContainsProperties(inProperties, "name", "lop", "lang", "java"); + } else { + resultSet = session().execute(newInstance(graphTraversalSource().E(created.id()).inV())); + results = resultSet.all(); + assertThat(results.size()).isEqualTo(1); + Vertex lop = results.get(0).asVertex(); + + assertThat(lop).hasLabel("software").hasProperty("name", "lop").hasProperty("lang", "java"); + } } /** * A sanity check that a returned {@link Vertex}'s id is a {@link Map}. This test could break in * the future if the format of a vertex ID changes from a Map to something else in DSE. * - *

      // TODO: this test will break in NGDG - * * @test_category dse:graph */ @Test public void should_deserialize_vertex_id_as_map() { GraphResultSet resultSet = - SESSION_RULE.session().execute(newInstance(g.V().hasLabel("person").has("name", "marko"))); + session() + .execute( + newInstance(graphTraversalSource().V().hasLabel("person").has("name", "marko"))); List results = resultSet.all(); assertThat(results.size()).isEqualTo(1); Vertex marko = results.get(0).asVertex(); - assertThat(marko).hasProperty("name", "marko"); - @SuppressWarnings("unchecked") - Map id = (Map) marko.id(); - assertThat(id) - .hasSize(3) - .containsEntry("~label", "person") - .containsKey("community_id") - .containsKey("member_id"); + if (isGraphBinary()) { + assertThat(((String) marko.id())).contains("marko"); + assertThat(marko.label()).isEqualTo("person"); + } else { + assertThat(marko).hasProperty("name", "marko"); + @SuppressWarnings("unchecked") + Map id = (Map) marko.id(); + assertThat(id) + .hasSize(3) + .containsEntry("~label", "person") + .containsKey("community_id") + .containsKey("member_id"); + } } /** @@ -179,11 +204,11 @@ public void should_handle_result_object_of_mixed_types() { // find all software vertices and select name, language, and find all vertices that created such // software. GraphResultSet rs = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V() + graphTraversalSource() + .V() .hasLabel("software") .as("a", "b", "c") .select("a", "b", "c") @@ -210,21 +235,33 @@ public void should_handle_result_object_of_mixed_types() { GraphNode c = result.getByKey("c"); assertThat(c.isList()).isTrue(); if (result.getByKey("a").asString().equals("lop")) { - // 'c' should contain marko, josh, peter. - // Ensure we have three vertices. 
- assertThat(c.size()).isEqualTo(3); - List vertices = - Lists.newArrayList( - c.getByIndex(0).asVertex(), c.getByIndex(1).asVertex(), c.getByIndex(2).asVertex()); - assertThat(vertices) - .extracting(vertex -> vertex.property("name").value()) - .containsOnly("marko", "josh", "peter"); + if (isGraphBinary()) { + // should contain three vertices + Assertions.assertThat(c.size()).isEqualTo(3); + } else { + // 'c' should contain marko, josh, peter. + // Ensure we have three vertices. + assertThat(c.size()).isEqualTo(3); + List vertices = + Lists.newArrayList( + c.getByIndex(0).asVertex(), + c.getByIndex(1).asVertex(), + c.getByIndex(2).asVertex()); + assertThat(vertices) + .extracting(vertex -> vertex.property("name").value()) + .containsOnly("marko", "josh", "peter"); + } } else { - // ripple, 'c' should contain josh. - // Ensure we have 1 vertex. - assertThat(c.size()).isEqualTo(1); - Vertex vertex = c.getByIndex(0).asVertex(); - assertThat(vertex).hasProperty("name", "josh"); + if (isGraphBinary()) { + // has only one label + Assertions.assertThat(c.size()).isEqualTo(1); + } else { + // ripple, 'c' should contain josh. + // Ensure we have 1 vertex. 
+ assertThat(c.size()).isEqualTo(1); + Vertex vertex = c.getByIndex(0).asVertex(); + assertThat(vertex).hasProperty("name", "josh"); + } } } } @@ -236,8 +273,17 @@ public void should_handle_result_object_of_mixed_types() { */ @Test public void should_return_zero_results() { - GraphResultSet rs = SESSION_RULE.session().execute(newInstance(g.V().hasLabel("notALabel"))); - assertThat(rs.all().size()).isZero(); + if (isGraphBinary()) { + assertThatThrownBy( + () -> + session().execute(newInstance(graphTraversalSource().V().hasLabel("notALabel")))) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("Unknown vertex label 'notALabel'"); + } else { + GraphResultSet rs = + session().execute(newInstance(graphTraversalSource().V().hasLabel("notALabel"))); + assertThat(rs.all().size()).isZero(); + } } /** @@ -248,10 +294,12 @@ public void should_return_zero_results() { */ @Test public void should_return_zero_results_graphson_2() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + GraphStatement simpleGraphStatement = ScriptGraphStatement.newInstance("g.V().hasLabel('notALabel')"); - GraphResultSet rs = SESSION_RULE.session().execute(simpleGraphStatement); + GraphResultSet rs = session().execute(simpleGraphStatement); assertThat(rs.one()).isNull(); } @@ -272,11 +320,11 @@ public void should_return_zero_results_graphson_2() { public void should_handle_lambdas() { // Find all people marko knows and the software they created. 
GraphResultSet result = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V() + graphTraversalSource() + .V() .hasLabel("person") .filter(__.has("name", "marko")) .out("knows") @@ -298,11 +346,11 @@ public void should_handle_lambdas() { @Test public void should_resolve_path_with_some_labels() { GraphResultSet rs = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .as("a") @@ -338,11 +386,11 @@ public void should_resolve_path_with_some_labels() { @Test public void should_resolve_path_with_labels() { GraphResultSet rs = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .as("a") @@ -379,11 +427,11 @@ public void should_resolve_path_with_labels() { @Test public void should_resolve_path_without_labels() { GraphResultSet rs = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V() + graphTraversalSource() + .V() .hasLabel("person") .has("name", "marko") .outE("knows") @@ -414,11 +462,16 @@ public void should_parse_tree() { // Get a tree structure showing the paths from mark to people he knows to software they've // created. GraphResultSet rs = - SESSION_RULE - .session() + session() .execute( newInstance( - g.V().hasLabel("person").out("knows").out("created").tree().by("name"))); + graphTraversalSource() + .V() + .hasLabel("person") + .out("knows") + .out("created") + .tree() + .by("name"))); List results = rs.all(); assertThat(results.size()).isEqualTo(1); @@ -441,11 +494,17 @@ public void should_parse_tree() { * the edges that connect them. 
*/ @Test - public void should_handle_subgraph() { + public void should_handle_subgraph_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); GraphResultSet rs = - SESSION_RULE - .session() - .execute(newInstance(g.E().hasLabel("knows").subgraph("subGraph").cap("subGraph"))); + session() + .execute( + newInstance( + graphTraversalSource() + .E() + .hasLabel("knows") + .subgraph("subGraph") + .cap("subGraph"))); List results = rs.all(); assertThat(results.size()).isEqualTo(1); @@ -456,6 +515,33 @@ public void should_handle_subgraph() { assertThat(graph.vertices()).toIterable().hasSize(3); } + /** + * Ensures that a traversal that returns a sub graph can be retrieved. + * + *

      The subgraph is all members in a knows relationship, thus is all people who marko knows and + * the edges that connect them. + */ + @Test + public void should_handle_subgraph_grap_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + GraphResultSet rs = + session() + .execute( + newInstance( + graphTraversalSource() + .E() + .hasLabel("knows") + .subgraph("subGraph") + .cap("subGraph"))); + + List results = rs.all(); + assertThat(results.size()).isEqualTo(1); + + String graph = results.get(0).as(String.class); + + assertThat(graph).contains("vertices:3").contains("edges:2"); + } + /** * A simple smoke test to ensure that a user can supply a custom {@link GraphTraversalSource} for * use with DSLs. @@ -463,12 +549,13 @@ public void should_handle_subgraph() { * @test_category dse:graph */ @Test - public void should_allow_use_of_dsl() throws Exception { - SocialTraversalSource gSocial = EmptyGraph.instance().traversal(SocialTraversalSource.class); + public void should_allow_use_of_dsl_graphson() throws Exception { + Assumptions.assumeThat(isGraphBinary()).isFalse(); + SocialTraversalSource gSocial = socialTraversalSource(); GraphStatement gs = newInstance(gSocial.persons("marko").knows("vadas")); - GraphResultSet rs = SESSION_RULE.session().execute(gs); + GraphResultSet rs = session().execute(gs); List results = rs.all(); assertThat(results.size()).isEqualTo(1); @@ -478,6 +565,28 @@ public void should_allow_use_of_dsl() throws Exception { .hasLabel("person"); } + /** + * A simple smoke test to ensure that a user can supply a custom {@link GraphTraversalSource} for + * use with DSLs. 
+ * + * @test_category dse:graph + */ + @Test + public void should_allow_use_of_dsl_graph_binary() throws Exception { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + SocialTraversalSource gSocial = socialTraversalSource(); + + GraphStatement gs = + newInstance(gSocial.persons("marko").knows("vadas").elementMap("name", "age")); + + GraphResultSet rs = session().execute(gs); + List results = rs.all(); + + assertThat(results.size()).isEqualTo(1); + assertThatContainsProperties(results.get(0).asMap(), "name", "marko", "age", 29); + Assertions.assertThat(results.get(0).asMap().values()).contains("person"); + } + /** * Ensures that traversals with barriers (which return results bulked) contain the correct amount * of end results. @@ -486,7 +595,12 @@ public void should_allow_use_of_dsl() throws Exception { */ @Test public void should_return_correct_results_when_bulked() { - GraphResultSet rs = SESSION_RULE.session().execute(newInstance(g.E().label().barrier())); + Assumptions.assumeThat( + ccmRule().getCcmBridge().getDseVersion().get().compareTo(Version.parse("5.1.2")) > 0) + .isTrue(); + + GraphResultSet rs = + session().execute(newInstance(graphTraversalSource().E().label().barrier())); List results = rs.all().stream().map(GraphNode::asString).sorted().collect(Collectors.toList()); @@ -498,13 +612,14 @@ public void should_return_correct_results_when_bulked() { } @Test - public void should_handle_asynchronous_execution() { + public void should_handle_asynchronous_execution_graphson() { + Assumptions.assumeThat(isGraphBinary()).isFalse(); StringBuilder names = new StringBuilder(); CompletionStage future = - SESSION_RULE - .session() - .executeAsync(FluentGraphStatement.newInstance(g.V().hasLabel("person"))); + session() + .executeAsync( + FluentGraphStatement.newInstance(graphTraversalSource().V().hasLabel("person"))); try { // dumb processing to make sure the completable future works correctly and correct results are @@ -520,4 +635,29 @@ public void 
should_handle_asynchronous_execution() { assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); } + + @Test + public void should_handle_asynchronous_execution_graph_binary() { + Assumptions.assumeThat(isGraphBinary()).isTrue(); + StringBuilder names = new StringBuilder(); + + CompletionStage future = + session() + .executeAsync( + FluentGraphStatement.newInstance(graphTraversalSource().V().hasLabel("person"))); + + try { + // dumb processing to make sure the completable future works correctly and correct results are + // returned + Iterable results = + future.thenApply(AsyncGraphResultSet::currentPage).toCompletableFuture().get(); + for (GraphNode gn : results) { + names.append(gn.asVertex().id()); + } + } catch (InterruptedException | ExecutionException e) { + fail("Shouldn't have thrown an exception waiting for the result to complete"); + } + + assertThat(names.toString()).contains("peter", "marko", "vadas", "josh"); + } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java index 947eb59b04d..78bd336dc0a 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -36,6 +36,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +// INFO: multi props are not supported in Core @DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMultiPropertiesIT { From 82bad083d1fa57ff7d14c6ec136ea8ed9e4a6c6b Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 6 Dec 2019 12:45:51 +0100 Subject: [PATCH 340/979] JAVA-2568: Remove statement preparation logic from Graph request handlers 
(#318) --- .../ContinuousRequestHandlerBase.java | 1 + .../core/graph/GraphRequestHandler.java | 82 ------------------- 2 files changed, 1 insertion(+), 82 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index ea1df979c3f..ac6002db961 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -717,6 +717,7 @@ private void processResultResponse(@NonNull Result result, @Nullable Frame frame * @param errorMessage the error message received. */ private void processErrorResponse(@NonNull Error errorMessage) { + // graph does not use prepared statements if (errorMessage instanceof Unprepared) { processUnprepared((Unprepared) errorMessage); } else { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index cf7d6d37a21..8a5820aa9af 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -45,8 +45,6 @@ import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; -import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import 
com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -56,21 +54,16 @@ import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.session.DefaultSession; -import com.datastax.oss.driver.internal.core.session.RepreparePayload; import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.ProtocolConstants; -import com.datastax.oss.protocol.internal.request.Prepare; import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Result; -import com.datastax.oss.protocol.internal.response.error.Unprepared; import com.datastax.oss.protocol.internal.response.result.Rows; import com.datastax.oss.protocol.internal.response.result.Void; -import com.datastax.oss.protocol.internal.util.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.handler.codec.EncoderException; import io.netty.util.Timeout; @@ -637,81 +630,6 @@ public void onResponse(Frame responseFrame) { } private void processErrorResponse(Error errorMessage) { - if (errorMessage.code == ProtocolConstants.ErrorCode.UNPREPARED) { - ByteBuffer idToReprepare = ByteBuffer.wrap(((Unprepared) errorMessage).id); - LOG.trace( - "[{}] Statement {} is not prepared on {}, repreparing", - logPrefix, - Bytes.toHexString(idToReprepare), - node); - RepreparePayload repreparePayload = session.getRepreparePayloads().get(idToReprepare); - if (repreparePayload == null) { - throw new IllegalStateException( - String.format( - "Tried to execute unprepared query %s but we don't 
have the data to reprepare it", - Bytes.toHexString(idToReprepare))); - } - Prepare reprepareMessage = repreparePayload.toMessage(); - ThrottledAdminRequestHandler reprepareHandler = - ThrottledAdminRequestHandler.prepare( - channel, - reprepareMessage, - repreparePayload.customPayload, - timeout, - throttler, - sessionMetricUpdater, - logPrefix); - reprepareHandler - .start() - .handle( - (repreparedId, exception) -> { - if (exception != null) { - // If the error is not recoverable, surface it to the client instead of retrying - if (exception instanceof UnexpectedResponseException) { - Message prepareErrorMessage = - ((UnexpectedResponseException) exception).message; - if (prepareErrorMessage instanceof Error) { - CoordinatorException prepareError = - Conversions.toThrowable(node, (Error) prepareErrorMessage, context); - if (prepareError instanceof QueryValidationException - || prepareError instanceof FunctionFailureException - || prepareError instanceof ProtocolError) { - LOG.trace("[{}] Unrecoverable error on reprepare, rethrowing", logPrefix); - trackNodeError(node, prepareError, NANOTIME_NOT_MEASURED_YET); - setFinalError(prepareError, node, execution); - return null; - } - } - } else if (exception instanceof RequestThrottlingException) { - trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); - setFinalError(exception, node, execution); - return null; - } - recordError(node, exception); - trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); - LOG.trace("[{}] Reprepare failed, trying next node", logPrefix); - sendRequest(null, queryPlan, execution, retryCount, false); - } else { - if (!repreparedId.equals(idToReprepare)) { - IllegalStateException illegalStateException = - new IllegalStateException( - String.format( - "ID mismatch while trying to reprepare (expected %s, got %s). " - + "This prepared statement won't work anymore. " - + "This usually happens when you run a 'USE...' 
query after " - + "the statement was prepared.", - Bytes.toHexString(idToReprepare), - Bytes.toHexString(repreparedId))); - trackNodeError(node, illegalStateException, NANOTIME_NOT_MEASURED_YET); - setFinalError(illegalStateException, node, execution); - } - LOG.trace("[{}] Reprepare sucessful, retrying", logPrefix); - sendRequest(node, queryPlan, execution, retryCount, false); - } - return null; - }); - return; - } CoordinatorException error = Conversions.toThrowable(node, errorMessage, context); NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); if (error instanceof BootstrappingException) { From c2f13dbe649b0695b3aa998692367d55a7959e11 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Fri, 6 Dec 2019 10:04:33 -0600 Subject: [PATCH 341/979] JAVA-2479: Fix deprecation warnings after upgrade of TinkerPop (#312) * Fix deprecation warnings after upgrade of TinkerPop --- .../dse/driver/internal/core/graph/DseGraphTraversal.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java index 45f9b670b16..374a0277ed2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/DseGraphTraversal.java @@ -22,7 +22,6 @@ import net.jcip.annotations.NotThreadSafe; import org.apache.tinkerpop.gremlin.process.remote.traversal.AbstractRemoteTraversal; import org.apache.tinkerpop.gremlin.process.remote.traversal.DefaultRemoteTraverser; -import org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversalSideEffects; import org.apache.tinkerpop.gremlin.process.traversal.Traverser; @NotThreadSafe @@ -35,7 +34,11 @@ public DseGraphTraversal(AsyncGraphResultSet firstPage) { } @Override - public RemoteTraversalSideEffects getSideEffects() { + 
@SuppressWarnings("deprecation") + public org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversalSideEffects + getSideEffects() { + // This was deprecated as part of TINKERPOP-2265 + // and is no longer being promoted as a feature. // return null but do not throw "NotSupportedException" return null; } From 6bf79782aec76673dba6df5ac827de2f861a528b Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Mon, 9 Dec 2019 09:00:35 +0100 Subject: [PATCH 342/979] JAVA-2571: Revisit usages of DseGraph.g (#319) --- changelog/README.md | 1 + .../dse/driver/api/core/graph/DseGraph.java | 27 +++++++++++++++---- .../DseGraphRemoteConnectionBuilder.java | 2 +- .../api/core/graph/CqlCollectionIT.java | 4 ++- .../remote/ClassicGraphDataTypeRemoteIT.java | 4 ++- .../remote/CoreGraphDataTypeRemoteIT.java | 4 ++- .../GraphTraversalMetaPropertiesRemoteIT.java | 4 ++- ...GraphTraversalMultiPropertiesRemoteIT.java | 4 ++- .../core/dse/graph/fluent/implicit/README.md | 4 +-- 9 files changed, 41 insertions(+), 13 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index da114417983..d33772d5037 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2571: Revisit usages of DseGraph.g - [improvement] JAVA-2558: Revisit GraphRequestHandler - [bug] JAVA-2508: Preserve backward compatibility in schema metadata types - [bug] JAVA-2465: Avoid requesting 0 page when executing continuous paging queries diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java index 44d04e6b05f..e9a46c8f5c9 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraph.java @@ -17,6 +17,7 @@ import com.datastax.dse.driver.internal.core.graph.DefaultDseRemoteConnectionBuilder; import com.datastax.oss.driver.api.core.CqlSession; +import 
org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -27,15 +28,31 @@ public class DseGraph { /** + * IMPORTANT: As of Tinkerpop 3.3.5, you should no longer use this shortcut if you intend + * to connect the traversal to DSE Graph using a {@linkplain + * org.apache.tinkerpop.gremlin.process.remote.RemoteConnection remote connection}, for example + * via the {@link #remoteConnectionBuilder} method declared below. Instead of: + * + *

      {@code
      +   * DseSession session = ...;
      +   * RemoteConnection remoteConnection = DseGraph.remoteConnectionBuilder(session).build();
      +   * GraphTraversalSource g = DseGraph.g.withRemote(remoteConnection);
      +   * }
      + * + * You should now use {@link AnonymousTraversalSource#traversal()}, and adopt the following idiom: + * + *
      {@code
      +   * DseSession session = ...;
      +   * RemoteConnection remoteConnection = DseGraph.remoteConnectionBuilder(session).build();
      +   * GraphTraversalSource g = AnonymousTraversalSource.traversal().withRemote(remoteConnection);
      +   * }
      + * * A general-purpose shortcut for a non-connected TinkerPop {@link GraphTraversalSource} * based on an immutable empty graph. This is really just a shortcut to {@code * EmptyGraph.instance().traversal();}. * - *

      Can be used to create {@link FluentGraphStatement} instances (recommended), or can be - * configured to be remotely connected to DSE Graph using the {@link #remoteConnectionBuilder} - * method. - * - *

      For ease of use you may statically import this variable. + *

      It can be used to create {@link FluentGraphStatement} instances (recommended); for ease of + * use you may statically import this variable. * *

      Calling {@code g.getGraph()} will return a local immutable empty graph which is in no way * connected to the DSE Graph server, it will not allow to modify a DSE Graph directly. To act on diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java index fe2dca85819..9f982ea3a11 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/DseGraphRemoteConnectionBuilder.java @@ -28,7 +28,7 @@ * *

      {@code
        * DseSession dseSession = DseSession.builder().build();
      - * GraphTraversalSource g = DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(dseSession).build());
      + * GraphTraversalSource g = AnonymousTraversalSource.traversal().withRemote(DseGraph.remoteConnectionBuilder(dseSession).build());
        * List vertices = g.V().hasLabel("person").toList();
        * }
      * diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java index 1c713b5c6fe..e026e178a24 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java @@ -24,6 +24,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -47,7 +48,8 @@ public class CqlCollectionIT { @ClassRule public static TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); @BeforeClass public static void setup() { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java index b525e725d4e..bcd6b40d21c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.junit.BeforeClass; @@ -58,7 +59,8 @@ public CqlSession session() { } private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); @Override public Vertex insertVertexAndReturn(String vertexLabel, String propertyName, Object value) { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java index 7aa6a36df05..355ea35af07 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -48,7 +49,8 @@ protected String graphName() { } private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session()).build()); + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(session()).build()); @Override public Map insertVertexThenReadProperties( diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java index e5526341e69..a40b7c6d397 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; @@ -48,7 +49,8 @@ public class GraphTraversalMetaPropertiesRemoteIT { public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); /** Builds a simple schema that provides for a vertex with a property with sub properties. 
*/ public static final String META_PROPS = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java index 1cc614dec5e..6dcd6bda336 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Iterator; +import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.apache.tinkerpop.gremlin.structure.VertexProperty; @@ -48,7 +49,8 @@ public class GraphTraversalMultiPropertiesRemoteIT { public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private final GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); + AnonymousTraversalSource.traversal() + .withRemote(DseGraph.remoteConnectionBuilder(SESSION_RULE.session()).build()); /** Builds a simple schema that provides for a vertex with a multi-cardinality property. 
*/ public static final String MULTI_PROPS = diff --git a/manual/core/dse/graph/fluent/implicit/README.md b/manual/core/dse/graph/fluent/implicit/README.md index a282b79d983..797189a9ae1 100644 --- a/manual/core/dse/graph/fluent/implicit/README.md +++ b/manual/core/dse/graph/fluent/implicit/README.md @@ -7,7 +7,7 @@ the DSE cluster: CqlSession session = CqlSession.builder().build(); GraphTraversalSource g = - DseGraph.g.withRemote(DseGraph.remoteConnectionBuilder(session).build()); + AnonymousTraversalSource.traversal().withRemote(DseGraph.remoteConnectionBuilder(session).build()); ``` Then build traversals from that source. Whenever you reach a [terminal step] \(such as `next()`, @@ -39,7 +39,7 @@ datastax-java-driver { Pass the profile name to the remote connection builder: ```java -GraphTraversalSource a = DseGraph.g.withRemote( +GraphTraversalSource a = AnonymousTraversalSource.traversal().withRemote( DseGraph.remoteConnectionBuilder(session) .withExecutionProfileName("graph-oltp") .build()); From 626594bfc669872a7cadce133dbb3762e4ba355d Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Tue, 10 Dec 2019 14:04:27 +0100 Subject: [PATCH 343/979] JAVA-2570 do not use global Mock fields. 
Use inline mock, mocking only fields that are needed by the particular test (#321) --- .../core/graph/GraphSupportCheckerTest.java | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java index 5654a0d688e..ff446076186 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java @@ -42,22 +42,12 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnit; -import org.mockito.junit.MockitoRule; @RunWith(DataProviderRunner.class) public class GraphSupportCheckerTest { - @Mock DriverExecutionProfile executionProfile; - - @Mock GraphStatement graphStatement; - - @Rule public MockitoRule mockitoRule = MockitoJUnit.rule(); - @UseDataProvider("graphPagingEnabledAndDseVersions") @Test public void should_check_if_paging_is_supported( @@ -239,6 +229,8 @@ public static Object[][] dseVersionsAndGraphProtocols() { @Test @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProtocol) { + GraphStatement graphStatement = mock(GraphStatement.class); + DriverExecutionProfile executionProfile = mock(DriverExecutionProfile.class); when(graphStatement.getSubProtocol()).thenReturn(graphProtocol.toInternalCode()); GraphProtocol inferredProtocol = @@ -254,6 +246,8 @@ public void should_pickup_graph_protocol_from_statement(GraphProtocol graphProto @UseDataProvider("graphProtocolStringsAndDseVersions") public void should_pickup_graph_protocol_and_parse_from_string_config( String 
stringConfig, Version dseVersion) { + GraphStatement graphStatement = mock(GraphStatement.class); + DriverExecutionProfile executionProfile = mock(DriverExecutionProfile.class); when(executionProfile.isDefined(DseDriverOption.GRAPH_SUB_PROTOCOL)).thenReturn(Boolean.TRUE); when(executionProfile.getString(eq(DseDriverOption.GRAPH_SUB_PROTOCOL))) .thenReturn(stringConfig); @@ -282,6 +276,8 @@ public static Object[][] graphProtocolStringsAndDseVersions() { @Test @UseDataProvider("dseVersions6") public void should_use_correct_default_protocol_when_parsing(Version dseVersion) { + GraphStatement graphStatement = mock(GraphStatement.class); + DriverExecutionProfile executionProfile = mock(DriverExecutionProfile.class); DseDriverContext context = mockNodesInMetadataWithVersions(mock(DseDriverContext.class), true, dseVersion); GraphProtocol inferredProtocol = From d3be1e2e5ca8d6610afacca14c738646db0e5d86 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 13 Dec 2019 17:17:47 -0300 Subject: [PATCH 344/979] Upgrade to driver 4.4.0-SNAPSHOT and fix deprecation warnings --- ...aphRequestHandlerSpeculativeExecutionTest.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java index e4a194b1fda..1b2f44169db 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java @@ -40,6 +40,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; +import java.util.List; import java.util.Map; import 
java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; @@ -335,10 +336,11 @@ public void should_fail_if_no_more_nodes_and_initial_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } @@ -396,10 +398,11 @@ public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } From cc8b9ed0f9c99d9163dccc814e9d27231489bd07 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 17 Dec 2019 09:04:26 -0300 Subject: [PATCH 345/979] JAVA-2556: Make ExecutionInfo compatible with any Request type (#315) --- changelog/README.md | 1 + .../ContinuousCqlRequestHandler.java | 30 +------------ .../ContinuousRequestHandlerBase.java | 43 +++++++++++-------- .../graph/ContinuousAsyncGraphResultSet.java | 27 +++++++++--- .../graph/ContinuousGraphRequestHandler.java | 29 
++----------- .../core/graph/GraphRequestHandler.java | 37 ++++++++-------- .../core/graph/MultiPageGraphResultSet.java | 27 +++++++++--- .../ContinuousCqlRequestHandlerTest.java | 2 +- ...inuousCqlRequestReactiveProcessorTest.java | 2 +- .../ContinuousGraphRequestHandlerTest.java | 6 +-- .../core/graph/GraphResultSetTestBase.java | 6 +-- .../core/graph/GraphResultSetsTest.java | 22 +++++----- .../driver/api/core/graph/GraphPagingIT.java | 31 +++++++------ .../graph/GraphSpeculativeExecutionIT.java | 5 ++- 14 files changed, 130 insertions(+), 138 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index d33772d5037..fc18c885a84 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2556: Make ExecutionInfo compatible with any Request type - [improvement] JAVA-2571: Revisit usages of DseGraph.g - [improvement] JAVA-2558: Revisit GraphRequestHandler - [bug] JAVA-2508: Preserve backward compatibility in schema metadata types diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 776c4a6c476..b77303641ad 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -23,18 +23,13 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.cql.DefaultRow; import 
com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.util.CountingIterator; -import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.response.Result; import com.datastax.oss.protocol.internal.response.result.Rows; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; import java.time.Duration; import java.util.List; @@ -47,7 +42,7 @@ */ @ThreadSafe public class ContinuousCqlRequestHandler - extends ContinuousRequestHandlerBase { + extends ContinuousRequestHandlerBase { private final Message message; private final Duration firstPageTimeout; @@ -122,29 +117,6 @@ protected ContinuousAsyncResultSet createEmptyResultSet(@NonNull ExecutionInfo e return DefaultContinuousAsyncResultSet.empty(executionInfo); } - @NonNull - @Override - protected DefaultExecutionInfo createExecutionInfo( - @NonNull Node node, - @Nullable Result result, - @Nullable Frame response, - int successfulExecutionIndex) { - ByteBuffer pagingState = - result instanceof Rows ? 
((Rows) result).getMetadata().pagingState : null; - return new DefaultExecutionInfo( - statement, - node, - startedSpeculativeExecutionsCount.get(), - successfulExecutionIndex, - errors, - pagingState, - response, - true, - session, - context, - executionProfile); - } - @NonNull @Override protected DefaultContinuousAsyncResultSet createResultSet( diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index ac6002db961..779f3dd6dfd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -48,6 +48,7 @@ import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.cql.DefaultExecutionInfo; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; @@ -99,8 +100,7 @@ * Handles a request that supports multiple response messages (a.k.a. continuous paging request). 
*/ @ThreadSafe -public abstract class ContinuousRequestHandlerBase< - StatementT extends Request, ResultSetT, ExecutionInfoT> +public abstract class ContinuousRequestHandlerBase implements Throttled { private static final Logger LOG = LoggerFactory.getLogger(ContinuousRequestHandlerBase.class); @@ -271,20 +271,13 @@ public ContinuousRequestHandlerBase( @NonNull protected abstract ResultSetT createResultSet( @NonNull Rows rows, - @NonNull ExecutionInfoT executionInfo, + @NonNull ExecutionInfo executionInfo, @NonNull ColumnDefinitions columnDefinitions) throws IOException; /** @return An empty result set; used only when the retry policy decides to ignore the error. */ @NonNull - protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfoT executionInfo); - - @NonNull - protected abstract ExecutionInfoT createExecutionInfo( - @NonNull Node node, - @Nullable Result result, - @Nullable Frame response, - int successfulExecutionIndex); + protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfo executionInfo); protected abstract int pageNumber(@NonNull ResultSetT resultSet); @@ -641,7 +634,7 @@ public void onFailure(@NonNull Throwable error) { private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { try { if (setChosenExecution(this)) { - ExecutionInfoT executionInfo = createExecutionInfo(node, result, frame, executionIndex); + ExecutionInfo executionInfo = createExecutionInfo(node, frame, executionIndex); if (result instanceof Rows) { DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); int pageNumber = rowsMetadata.continuousPageNumber; @@ -1438,6 +1431,23 @@ private void completeResultSetFuture( } } + @NonNull + private ExecutionInfo createExecutionInfo( + @NonNull Node node, @Nullable Frame response, int successfulExecutionIndex) { + return new DefaultExecutionInfo( + statement, + node, + startedSpeculativeExecutionsCount.get(), + successfulExecutionIndex, + errors, + null, + response, 
+ true, + session, + context, + executionProfile); + } + /** * Called from the chosen execution when it completes successfully. * @@ -1491,12 +1501,9 @@ private void setFailed(@Nullable NodeResponseCallback callback, @NonNull Throwab // Must be called here in case we are failing because the global timeout fired cancelScheduledTasks(null); if (callback != null && error instanceof DriverException) { - ExecutionInfoT executionInfo = - createExecutionInfo(callback.node, null, null, callback.executionIndex); - // FIXME cannot set ExecutionInfo for Graph here - if (executionInfo instanceof ExecutionInfo) { - ((DriverException) error).setExecutionInfo((ExecutionInfo) executionInfo); - } + ExecutionInfo executionInfo = + createExecutionInfo(callback.node, null, callback.executionIndex); + ((DriverException) error).setExecutionInfo(executionInfo); } enqueueOrCompletePending(error); RequestTracker requestTracker = context.getRequestTracker(); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java index e0816018203..3318f112995 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java @@ -7,8 +7,8 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.util.CountingIterator; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; @@ -22,12 +22,12 @@ public class ContinuousAsyncGraphResultSet implements AsyncGraphResultSet { private final CountingIterator 
iterator; private final int pageNumber; private final boolean hasMorePages; - private final GraphExecutionInfo executionInfo; + private final ExecutionInfo executionInfo; private final ContinuousGraphRequestHandler continuousGraphRequestHandler; private final Iterable currentPage; public ContinuousAsyncGraphResultSet( - GraphExecutionInfo executionInfo, + ExecutionInfo executionInfo, Queue data, int pageNumber, boolean hasMorePages, @@ -44,10 +44,17 @@ public ContinuousAsyncGraphResultSet( @NonNull @Override - public GraphExecutionInfo getExecutionInfo() { + public ExecutionInfo getRequestExecutionInfo() { return executionInfo; } + @NonNull + @Override + @Deprecated + public com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo() { + return GraphExecutionInfoConverter.convert(executionInfo); + } + @Override public int remaining() { return iterator.remaining(); @@ -84,16 +91,22 @@ public int pageNumber() { return pageNumber; } - static AsyncGraphResultSet empty(GraphExecutionInfo executionInfo) { + static AsyncGraphResultSet empty(ExecutionInfo executionInfo) { return new AsyncGraphResultSet() { - @NonNull @Override - public GraphExecutionInfo getExecutionInfo() { + public ExecutionInfo getRequestExecutionInfo() { return executionInfo; } + @NonNull + @Override + @Deprecated + public com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo() { + return GraphExecutionInfoConverter.convert(executionInfo); + } + @NonNull @Override public Iterable currentPage() { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index 776f8e96d2a..5e72a24a59a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -8,23 +8,19 @@ 
import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; -import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; -import com.datastax.oss.protocol.internal.response.Result; import com.datastax.oss.protocol.internal.response.result.Rows; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; import java.nio.ByteBuffer; import java.time.Duration; @@ -40,8 +36,7 @@ */ @ThreadSafe public class ContinuousGraphRequestHandler - extends ContinuousRequestHandlerBase< - GraphStatement, AsyncGraphResultSet, GraphExecutionInfo> { + extends ContinuousRequestHandlerBase, AsyncGraphResultSet> { private final Message message; private final GraphProtocol subProtocol; @@ -120,31 +115,15 @@ protected Map createPayload() { @NonNull @Override - protected AsyncGraphResultSet createEmptyResultSet(@NonNull GraphExecutionInfo executionInfo) { + protected AsyncGraphResultSet createEmptyResultSet(@NonNull ExecutionInfo executionInfo) { return ContinuousAsyncGraphResultSet.empty(executionInfo); } - 
@NonNull - @Override - protected DefaultGraphExecutionInfo createExecutionInfo( - @NonNull Node node, - @Nullable Result result, - @Nullable Frame response, - int successfulExecutionIndex) { - return new DefaultGraphExecutionInfo( - statement, - node, - startedSpeculativeExecutionsCount.get(), - successfulExecutionIndex, - errors, - response); - } - @NonNull @Override protected ContinuousAsyncGraphResultSet createResultSet( @NonNull Rows rows, - @NonNull GraphExecutionInfo executionInfo, + @NonNull ExecutionInfo executionInfo, @NonNull final ColumnDefinitions columnDefinitions) throws IOException { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 8a5820aa9af..7cb7e90216b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -22,6 +22,7 @@ import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -121,7 +122,6 @@ public class GraphRequestHandler implements Throttled { */ private final AtomicInteger startedSpeculativeExecutionsCount; - private final Duration timeout; private final Timeout scheduledTimeout; private final List scheduledExecutions; private final List inFlightCallbacks; @@ -179,7 +179,7 @@ public class GraphRequestHandler implements Throttled { GraphConversions.createMessageFromGraphStatement( this.statement, subProtocol, executionProfile, this.context, 
this.graphBinaryModule); this.timer = context.getNettyOptions().getTimer(); - this.timeout = + Duration timeout = statement.getTimeout() != null ? statement.getTimeout() : executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); @@ -437,23 +437,22 @@ public void onThrottleFailure(@NonNull RequestThrottlingException error) { } private void setFinalError(Throwable error, Node node, int execution) { - // FIXME JAVA-2556 - // if (error instanceof DriverException) { - // ((DriverException) error) - // .setExecutionInfo( - // new DefaultExecutionInfo( - // graphStatement, - // node, - // startedSpeculativeExecutionsCount.get(), - // execution, - // errors, - // null, - // null, - // true, - // session, - // context, - // executionProfile)); - // } + if (error instanceof DriverException) { + ((DriverException) error) + .setExecutionInfo( + new DefaultExecutionInfo( + statement, + node, + startedSpeculativeExecutionsCount.get(), + execution, + errors, + null, + null, + true, + session, + context, + executionProfile)); + } if (result.completeExceptionally(error)) { cancelScheduledTasks(); if (!(requestTracker instanceof NoopRequestTracker)) { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java index e4ffe156c97..f2d08c705ff 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java @@ -7,12 +7,13 @@ package com.datastax.dse.driver.internal.core.graph; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphResultSet; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import 
com.datastax.oss.driver.internal.core.util.CountingIterator; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayList; import java.util.Iterator; @@ -20,11 +21,11 @@ public class MultiPageGraphResultSet implements GraphResultSet { private final RowIterator iterator; - private final List executionInfos = new ArrayList<>(); + private final List executionInfos = new ArrayList<>(); public MultiPageGraphResultSet(AsyncGraphResultSet firstPage) { iterator = new RowIterator(firstPage); - executionInfos.add(firstPage.getExecutionInfo()); + executionInfos.add(firstPage.getRequestExecutionInfo()); } @Override @@ -34,10 +35,17 @@ public void cancel() { @NonNull @Override - public GraphExecutionInfo getExecutionInfo() { + public ExecutionInfo getRequestExecutionInfo() { return executionInfos.get(executionInfos.size() - 1); } + @NonNull + @Override + @Deprecated + public com.datastax.dse.driver.api.core.graph.GraphExecutionInfo getExecutionInfo() { + return GraphExecutionInfoConverter.convert(getRequestExecutionInfo()); + } + /** * The execution information for all the queries that have been performed so far to assemble this * iterable. @@ -46,10 +54,17 @@ public GraphExecutionInfo getExecutionInfo() { * background queries to fetch additional pages transparently as the result set is being iterated. */ @NonNull - public List getExecutionInfos() { + public List getRequestExecutionInfos() { return executionInfos; } + /** @deprecated use {@link #getRequestExecutionInfos()} instead. 
*/ + @NonNull + @Deprecated + public List getExecutionInfos() { + return Lists.transform(executionInfos, GraphExecutionInfoConverter::convert); + } + @NonNull @Override public Iterator iterator() { @@ -81,7 +96,7 @@ private void maybeMoveToNextPage() { currentPage = nextPage; remaining += currentPage.remaining(); currentRows = nextPage.currentPage().iterator(); - executionInfos.add(nextPage.getExecutionInfo()); + executionInfos.add(nextPage.getRequestExecutionInfo()); } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java index da126ed47c9..2134b95950e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerTest.java @@ -133,7 +133,7 @@ public void should_complete_multi_page_result(DseProtocolVersion version) { assertThat(executionInfo.getCoordinator()).isEqualTo(node1); assertThat(executionInfo.getErrors()).isEmpty(); assertThat(executionInfo.getIncomingPayload()).isEmpty(); - assertThat(executionInfo.getPagingState()).isNotNull(); + assertThat(executionInfo.getPagingState()).isNull(); assertThat(executionInfo.getSpeculativeExecutionCount()).isEqualTo(0); assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); assertThat(executionInfo.getWarnings()).isEmpty(); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java index ddd956ab84e..9f0addbc514 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java +++ 
b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/reactive/ContinuousCqlRequestReactiveProcessorTest.java @@ -139,7 +139,7 @@ public void should_complete_multi_page_result(DseProtocolVersion version) { assertThat(firstExecutionInfo.getCoordinator()).isEqualTo(node1); assertThat(firstExecutionInfo.getErrors()).isEmpty(); assertThat(firstExecutionInfo.getIncomingPayload()).isEmpty(); - assertThat(firstExecutionInfo.getPagingState()).isNotNull(); + assertThat(firstExecutionInfo.getPagingState()).isNull(); assertThat(firstExecutionInfo.getSpeculativeExecutionCount()).isEqualTo(0); assertThat(firstExecutionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); assertThat(firstExecutionInfo.getWarnings()).isEmpty(); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index a793a0f983e..ba6c6fe9bd6 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -16,7 +16,6 @@ import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; @@ -25,6 +24,7 @@ import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import 
com.datastax.oss.driver.internal.core.cql.PoolBehavior; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; @@ -91,7 +91,7 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx page1 -> { assertThat(page1.hasMorePages()).isTrue(); assertThat(page1.currentPage()).hasSize(10).allMatch(GraphNode::isVertex); - GraphExecutionInfo executionInfo = page1.getExecutionInfo(); + ExecutionInfo executionInfo = page1.getRequestExecutionInfo(); assertThat(executionInfo.getCoordinator()).isEqualTo(node); assertThat(executionInfo.getErrors()).isEmpty(); assertThat(executionInfo.getIncomingPayload()).isEmpty(); @@ -111,7 +111,7 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx page2 -> { assertThat(page2.hasMorePages()).isFalse(); assertThat(page2.currentPage()).hasSize(10).allMatch(GraphNode::isVertex); - GraphExecutionInfo executionInfo = page2.getExecutionInfo(); + ExecutionInfo executionInfo = page2.getRequestExecutionInfo(); assertThat(executionInfo.getCoordinator()).isEqualTo(node); assertThat(executionInfo.getErrors()).isEmpty(); assertThat(executionInfo.getIncomingPayload()).isEmpty(); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java index e082a013f85..f1f3d70de62 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java @@ -12,8 +12,8 @@ import static org.mockito.Mockito.when; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; -import com.datastax.dse.driver.api.core.graph.GraphExecutionInfo; import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import 
com.datastax.oss.driver.internal.core.util.CountingIterator; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.Arrays; @@ -28,8 +28,8 @@ public abstract class GraphResultSetTestBase { protected AsyncGraphResultSet mockPage(boolean nextPage, Integer... data) { AsyncGraphResultSet page = mock(AsyncGraphResultSet.class); - GraphExecutionInfo executionInfo = mock(GraphExecutionInfo.class); - when(page.getExecutionInfo()).thenReturn(executionInfo); + ExecutionInfo executionInfo = mock(ExecutionInfo.class); + when(page.getRequestExecutionInfo()).thenReturn(executionInfo); if (nextPage) { when(page.hasMorePages()).thenReturn(true); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java index 3ed28a386d8..28056903942 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java @@ -25,7 +25,7 @@ public void should_create_result_set_from_single_page() { GraphResultSet resultSet = GraphResultSets.toSync(page1); // Then - assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); + assertThat(resultSet.getRequestExecutionInfo()).isSameAs(page1.getRequestExecutionInfo()); Iterator iterator = resultSet.iterator(); @@ -52,9 +52,9 @@ public void should_create_result_set_from_multiple_pages() { // Then assertThat(resultSet.iterator().hasNext()).isTrue(); - assertThat(resultSet.getExecutionInfo()).isSameAs(page1.getExecutionInfo()); - assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) - .containsExactly(page1.getExecutionInfo()); + assertThat(resultSet.getRequestExecutionInfo()).isSameAs(page1.getRequestExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getRequestExecutionInfos()) + 
.containsExactly(page1.getRequestExecutionInfo()); Iterator iterator = resultSet.iterator(); @@ -64,9 +64,9 @@ public void should_create_result_set_from_multiple_pages() { assertThat(iterator.hasNext()).isTrue(); // This should have triggered the fetch of page2 - assertThat(resultSet.getExecutionInfo()).isEqualTo(page2.getExecutionInfo()); - assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) - .containsExactly(page1.getExecutionInfo(), page2.getExecutionInfo()); + assertThat(resultSet.getRequestExecutionInfo()).isEqualTo(page2.getRequestExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getRequestExecutionInfos()) + .containsExactly(page1.getRequestExecutionInfo(), page2.getRequestExecutionInfo()); assertNextRow(iterator, 3); assertNextRow(iterator, 4); @@ -74,10 +74,12 @@ public void should_create_result_set_from_multiple_pages() { assertThat(iterator.hasNext()).isTrue(); // This should have triggered the fetch of page3 - assertThat(resultSet.getExecutionInfo()).isEqualTo(page3.getExecutionInfo()); - assertThat(((MultiPageGraphResultSet) resultSet).getExecutionInfos()) + assertThat(resultSet.getRequestExecutionInfo()).isEqualTo(page3.getRequestExecutionInfo()); + assertThat(((MultiPageGraphResultSet) resultSet).getRequestExecutionInfos()) .containsExactly( - page1.getExecutionInfo(), page2.getExecutionInfo(), page3.getExecutionInfo()); + page1.getRequestExecutionInfo(), + page2.getRequestExecutionInfo(), + page3.getRequestExecutionInfo()); assertNextRow(iterator, 6); assertNextRow(iterator, 7); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index 3e83397d556..d2fd6e0eeca 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -16,6 +16,7 
@@ import com.datastax.dse.driver.internal.core.graph.MultiPageGraphResultSet; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.internal.core.util.CountingIterator; @@ -100,8 +101,8 @@ public void synchronous_paging_with_options(Options options) { GraphNode node = nodes.get(i - 1); assertThat(node.asString()).isEqualTo("user" + i); } - assertThat(result.getExecutionInfo()).isNotNull(); - assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + assertThat(result.getRequestExecutionInfo()).isNotNull(); + assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); assertIfMultiPage(result, options.expectedPages); } @@ -136,8 +137,8 @@ public void synchronous_paging_with_options_when_auto(Options options) { GraphNode node = nodes.get(i - 1); assertThat(node.asString()).isEqualTo("user" + i); } - assertThat(result.getExecutionInfo()).isNotNull(); - assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + assertThat(result.getRequestExecutionInfo()).isNotNull(); + assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); assertIfMultiPage(result, options.expectedPages); @@ -145,9 +146,11 @@ public void synchronous_paging_with_options_when_auto(Options options) { private void assertIfMultiPage(GraphResultSet result, int expectedPages) { if (result instanceof MultiPageGraphResultSet) { - assertThat(((MultiPageGraphResultSet) result).getExecutionInfos()).hasSize(expectedPages); - assertThat(result.getExecutionInfo()) - .isSameAs(((MultiPageGraphResultSet) result).getExecutionInfos().get(expectedPages - 1)); + 
assertThat(((MultiPageGraphResultSet) result).getRequestExecutionInfos()) + .hasSize(expectedPages); + assertThat(result.getRequestExecutionInfo()) + .isSameAs( + ((MultiPageGraphResultSet) result).getRequestExecutionInfos().get(expectedPages - 1)); } } @@ -182,8 +185,8 @@ public void synchronous_options_with_paging_disabled_should_fallback_to_single_p GraphNode node = nodes.get(i - 1); assertThat(node.asString()).isEqualTo("user" + i); } - assertThat(result.getExecutionInfo()).isNotNull(); - assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + assertThat(result.getRequestExecutionInfo()).isNotNull(); + assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); } @@ -275,7 +278,7 @@ private void checkAsyncResult( Options options, int rowsFetched, int pageNumber, - List graphExecutionInfos) + List graphExecutionInfos) throws ExecutionException, InterruptedException { AsyncGraphResultSet result = future.toCompletableFuture().get(); int remaining = result.remaining(); @@ -298,14 +301,14 @@ private void checkAsyncResult( } assertThat(result.remaining()).isZero(); - assertThat(result.getExecutionInfo()).isNotNull(); - assertThat(result.getExecutionInfo().getCoordinator().getEndPoint().resolve()) + assertThat(result.getRequestExecutionInfo()).isNotNull(); + assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); - graphExecutionInfos.add(result.getExecutionInfo()); + graphExecutionInfos.add(result.getRequestExecutionInfo()); assertThat(graphExecutionInfos).hasSize(pageNumber); - assertThat(result.getExecutionInfo()).isSameAs(graphExecutionInfos.get(pageNumber - 1)); + assertThat(result.getRequestExecutionInfo()).isSameAs(graphExecutionInfos.get(pageNumber - 1)); if (pageNumber == options.expectedPages) { assertThat(result.hasMorePages()).isFalse(); assertThat(options.expectedRows).isEqualTo(rowsFetched); diff --git 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java index 30267dc52d7..44e16619349 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java @@ -56,13 +56,14 @@ public void should_use_speculative_executions_when_enabled( .build()) .build()) { - GraphStatement statement = + ScriptGraphStatement statement = ScriptGraphStatement.newInstance( "java.util.concurrent.TimeUnit.MILLISECONDS.sleep(1000L);") .setIdempotent(statementIdempotence); GraphResultSet result = session.execute(statement); - int speculativeExecutionCount = result.getExecutionInfo().getSpeculativeExecutionCount(); + int speculativeExecutionCount = + result.getRequestExecutionInfo().getSpeculativeExecutionCount(); if (expectSpeculativeExecutions) { assertThat(speculativeExecutionCount).isGreaterThan(0); } else { From 60fa57bae5a42d80cb7991e478a88e426ba4720e Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Tue, 17 Dec 2019 15:17:00 +0100 Subject: [PATCH 346/979] JAVA-2566: Introduce specific metrics for Graph queries (#314) --- changelog/README.md | 1 + .../api/core/config/DseDriverOption.java | 40 +++++++++ .../api/core/metrics/DseNodeMetrics.java | 57 +++++++++++++ .../api/core/metrics/DseSessionMetric.java | 2 + .../ContinuousCqlRequestHandler.java | 13 ++- .../ContinuousRequestHandlerBase.java | 41 +++++++--- .../graph/ContinuousGraphRequestHandler.java | 12 ++- .../core/graph/GraphRequestHandler.java | 11 +-- .../metrics/DropwizardMetricsFactory.java | 10 ++- .../metrics/DropwizardNodeMetricUpdater.java | 8 ++ .../DropwizardSessionMetricUpdater.java | 7 ++ core/src/main/resources/reference.conf | 81 ++++++++++++++++++- ...equestHandlerSpeculativeExecutionTest.java | 5 +- 
.../ContinuousGraphRequestHandlerTest.java | 36 ++++++++- .../core/graph/GraphRequestHandlerTest.java | 12 +-- .../driver/api/core/graph/GraphPagingIT.java | 41 +++++++++- 16 files changed, 344 insertions(+), 33 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseNodeMetrics.java diff --git a/changelog/README.md b/changelog/README.md index fc18c885a84..1a89b8a1a68 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### NGDG (in progress) +- [improvement] JAVA-2566: Introduce specific metrics for Graph queries - [improvement] JAVA-2556: Make ExecutionInfo compatible with any Request type - [improvement] JAVA-2571: Revisit usages of DseGraph.g - [improvement] JAVA-2558: Revisit GraphRequestHandler diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index dbe0ac3a943..ac493719ef6 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -207,6 +207,46 @@ public enum DseDriverOption implements DriverOption { *

      Value type: int */ GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES("advanced.graph.paging-options.max-enqueued-pages"), + /** + * The largest latency that we expect to record for graph requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_GRAPH_REQUESTS_HIGHEST("advanced.metrics.session.graph-requests.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for graph + * requests. + * + *

      Value-type: int + */ + METRICS_SESSION_GRAPH_REQUESTS_DIGITS( + "advanced.metrics.session.graph-requests.significant-digits"), + /** + * The interval at which percentile data is refreshed for graph requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_GRAPH_REQUESTS_INTERVAL( + "advanced.metrics.session.graph-requests.refresh-interval"), + /** + * The largest latency that we expect to record for graph requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_GRAPH_MESSAGES_HIGHEST("advanced.metrics.node.graph-messages.highest-latency"), + /** + * The number of significant decimal digits to which internal structures will maintain for graph + * requests. + * + *

      Value-type: int + */ + METRICS_NODE_GRAPH_MESSAGES_DIGITS("advanced.metrics.node.graph-messages.significant-digits"), + /** + * The interval at which percentile data is refreshed for graph requests. + * + *

      Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_GRAPH_MESSAGES_INTERVAL("advanced.metrics.node.graph-messages.refresh-interval"), ; private final String path; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseNodeMetrics.java b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseNodeMetrics.java new file mode 100644 index 00000000000..b9c26442ae8 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseNodeMetrics.java @@ -0,0 +1,57 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.api.core.metrics; + +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; + +/** See {@code dse-reference.conf} for a description of each metric. 
*/ +public enum DseNodeMetrics implements NodeMetric { + GRAPH_MESSAGES("graph-messages"); + + private static final Map BY_PATH = sortByPath(); + + private final String path; + + DseNodeMetrics(String path) { + this.path = path; + } + + @Override + @NonNull + public String getPath() { + return path; + } + + @NonNull + public static DseNodeMetrics fromPath(@NonNull String path) { + DseNodeMetrics metric = BY_PATH.get(path); + if (metric == null) { + throw new IllegalArgumentException("Unknown node metric path " + path); + } + return metric; + } + + private static Map sortByPath() { + ImmutableMap.Builder result = ImmutableMap.builder(); + for (DseNodeMetrics value : values()) { + result.put(value.getPath(), value); + } + return result.build(); + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java index 10cde726c5b..312a7c25a9d 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metrics/DseSessionMetric.java @@ -23,6 +23,8 @@ /** See {@code dse-reference.conf} for a description of each metric. 
*/ public enum DseSessionMetric implements SessionMetric { CONTINUOUS_CQL_REQUESTS("continuous-cql-requests"), + GRAPH_REQUESTS("graph-requests"), + GRAPH_CLIENT_TIMEOUTS("graph-client-timeouts"), ; private static final Map BY_PATH = sortByPath(); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index b77303641ad..8aa8d2209e2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -15,6 +15,7 @@ */ package com.datastax.dse.driver.internal.core.cql.continuous; +import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; import com.datastax.dse.driver.internal.core.cql.DseConversions; @@ -23,6 +24,8 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.DefaultRow; import com.datastax.oss.driver.internal.core.session.DefaultSession; @@ -55,7 +58,15 @@ public class ContinuousCqlRequestHandler @NonNull DefaultSession session, @NonNull InternalDriverContext context, @NonNull String sessionLogPrefix) { - super(statement, session, context, sessionLogPrefix, false); + super( + statement, + session, + context, + sessionLogPrefix, + false, + DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + 
DefaultNodeMetric.CQL_MESSAGES); message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); firstPageTimeout = executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index 779f3dd6dfd..eb6adbdc293 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -27,6 +27,8 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; @@ -161,6 +163,9 @@ public abstract class ContinuousRequestHandlerBase warnings) { private void stopNodeMessageTimer() { NodeMetricUpdater nodeMetricUpdater = ((DefaultNode) node).getMetricUpdater(); - if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.CQL_MESSAGES, executionProfile.getName())) { + if (nodeMetricUpdater.isEnabled(messagesMetric, executionProfile.getName()) + && stopNodeMessageTimerReported.compareAndSet(false, true)) { nodeMetricUpdater.updateTimer( - DefaultNodeMetric.CQL_MESSAGES, + messagesMetric, executionProfile.getName(), System.nanoTime() - nodeStartTimeNanos, TimeUnit.NANOSECONDS); @@ -1005,7 +1023,8 @@ private void updateNodeErrorMetrics( private void trackNodeSuccess() { RequestTracker requestTracker = 
context.getRequestTracker(); - if (!(requestTracker instanceof NoopRequestTracker)) { + if (!(requestTracker instanceof NoopRequestTracker) + && nodeSuccessReported.compareAndSet(false, true)) { long latencyNanos = System.nanoTime() - nodeStartTimeNanos; requestTracker.onNodeSuccess(statement, latencyNanos, executionProfile, node, logPrefix); } @@ -1013,7 +1032,8 @@ private void trackNodeSuccess() { private void trackNodeError(@NonNull Throwable error) { RequestTracker requestTracker = context.getRequestTracker(); - if (!(requestTracker instanceof NoopRequestTracker)) { + if (!(requestTracker instanceof NoopRequestTracker) + && nodeErrorReported.compareAndSet(false, true)) { long latencyNanos = System.nanoTime() - nodeStartTimeNanos; requestTracker.onNodeError( statement, error, latencyNanos, executionProfile, node, logPrefix); @@ -1460,8 +1480,7 @@ private void setCompleted(@NonNull NodeResponseCallback callback) { RequestTracker requestTracker = context.getRequestTracker(); boolean requestTrackerEnabled = !(requestTracker instanceof NoopRequestTracker); boolean metricEnabled = - sessionMetricUpdater.isEnabled( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, executionProfile.getName()); + sessionMetricUpdater.isEnabled(continuousRequestsMetric, executionProfile.getName()); if (requestTrackerEnabled || metricEnabled) { long now = System.nanoTime(); long totalLatencyNanos = now - startTimeNanos; @@ -1471,7 +1490,7 @@ private void setCompleted(@NonNull NodeResponseCallback callback) { } if (metricEnabled) { sessionMetricUpdater.updateTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + continuousRequestsMetric, executionProfile.getName(), totalLatencyNanos, TimeUnit.NANOSECONDS); @@ -1520,10 +1539,8 @@ private void setFailed(@Nullable NodeResponseCallback callback, @NonNull Throwab } if (error instanceof DriverTimeoutException) { throttler.signalTimeout(this); - if (sessionMetricUpdater.isEnabled( - DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName())) { 
- sessionMetricUpdater.incrementCounter( - DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + if (sessionMetricUpdater.isEnabled(clientTimeoutsMetric, executionProfile.getName())) { + sessionMetricUpdater.incrementCounter(clientTimeoutsMetric, executionProfile.getName()); } } else if (!(error instanceof RequestThrottlingException)) { throttler.signalError(this, error); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index 5e72a24a59a..4b17dacfda7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -6,6 +6,8 @@ */ package com.datastax.dse.driver.internal.core.graph; +import com.datastax.dse.driver.DseNodeMetrics; +import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphNode; @@ -52,7 +54,15 @@ public class ContinuousGraphRequestHandler @NonNull String sessionLogPrefix, @NonNull GraphBinaryModule graphBinaryModule, @NonNull GraphSupportChecker graphSupportChecker) { - super(statement, session, context, sessionLogPrefix, true); + super( + statement, + session, + context, + sessionLogPrefix, + true, + DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, + DseSessionMetric.GRAPH_REQUESTS, + DseNodeMetrics.GRAPH_MESSAGES); this.graphBinaryModule = graphBinaryModule; subProtocol = graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); message = diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 
7cb7e90216b..2ec9116dda5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -19,6 +19,7 @@ import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; +import com.datastax.dse.driver.api.core.metrics.DseNodeMetrics; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.oss.driver.api.core.AllNodesFailedException; @@ -365,13 +366,13 @@ private void setFinalResult( statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); } if (sessionMetricUpdater.isEnabled( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, executionProfile.getName())) { + DseSessionMetric.GRAPH_REQUESTS, executionProfile.getName())) { if (completionTimeNanos == NANOTIME_NOT_MEASURED_YET) { completionTimeNanos = System.nanoTime(); totalLatencyNanos = completionTimeNanos - startTimeNanos; } sessionMetricUpdater.updateTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + DseSessionMetric.GRAPH_REQUESTS, executionProfile.getName(), totalLatencyNanos, TimeUnit.NANOSECONDS); @@ -462,7 +463,7 @@ private void setFinalError(Throwable error, Node node, int execution) { if (error instanceof DriverTimeoutException) { throttler.signalTimeout(this); sessionMetricUpdater.incrementCounter( - DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, executionProfile.getName()); + DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, executionProfile.getName()); } else if (!(error instanceof RequestThrottlingException)) { throttler.signalError(this, error); } @@ -593,11 +594,11 @@ private void scheduleSpeculativeExecution(int index, long delay) { public void onResponse(Frame responseFrame) { long nodeResponseTimeNanos = NANOTIME_NOT_MEASURED_YET; 
NodeMetricUpdater nodeMetricUpdater = ((DefaultNode) node).getMetricUpdater(); - if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.CQL_MESSAGES, executionProfile.getName())) { + if (nodeMetricUpdater.isEnabled(DseNodeMetrics.GRAPH_MESSAGES, executionProfile.getName())) { nodeResponseTimeNanos = System.nanoTime(); long nodeLatency = System.nanoTime() - nodeStartTimeNanos; nodeMetricUpdater.updateTimer( - DefaultNodeMetric.CQL_MESSAGES, + DseNodeMetrics.GRAPH_MESSAGES, executionProfile.getName(), nodeLatency, TimeUnit.NANOSECONDS); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index e3a27165594..b85251a522a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metrics; import com.codahale.metrics.MetricRegistry; +import com.datastax.dse.driver.api.core.metrics.DseNodeMetrics; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -28,7 +29,6 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Collections; -import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Optional; @@ -107,12 +107,16 @@ protected Set parseSessionMetricPaths(List paths) { } protected Set parseNodeMetricPaths(List paths) { - EnumSet result = EnumSet.noneOf(DefaultNodeMetric.class); + Set result = new HashSet<>(); for (String path : paths) { try { result.add(DefaultNodeMetric.fromPath(path)); } catch (IllegalArgumentException e) { - LOG.warn("[{}] 
Unknown node metric {}, skipping", logPrefix, path); + try { + result.add(DseNodeMetrics.fromPath(path)); + } catch (IllegalArgumentException e1) { + LOG.warn("[{}] Unknown node metric {}, skipping", logPrefix, path); + } } } return Collections.unmodifiableSet(result); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java index a4322393e29..32c42d2b45c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java @@ -17,6 +17,8 @@ import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseNodeMetrics; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.EndPoint; @@ -82,6 +84,12 @@ public DropwizardNodeMetricUpdater( initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); + initializeHdrTimer( + DseNodeMetrics.GRAPH_MESSAGES, + context.getConfig().getDefaultProfile(), + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java index 17ba8db1613..95d1a4fbaab 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java @@ -109,6 +109,13 @@ public DropwizardSessionMetricUpdater( DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL); + initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); + initializeHdrTimer( + DseSessionMetric.GRAPH_REQUESTS, + context.getConfig().getDefaultProfile(), + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL); } @Override diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 18af688a482..4f50e1d2ae5 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1263,9 +1263,25 @@ datastax-java-driver { # are updated during a continuous paging request: # # - At node level: all the usual metrics available for normal CQL requests, such as - # 'cql-messages' and error-related metrics; + # 'cql-messages' and error-related metrics (but these are only updated for the first + # page of results); # - At session level: only 'continuous-cql-requests' is updated (this metric). // continuous-cql-requests, + + # The throughput and latency percentiles of Graph requests (exposed as a Timer). + # + # This metric is a session-level metric and corresponds to the overall duration of the + # session.execute(GraphStatement) call, including any retry. + // graph-requests, + + # The number of graph requests that timed out -- that is, the + # session.execute(GraphStatement) call failed with a DriverTimeoutException (exposed as a + # Counter). 
+ # + # Note that this metric is analogous to the OSS driver's 'cql-client-timeouts' metric, but + # for Graph requests only. + // graph-client-timeouts + ] # Extra configuration (for the metrics that need it) @@ -1356,6 +1372,46 @@ datastax-java-driver { # time). refresh-interval = 5 minutes } + + # Required: if the 'graph-requests' metric is enabled + # Modifiable at runtime: no + # Overridable in a profile: no + graph-requests { + # The largest latency that we expect to record. + # + # This should be slightly higher than basic.graph.timeout (in theory, readings can't be higher + # than the timeout, but there might be a small overhead due to internal scheduling). + # + # This is used to scale internal data structures. If a higher recording is encountered at + # runtime, it is discarded and a warning is logged. + highest-latency = 12 seconds + + # The number of significant decimal digits to which internal structures will maintain + # value resolution and separation (for example, 3 means that recordings up to 1 second + # will be recorded with a resolution of 1 millisecond or better). + # + # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a + # warning is logged. + significant-digits = 3 + + # The interval at which percentile data is refreshed. + # + # The driver records latency data in a "live" histogram, and serves results from a cached + # snapshot. Each time the snapshot gets older than the interval, the two are switched. + # Note that this switch happens upon fetching the metrics, so if you never fetch, the + # recording interval might grow higher (that shouldn't be an issue in a production + # environment because you would typically have a metrics reporter that exports to a + # monitoring tool at a regular interval). + # + # In practice, this means that if you set this to 5 minutes, you're looking at data from a + # 5-minute interval in the past, that is at most 5 minutes old.
If you fetch the metrics + # at a faster pace, you will observe the same data for 5 minutes until the interval + # expires. + # + # Note that this does not apply to the total count and rates (those are updated in real + # time). + refresh-interval = 5 minutes + } } # The node-level metrics (all disabled by default). # @@ -1492,6 +1548,18 @@ datastax-java-driver { # to this node (exposed as a Counter). # Authentication errors are also logged at WARN level. // errors.connection.auth, + + # The throughput and latency percentiles of individual graph messages sent to this node as + # part of an overall request (exposed as a Timer). + # + # Note that this does not necessarily correspond to the overall duration of the + # session.execute() call, since the driver might query multiple nodes because of retries + # and speculative executions. Therefore a single "request" (as seen from a client of the + # driver) can be composed of more than one of the "messages" measured by this metric. + # + # Therefore this metric is intended as an insight into the performance of this particular + # node. For statistics on overall request completion, use the session-level graph-requests. 
+ // graph-messages, ] # See cql-requests in the `session` section @@ -1504,6 +1572,17 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes } + + # See graph-requests in the `session` section + # + # Required: if the 'graph-messages' metric is enabled + # Modifiable at runtime: no + # Overridable in a profile: no + graph-messages { + highest-latency = 3 seconds + significant-digits = 3 + refresh-interval = 5 minutes + } } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java index 1b2f44169db..0326c656154 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java @@ -19,6 +19,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseNodeMetrics; import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphStatement; @@ -242,10 +243,10 @@ public void should_not_start_execution_if_result_complete( node2Behavior.verifyNoWrite(); verify(nodeMetricUpdater1) - .isEnabled(DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); + .isEnabled(DseNodeMetrics.GRAPH_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); verify(nodeMetricUpdater1) .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), + eq(DseNodeMetrics.GRAPH_MESSAGES), eq(DriverExecutionProfile.DEFAULT_NAME), anyLong(), eq(TimeUnit.NANOSECONDS)); diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index ba6c6fe9bd6..9179aa1cb15 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -11,8 +11,15 @@ import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.tenGraphRows; import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseNodeMetrics; +import com.datastax.dse.driver.DseSessionMetric; import com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; @@ -59,6 +66,8 @@ public void setup() { @Test @UseDataProvider(location = DseTestDataProviders.class, value = "supportedGraphProtocols") public void should_return_paged_results(GraphProtocol graphProtocol) throws IOException { + String profileName = "test-graph"; + when(nodeMetricUpdater1.isEnabled(DseNodeMetrics.GRAPH_MESSAGES, profileName)).thenReturn(true); GraphBinaryModule module = createGraphBinaryModule(mockContext); @@ -69,7 +78,7 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx try (RequestHandlerTestHarness harness = builder.build()) { GraphStatement graphStatement = - ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); + ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName(profileName); ContinuousGraphRequestHandler handler = new 
ContinuousGraphRequestHandler( @@ -119,6 +128,8 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx assertThat(executionInfo.getSuccessfulExecutionIndex()).isEqualTo(0); assertThat(executionInfo.getWarnings()).isEmpty(); }); + + validateMetrics(profileName, harness); } } @@ -220,4 +231,27 @@ public void should_honor_statement_timeout() throws Exception { .hasMessageContaining("Query timed out after " + statementTimeout); } } + + private void validateMetrics(String profileName, RequestHandlerTestHarness harness) { + // GRAPH_MESSAGES metrics check call is invoked twice (once per page) + verify(nodeMetricUpdater1, times(2)).isEnabled(DseNodeMetrics.GRAPH_MESSAGES, profileName); + // GRAPH_MESSAGES metrics update is invoked only for the first page + verify(nodeMetricUpdater1, times(1)) + .updateTimer( + eq(DseNodeMetrics.GRAPH_MESSAGES), + eq(profileName), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + verifyNoMoreInteractions(nodeMetricUpdater1); + + verify(harness.getSession().getMetricUpdater()) + .isEnabled(DseSessionMetric.GRAPH_REQUESTS, profileName); + verify(harness.getSession().getMetricUpdater()) + .updateTimer( + eq(DseSessionMetric.GRAPH_REQUESTS), + eq(profileName), + anyLong(), + eq(TimeUnit.NANOSECONDS)); + verifyNoMoreInteractions(harness.getSession().getMetricUpdater()); + } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index 8c80c219b10..c1e47d7901f 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -35,6 +35,7 @@ import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; +import com.datastax.dse.driver.DseNodeMetrics; import com.datastax.dse.driver.DseSessionMetric; import 
com.datastax.dse.driver.DseTestDataProviders; import com.datastax.dse.driver.api.core.config.DseDriverOption; @@ -54,7 +55,6 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.cql.Conversions; @@ -465,7 +465,7 @@ public static Object[][] supportedGraphProtocolsWithDseVersions() { public void should_invoke_request_tracker_and_update_metrics( GraphProtocol graphProtocol, Version dseVersion) throws IOException { when(nodeMetricUpdater1.isEnabled( - DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME)) + DseNodeMetrics.GRAPH_MESSAGES, DriverExecutionProfile.DEFAULT_NAME)) .thenReturn(true); Builder builder = @@ -530,20 +530,20 @@ public void should_invoke_request_tracker_and_update_metrics( verifyNoMoreInteractions(requestTracker); verify(nodeMetricUpdater1) - .isEnabled(DefaultNodeMetric.CQL_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); + .isEnabled(DseNodeMetrics.GRAPH_MESSAGES, DriverExecutionProfile.DEFAULT_NAME); verify(nodeMetricUpdater1) .updateTimer( - eq(DefaultNodeMetric.CQL_MESSAGES), + eq(DseNodeMetrics.GRAPH_MESSAGES), eq(DriverExecutionProfile.DEFAULT_NAME), anyLong(), eq(TimeUnit.NANOSECONDS)); verifyNoMoreInteractions(nodeMetricUpdater1); verify(harness.getSession().getMetricUpdater()) - .isEnabled(DseSessionMetric.CONTINUOUS_CQL_REQUESTS, DriverExecutionProfile.DEFAULT_NAME); + .isEnabled(DseSessionMetric.GRAPH_REQUESTS, DriverExecutionProfile.DEFAULT_NAME); verify(harness.getSession().getMetricUpdater()) .updateTimer( - eq(DseSessionMetric.CONTINUOUS_CQL_REQUESTS), + eq(DseSessionMetric.GRAPH_REQUESTS), eq(DriverExecutionProfile.DEFAULT_NAME), anyLong(), 
eq(TimeUnit.NANOSECONDS)); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index d2fd6e0eeca..be91bd133c9 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -10,21 +10,30 @@ import static org.assertj.core.api.Assertions.fail; import static org.assertj.core.api.AssertionsForInterfaceTypes.assertThat; +import com.codahale.metrics.Timer; +import com.datastax.dse.driver.DseNodeMetrics; +import com.datastax.dse.driver.DseSessionMetric; +import com.datastax.dse.driver.api.core.DseSession; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; import com.datastax.dse.driver.api.testinfra.session.DseSessionRule; import com.datastax.dse.driver.internal.core.graph.MultiPageGraphResultSet; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.util.CountingIterator; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.net.SocketAddress; import java.time.Duration; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import 
java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; @@ -42,7 +51,17 @@ public class GraphPagingIT { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); private static final DseSessionRule SESSION_RULE = - GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE).build(); + GraphTestSupport.getCoreGraphSessionBuilder(CCM_RULE) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + Collections.singletonList(DseSessionMetric.GRAPH_REQUESTS.getPath())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Collections.singletonList(DseNodeMetrics.GRAPH_MESSAGES.getPath())) + .build()) + .build(); @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @@ -105,6 +124,7 @@ public void synchronous_paging_with_options(Options options) { assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); assertIfMultiPage(result, options.expectedPages); + validateMetrics(SESSION_RULE.session()); } @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") @@ -142,6 +162,7 @@ public void synchronous_paging_with_options_when_auto(Options options) { .isEqualTo(firstCcmNode()); assertIfMultiPage(result, options.expectedPages); + validateMetrics(SESSION_RULE.session()); } private void assertIfMultiPage(GraphResultSet result, int expectedPages) { @@ -188,6 +209,7 @@ public void synchronous_options_with_paging_disabled_should_fallback_to_single_p assertThat(result.getRequestExecutionInfo()).isNotNull(); assertThat(result.getRequestExecutionInfo().getCoordinator().getEndPoint().resolve()) .isEqualTo(firstCcmNode()); + validateMetrics(SESSION_RULE.session()); } @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") @@ -214,6 +236,7 @@ public void asynchronous_paging_with_options(Options 
options) // then checkAsyncResult(result, options, 0, 1, new ArrayList<>()); + validateMetrics(SESSION_RULE.session()); } @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") @@ -240,6 +263,7 @@ public void asynchronous_paging_with_options_when_auto(Options options) // then checkAsyncResult(result, options, 0, 1, new ArrayList<>()); + validateMetrics(SESSION_RULE.session()); } @UseDataProvider(location = ContinuousPagingITBase.class, value = "pagingOptions") @@ -271,6 +295,7 @@ public void asynchronous_options_with_paging_disabled_should_fallback_to_single_ assertThat(node.asString()).isEqualTo("user" + i); } assertThat(asyncGraphResultSet.remaining()).isEqualTo(0); + validateMetrics(SESSION_RULE.session()); } private void checkAsyncResult( @@ -485,4 +510,18 @@ private DriverExecutionProfile enableGraphPaging( private SocketAddress firstCcmNode() { return CCM_RULE.getContactPoints().iterator().next().resolve(); } + + private void validateMetrics(DseSession session) { + Node node = session.getMetadata().getNodes().values().iterator().next(); + assertThat(session.getMetrics()).isPresent(); + Metrics metrics = session.getMetrics().get(); + assertThat(metrics.getNodeMetric(node, DseNodeMetrics.GRAPH_MESSAGES)).isPresent(); + Timer messages = (Timer) metrics.getNodeMetric(node, DseNodeMetrics.GRAPH_MESSAGES).get(); + assertThat(messages.getCount()).isGreaterThan(0); + assertThat(messages.getMeanRate()).isGreaterThan(0); + assertThat(metrics.getSessionMetric(DseSessionMetric.GRAPH_REQUESTS)).isPresent(); + Timer requests = (Timer) metrics.getSessionMetric(DseSessionMetric.GRAPH_REQUESTS).get(); + assertThat(requests.getCount()).isGreaterThan(0); + assertThat(requests.getMeanRate()).isGreaterThan(0); + } } From 659c50821813aecd3689362649c5f169538d567b Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 20 Dec 2019 14:02:53 -0600 Subject: [PATCH 347/979] Fix formatting after era4 rebase --- core-shaded/pom.xml | 6 +----- core/pom.xml | 7 
+------ .../driver/api/core/graph/PagingEnabledOptions.java | 13 +++++++++++-- .../api/core/graph/predicates/CqlCollection.java | 13 +++++++++++-- .../api/core/metadata/schema/DseEdgeMetadata.java | 13 +++++++++++-- .../metadata/schema/DseGraphKeyspaceMetadata.java | 13 +++++++++++-- .../core/metadata/schema/DseGraphTableMetadata.java | 13 +++++++++++-- .../api/core/metadata/schema/DseVertexMetadata.java | 13 +++++++++++-- .../continuous/ContinuousRequestHandlerBase.java | 13 +++++++++++-- .../dse/driver/internal/core/graph/ByteBufUtil.java | 13 +++++++++++-- .../core/graph/ContinuousAsyncGraphResultSet.java | 13 +++++++++++-- .../core/graph/ContinuousGraphRequestHandler.java | 13 +++++++++++-- .../internal/core/graph/CqlCollectionPredicate.java | 13 +++++++++++-- .../driver/internal/core/graph/GraphProtocol.java | 13 +++++++++++-- .../internal/core/graph/GraphResultIterator.java | 13 +++++++++++-- .../internal/core/graph/GraphSupportChecker.java | 13 +++++++++++-- .../core/graph/MultiPageGraphResultSet.java | 13 +++++++++++-- .../internal/core/graph/TinkerpopBufferUtil.java | 13 +++++++++++-- .../AbstractDynamicGraphBinaryCustomSerializer.java | 13 +++++++++++-- .../AbstractSimpleGraphBinaryCustomSerializer.java | 13 +++++++++++-- .../graph/binary/ComplexTypeSerializerUtil.java | 13 +++++++++++-- .../core/graph/binary/CqlDurationSerializer.java | 13 +++++++++++-- .../core/graph/binary/DistanceSerializer.java | 13 +++++++++++-- .../core/graph/binary/EditDistanceSerializer.java | 13 +++++++++++-- .../core/graph/binary/GeometrySerializer.java | 13 +++++++++++-- .../core/graph/binary/GraphBinaryModule.java | 13 +++++++++++-- .../core/graph/binary/GraphBinaryUtils.java | 13 +++++++++++-- .../core/graph/binary/LineStringSerializer.java | 13 +++++++++++-- .../internal/core/graph/binary/PairSerializer.java | 13 +++++++++++-- .../internal/core/graph/binary/PointSerializer.java | 13 +++++++++++-- .../core/graph/binary/PolygonSerializer.java | 13 +++++++++++-- 
.../core/graph/binary/TupleValueSerializer.java | 13 +++++++++++-- .../core/graph/binary/UdtValueSerializer.java | 13 +++++++++++-- .../core/graph/binary/buffer/DseNettyBuffer.java | 13 +++++++++++-- .../graph/binary/buffer/DseNettyBufferFactory.java | 13 +++++++++++-- .../metadata/schema/DefaultDseEdgeMetadata.java | 13 +++++++++++-- .../metadata/schema/DefaultDseVertexMetadata.java | 13 +++++++++++-- .../internal/core/metadata/schema/ScriptHelper.java | 13 +++++++++++-- .../metadata/schema/queries/Dse68SchemaQueries.java | 13 +++++++++++-- .../metadata/schema/queries/Dse68SchemaRows.java | 13 +++++++++++-- .../protocol/TinkerpopBufferPrimitiveCodec.java | 13 +++++++++++-- .../java/com/datastax/dse/driver/Assertions.java | 13 +++++++++++-- .../datastax/dse/driver/TinkerpopBufferAssert.java | 13 +++++++++++-- .../core/graph/predicates/CqlCollectionTest.java | 13 +++++++++++-- .../core/context/DseStartupOptionsBuilderTest.java | 1 - ...GraphRequestHandlerSpeculativeExecutionTest.java | 13 +++++++++++-- .../graph/ContinuousGraphRequestHandlerTest.java | 13 +++++++++++-- .../internal/core/graph/GraphResultSetTestBase.java | 13 +++++++++++-- .../internal/core/graph/GraphResultSetsTest.java | 13 +++++++++++-- .../core/graph/GraphSupportCheckerTest.java | 13 +++++++++++-- .../driver/internal/core/graph/GraphTestUtils.java | 13 +++++++++++-- .../core/graph/binary/GraphBinaryDataTypesTest.java | 13 +++++++++++-- .../schema/refresh/GraphSchemaRefreshTest.java | 13 +++++++++++-- .../protocol/TinkerpopBufferPrimitiveCodecTest.java | 13 +++++++++++-- examples/pom.xml | 2 +- .../core/graph/ClassicGraphGeoSearchIndexIT.java | 13 +++++++++++-- .../core/graph/ClassicGraphTextSearchIndexIT.java | 13 +++++++++++-- .../api/core/graph/CoreGraphDataTypeITBase.java | 13 +++++++++++-- .../api/core/graph/CoreGraphGeoSearchIndexIT.java | 13 +++++++++++-- .../api/core/graph/CoreGraphTextSearchIndexIT.java | 13 +++++++++++-- .../dse/driver/api/core/graph/CqlCollectionIT.java | 13 
+++++++++++-- .../api/core/graph/GraphGeoSearchIndexITBase.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphPagingIT.java | 13 +++++++++++-- .../api/core/graph/GraphSpeculativeExecutionIT.java | 13 +++++++++++-- .../dse/driver/api/core/graph/GraphTestSupport.java | 13 +++++++++++-- .../graph/remote/ClassicGraphTraversalRemoteIT.java | 13 +++++++++++-- .../graph/remote/CoreGraphDataTypeRemoteIT.java | 13 +++++++++++-- .../graph/remote/CoreGraphTraversalRemoteIT.java | 13 +++++++++++-- .../graph/remote/GraphTraversalRemoteITBase.java | 2 +- .../statement/ClassicGraphTraversalBatchIT.java | 13 +++++++++++-- .../graph/statement/ClassicGraphTraversalIT.java | 13 +++++++++++-- .../graph/statement/CoreGraphDataTypeFluentIT.java | 13 +++++++++++-- .../graph/statement/CoreGraphDataTypeScriptIT.java | 13 +++++++++++-- .../graph/statement/CoreGraphTraversalBatchIT.java | 13 +++++++++++-- .../core/graph/statement/CoreGraphTraversalIT.java | 13 +++++++++++-- .../graph/statement/GraphTraversalBatchITBase.java | 13 +++++++++++-- .../core/graph/statement/GraphTraversalITBase.java | 2 +- .../metadata/schema/KeyspaceGraphMetadataIT.java | 13 +++++++++++-- .../schema/TableGraphMetadataCaseSensitiveIT.java | 13 +++++++++++-- .../core/metadata/schema/TableGraphMetadataIT.java | 13 +++++++++++-- .../oss/driver/osgi/support/OsgiGraphTests.java | 3 +-- mapper-processor/pom.xml | 2 +- pom.xml | 4 ++-- test-infra/pom.xml | 2 +- .../testinfra/session/CqlSessionRuleBuilder.java | 1 - 85 files changed, 824 insertions(+), 170 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 92b82cb4f54..dae6e96943b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -307,11 +307,7 @@ Note: dependencies marked as optional are by default included with optional resolution in the manifest; we only need to manually set the resolution to optional for dependencies declared as non-optional in the pom files. 
- --> - org.reactivestreams.*;resolution:=optional, - org.apache.tinkerpop.*;resolution:=optional, - org.javatuples.*;resolution:=optional, - * + -->org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, * - com.esri.core.geometry.*;resolution:=optional, - org.reactivestreams.*;resolution:=optional, - org.apache.tinkerpop.*;resolution:=optional, - org.javatuples.*;resolution:=optional, - * + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, * com.datastax.oss.driver.*.core.*, com.datastax.dse.driver.*.core.* diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java index 762f229623c..b38a75204e1 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/PagingEnabledOptions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java index 9c05732fbdd..a009925f357 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollection.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java index dc6280a7df9..1a24a72e8b5 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseEdgeMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java index 631e73726d0..aebfb23753d 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphKeyspaceMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java index bae274486c5..e6e8f1dc5e5 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseGraphTableMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java index 50e99492485..db4b3b4f3fd 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/metadata/schema/DseVertexMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index eb6adbdc293..80763f4ba9b 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.cql.continuous; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java index dae9a741d98..d009adaf3f7 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ByteBufUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java index 3318f112995..06d66635117 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousAsyncGraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index 4b17dacfda7..2474bebe233 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java index 3fbc1a4d6a4..295df6a224d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/CqlCollectionPredicate.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java index 51e4078652f..373dbe2ad44 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphProtocol.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java index f0557a71c80..8a000f03c48 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphResultIterator.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java index 2f5a3cd049e..b9eb9c86025 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphSupportChecker.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java index f2d08c705ff..8c7ac3668c3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/MultiPageGraphResultSet.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java index 05092f8180d..f87cadee4fa 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/TinkerpopBufferUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java index 518837953b4..77be1e885d4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractDynamicGraphBinaryCustomSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java index fe9457b964d..976db31cc5d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java index 955bf9b3870..a01439942fd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/ComplexTypeSerializerUtil.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java index a6fefc92b2d..84a820c1f64 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/CqlDurationSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java index 420bec9a8f2..1b77115c0be 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/DistanceSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java index 0d17308e8b6..6949f94a6ba 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/EditDistanceSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java index d7428ab3f5f..baa650bfeaa 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GeometrySerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java index 828e90a3acd..f7aa0d071d9 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryModule.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java index 28c558a5da2..4bc1d3ab84f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java index 835dbeafa34..4f9917de779 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/LineStringSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java index 0afde1eca26..b80c25c6838 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PairSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java index 49529eea3e0..2c1a07f8e42 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PointSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java index e608ad73932..d42c630e68c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/PolygonSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java index 2b5745d8ac5..2780b9c75fb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/TupleValueSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java index 925af2fc27c..12d00f7beeb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/UdtValueSerializer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java index cd6759db870..bd07bf7b263 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBuffer.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph.binary.buffer; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java index a482893b7ac..192b0ecec53 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.binary.buffer; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java index 0223c341e3a..1c0f6628e5a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java index e51b5ebb5b7..efae93e65b6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java index 7e0d5a2a226..947e9a794b1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/ScriptHelper.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java index 281c8b6f751..a0a0ff9ef9c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaQueries.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.metadata.schema.queries; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java index 0a774ec2479..4e4195dbd43 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/queries/Dse68SchemaRows.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.metadata.schema.queries; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java index 75b98815954..978dd62a790 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.protocol; diff --git a/core/src/test/java/com/datastax/dse/driver/Assertions.java b/core/src/test/java/com/datastax/dse/driver/Assertions.java index a7a35a17638..11bf3082d72 100644 --- a/core/src/test/java/com/datastax/dse/driver/Assertions.java +++ b/core/src/test/java/com/datastax/dse/driver/Assertions.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver; diff --git a/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java b/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java index 0728e6f7cbd..462867e5d2f 100644 --- a/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java +++ b/core/src/test/java/com/datastax/dse/driver/TinkerpopBufferAssert.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver; diff --git a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java index fc22513448f..5c9fdce550c 100644 --- a/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/api/core/graph/predicates/CqlCollectionTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.predicates; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java index a15324e8c95..0d255488c3d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/context/DseStartupOptionsBuilderTest.java @@ -39,7 +39,6 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.mockito.Mockito; @RunWith(DataProviderRunner.class) public class DseStartupOptionsBuilderTest { diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java index 0326c656154..7b945c498be 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index 9179aa1cb15..eb37bf410df 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java index f1f3d70de62..6d14e80339b 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetTestBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java index 28056903942..bdcb526fba8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphResultSetsTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java index ff446076186..ffa4b77256a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphSupportCheckerTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java index fd44246833b..96419540680 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphTestUtils.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java index b730ff198fe..14cd3857081 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/binary/GraphBinaryDataTypesTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.internal.core.graph.binary; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java index ff93f6ff9c5..3a4887e8691 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.graph.schema.refresh; diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java index 52368c72b83..7dd240baf99 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.internal.core.protocol; diff --git a/examples/pom.xml b/examples/pom.xml index 01f024aaa23..c00dc9e9951 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java index 29307722804..25d07c15505 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java index c9675c54504..88dde1efe3f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java index 1d4c75518bd..4a9fc85f4f7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java index 192878f5ff6..82de1bfbc9d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java index 8db300730af..d8eb58328b2 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java index e026e178a24..f83af3edad5 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java index 166f930e5f9..52333be3cea 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphGeoSearchIndexITBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index be91bd133c9..a48eaeb3093 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java index 44e16619349..1286d4d5a56 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java index 38f5d9d5f54..594f790d69c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTestSupport.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java index 7297bf2cf43..8a7ec454719 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java index 355ea35af07..0de9ead5a99 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java index 5f7a4759e91..fc7aa8dfd5c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.remote; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java index 31fff66d1a7..4638eb75690 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java @@ -25,8 +25,8 @@ import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.dse.driver.api.core.graph.TinkerPathAssert; import com.datastax.dse.driver.api.core.graph.__; -import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java index bfcb54a535d..1230f9e7c10 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java index b69412a6a07..d12cdeb4e06 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java index 93b1ab692b4..e56ed4faf07 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java index 4a9bb125095..c11bbc89e1c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java index 11cbd453b9c..f8c08a0d440 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java index f45cb5b06f1..ad00fe63987 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. 
* - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java index ffcae3faf58..7f87806f7b9 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.graph.statement; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java index 5ed2a31830b..abcd38a336e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java @@ -30,9 +30,9 @@ import com.datastax.dse.driver.api.core.graph.GraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java index 9e35103e03a..ddf5387e103 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. 
Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java index e5e50e7b408..953dc8091ea 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. */ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java index 86c9d9a6ede..c6bb55c135e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java @@ -1,8 +1,17 @@ /* * Copyright DataStax, Inc. * - * This software can be used solely with DataStax Enterprise. Please consult the license at - * http://www.datastax.com/terms/datastax-dse-driver-license-terms + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
*/ package com.datastax.dse.driver.api.core.metadata.schema; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java index 86587c021a4..5527e147d24 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java @@ -25,9 +25,8 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.testinfra.DseSessionBuilderInstantiator; import com.datastax.dse.driver.internal.core.graph.GraphProtocol; -import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; import java.util.List; import org.apache.tinkerpop.gremlin.structure.Vertex; diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 603e5ff8190..8f41bbbb688 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -179,4 +179,4 @@ - \ No newline at end of file + diff --git a/pom.xml b/pom.xml index 7ac3397cc82..17983076834 100644 --- a/pom.xml +++ b/pom.xml @@ -51,7 +51,6 @@ 3.4.5 1.7.26 1.2.1 - 3.4.3-20190731-199be4b5 1.0.2 2.10.0 2.10.0 @@ -82,7 +81,6 @@ false ${skipTests} - @@ -186,6 +184,8 @@ com.github.spotbugs spotbugs-annotations 3.1.12 + + org.javatuples javatuples 1.2 diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e1fda41d2f4..59a962112a7 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0-ngdg-SNAPSHOT java-driver-test-infra bundle diff --git 
a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java index d24bece6ee2..2b9d0447374 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/CqlSessionRuleBuilder.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.internal.core.config.typesafe.DefaultDseDriverConfigLoader; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; From 0bc3ff4d7936c89363a3b3ac65939bb5cc175c8b Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 20 Dec 2019 15:13:28 -0600 Subject: [PATCH 348/979] Fix initial rebase compile issues --- core-shaded/pom.xml | 6 ++- .../ContinuousCqlRequestHandler.java | 2 +- .../graph/ContinuousGraphRequestHandler.java | 4 +- .../core/graph/DseGraphRemoteConnection.java | 20 +------- .../graph/GraphRequestAsyncProcessor.java | 2 +- .../core/graph/GraphSupportChecker.java | 2 +- .../binary/ComplexTypeSerializerUtil.java | 7 +-- .../core/graph/binary/GraphBinaryModule.java | 4 +- .../graph/binary/TupleValueSerializer.java | 6 +-- .../core/graph/binary/UdtValueSerializer.java | 6 +-- .../core/context/DefaultDriverContext.java | 4 +- .../datastax/dse/driver/DseTestFixtures.java | 6 +-- ...equestHandlerSpeculativeExecutionTest.java | 2 +- .../ContinuousGraphRequestHandlerTest.java | 8 ++-- .../internal/core/graph/GraphNodeTest.java | 4 +- .../core/graph/GraphRequestHandlerTest.java | 4 +- .../graph/GraphRequestHandlerTestHarness.java | 6 +-- .../core/graph/GraphSupportCheckerTest.java | 18 +++---- 
.../internal/core/graph/GraphTestUtils.java | 4 +- .../binary/GraphBinaryDataTypesTest.java | 4 +- .../graph/ClassicGraphGeoSearchIndexIT.java | 10 ++-- .../graph/ClassicGraphTextSearchIndexIT.java | 10 ++-- .../core/graph/CoreGraphDataTypeITBase.java | 8 ++-- .../core/graph/CoreGraphGeoSearchIndexIT.java | 7 +-- .../graph/CoreGraphTextSearchIndexIT.java | 7 +-- .../api/core/graph/CqlCollectionIT.java | 22 ++++----- .../driver/api/core/graph/GraphPagingIT.java | 12 ++--- .../graph/GraphSpeculativeExecutionIT.java | 6 +-- .../api/core/graph/GraphTestSupport.java | 10 ++-- .../remote/ClassicGraphDataTypeRemoteIT.java | 2 +- .../remote/ClassicGraphTraversalRemoteIT.java | 8 ++-- .../remote/CoreGraphDataTypeRemoteIT.java | 8 ++-- .../remote/CoreGraphTraversalRemoteIT.java | 8 ++-- .../ClassicGraphDataTypeFluentIT.java | 2 +- .../ClassicGraphDataTypeScriptIT.java | 2 +- .../ClassicGraphTraversalBatchIT.java | 8 ++-- .../statement/ClassicGraphTraversalIT.java | 8 ++-- .../statement/CoreGraphDataTypeFluentIT.java | 8 ++-- .../statement/CoreGraphDataTypeScriptIT.java | 8 ++-- .../statement/CoreGraphTraversalBatchIT.java | 8 ++-- .../graph/statement/CoreGraphTraversalIT.java | 8 ++-- .../statement/GraphTraversalBatchITBase.java | 4 +- .../schema/KeyspaceGraphMetadataIT.java | 16 +++---- .../TableGraphMetadataCaseSensitiveIT.java | 12 ++--- .../metadata/schema/TableGraphMetadataIT.java | 12 ++--- .../datastax/oss/driver/osgi/OsgiGraphIT.java | 6 +++ .../oss/driver/osgi/OsgiShadedIT.java | 1 + .../driver/osgi/support/BundleOptions.java | 20 ++++---- .../driver/osgi/support/OsgiGraphTests.java | 8 ++-- .../api/testinfra/session/SessionRule.java | 48 +++++++++++++++---- 50 files changed, 223 insertions(+), 193 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index dae6e96943b..f65d1fbdf15 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -307,7 +307,11 @@ Note: dependencies marked as optional are by default included with optional resolution in the 
manifest; we only need to manually set the resolution to optional for dependencies declared as non-optional in the pom files. - -->org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, * + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional,org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, + !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*,!com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, * -### NGDG (in progress) +### 4.5.0 (in progress) - [new feature] JAVA-2064: Add support for DSE 6.8 graph options in schema builder - [documentation] JAVA-2559: Fix GraphNode javadocs @@ -25,9 +25,6 @@ - [improvement] JAVA-2245: Rename graph engine Legacy to Classic and Modern to Core - [new feature] JAVA-2099: Enable Paging Through DSE Driver for Gremlin Traversals (2.x) - [new feature] JAVA-1898: Expose new table-level graph metadata - -### 4.5.0 (in progress) - - [bug] JAVA-2642: Fix default value of max-orphan-requests - [bug] JAVA-2644: Revisit channel selection when pool size > 1 - [bug] JAVA-2630: Correctly handle custom classes in IndexMetadata.describe From 09f6ae0457738c6e13e74e471b149b6da8925a26 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 10:14:09 +0100 Subject: [PATCH 363/979] Exclude netty-all from gremlin-driver This exclusion avoids duplicates on the driver's classpath of Netty classes, which is particularly bad when generating the shaded jar. 
--- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index 0d79062d733..6764f6cc8b5 100644 --- a/pom.xml +++ b/pom.xml @@ -163,6 +163,12 @@ org.apache.tinkerpop gremlin-driver ${tinkerpop.version} + + + io.netty + netty-all + + org.reactivestreams From ce140fad68704a72b409e8f2b4be94f336512d11 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 14:50:09 +0100 Subject: [PATCH 364/979] Remove deprecation warnings in ContinuousGraphRequestHandlerSpeculativeExecutionTest This commit is a follow-up to JAVA-2527 and removes calls to the deprecated method AllNodesFailedException.getErrors() in ContinuousGraphRequestHandlerSpeculativeExecutionTest. --- ...aphRequestHandlerSpeculativeExecutionTest.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java index 002277fcb5b..203b49a7ffb 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerSpeculativeExecutionTest.java @@ -50,6 +50,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; @@ -340,10 +341,11 @@ public void should_fail_if_no_more_nodes_and_initial_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); 
assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } @@ -401,10 +403,11 @@ public void should_fail_if_no_more_nodes_and_speculative_execution_is_last( .isFailed( error -> { assertThat(error).isInstanceOf(AllNodesFailedException.class); - Map nodeErrors = ((AllNodesFailedException) error).getErrors(); + Map> nodeErrors = + ((AllNodesFailedException) error).getAllErrors(); assertThat(nodeErrors).containsOnlyKeys(node1, node2); - assertThat(nodeErrors.get(node1)).isInstanceOf(BootstrappingException.class); - assertThat(nodeErrors.get(node2)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node1).get(0)).isInstanceOf(BootstrappingException.class); + assertThat(nodeErrors.get(node2).get(0)).isInstanceOf(BootstrappingException.class); }); } } From 8ab4efdb69cf8a81398815f69dd9c833871a0e84 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Thu, 20 Feb 2020 02:34:29 -0600 Subject: [PATCH 365/979] JAVA-2650: Race condition on ContinuousRequestHandler init (#52) --- .../core/cql/continuous/ContinuousCqlRequestHandler.java | 4 +++- .../internal/core/graph/ContinuousGraphRequestHandler.java | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index 0701c01d0de..ca2631fae75 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -69,7 +69,6 @@ public class ContinuousCqlRequestHandler DseSessionMetric.CONTINUOUS_CQL_REQUESTS, DefaultNodeMetric.CQL_MESSAGES); message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); - throttler.register(this); firstPageTimeout = executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); otherPagesTimeout = @@ -77,6 +76,9 @@ public class ContinuousCqlRequestHandler maxEnqueuedPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); + // NOTE that ordering of the following statement matters. + // We should register this request after all fields have been initialized. + throttler.register(this); } @NonNull diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index fceaeeee0ce..0e4d79f90d3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -78,7 +78,6 @@ public class ContinuousGraphRequestHandler message = GraphConversions.createContinuousMessageFromGraphStatement( statement, subProtocol, executionProfile, context, graphBinaryModule); - throttler.register(this); globalTimeout = MoreObjects.firstNonNull( statement.getTimeout(), @@ -86,6 +85,9 @@ public class ContinuousGraphRequestHandler maxEnqueuedPages = executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); maxPages = executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES); + // NOTE that ordering of the following statement matters. 
+ // We should register this request after all fields have been initialized. + throttler.register(this); } @NonNull From 961dc3dc65c6d90fab09cb2be03a174b6276cafe Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 22:54:14 +0100 Subject: [PATCH 366/979] Exclude org.xml.sax, org.w3c.dom and reactor.blockhound from the imported packages org.xml.sax and org.w3c.dom are used by Jackson, but the driver does not need it. reactor.blockhound is used by Netty, but again, the driver does not need it. --- core-shaded/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index f65d1fbdf15..2d328ba65ec 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -311,7 +311,7 @@ !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*,!com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, * + -->!com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*,!com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, !reactor.blockhound.*, * 1.1.7.3 1.6.0 @@ -69,13 +70,8 @@ 2.0.1 1.1.4 2.2.2 - 20180130 - - 1.9.12 4.0.2 2.0.0-M19 - - 20180130 2.22.2 false ${skipTests} From ca8de6ac15d7e0a15f5476f35481b417f823afc0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 19 Feb 2020 11:17:12 +0100 Subject: [PATCH 368/979] Explicitly include org.json:json as direct dependency org.json:json is a 
transitive dependency of ESRI. ESRI pulls in the version 20090211, which not only is very old, but also has the inconvenience of not being an OSGi bundle, and not having attached sources. Using a more recent version (20190722) allows us to generate better javadocs for the shaded jar, and to run OSGi tests that exercise geotypes. --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index 5d18cc9dbc8..17c46b54eeb 100644 --- a/pom.xml +++ b/pom.xml @@ -145,6 +145,11 @@ esri-geometry-api ${esri.version} + + org.json + json + ${json.version} + org.apache.tinkerpop gremlin-core From 18f213afa1bfdb50a2690f723233a8deb3f1c210 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 19 Feb 2020 11:19:06 +0100 Subject: [PATCH 369/979] Improve filtering of META-INF resources when generating shaded contents --- core-shaded/pom.xml | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 2d328ba65ec..6de66066d72 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -180,8 +180,31 @@ - + + + com.datastax.oss:* + + META-INF/** + + + + io.netty:* + + META-INF/** + + + + com.esri.geometry:* + + META-INF/** + + + + org.json:* + + META-INF/** + + org.codehaus.jackson:* @@ -217,11 +240,6 @@ ${project.build.outputDirectory} - - META-INF/maven/com.datastax.oss/java-driver-core/**, META-INF/maven/io.netty/**, META-INF/maven/com.esri.geometry/**, META-INF/maven/org.json/** From 1fbf65f6b07ca7b9bf699691b098d31931a80497 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 19 Feb 2020 11:57:07 +0100 Subject: [PATCH 370/979] Fix Export-Package directive for mapper-runtime The Export-Package directive was missing the export of com.datastax.dse.driver.api.mapper.reactive, a package introduced since the unification of OSS and DSE drivers. 
--- mapper-runtime/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 05277a7f60f..ff94a5fa877 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -161,7 +161,7 @@ and the driver can live without them. --> !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, * - com.datastax.oss.driver.*.mapper.* + com.datastax.*.driver.*.mapper.* From 67be24b2719012b5e191df60bf64ac956b1526dc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 10:14:51 +0100 Subject: [PATCH 371/979] Fix unbalanced HTML tag in javadocs of HierarchyScanner --- .../driver/internal/mapper/processor/util/HierarchyScanner.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java index 65a3a1da580..dc4efa7286e 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/HierarchyScanner.java @@ -60,7 +60,7 @@ private static final class ClassForDefaultScanStrategy {} * later use (newInterfacesToScan) *

    • Visit interfacesToScan, and append those interface's parents to * newInterfacesToScan for later use) - *
    • If superClassElement != null Set typeElement := + *
    • If superClassElement != null Set typeElement := * superClassElement, interfacesToScan := newInterfacesToScan and repeat starting at * step 3 *
    • Visit newInterfacesToScan interfaces and their parents until we've reached From 7c5b0890874d5de4746fa677b22837f9399df003 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 11:23:13 +0100 Subject: [PATCH 372/979] Consistently name all Javadoc plugin executions "attach-javadocs" This ensures that: 1) All executions correctly inherit from settings declared in the parent pom, and notably that the Google Tag Manager snippet is inserted in all generated HTML pages. 2) The javadoc execution happens only once per project, including in the shaded-core module. 3) the Javadoc plugin execution happens before that of the assembly plugin in the distribution module. --- core-shaded/pom.xml | 2 +- distribution/pom.xml | 8 ++------ 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 6de66066d72..4db8dd085d6 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -267,7 +267,7 @@ maven-javadoc-plugin - attach-shaded-javadocs + attach-javadocs jar diff --git a/distribution/pom.xml b/distribution/pom.xml index 5b9c60a83a4..278d382a055 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -120,12 +120,8 @@ maven-javadoc-plugin - dependencies-javadoc - - process-classes + attach-javadocs + package jar From af5979f4f94c368c5ea5c7ad47f34da56f8e8715 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 Feb 2020 12:12:11 +0100 Subject: [PATCH 373/979] Exclude internal root packages from Javadoc generation and their subpackages --- core-shaded/pom.xml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4db8dd085d6..af9deddcef8 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -273,7 +273,7 @@ ${project.build.directory}/shaded-sources - com.datastax.oss.driver.internal.*,com.datastax.dse.driver.internal.*,com.datastax.oss.driver.shaded.* + 
com.datastax.*.driver.internal*,com.datastax.oss.driver.shaded* diff --git a/examples/pom.xml b/examples/pom.xml index c00dc9e9951..01f024aaa23 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 0d8a009c7ca..df3e8637545 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 8f41bbbb688..2f0e5a4c276 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ff94a5fa877..ddc3c80e1dc 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index dd9535460ba..1721dc5e668 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 581e30b5af1..c846bbc5466 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 59a962112a7..e1fda41d2f4 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-ngdg-SNAPSHOT + 4.5.0-SNAPSHOT java-driver-test-infra bundle From c225588811a85e25d98790ce4b4f399e0ca066e4 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 24 Feb 2020 15:58:44 -0800 Subject: [PATCH 381/979] Adjust verify timeouts in unit test --- .../driver/internal/core/pool/ChannelPoolShutdownTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java index b6249ac2554..a2a099981a2 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolShutdownTest.java @@ -71,7 +71,7 @@ public void should_close_all_channels_when_closed() throws Exception { inOrder.verify(eventBus, VERIFY_TIMEOUT.times(1)).fire(ChannelEvent.channelClosed(node)); // Reconnection should have kicked in and started to open channel4, do not complete it yet - verify(reconnectionSchedule).nextDelay(); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); factoryHelper.waitForCalls(node, 1); CompletionStage closeFuture = pool.closeAsync(); @@ -138,7 +138,7 @@ public void should_force_close_all_channels_when_force_closed() throws Exception inOrder.verify(eventBus, VERIFY_TIMEOUT.times(1)).fire(ChannelEvent.channelClosed(node)); // Reconnection should have kicked in and started to open a channel, do 
not complete it yet - verify(reconnectionSchedule).nextDelay(); + verify(reconnectionSchedule, VERIFY_TIMEOUT).nextDelay(); factoryHelper.waitForCalls(node, 1); CompletionStage closeFuture = pool.forceCloseAsync(); From a4f47d8f95ac9d0c8dfde145f1e1be8e8a40bafe Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 24 Feb 2020 18:11:19 -0800 Subject: [PATCH 382/979] Update version in docs --- README.md | 2 +- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++++----- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 8 ++--- manual/core/detachable_types/README.md | 14 ++++---- manual/core/dse/geotypes/README.md | 6 ++-- manual/core/dse/graph/README.md | 4 +-- manual/core/dse/graph/fluent/README.md | 4 +-- .../core/dse/graph/fluent/explicit/README.md | 12 +++---- manual/core/dse/graph/results/README.md | 6 ++-- manual/core/dse/graph/script/README.md | 6 ++-- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 28 +++++++-------- manual/core/metadata/schema/README.md | 20 +++++------ manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 8 ++--- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reactive/README.md | 2 +- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 4 +-- manual/core/statements/README.md | 8 
++--- manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/delete/README.md | 16 ++++----- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 12 +++---- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 18 +++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 24 ++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 10 +++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 2 +- manual/query_builder/README.md | 10 +++--- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- 76 files changed, 307 insertions(+), 307 deletions(-) diff --git a/README.md b/README.md 
index 65406ff5688..ecf55ddaca5 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.4.0](https://github.com/datastax/java-driver/tree/4.4.0).* +[4.5.0](https://github.com/datastax/java-driver/tree/4.5.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index 615aa14a374..8e16266c645 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.5.0 (in progress) +### 4.5.0 - [bug] JAVA-2654: Make AdminRequestHandler handle integer serialization - [improvement] JAVA-2618: Improve error handling in request handlers diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 3adb1bb7572..d88089bb364 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 9a68793745c..87ab66119a5 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 40d7a485220..0d608fdc229 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index f15efce09d2..c2c5b696709 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 1c3f5f72c90..4249a4fbbf1 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 119c93650a6..f870405d9bf 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index b549708b102..57c9ab447e6 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few 
options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 83057758545..c69233e9c38 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -255,7 +255,7 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[SessionBuilder.addTypeCodecs]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 0b823cc6bd6..e328b5f0428 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 05448529069..bc4bd75f0ba 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index b7ff46a33ed..f351148fcd0 100644 --- a/manual/core/dse/graph/README.md +++ 
b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. [Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index b2ba9d6e141..febdbd833b8 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 7db57adc651..26c63276cee 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ 
b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 08904e7406d..5da23f42812 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index e7474fe374d..474004ba4cb 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index ddbe51b9f81..6b3f0598be4 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 2c288ffd68d..6542ce9b9e4 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -502,6 +502,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index a560d425b7b..0d089a98d37 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index c4e6ed83aca..66c5af266f5 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 2e8749bf84e..f56302768da 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 00947d40980..34af1f32bb4 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index c3838e946bc..9d277c0a5d1 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 8e3ed4f300d..152655dfbcd 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -120,6 +120,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index ab844124871..4a92db7eabd 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -186,9 +186,9 @@ think you can get away with the performance hit. We recommend that you: The [driver examples] include two complete web service implementations demonstrating forward-only and random (offset-based) paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/performance/README.md 
b/manual/core/performance/README.md index 4971f6f79fb..69123907244 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md 
b/manual/core/pooling/README.md index 6057ee4fa93..d735674bd11 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -158,5 +158,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 0a11d641d98..3627a1fa4d1 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-ffi [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 519decf51e4..ae4f3fcfa57 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -363,7 +363,7 @@ more fine-grained control of what should be retried, and how, is required. 
[reactive streams]: https://en.wikipedia.org/wiki/Reactive_Streams [Reactive Streams API]: https://github.com/reactive-streams/reactive-streams-jvm -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html [ReactiveSession]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html [ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html [ReactiveResultSet]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index fca4b0dec6c..e858477d826 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 1f9ddb1ebc6..5fe96254055 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 1a20062ae85..935b43cf035 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 8f82aea95c5..dcc02fbdc1e 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 33480b70465..106aeb047f6 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -188,5 +188,5 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index c6bc200aa56..7805d49406f 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index eb5671b3d2e..97e22c29a50 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 2d5fc88e2d1..aed01d27859 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index a5f3b240bd4..285666b5fac 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index f396884ea79..1ff760c59a7 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index aad88010a12..e79256f9653 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index be54d610dac..35877fe2e77 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 5928f2fe445..a327cbcdefa 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 491ef4974f4..50b9310b6ea 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index af7d304dd15..d0d431b9af8 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 9da276b720b..83bbd8e5f15 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 0e604233639..78f6b969c97 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -142,14 +142,14 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 1bbad280908..6617ff364f6 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 06f0d345a8c..a5f013c7676 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -99,12 +99,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index fd1b3f6f81c..e05e6c0b4e7 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 
+93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index f8ff612546d..1b0c8728a58 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -106,15 +106,15 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 9ef5e5231ce..8e3204dbf2b 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 6c29304a0ca..1ef821e6424 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -133,18 +133,18 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 47f847a8f4f..e25eece6d5e 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. 
-[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 9d4913f8625..ff56dd625ce 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git 
a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 50189224fab..255e0d6f67a 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -134,12 +134,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 55634bcefc6..ddda5fa2820 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: 
http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 47f8a9441ee..ef4b8be7a65 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -207,8 +207,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index b0b153ac5e3..d337eab6b2f 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -94,5 +94,5 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-ffi -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 \ No newline at end of file diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 25fdcfdfc5d..196d1b168d7 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 6bf09947e3c..764de6ad967 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 0262daf0b77..0d12d260070 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 876b8a83e36..6f217480b8f 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index a33f26868ee..23a9b3acca1 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 07b0d8a5f4d..c604e8298b7 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 9ac8df978da..aced0084389 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 6dfdea05912..1d0bea64564 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 1a50ae9dad2..758b0fd6998 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index ee985c10a77..8430868f332 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 2dfebba8ce8..5963f33f5b3 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index a38775e4b65..46bde5dc902 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index a64fb6457fc..cbf27542b08 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 3fe6b359e2b..377cdd22021 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 0e9a427ebd4..265bd37452f 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 63bfa0ebb44..3fa0cefead9 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 8961be20e1e..7bbdf6ac8b4 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.4/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From 0880a97105aa57da6881b73c93dab534d7407360 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 24 Feb 2020 18:17:39 -0800 Subject: [PATCH 383/979] [maven-release-plugin] prepare release 4.5.0 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 583eaa833cd..18293ec41f3 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss java-driver-core-shaded - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss java-driver-mapper-processor - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss java-driver-mapper-runtime - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss java-driver-query-builder - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss java-driver-test-infra - 4.5.0-SNAPSHOT + 4.5.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 06e2b7c23de..eefaf6f1fec 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded 
deps diff --git a/core/pom.xml b/core/pom.xml index d23b93f587a..73bd34ece58 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index b50bc065a47..2e5d92a5f4c 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 01f024aaa23..a58c44107a8 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.0-SNAPSHOT + 4.5.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index df3e8637545..ffa94217c39 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 2f0e5a4c276..a20fede9ad0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ddc3c80e1dc..67fc2817a5d 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index 1721dc5e668..c9b635a8f04 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 
(CQL3) and Cassandra's native protocol versions 3 and above. @@ -876,7 +876,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.5.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c846bbc5466..2b189bde432 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e1fda41d2f4..4f5bbf1b4ec 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0-SNAPSHOT + 4.5.0 java-driver-test-infra bundle From 309f3b9276fcce0e27312b723bd36ba6523ac202 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 24 Feb 2020 18:20:12 -0800 Subject: [PATCH 384/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 18293ec41f3..6ea619bedad 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.0 + 4.6.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.5.0 + 4.6.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.5.0 + 4.6.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.5.0 + 4.6.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.5.0 + 4.6.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.5.0 + 
4.6.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index eefaf6f1fec..67911797804 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 73bd34ece58..6e072bbe170 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 2e5d92a5f4c..7859fb8e436 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a58c44107a8..5f85e975697 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.0 + 4.6.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ffa94217c39..69dc5030ad4 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a20fede9ad0..763b74176e9 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 67fc2817a5d..13a0f8c804d 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index c9b635a8f04..90b84c6b559 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -876,7 +876,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.5.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 2b189bde432..93bab0ba16c 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 4f5bbf1b4ec..15edc13243c 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.6.0-SNAPSHOT java-driver-test-infra bundle From 0d9da9d9c9432b2a401d358f8a7dcf12c7177103 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Feb 2020 09:33:31 -0800 Subject: [PATCH 385/979] Prepare changelog for next development iteration --- changelog/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 8e16266c645..3d16d2026a6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,8 @@ +### 4.6.0 (in progress) + ### 4.5.0 - [bug] JAVA-2654: Make AdminRequestHandler handle integer serialization From 1155605aa02769c8d2edc648c1b5236eaf04a27c Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Feb 2020 10:20:42 -0800 Subject: [PATCH 386/979] Fix log assertions in OsgiVanillaIT The level and messages changed in JAVA-2529. 
--- .../datastax/oss/driver/osgi/OsgiVanillaIT.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java index a9f445fb968..1e492e1d381 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java @@ -69,7 +69,7 @@ public void addTestAppender() { Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.oss.driver"); Level oldLevel = logger.getLevel(); logger.getLoggerContext().putObject("oldLevel", oldLevel); - logger.setLevel(Level.WARN); + logger.setLevel(Level.INFO); TestAppender appender = new TestAppender(); logger.addAppender(appender); appender.start(); @@ -92,28 +92,30 @@ public void should_connect_and_query_simple() { private void assertLogMessagesPresent() { Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.oss.driver"); TestAppender appender = (TestAppender) logger.getAppender("test"); - List warnLogs = + List infoLogs = appender.events.stream() .filter(event -> event.getLevel().toInt() == Level.INFO.toInt()) .map(ILoggingEvent::getFormattedMessage) .collect(Collectors.toList()); - assertThat(warnLogs).hasSize(3); - assertThat(warnLogs) + assertThat(infoLogs) .anySatisfy( msg -> assertThat(msg) .contains( - "Could not register Geo codecs; ESRI API might be missing from classpath")) + "Could not register Geo codecs; this is normal if ESRI was explicitly " + + "excluded from classpath")) .anySatisfy( msg -> assertThat(msg) .contains( - "Could not register Reactive extensions; Reactive Streams API might be missing from classpath")) + "Could not register Reactive extensions; this is normal if Reactive " + + "Streams was explicitly excluded from classpath")) .anySatisfy( msg -> assertThat(msg) .contains( - "Could not register Graph 
extensions; Tinkerpop API might be missing from classpath")); + "Could not register Graph extensions; this is normal if Tinkerpop was " + + "explicitly excluded from classpath")); } private static class TestAppender extends AppenderBase { From 72a8ae886a4e68795665a3a8c7f5b970d58dc5f5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Feb 2020 10:52:50 -0800 Subject: [PATCH 387/979] Adapt materialized view statements in SchemaChangesIT for DSE 6.8 DSE 6.8 adds additional constraints on MV creation: - columns in the PRIMARY KEY directive must all be selected - the WITH CLUSTERING ORDER directive must match the clustering columns in the primary key. --- .../oss/driver/core/metadata/SchemaChangesIT.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 6c81710552c..66d8977e801 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -239,9 +239,9 @@ public void should_handle_view_creation() { should_handle_creation( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " - + "AS SELECT user, score FROM scores " + + "AS SELECT game, user, score FROM scores " + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " - + "WITH CLUSTERING ORDER BY (score DESC)", + + "WITH CLUSTERING ORDER BY (score DESC, user DESC)", metadata -> metadata .getKeyspace(adminSessionRule.keyspace()) @@ -269,9 +269,9 @@ public void should_handle_view_drop() { ImmutableList.of( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " - + "AS SELECT user, score FROM scores " + + "AS 
SELECT game, user, score FROM scores " + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " - + "WITH CLUSTERING ORDER BY (score DESC)"), + + "WITH CLUSTERING ORDER BY (score DESC, user DESC)"), "DROP MATERIALIZED VIEW highscores", metadata -> metadata @@ -287,9 +287,9 @@ public void should_handle_view_update() { ImmutableList.of( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " - + "AS SELECT user, score FROM scores " + + "AS SELECT game, user, score FROM scores " + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " - + "WITH CLUSTERING ORDER BY (score DESC)"), + + "WITH CLUSTERING ORDER BY (score DESC, user DESC)"), "ALTER MATERIALIZED VIEW highscores WITH comment = 'The best score for each game'", metadata -> metadata From e084ac1c4ddbdbec96056af798c8c5e534204773 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Feb 2020 17:26:22 -0800 Subject: [PATCH 388/979] Adjust ExecutionInfoWarningsIT assertions for DSE 6.8 --- .../datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java index 01b7cef5a97..e3648c93424 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java @@ -196,6 +196,6 @@ public void should_expose_warnings_on_execution_info() { .contains("for") .contains(String.format("%s.test", sessionRule.keyspace().asCql(true))) .contains("is of size") - .contains("exceeding specified threshold")); + .containsPattern("exceeding specified .*threshold")); } } From 69fe94c17e841252b92fe8d8efbf6a6b2c741e9e Mon Sep 17 00:00:00 2001 From: 
olim7t Date: Wed, 26 Feb 2020 10:05:56 -0800 Subject: [PATCH 389/979] Enable SASI indexes when running mapper tests against C* 4 --- .../api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java index 01cf3888aa2..8dfe6e99b6e 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java @@ -20,8 +20,10 @@ public class DefaultCcmBridgeBuilderCustomizer { public static CcmBridge.Builder configureBuilder(CcmBridge.Builder builder) { - if (!CcmBridge.DSE_ENABLEMENT && CcmBridge.VERSION.compareTo(Version.V4_0_0) >= 0) { + if (!CcmBridge.DSE_ENABLEMENT + && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { builder.withCassandraConfiguration("enable_materialized_views", true); + builder.withCassandraConfiguration("enable_sasi_indexes", true); } return builder; } From bea702f3cae935b891f5bdcfd781c13ac468ac56 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 26 Feb 2020 11:41:38 -0800 Subject: [PATCH 390/979] Adapt materialized view statements in SchemaChangesIT for C* 4 --- .../oss/driver/core/metadata/SchemaChangesIT.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 66d8977e801..38ab3825485 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -240,7 
+240,8 @@ public void should_handle_view_creation() { "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " + "AS SELECT game, user, score FROM scores " - + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " + + "WHERE game IS NOT NULL AND score IS NOT NULL AND user IS NOT NULL " + + "PRIMARY KEY (game, score, user) " + "WITH CLUSTERING ORDER BY (score DESC, user DESC)", metadata -> metadata @@ -252,7 +253,7 @@ public void should_handle_view_creation() { Assertions.assertThat(view.getBaseTable().asInternal()).isEqualTo("scores"); Assertions.assertThat(view.includesAllColumns()).isFalse(); Assertions.assertThat(view.getWhereClause()) - .hasValue("game IS NOT NULL AND score IS NOT NULL"); + .hasValue("game IS NOT NULL AND score IS NOT NULL AND user IS NOT NULL"); Assertions.assertThat(view.getColumns()) .containsOnlyKeys( CqlIdentifier.fromInternal("game"), @@ -270,7 +271,8 @@ public void should_handle_view_drop() { "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " + "AS SELECT game, user, score FROM scores " - + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " + + "WHERE game IS NOT NULL AND score IS NOT NULL AND user IS NOT NULL " + + "PRIMARY KEY (game, score, user) " + "WITH CLUSTERING ORDER BY (score DESC, user DESC)"), "DROP MATERIALIZED VIEW highscores", metadata -> @@ -288,7 +290,8 @@ public void should_handle_view_update() { "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " + "AS SELECT game, user, score FROM scores " - + "WHERE game IS NOT NULL AND score IS NOT NULL PRIMARY KEY (game, score, user) " + + "WHERE game IS NOT NULL AND score IS NOT NULL AND user IS NOT NULL " + + "PRIMARY KEY (game, score, user) " + "WITH CLUSTERING ORDER BY (score DESC, user DESC)"), "ALTER MATERIALIZED VIEW 
highscores WITH comment = 'The best score for each game'", metadata -> From c3a6e6b30c4be2ad72986a5dd1e458fc13c2d2a1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 27 Feb 2020 15:23:12 -0800 Subject: [PATCH 391/979] Reformat NodeState javadocs Bullet lists in the title line do not render well in the HTML output. --- .../driver/api/core/metadata/NodeState.java | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeState.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeState.java index c35ea0ccacb..0f4667e909b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeState.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeState.java @@ -30,25 +30,17 @@ public enum NodeState { */ UNKNOWN, /** - * A node is considered up in either of the following situations: - * - *
        - *
      • the driver has at least one active connection to the node. - *
      • the driver is not actively trying to connect to the node (because it's ignored by the - * {@link LoadBalancingPolicy}), but it has received a topology event indicating that the - * node is up. - *
      + * A node is considered up in either of the following situations: 1) the driver has at least one + * active connection to the node, or 2) the driver is not actively trying to connect to the node + * (because it's ignored by the {@link LoadBalancingPolicy}), but it has received a topology event + * indicating that the node is up. */ UP, /** - * A node is considered down in either of the following situations: - * - *
        - *
      • the driver has lost all connections to the node (and is currently trying to reconnect). - *
      • the driver is not actively trying to connect to the node (because it's ignored by the - * {@link LoadBalancingPolicy}), but it has received a topology event indicating that the - * node is down. - *
      + * A node is considered down in either of the following situations: 1) the driver has lost all + * connections to the node (and is currently trying to reconnect), or 2) the driver is not + * actively trying to connect to the node (because it's ignored by the {@link + * LoadBalancingPolicy}), but it has received a topology event indicating that the node is down. */ DOWN, /** From 3497799c26a291abca6fa404341d0a504762929f Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 4 Mar 2020 08:50:36 -0600 Subject: [PATCH 392/979] JAVA-1861: Add Metadata.getClusterName() --- changelog/README.md | 2 + .../driver/api/core/metadata/Metadata.java | 12 +++++ .../internal/core/channel/ChannelFactory.java | 6 ++- .../core/metadata/DefaultMetadata.java | 21 +++++++-- .../refresh/GraphSchemaRefreshTest.java | 4 ++ .../ChannelFactoryClusterNameTest.java | 2 +- .../core/channel/ChannelFactoryTestBase.java | 2 +- .../core/metadata/AddNodeRefreshTest.java | 9 ++-- .../metadata/DefaultMetadataTokenMapTest.java | 15 +++--- .../metadata/FullNodeListRefreshTest.java | 5 ++ .../core/metadata/NodeStateManagerTest.java | 2 +- .../core/metadata/RemoveNodeRefreshTest.java | 6 ++- .../schema/refresh/SchemaRefreshTest.java | 4 ++ .../oss/driver/core/metadata/MetadataIT.java | 46 +++++++++++++++++++ .../driver/api/testinfra/ccm/CcmBridge.java | 4 +- 15 files changed, 121 insertions(+), 19 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/MetadataIT.java diff --git a/changelog/README.md b/changelog/README.md index 3d16d2026a6..7eb7e50622d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.6.0 (in progress) + - [bug] JAVA-1861: Add Metadata.getClusterName() + ### 4.5.0 - [bug] JAVA-2654: Make AdminRequestHandler handle integer serialization diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Metadata.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Metadata.java index 
a996d8a1eaf..287298c44fd 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Metadata.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/Metadata.java @@ -114,4 +114,16 @@ default Optional getKeyspace(@NonNull String keyspaceName) { */ @NonNull Optional getTokenMap(); + + /** + * The cluster name to which this session is connected. The Optional returned should contain the + * value from the server for system.local.cluster_name. + * + *

      Note that this method has a default implementation for backwards compatibility. It is + * expected that any implementing classes override this method. + */ + @NonNull + default Optional getClusterName() { + return Optional.empty(); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 49c0d7ac745..6e5e699393b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -77,7 +77,7 @@ public class ChannelFactory { /** either set from the configuration, or null and will be negotiated */ @VisibleForTesting volatile ProtocolVersion protocolVersion; - @VisibleForTesting volatile String clusterName; + private volatile String clusterName; /** * The value of the {@code PRODUCT_TYPE} option reported by the first channel we opened, in @@ -117,6 +117,10 @@ public void setProtocolVersion(ProtocolVersion newVersion) { this.protocolVersion = newVersion; } + public String getClusterName() { + return clusterName; + } + public CompletionStage connect(Node node, DriverChannelOptions options) { NodeMetricUpdater nodeMetricUpdater; if (node instanceof DefaultNode) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadata.java index b8c4008775a..c34486029fe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadata.java @@ -46,17 +46,22 @@ public class DefaultMetadata implements Metadata { private static final Logger LOG = LoggerFactory.getLogger(DefaultMetadata.class); public static DefaultMetadata EMPTY = - new DefaultMetadata(Collections.emptyMap(), 
Collections.emptyMap(), null); + new DefaultMetadata(Collections.emptyMap(), Collections.emptyMap(), null, null); protected final Map nodes; protected final Map keyspaces; protected final TokenMap tokenMap; + protected final String clusterName; protected DefaultMetadata( - Map nodes, Map keyspaces, TokenMap tokenMap) { + Map nodes, + Map keyspaces, + TokenMap tokenMap, + String clusterName) { this.nodes = nodes; this.keyspaces = keyspaces; this.tokenMap = tokenMap; + this.clusterName = clusterName; } @NonNull @@ -77,6 +82,12 @@ public Optional getTokenMap() { return Optional.ofNullable(tokenMap); } + @NonNull + @Override + public Optional getClusterName() { + return Optional.ofNullable(clusterName); + } + /** * Refreshes the current metadata with the given list of nodes. * @@ -102,7 +113,8 @@ public DefaultMetadata withNodes( ImmutableMap.copyOf(newNodes), this.keyspaces, rebuildTokenMap( - newNodes, keyspaces, tokenMapEnabled, forceFullRebuild, tokenFactory, context)); + newNodes, keyspaces, tokenMapEnabled, forceFullRebuild, tokenFactory, context), + context.getChannelFactory().getClusterName()); } public DefaultMetadata withSchema( @@ -112,7 +124,8 @@ public DefaultMetadata withSchema( return new DefaultMetadata( this.nodes, ImmutableMap.copyOf(newKeyspaces), - rebuildTokenMap(nodes, newKeyspaces, tokenMapEnabled, false, null, context)); + rebuildTokenMap(nodes, newKeyspaces, tokenMapEnabled, false, null, context), + context.getChannelFactory().getClusterName()); } @Nullable diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java index 3a4887e8691..5047032e95e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/schema/refresh/GraphSchemaRefreshTest.java @@ 
-16,6 +16,7 @@ package com.datastax.dse.driver.internal.core.graph.schema.refresh; import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; import com.datastax.dse.driver.api.core.metadata.schema.DseEdgeMetadata; import com.datastax.dse.driver.api.core.metadata.schema.DseGraphTableMetadata; @@ -28,6 +29,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultMetadata; import com.datastax.oss.driver.internal.core.metadata.MetadataRefresh; @@ -64,10 +66,12 @@ public class GraphSchemaRefreshTest { ImmutableMap.of(CqlIdentifier.fromInternal("tbl"), OLD_TABLE)); @Mock private InternalDriverContext context; + @Mock private ChannelFactory channelFactory; private DefaultMetadata oldMetadata; @Before public void setup() { + when(context.getChannelFactory()).thenReturn(channelFactory); oldMetadata = DefaultMetadata.EMPTY.withSchema( ImmutableMap.of(OLD_KS1.getName(), OLD_KS1, KS_WITH_ENGINE.getName(), KS_WITH_ENGINE), diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java index 5fc9dcea1e6..5a3cf93edeb 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryClusterNameTest.java @@ -48,7 +48,7 @@ public void should_set_cluster_name_from_first_connection() { // Then assertThatStage(channelFuture).isSuccess(); - assertThat(factory.clusterName).isEqualTo("mockClusterName"); + 
assertThat(factory.getClusterName()).isEqualTo("mockClusterName"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java index 0d4c15c558c..71b2636e1a5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java @@ -264,7 +264,7 @@ protected void initChannel(Channel channel) throws Exception { new ProtocolInitHandler( context, protocolVersion, - clusterName, + getClusterName(), endPoint, options, heartbeatHandler, diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java index c5186d527f4..67b3cc36c8e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefreshTest.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -38,12 +39,14 @@ public class AddNodeRefreshTest { @Mock private InternalDriverContext context; @Mock protected MetricsFactory metricsFactory; + @Mock private ChannelFactory channelFactory; private DefaultNode node1; @Before public void setup() { when(context.getMetricsFactory()).thenReturn(metricsFactory); + when(context.getChannelFactory()).thenReturn(channelFactory); node1 = TestNodeFactory.newNode(1, context); } @@ -52,7 +55,7 @@ 
public void should_add_new_node() { // Given DefaultMetadata oldMetadata = new DefaultMetadata( - ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null); + ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null, null); UUID newHostId = Uuids.random(); DefaultEndPoint newEndPoint = TestNodeFactory.newEndPoint(2); UUID newSchemaVersion = Uuids.random(); @@ -86,7 +89,7 @@ public void should_not_add_existing_node_with_same_id_and_endpoint() { // Given DefaultMetadata oldMetadata = new DefaultMetadata( - ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null); + ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null, null); DefaultNodeInfo newNodeInfo = DefaultNodeInfo.builder() .withHostId(node1.getHostId()) @@ -112,7 +115,7 @@ public void should_add_existing_node_with_same_id_but_different_endpoint() { // Given DefaultMetadata oldMetadata = new DefaultMetadata( - ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null); + ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null, null); DefaultEndPoint newEndPoint = TestNodeFactory.newEndPoint(2); InetSocketAddress newBroadcastRpcAddress = newEndPoint.resolve(); UUID newSchemaVersion = Uuids.random(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadataTokenMapTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadataTokenMapTest.java index 79e56e1d832..db8f9cd2eda 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadataTokenMapTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultMetadataTokenMapTest.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; 
import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.token.DefaultReplicationStrategyFactory; import com.datastax.oss.driver.internal.core.metadata.token.Murmur3TokenFactory; @@ -53,9 +54,11 @@ public class DefaultMetadataTokenMapTest { "class", "org.apache.cassandra.locator.SimpleStrategy", "replication_factor", "1")); @Mock private InternalDriverContext context; + @Mock private ChannelFactory channelFactory; @Before public void setup() { + when(context.getChannelFactory()).thenReturn(channelFactory); DefaultReplicationStrategyFactory replicationStrategyFactory = new DefaultReplicationStrategyFactory(context); when(context.getReplicationStrategyFactory()).thenReturn(replicationStrategyFactory); @@ -65,7 +68,7 @@ public void setup() { public void should_not_build_token_map_when_initializing_with_contact_points() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null, null); assertThat(contactPointsMetadata.getTokenMap()).isNotPresent(); } @@ -73,7 +76,7 @@ public void should_not_build_token_map_when_initializing_with_contact_points() { public void should_build_minimal_token_map_on_first_refresh() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null, null); DefaultMetadata firstRefreshMetadata = contactPointsMetadata.withNodes( ImmutableMap.of(NODE1.getHostId(), NODE1), @@ -88,7 +91,7 @@ public void should_build_minimal_token_map_on_first_refresh() { public void should_not_build_token_map_when_disabled() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), 
NODE1), Collections.emptyMap(), null, null); DefaultMetadata firstRefreshMetadata = contactPointsMetadata.withNodes( ImmutableMap.of(NODE1.getHostId(), NODE1), @@ -103,7 +106,7 @@ public void should_not_build_token_map_when_disabled() { public void should_stay_empty_on_first_refresh_if_partitioner_missing() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null, null); DefaultMetadata firstRefreshMetadata = contactPointsMetadata.withNodes( ImmutableMap.of(NODE1.getHostId(), NODE1), true, true, null, context); @@ -114,7 +117,7 @@ public void should_stay_empty_on_first_refresh_if_partitioner_missing() { public void should_update_minimal_token_map_if_new_node_and_still_no_schema() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null, null); DefaultMetadata firstRefreshMetadata = contactPointsMetadata.withNodes( ImmutableMap.of(NODE1.getHostId(), NODE1), @@ -136,7 +139,7 @@ public void should_update_minimal_token_map_if_new_node_and_still_no_schema() { public void should_update_token_map_when_schema_changes() { DefaultMetadata contactPointsMetadata = new DefaultMetadata( - ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null); + ImmutableMap.of(NODE1.getHostId(), NODE1), Collections.emptyMap(), null, null); DefaultMetadata firstRefreshMetadata = contactPointsMetadata.withNodes( ImmutableMap.of(NODE1.getHostId(), NODE1), diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java index bdf064ed528..c4f8ee22e24 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -37,6 +38,7 @@ public class FullNodeListRefreshTest { @Mock private InternalDriverContext context; @Mock protected MetricsFactory metricsFactory; + @Mock private ChannelFactory channelFactory; private DefaultNode node1; private DefaultNode node2; @@ -46,6 +48,7 @@ public class FullNodeListRefreshTest { @Before public void setup() { when(context.getMetricsFactory()).thenReturn(metricsFactory); + when(context.getChannelFactory()).thenReturn(channelFactory); node1 = TestNodeFactory.newNode(1, context); node2 = TestNodeFactory.newNode(2, context); @@ -61,6 +64,7 @@ public void should_add_and_remove_nodes() { new DefaultMetadata( ImmutableMap.of(node1.getHostId(), node1, node2.getHostId(), node2), Collections.emptyMap(), + null, null); Iterable newInfos = ImmutableList.of( @@ -88,6 +92,7 @@ public void should_update_existing_nodes() { new DefaultMetadata( ImmutableMap.of(node1.getHostId(), node1, node2.getHostId(), node2), Collections.emptyMap(), + null, null); UUID schemaVersion1 = Uuids.random(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java index 347185bce80..d1b2f47d8dc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java @@ -94,7 +94,7 @@ public void setup() { .put(node1.getHostId(), node1) .put(node2.getHostId(), node2) .build(); - Metadata metadata = new DefaultMetadata(nodes, Collections.emptyMap(), null); + Metadata metadata = new DefaultMetadata(nodes, Collections.emptyMap(), null, null); when(metadataManager.getMetadata()).thenReturn(metadata); when(metadataManager.refreshNode(any(Node.class))) .thenReturn(CompletableFuture.completedFuture(null)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/RemoveNodeRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/RemoveNodeRefreshTest.java index 29053f2b08e..c783750fe5c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/RemoveNodeRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/RemoveNodeRefreshTest.java @@ -18,6 +18,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -33,6 +34,7 @@ public class RemoveNodeRefreshTest { @Mock private InternalDriverContext context; @Mock protected MetricsFactory metricsFactory; + @Mock private ChannelFactory channelFactory; private DefaultNode node1; private DefaultNode node2; @@ -40,6 +42,7 @@ public class RemoveNodeRefreshTest { @Before public void setup() { when(context.getMetricsFactory()).thenReturn(metricsFactory); + when(context.getChannelFactory()).thenReturn(channelFactory); node1 = TestNodeFactory.newNode(1, context); node2 = TestNodeFactory.newNode(2, context); } @@ -51,6 +54,7 @@ public void should_remove_existing_node() { new 
DefaultMetadata( ImmutableMap.of(node1.getHostId(), node1, node2.getHostId(), node2), Collections.emptyMap(), + null, null); RemoveNodeRefresh refresh = new RemoveNodeRefresh(node2.getBroadcastRpcAddress().get()); @@ -67,7 +71,7 @@ public void should_not_remove_nonexistent_node() { // Given DefaultMetadata oldMetadata = new DefaultMetadata( - ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null); + ImmutableMap.of(node1.getHostId(), node1), Collections.emptyMap(), null, null); RemoveNodeRefresh refresh = new RemoveNodeRefresh(node2.getBroadcastRpcAddress().get()); // When diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefreshTest.java index 1f171d90611..4f148db1dcc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/refresh/SchemaRefreshTest.java @@ -16,10 +16,12 @@ package com.datastax.oss.driver.internal.core.metadata.schema.refresh; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultMetadata; import com.datastax.oss.driver.internal.core.metadata.MetadataRefresh; @@ -51,10 +53,12 @@ public class SchemaRefreshTest { private static final DefaultKeyspaceMetadata OLD_KS1 = newKeyspace("ks1", true, OLD_T1, OLD_T2); @Mock private InternalDriverContext context; + @Mock private ChannelFactory channelFactory; private DefaultMetadata 
oldMetadata; @Before public void setup() { + when(context.getChannelFactory()).thenReturn(channelFactory); oldMetadata = DefaultMetadata.EMPTY.withSchema( ImmutableMap.of(OLD_KS1.getName(), OLD_KS1), false, context); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/MetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/MetadataIT.java new file mode 100644 index 00000000000..2eb60a4e7ab --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/MetadataIT.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.metadata; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Metadata; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class MetadataIT { + + private CcmRule ccmRule = CcmRule.getInstance(); + + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Test + public void should_expose_cluster_name() { + Metadata metadata = sessionRule.session().getMetadata(); + assertThat(metadata.getClusterName()).hasValue(CcmBridge.CLUSTER_NAME); + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 4155342195d..322efccca4d 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -77,6 +77,8 @@ public class CcmBridge implements AutoCloseable { public static final Boolean DSE_ENABLEMENT = Boolean.getBoolean("ccm.dse"); + public static final String CLUSTER_NAME = "ccm_1"; + public static final String DEFAULT_CLIENT_TRUSTSTORE_PASSWORD = "cassandra1sfun"; public static final String DEFAULT_CLIENT_TRUSTSTORE_PATH = "/client.truststore"; @@ -219,7 +221,7 @@ public void create() { } execute( "create", - "ccm_1", + CLUSTER_NAME, "-i", ipPrefix, "-n", From 
bcda412cfd73d058600a18d1275142de058e638e Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 5 Mar 2020 14:18:07 -0800 Subject: [PATCH 393/979] Merge node metadata ITs --- .../driver/api/core/metadata/MetadataIT.java | 58 ----------- .../driver/core/metadata/NodeMetadataIT.java | 96 ++++++++++++------- 2 files changed, 61 insertions(+), 93 deletions(-) delete mode 100644 integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java deleted file mode 100644 index fb3ba5b70b4..00000000000 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/MetadataIT.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.dse.driver.api.core.metadata; - -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.categories.ParallelizableTests; -import java.util.Set; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; - -@Category(ParallelizableTests.class) -@DseRequirement(min = "5.1") -public class MetadataIT { - - private static CcmRule ccmRule = CcmRule.getInstance(); - - private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); - - @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - - @Test - public void should_expose_dse_node_properties() { - Node node = sessionRule.session().getMetadata().getNodes().values().iterator().next(); - - // Basic checks as we want something that will work with a large range of DSE versions: - assertThat(node.getExtras()) - .containsKeys( - DseNodeProperties.DSE_VERSION, - DseNodeProperties.DSE_WORKLOADS, - DseNodeProperties.SERVER_ID); - assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)).isInstanceOf(Version.class); - assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); - assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index c236294e9ef..e2e494f83a7 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -17,13 +17,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.utils.ConditionChecker; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.context.EventBus; @@ -31,51 +33,75 @@ import com.datastax.oss.driver.internal.core.metadata.TopologyEvent; import java.net.InetSocketAddress; import java.util.Collection; -import org.junit.ClassRule; +import java.util.Set; +import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; @Category(ParallelizableTests.class) public class NodeMetadataIT { - @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); + private CcmRule ccmRule = CcmRule.getInstance(); + + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @Test public void should_expose_node_metadata() { - try (CqlSession session = SessionUtils.newSession(CCM_RULE)) { - Node node = getUniqueNode(session); - // Run a few basic checks given what we 
know about our test environment: - assertThat(node.getEndPoint()).isNotNull(); - InetSocketAddress connectAddress = (InetSocketAddress) node.getEndPoint().resolve(); - node.getBroadcastAddress() - .ifPresent( - broadcastAddress -> - assertThat(broadcastAddress.getAddress()).isEqualTo(connectAddress.getAddress())); - assertThat(node.getListenAddress().get().getAddress()).isEqualTo(connectAddress.getAddress()); - assertThat(node.getDatacenter()).isEqualTo("dc1"); - assertThat(node.getRack()).isEqualTo("r1"); - if (!CcmBridge.DSE_ENABLEMENT) { - // CcmBridge does not report accurate C* versions for DSE, only approximated values - assertThat(node.getCassandraVersion()).isEqualTo(CCM_RULE.getCassandraVersion()); - } - assertThat(node.getState()).isSameAs(NodeState.UP); - assertThat(node.getDistance()).isSameAs(NodeDistance.LOCAL); - assertThat(node.getHostId()).isNotNull(); - assertThat(node.getSchemaVersion()).isNotNull(); - long upTime1 = node.getUpSinceMillis(); - assertThat(upTime1).isGreaterThan(-1); - - // Note: open connections and reconnection status are covered in NodeStateIT + CqlSession session = sessionRule.session(); - // Force the node down and back up to check that upSinceMillis gets updated - EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); - eventBus.fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); - ConditionChecker.checkThat(() -> node.getState() == NodeState.FORCED_DOWN).becomesTrue(); - assertThat(node.getUpSinceMillis()).isEqualTo(-1); - eventBus.fire(TopologyEvent.forceUp(node.getBroadcastRpcAddress().get())); - ConditionChecker.checkThat(() -> node.getState() == NodeState.UP).becomesTrue(); - assertThat(node.getUpSinceMillis()).isGreaterThan(upTime1); + Node node = getUniqueNode(session); + // Run a few basic checks given what we know about our test environment: + assertThat(node.getEndPoint()).isNotNull(); + InetSocketAddress connectAddress = (InetSocketAddress) node.getEndPoint().resolve(); + 
node.getBroadcastAddress() + .ifPresent( + broadcastAddress -> + assertThat(broadcastAddress.getAddress()).isEqualTo(connectAddress.getAddress())); + assertThat(node.getListenAddress().get().getAddress()).isEqualTo(connectAddress.getAddress()); + assertThat(node.getDatacenter()).isEqualTo("dc1"); + assertThat(node.getRack()).isEqualTo("r1"); + if (!CcmBridge.DSE_ENABLEMENT) { + // CcmBridge does not report accurate C* versions for DSE, only approximated values + assertThat(node.getCassandraVersion()).isEqualTo(ccmRule.getCassandraVersion()); } + assertThat(node.getState()).isSameAs(NodeState.UP); + assertThat(node.getDistance()).isSameAs(NodeDistance.LOCAL); + assertThat(node.getHostId()).isNotNull(); + assertThat(node.getSchemaVersion()).isNotNull(); + long upTime1 = node.getUpSinceMillis(); + assertThat(upTime1).isGreaterThan(-1); + + // Note: open connections and reconnection status are covered in NodeStateIT + + // Force the node down and back up to check that upSinceMillis gets updated + EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); + eventBus.fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); + ConditionChecker.checkThat(() -> node.getState() == NodeState.FORCED_DOWN).becomesTrue(); + assertThat(node.getUpSinceMillis()).isEqualTo(-1); + eventBus.fire(TopologyEvent.forceUp(node.getBroadcastRpcAddress().get())); + ConditionChecker.checkThat(() -> node.getState() == NodeState.UP).becomesTrue(); + assertThat(node.getUpSinceMillis()).isGreaterThan(upTime1); + } + + @Test + @DseRequirement(min = "5.1") + public void should_expose_dse_node_properties() { + Node node = getUniqueNode(sessionRule.session()); + + // Basic checks as we want something that will work with a large range of DSE versions: + assertThat(node.getExtras()) + .containsKeys( + DseNodeProperties.DSE_VERSION, + DseNodeProperties.DSE_WORKLOADS, + DseNodeProperties.SERVER_ID); + assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)) + 
.isEqualTo(ccmRule.getDseVersion().get()); + assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); + assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); } private static Node getUniqueNode(CqlSession session) { From 140e48239973144b20dc3dfa55c3742318249626 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 5 Mar 2020 14:24:36 -0800 Subject: [PATCH 394/979] Clarify test rule usage in contribution guidelines --- CONTRIBUTING.md | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e6eba076dec..c87a3de1d98 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -312,14 +312,11 @@ Do not mix `CcmRule` and `SimulacronRule` in the same test. It makes things hard can be inefficient (if the `SimulacronRule` is method-level, it will create a Simulacron cluster for every test method, even those that only need CCM). -Try to use `@ClassRule` as much as possible: it's more efficient to reuse the same resource across -all test methods. The only exceptions are: -* CCM tests that use `@CassandraRequirement` restrictions at the method level (ex: - `BatchStatementIT`). -* tests where you *really* need to restart from a clean state for every method. +##### Class-level rules -When you use `@ClassRule`, your rules need to be static; also make them final and use constant -naming conventions, like `CCM_RULE`. +Rules annotated with `@ClassRule` wrap the whole test class, and are reused across methods. Try to +use this as much as possible, as it's more efficient. The fields need to be static; also make them +final and use constant naming conventions, like `CCM_RULE`. 
When you use a server rule (`CcmRule` or `SimulacronRule`) and a `SessionRule` at the same level, wrap them into a rule chain to ensure proper initialization order: @@ -332,7 +329,28 @@ private static final SessionRule SESSION_RULE = SessionRule.builder( public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); ``` -This is not necessary if the server rule is a `@ClassRule` and the session rule is a `@Rule`. +##### Method-level rules + +Rules annotated with `@Rule` wrap each test method. Use lower-camel case for field names: + +```java +private CcmRule ccmRule = CcmRule.getInstance(); +private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + +@ClassRule +public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); +``` + +Only use this for: + +* CCM tests that use `@CassandraRequirement` or `@DseRequirement` restrictions at the method level + (ex: `BatchStatementIT`). +* tests where you *really* need to restart from a clean state for every method. + +##### Mixed + +It's also possible to use a `@ClassRule` for CCM / Simulacron, and a `@Rule` for the session rule. +In that case, you don't need to use a rule chain. ## Running the tests From a8034f0dda6c1292b4a0a0430b2a9def95dba65d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 2 Mar 2020 13:21:17 +0200 Subject: [PATCH 395/979] JAVA-2667: Add ability to fail the build when integration tests fail With this change, 2 behavioral modifications are introduced: 1) The build now by default fails if the integration tests fail. It is the responsibility of CI platforms to modify this behavior if they prefer the build to finish. 2) If any of the 3 executions of integration tests fails (parallel, serial or isolated), the build will still execute all 3 executions before failing. 
--- .travis.yml | 2 +- build.yaml | 2 +- changelog/README.md | 3 ++- integration-tests/pom.xml | 26 ++++++++++++++++---------- 4 files changed, 20 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 50c05e76856..f2e505f2176 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - jdk_switcher use openjdk8 - ./install-snapshots.sh install: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V -script: mvn test -Djacoco.skip=true -B -V +script: mvn test -Djacoco.skip=true -Dmaven.test.failure.ignore=true -B -V cache: directories: - $HOME/.m2 diff --git a/build.yaml b/build.yaml index 319ed0b9800..42e4c6ff77f 100644 --- a/build.yaml +++ b/build.yaml @@ -77,7 +77,7 @@ build: # Use the matrix JDK for testing jabba use $JABBA_JDK_NAME # Run tests against matrix JDK - mvn -B -V verify --batch-mode --show-version -Dccm.version=$CCM_CASSANDRA_VERSION -Dccm.dse=$CCM_IS_DSE -Dproxy.path=$HOME/proxy -Dmaven.javadoc.skip=true + mvn -B -V verify --batch-mode --show-version -Dccm.version=$CCM_CASSANDRA_VERSION -Dccm.dse=$CCM_IS_DSE -Dproxy.path=$HOME/proxy -Dmaven.javadoc.skip=true -Dmaven.test.failure.ignore=true - xunit: - "**/target/surefire-reports/TEST-*.xml" - "**/target/failsafe-reports/TEST-*.xml" diff --git a/changelog/README.md b/changelog/README.md index 7eb7e50622d..e88071b9faa 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,7 +4,8 @@ ### 4.6.0 (in progress) - - [bug] JAVA-1861: Add Metadata.getClusterName() +- [improvement] JAVA-2667: Add ability to fail the build when integration tests fail +- [bug] JAVA-1861: Add Metadata.getClusterName() ### 4.5.0 diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 69dc5030ad4..fa16a13c93e 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -268,15 +268,12 @@ parallelizable-tests integration-test - verify com.datastax.oss.driver.categories.ParallelizableTests classes 8 - - true - parallelized + 
${project.build.directory}/failsafe-reports/failsafe-summary-parallelized.xml ${skipParallelizableITs} @@ -284,12 +281,10 @@ serial-tests integration-test - verify com.datastax.oss.driver.categories.ParallelizableTests, com.datastax.oss.driver.categories.IsolatedTests - true - serial + ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml ${skipSerialITs} @@ -297,18 +292,29 @@ isolated-tests integration-test - verify com.datastax.oss.driver.categories.IsolatedTests 1 false - true - isolated + ${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml ${skipIsolatedITs} + + verify + + verify + + + + ${project.build.directory}/failsafe-reports/failsafe-summary-parallelized.xml + ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml + ${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml + + + From 0d2ec0ed359f45848b4b91908308e0ba6b7ff052 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 11 Mar 2020 16:01:59 -0700 Subject: [PATCH 396/979] Bump version to 4.5.1-SNAPSHOT --- bom/pom.xml | 14 +++++++------- changelog/README.md | 3 +++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 20 insertions(+), 17 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 18293ec41f3..06473a4f92f 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.0 + 4.5.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.5.0 + 4.5.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.5.0 + 4.5.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.5.0 + 4.5.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.5.0 + 4.5.1-SNAPSHOT 
com.datastax.oss java-driver-test-infra - 4.5.0 + 4.5.1-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index 8e16266c645..4be24e05a05 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.5.1 (in progress) + + ### 4.5.0 - [bug] JAVA-2654: Make AdminRequestHandler handle integer serialization diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index eefaf6f1fec..1a21193c19b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 73bd34ece58..4789b36e364 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 2e5d92a5f4c..ee3ca09c3ce 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a58c44107a8..025307d17f3 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.0 + 4.5.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ffa94217c39..d63cd47448b 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a20fede9ad0..eb7c7c0e048 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 67fc2817a5d..cf6eada15e2 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index c9b635a8f04..e914822abfe 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 2b189bde432..6223b7f1e7f 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 4f5bbf1b4ec..d819205636b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.0 + 4.5.1-SNAPSHOT java-driver-test-infra bundle From 513d8be04be8536bbdc8f5590fa978e78001e106 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Fri, 6 Mar 2020 14:37:39 -0600 Subject: [PATCH 397/979] JAVA-2673: Fix mapper generated code for UPDATE with TTL and IF condition --- changelog/README.md | 1 + .../oss/driver/mapper/UpdateCustomIfClauseIT.java | 14 ++++++++++++++ .../processor/dao/DaoUpdateMethodGenerator.java | 3 ++- 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 4be24e05a05..0af1aec2d33 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.5.1 (in progress) +- [bug] JAVA-2673: Fix mapper generated code for UPDATE with TTL and IF condition ### 4.5.0 diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java index 07fd3aba869..53773a20ee1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java @@ -162,6 +162,17 @@ public void should_not_async_update_entity_if_condition_is_not_met() { .isEqualTo(false); } + @Test + public void should_update_entity_if_condition_is_met_using_ttl() { + dao.update( + new Product(FLAMETHROWER.getId(), "Description for length 10", new Dimensions(10, 1, 1))); + 
assertThat(dao.findById(FLAMETHROWER.getId())).isNotNull(); + + Product otherProduct = + new Product(FLAMETHROWER.getId(), "Other description", new Dimensions(1, 1, 1)); + assertThat(dao.updateIfLengthUsingTtl(otherProduct, 10).wasApplied()).isEqualTo(true); + } + @Mapper public interface InventoryMapper { @DaoFactory @@ -177,6 +188,9 @@ public interface ProductDao { @Update(customIfClause = "dimensions.length = :length") ResultSet updateIfLength(Product product, int length); + @Update(customIfClause = "dimensions.length = :length", ttl = "20") + ResultSet updateIfLengthUsingTtl(Product product, int length); + @Update(customIfClause = "dimensions.length = :length") BoundStatement updateIfLengthStatement(Product product, int length); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 02a81be61c9..9babcaee5bb 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -195,9 +195,10 @@ private void generatePrepareRequest( methodBuilder, requestName, helperFieldName, annotation.customWhereClause()); maybeAddTtl(annotation.ttl(), methodBuilder); maybeAddTimestamp(annotation.timestamp(), methodBuilder); + methodBuilder.addCode(")"); maybeAddIfClause(methodBuilder, annotation); - methodBuilder.addCode(").asCql()"); + methodBuilder.addCode(".asCql()"); methodBuilder.addCode(")$];\n"); } From 3d295decad98285992c5680c6a3914fbf02a64ee Mon Sep 17 00:00:00 2001 From: Dan LaRocque Date: Thu, 5 Mar 2020 18:46:50 -0600 Subject: [PATCH 398/979] Add V4+ features: `DATE_TYPE` and `SMALLINT_AND_TINYINT_TYPES` This commit adds two DefaultProtocolFeature enum elements, supported from V4 onward: * 
SMALLINT_AND_TINYINT_TYPES * DATE_TYPE --- .../internal/core/DefaultProtocolFeature.java | 14 +++++++++++ .../core/DefaultProtocolVersionRegistry.java | 4 +++- .../DefaultProtocolVersionRegistryTest.java | 23 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java index 8d26d1d23f4..7e324f93ee4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java @@ -35,5 +35,19 @@ public enum DefaultProtocolFeature implements ProtocolFeature { * @see CASSANDRA-10145 */ PER_REQUEST_KEYSPACE, + + /** + * Support for smallint and tinyint types. + * + * @see CASSANDRA-8951 + */ + SMALLINT_AND_TINYINT_TYPES, + + /** + * Support for the date type. + * + * @see CASSANDRA-7523 + */ + DATE_TYPE, ; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java index edf24cef73b..f5fff3ecdb2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java @@ -230,7 +230,9 @@ private void removeHigherThan( @Override public boolean supports(ProtocolVersion version, ProtocolFeature feature) { int code = version.getCode(); - if (DefaultProtocolFeature.UNSET_BOUND_VALUES.equals(feature)) { + if (DefaultProtocolFeature.SMALLINT_AND_TINYINT_TYPES.equals(feature) + || DefaultProtocolFeature.DATE_TYPE.equals(feature) + || DefaultProtocolFeature.UNSET_BOUND_VALUES.equals(feature)) { // All DSE versions and all OSS V4+ return DefaultProtocolVersion.V4.getCode() <= code; } else if 
(DefaultProtocolFeature.PER_REQUEST_KEYSPACE.equals(feature)) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java index 9e9fefb5f53..c86d7c824c5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java @@ -15,8 +15,13 @@ */ package com.datastax.oss.driver.internal.core; +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V1; +import static com.datastax.dse.driver.api.core.DseProtocolVersion.DSE_V2; import static com.datastax.oss.driver.api.core.ProtocolVersion.V3; import static com.datastax.oss.driver.api.core.ProtocolVersion.V4; +import static com.datastax.oss.driver.api.core.ProtocolVersion.V5; +import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.DATE_TYPE; +import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.SMALLINT_AND_TINYINT_TYPES; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.DseProtocolVersion; @@ -114,6 +119,24 @@ public void should_pick_oss_v3_as_highest_common_when_one_node_is_cassandra_2_1( .isEqualTo(ProtocolVersion.V3); } + @Test + public void should_support_date_type_on_oss_v4_and_later() { + assertThat(registry.supports(V3, DATE_TYPE)).isFalse(); + assertThat(registry.supports(V4, DATE_TYPE)).isTrue(); + assertThat(registry.supports(V5, DATE_TYPE)).isTrue(); + assertThat(registry.supports(DSE_V1, DATE_TYPE)).isTrue(); + assertThat(registry.supports(DSE_V2, DATE_TYPE)).isTrue(); + } + + @Test + public void should_support_smallint_and_tinyint_types_on_oss_v4_and_later() { + assertThat(registry.supports(V3, SMALLINT_AND_TINYINT_TYPES)).isFalse(); + assertThat(registry.supports(V4, SMALLINT_AND_TINYINT_TYPES)).isTrue(); + 
assertThat(registry.supports(V5, SMALLINT_AND_TINYINT_TYPES)).isTrue(); + assertThat(registry.supports(DSE_V1, SMALLINT_AND_TINYINT_TYPES)).isTrue(); + assertThat(registry.supports(DSE_V2, SMALLINT_AND_TINYINT_TYPES)).isTrue(); + } + private Node mockCassandraNode(String rawVersion) { Node node = Mockito.mock(Node.class); if (rawVersion != null) { From 5e1e5107c91c3546e5cc81a29cbccdbbaff0f82b Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Thu, 12 Mar 2020 12:26:58 -0500 Subject: [PATCH 399/979] JAVA-2633: Add execution profile argument to DAO factory method (#1409) --- changelog/README.md | 1 + .../datastax/oss/driver/mapper/ProfileIT.java | 327 ++++++++++++++++++ .../dao/DaoDeleteMethodGenerator.java | 2 + .../dao/DaoInsertMethodGenerator.java | 1 + .../processor/dao/DaoMethodGenerator.java | 7 + .../dao/DaoQueryMethodGenerator.java | 1 + .../dao/DaoSelectMethodGenerator.java | 1 + .../dao/DaoUpdateMethodGenerator.java | 1 + .../MapperDaoFactoryMethodGenerator.java | 101 +++++- .../MapperDaoFactoryMethodGeneratorTest.java | 2 +- mapper-runtime/revapi.json | 6 + .../oss/driver/api/mapper/MapperContext.java | 7 + .../api/mapper/annotations/DaoProfile.java | 46 +++ .../driver/internal/mapper/DaoCacheKey.java | 21 +- .../internal/mapper/DefaultMapperContext.java | 39 ++- 15 files changed, 534 insertions(+), 29 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java diff --git a/changelog/README.md b/changelog/README.md index e88071b9faa..a6dc87a5b2a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [new feature] JAVA-2633: Add execution profile argument to DAO mapper factory methods - [improvement] JAVA-2667: Add ability to fail the build when integration tests fail - [bug] JAVA-1861: Add Metadata.getClusterName() diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java new file mode 100644 index 00000000000..373ed0b718f --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java @@ -0,0 +1,327 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.query; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoProfile; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import 
com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Query; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.request.Execute; +import com.datastax.oss.simulacron.common.cluster.ClusterQueryLogReport; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.cluster.QueryLog; +import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import java.util.Map; +import java.util.Objects; +import java.util.UUID; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class ProfileIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + private static ProfileIT.SimpleDao daoString; + private static ProfileIT.SimpleDao daoClass; + private static CqlSession mapperSession; + + @BeforeClass + public static void setupClass() { + primeDeleteQuery(); + primeInsertQuery(); + primeSelectQuery(); + primeCountQuery(); + primeUpdateQuery(); + + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .startProfile("cl") + .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "ANY") + .build(); + mapperSession = SessionUtils.newSession(SIMULACRON_RULE, loader); + + 
ProfileIT.InventoryMapper inventoryMapper = + new ProfileIT_InventoryMapperBuilder(mapperSession).build(); + daoString = inventoryMapper.simpleDao("cl"); + DriverExecutionProfile clProfile = mapperSession.getContext().getConfig().getProfile("cl"); + daoClass = inventoryMapper.simpleDao(clProfile); + } + + @Before + public void setup() { + SIMULACRON_RULE.cluster().clearLogs(); + } + + private static final ProfileIT.Simple simple = new ProfileIT.Simple(UUID.randomUUID(), "DATA"); + + @Test + public void should_honor_exec_profile_on_insert() { + daoString.save(simple); + + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + + SIMULACRON_RULE.cluster().clearLogs(); + + daoClass.save(simple); + report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + } + + @Test + public void should_honor_exec_profile_on_delete() { + daoString.delete(simple); + + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + + SIMULACRON_RULE.cluster().clearLogs(); + + daoClass.delete(simple); + report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + } + + @Test + public void should_honor_exec_profile_on_update() { + daoString.update(simple); + + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + + SIMULACRON_RULE.cluster().clearLogs(); + + daoClass.update(simple); + report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + } + + @Test + public void should_honor_exec_profile_on_query() { + daoString.findByPk(simple.pk); + + ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + + SIMULACRON_RULE.cluster().clearLogs(); + + daoString.findByPk(simple.pk); + report = 
SIMULACRON_RULE.cluster().getLogs(); + validateQueryOptions(report.getQueryLogs().get(0)); + } + + private void validateQueryOptions(QueryLog log) { + + Message message = log.getFrame().message; + assertThat(message).isInstanceOf(Execute.class); + Execute queryExecute = (Execute) message; + assertThat(queryExecute.options.consistency) + .isEqualTo(DefaultConsistencyLevel.ANY.getProtocolCode()); + } + + private static void primeInsertQuery() { + Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); + Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "INSERT INTO ks.simple (pk,data) VALUES (:pk,:data)", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + params, + paramTypes)) + .then(noRows())); + } + + private static void primeDeleteQuery() { + Map params = ImmutableMap.of("pk", simple.getPk()); + Map paramTypes = ImmutableMap.of("pk", "uuid"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "DELETE FROM ks.simple WHERE pk=:pk", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + params, + paramTypes)) + .then(noRows()) + .delay(1, TimeUnit.MILLISECONDS)); + } + + private static void primeSelectQuery() { + Map params = ImmutableMap.of("pk", simple.getPk()); + Map paramTypes = ImmutableMap.of("pk", "uuid"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "SELECT pk,data FROM ks.simple WHERE pk=:pk", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + params, + paramTypes)) + .then(noRows()) + .delay(1, TimeUnit.MILLISECONDS)); + } + + private static void primeCountQuery() { + Map params = ImmutableMap.of("pk", simple.getPk()); + Map paramTypes = 
ImmutableMap.of("pk", "uuid"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "SELECT count(*) FROM ks.simple WHERE pk=:pk", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + params, + paramTypes)) + .then(PrimeDsl.rows().row("count", 1L).columnTypes("count", "bigint").build()) + .delay(1, TimeUnit.MILLISECONDS)); + } + + private static void primeUpdateQuery() { + Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); + Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + SIMULACRON_RULE + .cluster() + .prime( + when(query( + "UPDATE ks.simple SET data=:data WHERE pk=:pk", + Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + params, + paramTypes)) + .then(noRows())); + } + + @Mapper + public interface InventoryMapper { + @DaoFactory + ProfileIT.SimpleDao simpleDao(@DaoProfile String executionProfile); + + @DaoFactory + ProfileIT.SimpleDao simpleDao(@DaoProfile DriverExecutionProfile executionProfile); + } + + @Dao + public interface SimpleDao { + @Insert + void save(ProfileIT.Simple simple); + + @Delete + void delete(ProfileIT.Simple simple); + + @Select + ProfileIT.Simple findByPk(UUID pk); + + @Query("SELECT count(*) FROM ks.simple WHERE pk=:pk") + long count(UUID pk); + + @Update + void update(ProfileIT.Simple simple); + } + + @Entity(defaultKeyspace = "ks") + public static class Simple { + @PartitionKey private UUID pk; + private String data; + + public Simple() {} + + public Simple(UUID pk, String data) { + this.pk = pk; + this.data = data; + } + + public UUID getPk() { + return pk; + } + + public String getData() { + return data; + } + + public void setPk(UUID pk) { + + this.pk = pk; + } + + public void setData(String data) { + this.data = data; + } + + @Override + public boolean equals(Object o) { + if (this 
== o) { + return true; + } + if (!(o instanceof ProfileIT.Simple)) { + return false; + } + ProfileIT.Simple simple = (ProfileIT.Simple) o; + return Objects.equals(pk, simple.pk) && Objects.equals(data, simple.data); + } + + @Override + public int hashCode() { + + return Objects.hash(pk, data); + } + + @Override + public String toString() { + return "Simple{" + "pk=" + pk + ", data='" + data + '\'' + '}'; + } + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 2772d1f1f34..ed6168287e1 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -223,6 +223,8 @@ public Optional generate() { statementName); populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithProfile(methodBodyBuilder); + int nextParameterIndex = 0; if (hasEntityParameter) { warnIfCqlNamePresent(Collections.singletonList(firstParameter)); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 69e2400b7e2..2f71b485073 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -140,6 +140,7 @@ public Optional generate() { populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); 
populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithProfile(methodBodyBuilder); warnIfCqlNamePresent(parameters.subList(0, 1)); String entityParameterName = parameters.get(0).getSimpleName().toString(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 6608c4702b4..fd3ec7b2401 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -146,6 +146,13 @@ protected void populateBuilderWithFunction( } } + protected void populateBuilderWithProfile(CodeBlock.Builder builder) { + builder.beginControlFlow("if(context.getExecutionProfileName() != null)"); + builder.addStatement( + "boundStatementBuilder = boundStatementBuilder.setExecutionProfileName(context.getExecutionProfileName())"); + builder.endControlFlow(); + } + protected void populateBuilderWithStatementAttributes( CodeBlock.Builder builder, ExecutableElement methodElement) { StatementAttributes statementAttributes = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java index 0d3250c7282..e0fbbf5f721 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java @@ -109,6 +109,7 @@ public Optional generate() { populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, 
boundStatementFunction); + populateBuilderWithProfile(methodBodyBuilder); if (validateCqlNamesPresent(parameters)) { GeneratedCodePatterns.bindParameters( diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 5125719e89d..662d5e32b71 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -161,6 +161,7 @@ public Optional generate() { statementName); populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithProfile(methodBodyBuilder); if (!primaryKeyParameters.isEmpty()) { List primaryKeyNames = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 02a81be61c9..46044524025 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -126,6 +126,7 @@ public Optional generate() { populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithProfile(methodBodyBuilder); String entityParameterName = parameters.get(0).getSimpleName().toString(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java index 2637ca4b391..5aec6667ec7 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java @@ -16,9 +16,11 @@ package com.datastax.oss.driver.internal.mapper.processor.mapper; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DaoProfile; import com.datastax.oss.driver.api.mapper.annotations.DaoTable; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.internal.mapper.DaoCacheKey; @@ -103,6 +105,9 @@ public Optional generate() { // Validate the arguments String keyspaceArgumentName = null; String tableArgumentName = null; + String executionProfileName = null; + boolean executionProfileIsClass = false; + for (VariableElement parameterElement : methodElement.getParameters()) { if (parameterElement.getAnnotation(DaoKeyspace.class) != null) { keyspaceArgumentName = @@ -118,6 +123,19 @@ public Optional generate() { if (tableArgumentName == null) { return Optional.empty(); } + } else if (parameterElement.getAnnotation(DaoProfile.class) != null) { + + executionProfileName = + validateExecutionProfile( + parameterElement, executionProfileName, DaoProfile.class, context); + if (context + .getClassUtils() + .isSame(parameterElement.asType(), DriverExecutionProfile.class)) { + executionProfileIsClass = true; + } + if (executionProfileName == null) { + return Optional.empty(); + } } else { 
context .getMessager() @@ -125,15 +143,16 @@ public Optional generate() { methodElement, processedType, "Invalid parameter annotations: " - + "%s method parameters must be annotated with @%s or @%s", + + "%s method parameters must be annotated with @%s, @%s or @%s", DaoFactory.class.getSimpleName(), DaoKeyspace.class.getSimpleName(), - DaoTable.class.getSimpleName()); + DaoTable.class.getSimpleName(), + DaoProfile.class.getSimpleName()); return Optional.empty(); } } boolean isCachedByKeyspaceAndTable = - (keyspaceArgumentName != null || tableArgumentName != null); + (keyspaceArgumentName != null || tableArgumentName != null || executionProfileName != null); TypeName returnTypeName = ClassName.get(methodElement.getReturnType()); String suggestedFieldName = methodElement.getSimpleName() + "Cache"; @@ -160,14 +179,29 @@ public Optional generate() { } else { overridingMethodBuilder.addCode("$L", tableArgumentName); } - overridingMethodBuilder - .addCode(");\n") - .addStatement( - "return $L.computeIfAbsent(key, " - + "k -> $T.$L(context.withKeyspaceAndTable(k.getKeyspaceId(), k.getTableId())))", - fieldName, - daoImplementationName, - isAsync ? "initAsync" : "init"); + overridingMethodBuilder.addCode(", "); + if (executionProfileName == null) { + overridingMethodBuilder.addCode("($T)null", String.class); + } else { + + if (!executionProfileIsClass) { + overridingMethodBuilder.addCode("$L", executionProfileName); + } else { + overridingMethodBuilder.addCode("$L.getName()", executionProfileName); + } + } + overridingMethodBuilder.addCode(");\n"); + + overridingMethodBuilder.addCode( + "return $L.computeIfAbsent(key, " + + "k -> $T.$L(context.withKeyspaceAndTable(k.getKeyspaceId(), k.getTableId())", + fieldName, + daoImplementationName, + isAsync ? 
"initAsync" : "init"); + if (executionProfileName != null) { + overridingMethodBuilder.addCode(".withExecutionProfile($L)", executionProfileName); + } + overridingMethodBuilder.addCode("));\n"); } else { overridingMethodBuilder.addStatement("return $L.get()", fieldName); } @@ -176,33 +210,64 @@ public Optional generate() { private String validateKeyspaceOrTableParameter( VariableElement candidate, String previous, Class annotation, ProcessorContext context) { - if (previous != null) { + if (!isSingleAnnotation(candidate, previous, annotation, context)) { + return null; + } + TypeMirror type = candidate.asType(); + if (!context.getClassUtils().isSame(type, String.class) + && !context.getClassUtils().isSame(type, CqlIdentifier.class)) { context .getMessager() .error( candidate, processedType, - "Invalid parameter annotations: " - + "only one %s method parameter can be annotated with @%s", + "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s", + annotation.getSimpleName(), DaoFactory.class.getSimpleName(), - annotation.getSimpleName()); + String.class.getSimpleName(), + CqlIdentifier.class.getSimpleName()); + return null; + } + return candidate.getSimpleName().toString(); + } + + private String validateExecutionProfile( + VariableElement candidate, String previous, Class annotation, ProcessorContext context) { + if (!isSingleAnnotation(candidate, previous, annotation, context)) { return null; } TypeMirror type = candidate.asType(); if (!context.getClassUtils().isSame(type, String.class) - && !context.getClassUtils().isSame(type, CqlIdentifier.class)) { + && !context.getClassUtils().isSame(type, DriverExecutionProfile.class)) { context .getMessager() .error( candidate, processedType, - "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s", + "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s ", annotation.getSimpleName(), DaoFactory.class.getSimpleName(), 
String.class.getSimpleName(), - CqlIdentifier.class.getSimpleName()); + DriverExecutionProfile.class.getSimpleName()); return null; } return candidate.getSimpleName().toString(); } + + private boolean isSingleAnnotation( + VariableElement candidate, String previous, Class annotation, ProcessorContext context) { + if (previous != null) { + context + .getMessager() + .error( + candidate, + processedType, + "Invalid parameter annotations: " + + "only one %s method parameter can be annotated with @%s", + DaoFactory.class.getSimpleName(), + annotation.getSimpleName()); + return false; + } + return true; + } } diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGeneratorTest.java index 6958b8def46..c3a43aa7136 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGeneratorTest.java @@ -55,7 +55,7 @@ public static Object[][] invalidSignatures() { .build(), }, { - "Invalid parameter annotations: DaoFactory method parameters must be annotated with @DaoKeyspace or @DaoTable", + "Invalid parameter annotations: DaoFactory method parameters must be annotated with @DaoKeyspace, @DaoTable or @DaoProfile", MethodSpec.methodBuilder("productDao") .addAnnotation(DaoFactory.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-runtime/revapi.json b/mapper-runtime/revapi.json index 0a89843bf2a..c9deecd0813 100644 --- a/mapper-runtime/revapi.json +++ b/mapper-runtime/revapi.json @@ -56,6 +56,12 @@ "new": "@interface com.datastax.oss.driver.api.mapper.annotations.TransientProperties", "annotation": 
"@java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE})", "justification": "Oversight, should have been annotated this way from the start" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.lang.String com.datastax.oss.driver.api.mapper.MapperContext::getExecutionProfileName()", + "justification": "JAVA-2633 Adding execution profile to mapper" + } ] } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java index aaaf14fd7a9..2a73523ca6c 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java @@ -45,6 +45,13 @@ public interface MapperContext { @Nullable CqlIdentifier getTableId(); + /** + * If this context belongs to a DAO that was built with a execution-profile-parameterized mapper + * method, the value of that parameter. Otherwise null. + */ + @Nullable + String getExecutionProfileName(); + /** * Returns an instance of the given converter class. * diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java new file mode 100644 index 00000000000..db5cd6b3f27 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotates the parameter of a {@link DaoFactory} method that indicates the execution profile to + * create a DAO for. + * + *

      Example: + * + *

      * + * + *

      + *  * @Mapper
      + *  * public interface InventoryMapper {
      + *  *   ProductDao productDao(@DaoTable String executionProfile);
      + *  * }
      + *  * 
      + * + * The annotated parameter can be a {@link String}. If it is present, the value will be injected in + * the DAO instance, where it will be used in generated queries. This allows you to reuse the same + * DAO for different execution profiles. + * + * @see DaoFactory + */ +@Target(ElementType.PARAMETER) +@Retention(RetentionPolicy.RUNTIME) +public @interface DaoProfile {} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java index 72f96a7ed82..535da277341 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java @@ -22,22 +22,24 @@ public class DaoCacheKey { private final CqlIdentifier keyspaceId; private final CqlIdentifier tableId; + private final String profileName; - public DaoCacheKey(CqlIdentifier keyspaceId, CqlIdentifier tableId) { + public DaoCacheKey(CqlIdentifier keyspaceId, CqlIdentifier tableId, String profileName) { this.keyspaceId = keyspaceId; this.tableId = tableId; + this.profileName = profileName; } - public DaoCacheKey(CqlIdentifier keyspaceId, String tableName) { - this(keyspaceId, toId(tableName)); + public DaoCacheKey(CqlIdentifier keyspaceId, String tableName, String profileName) { + this(keyspaceId, toId(tableName), profileName); } - public DaoCacheKey(String keyspaceName, CqlIdentifier tableId) { - this(toId(keyspaceName), tableId); + public DaoCacheKey(String keyspaceName, CqlIdentifier tableId, String profileName) { + this(toId(keyspaceName), tableId, profileName); } - public DaoCacheKey(String keyspaceName, String tableName) { - this(toId(keyspaceName), toId(tableName)); + public DaoCacheKey(String keyspaceName, String tableName, String profileName) { + this(toId(keyspaceName), toId(tableName), profileName); } private static CqlIdentifier toId(String name) { @@ -59,7 
+61,8 @@ public boolean equals(Object other) { } else if (other instanceof DaoCacheKey) { DaoCacheKey that = (DaoCacheKey) other; return Objects.equals(this.keyspaceId, that.keyspaceId) - && Objects.equals(this.tableId, that.tableId); + && Objects.equals(this.tableId, that.tableId) + && Objects.equals(this.profileName, that.profileName); } else { return false; } @@ -67,6 +70,6 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(keyspaceId, tableId); + return Objects.hash(keyspaceId, tableId, profileName); } } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index c6b7961daf0..0af237a4b82 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.MapperException; import com.datastax.oss.driver.api.mapper.entity.naming.NameConverter; @@ -34,6 +35,7 @@ public class DefaultMapperContext implements MapperContext { private final CqlSession session; private final CqlIdentifier keyspaceId; private final CqlIdentifier tableId; + private final String executionProfile; private final ConcurrentMap, NameConverter> nameConverterCache; private final Map customState; @@ -45,6 +47,21 @@ public DefaultMapperContext( session, keyspaceId, null, + null, + new ConcurrentHashMap<>(), + NullAllowingImmutableMap.copyOf(customState)); + } + + public DefaultMapperContext( + @NonNull CqlSession session, + @Nullable CqlIdentifier keyspaceId, + @Nullable 
String executionProfile, + @NonNull Map customState) { + this( + session, + keyspaceId, + null, + executionProfile, new ConcurrentHashMap<>(), NullAllowingImmutableMap.copyOf(customState)); } @@ -58,6 +75,7 @@ private DefaultMapperContext( CqlSession session, CqlIdentifier keyspaceId, CqlIdentifier tableId, + String executionProfile, ConcurrentMap, NameConverter> nameConverterCache, Map customState) { this.session = session; @@ -65,6 +83,7 @@ private DefaultMapperContext( this.tableId = tableId; this.nameConverterCache = nameConverterCache; this.customState = customState; + this.executionProfile = executionProfile; } public DefaultMapperContext withKeyspaceAndTable( @@ -73,7 +92,19 @@ public DefaultMapperContext withKeyspaceAndTable( && Objects.equals(newTableId, this.tableId)) ? this : new DefaultMapperContext( - session, newKeyspaceId, newTableId, nameConverterCache, customState); + session, newKeyspaceId, newTableId, null, nameConverterCache, customState); + } + + public DefaultMapperContext withExecutionProfile(@Nullable String newExecutionProfile) { + return newExecutionProfile.equals(this.executionProfile) + ? 
this + : new DefaultMapperContext( + session, keyspaceId, tableId, newExecutionProfile, nameConverterCache, customState); + } + + public DefaultMapperContext withExecutionProfile( + @Nullable DriverExecutionProfile newExecutionProfile) { + return withExecutionProfile(newExecutionProfile.getName()); } @NonNull @@ -94,6 +125,12 @@ public CqlIdentifier getTableId() { return tableId; } + @Nullable + @Override + public String getExecutionProfileName() { + return executionProfile; + } + @NonNull @Override public NameConverter getNameConverter(Class converterClass) { From a4e3a8a5a67a6a408a1b077c7d11eeec8d59a27a Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Mon, 16 Mar 2020 12:12:21 -0500 Subject: [PATCH 400/979] Update version in docs --- README.md | 2 +- changelog/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index ecf55ddaca5..95a1a62ae60 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.5.0](https://github.com/datastax/java-driver/tree/4.5.0).* +[4.5.1](https://github.com/datastax/java-driver/tree/4.5.1).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index 0af1aec2d33..b5828b6ebaf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.5.1 (in progress) +### 4.5.1 - [bug] JAVA-2673: Fix mapper generated code for UPDATE with TTL and IF condition From c181f96d40a661eb8d68bd45ec1dcfe44a10c46d Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Mon, 16 Mar 2020 12:27:33 -0500 Subject: [PATCH 401/979] [maven-release-plugin] prepare release 4.5.1 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 06473a4f92f..6f9509a7f50 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss java-driver-core-shaded - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss java-driver-mapper-processor - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss java-driver-mapper-runtime - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss java-driver-query-builder - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss java-driver-test-infra - 4.5.1-SNAPSHOT + 4.5.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 1a21193c19b..2ba07948ee3 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss 
java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 4789b36e364..92880336f4a 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index ee3ca09c3ce..7ce942005e6 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 025307d17f3..1a8aeabf6ec 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.1-SNAPSHOT + 4.5.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index d63cd47448b..fb434faf6c2 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index eb7c7c0e048..74fdad05801 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index cf6eada15e2..9964925f695 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index e914822abfe..7776f141112 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 pom DataStax Java 
driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -876,7 +876,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.5.0 + 4.5.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 6223b7f1e7f..8d8b8658e08 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index d819205636b..3263aaa4128 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1-SNAPSHOT + 4.5.1 java-driver-test-infra bundle From 1c0a0be3b09cb3c699190478b9f108abfdb2e4e7 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Mon, 16 Mar 2020 12:27:41 -0500 Subject: [PATCH 402/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 6f9509a7f50..ef44ae5325b 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.5.1 + 4.5.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.5.1 + 4.5.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.5.1 + 4.5.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.5.1 + 
4.5.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.5.1 + 4.5.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.5.1 + 4.5.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 2ba07948ee3..ab3f459f452 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 92880336f4a..718052dd838 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 7ce942005e6..d5bb3129b58 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1a8aeabf6ec..c437b8a7629 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.5.1 + 4.5.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index fb434faf6c2..c973bab3196 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 74fdad05801..e20b9832b09 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 9964925f695..820219525a8 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index 7776f141112..619c545b924 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -876,7 +876,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.5.1 + 4.5.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8d8b8658e08..b2335277cb3 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3263aaa4128..74e90eba40d 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.5.1 + 4.5.2-SNAPSHOT java-driver-test-infra bundle From 278bba877f5a89c6ce30782a0824c9055f59f98b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 5 Mar 2020 12:04:24 +0200 Subject: [PATCH 403/979] JAVA-2670: Provide base class for mapped custom codecs --- changelog/README.md | 1 + .../api/core/type/codec/MappingCodec.java | 120 +++++++++++ .../core/type/codec/CqlIntToStringCodec.java | 47 +---- .../core/type/codec/MappingCodecTest.java | 102 +++++++++ .../registry/CachingCodecRegistryTest.java | 8 +- .../type/codec/registry/CodecRegistryIT.java | 194 +++++++++++++----- manual/core/custom_codecs/README.md | 108 +++++++++- 7 files changed, 485 insertions(+), 95 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MappingCodecTest.java diff --git a/changelog/README.md b/changelog/README.md index 1462e3849f4..9e214115643 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [new feature] JAVA-2670: Provide base class for mapped custom codecs - [new feature] JAVA-2633: Add execution profile argument to DAO mapper factory methods - [improvement] JAVA-2667: Add ability to fail the build 
when integration tests fail - [bug] JAVA-1861: Add Metadata.getClusterName() diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java new file mode 100644 index 00000000000..3ad908cbf41 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.type.codec; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.Objects; + +/** + * A {@link TypeCodec} that maps instances of {@code InnerT}, a driver supported Java type, to + * instances of a target {@code OuterT} Java type. + * + *

      This codec can be used to provide support for Java types that are not natively handled by the + * driver, as long as there is a conversion path to and from another supported Java type. + * + * @param The "inner" Java type; must be a driver supported Java type< (that is, there must + * exist a codec registered for it). + * @param The "outer", or target Java type; this codec will handle the mapping to and from + * {@code InnerT} and {@code OuterT}. + * @see driver + * documentation on custom codecs + * @see + * driver supported Java types + */ +public abstract class MappingCodec implements TypeCodec { + + protected final TypeCodec innerCodec; + protected final GenericType outerJavaType; + + /** + * Creates a new mapping codec providing support for {@code OuterT} based on an existing codec for + * {@code InnerT}. + * + * @param innerCodec The inner codec to use to handle instances of InnerT; must not be null. + * @param outerJavaType The outer Java type; must not be null. + */ + protected MappingCodec( + @NonNull TypeCodec innerCodec, @NonNull GenericType outerJavaType) { + this.innerCodec = Objects.requireNonNull(innerCodec, "innerCodec cannot be null"); + this.outerJavaType = Objects.requireNonNull(outerJavaType, "outerJavaType cannot be null"); + } + + /** @return The type of {@code OuterT}. */ + @NonNull + @Override + public GenericType getJavaType() { + return outerJavaType; + } + + /** @return The type of {@code InnerT}. 
*/ + public GenericType getInnerJavaType() { + return innerCodec.getJavaType(); + } + + @NonNull + @Override + public DataType getCqlType() { + return innerCodec.getCqlType(); + } + + @Override + public ByteBuffer encode(OuterT value, @NonNull ProtocolVersion protocolVersion) { + return innerCodec.encode(outerToInner(value), protocolVersion); + } + + @Override + public OuterT decode(ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + return innerToOuter(innerCodec.decode(bytes, protocolVersion)); + } + + @NonNull + @Override + public String format(OuterT value) { + return innerCodec.format(outerToInner(value)); + } + + @Override + public OuterT parse(String value) { + return innerToOuter(innerCodec.parse(value)); + } + + /** + * Converts from an instance of the inner Java type to an instance of the outer Java type. Used + * when deserializing or parsing. + * + * @param value The value to convert; may be null. + * @return The converted value; may be null. + */ + @Nullable + protected abstract OuterT innerToOuter(@Nullable InnerT value); + + /** + * Converts from an instance of the outer Java type to an instance of the inner Java type. Used + * when serializing or formatting. + * + * @param value The value to convert; may be null. + * @return The converted value; may be null. 
+ */ + @Nullable + protected abstract InnerT outerToInner(@Nullable OuterT value); +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java index f52d139f1b4..bb006794656 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlIntToStringCodec.java @@ -15,56 +15,31 @@ */ package com.datastax.oss.driver.internal.core.type.codec; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.nio.ByteBuffer; +import edu.umd.cs.findbugs.annotations.Nullable; /** * A sample user codec implementation that we use in our tests. * *

      It maps a CQL string to a Java string containing its textual representation. */ -public class CqlIntToStringCodec implements TypeCodec { +public class CqlIntToStringCodec extends MappingCodec { - @NonNull - @Override - public GenericType getJavaType() { - return GenericType.STRING; - } - - @NonNull - @Override - public DataType getCqlType() { - return DataTypes.INT; - } - - @Override - public ByteBuffer encode(String value, @NonNull ProtocolVersion protocolVersion) { - if (value == null) { - return null; - } else { - return TypeCodecs.INT.encode(Integer.parseInt(value), protocolVersion); - } - } - - @Override - public String decode(ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { - return TypeCodecs.INT.decode(bytes, protocolVersion).toString(); + public CqlIntToStringCodec() { + super(TypeCodecs.INT, GenericType.STRING); } - @NonNull + @Nullable @Override - public String format(String value) { - throw new UnsupportedOperationException("Not implemented for this test"); + protected String innerToOuter(@Nullable Integer value) { + return value == null ? null : value.toString(); } + @Nullable @Override - public String parse(String value) { - throw new UnsupportedOperationException("Not implemented for this test"); + protected Integer outerToInner(@Nullable String value) { + return value == null ? null : Integer.parseInt(value); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MappingCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MappingCodecTest.java new file mode 100644 index 00000000000..80bbb15a8e0 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/MappingCodecTest.java @@ -0,0 +1,102 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import org.junit.Test; + +public class MappingCodecTest extends CodecTestBase { + + public MappingCodecTest() { + this.codec = new CqlIntToStringCodec(); + } + + @Test + public void should_encode() { + // Our codec relies on the JDK's ByteBuffer API. We're not testing the JDK, so no need to try + // a thousand different values. 
+ assertThat(encode("0")).isEqualTo("0x00000000"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode("0x00000000")).isEqualTo("0"); + assertThat(decode("0x")).isNull(); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_not_enough_bytes() { + decode("0x0000"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_too_many_bytes() { + decode("0x0000000000000000"); + } + + @Test + public void should_format() { + assertThat(format("0")).isEqualTo("0"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("0")).isEqualTo("0"); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_input() { + parse("not an int"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.of(String.class))).isTrue(); + assertThat(codec.accepts(GenericType.of(int.class))).isFalse(); + assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(String.class)).isTrue(); + assertThat(codec.accepts(int.class)).isFalse(); + assertThat(codec.accepts(Integer.class)).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts("123")).isTrue(); + // codec accepts any String, even if it can't be encoded + assertThat(codec.accepts("not an int")).isTrue(); + assertThat(codec.accepts(Integer.MIN_VALUE)).isFalse(); + } + + @Test + public void should_expose_inner_and_outer_java_types() { + assertThat(((MappingCodec) codec).getInnerJavaType()).isEqualTo(GenericType.INTEGER); + 
assertThat(codec.getJavaType()).isEqualTo(GenericType.STRING); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java index eba88b75cd5..58e42a6149b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTest.java @@ -115,11 +115,11 @@ public void should_find_user_codec_for_built_in_java_type() { // When the mapping is not ambiguous, the user type should be returned assertThat(registry.codecFor(DataTypes.INT, GenericType.STRING)).isSameAs(intToStringCodec1); assertThat(registry.codecFor(DataTypes.INT, String.class)).isSameAs(intToStringCodec1); - assertThat(registry.codecFor(DataTypes.INT, "")).isSameAs(intToStringCodec1); + assertThat(registry.codecFor(DataTypes.INT, "123")).isSameAs(intToStringCodec1); // When there is an ambiguity with a built-in codec, the built-in codec should have priority assertThat(registry.codecFor(DataTypes.INT)).isSameAs(TypeCodecs.INT); - assertThat(registry.codecFor("")).isSameAs(TypeCodecs.TEXT); + assertThat(registry.codecFor("123")).isSameAs(TypeCodecs.TEXT); verifyZeroInteractions(mockCache); } @@ -513,11 +513,11 @@ public void should_register_user_codec_at_runtime() { // When the mapping is not ambiguous, the user type should be returned assertThat(registry.codecFor(DataTypes.INT, GenericType.STRING)).isSameAs(intToStringCodec); assertThat(registry.codecFor(DataTypes.INT, String.class)).isSameAs(intToStringCodec); - assertThat(registry.codecFor(DataTypes.INT, "")).isSameAs(intToStringCodec); + assertThat(registry.codecFor(DataTypes.INT, "123")).isSameAs(intToStringCodec); // When there is an ambiguity with a built-in codec, the built-in codec should have priority 
assertThat(registry.codecFor(DataTypes.INT)).isSameAs(TypeCodecs.INT); - assertThat(registry.codecFor("")).isSameAs(TypeCodecs.TEXT); + assertThat(registry.codecFor("123")).isSameAs(TypeCodecs.TEXT); verifyZeroInteractions(mockCache); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index d3f266f330e..14eaba82b87 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -24,9 +24,12 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; @@ -37,13 +40,17 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.type.codec.IntCodec; +import com.datastax.oss.driver.internal.core.type.codec.UdtCodec; +import com.google.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; import java.util.Collection; import java.util.HashMap; import 
java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.function.Predicate; import org.assertj.core.util.Maps; @@ -88,6 +95,20 @@ public static void createSchema() { "CREATE TABLE IF NOT EXISTS test2 (k0 text, k1 int, v map, primary key (k0, k1))") .setExecutionProfile(SESSION_RULE.slowProfile()) .build()); + // table with UDT + SESSION_RULE + .session() + .execute( + SimpleStatement.builder("CREATE TYPE IF NOT EXISTS coordinates (x int, y int)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test3 (k0 text, k1 int, v map>, primary key (k0, k1))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); } // A simple codec that allows float values to be used for cassandra int column type. @@ -243,58 +264,7 @@ public void should_register_custom_codec_at_runtime() { } } - // TODO: consider moving this into source as it could be generally useful. - private abstract static class MappingCodec implements TypeCodec { - - private final GenericType javaType; - private final TypeCodec innerCodec; - - MappingCodec(TypeCodec innerCodec, GenericType javaType) { - this.innerCodec = innerCodec; - this.javaType = javaType; - } - - @NonNull - @Override - public GenericType getJavaType() { - return javaType; - } - - @NonNull - @Override - public DataType getCqlType() { - return innerCodec.getCqlType(); - } - - @Override - public ByteBuffer encode(O value, @NonNull ProtocolVersion protocolVersion) { - return innerCodec.encode(encode(value), protocolVersion); - } - - @Override - public O decode(ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { - return decode(innerCodec.decode(bytes, protocolVersion)); - } - - @NonNull - @Override - public String format(O value) { - return value == null ? 
null : innerCodec.format(encode(value)); - } - - @Override - public O parse(String value) { - return value == null || value.isEmpty() || value.equalsIgnoreCase("NULL") - ? null - : decode(innerCodec.parse(value)); - } - - protected abstract O decode(I value); - - protected abstract I encode(O value); - } - - private static class OptionalCodec extends MappingCodec, T> { + private static class OptionalCodec extends MappingCodec> { // in cassandra, empty collections are considered null and vise versa. Predicate isAbsent = @@ -311,12 +281,12 @@ private static class OptionalCodec extends MappingCodec, T> { } @Override - protected Optional decode(T value) { + protected Optional innerToOuter(T value) { return isAbsent.test(value) ? Optional.empty() : Optional.of(value); } @Override - protected T encode(Optional value) { + protected T outerToInner(Optional value) { return value.orElse(null); } } @@ -463,4 +433,120 @@ public void should_be_able_to_handle_empty_collections() { assertThat(row2.getMap(0, Integer.class, String.class)).isEmpty(); } } + + private static final class Coordinates { + + public final int x; + public final int y; + + public Coordinates(int x, int y) { + this.x = x; + this.y = y; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Coordinates that = (Coordinates) o; + return this.x == that.x && this.y == that.y; + } + + @Override + public int hashCode() { + return Objects.hash(x, y); + } + } + + private static class CoordinatesCodec extends MappingCodec { + + public CoordinatesCodec(@NonNull TypeCodec innerCodec) { + super(innerCodec, GenericType.of(Coordinates.class)); + } + + @NonNull + @Override + public UserDefinedType getCqlType() { + return (UserDefinedType) super.getCqlType(); + } + + @Nullable + @Override + protected Coordinates innerToOuter(@Nullable UdtValue value) { + return value == null ? 
null : new Coordinates(value.getInt("x"), value.getInt("y")); + } + + @Nullable + @Override + protected UdtValue outerToInner(@Nullable Coordinates value) { + return value == null + ? null + : getCqlType().newValue().setInt("x", value.x).setInt("y", value.y); + } + } + + @Test + public void should_register_and_use_custom_codec_for_user_defined_type() { + + Map coordinatesMap = ImmutableMap.of("home", new Coordinates(12, 34)); + GenericType> coordinatesMapType = + GenericType.mapOf(String.class, Coordinates.class); + + // Still create a separate session because we don't want to interfere with other tests + try (CqlSession session = SessionUtils.newSession(CCM_RULE, SESSION_RULE.keyspace())) { + + // register the mapping codec for UDT coordinates + UserDefinedType coordinatesUdt = + session + .getMetadata() + .getKeyspace(SESSION_RULE.keyspace()) + .flatMap(ks -> ks.getUserDefinedType("coordinates")) + .orElseThrow(IllegalStateException::new); + MutableCodecRegistry codecRegistry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + + // Retrieve the inner codec + TypeCodec innerCodec = codecRegistry.codecFor(coordinatesUdt); + assertThat(innerCodec).isInstanceOf(UdtCodec.class); + + // Create the "outer" codec and register it + CoordinatesCodec coordinatesCodec = new CoordinatesCodec(innerCodec); + codecRegistry.register(coordinatesCodec); + + // Test that the codec will be used to create on-the-fly codecs + assertThat(codecRegistry.codecFor(Coordinates.class)).isSameAs(coordinatesCodec); + assertThat(codecRegistry.codecFor(coordinatesMapType).accepts(coordinatesMap)).isTrue(); + + // test insertion + PreparedStatement prepared = + session.prepare("INSERT INTO test3 (k0, k1, v) values (?, ?, ?)"); + BoundStatement insert = + prepared + .boundStatementBuilder() + .setString(0, name.getMethodName()) + .setInt(1, 0) + .set( + 2, + coordinatesMap, + coordinatesMapType) // use java type so has to be looked up in registry. 
+ .build(); + session.execute(insert); + + // test retrieval + ResultSet result = + session.execute( + SimpleStatement.builder("SELECT v from test3 where k0 = ? AND k1 = ?") + .addPositionalValues(name.getMethodName(), 0) + .build()); + List rows = result.all(); + assertThat(rows).hasSize(1); + Row row = rows.get(0); + assertThat(row.get(0, coordinatesMapType)).isEqualTo(coordinatesMap); + assertThat(row.getMap(0, String.class, Coordinates.class)).isEqualTo(coordinatesMap); + } + } } diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index c69233e9c38..6fb6dd7ad8e 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -191,6 +191,111 @@ String s1 = row.getString("anIntColumn"); // int -> String, will decode String s2 = row.get("anIntColumn", specialCodec); // int -> String, will decode with specialCodec ``` +### Creating custom Java-to-CQL mappings with `MappingCodec` + +The above example, `CqlIntToStringCodec`, could be rewritten to leverage [MappingCodec], an abstract +class that ships with the driver. This class has been designed for situations where we want to +represent a CQL type with a different Java type than the Java type natively supported by the driver, +and the conversion between the former and the latter is straightforward. + +All you have to do is extend `MappingCodec` and implement two methods that perform the conversion +between the supported Java type -- or "inner" type -- and the target Java type -- or "outer" type: + +```java +public class CqlIntToStringCodec extends MappingCodec { + + public CqlIntToStringCodec() { + super(TypeCodecs.INT, GenericType.STRING); + } + + @Nullable + @Override + protected String innerToOuter(@Nullable Integer value) { + return value == null ? null : value.toString(); + } + + @Nullable + @Override + protected Integer outerToInner(@Nullable String value) { + return value == null ? 
null : Integer.parseInt(value); + } +} +``` + +This technique is especially useful when mapping user-defined types to Java objects. For example, +let's assume the following user-defined type: + +``` +CREATE TYPE coordinates (x int, y int); + ``` + +And let's suppose that we want to map it to the following Java class: + +```java +public class Coordinates { + public final int x; + public final int y; + public Coordinates(int x, int y) { this.x = x; this.y = y; } +} +``` + +All you have to do is create a `MappingCodec` subclass that piggybacks on an existing +`TypeCodec` for the above user-defined type: + +```java +public class CoordinatesCodec extends MappingCodec { + + public CoordinatesCodec(@NonNull TypeCodec innerCodec) { + super(innerCodec, GenericType.of(Coordinates.class)); + } + + @NonNull @Override public UserDefinedType getCqlType() { + return (UserDefinedType) super.getCqlType(); + } + + @Nullable @Override protected Coordinates innerToOuter(@Nullable UdtValue value) { + return value == null ? null : new Coordinates(value.getInt("x"), value.getInt("y")); + } + + @Nullable @Override protected UdtValue outerToInner(@Nullable Coordinates value) { + return value == null ? null : getCqlType().newValue().setInt("x", value.x).setInt("y", value.y); + } +} +``` + +Then the new mapping codec could be registered as follows: + +```java +CqlSession session = ... 
+CodecRegistry codecRegistry = session.getContext().getCodecRegistry(); +// The target user-defined type +UserDefinedType coordinatesUdt = + session + .getMetadata() + .getKeyspace("...") + .flatMap(ks -> ks.getUserDefinedType("coordinates")) + .orElseThrow(IllegalStateException::new); +// The "inner" codec that handles the conversions from CQL from/to UdtValue +TypeCodec innerCodec = codecRegistry.codecFor(coordinatesUdt); +// The mapping codec that will handle the conversions from/to UdtValue and Coordinates +CoordinatesCodec coordinatesCodec = new CoordinatesCodec(innerCodec); +// Register the new codec +((MutableCodecRegistry) codecRegistry).register(coordinatesCodec); +``` + +...and used just like explained above: + +```java +BoundStatement stmt = ...; +stmt.set("coordinates", new Coordinates(10,20), Coordinates.class); + +Row row = ...; +Coordinates coordinates = row.get("coordinates", Coordinates.class); +``` + +Note: if you need even more advanced mapping capabilities, consider adopting +the driver's [object mapping framework](../../mapper/). 
+ ### Subtype polymorphism Suppose the following class hierarchy: @@ -258,4 +363,5 @@ private static String formatRow(Row row) { [CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html [GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html [TypeCodec]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- \ No newline at end of file +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- From e2dcbd950fa7ac3c169ba2fd9ea730424455f321 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 13 Mar 2020 18:57:19 +0100 Subject: [PATCH 404/979] Add examples of tuple and UDT handling --- .../examples/datatypes/TuplesMapped.java | 192 +++++++++++++++++ .../examples/datatypes/TuplesSimple.java | 115 ++++++++++ .../datatypes/UserDefinedTypesMapped.java | 202 ++++++++++++++++++ .../datatypes/UserDefinedTypesSimple.java | 121 +++++++++++ 4 files changed, 630 insertions(+) create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesSimple.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesMapped.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesSimple.java 
diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java new file mode 100644 index 00000000000..817736df1cd --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java @@ -0,0 +1,192 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.examples.datatypes; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; + +/** + * Inserts and retrieves values in columns of tuples. + * + *

      By default, the Java driver maps tuples to {@link TupleValue}. This example goes beyond that + * and shows how to map tuples to arbitrary Java types, leveraging the special {@link MappingCodec}. + * + *

      A simpler example of usage of tuples can be found in {@link TuplesSimple}. + * + *

      Preconditions: + * + *

        + *
      • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
      + * + *

      Side effects: + * + *

        + *
      • creates a new keyspace "examples" in the cluster. If a keyspace with this name already + * exists, it will be reused; + *
      • creates a table "examples.tuples". If it already exists, it will be reused; + *
      • inserts data in the table. + *
      + * + * @see TuplesSimple + * @see MappingCodec + * @see driver + * documentation on custom codecs + */ +public class TuplesMapped { + + /** The Java Pojo that will be mapped to the tuple "coordinates". */ + public static class Coordinates { + + private final int x; + private final int y; + + public Coordinates(int x, int y) { + this.x = x; + this.y = y; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } else if (!(o instanceof Coordinates)) { + return false; + } else { + Coordinates that = (Coordinates) o; + return x == that.x && y == that.y; + } + } + + @Override + public int hashCode() { + return Objects.hash(x, y); + } + + @Override + public String toString() { + return "(" + x + ',' + y + ')'; + } + } + + /** The custom codec that will convert to and from {@link Coordinates}. */ + public static class CoordinatesCodec extends MappingCodec { + + public CoordinatesCodec(@NonNull TypeCodec innerCodec) { + super(innerCodec, GenericType.of(Coordinates.class)); + } + + @NonNull + @Override + public TupleType getCqlType() { + return (TupleType) super.getCqlType(); + } + + @Nullable + @Override + protected Coordinates innerToOuter(@Nullable TupleValue value) { + return value == null ? null : new Coordinates(value.getInt(0), value.getInt(1)); + } + + @Nullable + @Override + protected TupleValue outerToInner(@Nullable Coordinates value) { + return value == null ? 
null : getCqlType().newValue().setInt(0, value.x).setInt(1, value.y); + } + } + + public static void main(String[] args) { + try (CqlSession session = CqlSession.builder().build()) { + createSchema(session); + registerCoordinatesCodec(session); + insertData(session); + retrieveData(session); + } + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + session.execute( + "CREATE TABLE IF NOT EXISTS examples.tuples(k int PRIMARY KEY, c tuple)"); + } + + private static void registerCoordinatesCodec(CqlSession session) { + // retrieve the codec registry + MutableCodecRegistry codecRegistry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + // create the tuple metadata + TupleType coordinatesType = DataTypes.tupleOf(DataTypes.INT, DataTypes.INT); + // retrieve the driver built-in codec for the tuple "coordinates" + TypeCodec innerCodec = codecRegistry.codecFor(coordinatesType); + // create a custom codec to map the "coordinates" tuple to the Coordinates class + CoordinatesCodec coordinatesCodec = new CoordinatesCodec(innerCodec); + // register the new codec + codecRegistry.register(coordinatesCodec); + } + + private static void insertData(CqlSession session) { + // prepare the INSERT statement + PreparedStatement prepared = + session.prepare("INSERT INTO examples.tuples (k, c) VALUES (?, ?)"); + + // bind the parameters in one pass + Coordinates coordinates1 = new Coordinates(12, 34); + BoundStatement boundStatement1 = prepared.bind(1, coordinates1); + // execute the insertion + session.execute(boundStatement1); + + // alternate method: bind the parameters one by one + Coordinates coordinates2 = new Coordinates(56, 78); + BoundStatement boundStatement2 = + prepared.bind().setInt("k", 2).set("c", coordinates2, Coordinates.class); + // execute the insertion + session.execute(boundStatement2); + } + + 
private static void retrieveData(CqlSession session) { + for (int k = 1; k <= 2; k++) { + // Execute the SELECT query and retrieve the single row in the result set + SimpleStatement statement = + SimpleStatement.newInstance("SELECT c FROM examples.tuples WHERE k = ?", k); + Row row = session.execute(statement).one(); + assert row != null; + + // Retrieve the value for column c + Coordinates coordinatesValue = row.get("c", Coordinates.class); + assert coordinatesValue != null; + + // Display the contents of the Coordinates instance + System.out.println("found coordinate: " + coordinatesValue); + } + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesSimple.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesSimple.java new file mode 100644 index 00000000000..6f9c90f8482 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesSimple.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.datatypes; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.TupleType; + +/** + * Inserts and retrieves values in columns of tuple types. + * + *

      By default, the Java driver maps tuples to {@link TupleValue}. This example shows how to + * create instances of {@link TupleValue}, how to insert them in the database, and how to retrieve + * such instances from the database. + * + *

      For a more complex example showing how to map tuples to arbitrary Java types, see {@link + * TuplesMapped}. + * + *

      Preconditions: + * + *

        + *
      • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
      + * + *

      Side effects: + * + *

        + *
      • creates a new keyspace "examples" in the cluster. If a keyspace with this name already + * exists, it will be reused; + *
      • creates a table "examples.tuples". If it already exists, it will be reused; + *
      • inserts data in the table. + *
      + * + * @see driver + * documentation on custom codecs + * @see TuplesMapped + */ +public class TuplesSimple { + + public static void main(String[] args) { + try (CqlSession session = CqlSession.builder().build()) { + createSchema(session); + insertData(session); + retrieveData(session); + } + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + session.execute( + "CREATE TABLE IF NOT EXISTS examples.tuples(k int PRIMARY KEY, c tuple)"); + } + + private static void insertData(CqlSession session) { + // prepare the INSERT statement + PreparedStatement prepared = + session.prepare("INSERT INTO examples.tuples (k, c) VALUES (?, ?)"); + + // create the tuple metadata + TupleType coordinatesType = DataTypes.tupleOf(DataTypes.INT, DataTypes.INT); + + // bind the parameters in one pass + TupleValue coordinates1 = coordinatesType.newValue(12, 34); + BoundStatement boundStatement1 = prepared.bind(1, coordinates1); + // execute the insertion + session.execute(boundStatement1); + + // alternate method: bind the parameters one by one + TupleValue coordinates2 = coordinatesType.newValue(56, 78); + BoundStatement boundStatement2 = + prepared.bind().setInt("k", 2).setTupleValue("c", coordinates2); + // execute the insertion + session.execute(boundStatement2); + } + + private static void retrieveData(CqlSession session) { + for (int k = 1; k <= 2; k++) { + // Execute the SELECT query and retrieve the single row in the result set + SimpleStatement statement = + SimpleStatement.newInstance("SELECT c FROM examples.tuples WHERE k = ?", k); + Row row = session.execute(statement).one(); + assert row != null; + + // Retrieve the value for column c + TupleValue coordinatesValue = row.getTupleValue("c"); + assert coordinatesValue != null; + + // Display the contents of the tuple + System.out.printf( + "found coordinate: (%d,%d)%n", 
coordinatesValue.getInt(0), coordinatesValue.getInt(1)); + } + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesMapped.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesMapped.java new file mode 100644 index 00000000000..c634bd0eb9b --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesMapped.java @@ -0,0 +1,202 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.datatypes; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; + +/** + * Inserts and retrieves values in columns of user-defined types. + * + *

      By default, the Java driver maps user-defined types to {@link UdtValue}. This example goes + * beyond that and shows how to map user-defined types to arbitrary Java types, leveraging the + * special {@link MappingCodec}. + * + *

      A simpler example of usage of user-defined types can be found in {@link + * UserDefinedTypesSimple}. + * + *

      Preconditions: + * + *

        + *
      • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
      + * + *

      Side effects: + * + *

        + *
      • creates a new keyspace "examples" in the cluster. If a keyspace with this name already + * exists, it will be reused; + *
      • creates a table "examples.udts". If it already exists, it will be reused; + *
      • inserts data in the table. + *
      + * + * @see UserDefinedTypesSimple + * @see MappingCodec + * @see driver + * documentation on custom codecs + */ +public class UserDefinedTypesMapped { + + /** The Java Pojo that will be mapped to the user-defined type "coordinates". */ + public static class Coordinates { + + private final int x; + private final int y; + + public Coordinates(int x, int y) { + this.x = x; + this.y = y; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } else if (!(o instanceof Coordinates)) { + return false; + } else { + Coordinates that = (Coordinates) o; + return x == that.x && y == that.y; + } + } + + @Override + public int hashCode() { + return Objects.hash(x, y); + } + + @Override + public String toString() { + return "(" + x + ',' + y + ')'; + } + } + + /** The custom codec that will convert to and from {@link Coordinates}. */ + public static class CoordinatesCodec extends MappingCodec { + + public CoordinatesCodec(@NonNull TypeCodec innerCodec) { + super(innerCodec, GenericType.of(Coordinates.class)); + } + + @NonNull + @Override + public UserDefinedType getCqlType() { + return (UserDefinedType) super.getCqlType(); + } + + @Nullable + @Override + protected Coordinates innerToOuter(@Nullable UdtValue value) { + return value == null ? null : new Coordinates(value.getInt("x"), value.getInt("y")); + } + + @Nullable + @Override + protected UdtValue outerToInner(@Nullable Coordinates value) { + return value == null + ? 
null + : getCqlType().newValue().setInt("x", value.x).setInt("y", value.y); + } + } + + public static void main(String[] args) { + try (CqlSession session = CqlSession.builder().build()) { + createSchema(session); + registerCoordinatesCodec(session); + insertData(session); + retrieveData(session); + } + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + session.execute("CREATE TYPE IF NOT EXISTS examples.coordinates(x int, y int)"); + session.execute("CREATE TABLE IF NOT EXISTS examples.udts(k int PRIMARY KEY, c coordinates)"); + } + + private static void registerCoordinatesCodec(CqlSession session) { + // retrieve the codec registry + MutableCodecRegistry codecRegistry = + (MutableCodecRegistry) session.getContext().getCodecRegistry(); + // retrieve the user-defined type metadata + UserDefinedType coordinatesType = retrieveCoordinatesType(session); + // retrieve the driver built-in codec for the user-defined type "coordinates" + TypeCodec innerCodec = codecRegistry.codecFor(coordinatesType); + // create a custom codec to map the "coordinates" user-defined type to the Coordinates class + CoordinatesCodec coordinatesCodec = new CoordinatesCodec(innerCodec); + // register the new codec + codecRegistry.register(coordinatesCodec); + } + + private static void insertData(CqlSession session) { + // prepare the INSERT statement + PreparedStatement prepared = session.prepare("INSERT INTO examples.udts (k, c) VALUES (?, ?)"); + + // bind the parameters in one pass + Coordinates coordinates1 = new Coordinates(12, 34); + BoundStatement boundStatement1 = prepared.bind(1, coordinates1); + // execute the insertion + session.execute(boundStatement1); + + // alternate method: bind the parameters one by one + Coordinates coordinates2 = new Coordinates(56, 78); + BoundStatement boundStatement2 = + prepared.bind().setInt("k", 
2).set("c", coordinates2, Coordinates.class); + // execute the insertion + session.execute(boundStatement2); + } + + private static void retrieveData(CqlSession session) { + for (int k = 1; k <= 2; k++) { + // Execute the SELECT query and retrieve the single row in the result set + SimpleStatement statement = + SimpleStatement.newInstance("SELECT c FROM examples.udts WHERE k = ?", k); + Row row = session.execute(statement).one(); + assert row != null; + + // Retrieve the value for column c + Coordinates coordinatesValue = row.get("c", Coordinates.class); + assert coordinatesValue != null; + + // Display the contents of the Coordinates instance + System.out.println("found coordinate: " + coordinatesValue); + } + } + + private static UserDefinedType retrieveCoordinatesType(CqlSession session) { + return session + .getMetadata() + .getKeyspace("examples") + .flatMap(ks -> ks.getUserDefinedType("coordinates")) + .orElseThrow(IllegalStateException::new); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesSimple.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesSimple.java new file mode 100644 index 00000000000..0d5ca5eb27d --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/UserDefinedTypesSimple.java @@ -0,0 +1,121 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.datatypes; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; + +/** + * Inserts and retrieves values in columns of user-defined types. + * + *

      By default, the Java driver maps user-defined types to {@link UdtValue}. This example shows + * how to create instances of {@link UdtValue}, how to insert them in the database, and how to + * retrieve such instances from the database. + * + *

      For a more complex example showing how to map user-defined types to arbitrary Java types, see + * {@link UserDefinedTypesMapped}. + * + *

      Preconditions: + * + *

        + *
      • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
      + * + *

      Side effects: + * + *

        + *
      • creates a new keyspace "examples" in the cluster. If a keyspace with this name already + * exists, it will be reused; + *
      • creates a table "examples.udts". If it already exists, it will be reused; + *
      • inserts data in the table. + *
      + * + * @see driver + * documentation on custom codecs + * @see UserDefinedTypesMapped + */ +public class UserDefinedTypesSimple { + + public static void main(String[] args) { + try (CqlSession session = CqlSession.builder().build()) { + createSchema(session); + insertData(session); + retrieveData(session); + } + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + session.execute("CREATE TYPE IF NOT EXISTS examples.coordinates(x int, y int)"); + session.execute("CREATE TABLE IF NOT EXISTS examples.udts(k int PRIMARY KEY, c coordinates)"); + } + + private static void insertData(CqlSession session) { + // prepare the INSERT statement + PreparedStatement prepared = session.prepare("INSERT INTO examples.udts (k, c) VALUES (?, ?)"); + + // retrieve the user-defined type metadata + UserDefinedType coordinatesType = retrieveCoordinatesType(session); + + // bind the parameters in one pass + UdtValue coordinates1 = coordinatesType.newValue(12, 34); + BoundStatement boundStatement1 = prepared.bind(1, coordinates1); + // execute the insertion + session.execute(boundStatement1); + + // alternate method: bind the parameters one by one + UdtValue coordinates2 = coordinatesType.newValue(56, 78); + BoundStatement boundStatement2 = prepared.bind().setInt("k", 2).setUdtValue("c", coordinates2); + // execute the insertion + session.execute(boundStatement2); + } + + private static void retrieveData(CqlSession session) { + for (int k = 1; k <= 2; k++) { + // Execute the SELECT query and retrieve the single row in the result set + SimpleStatement statement = + SimpleStatement.newInstance("SELECT c FROM examples.udts WHERE k = ?", k); + Row row = session.execute(statement).one(); + assert row != null; + + // Retrieve the value for column c + UdtValue coordinatesValue = row.getUdtValue("c"); + assert coordinatesValue != null; + + 
// Display the contents of the UdtValue instance + System.out.printf( + "found coordinate: (%d,%d)%n", + coordinatesValue.getInt("x"), coordinatesValue.getInt("y")); + } + } + + private static UserDefinedType retrieveCoordinatesType(CqlSession session) { + return session + .getMetadata() + .getKeyspace("examples") + .flatMap(ks -> ks.getUserDefinedType("coordinates")) + .orElseThrow(IllegalStateException::new); + } +} From 437f1ce8b6cbd13e2273c8c7b9e105fc39c8d36d Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 19 Mar 2020 13:22:25 -0500 Subject: [PATCH 405/979] Fix malformed javadoc comment. --- .../datastax/oss/driver/api/core/type/codec/MappingCodec.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java index 3ad908cbf41..6694132cfc6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/MappingCodec.java @@ -30,7 +30,7 @@ *

      This codec can be used to provide support for Java types that are not natively handled by the * driver, as long as there is a conversion path to and from another supported Java type. * - * @param The "inner" Java type; must be a driver supported Java type< (that is, there must + * @param The "inner" Java type; must be a driver supported Java type (that is, there must * exist a codec registered for it). * @param The "outer", or target Java type; this codec will handle the mapping to and from * {@code InnerT} and {@code OuterT}. From bab2ef60766222e34633da067b14c3adca3558dc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 21 Mar 2020 15:57:43 +0100 Subject: [PATCH 406/979] Fix impl. of getFormattedContents for TupleValue and UdtValue Without this fix, the codec registry might return a registered custom codec for the CQL type instead of generating a new codec on the fly for tuple <-> TupleValue or udt <-> UdtValue. If the registered custom codec does not handle TupleValue or UdtValue, the call to TypeCodec.format() will throw a ClassCastException. 
--- .../java/com/datastax/oss/driver/api/core/data/TupleValue.java | 2 +- .../java/com/datastax/oss/driver/api/core/data/UdtValue.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java index 4a5727cdda0..01689a92f78 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/TupleValue.java @@ -56,7 +56,7 @@ public interface TupleValue extends GettableByIndex, SettableByIndex */ @NonNull default String getFormattedContents() { - return codecRegistry().codecFor(getType()).format(this); + return codecRegistry().codecFor(getType(), TupleValue.class).format(this); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java index df5c6c0cc97..1e67d796bba 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/UdtValue.java @@ -57,7 +57,7 @@ public interface UdtValue */ @NonNull default String getFormattedContents() { - return codecRegistry().codecFor(getType()).format(this); + return codecRegistry().codecFor(getType(), UdtValue.class).format(this); } /** From a97c7b4343b86647a64f212d8f44e4c3c9d8b4e7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 21 Mar 2020 15:59:17 +0100 Subject: [PATCH 407/979] Make CqlIntToStringCodec extend MappingCodec --- .../core/type/codec/CqlIntToStringCodec.java | 47 +++++-------------- 1 file changed, 11 insertions(+), 36 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java index c133d3932b0..9be7100036f 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/CqlIntToStringCodec.java @@ -15,56 +15,31 @@ */ package com.datastax.oss.driver.core.type.codec; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.nio.ByteBuffer; +import edu.umd.cs.findbugs.annotations.Nullable; /** * A sample user codec implementation that we use in our tests. * *

      It maps a CQL string to a Java string containing its textual representation. */ -public class CqlIntToStringCodec implements TypeCodec { +public class CqlIntToStringCodec extends MappingCodec { - @NonNull - @Override - public GenericType getJavaType() { - return GenericType.STRING; - } - - @NonNull - @Override - public DataType getCqlType() { - return DataTypes.INT; - } - - @Override - public ByteBuffer encode(String value, @NonNull ProtocolVersion protocolVersion) { - if (value == null) { - return null; - } else { - return TypeCodecs.INT.encode(Integer.parseInt(value), protocolVersion); - } - } - - @Override - public String decode(ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { - return TypeCodecs.INT.decode(bytes, protocolVersion).toString(); + public CqlIntToStringCodec() { + super(TypeCodecs.INT, GenericType.STRING); } - @NonNull + @Nullable @Override - public String format(String value) { - throw new UnsupportedOperationException("Not implemented for this test"); + protected String innerToOuter(@Nullable Integer value) { + return value == null ? null : value.toString(); } + @Nullable @Override - public String parse(String value) { - throw new UnsupportedOperationException("Not implemented for this test"); + protected Integer outerToInner(@Nullable String value) { + return value == null ? 
null : Integer.parseInt(value); } } From bf272380fe374980253f3c4f1f5c526e27f5708b Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 23 Mar 2020 10:13:54 -0700 Subject: [PATCH 408/979] Fix NodeMetadataIT --- .../driver/core/metadata/NodeMetadataIT.java | 98 +++++++++---------- 1 file changed, 48 insertions(+), 50 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index e2e494f83a7..97dcc044856 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -25,7 +25,7 @@ import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.utils.ConditionChecker; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.context.EventBus; @@ -37,71 +37,69 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; @Category(ParallelizableTests.class) public class NodeMetadataIT { - private CcmRule ccmRule = CcmRule.getInstance(); - - private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); - - @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + @Rule public CcmRule ccmRule = CcmRule.getInstance(); @Test public void should_expose_node_metadata() { - CqlSession session = sessionRule.session(); + try (CqlSession session = SessionUtils.newSession(ccmRule)) { - Node node = getUniqueNode(session); - 
// Run a few basic checks given what we know about our test environment: - assertThat(node.getEndPoint()).isNotNull(); - InetSocketAddress connectAddress = (InetSocketAddress) node.getEndPoint().resolve(); - node.getBroadcastAddress() - .ifPresent( - broadcastAddress -> - assertThat(broadcastAddress.getAddress()).isEqualTo(connectAddress.getAddress())); - assertThat(node.getListenAddress().get().getAddress()).isEqualTo(connectAddress.getAddress()); - assertThat(node.getDatacenter()).isEqualTo("dc1"); - assertThat(node.getRack()).isEqualTo("r1"); - if (!CcmBridge.DSE_ENABLEMENT) { - // CcmBridge does not report accurate C* versions for DSE, only approximated values - assertThat(node.getCassandraVersion()).isEqualTo(ccmRule.getCassandraVersion()); - } - assertThat(node.getState()).isSameAs(NodeState.UP); - assertThat(node.getDistance()).isSameAs(NodeDistance.LOCAL); - assertThat(node.getHostId()).isNotNull(); - assertThat(node.getSchemaVersion()).isNotNull(); - long upTime1 = node.getUpSinceMillis(); - assertThat(upTime1).isGreaterThan(-1); + Node node = getUniqueNode(session); + // Run a few basic checks given what we know about our test environment: + assertThat(node.getEndPoint()).isNotNull(); + InetSocketAddress connectAddress = (InetSocketAddress) node.getEndPoint().resolve(); + node.getBroadcastAddress() + .ifPresent( + broadcastAddress -> + assertThat(broadcastAddress.getAddress()).isEqualTo(connectAddress.getAddress())); + assertThat(node.getListenAddress().get().getAddress()).isEqualTo(connectAddress.getAddress()); + assertThat(node.getDatacenter()).isEqualTo("dc1"); + assertThat(node.getRack()).isEqualTo("r1"); + if (!CcmBridge.DSE_ENABLEMENT) { + // CcmBridge does not report accurate C* versions for DSE, only approximated values + assertThat(node.getCassandraVersion()).isEqualTo(ccmRule.getCassandraVersion()); + } + assertThat(node.getState()).isSameAs(NodeState.UP); + assertThat(node.getDistance()).isSameAs(NodeDistance.LOCAL); + 
assertThat(node.getHostId()).isNotNull(); + assertThat(node.getSchemaVersion()).isNotNull(); + long upTime1 = node.getUpSinceMillis(); + assertThat(upTime1).isGreaterThan(-1); - // Note: open connections and reconnection status are covered in NodeStateIT + // Note: open connections and reconnection status are covered in NodeStateIT - // Force the node down and back up to check that upSinceMillis gets updated - EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); - eventBus.fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); - ConditionChecker.checkThat(() -> node.getState() == NodeState.FORCED_DOWN).becomesTrue(); - assertThat(node.getUpSinceMillis()).isEqualTo(-1); - eventBus.fire(TopologyEvent.forceUp(node.getBroadcastRpcAddress().get())); - ConditionChecker.checkThat(() -> node.getState() == NodeState.UP).becomesTrue(); - assertThat(node.getUpSinceMillis()).isGreaterThan(upTime1); + // Force the node down and back up to check that upSinceMillis gets updated + EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); + eventBus.fire(TopologyEvent.forceDown(node.getBroadcastRpcAddress().get())); + ConditionChecker.checkThat(() -> node.getState() == NodeState.FORCED_DOWN).becomesTrue(); + assertThat(node.getUpSinceMillis()).isEqualTo(-1); + eventBus.fire(TopologyEvent.forceUp(node.getBroadcastRpcAddress().get())); + ConditionChecker.checkThat(() -> node.getState() == NodeState.UP).becomesTrue(); + assertThat(node.getUpSinceMillis()).isGreaterThan(upTime1); + } } @Test @DseRequirement(min = "5.1") public void should_expose_dse_node_properties() { - Node node = getUniqueNode(sessionRule.session()); + try (CqlSession session = SessionUtils.newSession(ccmRule)) { + + Node node = getUniqueNode(session); - // Basic checks as we want something that will work with a large range of DSE versions: - assertThat(node.getExtras()) - .containsKeys( - DseNodeProperties.DSE_VERSION, - 
DseNodeProperties.DSE_WORKLOADS, - DseNodeProperties.SERVER_ID); - assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)) - .isEqualTo(ccmRule.getDseVersion().get()); - assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); - assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); + // Basic checks as we want something that will work with a large range of DSE versions: + assertThat(node.getExtras()) + .containsKeys( + DseNodeProperties.DSE_VERSION, + DseNodeProperties.DSE_WORKLOADS, + DseNodeProperties.SERVER_ID); + assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)) + .isEqualTo(ccmRule.getDseVersion().get()); + assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); + assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); + } } private static Node getUniqueNode(CqlSession session) { From a85000225f2fb6443e8e66d6757246d2d05d2d7f Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Tue, 24 Mar 2020 12:35:26 -0500 Subject: [PATCH 409/979] JAVA-2105: Transient Replication Support (#1414) * JAVA-2105: Transient Replication Support --- changelog/README.md | 1 + .../NetworkTopologyReplicationStrategy.java | 12 +-- .../metadata/token/ReplicationFactor.java | 84 +++++++++++++++++++ .../token/SimpleReplicationStrategy.java | 10 +-- .../metadata/token/ReplicationFactorTest.java | 43 ++++++++++ .../token/SimpleReplicationStrategyTest.java | 10 +-- 6 files changed, 144 insertions(+), 16 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactorTest.java diff --git a/changelog/README.md b/changelog/README.md index 9e214115643..7c0e299c4c5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [new 
feature] JAVA-2105: Add support for transient replication - [new feature] JAVA-2670: Provide base class for mapped custom codecs - [new feature] JAVA-2633: Add execution profile argument to DAO mapper factory methods - [improvement] JAVA-2667: Add ability to fail the build when integration tests fail diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java index 8315ccac2ab..3c017eca8b6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java @@ -40,15 +40,15 @@ class NetworkTopologyReplicationStrategy implements ReplicationStrategy { LoggerFactory.getLogger(NetworkTopologyReplicationStrategy.class); private final Map replicationConfig; - private final Map replicationFactors; + private final Map replicationFactors; private final String logPrefix; NetworkTopologyReplicationStrategy(Map replicationConfig, String logPrefix) { this.replicationConfig = replicationConfig; - ImmutableMap.Builder factorsBuilder = ImmutableMap.builder(); + ImmutableMap.Builder factorsBuilder = ImmutableMap.builder(); for (Map.Entry entry : replicationConfig.entrySet()) { if (!entry.getKey().equals("class")) { - factorsBuilder.put(entry.getKey(), Integer.parseInt(entry.getValue())); + factorsBuilder.put(entry.getKey(), ReplicationFactor.fromString(entry.getValue())); } } this.replicationFactors = factorsBuilder.build(); @@ -88,7 +88,7 @@ public SetMultimap computeReplicasByToken( if (dc == null || !allDcReplicas.containsKey(dc)) { continue; } - Integer rf = replicationFactors.get(dc); + Integer rf = replicationFactors.get(dc).fullReplicas(); Set dcReplicas = allDcReplicas.get(dc); if (rf == null || dcReplicas.size() >= rf) { 
continue; @@ -123,7 +123,7 @@ public SetMultimap computeReplicasByToken( // Warn the user because that leads to quadratic performance of this method (JAVA-702). for (Map.Entry> entry : allDcReplicas.entrySet()) { String dcName = entry.getKey(); - int expectedFactor = replicationFactors.get(dcName); + int expectedFactor = replicationFactors.get(dcName).fullReplicas(); int achievedFactor = entry.getValue().size(); if (achievedFactor < expectedFactor && !warnedDcs.contains(dcName)) { LOG.warn( @@ -148,7 +148,7 @@ private boolean allDone(Map> map, Map dcNodeC for (Map.Entry> entry : map.entrySet()) { String dc = entry.getKey(); int dcCount = (dcNodeCount.get(dc) == null) ? 0 : dcNodeCount.get(dc); - if (entry.getValue().size() < Math.min(replicationFactors.get(dc), dcCount)) { + if (entry.getValue().size() < Math.min(replicationFactors.get(dc).fullReplicas(), dcCount)) { return false; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java new file mode 100644 index 00000000000..8c153466446 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java @@ -0,0 +1,84 @@ +package com.datastax.oss.driver.internal.core.metadata.token; +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import java.util.Objects; + +// This class is a subset of server version at org.apache.cassandra.locator.ReplicationFactor +public class ReplicationFactor { + private final int allReplicas; + private final int fullReplicas; + private final int transientReplicas; + + public ReplicationFactor(int allReplicas, int transientReplicas) { + this.allReplicas = allReplicas; + this.transientReplicas = transientReplicas; + this.fullReplicas = allReplicas - transientReplicas; + } + + public ReplicationFactor(int allReplicas) { + this(allReplicas, 0); + } + + public int fullReplicas() { + return fullReplicas; + } + + public int transientReplicas() { + return transientReplicas; + } + + public boolean hasTransientReplicas() { + return allReplicas != fullReplicas; + } + + public static ReplicationFactor fromString(String s) { + if (s.contains("/")) { + + int slash = s.indexOf('/'); + String allPart = s.substring(0, slash); + String transientPart = s.substring(slash + 1); + Preconditions.checkArgument( + allPart != null && transientPart != null, + "Replication factor format is or /"); + return new ReplicationFactor(Integer.parseInt(allPart), Integer.parseInt(transientPart)); + } else { + return new ReplicationFactor(Integer.parseInt(s), 0); + } + } + + @Override + public String toString() { + return allReplicas + (hasTransientReplicas() ? 
"/" + transientReplicas() : ""); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof ReplicationFactor)) { + return false; + } + ReplicationFactor that = (ReplicationFactor) o; + return allReplicas == that.allReplicas && fullReplicas == that.fullReplicas; + } + + @Override + public int hashCode() { + return Objects.hash(allReplicas, fullReplicas); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java index 4e02dee46bd..3cb1e0458b0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java @@ -30,14 +30,14 @@ @ThreadSafe class SimpleReplicationStrategy implements ReplicationStrategy { - private final int replicationFactor; + private final ReplicationFactor replicationFactor; SimpleReplicationStrategy(Map replicationConfig) { this(extractReplicationFactor(replicationConfig)); } @VisibleForTesting - SimpleReplicationStrategy(int replicationFactor) { + SimpleReplicationStrategy(ReplicationFactor replicationFactor) { this.replicationFactor = replicationFactor; } @@ -45,7 +45,7 @@ class SimpleReplicationStrategy implements ReplicationStrategy { public SetMultimap computeReplicasByToken( Map tokenToPrimary, List ring) { - int rf = Math.min(replicationFactor, ring.size()); + int rf = Math.min(replicationFactor.fullReplicas(), ring.size()); ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); for (int i = 0; i < ring.size(); i++) { @@ -63,9 +63,9 @@ private static Token getTokenWrapping(int i, List ring) { return ring.get(i % ring.size()); } - private static int extractReplicationFactor(Map replicationConfig) { + private static ReplicationFactor 
extractReplicationFactor(Map replicationConfig) { String factorString = replicationConfig.get("replication_factor"); Preconditions.checkNotNull(factorString, "Missing replication factor in " + replicationConfig); - return Integer.parseInt(factorString); + return ReplicationFactor.fromString(factorString); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactorTest.java new file mode 100644 index 00000000000..3c2e8a12ce3 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactorTest.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.token; + +import static com.datastax.oss.driver.Assertions.assertThat; + +import org.junit.Test; + +public class ReplicationFactorTest { + @Test + public void should_parse_factor_from_string() { + ReplicationFactor transFactor = ReplicationFactor.fromString("3/1"); + assertThat(transFactor.fullReplicas()).isEqualTo(2); + assertThat(transFactor.hasTransientReplicas()).isTrue(); + assertThat(transFactor.transientReplicas()).isEqualTo(1); + + ReplicationFactor factor = ReplicationFactor.fromString("3"); + assertThat(factor.fullReplicas()).isEqualTo(3); + assertThat(factor.hasTransientReplicas()).isFalse(); + assertThat(factor.transientReplicas()).isEqualTo(0); + } + + @Test + public void should_create_string_from_factor() { + ReplicationFactor transFactor = new ReplicationFactor(3, 1); + assertThat(transFactor.toString()).isEqualTo("3/1"); + ReplicationFactor factor = new ReplicationFactor(3); + assertThat(factor.toString()).isEqualTo("3"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java index 7dd48a0088d..121ea685a75 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java @@ -61,7 +61,7 @@ public void should_compute_for_simple_layout() { List ring = ImmutableList.of(TOKEN01, TOKEN06, TOKEN14, TOKEN19); Map tokenToPrimary = ImmutableMap.of(TOKEN01, node1, TOKEN06, node2, TOKEN14, node1, TOKEN19, node2); - SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(2); + SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When SetMultimap replicasByToken = @@ -83,7 +83,7 @@ public void 
should_compute_when_nodes_own_consecutive_tokens() { List ring = ImmutableList.of(TOKEN01, TOKEN06, TOKEN14, TOKEN19); Map tokenToPrimary = ImmutableMap.of(TOKEN01, node1, TOKEN06, node1, TOKEN14, node2, TOKEN19, node2); - SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(2); + SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When SetMultimap replicasByToken = @@ -104,7 +104,7 @@ public void should_compute_when_ring_unbalanced() { List ring = ImmutableList.of(TOKEN01, TOKEN06, TOKEN14, TOKEN19); Map tokenToPrimary = ImmutableMap.of(TOKEN01, node1, TOKEN06, node1, TOKEN14, node2, TOKEN19, node1); - SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(2); + SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When SetMultimap replicasByToken = @@ -125,7 +125,7 @@ public void should_compute_when_replication_factor_is_larger_than_cluster_size() List ring = ImmutableList.of(TOKEN01, TOKEN06, TOKEN14, TOKEN19); Map tokenToPrimary = ImmutableMap.of(TOKEN01, node1, TOKEN06, node2, TOKEN14, node1, TOKEN19, node2); - SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(6); + SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(6)); // When SetMultimap replicasByToken = @@ -185,7 +185,7 @@ public void should_compute_for_complex_layout() { .put(TOKEN18, node6) .build(); - SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(3); + SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(3)); // When SetMultimap replicasByToken = From b32959d9de834f4f741c260ca93f678687227ed7 Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Tue, 24 Mar 2020 12:58:37 -0500 Subject: [PATCH 410/979] Remove useless check in ReplicationFactor --- .../internal/core/metadata/token/ReplicationFactor.java | 4 ---- 1 file changed, 4 deletions(-) diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java index 8c153466446..f20a472d96b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationFactor.java @@ -14,7 +14,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import java.util.Objects; // This class is a subset of server version at org.apache.cassandra.locator.ReplicationFactor @@ -51,9 +50,6 @@ public static ReplicationFactor fromString(String s) { int slash = s.indexOf('/'); String allPart = s.substring(0, slash); String transientPart = s.substring(slash + 1); - Preconditions.checkArgument( - allPart != null && transientPart != null, - "Replication factor format is or /"); return new ReplicationFactor(Integer.parseInt(allPart), Integer.parseInt(transientPart)); } else { return new ReplicationFactor(Integer.parseInt(s), 0); From da5560f0f6cbf95d9bb76f4d844cc5e94113af96 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 2 Mar 2020 15:37:22 -0800 Subject: [PATCH 411/979] Fix erroneous javadoc in DefaultDriverOption --- .../oss/driver/api/core/config/DefaultDriverOption.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 072657f8913..924ade78c4d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -722,7 +722,7 @@ public enum DefaultDriverOption implements DriverOption { */ 
NETTY_ADMIN_SHUTDOWN_QUIET_PERIOD("advanced.netty.admin-group.shutdown.quiet-period"), /** - * Units for admin group quiet period and timeout. + * Max time to wait for admin group shutdown. * *

      Value-type: {@link String} */ From b436fd8076536b77b48c3be35627c785eb35b89b Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 6 Jan 2020 14:48:07 -0800 Subject: [PATCH 412/979] Pass class option explicitly when building per-profile policies Building an option on the fly is brittle because it is not equal to the original option. --- .../core/context/DefaultDriverContext.java | 3 +++ .../oss/driver/internal/core/util/Reflection.java | 15 +++++++-------- .../driver/internal/core/util/ReflectionTest.java | 1 + 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index f69032cde6d..9ef91ea94cf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -339,6 +339,7 @@ protected Map buildStartupOptions() { protected Map buildLoadBalancingPolicies() { return Reflection.buildFromConfigProfiles( this, + DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, DefaultDriverOption.LOAD_BALANCING_POLICY, LoadBalancingPolicy.class, "com.datastax.oss.driver.internal.core.loadbalancing", @@ -348,6 +349,7 @@ protected Map buildLoadBalancingPolicies() { protected Map buildRetryPolicies() { return Reflection.buildFromConfigProfiles( this, + DefaultDriverOption.RETRY_POLICY_CLASS, DefaultDriverOption.RETRY_POLICY, RetryPolicy.class, "com.datastax.oss.driver.internal.core.retry"); @@ -356,6 +358,7 @@ protected Map buildRetryPolicies() { protected Map buildSpeculativeExecutionPolicies() { return Reflection.buildFromConfigProfiles( this, + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS, DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY, SpeculativeExecutionPolicy.class, "com.datastax.oss.driver.internal.core.specex"); diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java index 933c4b4c226..671d1bffd03 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java @@ -114,8 +114,11 @@ public static Optional buildFromConfig( * the default profile. * * @param context the driver context. - * @param rootOption the root option for the policy (my-policy in the example above). The class - * name is assumed to be in a 'class' child option. + * @param classNameOption the option that indicates the class (my-policy.class in the example + * above). + * @param rootOption the root of the section containing the policy's configuration (my-policy in + * the example above). Profiles that have the same contents under that section will share the + * same policy instance. * @param expectedSuperType a super-type that the class is expected to implement/extend. * @param defaultPackages the default packages to prepend to the class name if it's not qualified. * They will be tried in order, the first one that matches an existing class will be used. @@ -124,6 +127,7 @@ public static Optional buildFromConfig( */ public static Map buildFromConfigProfiles( InternalDriverContext context, + DriverOption classNameOption, DriverOption rootOption, Class expectedSuperType, String... 
defaultPackages) { @@ -141,8 +145,7 @@ public static Map buildFromConfigProfiles( // Since all profiles use the same config, we can use any of them String profileName = profiles.iterator().next(); ComponentT policy = - buildFromConfig( - context, profileName, classOption(rootOption), expectedSuperType, defaultPackages) + buildFromConfig(context, profileName, classNameOption, expectedSuperType, defaultPackages) .orElseThrow( () -> new IllegalArgumentException( @@ -239,8 +242,4 @@ public static Optional buildFromConfig( cause); } } - - private static DriverOption classOption(DriverOption rootOption) { - return () -> rootOption.getPath() + ".class"; - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/ReflectionTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/ReflectionTest.java index a809e7b0c9b..5b58dc5bf91 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/ReflectionTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/ReflectionTest.java @@ -63,6 +63,7 @@ public void should_build_policies_per_profile() { Map policies = Reflection.buildFromConfigProfiles( context, + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS, DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY, SpeculativeExecutionPolicy.class, "com.datastax.oss.driver.internal.core.specex"); From 4ef98e3dd2d66093b15dfde02622d0d5b778e7b6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 6 Jan 2020 17:13:05 -0800 Subject: [PATCH 413/979] Relocate MockOptions for reuse in other tests --- .../internal/core/config/{typesafe => }/MockOptions.java | 4 ++-- .../core/config/typesafe/DefaultDriverConfigLoaderTest.java | 1 + .../DefaultProgrammaticDriverConfigLoaderBuilderTest.java | 1 + .../core/config/typesafe/TypesafeDriverConfigTest.java | 1 + 4 files changed, 5 insertions(+), 2 deletions(-) rename core/src/test/java/com/datastax/oss/driver/internal/core/config/{typesafe => }/MockOptions.java (89%) diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/MockOptions.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java similarity index 89% rename from core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/MockOptions.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java index c6870f40802..357097a5c8a 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/MockOptions.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java @@ -13,12 +13,12 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.internal.core.config.typesafe; +package com.datastax.oss.driver.internal.core.config; import com.datastax.oss.driver.api.core.config.DriverOption; import edu.umd.cs.findbugs.annotations.NonNull; -enum MockOptions implements DriverOption { +public enum MockOptions implements DriverOption { INT1("int1"), INT2("int2"), AUTH_PROVIDER("auth_provider"), diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java index f88d12e77d9..ba8c160b8cd 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.internal.core.config.ConfigChangeEvent; +import com.datastax.oss.driver.internal.core.config.MockOptions; import com.datastax.oss.driver.internal.core.context.EventBus; import 
com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.NettyOptions; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java index c38e3b7ef29..d992708ac8f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.internal.core.config.MockOptions; import com.typesafe.config.ConfigFactory; import org.junit.Test; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java index 32889e24afb..b268df322de 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.entry; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.config.MockOptions; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import java.util.HashMap; From 9e46299e6efe59cd842c6cc408d8adb5ff21b33f Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 6 Jan 2020 14:34:41 -0800 Subject: [PATCH 414/979] JAVA-2600: Add map-backed config loader --- changelog/README.md | 1 + 
.../api/core/config/DriverConfigLoader.java | 44 ++ .../driver/api/core/config/OptionsMap.java | 374 +++++++++ .../api/core/config/TypedDriverOption.java | 717 ++++++++++++++++++ .../api/core/type/reflect/GenericType.java | 2 + .../core/config/map/MapBasedDriverConfig.java | 67 ++ .../map/MapBasedDriverConfigLoader.java | 77 ++ .../map/MapBasedDriverExecutionProfile.java | 332 ++++++++ .../api/core/config/OptionsMapTest.java | 50 ++ .../core/config/TypedDriverOptionTest.java | 62 ++ .../core/config/MockTypedOptions.java | 26 + .../map/MapBasedDriverConfigLoaderTest.java | 68 ++ .../config/map/MapBasedDriverConfigTest.java | 101 +++ .../core/config/MapBasedConfigLoaderIT.java | 196 +++++ 14 files changed, 2117 insertions(+) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfig.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoader.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/config/OptionsMapTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/config/TypedDriverOptionTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/MockTypedOptions.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java diff --git a/changelog/README.md 
b/changelog/README.md index 7c0e299c4c5..2fc3d715f0f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [new feature] JAVA-2600: Add map-backed config loader - [new feature] JAVA-2105: Add support for transient replication - [new feature] JAVA-2670: Provide base class for mapped custom codecs - [new feature] JAVA-2633: Add execution profile argument to DAO mapper factory methods diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index 7855385cbc3..e08f17171a8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.internal.core.config.map.MapBasedDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; import com.typesafe.config.Config; @@ -166,6 +167,9 @@ static DriverConfigLoader fromUrl(@NonNull URL url) { /** * Starts a builder that allows configuration options to be overridden programmatically. * + *

      Note that {@link #fromMap(OptionsMap)} provides an alternative approach for programmatic + * configuration, that might be more convenient if you wish to completely bypass Typesafe config. + * *

      For example: * *

      {@code
      @@ -215,12 +219,52 @@ static DriverConfigLoader fromUrl(@NonNull URL url) {
          * basic.config-reload-interval}.
          *
          * 

      Note that the returned builder is not thread-safe. + * + * @see #fromMap(OptionsMap) */ @NonNull static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder() { return new DefaultProgrammaticDriverConfigLoaderBuilder(); } + /** + * Builds an instance backed by an {@link OptionsMap}, which holds all options in memory. + * + *

      This is the simplest implementation. It is intended for clients who wish to completely + * bypass Typesafe config, and instead manage the configuration programmatically. A typical + * example is a third-party tool that already has its own configuration file, and doesn't want to + * introduce a separate mechanism for driver options. + * + *

      With this loader, the driver's built-in {@code reference.conf} file is ignored, the provided + * {@link OptionsMap} must explicitly provide all mandatory options. Note however that {@link + * OptionsMap#driverDefaults()} allows you to initialize an instance with the same default values + * as {@code reference.conf}. + * + *

      +   * // This creates a configuration equivalent to the built-in reference.conf:
      +   * OptionsMap map = OptionsMap.driverDefaults();
      +   *
      +   * // Customize an option:
      +   * map.put(TypedDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(5));
      +   *
      +   * DriverConfigLoader loader = DriverConfigLoader.fromMap(map);
      +   * CqlSession session = CqlSession.builder()
      +   *     .withConfigLoader(loader)
      +   *     .build();
      +   * 
      + * + *

      If the {@link OptionsMap} is modified at runtime, this will be reflected immediately in the + * configuration, you don't need to call {@link #reload()}. Note however that, depending on the + * option, the driver might not react to a configuration change immediately, or ever (this is + * documented in {@code reference.conf}). + * + * @since 4.6.0 + */ + @NonNull + static DriverConfigLoader fromMap(@NonNull OptionsMap source) { + return new MapBasedDriverConfigLoader(source, source.asRawMap()); + } + /** * Loads the first configuration that will be used to initialize the driver. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java new file mode 100644 index 00000000000..fcc2de59d5b --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -0,0 +1,374 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.config; + +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; +import net.jcip.annotations.Immutable; +import net.jcip.annotations.ThreadSafe; + +/** + * An in-memory repository of config options, for use with {@link + * DriverConfigLoader#fromMap(OptionsMap)}. + * + *

      This class is intended for clients who wish to assemble the driver configuration in memory, + * instead of loading it from configuration files. Note that {@link #driverDefaults()} can be used + * to pre-initialize the map with the driver's built-in defaults. + * + *

      It functions like a two-dimensional map indexed by execution profile and option. All methods + * have a profile-less variant that applies to the default profile, for example {@link #get(String, + * TypedDriverOption)} and {@link #get(TypedDriverOption)}. Options are represented by {@link + * TypedDriverOption}, which allows this class to enforce additional type-safety guarantees (an + * option can only be set to a value of its intended type). + * + *

      This class is mutable and thread-safe. Live changes are reflected in real time to the driver + * session(s) that use this configuration. + * + * @since 4.6.0 + */ +@ThreadSafe +public class OptionsMap implements Serializable { + + private static final long serialVersionUID = 1; + + /** + * Creates a new instance that contains the driver's default configuration. + * + *

      This will produce a configuration that is equivalent to the {@code reference.conf} file + * bundled with the driver (however, this method does not load any file, and doesn't require + * Typesafe config in the classpath). + */ + @NonNull + public static OptionsMap driverDefaults() { + OptionsMap source = new OptionsMap(); + fillWithDriverDefaults(source); + return source; + } + + private final ConcurrentHashMap> map; + + private final List> changeListeners = new CopyOnWriteArrayList<>(); + + public OptionsMap() { + this(new ConcurrentHashMap<>()); + } + + private OptionsMap(ConcurrentHashMap> map) { + this.map = map; + } + + /** + * Associates the specified value for the specified option, in the specified execution profile. + * + * @return the previous value associated with {@code option}, or {@code null} if the option was + * not defined. + */ + @Nullable + public ValueT put( + @NonNull String profile, @NonNull TypedDriverOption option, @NonNull ValueT value) { + Objects.requireNonNull(option, "option"); + Objects.requireNonNull(value, "value"); + Object previous = getProfileMap(profile).put(option.getRawOption(), value); + if (!value.equals(previous)) { + for (Consumer listener : changeListeners) { + listener.accept(this); + } + } + return cast(previous); + } + + /** + * Associates the specified value for the specified option, in the default execution profile. + * + * @return the previous value associated with {@code option}, or {@code null} if the option was + * not defined. + */ + @Nullable + public ValueT put(@NonNull TypedDriverOption option, @NonNull ValueT value) { + return put(DriverExecutionProfile.DEFAULT_NAME, option, value); + } + + /** + * Returns the value to which the specified option is mapped in the specified profile, or {@code + * null} if the option is not defined. 
+ */ + @Nullable + public ValueT get(@NonNull String profile, @NonNull TypedDriverOption option) { + Objects.requireNonNull(option, "option"); + Object result = getProfileMap(profile).get(option.getRawOption()); + return cast(result); + } + + /** + * Returns the value to which the specified option is mapped in the default profile, or {@code + * null} if the option is not defined. + */ + @Nullable + public ValueT get(@NonNull TypedDriverOption option) { + return get(DriverExecutionProfile.DEFAULT_NAME, option); + } + + /** + * Removes the specified option from the specified profile. + * + * @return the previous value associated with {@code option}, or {@code null} if the option was + * not defined. + */ + @Nullable + public ValueT remove( + @NonNull String profile, @NonNull TypedDriverOption option) { + Objects.requireNonNull(option, "option"); + Object previous = getProfileMap(profile).remove(option.getRawOption()); + if (previous != null) { + for (Consumer listener : changeListeners) { + listener.accept(this); + } + } + return cast(previous); + } + + /** + * Removes the specified option from the default profile. + * + * @return the previous value associated with {@code option}, or {@code null} if the option was + * not defined. + */ + @Nullable + public ValueT remove(@NonNull TypedDriverOption option) { + return remove(DriverExecutionProfile.DEFAULT_NAME, option); + } + + /** + * Registers a listener that will get notified when this object changes. + * + *

      This is mostly for internal use by the driver. Note that listeners are transient, and not + * taken into account by {@link #equals(Object)} and {@link #hashCode()}. + */ + public void addChangeListener(@NonNull Consumer listener) { + changeListeners.add(Objects.requireNonNull(listener)); + } + + /** + * Unregisters a listener that was previously registered with {@link + * #addChangeListener(Consumer)}. + * + * @return {@code true} if the listener was indeed registered for this object. + */ + public boolean removeChangeListener(@NonNull Consumer listener) { + return changeListeners.remove(Objects.requireNonNull(listener)); + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof OptionsMap) { + OptionsMap that = (OptionsMap) other; + return this.map.equals(that.map); + } else { + return false; + } + } + + @Override + public int hashCode() { + return map.hashCode(); + } + + /** + * Returns a live view of this object, using the driver's untyped {@link DriverOption}. + * + *

      This is intended for internal usage by the driver. Modifying the resulting map is strongly + * discouraged, as it could break the type-safety guarantees provided by the public methods. + */ + @NonNull + protected Map> asRawMap() { + return map; + } + + @NonNull + private Map getProfileMap(@NonNull String profile) { + Objects.requireNonNull(profile, "profile"); + return map.computeIfAbsent(profile, p -> new ConcurrentHashMap<>()); + } + + // Isolate the suppressed warning for retrieval. The cast should always succeed unless the user + // messes with asMap() directly. + @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + @Nullable + private ValueT cast(@Nullable Object value) { + return (ValueT) value; + } + + /** + * This object gets replaced by an internal proxy for serialization. + * + * @serialData the serialized form of the {@code Map>} used to + * store options internally (listeners are transient). + */ + private Object writeReplace() { + return new SerializationProxy(this.map); + } + + // Should never be called since we serialize a proxy + @SuppressWarnings("UnusedVariable") + private void readObject(ObjectInputStream stream) throws InvalidObjectException { + throw new InvalidObjectException("Proxy required"); + } + + protected static void fillWithDriverDefaults(OptionsMap map) { + // Sorted by order of appearance in reference.conf: + + // Skip CONFIG_RELOAD_INTERVAL because the map-based config doesn't need periodic reloading + map.put(TypedDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(2)); + map.put(TypedDriverOption.REQUEST_CONSISTENCY, "LOCAL_ONE"); + map.put(TypedDriverOption.REQUEST_PAGE_SIZE, 5000); + map.put(TypedDriverOption.REQUEST_SERIAL_CONSISTENCY, "SERIAL"); + map.put(TypedDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, false); + map.put(TypedDriverOption.GRAPH_TRAVERSAL_SOURCE, "g"); + map.put(TypedDriverOption.LOAD_BALANCING_POLICY_CLASS, "DefaultLoadBalancingPolicy"); + 
map.put(TypedDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true); + map.put(TypedDriverOption.CONNECTION_CONNECT_TIMEOUT, Duration.ofSeconds(5)); + map.put(TypedDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); + map.put(TypedDriverOption.CONNECTION_POOL_REMOTE_SIZE, 1); + map.put(TypedDriverOption.CONNECTION_MAX_REQUESTS, 1024); + map.put(TypedDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS, 24576); + map.put(TypedDriverOption.CONNECTION_WARN_INIT_ERROR, true); + map.put(TypedDriverOption.RECONNECT_ON_INIT, false); + map.put(TypedDriverOption.RECONNECTION_POLICY_CLASS, "ExponentialReconnectionPolicy"); + map.put(TypedDriverOption.RECONNECTION_BASE_DELAY, Duration.ofSeconds(1)); + map.put(TypedDriverOption.RECONNECTION_MAX_DELAY, Duration.ofSeconds(60)); + map.put(TypedDriverOption.RETRY_POLICY_CLASS, "DefaultRetryPolicy"); + map.put(TypedDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS, "NoSpeculativeExecutionPolicy"); + map.put(TypedDriverOption.TIMESTAMP_GENERATOR_CLASS, "AtomicTimestampGenerator"); + map.put(TypedDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_THRESHOLD, Duration.ofSeconds(1)); + map.put(TypedDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL, Duration.ofSeconds(10)); + map.put(TypedDriverOption.TIMESTAMP_GENERATOR_FORCE_JAVA_CLOCK, false); + map.put(TypedDriverOption.REQUEST_TRACKER_CLASS, "NoopRequestTracker"); + map.put(TypedDriverOption.REQUEST_THROTTLER_CLASS, "PassThroughRequestThrottler"); + map.put(TypedDriverOption.METADATA_NODE_STATE_LISTENER_CLASS, "NoopNodeStateListener"); + map.put(TypedDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS, "NoopSchemaChangeListener"); + map.put(TypedDriverOption.ADDRESS_TRANSLATOR_CLASS, "PassThroughAddressTranslator"); + map.put(TypedDriverOption.RESOLVE_CONTACT_POINTS, true); + map.put(TypedDriverOption.PROTOCOL_MAX_FRAME_LENGTH, 
256L * 1024 * 1024); + map.put(TypedDriverOption.REQUEST_WARN_IF_SET_KEYSPACE, true); + map.put(TypedDriverOption.REQUEST_TRACE_ATTEMPTS, 5); + map.put(TypedDriverOption.REQUEST_TRACE_INTERVAL, Duration.ofMillis(3)); + map.put(TypedDriverOption.REQUEST_TRACE_CONSISTENCY, "ONE"); + map.put(TypedDriverOption.REQUEST_LOG_WARNINGS, true); + map.put(TypedDriverOption.GRAPH_PAGING_ENABLED, "AUTO"); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, 5000); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES, 0); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 4); + map.put(TypedDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 5000); + map.put(TypedDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES, false); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 0); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 4); + map.put(TypedDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE, Duration.ofSeconds(2)); + map.put(TypedDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofSeconds(1)); + map.put(TypedDriverOption.MONITOR_REPORTING_ENABLED, true); + map.put(TypedDriverOption.METRICS_SESSION_ENABLED, Collections.emptyList()); + map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, 3); + map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_DIGITS, 3); + map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_INTERVAL, Duration.ofMinutes(5)); + map.put( + TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, + Duration.ofSeconds(3)); + 
map.put(TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, 3); + map.put( + TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, + Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); + map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()); + map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, 3); + map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, 3); + map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.SOCKET_TCP_NODELAY, true); + map.put(TypedDriverOption.HEARTBEAT_INTERVAL, Duration.ofSeconds(30)); + map.put(TypedDriverOption.HEARTBEAT_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.METADATA_TOPOLOGY_WINDOW, Duration.ofSeconds(1)); + map.put(TypedDriverOption.METADATA_TOPOLOGY_MAX_EVENTS, 20); + map.put(TypedDriverOption.METADATA_SCHEMA_ENABLED, true); + map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, Duration.ofSeconds(2)); + map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, 5000); + map.put(TypedDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(1)); + map.put(TypedDriverOption.METADATA_SCHEMA_MAX_EVENTS, 20); + map.put(TypedDriverOption.METADATA_TOKEN_MAP_ENABLED, true); + map.put(TypedDriverOption.CONTROL_CONNECTION_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_INTERVAL, Duration.ofMillis(200)); + 
map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_TIMEOUT, Duration.ofSeconds(10)); + map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_WARN, true); + map.put(TypedDriverOption.PREPARE_ON_ALL_NODES, true); + map.put(TypedDriverOption.REPREPARE_ENABLED, true); + map.put(TypedDriverOption.REPREPARE_CHECK_SYSTEM_TABLE, false); + map.put(TypedDriverOption.REPREPARE_MAX_STATEMENTS, 0); + map.put(TypedDriverOption.REPREPARE_MAX_PARALLELISM, 100); + map.put(TypedDriverOption.REPREPARE_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.NETTY_DAEMON, false); + map.put(TypedDriverOption.NETTY_IO_SIZE, 0); + map.put(TypedDriverOption.NETTY_IO_SHUTDOWN_QUIET_PERIOD, 2); + map.put(TypedDriverOption.NETTY_IO_SHUTDOWN_TIMEOUT, 15); + map.put(TypedDriverOption.NETTY_IO_SHUTDOWN_UNIT, "SECONDS"); + map.put(TypedDriverOption.NETTY_ADMIN_SIZE, 2); + map.put(TypedDriverOption.NETTY_ADMIN_SHUTDOWN_QUIET_PERIOD, 2); + map.put(TypedDriverOption.NETTY_ADMIN_SHUTDOWN_TIMEOUT, 15); + map.put(TypedDriverOption.NETTY_ADMIN_SHUTDOWN_UNIT, "SECONDS"); + map.put(TypedDriverOption.NETTY_TIMER_TICK_DURATION, Duration.ofMillis(100)); + map.put(TypedDriverOption.NETTY_TIMER_TICKS_PER_WHEEL, 2048); + map.put(TypedDriverOption.COALESCER_MAX_RUNS, 5); + map.put(TypedDriverOption.COALESCER_INTERVAL, Duration.of(10, ChronoUnit.MICROS)); + } + + @Immutable + private static class SerializationProxy implements Serializable { + + private static final long serialVersionUID = 1L; + + private final ConcurrentHashMap> map; + + private SerializationProxy(ConcurrentHashMap> map) { + this.map = map; + } + + private Object readResolve() { + return new OptionsMap(map); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java new file mode 100644 index 00000000000..eaab1d1ee6f --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -0,0 +1,717 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.config; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.StringJoiner; + +/** + * A type-safe wrapper around {@link DriverOption}, that encodes the intended value type of each + * option. + * + *

      This type was introduced in conjunction with {@link DriverConfigLoader#fromMap(OptionsMap)}. + * Unfortunately, for backward compatibility reasons, it wasn't possible to retrofit the rest of the + * driver to use it; therefore the APIs used to read the configuration, such as {@link DriverConfig} + * and {@link DriverExecutionProfile}, still use the untyped {@link DriverOption}. + * + * @since 4.6.0 + */ +public class TypedDriverOption { + + private static volatile Iterable> builtInValues; + + /** + * Returns the list of all built-in options known to the driver codebase; in other words, all the + * {@link TypedDriverOption} constants defined on this class. + * + *

      Note that 3rd-party driver extensions might define their own {@link TypedDriverOption} + * constants for custom options. + * + *

      This method uses reflection to introspect all the constants on this class; the result is + * computed lazily on the first invocation, and then cached for future calls. + */ + public static Iterable> builtInValues() { + if (builtInValues == null) { + builtInValues = introspectBuiltInValues(); + } + return builtInValues; + } + + private final DriverOption rawOption; + private final GenericType expectedType; + + public TypedDriverOption( + @NonNull DriverOption rawOption, @NonNull GenericType expectedType) { + this.rawOption = Objects.requireNonNull(rawOption); + this.expectedType = Objects.requireNonNull(expectedType); + } + + @NonNull + public DriverOption getRawOption() { + return rawOption; + } + + @NonNull + public GenericType getExpectedType() { + return expectedType; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof TypedDriverOption) { + TypedDriverOption that = (TypedDriverOption) other; + return this.rawOption.equals(that.rawOption) && this.expectedType.equals(that.expectedType); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(rawOption, expectedType); + } + + @Override + public String toString() { + return new StringJoiner(", ", TypedDriverOption.class.getSimpleName() + "[", "]") + .add("rawOption=" + rawOption) + .add("expectedType=" + expectedType) + .toString(); + } + + /** The contact points to use for the initial connection to the cluster. */ + public static final TypedDriverOption> CONTACT_POINTS = + new TypedDriverOption<>(DefaultDriverOption.CONTACT_POINTS, GenericType.listOf(String.class)); + /** A name that uniquely identifies the driver instance. */ + public static final TypedDriverOption SESSION_NAME = + new TypedDriverOption<>(DefaultDriverOption.SESSION_NAME, GenericType.STRING); + /** The name of the keyspace that the session should initially be connected to. 
*/ + public static final TypedDriverOption SESSION_KEYSPACE = + new TypedDriverOption<>(DefaultDriverOption.SESSION_KEYSPACE, GenericType.STRING); + /** How often the driver tries to reload the configuration. */ + public static final TypedDriverOption CONFIG_RELOAD_INTERVAL = + new TypedDriverOption<>(DefaultDriverOption.CONFIG_RELOAD_INTERVAL, GenericType.DURATION); + /** How long the driver waits for a request to complete. */ + public static final TypedDriverOption REQUEST_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_TIMEOUT, GenericType.DURATION); + /** The consistency level. */ + public static final TypedDriverOption REQUEST_CONSISTENCY = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_CONSISTENCY, GenericType.STRING); + /** The page size. */ + public static final TypedDriverOption REQUEST_PAGE_SIZE = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_PAGE_SIZE, GenericType.INTEGER); + /** The serial consistency level. */ + public static final TypedDriverOption REQUEST_SERIAL_CONSISTENCY = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY, GenericType.STRING); + /** The default idempotence of a request. */ + public static final TypedDriverOption REQUEST_DEFAULT_IDEMPOTENCE = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, GenericType.BOOLEAN); + /** The class of the load balancing policy. */ + public static final TypedDriverOption LOAD_BALANCING_POLICY_CLASS = + new TypedDriverOption<>(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, GenericType.STRING); + /** The datacenter that is considered "local". */ + public static final TypedDriverOption LOAD_BALANCING_LOCAL_DATACENTER = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, GenericType.STRING); + /** A custom filter to include/exclude nodes. 
*/ + public static final TypedDriverOption LOAD_BALANCING_FILTER_CLASS = + new TypedDriverOption<>(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, GenericType.STRING); + /** The timeout to use for internal queries that run as part of the initialization process. */ + public static final TypedDriverOption CONNECTION_INIT_QUERY_TIMEOUT = + new TypedDriverOption<>( + DefaultDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, GenericType.DURATION); + /** The timeout to use when the driver changes the keyspace on a connection at runtime. */ + public static final TypedDriverOption CONNECTION_SET_KEYSPACE_TIMEOUT = + new TypedDriverOption<>( + DefaultDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT, GenericType.DURATION); + /** The maximum number of requests that can be executed concurrently on a connection. */ + public static final TypedDriverOption CONNECTION_MAX_REQUESTS = + new TypedDriverOption<>(DefaultDriverOption.CONNECTION_MAX_REQUESTS, GenericType.INTEGER); + /** The maximum number of "orphaned" requests before a connection gets closed automatically. */ + public static final TypedDriverOption CONNECTION_MAX_ORPHAN_REQUESTS = + new TypedDriverOption<>( + DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS, GenericType.INTEGER); + /** Whether to log non-fatal errors when the driver tries to open a new connection. */ + public static final TypedDriverOption CONNECTION_WARN_INIT_ERROR = + new TypedDriverOption<>(DefaultDriverOption.CONNECTION_WARN_INIT_ERROR, GenericType.BOOLEAN); + /** The number of connections in the LOCAL pool. */ + public static final TypedDriverOption CONNECTION_POOL_LOCAL_SIZE = + new TypedDriverOption<>(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, GenericType.INTEGER); + /** The number of connections in the REMOTE pool. 
*/ + public static final TypedDriverOption CONNECTION_POOL_REMOTE_SIZE = + new TypedDriverOption<>(DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE, GenericType.INTEGER); + /** + * Whether to schedule reconnection attempts if all contact points are unreachable on the first + * initialization attempt. + */ + public static final TypedDriverOption RECONNECT_ON_INIT = + new TypedDriverOption<>(DefaultDriverOption.RECONNECT_ON_INIT, GenericType.BOOLEAN); + /** The class of the reconnection policy. */ + public static final TypedDriverOption RECONNECTION_POLICY_CLASS = + new TypedDriverOption<>(DefaultDriverOption.RECONNECTION_POLICY_CLASS, GenericType.STRING); + /** Base delay for computing time between reconnection attempts. */ + public static final TypedDriverOption RECONNECTION_BASE_DELAY = + new TypedDriverOption<>(DefaultDriverOption.RECONNECTION_BASE_DELAY, GenericType.DURATION); + /** Maximum delay between reconnection attempts. */ + public static final TypedDriverOption RECONNECTION_MAX_DELAY = + new TypedDriverOption<>(DefaultDriverOption.RECONNECTION_MAX_DELAY, GenericType.DURATION); + /** The class of the retry policy. */ + public static final TypedDriverOption RETRY_POLICY_CLASS = + new TypedDriverOption<>(DefaultDriverOption.RETRY_POLICY_CLASS, GenericType.STRING); + /** The class of the speculative execution policy. */ + public static final TypedDriverOption SPECULATIVE_EXECUTION_POLICY_CLASS = + new TypedDriverOption<>( + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY_CLASS, GenericType.STRING); + /** The maximum number of executions. */ + public static final TypedDriverOption SPECULATIVE_EXECUTION_MAX = + new TypedDriverOption<>(DefaultDriverOption.SPECULATIVE_EXECUTION_MAX, GenericType.INTEGER); + /** The delay between each execution. */ + public static final TypedDriverOption SPECULATIVE_EXECUTION_DELAY = + new TypedDriverOption<>( + DefaultDriverOption.SPECULATIVE_EXECUTION_DELAY, GenericType.DURATION); + /** The class of the authentication provider. 
*/ + public static final TypedDriverOption AUTH_PROVIDER_CLASS = + new TypedDriverOption<>(DefaultDriverOption.AUTH_PROVIDER_CLASS, GenericType.STRING); + /** Plain text auth provider username. */ + public static final TypedDriverOption AUTH_PROVIDER_USER_NAME = + new TypedDriverOption<>(DefaultDriverOption.AUTH_PROVIDER_USER_NAME, GenericType.STRING); + /** Plain text auth provider password. */ + public static final TypedDriverOption AUTH_PROVIDER_PASSWORD = + new TypedDriverOption<>(DefaultDriverOption.AUTH_PROVIDER_PASSWORD, GenericType.STRING); + /** The class of the SSL Engine Factory. */ + public static final TypedDriverOption SSL_ENGINE_FACTORY_CLASS = + new TypedDriverOption<>(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, GenericType.STRING); + /** The cipher suites to enable when creating an SSLEngine for a connection. */ + public static final TypedDriverOption> SSL_CIPHER_SUITES = + new TypedDriverOption<>( + DefaultDriverOption.SSL_CIPHER_SUITES, GenericType.listOf(String.class)); + /** + * Whether or not to require validation that the hostname of the server certificate's common name + * matches the hostname of the server being connected to. + */ + public static final TypedDriverOption SSL_HOSTNAME_VALIDATION = + new TypedDriverOption<>(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, GenericType.BOOLEAN); + /** The location of the keystore file. */ + public static final TypedDriverOption SSL_KEYSTORE_PATH = + new TypedDriverOption<>(DefaultDriverOption.SSL_KEYSTORE_PATH, GenericType.STRING); + /** The keystore password. */ + public static final TypedDriverOption SSL_KEYSTORE_PASSWORD = + new TypedDriverOption<>(DefaultDriverOption.SSL_KEYSTORE_PASSWORD, GenericType.STRING); + /** The location of the truststore file. */ + public static final TypedDriverOption SSL_TRUSTSTORE_PATH = + new TypedDriverOption<>(DefaultDriverOption.SSL_TRUSTSTORE_PATH, GenericType.STRING); + /** The truststore password. 
*/ + public static final TypedDriverOption SSL_TRUSTSTORE_PASSWORD = + new TypedDriverOption<>(DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, GenericType.STRING); + /** The class of the generator that assigns a microsecond timestamp to each request. */ + public static final TypedDriverOption TIMESTAMP_GENERATOR_CLASS = + new TypedDriverOption<>(DefaultDriverOption.TIMESTAMP_GENERATOR_CLASS, GenericType.STRING); + /** Whether to force the driver to use Java's millisecond-precision system clock. */ + public static final TypedDriverOption TIMESTAMP_GENERATOR_FORCE_JAVA_CLOCK = + new TypedDriverOption<>( + DefaultDriverOption.TIMESTAMP_GENERATOR_FORCE_JAVA_CLOCK, GenericType.BOOLEAN); + /** How far in the future timestamps are allowed to drift before the warning is logged. */ + public static final TypedDriverOption TIMESTAMP_GENERATOR_DRIFT_WARNING_THRESHOLD = + new TypedDriverOption<>( + DefaultDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_THRESHOLD, GenericType.DURATION); + /** How often the warning will be logged if timestamps keep drifting above the threshold. */ + public static final TypedDriverOption TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL, GenericType.DURATION); + /** The class of a session-wide component that tracks the outcome of requests. */ + public static final TypedDriverOption REQUEST_TRACKER_CLASS = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACKER_CLASS, GenericType.STRING); + /** Whether to log successful requests. */ + public static final TypedDriverOption REQUEST_LOGGER_SUCCESS_ENABLED = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED, GenericType.BOOLEAN); + /** The threshold to classify a successful request as "slow". 
*/ + public static final TypedDriverOption REQUEST_LOGGER_SLOW_THRESHOLD = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_LOGGER_SLOW_THRESHOLD, GenericType.DURATION); + /** Whether to log slow requests. */ + public static final TypedDriverOption REQUEST_LOGGER_SLOW_ENABLED = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_LOGGER_SLOW_ENABLED, GenericType.BOOLEAN); + /** Whether to log failed requests. */ + public static final TypedDriverOption REQUEST_LOGGER_ERROR_ENABLED = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED, GenericType.BOOLEAN); + /** The maximum length of the query string in the log message. */ + public static final TypedDriverOption REQUEST_LOGGER_MAX_QUERY_LENGTH = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_LOGGER_MAX_QUERY_LENGTH, GenericType.INTEGER); + /** Whether to log bound values in addition to the query string. */ + public static final TypedDriverOption REQUEST_LOGGER_VALUES = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_LOGGER_VALUES, GenericType.BOOLEAN); + /** The maximum length for bound values in the log message. */ + public static final TypedDriverOption REQUEST_LOGGER_MAX_VALUE_LENGTH = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_LOGGER_MAX_VALUE_LENGTH, GenericType.INTEGER); + /** The maximum number of bound values to log. */ + public static final TypedDriverOption REQUEST_LOGGER_MAX_VALUES = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_LOGGER_MAX_VALUES, GenericType.INTEGER); + /** Whether to log stack traces for failed queries. */ + public static final TypedDriverOption REQUEST_LOGGER_STACK_TRACES = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_LOGGER_STACK_TRACES, GenericType.BOOLEAN); + /** + * The class of a session-wide component that controls the rate at which requests are executed. 
+ */ + public static final TypedDriverOption REQUEST_THROTTLER_CLASS = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_THROTTLER_CLASS, GenericType.STRING); + /** The maximum number of requests that are allowed to execute in parallel. */ + public static final TypedDriverOption REQUEST_THROTTLER_MAX_CONCURRENT_REQUESTS = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_THROTTLER_MAX_CONCURRENT_REQUESTS, GenericType.INTEGER); + /** The maximum allowed request rate. */ + public static final TypedDriverOption REQUEST_THROTTLER_MAX_REQUESTS_PER_SECOND = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_THROTTLER_MAX_REQUESTS_PER_SECOND, GenericType.INTEGER); + /** + * The maximum number of requests that can be enqueued when the throttling threshold is exceeded. + */ + public static final TypedDriverOption REQUEST_THROTTLER_MAX_QUEUE_SIZE = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_THROTTLER_MAX_QUEUE_SIZE, GenericType.INTEGER); + /** How often the throttler attempts to dequeue requests. */ + public static final TypedDriverOption REQUEST_THROTTLER_DRAIN_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_THROTTLER_DRAIN_INTERVAL, GenericType.DURATION); + /** The class of a session-wide component that listens for node state changes. */ + public static final TypedDriverOption METADATA_NODE_STATE_LISTENER_CLASS = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS, GenericType.STRING); + /** The class of a session-wide component that listens for schema changes. */ + public static final TypedDriverOption METADATA_SCHEMA_CHANGE_LISTENER_CLASS = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS, GenericType.STRING); + /** + * The class of the address translator to use to convert the addresses sent by Cassandra nodes + * into ones that the driver uses to connect. 
+ */ + public static final TypedDriverOption ADDRESS_TRANSLATOR_CLASS = + new TypedDriverOption<>(DefaultDriverOption.ADDRESS_TRANSLATOR_CLASS, GenericType.STRING); + /** The native protocol version to use. */ + public static final TypedDriverOption PROTOCOL_VERSION = + new TypedDriverOption<>(DefaultDriverOption.PROTOCOL_VERSION, GenericType.STRING); + /** The name of the algorithm used to compress protocol frames. */ + public static final TypedDriverOption PROTOCOL_COMPRESSION = + new TypedDriverOption<>(DefaultDriverOption.PROTOCOL_COMPRESSION, GenericType.STRING); + /** The maximum length, in bytes, of the frames supported by the driver. */ + public static final TypedDriverOption PROTOCOL_MAX_FRAME_LENGTH = + new TypedDriverOption<>(DefaultDriverOption.PROTOCOL_MAX_FRAME_LENGTH, GenericType.LONG); + /** + * Whether a warning is logged when a request (such as a CQL `USE ...`) changes the active + * keyspace. + */ + public static final TypedDriverOption REQUEST_WARN_IF_SET_KEYSPACE = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_WARN_IF_SET_KEYSPACE, GenericType.BOOLEAN); + /** How many times the driver will attempt to fetch the query trace if it is not ready yet. */ + public static final TypedDriverOption REQUEST_TRACE_ATTEMPTS = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACE_ATTEMPTS, GenericType.INTEGER); + /** The interval between each attempt. */ + public static final TypedDriverOption REQUEST_TRACE_INTERVAL = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACE_INTERVAL, GenericType.DURATION); + /** The consistency level to use for trace queries. */ + public static final TypedDriverOption REQUEST_TRACE_CONSISTENCY = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACE_CONSISTENCY, GenericType.STRING); + /** List of enabled session-level metrics. 
*/ + public static final TypedDriverOption> METRICS_SESSION_ENABLED = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_ENABLED, GenericType.listOf(String.class)); + /** List of enabled node-level metrics. */ + public static final TypedDriverOption> METRICS_NODE_ENABLED = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_ENABLED, GenericType.listOf(String.class)); + /** The largest latency that we expect to record for requests. */ + public static final TypedDriverOption METRICS_SESSION_CQL_REQUESTS_HIGHEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for + * requests. + */ + public static final TypedDriverOption METRICS_SESSION_CQL_REQUESTS_DIGITS = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, GenericType.INTEGER); + /** The interval at which percentile data is refreshed for requests. */ + public static final TypedDriverOption METRICS_SESSION_CQL_REQUESTS_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_INTERVAL, GenericType.DURATION); + /** The largest latency that we expect to record for throttling. */ + public static final TypedDriverOption METRICS_SESSION_THROTTLING_HIGHEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for + * throttling. + */ + public static final TypedDriverOption METRICS_SESSION_THROTTLING_DIGITS = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS, GenericType.INTEGER); + /** The interval at which percentile data is refreshed for throttling. 
*/ + public static final TypedDriverOption METRICS_SESSION_THROTTLING_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_INTERVAL, GenericType.DURATION); + /** The largest latency that we expect to record for requests. */ + public static final TypedDriverOption METRICS_NODE_CQL_MESSAGES_HIGHEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for + * requests. + */ + public static final TypedDriverOption METRICS_NODE_CQL_MESSAGES_DIGITS = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, GenericType.INTEGER); + /** The interval at which percentile data is refreshed for requests. */ + public static final TypedDriverOption METRICS_NODE_CQL_MESSAGES_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_INTERVAL, GenericType.DURATION); + /** Whether or not to disable the Nagle algorithm. */ + public static final TypedDriverOption SOCKET_TCP_NODELAY = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_TCP_NODELAY, GenericType.BOOLEAN); + /** Whether or not to enable TCP keep-alive probes. */ + public static final TypedDriverOption SOCKET_KEEP_ALIVE = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_KEEP_ALIVE, GenericType.BOOLEAN); + /** Whether or not to allow address reuse. */ + public static final TypedDriverOption SOCKET_REUSE_ADDRESS = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_REUSE_ADDRESS, GenericType.BOOLEAN); + /** Sets the linger interval. */ + public static final TypedDriverOption SOCKET_LINGER_INTERVAL = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_LINGER_INTERVAL, GenericType.INTEGER); + /** Sets a hint to the size of the underlying buffers for incoming network I/O. 
*/ + public static final TypedDriverOption SOCKET_RECEIVE_BUFFER_SIZE = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_RECEIVE_BUFFER_SIZE, GenericType.INTEGER); + /** Sets a hint to the size of the underlying buffers for outgoing network I/O. */ + public static final TypedDriverOption SOCKET_SEND_BUFFER_SIZE = + new TypedDriverOption<>(DefaultDriverOption.SOCKET_SEND_BUFFER_SIZE, GenericType.INTEGER); + /** The connection heartbeat interval. */ + public static final TypedDriverOption HEARTBEAT_INTERVAL = + new TypedDriverOption<>(DefaultDriverOption.HEARTBEAT_INTERVAL, GenericType.DURATION); + /** How long the driver waits for the response to a heartbeat. */ + public static final TypedDriverOption HEARTBEAT_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.HEARTBEAT_TIMEOUT, GenericType.DURATION); + /** How long the driver waits to propagate a Topology event. */ + public static final TypedDriverOption METADATA_TOPOLOGY_WINDOW = + new TypedDriverOption<>(DefaultDriverOption.METADATA_TOPOLOGY_WINDOW, GenericType.DURATION); + /** The maximum number of events that can accumulate. */ + public static final TypedDriverOption METADATA_TOPOLOGY_MAX_EVENTS = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_TOPOLOGY_MAX_EVENTS, GenericType.INTEGER); + /** Whether schema metadata is enabled. */ + public static final TypedDriverOption METADATA_SCHEMA_ENABLED = + new TypedDriverOption<>(DefaultDriverOption.METADATA_SCHEMA_ENABLED, GenericType.BOOLEAN); + /** The timeout for the requests to the schema tables. */ + public static final TypedDriverOption METADATA_SCHEMA_REQUEST_TIMEOUT = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, GenericType.DURATION); + /** The page size for the requests to the schema tables. 
*/ + public static final TypedDriverOption METADATA_SCHEMA_REQUEST_PAGE_SIZE = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, GenericType.INTEGER); + /** The list of keyspaces for which schema and token metadata should be maintained. */ + public static final TypedDriverOption> METADATA_SCHEMA_REFRESHED_KEYSPACES = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, + GenericType.listOf(String.class)); + /** How long the driver waits to apply a refresh. */ + public static final TypedDriverOption METADATA_SCHEMA_WINDOW = + new TypedDriverOption<>(DefaultDriverOption.METADATA_SCHEMA_WINDOW, GenericType.DURATION); + /** The maximum number of refreshes that can accumulate. */ + public static final TypedDriverOption METADATA_SCHEMA_MAX_EVENTS = + new TypedDriverOption<>(DefaultDriverOption.METADATA_SCHEMA_MAX_EVENTS, GenericType.INTEGER); + /** Whether token metadata is enabled. */ + public static final TypedDriverOption METADATA_TOKEN_MAP_ENABLED = + new TypedDriverOption<>(DefaultDriverOption.METADATA_TOKEN_MAP_ENABLED, GenericType.BOOLEAN); + /** How long the driver waits for responses to control queries. */ + public static final TypedDriverOption CONTROL_CONNECTION_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.CONTROL_CONNECTION_TIMEOUT, GenericType.DURATION); + /** The interval between each schema agreement check attempt. */ + public static final TypedDriverOption CONTROL_CONNECTION_AGREEMENT_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.CONTROL_CONNECTION_AGREEMENT_INTERVAL, GenericType.DURATION); + /** The timeout after which schema agreement fails. */ + public static final TypedDriverOption CONTROL_CONNECTION_AGREEMENT_TIMEOUT = + new TypedDriverOption<>( + DefaultDriverOption.CONTROL_CONNECTION_AGREEMENT_TIMEOUT, GenericType.DURATION); + /** Whether to log a warning if schema agreement fails. 
*/ + public static final TypedDriverOption CONTROL_CONNECTION_AGREEMENT_WARN = + new TypedDriverOption<>( + DefaultDriverOption.CONTROL_CONNECTION_AGREEMENT_WARN, GenericType.BOOLEAN); + /** Whether `Session.prepare` calls should be sent to all nodes in the cluster. */ + public static final TypedDriverOption PREPARE_ON_ALL_NODES = + new TypedDriverOption<>(DefaultDriverOption.PREPARE_ON_ALL_NODES, GenericType.BOOLEAN); + /** Whether the driver tries to prepare on new nodes at all. */ + public static final TypedDriverOption REPREPARE_ENABLED = + new TypedDriverOption<>(DefaultDriverOption.REPREPARE_ENABLED, GenericType.BOOLEAN); + /** Whether to check `system.prepared_statements` on the target node before repreparing. */ + public static final TypedDriverOption REPREPARE_CHECK_SYSTEM_TABLE = + new TypedDriverOption<>( + DefaultDriverOption.REPREPARE_CHECK_SYSTEM_TABLE, GenericType.BOOLEAN); + /** The maximum number of statements that should be reprepared. */ + public static final TypedDriverOption REPREPARE_MAX_STATEMENTS = + new TypedDriverOption<>(DefaultDriverOption.REPREPARE_MAX_STATEMENTS, GenericType.INTEGER); + /** The maximum number of concurrent requests when repreparing. */ + public static final TypedDriverOption REPREPARE_MAX_PARALLELISM = + new TypedDriverOption<>(DefaultDriverOption.REPREPARE_MAX_PARALLELISM, GenericType.INTEGER); + /** The request timeout when repreparing. */ + public static final TypedDriverOption REPREPARE_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.REPREPARE_TIMEOUT, GenericType.DURATION); + /** The number of threads in the I/O group. */ + public static final TypedDriverOption NETTY_IO_SIZE = + new TypedDriverOption<>(DefaultDriverOption.NETTY_IO_SIZE, GenericType.INTEGER); + /** Quiet period for I/O group shutdown. 
*/ + public static final TypedDriverOption NETTY_IO_SHUTDOWN_QUIET_PERIOD = + new TypedDriverOption<>( + DefaultDriverOption.NETTY_IO_SHUTDOWN_QUIET_PERIOD, GenericType.INTEGER); + /** Max time to wait for I/O group shutdown. */ + public static final TypedDriverOption NETTY_IO_SHUTDOWN_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.NETTY_IO_SHUTDOWN_TIMEOUT, GenericType.INTEGER); + /** Units for I/O group quiet period and timeout. */ + public static final TypedDriverOption NETTY_IO_SHUTDOWN_UNIT = + new TypedDriverOption<>(DefaultDriverOption.NETTY_IO_SHUTDOWN_UNIT, GenericType.STRING); + /** The number of threads in the Admin group. */ + public static final TypedDriverOption NETTY_ADMIN_SIZE = + new TypedDriverOption<>(DefaultDriverOption.NETTY_ADMIN_SIZE, GenericType.INTEGER); + /** Quiet period for admin group shutdown. */ + public static final TypedDriverOption NETTY_ADMIN_SHUTDOWN_QUIET_PERIOD = + new TypedDriverOption<>( + DefaultDriverOption.NETTY_ADMIN_SHUTDOWN_QUIET_PERIOD, GenericType.INTEGER); + /** Max time to wait for admin group shutdown. */ + public static final TypedDriverOption NETTY_ADMIN_SHUTDOWN_TIMEOUT = + new TypedDriverOption<>( + DefaultDriverOption.NETTY_ADMIN_SHUTDOWN_TIMEOUT, GenericType.INTEGER); + /** Units for admin group quiet period and timeout. */ + public static final TypedDriverOption NETTY_ADMIN_SHUTDOWN_UNIT = + new TypedDriverOption<>(DefaultDriverOption.NETTY_ADMIN_SHUTDOWN_UNIT, GenericType.STRING); + /** How many times the coalescer is allowed to reschedule itself when it did no work. */ + public static final TypedDriverOption COALESCER_MAX_RUNS = + new TypedDriverOption<>(DefaultDriverOption.COALESCER_MAX_RUNS, GenericType.INTEGER); + /** The coalescer reschedule interval. */ + public static final TypedDriverOption COALESCER_INTERVAL = + new TypedDriverOption<>(DefaultDriverOption.COALESCER_INTERVAL, GenericType.DURATION); + /** Whether to resolve the addresses passed to `basic.contact-points`. 
*/ + public static final TypedDriverOption RESOLVE_CONTACT_POINTS = + new TypedDriverOption<>(DefaultDriverOption.RESOLVE_CONTACT_POINTS, GenericType.BOOLEAN); + /** + * This is how frequent the timer should wake up to check for timed-out tasks or speculative + * executions. + */ + public static final TypedDriverOption NETTY_TIMER_TICK_DURATION = + new TypedDriverOption<>(DefaultDriverOption.NETTY_TIMER_TICK_DURATION, GenericType.DURATION); + /** Number of ticks in the Timer wheel. */ + public static final TypedDriverOption NETTY_TIMER_TICKS_PER_WHEEL = + new TypedDriverOption<>(DefaultDriverOption.NETTY_TIMER_TICKS_PER_WHEEL, GenericType.INTEGER); + /** + * Whether logging of server warnings generated during query execution should be disabled by the + * driver. + */ + public static final TypedDriverOption REQUEST_LOG_WARNINGS = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_LOG_WARNINGS, GenericType.BOOLEAN); + /** Whether the threads created by the driver should be daemon threads. */ + public static final TypedDriverOption NETTY_DAEMON = + new TypedDriverOption<>(DefaultDriverOption.NETTY_DAEMON, GenericType.BOOLEAN); + /** + * The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + * service. + */ + public static final TypedDriverOption CLOUD_SECURE_CONNECT_BUNDLE = + new TypedDriverOption<>(DefaultDriverOption.CLOUD_SECURE_CONNECT_BUNDLE, GenericType.STRING); + /** Whether the slow replica avoidance should be enabled in the default LBP. */ + public static final TypedDriverOption LOAD_BALANCING_POLICY_SLOW_AVOIDANCE = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, GenericType.BOOLEAN); + /** The timeout to use when establishing driver connections. */ + public static final TypedDriverOption CONNECTION_CONNECT_TIMEOUT = + new TypedDriverOption<>(DefaultDriverOption.CONNECTION_CONNECT_TIMEOUT, GenericType.DURATION); + + /** The name of the application using the session. 
*/ + public static final TypedDriverOption APPLICATION_NAME = + new TypedDriverOption<>(DseDriverOption.APPLICATION_NAME, GenericType.STRING); + /** The version of the application using the session. */ + public static final TypedDriverOption APPLICATION_VERSION = + new TypedDriverOption<>(DseDriverOption.APPLICATION_VERSION, GenericType.STRING); + /** Proxy authentication for GSSAPI authentication: allows to login as another user or role. */ + public static final TypedDriverOption AUTH_PROVIDER_AUTHORIZATION_ID = + new TypedDriverOption<>(DseDriverOption.AUTH_PROVIDER_AUTHORIZATION_ID, GenericType.STRING); + /** Service name for GSSAPI authentication. */ + public static final TypedDriverOption AUTH_PROVIDER_SERVICE = + new TypedDriverOption<>(DseDriverOption.AUTH_PROVIDER_SERVICE, GenericType.STRING); + /** Login configuration for GSSAPI authentication. */ + public static final TypedDriverOption AUTH_PROVIDER_LOGIN_CONFIGURATION = + new TypedDriverOption<>( + DseDriverOption.AUTH_PROVIDER_LOGIN_CONFIGURATION, GenericType.STRING); + /** Internal SASL properties, if any, such as QOP, for GSSAPI authentication. */ + public static final TypedDriverOption> AUTH_PROVIDER_SASL_PROPERTIES = + new TypedDriverOption<>( + DseDriverOption.AUTH_PROVIDER_SASL_PROPERTIES, + GenericType.mapOf(GenericType.STRING, GenericType.STRING)); + /** The page size for continuous paging. */ + public static final TypedDriverOption CONTINUOUS_PAGING_PAGE_SIZE = + new TypedDriverOption<>(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, GenericType.INTEGER); + /** + * Whether {@link #CONTINUOUS_PAGING_PAGE_SIZE} should be interpreted in number of rows or bytes. + */ + public static final TypedDriverOption CONTINUOUS_PAGING_PAGE_SIZE_BYTES = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES, GenericType.BOOLEAN); + /** The maximum number of continuous pages to return. 
*/ + public static final TypedDriverOption CONTINUOUS_PAGING_MAX_PAGES = + new TypedDriverOption<>(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES, GenericType.INTEGER); + /** The maximum number of continuous pages per second. */ + public static final TypedDriverOption CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, GenericType.INTEGER); + /** The maximum number of continuous pages that can be stored in the local queue. */ + public static final TypedDriverOption CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, GenericType.INTEGER); + /** How long to wait for the coordinator to send the first continuous page. */ + public static final TypedDriverOption CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE, GenericType.DURATION); + /** How long to wait for the coordinator to send subsequent continuous pages. */ + public static final TypedDriverOption CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, GenericType.DURATION); + /** The largest latency that we expect to record for continuous requests. */ + public static final TypedDriverOption + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, + GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for + * continuous requests. + */ + public static final TypedDriverOption + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, + GenericType.INTEGER); + /** The interval at which percentile data is refreshed for continuous requests. 
*/ + public static final TypedDriverOption + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, + GenericType.DURATION); + /** The read consistency level to use for graph statements. */ + public static final TypedDriverOption GRAPH_READ_CONSISTENCY_LEVEL = + new TypedDriverOption<>(DseDriverOption.GRAPH_READ_CONSISTENCY_LEVEL, GenericType.STRING); + /** The write consistency level to use for graph statements. */ + public static final TypedDriverOption GRAPH_WRITE_CONSISTENCY_LEVEL = + new TypedDriverOption<>(DseDriverOption.GRAPH_WRITE_CONSISTENCY_LEVEL, GenericType.STRING); + /** The traversal source to use for graph statements. */ + public static final TypedDriverOption GRAPH_TRAVERSAL_SOURCE = + new TypedDriverOption<>(DseDriverOption.GRAPH_TRAVERSAL_SOURCE, GenericType.STRING); + /** + * The sub-protocol the driver will use to communicate with DSE Graph, on top of the Cassandra + * native protocol. + */ + public static final TypedDriverOption GRAPH_SUB_PROTOCOL = + new TypedDriverOption<>(DseDriverOption.GRAPH_SUB_PROTOCOL, GenericType.STRING); + /** Whether a script statement represents a system query. */ + public static final TypedDriverOption GRAPH_IS_SYSTEM_QUERY = + new TypedDriverOption<>(DseDriverOption.GRAPH_IS_SYSTEM_QUERY, GenericType.BOOLEAN); + /** The name of the graph targeted by graph statements. */ + public static final TypedDriverOption GRAPH_NAME = + new TypedDriverOption<>(DseDriverOption.GRAPH_NAME, GenericType.STRING); + /** How long the driver waits for a graph request to complete. */ + public static final TypedDriverOption GRAPH_TIMEOUT = + new TypedDriverOption<>(DseDriverOption.GRAPH_TIMEOUT, GenericType.DURATION); + /** Whether to send events for Insights monitoring. 
*/ + public static final TypedDriverOption MONITOR_REPORTING_ENABLED = + new TypedDriverOption<>(DseDriverOption.MONITOR_REPORTING_ENABLED, GenericType.BOOLEAN); + /** Whether to enable paging for Graph queries. */ + public static final TypedDriverOption GRAPH_PAGING_ENABLED = + new TypedDriverOption<>(DseDriverOption.GRAPH_PAGING_ENABLED, GenericType.STRING); + /** The page size for Graph continuous paging. */ + public static final TypedDriverOption GRAPH_CONTINUOUS_PAGING_PAGE_SIZE = + new TypedDriverOption<>( + DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, GenericType.INTEGER); + /** The maximum number of Graph continuous pages to return. */ + public static final TypedDriverOption GRAPH_CONTINUOUS_PAGING_MAX_PAGES = + new TypedDriverOption<>( + DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES, GenericType.INTEGER); + /** The maximum number of Graph continuous pages per second. */ + public static final TypedDriverOption GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND = + new TypedDriverOption<>( + DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, GenericType.INTEGER); + /** The maximum number of Graph continuous pages that can be stored in the local queue. */ + public static final TypedDriverOption GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES = + new TypedDriverOption<>( + DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, GenericType.INTEGER); + /** The largest latency that we expect to record for graph requests. */ + public static final TypedDriverOption METRICS_SESSION_GRAPH_REQUESTS_HIGHEST = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for graph + * requests. 
+ */ + public static final TypedDriverOption METRICS_SESSION_GRAPH_REQUESTS_DIGITS = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, GenericType.INTEGER); + /** The interval at which percentile data is refreshed for graph requests. */ + public static final TypedDriverOption METRICS_SESSION_GRAPH_REQUESTS_INTERVAL = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, GenericType.DURATION); + /** The largest latency that we expect to record for graph requests. */ + public static final TypedDriverOption METRICS_NODE_GRAPH_MESSAGES_HIGHEST = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, GenericType.DURATION); + /** + * The number of significant decimal digits to which internal structures will maintain for graph + * requests. + */ + public static final TypedDriverOption METRICS_NODE_GRAPH_MESSAGES_DIGITS = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, GenericType.INTEGER); + /** The interval at which percentile data is refreshed for graph requests. 
*/ + public static final TypedDriverOption METRICS_NODE_GRAPH_MESSAGES_INTERVAL = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL, GenericType.DURATION); + + private static Iterable> introspectBuiltInValues() { + try { + ImmutableList.Builder> result = ImmutableList.builder(); + for (Field field : TypedDriverOption.class.getFields()) { + if ((field.getModifiers() & PUBLIC_STATIC_FINAL) == PUBLIC_STATIC_FINAL + && field.getType() == TypedDriverOption.class) { + TypedDriverOption typedOption = (TypedDriverOption) field.get(null); + result.add(typedOption); + } + } + return result.build(); + } catch (IllegalAccessException e) { + throw new IllegalStateException("Unexpected error while introspecting built-in values", e); + } + } + + private static final int PUBLIC_STATIC_FINAL = Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL; +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index daa269862c3..10bf2ef928e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -31,6 +31,7 @@ import java.math.BigInteger; import java.net.InetAddress; import java.nio.ByteBuffer; +import java.time.Duration; import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; @@ -103,6 +104,7 @@ public class GenericType { public static final GenericType CQL_DURATION = of(CqlDuration.class); public static final GenericType TUPLE_VALUE = of(TupleValue.class); public static final GenericType UDT_VALUE = of(UdtValue.class); + public static final GenericType DURATION = of(Duration.class); @NonNull public static GenericType of(@NonNull Class type) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfig.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfig.java new file mode 100644 index 00000000000..6b189e43f3e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfig.java @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.map; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +/** @see MapBasedDriverConfigLoader */ +public class MapBasedDriverConfig implements DriverConfig { + + private final Map> optionsMap; + private final Map profiles = new ConcurrentHashMap<>(); + + public MapBasedDriverConfig(Map> optionsMap) { + this.optionsMap = optionsMap; + if (!optionsMap.containsKey(DriverExecutionProfile.DEFAULT_NAME)) { + throw new IllegalArgumentException( + "The options map must contain a profile named " + DriverExecutionProfile.DEFAULT_NAME); + } + createMissingProfiles(); + } + + @NonNull + @Override + public DriverExecutionProfile getProfile(@NonNull String profileName) { + return profiles.computeIfAbsent(profileName, this::newProfile); + } + + @NonNull + @Override + 
public Map getProfiles() { + // Refresh in case profiles were added to the backing map + createMissingProfiles(); + return Collections.unmodifiableMap(profiles); + } + + private void createMissingProfiles() { + for (Map.Entry> entry : optionsMap.entrySet()) { + String profileName = entry.getKey(); + if (!profiles.containsKey(profileName)) { + profiles.put(profileName, newProfile(profileName)); + } + } + } + + private MapBasedDriverExecutionProfile newProfile(String profileName) { + return new MapBasedDriverExecutionProfile(optionsMap, profileName); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoader.java new file mode 100644 index 00000000000..85fcad563c1 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoader.java @@ -0,0 +1,77 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.map; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.config.ConfigChangeEvent; +import com.datastax.oss.driver.internal.core.context.EventBus; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.function.Consumer; + +public class MapBasedDriverConfigLoader implements DriverConfigLoader, Consumer { + + @NonNull private final OptionsMap source; + @NonNull private final Map> rawMap; + private volatile EventBus eventBus; + + public MapBasedDriverConfigLoader( + @NonNull OptionsMap source, @NonNull Map> rawMap) { + this.source = source; + this.rawMap = rawMap; + } + + @NonNull + @Override + public DriverConfig getInitialConfig() { + return new MapBasedDriverConfig(rawMap); + } + + @Override + public void onDriverInit(@NonNull DriverContext context) { + eventBus = ((InternalDriverContext) context).getEventBus(); + source.addChangeListener(this); + } + + @Override + public void accept(OptionsMap map) { + assert eventBus != null; // listener is registered after setting this field + eventBus.fire(ConfigChangeEvent.INSTANCE); + } + + @NonNull + @Override + public CompletionStage reload() { + return CompletableFuture.completedFuture(true); + } + + @Override + public boolean supportsReloading() { + return true; + } + + @Override + public void close() { + source.removeChangeListener(this); + } +} diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java new file mode 100644 index 00000000000..b295d8d3760 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java @@ -0,0 +1,332 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.map; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.SortedSet; + +/** @see MapBasedDriverConfigLoader */ +public class MapBasedDriverExecutionProfile implements DriverExecutionProfile { + + private static final Object NO_VALUE = new Object(); + + private final String profileName; + // Anything that was overridden in a derived profile with `withXxx` methods. Empty for non-derived + // profiles + private final Map overrides; + // The backing map for the current profile + private final Map profile; + // The backing map for the default profile (if the current one is not the default) + private final Map defaultProfile; + + public MapBasedDriverExecutionProfile( + Map> optionsMap, String profileName) { + this( + profileName, + Collections.emptyMap(), + optionsMap.get(profileName), + profileName.equals(DriverExecutionProfile.DEFAULT_NAME) + ? Collections.emptyMap() + : optionsMap.get(DriverExecutionProfile.DEFAULT_NAME)); + Preconditions.checkArgument( + optionsMap.containsKey(profileName), + "Unknown profile '%s'. 
Check your configuration.", + profileName); + } + + public MapBasedDriverExecutionProfile( + String profileName, + Map overrides, + Map profile, + Map defaultProfile) { + this.profileName = profileName; + this.overrides = overrides; + this.profile = profile; + this.defaultProfile = defaultProfile; + } + + @NonNull + @Override + public String getName() { + return profileName; + } + + @Override + public boolean isDefined(@NonNull DriverOption option) { + if (overrides.containsKey(option)) { + return overrides.get(option) != NO_VALUE; + } else { + return profile.containsKey(option) || defaultProfile.containsKey(option); + } + } + + // Driver options don't encode the type, everything relies on the user putting the right types in + // the backing map, so no point in trying to type-check. + @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + @NonNull + private T get(@NonNull DriverOption option) { + Object value = + overrides.getOrDefault(option, profile.getOrDefault(option, defaultProfile.get(option))); + if (value == null || value == NO_VALUE) { + throw new IllegalArgumentException("Missing configuration option " + option.getPath()); + } + return (T) value; + } + + @Override + public boolean getBoolean(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getBooleanList(@NonNull DriverOption option) { + return get(option); + } + + @Override + public int getInt(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getIntList(@NonNull DriverOption option) { + return get(option); + } + + @Override + public long getLong(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getLongList(@NonNull DriverOption option) { + return get(option); + } + + @Override + public double getDouble(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getDoubleList(@NonNull DriverOption option) { + return 
get(option); + } + + @NonNull + @Override + public String getString(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getStringList(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public Map getStringMap(@NonNull DriverOption option) { + return get(option); + } + + @Override + public long getBytes(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getBytesList(DriverOption option) { + return get(option); + } + + @NonNull + @Override + public Duration getDuration(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public List getDurationList(@NonNull DriverOption option) { + return get(option); + } + + @NonNull + @Override + public Object getComparisonKey(@NonNull DriverOption option) { + // This method is only used during driver initialization, performance is not crucial + String prefix = option.getPath(); + ImmutableMap.Builder childOptions = ImmutableMap.builder(); + for (Map.Entry entry : entrySet()) { + if (entry.getKey().startsWith(prefix)) { + childOptions.put(entry.getKey(), entry.getValue()); + } + } + return childOptions.build(); + } + + @NonNull + @Override + public SortedSet> entrySet() { + ImmutableSortedSet.Builder> builder = + ImmutableSortedSet.orderedBy(Map.Entry.comparingByKey()); + for (Map backingMap : + // builder.add() ignores duplicates, so process higher precedence backing maps first + ImmutableList.of(overrides, profile, defaultProfile)) { + for (Map.Entry entry : backingMap.entrySet()) { + if (entry.getValue() != NO_VALUE) { + builder.add(new AbstractMap.SimpleEntry<>(entry.getKey().getPath(), entry.getValue())); + } + } + } + return builder.build(); + } + + private DriverExecutionProfile with(@NonNull DriverOption option, Object value) { + ImmutableMap.Builder newOverrides = ImmutableMap.builder(); + for (Map.Entry override : overrides.entrySet()) { + if 
(!override.getKey().equals(option)) { + newOverrides.put(override.getKey(), override.getValue()); + } + } + newOverrides.put(option, value); + return new MapBasedDriverExecutionProfile( + profileName, newOverrides.build(), profile, defaultProfile); + } + + @NonNull + @Override + public DriverExecutionProfile withBoolean(@NonNull DriverOption option, boolean value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withBooleanList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withInt(@NonNull DriverOption option, int value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withIntList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withLong(@NonNull DriverOption option, long value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withLongList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withDouble(@NonNull DriverOption option, double value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withDoubleList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withString(@NonNull DriverOption option, @NonNull String value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withStringList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withStringMap( + @NonNull DriverOption option, @NonNull Map value) { + return with(option, value); + } + + @NonNull + @Override + public 
DriverExecutionProfile withBytes(@NonNull DriverOption option, long value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withBytesList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withDuration( + @NonNull DriverOption option, @NonNull Duration value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile withDurationList( + @NonNull DriverOption option, @NonNull List value) { + return with(option, value); + } + + @NonNull + @Override + public DriverExecutionProfile without(@NonNull DriverOption option) { + return with(option, NO_VALUE); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/config/OptionsMapTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/config/OptionsMapTest.java new file mode 100644 index 00000000000..f47fb872b16 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/config/OptionsMapTest.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.internal.SerializationHelper; +import java.time.Duration; +import java.util.function.Consumer; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class OptionsMapTest { + @Mock private Consumer mockListener; + + @Test + public void should_serialize_and_deserialize() { + // Given + OptionsMap initial = OptionsMap.driverDefaults(); + Duration slowTimeout = Duration.ofSeconds(30); + initial.put("slow", TypedDriverOption.REQUEST_TIMEOUT, slowTimeout); + initial.addChangeListener(mockListener); + + // When + OptionsMap deserialized = SerializationHelper.serializeAndDeserialize(initial); + + // Then + assertThat(deserialized.get(TypedDriverOption.REQUEST_TIMEOUT)) + .isEqualTo(Duration.ofSeconds(2)); + assertThat(deserialized.get("slow", TypedDriverOption.REQUEST_TIMEOUT)).isEqualTo(slowTimeout); + // Listeners are transient + assertThat(deserialized.removeChangeListener(mockListener)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/config/TypedDriverOptionTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/config/TypedDriverOptionTest.java new file mode 100644 index 00000000000..9c63250d87d --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/config/TypedDriverOptionTest.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.HashSet; +import java.util.Set; +import org.junit.Test; + +public class TypedDriverOptionTest { + + /** + * Checks that every built-in {@link DriverOption} has an equivalent constant in {@link + * TypedDriverOption}. + */ + @Test + public void should_have_equivalents_for_all_builtin_untyped_options() { + Set optionsThatHaveATypedEquivalent = new HashSet<>(); + for (TypedDriverOption typedOption : TypedDriverOption.builtInValues()) { + optionsThatHaveATypedEquivalent.add(typedOption.getRawOption()); + } + + // These options are only used internally to compare policy configurations across profiles. + // Users never use them directly, so they don't need typed equivalents. + Set exclusions = + ImmutableSet.of( + DefaultDriverOption.LOAD_BALANCING_POLICY, + DefaultDriverOption.RETRY_POLICY, + DefaultDriverOption.SPECULATIVE_EXECUTION_POLICY); + + for (DriverOption option : + ImmutableSet.builder() + .add(DefaultDriverOption.values()) + .add(DseDriverOption.values()) + .build()) { + if (!exclusions.contains(option)) { + assertThat(optionsThatHaveATypedEquivalent) + .as( + "Couldn't find a typed equivalent for %s.%s. 
" + + "You need to either add a constant in %s, or an exclusion in this test.", + option.getClass().getSimpleName(), option, TypedDriverOption.class.getSimpleName()) + .contains(option); + } + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockTypedOptions.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockTypedOptions.java new file mode 100644 index 00000000000..de6345cc0e2 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockTypedOptions.java @@ -0,0 +1,26 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config; + +import com.datastax.oss.driver.api.core.config.TypedDriverOption; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; + +public class MockTypedOptions { + public static final TypedDriverOption INT1 = + new TypedDriverOption<>(MockOptions.INT1, GenericType.INTEGER); + public static final TypedDriverOption INT2 = + new TypedDriverOption<>(MockOptions.INT2, GenericType.INTEGER); +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java new file mode 100644 index 00000000000..24a7df5d1e9 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.map; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.internal.core.config.MockOptions; +import com.datastax.oss.driver.internal.core.config.MockTypedOptions; +import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; +import java.util.Map; +import java.util.SortedSet; +import org.junit.Test; + +public class MapBasedDriverConfigLoaderTest { + + @Test + public void should_reflect_changes_in_real_time() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 1); + + DriverConfigLoader loader = DriverConfigLoader.fromMap(source); + DriverConfig config = loader.getInitialConfig(); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT1)).isEqualTo(1); + + source.put(MockTypedOptions.INT1, 2); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT1)).isEqualTo(2); + } + + /** + * Checks that, if we ask to pre-fill the default profile, then we get the same set of options as + * the built-in reference.conf. 
+ */ + @Test + public void should_fill_default_profile_like_reference_file() { + SortedSet> memoryBased = + DriverConfigLoader.fromMap(OptionsMap.driverDefaults()) + .getInitialConfig() + .getDefaultProfile() + .entrySet(); + SortedSet> fileBased = + new DefaultDriverConfigLoader().getInitialConfig().getDefaultProfile().entrySet(); + + for (Map.Entry entry : fileBased) { + if (entry.getKey().equals(DefaultDriverOption.CONFIG_RELOAD_INTERVAL.getPath())) { + continue; + } + assertThat(memoryBased).as("Missing entry: " + entry).contains(entry); + } + assertThat(memoryBased).hasSize(fileBased.size() - 1); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigTest.java new file mode 100644 index 00000000000..f21803c5688 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigTest.java @@ -0,0 +1,101 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.map; + +import static com.datastax.oss.driver.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.internal.core.config.MockOptions; +import com.datastax.oss.driver.internal.core.config.MockTypedOptions; +import org.junit.Test; + +public class MapBasedDriverConfigTest { + + @Test + public void should_load_minimal_config_with_no_profiles() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + + assertThat(config).hasIntOption(MockOptions.INT1, 42); + } + + @Test + public void should_inherit_option_in_profile() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + // need to add an unrelated option to create the profile + source.put("profile1", MockTypedOptions.INT2, 1); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + + assertThat(config) + .hasIntOption(MockOptions.INT1, 42) + .hasIntOption("profile1", MockOptions.INT1, 42); + } + + @Test + public void should_override_option_in_profile() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + source.put("profile1", MockTypedOptions.INT1, 43); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + + assertThat(config) + .hasIntOption(MockOptions.INT1, 42) + .hasIntOption("profile1", MockOptions.INT1, 43); + } + + @Test + public void should_create_derived_profile_with_new_option() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + DriverExecutionProfile base = 
config.getDefaultProfile(); + DriverExecutionProfile derived = base.withInt(MockOptions.INT2, 43); + + assertThat(base.isDefined(MockOptions.INT2)).isFalse(); + assertThat(derived.isDefined(MockOptions.INT2)).isTrue(); + assertThat(derived.getInt(MockOptions.INT2)).isEqualTo(43); + } + + @Test + public void should_create_derived_profile_overriding_option() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + DriverExecutionProfile base = config.getDefaultProfile(); + DriverExecutionProfile derived = base.withInt(MockOptions.INT1, 43); + + assertThat(base.getInt(MockOptions.INT1)).isEqualTo(42); + assertThat(derived.getInt(MockOptions.INT1)).isEqualTo(43); + } + + @Test + public void should_create_derived_profile_unsetting_option() { + OptionsMap source = new OptionsMap(); + source.put(MockTypedOptions.INT1, 42); + source.put(MockTypedOptions.INT2, 43); + DriverConfig config = DriverConfigLoader.fromMap(source).getInitialConfig(); + DriverExecutionProfile base = config.getDefaultProfile(); + DriverExecutionProfile derived = base.without(MockOptions.INT2); + + assertThat(base.getInt(MockOptions.INT2)).isEqualTo(43); + assertThat(derived.isDefined(MockOptions.INT2)).isFalse(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java new file mode 100644 index 00000000000..a83dc547c37 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java @@ -0,0 +1,196 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.config; + +import static com.datastax.oss.simulacron.common.codec.ConsistencyLevel.QUORUM; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.unavailable; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.api.core.config.TypedDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.servererrors.WriteType; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.api.testinfra.utils.ConditionChecker; 
+import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.config.ConfigChangeEvent; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class MapBasedConfigLoaderIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + @Before + public void setup() { + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + /** + * Checks that runtime changes to the pool size are reflected in the driver. This is a special + * case because unlike other options, the driver does not re-read the option at regular intervals; + * instead, it relies on the {@link ConfigChangeEvent} being fired. + */ + @Test + public void should_resize_pool_when_config_changes() { + OptionsMap optionsMap = OptionsMap.driverDefaults(); + + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withLocalDatacenter("dc1") + .withConfigLoader(DriverConfigLoader.fromMap(optionsMap)) + .build()) { + + Node node = session.getMetadata().getNodes().values().iterator().next(); + assertThat(node.getOpenConnections()).isEqualTo(2); // control connection + pool (default 1) + + optionsMap.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 2); + + ConditionChecker.checkThat(() -> node.getOpenConnections() == 3).becomesTrue(); + } + } + + /** Checks that profiles that have specific policy options will get their own policy instance. 
*/ + @Test + public void should_create_policies_per_profile() { + // Given + // a query that throws UNAVAILABLE + String mockQuery = "mock query"; + SIMULACRON_RULE.cluster().prime(when(mockQuery).then(unavailable(QUORUM, 3, 2))); + + // a default profile that uses the default retry policy, and an alternate profile that uses a + // policy that ignores all errors + OptionsMap optionsMap = OptionsMap.driverDefaults(); + String alternateProfile = "profile1"; + optionsMap.put( + alternateProfile, TypedDriverOption.RETRY_POLICY_CLASS, IgnoreAllPolicy.class.getName()); + + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withLocalDatacenter("dc1") + .withConfigLoader(DriverConfigLoader.fromMap(optionsMap)) + .build()) { + + // When + // executing the query for the default profile + SimpleStatement defaultProfileStatement = SimpleStatement.newInstance(mockQuery); + assertThatThrownBy(() -> session.execute(defaultProfileStatement)) + .satisfies( + t -> { + // Then + // the UNAVAILABLE error is surfaced + assertThat(t).isInstanceOf(AllNodesFailedException.class); + AllNodesFailedException anfe = (AllNodesFailedException) t; + assertThat(anfe.getAllErrors()).hasSize(1); + List nodeErrors = anfe.getAllErrors().values().iterator().next(); + assertThat(nodeErrors).hasSize(1); + assertThat(nodeErrors.get(0)).isInstanceOf(UnavailableException.class); + }); + + // When + // executing the query for the alternate profile + SimpleStatement alternateProfileStatement = + SimpleStatement.newInstance(mockQuery).setExecutionProfileName(alternateProfile); + ResultSet rs = session.execute(alternateProfileStatement); + + // Then + // the error is ignored + assertThat(rs.one()).isNull(); + } + } + + public static class IgnoreAllPolicy implements RetryPolicy { + + public IgnoreAllPolicy( + @SuppressWarnings("unused") DriverContext context, + @SuppressWarnings("unused") String profile) { + // nothing to do + } + + @Override + public 
RetryDecision onReadTimeout( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int blockFor, + int received, + boolean dataPresent, + int retryCount) { + return RetryDecision.IGNORE; + } + + @Override + public RetryDecision onWriteTimeout( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + @NonNull WriteType writeType, + int blockFor, + int received, + int retryCount) { + return RetryDecision.IGNORE; + } + + @Override + public RetryDecision onUnavailable( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int required, + int alive, + int retryCount) { + return RetryDecision.IGNORE; + } + + @Override + public RetryDecision onRequestAborted( + @NonNull Request request, @NonNull Throwable error, int retryCount) { + return RetryDecision.IGNORE; + } + + @Override + public RetryDecision onErrorResponse( + @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { + return RetryDecision.IGNORE; + } + + @Override + public void close() { + // nothing to do + } + } +} From 1463f0f13e118c9dddb55eab6c9cba4c7f199f6b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Feb 2020 15:34:22 -0800 Subject: [PATCH 415/979] JAVA-2184: Refactor DescribeIT to improve maintainability --- changelog/README.md | 1 + .../schema/parsing/RelationParser.java | 26 +- .../oss/driver/core/metadata/DescribeIT.java | 277 ++++++------------ .../dse/4.8.cql} | 0 .../dse/5.0.cql} | 0 .../dse/5.1.cql} | 0 .../src/test/resources/DescribeIT/dse/6.8.cql | 199 +++++++++++++ .../src/test/resources/DescribeIT/oss/2.1.cql | 65 ++++ .../oss/2.2.cql} | 0 .../src/test/resources/DescribeIT/oss/3.0.cql | 187 ++++++++++++ .../test/resources/DescribeIT/oss/3.11.cql | 187 ++++++++++++ .../src/test/resources/DescribeIT/oss/4.0.cql | 184 ++++++++++++ 12 files changed, 939 insertions(+), 187 deletions(-) rename integration-tests/src/test/resources/{describe_it_test_2.1.cql => DescribeIT/dse/4.8.cql} (100%) rename 
integration-tests/src/test/resources/{describe_it_test_3.0.cql => DescribeIT/dse/5.0.cql} (100%) rename integration-tests/src/test/resources/{describe_it_test_3.11.cql => DescribeIT/dse/5.1.cql} (100%) create mode 100644 integration-tests/src/test/resources/DescribeIT/dse/6.8.cql create mode 100644 integration-tests/src/test/resources/DescribeIT/oss/2.1.cql rename integration-tests/src/test/resources/{describe_it_test_2.2.cql => DescribeIT/oss/2.2.cql} (100%) create mode 100644 integration-tests/src/test/resources/DescribeIT/oss/3.0.cql create mode 100644 integration-tests/src/test/resources/DescribeIT/oss/3.11.cql create mode 100644 integration-tests/src/test/resources/DescribeIT/oss/4.0.cql diff --git a/changelog/README.md b/changelog/README.md index 2fc3d715f0f..800f34e8fc4 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [improvement] JAVA-2184: Refactor DescribeIT to improve maintainability - [new feature] JAVA-2600: Add map-backed config loader - [new feature] JAVA-2105: Add support for transient replication - [new feature] JAVA-2670: Provide base class for mapped custom codecs diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RelationParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RelationParser.java index 43b942b1669..c756b235ec6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RelationParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/RelationParser.java @@ -15,7 +15,11 @@ */ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; +import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.metadata.Node; +import 
com.datastax.oss.driver.api.core.metadata.schema.Describable; import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; @@ -72,7 +76,7 @@ protected Map parseOptions(AdminRow row) { CqlIdentifier.fromInternal("compression"), ImmutableMap.copyOf(SimpleJsonParser.parseStringMap(row.getString(name)))); } - } else { + } else if (!isDeprecatedInCassandra4(name)) { // Default case, read the value in a generic fashion Object value = row.get(name, codec); if (value != null) { @@ -83,6 +87,26 @@ protected Map parseOptions(AdminRow row) { return builder.build(); } + /** + * Handle a few oddities in Cassandra 4: some options still appear in system_schema.tables, but + * they are not valid in CREATE statements anymore. We need to exclude them from our metadata, + * otherwise {@link Describable#describe(boolean)} will generate invalid CQL. + */ + private boolean isDeprecatedInCassandra4(String name) { + return isCassandra4OrAbove() + && (name.equals("read_repair_chance") + || name.equals("dclocal_read_repair_chance") + // default_time_to_live is not allowed in CREATE MATERIALIZED VIEW statements + || (name.equals("default_time_to_live") && (this instanceof ViewParser))); + } + + private boolean isCassandra4OrAbove() { + Node node = rows.getNode(); + return !node.getExtras().containsKey(DseNodeProperties.DSE_VERSION) + && node.getCassandraVersion() != null + && node.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) >= 0; + } + public static void appendOptions(Map options, ScriptBuilder builder) { for (Map.Entry entry : options.entrySet()) { CqlIdentifier name = entry.getKey(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index a68408ee9af..37e38af2b03 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; -import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -27,14 +26,16 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; -import com.google.common.io.ByteStreams; -import com.google.common.io.Closer; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintStream; +import com.datastax.oss.driver.shaded.guava.common.base.Charsets; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.google.common.io.Files; +import java.io.File; +import java.net.URL; import java.time.Duration; +import java.util.List; import java.util.Optional; +import java.util.regex.Pattern; +import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -50,13 +51,12 @@ public class DescribeIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); - // disable debouncer to speed up test. private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE) - .withKeyspace(false) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + // disable debouncer to speed up test. 
.withDuration(DefaultDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(0)) .build()) .build(); @@ -64,193 +64,98 @@ public class DescribeIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - /** - * Creates a keyspace using a variety of features and ensures {@link - * com.datastax.oss.driver.api.core.metadata.schema.Describable#describe(boolean)} contains the - * expected data in the expected order. This is not exhaustive, but covers quite a bit of - * different scenarios (materialized views, aggregates, functions, nested UDTs, etc.). - * - *

      The test also verifies that the generated schema is the same whether the keyspace and its - * schema was created during the lifecycle of the cluster or before connecting. - * - *

      Note that this test might be fragile in the future if default option values change in - * cassandra. In order to deal with new features, we create a schema for each tested C* version, - * and if one is not present the test is failed. - */ - @Test - public void create_schema_and_ensure_exported_cql_is_as_expected() { - CqlIdentifier keyspace = SessionUtils.uniqueKeyspaceId(); - String keyspaceAsCql = keyspace.asCql(true); - String expectedCql = getExpectedCqlString(keyspaceAsCql); - - CqlSession session = SESSION_RULE.session(); - - // create keyspace - session.execute( - String.format( - "CREATE KEYSPACE %s " - + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}", - keyspace)); - - // connect session to this keyspace. - session.execute(String.format("USE %s", keyspace.asCql(false))); - - Optional originalKsMeta = session.getMetadata().getKeyspace(keyspace); - - // Usertype 'ztype' with two columns. Given name to ensure that even though it has an - // alphabetically later name, it shows up before other user types ('ctype') that depend on it. - session.execute("CREATE TYPE ztype(c text, a int)"); - - // Usertype 'xtype' with two columns. At same level as 'ztype' since both are depended on by - // ctype, should show up before 'ztype' because it's alphabetically before, even though it was - // created after. - session.execute("CREATE TYPE xtype(d text)"); + private static final Splitter STATEMENT_SPLITTER = + // Use a regex to ignore semicolons in function scripts + Splitter.on(Pattern.compile(";\n")).omitEmptyStrings(); - // Usertype 'ctype' which depends on both ztype and xtype, therefore ztype and xtype should show - // up earlier. 
- session.execute( - String.format( - "CREATE TYPE ctype(z frozen<%s.ztype>, x frozen<%s.xtype>)", - keyspaceAsCql, keyspaceAsCql)); + private Version serverVersion; + private boolean isDse; - // Usertype 'btype' which has no dependencies, should show up before 'xtype' and 'ztype' since - // it's alphabetically before. - session.execute("CREATE TYPE btype(a text)"); - - // Usertype 'atype' which depends on 'ctype', so should show up after 'ctype', 'xtype' and - // 'ztype'. - session.execute(String.format("CREATE TYPE atype(c frozen<%s.ctype>)", keyspaceAsCql)); - - // A simple table with a udt column and LCS compaction strategy. - session.execute( - String.format( - "CREATE TABLE ztable(zkey text, a frozen<%s.atype>, PRIMARY KEY(zkey)) " - + "WITH compaction = {'class' : 'LeveledCompactionStrategy', 'sstable_size_in_mb' : 95}", - keyspaceAsCql)); - - // date type requries 2.2+ - if (CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { - // A table that will have materialized views (copied from mv docs) - session.execute( - "CREATE TABLE cyclist_mv(cid uuid, name text, age int, birthday date, country text, " - + "PRIMARY KEY(cid))"); - - // index on table with view, index should be printed first. - session.execute("CREATE INDEX cyclist_by_country ON cyclist_mv(country)"); - - // materialized views require 3.0+ - if (CCM_RULE.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { - // A materialized view for cyclist_mv, reverse clustering. created first to ensure creation - // order does not matter, alphabetical does. - session.execute( - "CREATE MATERIALIZED VIEW cyclist_by_r_age " - + "AS SELECT age, birthday, name, country " - + "FROM cyclist_mv " - + "WHERE age IS NOT NULL AND cid IS NOT NULL " - + "PRIMARY KEY (age, cid) " - + "WITH CLUSTERING ORDER BY (cid DESC)"); + @Before + public void setup() { + Optional dseVersion = CCM_RULE.getDseVersion(); + isDse = dseVersion.isPresent(); + serverVersion = + isDse ? 
dseVersion.get().nextStable() : CCM_RULE.getCassandraVersion().nextStable(); + } - // A materialized view for cyclist_mv, select * - session.execute( - "CREATE MATERIALIZED VIEW cyclist_by_a_age " - + "AS SELECT * " - + "FROM cyclist_mv " - + "WHERE age IS NOT NULL AND cid IS NOT NULL " - + "PRIMARY KEY (age, cid)"); + @Test + public void describe_output_should_match_creation_script() throws Exception { + CqlSession session = SESSION_RULE.session(); - // A materialized view for cyclist_mv, select columns - session.execute( - "CREATE MATERIALIZED VIEW cyclist_by_age " - + "AS SELECT age, birthday, name, country " - + "FROM cyclist_mv " - + "WHERE age IS NOT NULL AND cid IS NOT NULL " - + "PRIMARY KEY (age, cid) WITH comment = 'simple view'"); + File scriptFile = getScriptFile(); + String scriptContents = + Files.asCharSource(scriptFile, Charsets.UTF_8) + .read() + .trim() + .replaceAll("ks_0", SESSION_RULE.keyspace().asCql(true)); + List statements = STATEMENT_SPLITTER.splitToList(scriptContents); + + // Skip the first statement (CREATE KEYSPACE), we already have a keyspace + for (int i = 1; i < statements.size(); i++) { + String statement = statements.get(i); + try { + session.execute(statement); + } catch (Exception e) { + fail("Error executing statement %s (%s)", statement, e); } } - // A table with a secondary index, taken from documentation on secondary index. 
- session.execute( - "CREATE TABLE rank_by_year_and_name(race_year int, race_name text, rank int, cyclist_name text, " - + "PRIMARY KEY((race_year, race_name), rank))"); - - session.execute("CREATE INDEX ryear ON rank_by_year_and_name(race_year)"); - - session.execute("CREATE INDEX rrank ON rank_by_year_and_name(rank)"); - - // udfs and udas require 2.22+ - if (CCM_RULE.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { - // UDFs - session.execute( - "CREATE OR REPLACE FUNCTION avgState ( state tuple, val int ) CALLED ON NULL INPUT RETURNS tuple LANGUAGE java AS \n" - + " 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;';"); - session.execute( - "CREATE OR REPLACE FUNCTION avgFinal ( state tuple ) CALLED ON NULL INPUT RETURNS double LANGUAGE java AS \n" - + " 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);';"); - - // UDAs - session.execute( - "CREATE AGGREGATE IF NOT EXISTS mean ( int ) \n" - + "SFUNC avgState STYPE tuple FINALFUNC avgFinal INITCOND (0,0);"); - session.execute( - "CREATE AGGREGATE IF NOT EXISTS average ( int ) \n" - + "SFUNC avgState STYPE tuple FINALFUNC avgFinal INITCOND (0,0);"); - } - - // Since metadata is immutable, do not expect anything in the original keyspace meta. - assertThat(originalKsMeta).isPresent(); - - assertThat(originalKsMeta.get().getTables()).isEmpty(); - assertThat(originalKsMeta.get().getViews()).isEmpty(); - assertThat(originalKsMeta.get().getFunctions()).isEmpty(); - assertThat(originalKsMeta.get().getAggregates()).isEmpty(); - assertThat(originalKsMeta.get().getUserDefinedTypes()).isEmpty(); - - // validate that the exported schema matches what was expected exactly. 
- Optional ks = SESSION_RULE.session().getMetadata().getKeyspace(keyspace); - assertThat(ks.get().describeWithChildren(true).trim()).isEqualTo(expectedCql); - - // Also validate that when you create a Session with schema already created that the exported - // string is the same. - try (CqlSession newSession = SessionUtils.newSession(CCM_RULE)) { - ks = newSession.getMetadata().getKeyspace(keyspace); - assertThat(ks.get().describeWithChildren(true).trim()).isEqualTo(expectedCql); - } + KeyspaceMetadata keyspaceMetadata = + session.getMetadata().getKeyspace(SESSION_RULE.keyspace()).orElseThrow(AssertionError::new); + String describeOutput = keyspaceMetadata.describeWithChildren(true).trim(); + + assertThat(describeOutput) + .as( + "Describe output doesn't match create statements, " + + "maybe you need to add a new script in integration-tests/src/test/resources. " + + "Server version = %s %s, used script = %s", + isDse ? "DSE" : "Cassandra", serverVersion, scriptFile) + .isEqualTo(scriptContents); } - private String getExpectedCqlString(String keyspace) { - String majorMinor = - CCM_RULE.getCassandraVersion().getMajor() + "." + CCM_RULE.getCassandraVersion().getMinor(); - String resourceName = "/describe_it_test_" + majorMinor + ".cql"; - - Closer closer = Closer.create(); - try { - InputStream is = DescribeIT.class.getResourceAsStream(resourceName); - if (is == null) { - // If no schema file is defined for tested cassandra version, just try 3.11. 
- if (CCM_RULE.getCassandraVersion().compareTo(Version.V3_0_0) >= 0) { - LOG.warn("Could not find schema file for {}, assuming C* 3.11.x", majorMinor); - is = DescribeIT.class.getResourceAsStream("/describe_it_test_3.11.cql"); - if (is == null) { - throw new IOException(); - } - } - } - - closer.register(is); - ByteArrayOutputStream baos = new ByteArrayOutputStream(); - PrintStream ps = new PrintStream(baos); - ByteStreams.copy(is, ps); - return baos.toString().replaceAll("ks_0", keyspace).trim(); - } catch (IOException e) { - LOG.warn("Failure to read {}", resourceName, e); - fail("Unable to read " + resourceName + " is it defined?", e); - } finally { - try { - closer.close(); - } catch (IOException e) { // no op - LOG.warn("Failure closing streams", e); + /** + * Find a creation script in our test resources that matches the current server version. If we + * don't have an exact match, use the closest version below it. + */ + private File getScriptFile() { + URL logbackTestUrl = DescribeIT.class.getResource("/logback-test.xml"); + if (logbackTestUrl == null || logbackTestUrl.getFile().isEmpty()) { + fail( + "Expected to use logback-test.xml to determine location of " + + "target/test-classes, but got URL %s", + logbackTestUrl); + } + File resourcesDir = new File(logbackTestUrl.getFile()).getParentFile(); + File scriptsDir = new File(resourcesDir, isDse ? 
"DescribeIT/dse" : "DescribeIT/oss"); + LOG.debug("Looking for a matching script in directory {}", scriptsDir); + + File[] candidates = scriptsDir.listFiles(); + assertThat(candidates).isNotNull(); + + File bestFile = null; + Version bestVersion = null; + for (File candidate : candidates) { + String fileName = candidate.getName(); + String candidateVersionString = fileName.substring(0, fileName.lastIndexOf('.')); + Version candidateVersion = Version.parse(candidateVersionString); + LOG.debug("Considering {}, which resolves to version {}", fileName, candidateVersion); + if (candidateVersion.compareTo(serverVersion) > 0) { + LOG.debug("too high, discarding"); + } else if (bestVersion != null && bestVersion.compareTo(candidateVersion) >= 0) { + LOG.debug("not higher than {}, discarding", bestVersion); + } else { + LOG.debug("best so far"); + bestVersion = candidateVersion; + bestFile = candidate; } } - return ""; + assertThat(bestFile) + .as("Could not find create script with version <= %s in %s", serverVersion, scriptsDir) + .isNotNull(); + + LOG.info( + "Using {} to test against {} {}", bestFile, isDse ? 
"DSE" : "Cassandra", serverVersion); + return bestFile; } } diff --git a/integration-tests/src/test/resources/describe_it_test_2.1.cql b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql similarity index 100% rename from integration-tests/src/test/resources/describe_it_test_2.1.cql rename to integration-tests/src/test/resources/DescribeIT/dse/4.8.cql diff --git a/integration-tests/src/test/resources/describe_it_test_3.0.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql similarity index 100% rename from integration-tests/src/test/resources/describe_it_test_3.0.cql rename to integration-tests/src/test/resources/DescribeIT/dse/5.0.cql diff --git a/integration-tests/src/test/resources/describe_it_test_3.11.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql similarity index 100% rename from integration-tests/src/test/resources/describe_it_test_3.11.cql rename to integration-tests/src/test/resources/DescribeIT/dse/5.1.cql diff --git a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql new file mode 100644 index 00000000000..24492c7b176 --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql @@ -0,0 +1,199 @@ +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.cyclist_mv ( + cid uuid, + age int, + birthday date, + country text, + name text, + PRIMARY KEY (cid) +) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = 
{'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX cyclist_by_country ON ks_0.cyclist_mv (country); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = 
{'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','max_threshold':'32','min_threshold':'4','sstable_size_in_mb':'95'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_a_age AS +SELECT * FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = 'simple view' + AND compaction = 
{'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_r_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99PERCENTILE' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE FUNCTION ks_0.avgfinal(state tuple) + CALLED ON NULL INPUT + RETURNS double + LANGUAGE java + AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);'; + +CREATE FUNCTION ks_0.avgstate(state tuple,val int) + CALLED ON NULL INPUT + RETURNS tuple + LANGUAGE java + AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return 
state;'; + +CREATE AGGREGATE ks_0.average(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); + +CREATE AGGREGATE ks_0.mean(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql new file mode 100644 index 00000000000..b05df71a503 --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql @@ -0,0 +1,65 @@ +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH bloom_filter_fp_chance = 0.01 + AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}' + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} + AND compression = {'sstable_compression':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND dclocal_read_repair_chance = 0.1 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99.0PERCENTILE'; + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH bloom_filter_fp_chance = 0.1 + AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}' + AND comment = '' + AND compaction = 
{'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','sstable_size_in_mb':'95'} + AND compression = {'sstable_compression':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND dclocal_read_repair_chance = 0.1 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99.0PERCENTILE'; \ No newline at end of file diff --git a/integration-tests/src/test/resources/describe_it_test_2.2.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql similarity index 100% rename from integration-tests/src/test/resources/describe_it_test_2.2.cql rename to integration-tests/src/test/resources/DescribeIT/oss/2.2.cql diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql new file mode 100644 index 00000000000..fe606992a44 --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql @@ -0,0 +1,187 @@ +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.cyclist_mv ( + cid uuid, + age int, + birthday date, + country text, + name text, + PRIMARY KEY (cid) +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND 
dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX cyclist_by_country ON ks_0.cyclist_mv (country); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','sstable_size_in_mb':'95'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND 
read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_a_age AS +SELECT * FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = 'simple view' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_r_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH 
bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE FUNCTION ks_0.avgfinal(state tuple) + CALLED ON NULL INPUT + RETURNS double + LANGUAGE java + AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);'; + +CREATE FUNCTION ks_0.avgstate(state tuple,val int) + CALLED ON NULL INPUT + RETURNS tuple + LANGUAGE java + AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;'; + +CREATE AGGREGATE ks_0.average(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); + +CREATE AGGREGATE ks_0.mean(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); \ No newline at end of file diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql new file mode 100644 index 00000000000..fe606992a44 --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql @@ -0,0 +1,187 @@ +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z 
frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.cyclist_mv ( + cid uuid, + age int, + birthday date, + country text, + name text, + PRIMARY KEY (cid) +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX cyclist_by_country ON ks_0.cyclist_mv (country); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH 
bloom_filter_fp_chance = 0.1 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','sstable_size_in_mb':'95'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_a_age AS +SELECT * FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = 'simple view' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = 
{'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_r_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE FUNCTION ks_0.avgfinal(state tuple) + CALLED ON NULL INPUT + RETURNS double + LANGUAGE java + AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);'; + +CREATE FUNCTION ks_0.avgstate(state tuple,val int) + CALLED ON NULL INPUT + RETURNS tuple + LANGUAGE java + AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;'; + +CREATE AGGREGATE ks_0.average(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); + +CREATE AGGREGATE ks_0.mean(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); \ No newline at end of file 
diff --git a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql new file mode 100644 index 00000000000..6c741fd90fe --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql @@ -0,0 +1,184 @@ +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.cyclist_mv ( + cid uuid, + age int, + birthday date, + country text, + name text, + PRIMARY KEY (cid) +) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE INDEX cyclist_by_country ON ks_0.cyclist_mv (country); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = 
{'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','max_threshold':'32','min_threshold':'4','sstable_size_in_mb':'95'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_a_age AS +SELECT * FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND 
read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = 'simple view' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_r_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE FUNCTION ks_0.avgfinal(state tuple) + CALLED ON NULL INPUT + RETURNS double + LANGUAGE java + AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);'; + +CREATE FUNCTION 
ks_0.avgstate(state tuple,val int) + CALLED ON NULL INPUT + RETURNS tuple + LANGUAGE java + AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;'; + +CREATE AGGREGATE ks_0.average(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); + +CREATE AGGREGATE ks_0.mean(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); From c6834ac9cb2064f4e6869087dba2dd3f7d746dfe Mon Sep 17 00:00:00 2001 From: Greg Bestland Date: Thu, 26 Mar 2020 10:10:33 -0500 Subject: [PATCH 416/979] JAVA-2679: Add port information to QueryTrace and TraceEvent (#1411) * JAVA-2679: Add port information to QueryTrace and TraceEvent --- changelog/README.md | 1 + .../oss/driver/api/core/cql/QueryTrace.java | 22 ++++++++++++++++- .../oss/driver/api/core/cql/TraceEvent.java | 21 +++++++++++++++- .../internal/core/cql/DefaultQueryTrace.java | 11 +++++++-- .../internal/core/cql/DefaultTraceEvent.java | 10 ++++++-- .../internal/core/cql/QueryTraceFetcher.java | 13 ++++++++-- .../core/cql/QueryTraceFetcherTest.java | 24 +++++++++++++++---- .../oss/driver/core/cql/QueryTraceIT.java | 17 ++++++++++--- 8 files changed, 104 insertions(+), 15 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 800f34e8fc4..a2edaabf485 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [improvement] JAVA-2679: Add port information to QueryTrace and TraceEvent - [improvement] JAVA-2184: Refactor DescribeIT to improve maintainability - [new feature] JAVA-2600: Add map-backed config loader - [new feature] JAVA-2105: Add support for transient replication diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/QueryTrace.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/QueryTrace.java index 4af0648ce4c..09d93f47fff 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/QueryTrace.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/cql/QueryTrace.java @@ -17,6 +17,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.util.List; import java.util.Map; import java.util.UUID; @@ -39,10 +40,29 @@ public interface QueryTrace { /** The server-side duration of the query in microseconds. */ int getDurationMicros(); - /** The IP of the node that coordinated the query. */ + /** + * @deprecated returns the coordinator IP, but {@link #getCoordinatorAddress()} should be + * preferred, since C* 4.0 and above now returns the port was well. + */ @NonNull + @Deprecated InetAddress getCoordinator(); + /** + * The IP and port of the node that coordinated the query. Prior to C* 4.0 the port is not set and + * will default to 0. + * + *

      This method's default implementation returns {@link #getCoordinator()} with the port set to + * 0. The only reason it exists is to preserve binary compatibility. Internally, the driver + * overrides it to set the correct port. + * + * @since 4.6.0 + */ + @NonNull + default InetSocketAddress getCoordinatorAddress() { + return new InetSocketAddress(getCoordinator(), 0); + } + /** The parameters attached to this trace. */ @NonNull Map getParameters(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/TraceEvent.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/TraceEvent.java index c55e874cdc1..36c53e444e0 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/TraceEvent.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/TraceEvent.java @@ -17,6 +17,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetAddress; +import java.net.InetSocketAddress; /** An event in a {@link QueryTrace}. */ public interface TraceEvent { @@ -28,10 +29,28 @@ public interface TraceEvent { /** The server-side timestamp of the event. */ long getTimestamp(); - /** The IP of the host having generated this event. */ + /** + * @deprecated returns the source IP, but {@link #getSourceAddress()} should be preferred, since + * C* 4.0 and above now returns the port was well. + */ @Nullable + @Deprecated InetAddress getSource(); + /** + * The IP and Port of the host having generated this event. Prior to C* 4.0 the port will be set + * to zero. + * + *

      This method's default implementation returns {@link #getSource()} with the port set to 0. + * The only reason it exists is to preserve binary compatibility. Internally, the driver overrides + * it to set the correct port. + * + * @since 4.6.0 + */ + @Nullable + default InetSocketAddress getSourceAddress() { + return new InetSocketAddress(getSource(), 0); + } /** * The number of microseconds elapsed on the source when this event occurred since the moment when * the source started handling the query. diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultQueryTrace.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultQueryTrace.java index 1caaace911c..d0d864897ce 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultQueryTrace.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultQueryTrace.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.cql.TraceEvent; import edu.umd.cs.findbugs.annotations.NonNull; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.util.List; import java.util.Map; import java.util.UUID; @@ -30,7 +31,7 @@ public class DefaultQueryTrace implements QueryTrace { private final UUID tracingId; private final String requestType; private final int durationMicros; - private final InetAddress coordinator; + private final InetSocketAddress coordinator; private final Map parameters; private final long startedAt; private final List events; @@ -39,7 +40,7 @@ public DefaultQueryTrace( UUID tracingId, String requestType, int durationMicros, - InetAddress coordinator, + InetSocketAddress coordinator, Map parameters, long startedAt, List events) { @@ -72,6 +73,12 @@ public int getDurationMicros() { @NonNull @Override public InetAddress getCoordinator() { + return coordinator.getAddress(); + } + + @NonNull + @Override + public InetSocketAddress getCoordinatorAddress() { return coordinator; } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultTraceEvent.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultTraceEvent.java index fab045bd588..01bc66dd2e9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultTraceEvent.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultTraceEvent.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.cql.TraceEvent; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.util.Date; import net.jcip.annotations.Immutable; @@ -25,14 +26,14 @@ public class DefaultTraceEvent implements TraceEvent { private final String activity; private final long timestamp; - private final InetAddress source; + private final InetSocketAddress source; private final int sourceElapsedMicros; private final String threadName; public DefaultTraceEvent( String activity, long timestamp, - InetAddress source, + InetSocketAddress source, int sourceElapsedMicros, String threadName) { this.activity = activity; @@ -55,6 +56,11 @@ public long getTimestamp() { @Override public InetAddress getSource() { + return source.getAddress(); + } + + @Override + public InetSocketAddress getSourceAddress() { return source; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java index 383f4f38487..ab32fb4c5af 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcher.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import io.netty.util.concurrent.EventExecutor; +import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.time.Instant; import java.util.ArrayList; @@ 
-135,20 +136,28 @@ private QueryTrace buildTrace(Row sessionRow, Iterable eventRows) { ImmutableList.Builder eventsBuilder = ImmutableList.builder(); for (Row eventRow : eventRows) { UUID eventId = eventRow.getUuid("event_id"); + int sourcePort = 0; + if (eventRow.getColumnDefinitions().contains("source_port")) { + sourcePort = eventRow.getInt("source_port"); + } eventsBuilder.add( new DefaultTraceEvent( eventRow.getString("activity"), eventId == null ? -1 : eventId.timestamp(), - eventRow.getInetAddress("source"), + new InetSocketAddress(eventRow.getInetAddress("source"), sourcePort), eventRow.getInt("source_elapsed"), eventRow.getString("thread"))); } Instant startedAt = sessionRow.getInstant("started_at"); + int coordinatorPort = 0; + if (sessionRow.getColumnDefinitions().contains("coordinator_port")) { + coordinatorPort = sessionRow.getInt("coordinator_port"); + } return new DefaultQueryTrace( tracingId, sessionRow.getString("request"), sessionRow.getInt("duration"), - sessionRow.getInetAddress("coordinator"), + new InetSocketAddress(sessionRow.getInetAddress("coordinator"), coordinatorPort), sessionRow.getMap("parameters", String.class, String.class), startedAt == null ? 
-1 : startedAt.toEpochMilli(), eventsBuilder.build()); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java index f84abfe39f7..3030d41f008 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.QueryTrace; import com.datastax.oss.driver.api.core.cql.Row; @@ -67,6 +68,7 @@ public class QueryTraceFetcherTest { private static final UUID TRACING_ID = UUID.randomUUID(); private static final ByteBuffer PAGING_STATE = Bytes.fromHexString("0xdeadbeef"); + private static final int PORT = 7000; @Mock private CqlSession session; @Mock private InternalDriverContext context; @@ -134,7 +136,8 @@ public void should_succeed_when_both_queries_succeed_immediately() { assertThat(trace.getTracingId()).isEqualTo(TRACING_ID); assertThat(trace.getRequestType()).isEqualTo("mock request"); assertThat(trace.getDurationMicros()).isEqualTo(42); - assertThat(trace.getCoordinator()).isEqualTo(address); + assertThat(trace.getCoordinatorAddress().getAddress()).isEqualTo(address); + assertThat(trace.getCoordinatorAddress().getPort()).isEqualTo(PORT); assertThat(trace.getParameters()) .hasSize(2) .containsEntry("key1", "value1") @@ -147,7 +150,9 @@ public void should_succeed_when_both_queries_succeed_immediately() { TraceEvent event = events.get(i); assertThat(event.getActivity()).isEqualTo("mock activity " + i); 
assertThat(event.getTimestamp()).isEqualTo(i); - assertThat(event.getSource()).isEqualTo(address); + assertThat(event.getSourceAddress()).isNotNull(); + assertThat(event.getSourceAddress().getAddress()).isEqualTo(address); + assertThat(event.getSourceAddress().getPort()).isEqualTo(PORT); assertThat(event.getSourceElapsedMicros()).isEqualTo(i); assertThat(event.getThreadName()).isEqualTo("mock thread " + i); } @@ -214,7 +219,8 @@ public void should_retry_when_session_row_is_incomplete() { assertThat(trace.getTracingId()).isEqualTo(TRACING_ID); assertThat(trace.getRequestType()).isEqualTo("mock request"); assertThat(trace.getDurationMicros()).isEqualTo(42); - assertThat(trace.getCoordinator()).isEqualTo(address); + assertThat(trace.getCoordinatorAddress().getAddress()).isEqualTo(address); + assertThat(trace.getCoordinatorAddress().getPort()).isEqualTo(PORT); assertThat(trace.getParameters()) .hasSize(2) .containsEntry("key1", "value1") @@ -227,7 +233,9 @@ public void should_retry_when_session_row_is_incomplete() { TraceEvent event = events.get(i); assertThat(event.getActivity()).isEqualTo("mock activity " + i); assertThat(event.getTimestamp()).isEqualTo(i); - assertThat(event.getSource()).isEqualTo(address); + assertThat(event.getSourceAddress()).isNotNull(); + assertThat(event.getSourceAddress().getAddress()).isEqualTo(address); + assertThat(event.getSourceAddress().getPort()).isEqualTo(PORT); assertThat(event.getSourceElapsedMicros()).isEqualTo(i); assertThat(event.getThreadName()).isEqualTo("mock thread " + i); } @@ -294,6 +302,8 @@ private CompletionStage incompleteSessionRow() { private CompletionStage sessionRow(Integer duration) { Row row = mock(Row.class); + ColumnDefinitions definitions = mock(ColumnDefinitions.class); + when(row.getColumnDefinitions()).thenReturn(definitions); when(row.getString("request")).thenReturn("mock request"); if (duration == null) { when(row.isNull("duration")).thenReturn(true); @@ -301,6 +311,8 @@ private CompletionStage 
sessionRow(Integer duration) { when(row.getInt("duration")).thenReturn(duration); } when(row.getInetAddress("coordinator")).thenReturn(address); + when(definitions.contains("coordinator_port")).thenReturn(true); + when(row.getInt("coordinator_port")).thenReturn(PORT); when(row.getMap("parameters", String.class, String.class)) .thenReturn(ImmutableMap.of("key1", "value1", "key2", "value2")); when(row.isNull("started_at")).thenReturn(false); @@ -355,9 +367,13 @@ private CompletionStage multiPageEventRows2() { private Row eventRow(int i) { Row row = mock(Row.class); + ColumnDefinitions definitions = mock(ColumnDefinitions.class); + when(row.getColumnDefinitions()).thenReturn(definitions); when(row.getString("activity")).thenReturn("mock activity " + i); when(row.getUuid("event_id")).thenReturn(Uuids.startOf(i)); when(row.getInetAddress("source")).thenReturn(address); + when(definitions.contains("source_port")).thenReturn(true); + when(row.getInt("source_port")).thenReturn(PORT); when(row.getInt("source_elapsed")).thenReturn(i); when(row.getString("thread")).thenReturn("mock thread " + i); return row; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index f6c724e67bb..33eddb2afa2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.QueryTrace; import com.datastax.oss.driver.api.core.cql.SimpleStatement; @@ -25,6 +26,7 @@ import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import 
com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import java.net.InetAddress; import java.net.InetSocketAddress; import org.junit.ClassRule; import org.junit.Rule; @@ -74,13 +76,18 @@ public void should_fetch_trace_when_tracing_enabled() { assertThat(executionInfo.getTracingId()).isNotNull(); + EndPoint contactPoint = CCM_RULE.getContactPoints().iterator().next(); + InetAddress nodeAddress = ((InetSocketAddress) contactPoint.resolve()).getAddress(); + boolean expectPorts = + CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) >= 0 + && !CCM_RULE.getDseVersion().isPresent(); + QueryTrace queryTrace = executionInfo.getQueryTrace(); assertThat(queryTrace.getTracingId()).isEqualTo(executionInfo.getTracingId()); assertThat(queryTrace.getRequestType()).isEqualTo("Execute CQL3 query"); assertThat(queryTrace.getDurationMicros()).isPositive(); - EndPoint contactPoint = CCM_RULE.getContactPoints().iterator().next(); - assertThat(queryTrace.getCoordinator()) - .isEqualTo(((InetSocketAddress) contactPoint.resolve()).getAddress()); + assertThat(queryTrace.getCoordinatorAddress().getAddress()).isEqualTo(nodeAddress); + assertThat(queryTrace.getCoordinatorAddress().getPort()).isEqualTo(expectPorts ? 7000 : 0); assertThat(queryTrace.getParameters()) .containsEntry("consistency_level", "LOCAL_ONE") .containsEntry("page_size", "5000") @@ -89,5 +96,9 @@ public void should_fetch_trace_when_tracing_enabled() { assertThat(queryTrace.getStartedAt()).isPositive(); // Don't want to get too deep into event testing because that could change across versions assertThat(queryTrace.getEvents()).isNotEmpty(); + InetSocketAddress sourceAddress0 = queryTrace.getEvents().get(0).getSourceAddress(); + assertThat(sourceAddress0).isNotNull(); + assertThat(sourceAddress0.getAddress()).isEqualTo(nodeAddress); + assertThat(sourceAddress0.getPort()).isEqualTo(expectPorts ? 
7000 : 0); } } From 1f6a58e22d808770aeeec02d7c4547bb891d073b Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 26 Mar 2020 08:54:58 -0700 Subject: [PATCH 417/979] Adjust debug log in ControlConnection Log the channel instead of the node, as in ChannelPool. This shows the local port, which can be useful when debugging. --- .../oss/driver/internal/core/control/ControlConnection.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index a3bd11d8452..294bfd7e060 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -426,7 +426,7 @@ private void connect( channel.forceClose(); connect(nodes, errors, onSuccess, onFailure); } else { - LOG.debug("[{}] Connection established to {}", logPrefix, node); + LOG.debug("[{}] New channel opened {}", logPrefix, channel); // Make sure previous channel gets closed (it may still be open if // reconnection was forced) DriverChannel previousChannel = ControlConnection.this.channel; From 8beeb07eb457e2babee744b23dfbf8250c14d369 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 6 Mar 2020 11:04:21 -0800 Subject: [PATCH 418/979] JAVA-2631: Add getIndex() shortcuts to TableMetadata --- changelog/README.md | 1 + .../api/core/metadata/schema/TableMetadata.java | 12 ++++++++++++ .../metadata/schema/parsing/TableParserTest.java | 3 +++ 3 files changed, 16 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index a2edaabf485..71faeae614e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [improvement] JAVA-2631: Add getIndex() shortcuts to TableMetadata - [improvement] JAVA-2679: Add port information to QueryTrace and TraceEvent - [improvement] 
JAVA-2184: Refactor DescribeIT to improve maintainability - [new feature] JAVA-2600: Add map-backed config loader diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/TableMetadata.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/TableMetadata.java index 425d08945c0..5f286c23787 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/TableMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/TableMetadata.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.internal.core.metadata.schema.parsing.RelationParser; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; +import java.util.Optional; /** A table in the schema metadata. */ public interface TableMetadata extends RelationMetadata { @@ -32,6 +33,17 @@ public interface TableMetadata extends RelationMetadata { @NonNull Map getIndexes(); + @NonNull + default Optional getIndex(@NonNull CqlIdentifier indexId) { + return Optional.ofNullable(getIndexes().get(indexId)); + } + + /** Shortcut for {@link #getIndex(CqlIdentifier) getIndex(CqlIdentifier.fromCql(indexName))}. 
*/ + @NonNull + default Optional getIndex(@NonNull String indexName) { + return getIndex(CqlIdentifier.fromCql(indexName)); + } + @NonNull @Override default String describe(boolean pretty) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index 7b4e652fb0f..fe106165f67 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -180,6 +180,9 @@ private void checkTable(TableMetadata table) { assertThat(index.getClassName()).isNotPresent(); assertThat(index.getKind()).isEqualTo(IndexKind.COMPOSITES); assertThat(index.getTarget()).isEqualTo("v"); + + assertThat(table.getIndex("foo_v_idx")).hasValue(index); + @SuppressWarnings("unchecked") Map compaction = (Map) table.getOptions().get(CqlIdentifier.fromInternal("compaction")); From 4e08500aa205ef2e3d4c305e2e4ed4b1f15b1503 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 27 Mar 2020 10:24:55 -0700 Subject: [PATCH 419/979] JAVA-2711: Fix handling of UDT keys in the mapper --- changelog/README.md | 1 + .../datastax/oss/driver/mapper/UdtKeyIT.java | 196 ++++++++++++++++++ .../generation/GeneratedCodePatterns.java | 34 +-- 3 files changed, 219 insertions(+), 12 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java diff --git a/changelog/README.md b/changelog/README.md index 71faeae614e..0a50efb1f91 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [bug] JAVA-2711: Fix handling of UDT keys in the mapper - [improvement] JAVA-2631: Add getIndex() shortcuts to TableMetadata - [improvement] JAVA-2679: Add port information to QueryTrace and TraceEvent - [improvement] JAVA-2184: Refactor 
DescribeIT to improve maintainability diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java new file mode 100644 index 00000000000..a826b847520 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java @@ -0,0 +1,196 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.List; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class UdtKeyIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static RecordDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = SESSION_RULE.session(); + for (String ddlQuery : + ImmutableList.of( + "CREATE TYPE key (value int)", + "CREATE TABLE record(key frozen PRIMARY KEY, value int)", + "CREATE TABLE multi_key_record(key frozen> PRIMARY KEY, value int)")) { + session.execute( + SimpleStatement.builder(ddlQuery) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + + TestMapper mapper = new UdtKeyIT_TestMapperBuilder(SESSION_RULE.session()).build(); + dao = mapper.recordDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_save_and_retrieve_entity_with_udt_pk() { + // Given + Key key = new Key(1); + dao.save(new Record(key, 42)); + + // When + Record record = dao.findByKey(key); + + // Then + assertThat(record.getValue()).isEqualTo(42); + } + + @Test + public void should_save_and_retrieve_entity_with_udt_collection_pk() { + // Given + List key = ImmutableList.of(new Key(1), new Key(2)); + dao.saveMulti(new MultiKeyRecord(key, 42)); + + // When + MultiKeyRecord record = dao.findMultiByKey(key); + + // Then + assertThat(record.getValue()).isEqualTo(42); + } + + @Entity + public static class Key { + private int value; + + public Key() {} + + public Key(int value) { + this.value = value; + } + + public int getValue() { + return value; + } + + public 
void setValue(int value) { + this.value = value; + } + } + + @Entity + public static class Record { + @PartitionKey private Key key; + private int value; + + public Record() {} + + public Record(Key key, int value) { + this.key = key; + this.value = value; + } + + public Key getKey() { + return key; + } + + public void setKey(Key key) { + this.key = key; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + } + + @Entity + public static class MultiKeyRecord { + @PartitionKey private List key; + private int value; + + public MultiKeyRecord() {} + + public MultiKeyRecord(List key, int value) { + this.key = key; + this.value = value; + } + + public List getKey() { + return key; + } + + public void setKey(List key) { + this.key = key; + } + + public int getValue() { + return value; + } + + public void setValue(int value) { + this.value = value; + } + } + + @Dao + interface RecordDao { + @Select + Record findByKey(Key key); + + @Insert + void save(Record record); + + @Select + MultiKeyRecord findMultiByKey(List key); + + @Insert + void saveMulti(MultiKeyRecord record); + } + + @Mapper + interface TestMapper { + @DaoFactory + RecordDao recordDao(@DaoKeyspace CqlIdentifier keyspace); + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java index c148b8de580..b50e954dcc4 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java @@ -296,12 +296,17 @@ public static void setValue( udtValueName, NullSavingStrategy.class, NullSavingStrategy.DO_NOT_SET) - .addStatement("$1L = $1L.setUdtValue($2L, 
$3L)", targetName, cqlName, udtValueName) - .nextControlFlow( - "else if ($L == $T.$L)", - NULL_SAVING_STRATEGY, - NullSavingStrategy.class, - NullSavingStrategy.SET_TO_NULL) + .addStatement("$1L = $1L.setUdtValue($2L, $3L)", targetName, cqlName, udtValueName); + if (useNullSavingStrategy) { + methodBuilder.nextControlFlow( + "else if ($L == $T.$L)", + NULL_SAVING_STRATEGY, + NullSavingStrategy.class, + NullSavingStrategy.SET_TO_NULL); + } else { + methodBuilder.nextControlFlow("else"); + } + methodBuilder .addStatement("$1L = $1L.setUdtValue($2L, null)", targetName, cqlName) .endControlFlow(); } else { @@ -333,12 +338,17 @@ public static void setValue( targetName, cqlName, rawCollectionName, - enclosingClass.addGenericTypeConstant(type.asRawTypeName())) - .nextControlFlow( - "else if ($L == $T.$L)", - NULL_SAVING_STRATEGY, - NullSavingStrategy.class, - NullSavingStrategy.SET_TO_NULL) + enclosingClass.addGenericTypeConstant(type.asRawTypeName())); + if (useNullSavingStrategy) { + methodBuilder.nextControlFlow( + "else if ($L == $T.$L)", + NULL_SAVING_STRATEGY, + NullSavingStrategy.class, + NullSavingStrategy.SET_TO_NULL); + } else { + methodBuilder.nextControlFlow("else"); + } + methodBuilder .addStatement( "$1L = $1L.set($2L, null, $3L)", targetName, From 3b386ee64c59c0fd32a5bcb10d4a2dc22378ce1c Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 3 Apr 2020 17:23:41 -0700 Subject: [PATCH 420/979] Fix dependency plugin configuration Only redirect to a file for the `generate-dependency-list` execution, so that command line invocations (e.g. `mvn dependency:tree`) still use the standard output as expected. 
--- core/pom.xml | 12 ++++++------ mapper-processor/pom.xml | 12 ++++++------ mapper-runtime/pom.xml | 12 ++++++------ query-builder/pom.xml | 12 ++++++------ 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/core/pom.xml b/core/pom.xml index 6e072bbe170..c041235cf31 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -311,14 +311,14 @@ list generate-resources + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/deps.txt + - - runtime - true - com.datastax.cassandra,com.datastax.dse - ${project.build.outputDirectory}/com/datastax/dse/driver/internal/deps.txt - diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 763b74176e9..5f26f094aa7 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -168,14 +168,14 @@ list generate-resources + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/processor/deps.txt + - - runtime - true - com.datastax.cassandra,com.datastax.dse - ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/processor/deps.txt - diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 13a0f8c804d..c183b13eb22 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -176,14 +176,14 @@ list generate-resources + + runtime + true + com.datastax.cassandra,com.datastax.dse + ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/deps.txt + - - runtime - true - com.datastax.cassandra,com.datastax.dse - ${project.build.outputDirectory}/com/datastax/dse/driver/internal/mapper/deps.txt - diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 93bab0ba16c..6d4a450c948 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -115,14 +115,14 @@ list generate-resources + + runtime + true + com.datastax.cassandra,com.datastax.dse + 
${project.build.outputDirectory}/com/datastax/dse/driver/internal/querybuilder/deps.txt + - - runtime - true - com.datastax.cassandra,com.datastax.dse - ${project.build.outputDirectory}/com/datastax/dse/driver/internal/querybuilder/deps.txt - From a33f23cf3fe6a8cbc357a8bad3366c04a1808856 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 3 Apr 2020 17:34:44 -0700 Subject: [PATCH 421/979] Fix install_snapshots.sh Since the introduction of the BOM (8f8e2e7), the native protocol version is no longer expressed as a property in the parent POM. Use the dependency plugin instead, this is more robust than a simple grep. --- install-snapshots.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/install-snapshots.sh b/install-snapshots.sh index 76b0ad15166..4f5d79665ab 100755 --- a/install-snapshots.sh +++ b/install-snapshots.sh @@ -17,7 +17,9 @@ install_snapshot() } } -grep -q '.*-SNAPSHOT' pom.xml +mvn --projects core dependency:list -DincludeArtifactIds=native-protocol | \ + tee /dev/tty | \ + grep -q native-protocol.*SNAPSHOT if [ $? 
-eq 0 ] ; then install_snapshot https://github.com/datastax/native-protocol.git native-protocol fi From 09364c26f1d47193d6f492657d67e191708fc57d Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 30 Mar 2020 11:08:17 -0700 Subject: [PATCH 422/979] JAVA-2054: Add now_in_seconds to protocol v5 query messages --- bom/pom.xml | 2 +- changelog/README.md | 1 + .../core/insights/InsightsClient.java | 3 +- .../driver/api/core/cql/BatchStatement.java | 9 +- .../api/core/cql/BatchStatementBuilder.java | 3 +- .../api/core/cql/BoundStatementBuilder.java | 3 +- .../driver/api/core/cql/SimpleStatement.java | 9 +- .../api/core/cql/SimpleStatementBuilder.java | 3 +- .../oss/driver/api/core/cql/Statement.java | 36 ++++++ .../driver/api/core/cql/StatementBuilder.java | 8 ++ .../driver/api/core/cql/SyncCqlSession.java | 2 + .../internal/core/DefaultProtocolFeature.java | 7 + .../core/DefaultProtocolVersionRegistry.java | 4 + .../adminrequest/AdminRequestHandler.java | 4 +- .../driver/internal/core/cql/Conversions.java | 15 ++- .../core/cql/DefaultBatchStatement.java | 95 +++++++++++--- .../core/cql/DefaultBoundStatement.java | 85 +++++++++--- .../core/cql/DefaultPreparedStatement.java | 4 +- .../core/cql/DefaultSimpleStatement.java | 99 ++++++++++---- .../core/cql/CqlRequestHandlerTest.java | 2 + .../internal/core/cql/StatementSizeTest.java | 3 +- .../oss/driver/core/cql/NowInSecondsIT.java | 122 ++++++++++++++++++ 22 files changed, 442 insertions(+), 77 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java diff --git a/bom/pom.xml b/bom/pom.xml index 6ea619bedad..7d8c09a02e3 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -61,7 +61,7 @@ com.datastax.oss native-protocol - 1.4.9 + 1.4.10-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index 0a50efb1f91..a7dbbf6e2ce 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [new feature] JAVA-2054: Add 
now_in_seconds to protocol v5 query messages - [bug] JAVA-2711: Fix handling of UDT keys in the mapper - [improvement] JAVA-2631: Add getIndex() shortcuts to TableMetadata - [improvement] JAVA-2679: Add port information to QueryTrace and TraceEvent diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 43318ef1969..3f02e2368a3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -238,7 +238,8 @@ private QueryOptions createQueryOptionsWithJson(String json) { QueryOptions.DEFAULT.pagingState, QueryOptions.DEFAULT.serialConsistency, QueryOptions.DEFAULT.defaultTimestamp, - QueryOptions.DEFAULT.keyspace); + QueryOptions.DEFAULT.keyspace, + QueryOptions.DEFAULT.nowInSeconds); } private boolean shouldSendEvent() { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java index 81bdb23db47..0774e5717b2 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java @@ -65,7 +65,8 @@ static BatchStatement newInstance(@NonNull BatchType batchType) { null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** @@ -95,7 +96,8 @@ static BatchStatement newInstance( null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** @@ -125,7 +127,8 @@ static BatchStatement newInstance( null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java index 373be6ac57b..c05c5d2f0cf 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatementBuilder.java @@ -152,7 +152,8 @@ public BatchStatement build() { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } public int getStatementsCount() { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java index 6ccfedb1608..e0634f78248 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java @@ -174,6 +174,7 @@ public BoundStatement build() { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java index cf7bec7fa7a..b93d8a7dbfa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java @@ -80,7 +80,8 @@ static SimpleStatement newInstance(@NonNull String cqlQuery) { null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** @@ -113,7 +114,8 @@ static SimpleStatement newInstance( null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** @@ -143,7 +145,8 @@ static SimpleStatement newInstance( null, null, null, - null); + null, + Statement.NO_NOW_IN_SECONDS); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java index e66a6711041..78ec90d6722 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatementBuilder.java @@ -182,6 +182,7 @@ public SimpleStatement build() { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index c06b24e1982..ce25f185838 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.api.core.time.TimestampGenerator; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.util.RoutingKey; +import com.datastax.oss.protocol.internal.request.query.QueryOptions; import edu.umd.cs.findbugs.annotations.CheckReturnValue; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -67,6 +68,13 @@ public interface Statement> extends Request { GenericType> ASYNC = new GenericType>() {}; + /** + * A special value for {@link #getNowInSeconds()} that means "no value". + * + *

      It is equal to {@link Integer#MIN_VALUE}. + */ + int NO_NOW_IN_SECONDS = QueryOptions.NO_NOW_IN_SECONDS; + /** * Sets the name of the execution profile that will be used for this statement. * @@ -348,6 +356,34 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { /** Whether tracing information should be recorded for this statement. */ boolean isTracing(); + /** + * A custom "now in seconds" to use when applying the request (for testing purposes). + * + *

      This method's default implementation returns {@link #NO_NOW_IN_SECONDS}. The only reason it + * exists is to preserve binary compatibility. Internally, the driver overrides it to return the + * value that was set programmatically (if any). + * + * @see #NO_NOW_IN_SECONDS + */ + default int getNowInSeconds() { + return NO_NOW_IN_SECONDS; + } + + /** + * Sets the "now in seconds" to use when applying the request (for testing purposes). + * + *

      This method's default implementation returns the statement unchanged. The only reason it + * exists is to preserve binary compatibility. Internally, the driver overrides it to record the + * new value. + * + * @see #NO_NOW_IN_SECONDS + */ + @NonNull + @SuppressWarnings("unchecked") + default SelfT setNowInSeconds(int nowInSeconds) { + return (SelfT) this; + } + /** * Calculates the approximate size in bytes that the statement will have when encoded. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 362a63b51d7..15f1608ec05 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -58,6 +58,7 @@ public abstract class StatementBuilder< @Nullable protected ConsistencyLevel serialConsistencyLevel; @Nullable protected Duration timeout; @Nullable protected Node node; + protected int nowInSeconds = Statement.NO_NOW_IN_SECONDS; protected StatementBuilder() { // nothing to do @@ -83,6 +84,7 @@ protected StatementBuilder(StatementT template) { this.serialConsistencyLevel = template.getSerialConsistencyLevel(); this.timeout = template.getTimeout(); this.node = template.getNode(); + this.nowInSeconds = template.getNowInSeconds(); } /** @see Statement#setExecutionProfileName(String) */ @@ -227,6 +229,12 @@ public SelfT setNode(@Nullable Node node) { return self; } + /** @see Statement#setNowInSeconds(int) */ + public SelfT setNowInSeconds(int nowInSeconds) { + this.nowInSeconds = nowInSeconds; + return self; + } + @NonNull protected Map buildCustomPayload() { return (customPayloadBuilder == null) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java index 3280f3947ef..e17ccd5ce1c 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java @@ -116,6 +116,8 @@ default ResultSet execute(@NonNull String query) { * set to {@link Long#MIN_VALUE}, meaning that the value will be assigned by the * session's timestamp generator. *

    • {@link Statement#getNode() boundStatement.getNode()} will always be {@code null}. + *
    • {@link Statement#getNowInSeconds() boundStatement.getNowInSeconds()} will always + * be equal to {@link Statement#NO_NOW_IN_SECONDS}. * * * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java index 7e324f93ee4..78c0338075b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java @@ -49,5 +49,12 @@ public enum DefaultProtocolFeature implements ProtocolFeature { * @see CASSANDRA-7523 */ DATE_TYPE, + + /** + * The ability to set a custom "now" time on statements (for testing purposes). + * + * @see CASSANDRA-14664 + */ + NOW_IN_SECONDS, ; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java index f5fff3ecdb2..63381653a73 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java @@ -240,6 +240,10 @@ public boolean supports(ProtocolVersion version, ProtocolFeature feature) { return (DefaultProtocolVersion.V5.getCode() <= code && code < DseProtocolVersion.DSE_V1.getCode()) || DseProtocolVersion.DSE_V2.getCode() <= code; + } else if (DefaultProtocolFeature.NOW_IN_SECONDS.equals(feature)) { + // OSS only, V5+ + return DefaultProtocolVersion.V5.getCode() <= code + && code < DseProtocolVersion.DSE_V1.getCode(); } else if (DseProtocolFeature.CONTINUOUS_PAGING.equals(feature)) { // All DSE versions return DseProtocolVersion.DSE_V1.getCode() <= code; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java index d59ba9339b9..46475384eff 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.connection.BusyConnectionException; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; @@ -232,7 +233,8 @@ private static QueryOptions buildQueryOptions( pagingState, ProtocolConstants.ConsistencyLevel.SERIAL, Long.MIN_VALUE, - null); + null, + Statement.NO_NOW_IN_SECONDS); } private static Map serialize( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index e0f470e9e54..81b04fb6452 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -142,6 +142,12 @@ public static Message toMessage( ProtocolVersion protocolVersion = context.getProtocolVersion(); ProtocolVersionRegistry protocolVersionRegistry = context.getProtocolVersionRegistry(); CqlIdentifier keyspace = statement.getKeyspace(); + int nowInSeconds = statement.getNowInSeconds(); + if (nowInSeconds != Statement.NO_NOW_IN_SECONDS + && !protocolVersionRegistry.supports( + protocolVersion, DefaultProtocolFeature.NOW_IN_SECONDS)) { + throw new IllegalArgumentException("Can't use nowInSeconds with protocol " + protocolVersion); + } if (statement instanceof 
SimpleStatement) { SimpleStatement simpleStatement = (SimpleStatement) statement; List positionalValues = simpleStatement.getPositionalValues(); @@ -166,7 +172,8 @@ public static Message toMessage( statement.getPagingState(), serialConsistencyCode, timestamp, - (keyspace == null) ? null : keyspace.asInternal()); + (keyspace == null) ? null : keyspace.asInternal(), + nowInSeconds); return new Query(simpleStatement.getQuery(), queryOptions); } else if (statement instanceof BoundStatement) { BoundStatement boundStatement = (BoundStatement) statement; @@ -186,7 +193,8 @@ public static Message toMessage( statement.getPagingState(), serialConsistencyCode, timestamp, - null); + null, + nowInSeconds); PreparedStatement preparedStatement = boundStatement.getPreparedStatement(); ByteBuffer id = preparedStatement.getId(); ByteBuffer resultMetadataId = preparedStatement.getResultMetadataId(); @@ -236,7 +244,8 @@ public static Message toMessage( consistencyCode, serialConsistencyCode, timestamp, - (keyspace == null) ? null : keyspace.asInternal()); + (keyspace == null) ? 
null : keyspace.asInternal(), + nowInSeconds); } else { throw new IllegalArgumentException( "Unsupported statement type: " + statement.getClass().getName()); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java index ad9fdbc0913..4d6ad45b5b7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java @@ -56,6 +56,7 @@ public class DefaultBatchStatement implements BatchStatement { private final ConsistencyLevel serialConsistencyLevel; private final Duration timeout; private final Node node; + private final int nowInSeconds; public DefaultBatchStatement( BatchType batchType, @@ -75,7 +76,8 @@ public DefaultBatchStatement( ConsistencyLevel consistencyLevel, ConsistencyLevel serialConsistencyLevel, Duration timeout, - Node node) { + Node node, + int nowInSeconds) { this.batchType = batchType; this.statements = ImmutableList.copyOf(statements); this.executionProfileName = executionProfileName; @@ -94,6 +96,7 @@ public DefaultBatchStatement( this.serialConsistencyLevel = serialConsistencyLevel; this.timeout = timeout; this.node = node; + this.nowInSeconds = nowInSeconds; } @NonNull @@ -123,7 +126,8 @@ public BatchStatement setBatchType(@NonNull BatchType newBatchType) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -147,7 +151,8 @@ public BatchStatement setKeyspace(@Nullable CqlIdentifier newKeyspace) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -175,7 +180,8 @@ public BatchStatement add(@NonNull BatchableStatement statement) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } } @@ -207,7 +213,8 @@ public BatchStatement addAll(@NonNull Iterable> 
consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } } @@ -237,7 +244,8 @@ public BatchStatement clear() { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -272,7 +280,8 @@ public BatchStatement setPagingState(ByteBuffer newPagingState) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -301,7 +310,8 @@ public BatchStatement setPageSize(int newPageSize) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -331,7 +341,8 @@ public BatchStatement setConsistencyLevel(@Nullable ConsistencyLevel newConsiste newConsistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -362,7 +373,8 @@ public BatchStatement setSerialConsistencyLevel( consistencyLevel, newSerialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -391,7 +403,8 @@ public BatchStatement setExecutionProfileName(@Nullable String newConfigProfileN consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -420,7 +433,8 @@ public DefaultBatchStatement setExecutionProfile(@Nullable DriverExecutionProfil consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -473,7 +487,8 @@ public BatchStatement setRoutingKeyspace(CqlIdentifier newRoutingKeyspace) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -497,7 +512,8 @@ public BatchStatement setNode(@Nullable Node newNode) { consistencyLevel, serialConsistencyLevel, timeout, - newNode); + newNode, + nowInSeconds); } @Nullable @@ -542,7 +558,8 @@ public BatchStatement setRoutingKey(ByteBuffer newRoutingKey) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -581,7 +598,8 @@ public BatchStatement setRoutingToken(Token newRoutingToken) { 
consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -611,7 +629,8 @@ public DefaultBatchStatement setCustomPayload(@NonNull Map n consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -646,7 +665,8 @@ public DefaultBatchStatement setIdempotent(Boolean newIdempotence) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -675,7 +695,8 @@ public BatchStatement setTracing(boolean newTracing) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -704,7 +725,8 @@ public BatchStatement setQueryTimestamp(long newTimestamp) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -728,6 +750,37 @@ public BatchStatement setTimeout(@Nullable Duration newTimeout) { consistencyLevel, serialConsistencyLevel, newTimeout, - node); + node, + nowInSeconds); + } + + @Override + public int getNowInSeconds() { + return nowInSeconds; + } + + @NonNull + @Override + public BatchStatement setNowInSeconds(int newNowInSeconds) { + return new DefaultBatchStatement( + batchType, + statements, + executionProfileName, + executionProfile, + keyspace, + routingKeyspace, + routingKey, + routingToken, + customPayload, + idempotent, + tracing, + timestamp, + pagingState, + pageSize, + consistencyLevel, + serialConsistencyLevel, + timeout, + node, + newNowInSeconds); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java index 66bd1dd87ba..35c4aa12a8b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java @@ -59,6 +59,7 @@ public class DefaultBoundStatement implements BoundStatement { private 
final CodecRegistry codecRegistry; private final ProtocolVersion protocolVersion; private final Node node; + private final int nowInSeconds; public DefaultBoundStatement( PreparedStatement preparedStatement, @@ -80,7 +81,8 @@ public DefaultBoundStatement( Duration timeout, CodecRegistry codecRegistry, ProtocolVersion protocolVersion, - Node node) { + Node node, + int nowInSeconds) { this.preparedStatement = preparedStatement; this.variableDefinitions = variableDefinitions; this.values = values; @@ -101,6 +103,7 @@ public DefaultBoundStatement( this.codecRegistry = codecRegistry; this.protocolVersion = protocolVersion; this.node = node; + this.nowInSeconds = nowInSeconds; } @Override @@ -174,7 +177,8 @@ public BoundStatement setBytesUnsafe(int i, ByteBuffer v) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @NonNull @@ -217,7 +221,8 @@ public BoundStatement setExecutionProfileName(@Nullable String newConfigProfileN timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -248,7 +253,8 @@ public BoundStatement setExecutionProfile(@Nullable DriverExecutionProfile newPr timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -286,7 +292,8 @@ public BoundStatement setRoutingKeyspace(@Nullable CqlIdentifier newRoutingKeysp timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @NonNull @@ -312,7 +319,8 @@ public BoundStatement setNode(@Nullable Node newNode) { timeout, codecRegistry, protocolVersion, - newNode); + newNode, + nowInSeconds); } @Nullable @@ -370,7 +378,8 @@ public BoundStatement setRoutingKey(@Nullable ByteBuffer newRoutingKey) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -401,7 +410,8 @@ public BoundStatement setRoutingToken(@Nullable Token newRoutingToken) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @NonNull @@ -433,7 +443,8 @@ public BoundStatement 
setCustomPayload(@NonNull Map newCusto timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -464,7 +475,8 @@ public BoundStatement setIdempotent(@Nullable Boolean newIdempotence) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -495,7 +507,8 @@ public BoundStatement setTracing(boolean newTracing) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -526,7 +539,8 @@ public BoundStatement setQueryTimestamp(long newTimestamp) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Nullable @@ -558,7 +572,8 @@ public BoundStatement setTimeout(@Nullable Duration newTimeout) { newTimeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -589,7 +604,8 @@ public BoundStatement setPagingState(@Nullable ByteBuffer newPagingState) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Override @@ -620,7 +636,8 @@ public BoundStatement setPageSize(int newPageSize) { timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Nullable @@ -652,7 +669,8 @@ public BoundStatement setConsistencyLevel(@Nullable ConsistencyLevel newConsiste timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); } @Nullable @@ -685,6 +703,39 @@ public BoundStatement setSerialConsistencyLevel( timeout, codecRegistry, protocolVersion, - node); + node, + nowInSeconds); + } + + @Override + public int getNowInSeconds() { + return nowInSeconds; + } + + @NonNull + @Override + public BoundStatement setNowInSeconds(int newNowInSeconds) { + return new DefaultBoundStatement( + preparedStatement, + variableDefinitions, + values, + executionProfileName, + executionProfile, + routingKeyspace, + routingKey, + routingToken, + customPayload, + idempotent, + tracing, + timestamp, + pagingState, + pageSize, + consistencyLevel, + serialConsistencyLevel, + timeout, + codecRegistry, + 
protocolVersion, + node, + newNowInSeconds); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java index 8dfadf9f5a3..1c0c31f4ee2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.data.ValuesHelper; @@ -173,7 +174,8 @@ public BoundStatement bind(@NonNull Object... 
values) { timeoutForBoundStatements, codecRegistry, protocolVersion, - null); + null, + Statement.NO_NOW_IN_SECONDS); } @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java index acad2e11051..604fa5c0051 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java @@ -55,6 +55,7 @@ public class DefaultSimpleStatement implements SimpleStatement { private final ConsistencyLevel serialConsistencyLevel; private final Duration timeout; private final Node node; + private final int nowInSeconds; /** @see SimpleStatement#builder(String) */ public DefaultSimpleStatement( @@ -76,7 +77,8 @@ public DefaultSimpleStatement( ConsistencyLevel consistencyLevel, ConsistencyLevel serialConsistencyLevel, Duration timeout, - Node node) { + Node node, + int nowInSeconds) { if (!positionalValues.isEmpty() && !namedValues.isEmpty()) { throw new IllegalArgumentException("Can't have both positional and named values"); } @@ -99,6 +101,7 @@ public DefaultSimpleStatement( this.serialConsistencyLevel = serialConsistencyLevel; this.timeout = timeout; this.node = node; + this.nowInSeconds = nowInSeconds; } @NonNull @@ -129,7 +132,8 @@ public SimpleStatement setQuery(@NonNull String newQuery) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -160,7 +164,8 @@ public SimpleStatement setPositionalValues(@NonNull List newPositionalVa consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -191,7 +196,8 @@ public SimpleStatement setNamedValuesWithIds(@NonNull Map consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -222,7 +228,8 @@ public SimpleStatement 
setExecutionProfileName(@Nullable String newConfigProfile consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -253,7 +260,8 @@ public SimpleStatement setExecutionProfile(@Nullable DriverExecutionProfile newP consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -284,7 +292,8 @@ public SimpleStatement setKeyspace(@Nullable CqlIdentifier newKeyspace) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -315,7 +324,8 @@ public SimpleStatement setRoutingKeyspace(@Nullable CqlIdentifier newRoutingKeys consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -340,7 +350,8 @@ public SimpleStatement setNode(@Nullable Node newNode) { consistencyLevel, serialConsistencyLevel, timeout, - newNode); + newNode, + nowInSeconds); } @Nullable @@ -377,7 +388,8 @@ public SimpleStatement setRoutingKey(@Nullable ByteBuffer newRoutingKey) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -408,7 +420,8 @@ public SimpleStatement setRoutingToken(@Nullable Token newRoutingToken) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @NonNull @@ -439,7 +452,8 @@ public SimpleStatement setCustomPayload(@NonNull Map newCust consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -470,7 +484,8 @@ public SimpleStatement setIdempotent(@Nullable Boolean newIdempotence) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -500,7 +515,8 @@ public SimpleStatement setTracing(boolean newTracing) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -530,7 +546,8 @@ public SimpleStatement setQueryTimestamp(long newTimestamp) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + 
nowInSeconds); } @Nullable @@ -561,7 +578,8 @@ public SimpleStatement setTimeout(@Nullable Duration newTimeout) { consistencyLevel, serialConsistencyLevel, newTimeout, - node); + node, + nowInSeconds); } @Nullable @@ -592,7 +610,8 @@ public SimpleStatement setPagingState(@Nullable ByteBuffer newPagingState) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Override @@ -622,7 +641,8 @@ public SimpleStatement setPageSize(int newPageSize) { consistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -653,7 +673,8 @@ public SimpleStatement setConsistencyLevel(@Nullable ConsistencyLevel newConsist newConsistencyLevel, serialConsistencyLevel, timeout, - node); + node, + nowInSeconds); } @Nullable @@ -685,7 +706,39 @@ public SimpleStatement setSerialConsistencyLevel( consistencyLevel, newSerialConsistencyLevel, timeout, - node); + node, + nowInSeconds); + } + + @Override + public int getNowInSeconds() { + return nowInSeconds; + } + + @NonNull + @Override + public SimpleStatement setNowInSeconds(int newNowInSeconds) { + return new DefaultSimpleStatement( + query, + positionalValues, + namedValues, + executionProfileName, + executionProfile, + keyspace, + routingKeyspace, + routingKey, + routingToken, + customPayload, + idempotent, + tracing, + timestamp, + pagingState, + pageSize, + consistencyLevel, + serialConsistencyLevel, + timeout, + node, + newNowInSeconds); } public static Map wrapKeys(Map namedValues) { @@ -721,7 +774,8 @@ public boolean equals(Object other) { && Objects.equals(this.consistencyLevel, that.consistencyLevel) && Objects.equals(this.serialConsistencyLevel, that.serialConsistencyLevel) && Objects.equals(this.timeout, that.timeout) - && Objects.equals(this.node, that.node); + && Objects.equals(this.node, that.node) + && this.nowInSeconds == that.nowInSeconds; } else { return false; } @@ -748,6 +802,7 @@ public int hashCode() { consistencyLevel, serialConsistencyLevel, 
timeout, - node); + node, + nowInSeconds); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java index ca1ff3d3639..5f41fc42f62 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.internal.core.session.RepreparePayload; import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; import com.datastax.oss.protocol.internal.request.Prepare; @@ -172,6 +173,7 @@ public void should_reprepare_on_the_fly_if_not_prepared() throws InterruptedExce BoundStatement boundStatement = mock(BoundStatement.class); when(boundStatement.getPreparedStatement()).thenReturn(preparedStatement); when(boundStatement.getValues()).thenReturn(Collections.emptyList()); + when(boundStatement.getNowInSeconds()).thenReturn(Statement.NO_NOW_IN_SECONDS); RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder(); // For the first attempt that gets the UNPREPARED response diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java index e1f80bfd61d..64cc76b0e79 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/StatementSizeTest.java @@ -284,6 +284,7 @@ private BoundStatement newBoundStatement( null, CodecRegistry.DEFAULT, DefaultProtocolVersion.V5, - null); + 
null, + Statement.NO_NOW_IN_SECONDS); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java new file mode 100644 index 00000000000..9bb39eebd86 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BatchType; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.function.Function; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@CassandraRequirement(min = "4.0") +public class NowInSecondsIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = buildSessionRule(); + + private static SessionRule buildSessionRule() { + // Reminder to revisit the test when V5 comes out of beta: remove the custom config loader and + // inline this method. + assertThat(DefaultProtocolVersion.V5.isBeta()) + .as("This test can be simplified now that protocol v5 is stable") + .isTrue(); + return SessionRule.builder(CCM_RULE) + .withConfigLoader( + DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") + .build()) + .build(); + } + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Before + public void setup() { + for (String statement : + ImmutableList.of( + "DROP TABLE IF EXISTS test", "CREATE TABLE test(k int PRIMARY KEY, v int)")) { + SESSION_RULE + .session() + .execute( + SimpleStatement.newInstance(statement) + .setExecutionProfile(SESSION_RULE.slowProfile())); + } + } + + @Test + public void should_use_now_in_seconds_with_simple_statement() { + should_use_now_in_seconds(SimpleStatement::newInstance); + } + + @Test + public void should_use_now_in_seconds_with_bound_statement() { + should_use_now_in_seconds( + queryString -> { + PreparedStatement preparedStatement = SESSION_RULE.session().prepare(queryString); + return preparedStatement.bind(); + }); + } + + @Test + public void should_use_now_in_seconds_with_batch_statement() { + should_use_now_in_seconds( + queryString -> + 
BatchStatement.newInstance(BatchType.LOGGED, SimpleStatement.newInstance(queryString))); + } + + private > void should_use_now_in_seconds( + Function buildWriteStatement) { + CqlSession session = SESSION_RULE.session(); + + // Given + StatementT writeStatement = + buildWriteStatement.apply("INSERT INTO test (k,v) VALUES (1,1) USING TTL 20"); + SimpleStatement readStatement = + SimpleStatement.newInstance("SELECT TTL(v) FROM test WHERE k = 1"); + + // When + // insert at t = 0 with TTL 20 + session.execute(writeStatement.setNowInSeconds(0)); + // read TTL at t = 10 + ResultSet rs = session.execute(readStatement.setNowInSeconds(10)); + int remainingTtl = rs.one().getInt(0); + + // Then + assertThat(remainingTtl).isEqualTo(10); + } +} From ffc49562eeb3b3b583719279c551e7f848e94724 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 31 Mar 2020 17:44:45 -0700 Subject: [PATCH 423/979] Extract Statement.NO_DEFAULT_TIMESTAMP constant --- .../api/core/graph/BatchGraphStatement.java | 5 +++-- .../api/core/graph/FluentGraphStatement.java | 3 ++- .../core/graph/GraphStatementBuilderBase.java | 3 ++- .../api/core/graph/ScriptGraphStatement.java | 3 ++- .../internal/core/cql/DseConversions.java | 2 +- .../core/graph/BytecodeGraphStatement.java | 3 ++- .../internal/core/graph/GraphConversions.java | 5 +++-- .../oss/driver/api/core/cql/BatchStatement.java | 8 ++++---- .../oss/driver/api/core/cql/BoundStatement.java | 2 +- .../driver/api/core/cql/SimpleStatement.java | 8 ++++---- .../oss/driver/api/core/cql/Statement.java | 17 +++++++++++++---- .../driver/api/core/cql/StatementBuilder.java | 2 +- .../oss/driver/api/core/cql/SyncCqlSession.java | 4 ++-- .../api/core/time/TimestampGenerator.java | 7 +++++-- .../core/adminrequest/AdminRequestHandler.java | 2 +- .../driver/internal/core/cql/Conversions.java | 2 +- .../core/cql/DefaultPreparedStatement.java | 4 ++-- .../core/time/ServerSideTimestampGenerator.java | 3 ++- .../graph/GraphStatementBuilderBaseTest.java | 3 ++- 
.../core/cql/RequestHandlerTestHarness.java | 3 ++- .../driver/core/cql/BoundStatementCcmIT.java | 3 ++- 21 files changed, 57 insertions(+), 35 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java index 1757212aa71..8d48c1ff42a 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.dse.driver.internal.core.graph.DefaultBatchGraphStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; @@ -67,7 +68,7 @@ static BatchGraphStatement newInstance() { null, null, null, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, null, Collections.emptyMap(), @@ -87,7 +88,7 @@ static BatchGraphStatement newInstance(@NonNull Iterable travers null, null, null, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, null, Collections.emptyMap(), diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java index 3c480ff826c..4f6a1be16be 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.dse.driver.internal.core.graph.DefaultFluentGraphStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal; @@ -50,7 +51,7 @@ static FluentGraphStatement newInstance(@NonNull GraphTraversal traversal) null, null, null, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, null, Collections.emptyMap(), diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java index 9892e673d23..062e917cbcf 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphStatementBuilderBase.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; @@ -37,7 +38,7 @@ public abstract class GraphStatementBuilderBase< protected Boolean isIdempotent; protected Duration timeout; protected Node node; - protected long timestamp = Long.MIN_VALUE; + protected long timestamp = Statement.NO_DEFAULT_TIMESTAMP; protected DriverExecutionProfile executionProfile; protected String executionProfileName; private NullAllowingImmutableMap.Builder customPayloadBuilder; diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java index 7731b21859b..412cf0e6aeb 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.graph; import 
com.datastax.dse.driver.internal.core.graph.DefaultScriptGraphStatement; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -50,7 +51,7 @@ static ScriptGraphStatement newInstance(@NonNull String script) { null, null, null, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, null, Collections.emptyMap(), diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java index 32fa823ca2e..3a1eeda1d86 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/DseConversions.java @@ -72,7 +72,7 @@ public static Message toContinuousPagingMessage( config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) : serialConsistency.getProtocolCode(); long timestamp = statement.getQueryTimestamp(); - if (timestamp == Long.MIN_VALUE) { + if (timestamp == Statement.NO_DEFAULT_TIMESTAMP) { timestamp = context.getTimestampGenerator().next(); } CodecRegistry codecRegistry = context.getCodecRegistry(); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java index 8cad9f6d85c..591b791c7a5 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/BytecodeGraphStatement.java @@ -18,6 +18,7 @@ import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import 
com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import java.nio.ByteBuffer; import java.time.Duration; @@ -45,7 +46,7 @@ public BytecodeGraphStatement( null, null, null, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, executionProfile, executionProfileName, Collections.emptyMap(), diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 6637804caaa..7af7160baa1 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.cql.Conversions; @@ -115,7 +116,7 @@ public static Message createContinuousMessageFromGraphStatement( .getProtocolCode(); long timestamp = statement.getTimestamp(); - if (timestamp == Long.MIN_VALUE) { + if (timestamp == Statement.NO_DEFAULT_TIMESTAMP) { timestamp = context.getTimestampGenerator().next(); } @@ -183,7 +184,7 @@ static Message createMessageFromGraphStatement( .getProtocolCode(); long timestamp = statement.getTimestamp(); - if (timestamp == Long.MIN_VALUE) { + if (timestamp == Statement.NO_DEFAULT_TIMESTAMP) { timestamp = context.getTimestampGenerator().next(); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java index 0774e5717b2..733b959d9a0 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java @@ -59,7 +59,7 @@ static BatchStatement newInstance(@NonNull BatchType batchType) { Collections.emptyMap(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -90,7 +90,7 @@ static BatchStatement newInstance( Collections.emptyMap(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -121,7 +121,7 @@ static BatchStatement newInstance( Collections.emptyMap(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -260,7 +260,7 @@ default int computeSizeInBytes(@NonNull DriverContext context) { // timestamp if (!(context.getTimestampGenerator() instanceof ServerSideTimestampGenerator) - || getQueryTimestamp() != Long.MIN_VALUE) { + || getQueryTimestamp() != Statement.NO_DEFAULT_TIMESTAMP) { size += PrimitiveSizes.LONG; } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatement.java index 073ec3a97ca..480911fde12 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatement.java @@ -86,7 +86,7 @@ default int computeSizeInBytes(@NonNull DriverContext context) { // timestamp if (!(context.getTimestampGenerator() instanceof ServerSideTimestampGenerator) - || getQueryTimestamp() != Long.MIN_VALUE) { + || getQueryTimestamp() != Statement.NO_DEFAULT_TIMESTAMP) { size += PrimitiveSizes.LONG; } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java index b93d8a7dbfa..95841cbdcb4 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java @@ -74,7 +74,7 @@ static SimpleStatement newInstance(@NonNull String cqlQuery) { NullAllowingImmutableMap.of(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -108,7 +108,7 @@ static SimpleStatement newInstance( NullAllowingImmutableMap.of(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -139,7 +139,7 @@ static SimpleStatement newInstance( NullAllowingImmutableMap.of(), null, false, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Integer.MIN_VALUE, null, @@ -300,7 +300,7 @@ default int computeSizeInBytes(@NonNull DriverContext context) { // timestamp if (!(context.getTimestampGenerator() instanceof ServerSideTimestampGenerator) - || getQueryTimestamp() != Long.MIN_VALUE) { + || getQueryTimestamp() != Statement.NO_DEFAULT_TIMESTAMP) { size += PrimitiveSizes.LONG; } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index ce25f185838..2925cdfd11f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -68,6 +68,13 @@ public interface Statement> extends Request { GenericType> ASYNC = new GenericType>() {}; + /** + * A special value for {@link #getQueryTimestamp()} that means "no value". + * + *

      It is equal to {@link Long#MIN_VALUE}. + */ + long NO_DEFAULT_TIMESTAMP = QueryOptions.NO_DEFAULT_TIMESTAMP; + /** * A special value for {@link #getNowInSeconds()} that means "no value". * @@ -225,9 +232,10 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { /** * Returns the query timestamp, in microseconds, to send with the statement. * - *

      If this is equal to {@link Long#MIN_VALUE}, the {@link TimestampGenerator} configured for - * this driver instance will be used to generate a timestamp. + *

      If this is equal to {@link #NO_DEFAULT_TIMESTAMP}, the {@link TimestampGenerator} configured + * for this driver instance will be used to generate a timestamp. * + * @see #NO_DEFAULT_TIMESTAMP * @see TimestampGenerator */ long getQueryTimestamp(); @@ -235,12 +243,13 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { /** * Sets the query timestamp, in microseconds, to send with the statement. * - *

      If this is equal to {@link Long#MIN_VALUE}, the {@link TimestampGenerator} configured for - * this driver instance will be used to generate a timestamp. + *

      If this is equal to {@link #NO_DEFAULT_TIMESTAMP}, the {@link TimestampGenerator} configured + * for this driver instance will be used to generate a timestamp. * *

      All the driver's built-in implementations are immutable, and return a new instance from this * method. However custom implementations may choose to be mutable and return the same instance. * + * @see #NO_DEFAULT_TIMESTAMP * @see TimestampGenerator */ @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 15f1608ec05..1ba110347ec 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -51,7 +51,7 @@ public abstract class StatementBuilder< @Nullable private NullAllowingImmutableMap.Builder customPayloadBuilder; @Nullable protected Boolean idempotent; protected boolean tracing; - protected long timestamp = Long.MIN_VALUE; + protected long timestamp = Statement.NO_DEFAULT_TIMESTAMP; @Nullable protected ByteBuffer pagingState; protected int pageSize = Integer.MIN_VALUE; @Nullable protected ConsistencyLevel consistencyLevel; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java index e17ccd5ce1c..1aaf298d6bc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java @@ -113,8 +113,8 @@ default ResultSet execute(@NonNull String query) { *

    • on the other hand, the following attributes are not propagated: *
        *
      • {@link Statement#getQueryTimestamp() boundStatement.getQueryTimestamp()} will be - * set to {@link Long#MIN_VALUE}, meaning that the value will be assigned by the - * session's timestamp generator. + * set to {@link Statement#NO_DEFAULT_TIMESTAMP}, meaning that the value will be + * assigned by the session's timestamp generator. *
      • {@link Statement#getNode() boundStatement.getNode()} will always be {@code null}. *
      • {@link Statement#getNowInSeconds()} boundStatement.getNowInSeconds()} will always * be equal to {@link Statement#NO_NOW_IN_SECONDS}. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/time/TimestampGenerator.java b/core/src/main/java/com/datastax/oss/driver/api/core/time/TimestampGenerator.java index cc2fd76016f..b25b0c839d8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/time/TimestampGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/time/TimestampGenerator.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.api.core.time; +import com.datastax.oss.driver.api.core.cql.Statement; + /** * Generates client-side, microsecond-precision query timestamps. * @@ -31,8 +33,9 @@ public interface TimestampGenerator extends AutoCloseable { * returned value if the clock tick hasn't changed, and possibly drifting in the future. See the * built-in driver implementations for more details. * - * @return the next timestamp, or {@link Long#MIN_VALUE} to indicate that the driver should not - * send one with the query (and let Cassandra generate a server-side timestamp). + * @return the next timestamp, or {@link Statement#NO_DEFAULT_TIMESTAMP} to indicate that the + * driver should not send one with the query (and let Cassandra generate a server-side + * timestamp). 
*/ long next(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java index 46475384eff..60e033c344d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java @@ -232,7 +232,7 @@ private static QueryOptions buildQueryOptions( pageSize, pagingState, ProtocolConstants.ConsistencyLevel.SERIAL, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, null, Statement.NO_NOW_IN_SECONDS); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 81b04fb6452..81b7dd0f0c4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -135,7 +135,7 @@ public static Message toMessage( config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) : serialConsistency.getProtocolCode(); long timestamp = statement.getQueryTimestamp(); - if (timestamp == Long.MIN_VALUE) { + if (timestamp == Statement.NO_DEFAULT_TIMESTAMP) { timestamp = context.getTimestampGenerator().next(); } CodecRegistry codecRegistry = context.getCodecRegistry(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java index 1c0c31f4ee2..f20c62d1c32 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPreparedStatement.java @@ -166,7 +166,7 @@ public BoundStatement bind(@NonNull Object... 
values) { customPayloadForBoundStatements, areBoundStatementsIdempotent, areBoundStatementsTracing, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, pagingStateForBoundStatements, pageSizeForBoundStatements, consistencyLevelForBoundStatements, @@ -194,7 +194,7 @@ public BoundStatementBuilder boundStatementBuilder(@NonNull Object... values) { customPayloadForBoundStatements, areBoundStatementsIdempotent, areBoundStatementsTracing, - Long.MIN_VALUE, + Statement.NO_DEFAULT_TIMESTAMP, pagingStateForBoundStatements, pageSizeForBoundStatements, consistencyLevelForBoundStatements, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/time/ServerSideTimestampGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/time/ServerSideTimestampGenerator.java index 1e9f6c52eeb..8f66b304e92 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/time/ServerSideTimestampGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/time/ServerSideTimestampGenerator.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.time; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.time.TimestampGenerator; import net.jcip.annotations.ThreadSafe; @@ -45,7 +46,7 @@ public ServerSideTimestampGenerator(@SuppressWarnings("unused") DriverContext co @Override public long next() { - return Long.MIN_VALUE; + return Statement.NO_DEFAULT_TIMESTAMP; } @Override diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java index 41c0e722781..97212882358 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphStatementBuilderBaseTest.java @@ 
-21,6 +21,7 @@ import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphStatementBuilderBase; +import com.datastax.oss.driver.api.core.cql.Statement; import edu.umd.cs.findbugs.annotations.NonNull; import org.junit.Test; @@ -50,6 +51,6 @@ public void should_use_timestamp_if_set() { public void should_use_correct_default_timestamp_if_not_set() { MockGraphStatementBuilder builder = new MockGraphStatementBuilder(); - assertThat(builder.build().getTimestamp()).isEqualTo(Long.MIN_VALUE); + assertThat(builder.build().getTimestamp()).isEqualTo(Statement.NO_DEFAULT_TIMESTAMP); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java index bbc5ba381ee..53d8623de8f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.api.core.retry.RetryPolicy; @@ -130,7 +131,7 @@ protected RequestHandlerTestHarness(Builder builder) { when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); - when(timestampGenerator.next()).thenReturn(Long.MIN_VALUE); + when(timestampGenerator.next()).thenReturn(Statement.NO_DEFAULT_TIMESTAMP); when(context.getTimestampGenerator()).thenReturn(timestampGenerator); pools = builder.buildMockPools(); diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 1f04f6212d3..abd24d638aa 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -32,6 +32,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.SimpleStatementBuilder; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; @@ -344,7 +345,7 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { // Bound statements do not support per-query keyspaces, so this is not set assertThat(boundStatement.getKeyspace()).isNull(); // Should not be propagated - assertThat(boundStatement.getQueryTimestamp()).isEqualTo(Long.MIN_VALUE); + assertThat(boundStatement.getQueryTimestamp()).isEqualTo(Statement.NO_DEFAULT_TIMESTAMP); } } From 0be0f64bdb4ff40e85dc3c6f04b89dc207eea6fc Mon Sep 17 00:00:00 2001 From: Blazej Bucko Date: Sun, 21 Jul 2019 08:33:16 +0200 Subject: [PATCH 424/979] JAVA-2435: Add automatic-module-names to the manifests --- changelog/README.md | 1 + core-shaded/pom.xml | 1 + core/pom.xml | 7 +++++++ examples/pom.xml | 10 ++++++++++ integration-tests/pom.xml | 7 +++++++ manual/core/integration/README.md | 19 +++++++++++++++++++ mapper-processor/pom.xml | 7 +++++++ mapper-runtime/pom.xml | 10 ++++++++++ query-builder/pom.xml | 10 ++++++++++ test-infra/pom.xml | 10 ++++++++++ 10 files changed, 82 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index a7dbbf6e2ce..a9c0b4fb53c 100644 --- 
a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [improvement] JAVA-2435: Add automatic-module-names to the manifests - [new feature] JAVA-2054: Add now_in_seconds to protocol v5 query messages - [bug] JAVA-2711: Fix handling of UDT keys in the mapper - [improvement] JAVA-2631: Add getIndex() shortcuts to TableMetadata diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 67911797804..d58b4705c08 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -303,6 +303,7 @@ + com.datastax.oss.driver.core com.datastax.oss.driver.core - - com.github.jnr - jnr-ffi - com.github.jnr jnr-posix diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java index 1fef44acd0a..f30a8e7a61c 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinder.java @@ -41,7 +41,6 @@ class PlatformInfoFinder { private static final String MAVEN_IGNORE_LINE = "The following files have been resolved:"; private static final Pattern DEPENDENCY_SPLIT_REGEX = Pattern.compile(":"); static final String UNVERIFIED_RUNTIME_VERSION = "UNVERIFIED"; - public static final String UNKNOWN = "UNKNOWN"; private final Function propertiesUrlProvider; @SuppressWarnings("UnnecessaryLambda") @@ -214,7 +213,7 @@ private boolean lineWithDependencyInfo(String line) { private CPUS getCpuInfo() { int numberOfProcessors = Runtime.getRuntime().availableProcessors(); - String model = Native.isPlatformAvailable() ? 
Native.getCPU() : UNKNOWN; + String model = Native.getCpu(); return new CPUS(numberOfProcessors, model); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java new file mode 100644 index 00000000000..b5e2329954e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java @@ -0,0 +1,142 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Locale; + +public class CpuInfo { + + /* Copied from equivalent op in jnr.ffi.Platform. We have to have this here as it has to be defined + * before its (multiple) uses in determineCpu() */ + private static final Locale LOCALE = Locale.ENGLISH; + + /* The remainder of this class is largely based on jnr.ffi.Platform in jnr-ffi version 2.1.10. + * We copy it manually here in order to avoid introducing an extra dependency merely for the sake of + * evaluating some system properties. + * + * jnr-ffi copyright notice follows: + * + * Copyright (C) 2008-2010 Wayne Meissner + * + * This file is part of the JNR project. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + /** The supported CPU architectures. */ + public enum Cpu { + /* + * Note The names of the enum values are used in other parts of the + * code to determine where to find the native stub library. Do NOT rename. + */ + + /** 32 bit legacy Intel */ + I386, + + /** 64 bit AMD (aka EM64T/X64) */ + X86_64, + + /** 32 bit Power PC */ + PPC, + + /** 64 bit Power PC */ + PPC64, + + /** 64 bit Power PC little endian */ + PPC64LE, + + /** 32 bit Sun sparc */ + SPARC, + + /** 64 bit Sun sparc */ + SPARCV9, + + /** IBM zSeries S/390 */ + S390X, + + /** 32 bit MIPS (used by nestedvm) */ + MIPS32, + + /** 32 bit ARM */ + ARM, + + /** 64 bit ARM */ + AARCH64, + + /** + * Unknown CPU architecture. A best effort will be made to infer architecture specific values + * such as address and long size. 
+ */ + UNKNOWN; + + @Override + public String toString() { + return name().toLowerCase(LOCALE); + } + } + + public static Cpu determineCpu() { + String archString = System.getProperty("os.arch"); + if (equalsIgnoreCase("x86", archString) + || equalsIgnoreCase("i386", archString) + || equalsIgnoreCase("i86pc", archString) + || equalsIgnoreCase("i686", archString)) { + return Cpu.I386; + } else if (equalsIgnoreCase("x86_64", archString) || equalsIgnoreCase("amd64", archString)) { + return Cpu.X86_64; + } else if (equalsIgnoreCase("ppc", archString) || equalsIgnoreCase("powerpc", archString)) { + return Cpu.PPC; + } else if (equalsIgnoreCase("ppc64", archString) || equalsIgnoreCase("powerpc64", archString)) { + if ("little".equals(System.getProperty("sun.cpu.endian"))) { + return Cpu.PPC64LE; + } + return Cpu.PPC64; + } else if (equalsIgnoreCase("ppc64le", archString) + || equalsIgnoreCase("powerpc64le", archString)) { + return Cpu.PPC64LE; + } else if (equalsIgnoreCase("s390", archString) || equalsIgnoreCase("s390x", archString)) { + return Cpu.S390X; + } else if (equalsIgnoreCase("aarch64", archString)) { + return Cpu.AARCH64; + } else if (equalsIgnoreCase("arm", archString) || equalsIgnoreCase("armv7l", archString)) { + return Cpu.ARM; + } + + // Try to find by lookup up in the CPU list + for (Cpu cpu : Cpu.values()) { + if (equalsIgnoreCase(cpu.name(), archString)) { + return cpu; + } + } + + return Cpu.UNKNOWN; + } + + private static boolean equalsIgnoreCase(String s1, String s2) { + return s1.equalsIgnoreCase(s2) + || s1.toUpperCase(LOCALE).equals(s2.toUpperCase(LOCALE)) + || s1.toLowerCase(LOCALE).equals(s2.toLowerCase(LOCALE)); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/EmptyLibc.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/EmptyLibc.java new file mode 100644 index 00000000000..88a8e94c73c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/EmptyLibc.java @@ -0,0 +1,37 @@ 
+/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Optional; + +/** A no-op NativeImpl implementation; useful if we can't load one of the others */ +public class EmptyLibc implements Libc { + + @Override + public boolean available() { + return false; + } + + @Override + public Optional gettimeofday() { + return Optional.empty(); + } + + @Override + public Optional getpid() { + return Optional.empty(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibc.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibc.java new file mode 100644 index 00000000000..b67282ae1ce --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibc.java @@ -0,0 +1,88 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Optional; +import java.util.function.Consumer; +import jnr.posix.POSIX; +import jnr.posix.POSIXFactory; +import jnr.posix.Timeval; +import jnr.posix.util.DefaultPOSIXHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JnrLibc implements Libc { + + private static final Logger LOG = LoggerFactory.getLogger(JnrLibc.class); + + private final Optional posix; + + public JnrLibc() { + + this.posix = loadPosix(); + } + + @Override + public Optional gettimeofday() { + + return this.posix.flatMap(this::gettimeofdayImpl); + } + + @Override + public Optional getpid() { + + return this.posix.map(POSIX::getpid); + } + + @Override + public boolean available() { + return this.posix.isPresent(); + } + + private Optional loadPosix() { + + try { + return Optional.of(POSIXFactory.getPOSIX(new DefaultPOSIXHandler(), true)) + .flatMap(p -> catchAll(p, posix -> posix.getpid(), "Error calling getpid()")) + .flatMap(p -> catchAll(p, this::gettimeofdayImpl, "Error calling gettimeofday()")); + } catch (Throwable t) { + LOG.debug("Error loading POSIX", t); + return Optional.empty(); + } + } + + private Optional catchAll(POSIX posix, Consumer fn, String debugStr) { + try { + fn.accept(posix); + return Optional.of(posix); + } catch (Throwable t) { + + LOG.debug(debugStr, t); + return Optional.empty(); + } + } + + private Optional gettimeofdayImpl(POSIX posix) { + + Timeval tv = posix.allocateTimeval(); + int rv = posix.gettimeofday(tv); + if (rv != 0) { + LOG.debug("Expected 0 return value from gettimeofday(), observed " + rv); + return Optional.empty(); + } + return Optional.of(tv.sec() * 1_000_000 + tv.usec()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Libc.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Libc.java new file mode 100644 index 00000000000..5e4e5e91bd1 --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Libc.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Optional; + +public interface Libc { + + /* Maintained to allow Native.isXAvailable() functionality without trying to make a native call if + * the underlying support _is_ available. */ + boolean available(); + + Optional gettimeofday(); + + Optional getpid(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java index 7b85a5d9434..8fcc5ce6260 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java @@ -15,17 +15,6 @@ */ package com.datastax.oss.driver.internal.core.os; -import com.datastax.oss.driver.internal.core.util.Reflection; -import java.lang.reflect.Method; -import jnr.ffi.LibraryLoader; -import jnr.ffi.Platform; -import jnr.ffi.Pointer; -import jnr.ffi.Runtime; -import jnr.ffi.Struct; -import jnr.ffi.annotations.Out; -import jnr.ffi.annotations.Transient; -import jnr.posix.POSIXFactory; -import jnr.posix.util.DefaultPOSIXHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,13 +23,28 @@ public class Native { private static final Logger LOG = 
LoggerFactory.getLogger(Native.class); + private static class LibcLoader { + + public Libc load() { + try { + return new JnrLibc(); + } catch (Throwable t) { + LOG.info( + "Unable to load JNR native implementation. This could be normal if JNR is excluded from the classpath", + t); + return new EmptyLibc(); + } + } + } + + private static final Libc LIBC = new LibcLoader().load(); + private static final CpuInfo.Cpu CPU = CpuInfo.determineCpu(); + + private static final String NATIVE_CALL_ERR_MSG = "Native call failed or was not available"; + /** Whether {@link Native#currentTimeMicros()} is available on this system. */ public static boolean isCurrentTimeMicrosAvailable() { - try { - return LibCLoader.GET_TIME_OF_DAY_AVAILABLE; - } catch (NoClassDefFoundError e) { - return false; - } + return LIBC.available(); } /** @@ -48,158 +52,24 @@ public static boolean isCurrentTimeMicrosAvailable() { * {@link #isCurrentTimeMicrosAvailable()} is true. */ public static long currentTimeMicros() { - if (!isCurrentTimeMicrosAvailable()) { - throw new IllegalStateException( - "Native call not available. " - + "Check isCurrentTimeMicrosAvailable() before calling this method."); - } - LibCLoader.Timeval tv = new LibCLoader.Timeval(LibCLoader.LIB_C_RUNTIME); - int res = LibCLoader.LIB_C.gettimeofday(tv, null); - if (res != 0) { - throw new IllegalStateException("Call to libc.gettimeofday() failed with result " + res); - } - return tv.tv_sec.get() * 1000000 + tv.tv_usec.get(); + return LIBC.gettimeofday().orElseThrow(() -> new IllegalStateException(NATIVE_CALL_ERR_MSG)); } public static boolean isGetProcessIdAvailable() { - try { - return PosixLoader.GET_PID_AVAILABLE; - } catch (NoClassDefFoundError e) { - return false; - } + return LIBC.available(); } public static int getProcessId() { - if (!isGetProcessIdAvailable()) { - throw new IllegalStateException( - "Native call not available. 
" - + "Check isGetProcessIdAvailable() before calling this method."); - } - return PosixLoader.POSIX.getpid(); + return LIBC.getpid().orElseThrow(() -> new IllegalStateException(NATIVE_CALL_ERR_MSG)); } /** - * Returns {@code true} if JNR {@link Platform} class is loaded, and {@code false} otherwise. - * - * @return {@code true} if JNR {@link Platform} class is loaded. - */ - public static boolean isPlatformAvailable() { - try { - return PlatformLoader.PLATFORM != null; - } catch (NoClassDefFoundError e) { - return false; - } - } - - /** - * Returns the current processor architecture the JVM is running on, as reported by {@link - * Platform#getCPU()}. + * Returns the current processor architecture the JVM is running on. This value should match up to + * what's returned by jnr-ffi's Platform.getCPU() method. * * @return the current processor architecture. - * @throws IllegalStateException if JNR Platform library is not loaded. */ - public static String getCPU() { - if (!isPlatformAvailable()) - throw new IllegalStateException( - "JNR Platform class not loaded. " - + "Check isPlatformAvailable() before calling this method."); - return PlatformLoader.PLATFORM.getCPU().toString(); - } - - /** - * If jnr-ffi is not in the classpath at runtime, we'll fail to initialize the static fields - * below, but we still want {@link Native} to initialize successfully, so use an inner class. - */ - private static class LibCLoader { - - /** Handles libc calls through JNR (must be public). 
*/ - public interface LibC { - int gettimeofday(@Out @Transient Timeval tv, Pointer unused); - } - - // See http://man7.org/linux/man-pages/man2/settimeofday.2.html - private static class Timeval extends Struct { - private final time_t tv_sec = new time_t(); - private final Unsigned32 tv_usec = new Unsigned32(); - - private Timeval(Runtime runtime) { - super(runtime); - } - } - - private static final LibC LIB_C; - private static final Runtime LIB_C_RUNTIME; - private static final boolean GET_TIME_OF_DAY_AVAILABLE; - - static { - LibC libc; - Runtime runtime = null; - try { - libc = LibraryLoader.create(LibC.class).load("c"); - runtime = Runtime.getRuntime(libc); - } catch (Throwable t) { - libc = null; - LOG.debug("Error loading libc", t); - } - LIB_C = libc; - LIB_C_RUNTIME = runtime; - boolean getTimeOfDayAvailable = false; - if (LIB_C_RUNTIME != null) { - try { - getTimeOfDayAvailable = LIB_C.gettimeofday(new Timeval(LIB_C_RUNTIME), null) == 0; - } catch (Throwable t) { - LOG.debug("Error accessing libc.gettimeofday()", t); - } - } - GET_TIME_OF_DAY_AVAILABLE = getTimeOfDayAvailable; - } - } - - /** @see LibCLoader */ - private static class PosixLoader { - @SuppressWarnings("VariableNameSameAsType") - private static final jnr.posix.POSIX POSIX; - - private static final boolean GET_PID_AVAILABLE; - - static { - jnr.posix.POSIX posix; - try { - posix = POSIXFactory.getPOSIX(new DefaultPOSIXHandler(), true); - } catch (Throwable t) { - posix = null; - LOG.debug("Error loading POSIX", t); - } - POSIX = posix; - boolean getPidAvailable = false; - if (POSIX != null) { - try { - POSIX.getpid(); - getPidAvailable = true; - } catch (Throwable t) { - LOG.debug("Error accessing posix.getpid()", t); - } - } - GET_PID_AVAILABLE = getPidAvailable; - } - } - - private static class PlatformLoader { - - private static final Platform PLATFORM; - - static { - Platform platform = null; - try { - Class platformClass = Reflection.loadClass(null, "jnr.ffi.Platform"); - if 
(platformClass != null) { - Method getNativePlatform = platformClass.getMethod("getNativePlatform"); - platform = (Platform) getNativePlatform.invoke(null); - } - } catch (Throwable t) { - LOG.debug("Error loading jnr.ffi.Platform class, this class will not be available.", t); - } - PLATFORM = platform; - } + public static String getCpu() { + return CPU.toString(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java new file mode 100644 index 00000000000..e3bf9a876db --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.os; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Optional; +import org.junit.Test; + +/** + * Explicitly test native impl based on jnr's POSIX impl. This test should pass on any platform + * which is supported by jnr. 
+ */ +public class JnrLibcTest { + + @Test + public void should_be_available() { + + Libc impl = new JnrLibc(); + assertThat(impl.available()).isTrue(); + } + + @Test + public void should_support_getpid() { + Libc impl = new JnrLibc(); + Optional val = impl.getpid(); + assertThat(val).isNotEmpty(); + assertThat(val.get()).isGreaterThan(1); + } + + @Test + public void should_support_gettimeofday() { + Libc impl = new JnrLibc(); + Optional val = impl.gettimeofday(); + assertThat(val).isNotEmpty(); + assertThat(val.get()).isGreaterThan(0); + + Instant now = Instant.now(); + Instant rvInstant = Instant.EPOCH.plus(val.get(), ChronoUnit.MICROS); + assertThat(rvInstant.isAfter(now.minusSeconds(1))).isTrue(); + assertThat(rvInstant.isBefore(now.plusSeconds(1))).isTrue(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java index 56e7763486b..309fd28d6d8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/os/NativeTest.java @@ -21,11 +21,9 @@ public class NativeTest { - /** Verifies that {@link Native#getCPU()} returns non-empty cpu architecture */ + /** Verifies that {@link Native#getCpu()} returns non-empty cpu architecture */ @Test - public void should_return_cpu_if_call_is_available() { - if (Native.isPlatformAvailable()) { - assertThat(Native.getCPU()).isNotEmpty(); - } + public void should_return_cpu_info() { + assertThat(Native.getCpu()).isNotEmpty(); } } diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 72f6b0b0314..29cc09e39ee 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -303,11 +303,11 @@ The driver performs native calls with [JNR](https://github.com/jnr). This is use * to get the process ID when generating [UUIDs][Uuids]. 
In both cases, this is completely optional; if system calls are not available on the current -platform, or the libraries fail to load for any reason, the driver falls back to pure Java +platform, or the library fails to load for any reason, the driver falls back to pure Java workarounds. If you don't want to use system calls, or already know (from looking at the driver's logs) that they -are not available on your platform, you can exclude the following dependencies: +are not available on your platform, you can exclude the following dependency: ```xml @@ -315,10 +315,6 @@ are not available on your platform, you can exclude the following dependencies: java-driver-core ${driver.version} - - com.github.jnr - jnr-ffi - com.github.jnr jnr-posix diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 3627a1fa4d1..12d754d84b1 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -190,6 +190,6 @@ Here is the order of precedence of all the methods described so far: [TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html -[JNR]: https://github.com/jnr/jnr-ffi +[JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- \ No newline at end of file +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/osgi/README.md b/manual/osgi/README.md index d337eab6b2f..e0c5108bca3 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -93,6 +93,6 @@ starting the driver: [driver configuration]: 
../core/configuration [OSGi]:https://www.osgi.org -[JNR]: https://github.com/jnr/jnr-ffi +[JNR]: https://github.com/jnr/jnr-posix [withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- -[JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 \ No newline at end of file +[JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 diff --git a/pom.xml b/pom.xml index 07ad84b9da6..c4b9d1f2d0d 100644 --- a/pom.xml +++ b/pom.xml @@ -110,11 +110,6 @@ logback-classic ${logback.version} - - com.github.jnr - jnr-ffi - 2.1.10 - org.xerial.snappy snappy-java From 1bf39fa262124e45566bd1438d635884656f3969 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 20 Apr 2020 11:58:16 -0700 Subject: [PATCH 447/979] Simplify assumptions in InitialNodeListRefresh --- .../core/metadata/InitialNodeListRefresh.java | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java index 92262b72a47..676724de327 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java @@ -18,7 +18,6 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenMap; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; @@ -54,11 +53,10 @@ public Result compute( 
String logPrefix = context.getSessionName(); TokenFactoryRegistry tokenFactoryRegistry = context.getTokenFactoryRegistry(); - assert oldMetadata.getNodes().isEmpty(); - - TokenFactory tokenFactory = - oldMetadata.getTokenMap().map(m -> ((DefaultTokenMap) m).getTokenFactory()).orElse(null); - boolean tokensChanged = false; + // Since this is the first refresh, and we've stored contact points separately until now, the + // metadata is empty. + assert oldMetadata == DefaultMetadata.EMPTY; + TokenFactory tokenFactory = null; ImmutableMap.Builder newNodesBuilder = ImmutableMap.builder(); @@ -71,10 +69,10 @@ public Result compute( } else { LOG.debug("[{}] Copying contact point {}", logPrefix, node); } - if (tokenFactory == null && nodeInfo.getPartitioner() != null) { + if (tokenMapEnabled && tokenFactory == null && nodeInfo.getPartitioner() != null) { tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); } - tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, context); + copyInfos(nodeInfo, node, tokenFactory, context); newNodesBuilder.put(node.getHostId(), node); } @@ -94,7 +92,7 @@ public Result compute( return new Result( oldMetadata.withNodes( - ImmutableMap.copyOf(newNodes), tokenMapEnabled, tokensChanged, tokenFactory, context), + ImmutableMap.copyOf(newNodes), tokenMapEnabled, true, tokenFactory, context), eventsBuilder.build()); } From f3728a0e05ec48ca496c0259513364da8f7951bc Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Tue, 21 Apr 2020 12:32:52 -0500 Subject: [PATCH 448/979] Fix various issues with tests * Add Awaitility version for OSGi tests * Remove broken graph paging test The test was trying to pause the server between two continuous pages. That can't be done reliably since the server replies asynchronously. 
* Use Awaitility for Metadata Manager tests Co-authored-by: olim7t --- .../core/metadata/MetadataManagerTest.java | 51 +++++++++++-------- integration-tests/pom.xml | 1 + .../driver/api/core/graph/GraphPagingIT.java | 32 ------------ .../driver/osgi/support/BundleOptions.java | 9 ++++ 4 files changed, 40 insertions(+), 53 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java index ca02ec2260b..e56462bb42e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java @@ -17,7 +17,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; -import static org.assertj.core.api.Assertions.fail; +import static org.awaitility.Awaitility.await; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; @@ -36,21 +36,18 @@ import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; -import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import io.netty.channel.DefaultEventLoopGroup; -import io.netty.util.concurrent.Future; import java.net.InetSocketAddress; import java.time.Duration; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.UUID; +import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import 
java.util.concurrent.TimeoutException; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -144,7 +141,7 @@ public void should_copy_contact_points_on_refresh_of_all_nodes() { // When CompletionStage refreshNodesFuture = metadataManager.refreshNodes(); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.refreshes.size() == 1); // Then assertThatStage(refreshNodesFuture).isSuccess(); @@ -173,7 +170,7 @@ public void should_refresh_all_nodes() { // When CompletionStage refreshNodesFuture = metadataManager.refreshNodes(); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.refreshes.size() == 1); // Then assertThatStage(refreshNodesFuture).isSuccess(); @@ -228,7 +225,7 @@ public void should_add_node() { // When metadataManager.addNode(broadcastRpcAddress); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.addNodeCount == 1); // Then assertThat(metadataManager.refreshes).hasSize(1); @@ -251,7 +248,7 @@ public void should_not_add_node_if_broadcast_rpc_address_does_not_match() { // When metadataManager.addNode(broadcastRpcAddress2); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.addNodeCount == 1); // Then assertThat(metadataManager.refreshes).isEmpty(); @@ -266,7 +263,7 @@ public void should_not_add_node_if_topology_monitor_does_not_have_info() { // When metadataManager.addNode(broadcastRpcAddress2); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.addNodeCount == 1); // Then assertThat(metadataManager.refreshes).isEmpty(); @@ -279,7 +276,7 @@ public void should_remove_node() { // When metadataManager.removeNode(broadcastRpcAddress2); - waitForPendingAdminTasks(); + waitForPendingAdminTasks(() -> metadataManager.removeNodeCount == 1); // Then assertThat(metadataManager.refreshes).hasSize(1); @@ -290,6 +287,8 @@ public void should_remove_node() { private static class TestMetadataManager extends 
MetadataManager { private List refreshes = new CopyOnWriteArrayList<>(); + private volatile int addNodeCount = 0; + private volatile int removeNodeCount = 0; public TestMetadataManager(InternalDriverContext context) { super(context); @@ -301,18 +300,28 @@ Void apply(MetadataRefresh refresh) { refreshes.add(refresh); return null; } + + @Override + public void addNode(InetSocketAddress broadcastRpcAddress) { + // Keep track of addNode calls for condition checking + synchronized (this) { + ++addNodeCount; + } + super.addNode(broadcastRpcAddress); + } + + @Override + public void removeNode(InetSocketAddress broadcastRpcAddress) { + // Keep track of removeNode calls for condition checking + synchronized (this) { + ++removeNodeCount; + } + super.removeNode(broadcastRpcAddress); + } } // Wait for all the tasks on the pool's admin executor to complete. - private void waitForPendingAdminTasks() { - // This works because the event loop group is single-threaded - Future f = adminEventLoopGroup.schedule(() -> null, 5, TimeUnit.NANOSECONDS); - try { - Uninterruptibles.getUninterruptibly(f, 100, TimeUnit.MILLISECONDS); - } catch (ExecutionException e) { - fail("unexpected error", e.getCause()); - } catch (TimeoutException e) { - fail("timed out while waiting for admin tasks to complete", e); - } + private void waitForPendingAdminTasks(Callable condition) { + await().atMost(500, TimeUnit.MILLISECONDS).until(condition); } } diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8de8e4fc3fe..cc43c56dcd4 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -268,6 +268,7 @@ ${reactive-streams.version} ${rxjava.version} ${tinkerpop.version} + ${awaitility.version} diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index e3f422e577e..335aceb9b84 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -461,38 +461,6 @@ public void should_trigger_global_timeout_async() throws InterruptedException { } } - @Test - public void should_trigger_global_timeout_async_after_first_page() throws InterruptedException { - // given - Duration timeout = Duration.ofSeconds(1); - DriverExecutionProfile profile = - enableGraphPaging() - .withDuration(DseDriverOption.GRAPH_TIMEOUT, timeout) - .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) - .withInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, 10); - - // when - try { - CompletionStage firstPageFuture = - SESSION_RULE - .session() - .executeAsync( - ScriptGraphStatement.newInstance("g.V().hasLabel('person').values('name')") - .setGraphName(SESSION_RULE.getGraphName()) - .setTraversalSource("g") - .setExecutionProfile(profile)); - AsyncGraphResultSet firstPage = firstPageFuture.toCompletableFuture().get(); - CCM_RULE.getCcmBridge().pause(1); - CompletionStage secondPageFuture = firstPage.fetchNextPage(); - secondPageFuture.toCompletableFuture().get(); - fail("Expecting DriverTimeoutException"); - } catch (ExecutionException e) { - assertThat(e.getCause()).hasMessage("Query timed out after " + timeout); - } finally { - CCM_RULE.getCcmBridge().resume(1); - } - } - private DriverExecutionProfile enableGraphPaging() { return SESSION_RULE .session() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java index 37a3d96785d..d5846b20861 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java @@ -113,6 +113,7 @@ public static CompositeOption 
testBundles() { options( driverTestInfraBundle(), simulacronBundles(), + awaitilityBundles(), nettyBundles(), // required by the test infra bundle, even for the shaded jar jacksonBundles(), // required by the Simulacron bundle, even for the shaded jar mavenBundle( @@ -167,6 +168,14 @@ public static CompositeOption simulacronBundles() { simulacronVersion)); } + public static CompositeOption awaitilityBundles() { + String awaitilityVersion = getVersionFromSystemProperty("awaitility.version"); + return () -> + options( + mavenBundle("org.awaitility", "awaitility", awaitilityVersion), + mavenBundle("org.hamcrest", "hamcrest", "2.1")); + } + public static MavenArtifactProvisionOption lz4Bundle() { return mavenBundle("org.lz4", "lz4-java", getVersionFromSystemProperty("lz4.version")); } From 057fe5d2d58a953caade26786f98c1a75ed089b1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 15 Apr 2020 18:38:11 -0700 Subject: [PATCH 449/979] JAVA-2700: Revisit execution profile parameters in mapper This amends JAVA-2633 (5e1e5107c91c3546e5cc81a29cbccdbbaff0f82b). 
--- .../driver/api/core/cql/StatementBuilder.java | 7 +- .../datastax/oss/driver/mapper/ProfileIT.java | 240 ++++++++++++------ manual/mapper/mapper/README.md | 20 ++ .../dao/DaoDeleteMethodGenerator.java | 1 - .../dao/DaoInsertMethodGenerator.java | 1 - .../processor/dao/DaoMethodGenerator.java | 7 - .../dao/DaoQueryMethodGenerator.java | 1 - .../dao/DaoSelectMethodGenerator.java | 1 - .../dao/DaoUpdateMethodGenerator.java | 1 - .../mapper/MapperBuilderGenerator.java | 3 +- .../MapperDaoFactoryMethodGenerator.java | 57 ++--- mapper-runtime/revapi.json | 7 +- .../oss/driver/api/mapper/MapperBuilder.java | 47 ++++ .../oss/driver/api/mapper/MapperContext.java | 18 +- .../api/mapper/annotations/DaoProfile.java | 11 +- .../oss/driver/internal/mapper/DaoBase.java | 5 + .../driver/internal/mapper/DaoCacheKey.java | 50 +++- .../internal/mapper/DefaultMapperContext.java | 70 +++-- 18 files changed, 363 insertions(+), 184 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 1ba110347ec..98d555b4f04 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -91,6 +91,9 @@ protected StatementBuilder(StatementT template) { @NonNull public SelfT setExecutionProfileName(@Nullable String executionProfileName) { this.executionProfileName = executionProfileName; + if (executionProfileName != null) { + this.executionProfile = null; + } return self; } @@ -98,7 +101,9 @@ public SelfT setExecutionProfileName(@Nullable String executionProfileName) { @NonNull public SelfT setExecutionProfile(@Nullable DriverExecutionProfile executionProfile) { this.executionProfile = executionProfile; - this.executionProfileName = null; + if (executionProfile != null) { + this.executionProfileName = null; + } return self; } diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java index 373ed0b718f..750f9378349 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java @@ -20,11 +20,12 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.mapper.MapperBuilder; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoProfile; @@ -35,38 +36,55 @@ import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Query; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.StatementAttributes; import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Execute; -import 
com.datastax.oss.simulacron.common.cluster.ClusterQueryLogReport; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.cluster.QueryLog; import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.UUID; import java.util.concurrent.TimeUnit; +import java.util.function.UnaryOperator; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; @Category(ParallelizableTests.class) public class ProfileIT { - @ClassRule - public static final SimulacronRule SIMULACRON_RULE = + private static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - private static ProfileIT.SimpleDao daoString; - private static ProfileIT.SimpleDao daoClass; - private static CqlSession mapperSession; + private static final SessionRule SESSION_RULE = + SessionRule.builder(SIMULACRON_RULE) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .startProfile("cl_one") + .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "ONE") + .build()) + .build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); + + private static final Simple SAMPLE_ENTITY = new Simple(UUID.randomUUID(), "DATA"); + + private static DriverExecutionProfile clTwoProfile; + private MapperBuilder mapperBuilder; @BeforeClass public static void setupClass() { @@ -76,94 +94,137 @@ public static void setupClass() { primeCountQuery(); primeUpdateQuery(); - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .startProfile("cl") - .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "ANY") - .build(); - mapperSession = 
SessionUtils.newSession(SIMULACRON_RULE, loader); - - ProfileIT.InventoryMapper inventoryMapper = - new ProfileIT_InventoryMapperBuilder(mapperSession).build(); - daoString = inventoryMapper.simpleDao("cl"); - DriverExecutionProfile clProfile = mapperSession.getContext().getConfig().getProfile("cl"); - daoClass = inventoryMapper.simpleDao(clProfile); + // Deliberately based on the default profile, so that we can assert that a dynamically-set + // option is correctly taken into account + clTwoProfile = + SESSION_RULE + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "TWO"); } @Before public void setup() { SIMULACRON_RULE.cluster().clearLogs(); + mapperBuilder = SimpleMapper.builder(SESSION_RULE.session()); } - private static final ProfileIT.Simple simple = new ProfileIT.Simple(UUID.randomUUID(), "DATA"); - @Test - public void should_honor_exec_profile_on_insert() { - daoString.save(simple); - - ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); - - SIMULACRON_RULE.cluster().clearLogs(); - - daoClass.save(simple); - report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + public void should_build_dao_with_profile_name() { + SimpleMapper mapper = mapperBuilder.build(); + SimpleDao dao = mapper.simpleDao("cl_one"); + assertClForAllQueries(dao, ConsistencyLevel.ONE); } @Test - public void should_honor_exec_profile_on_delete() { - daoString.delete(simple); - - ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); - - SIMULACRON_RULE.cluster().clearLogs(); - - daoClass.delete(simple); - report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + public void should_build_dao_with_profile() { + SimpleMapper mapper = mapperBuilder.build(); + SimpleDao dao = 
mapper.simpleDao(clTwoProfile); + assertClForAllQueries(dao, ConsistencyLevel.TWO); } @Test - public void should_honor_exec_profile_on_update() { - daoString.update(simple); + public void should_inherit_mapper_profile_name() { + SimpleMapper mapper = mapperBuilder.withDefaultExecutionProfileName("cl_one").build(); + SimpleDao dao = mapper.simpleDao(); + assertClForAllQueries(dao, ConsistencyLevel.ONE); + } - ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + @Test + public void should_inherit_mapper_profile() { + SimpleMapper mapper = mapperBuilder.withDefaultExecutionProfile(clTwoProfile).build(); + SimpleDao dao = mapper.simpleDao(); + assertClForAllQueries(dao, ConsistencyLevel.TWO); + } - SIMULACRON_RULE.cluster().clearLogs(); + @Test + public void should_override_mapper_profile_name() { + SimpleMapper mapper = + mapperBuilder + .withDefaultExecutionProfileName("defaultProfile") // doesn't need to exist + .build(); + SimpleDao dao = mapper.simpleDao("cl_one"); + assertClForAllQueries(dao, ConsistencyLevel.ONE); + } - daoClass.update(simple); - report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + @Test + public void should_override_mapper_profile() { + DriverExecutionProfile clThreeProfile = + SESSION_RULE + .session() + .getContext() + .getConfig() + .getDefaultProfile() + .withString(DefaultDriverOption.REQUEST_CONSISTENCY, "THREE"); + SimpleMapper mapper = mapperBuilder.withDefaultExecutionProfile(clThreeProfile).build(); + SimpleDao dao = mapper.simpleDao(clTwoProfile); + assertClForAllQueries(dao, ConsistencyLevel.TWO); } @Test - public void should_honor_exec_profile_on_query() { - daoString.findByPk(simple.pk); + public void should_override_mapper_profile_name_with_a_profile() { + SimpleMapper mapper = + mapperBuilder + .withDefaultExecutionProfileName("defaultProfile") // doesn't need to exist + .build(); + SimpleDao dao = 
mapper.simpleDao(clTwoProfile); + assertClForAllQueries(dao, ConsistencyLevel.TWO); + } - ClusterQueryLogReport report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + @Test + public void should_override_mapper_profile_with_a_name() { + SimpleMapper mapper = mapperBuilder.withDefaultExecutionProfile(clTwoProfile).build(); + SimpleDao dao = mapper.simpleDao("cl_one"); + assertClForAllQueries(dao, ConsistencyLevel.ONE); + } - SIMULACRON_RULE.cluster().clearLogs(); + @Test + public void should_use_default_when_no_profile() { + SimpleMapper mapper = mapperBuilder.build(); + SimpleDao dao = mapper.simpleDao(); + // Default CL inherited from reference.conf + assertClForAllQueries(dao, ConsistencyLevel.LOCAL_ONE); + } - daoString.findByPk(simple.pk); - report = SIMULACRON_RULE.cluster().getLogs(); - validateQueryOptions(report.getQueryLogs().get(0)); + @Test(expected = IllegalStateException.class) + public void should_fail_if_mapper_provides_both_profile_and_name() { + mapperBuilder + .withDefaultExecutionProfileName("cl_one") + .withDefaultExecutionProfile(clTwoProfile); } - private void validateQueryOptions(QueryLog log) { + private void assertClForAllQueries(SimpleDao dao, ConsistencyLevel expectedLevel) { + dao.save(SAMPLE_ENTITY); + assertServerSideCl(expectedLevel); + dao.delete(SAMPLE_ENTITY); + assertServerSideCl(expectedLevel); + dao.update(SAMPLE_ENTITY); + assertServerSideCl(expectedLevel); + dao.findByPk(SAMPLE_ENTITY.pk); + assertServerSideCl(expectedLevel); + + // Special cases: profile defined at the method level with statement attributes, should override + // dao-level profile. 
+ dao.saveWithClOne(SAMPLE_ENTITY); + assertServerSideCl(ConsistencyLevel.ONE); + dao.saveWithCustomAttributes(SAMPLE_ENTITY, bs -> bs.setExecutionProfileName("cl_one")); + assertServerSideCl(ConsistencyLevel.ONE); + } - Message message = log.getFrame().message; + private void assertServerSideCl(ConsistencyLevel expectedCl) { + List queryLogs = SIMULACRON_RULE.cluster().getLogs().getQueryLogs(); + QueryLog lastLog = queryLogs.get(queryLogs.size() - 1); + Message message = lastLog.getFrame().message; assertThat(message).isInstanceOf(Execute.class); Execute queryExecute = (Execute) message; - assertThat(queryExecute.options.consistency) - .isEqualTo(DefaultConsistencyLevel.ANY.getProtocolCode()); + assertThat(queryExecute.options.consistency).isEqualTo(expectedCl.getProtocolCode()); } private static void primeInsertQuery() { - Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); + Map params = + ImmutableMap.of("pk", SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData()); Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); SIMULACRON_RULE .cluster() @@ -171,15 +232,16 @@ private static void primeInsertQuery() { when(query( "INSERT INTO ks.simple (pk,data) VALUES (:pk,:data)", Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.LOCAL_ONE, com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.TWO), params, paramTypes)) .then(noRows())); } private static void primeDeleteQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); + Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); SIMULACRON_RULE .cluster() @@ -187,8 +249,9 @@ private static void primeDeleteQuery() { when(query( "DELETE FROM ks.simple WHERE pk=:pk", Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.LOCAL_ONE, 
com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.TWO), params, paramTypes)) .then(noRows()) @@ -196,7 +259,7 @@ private static void primeDeleteQuery() { } private static void primeSelectQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); + Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); SIMULACRON_RULE .cluster() @@ -204,8 +267,9 @@ private static void primeSelectQuery() { when(query( "SELECT pk,data FROM ks.simple WHERE pk=:pk", Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.LOCAL_ONE, com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.TWO), params, paramTypes)) .then(noRows()) @@ -213,7 +277,7 @@ private static void primeSelectQuery() { } private static void primeCountQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); + Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); Map paramTypes = ImmutableMap.of("pk", "uuid"); SIMULACRON_RULE .cluster() @@ -221,8 +285,9 @@ private static void primeCountQuery() { when(query( "SELECT count(*) FROM ks.simple WHERE pk=:pk", Lists.newArrayList( + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.LOCAL_ONE, com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE, - com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ANY), + com.datastax.oss.simulacron.common.codec.ConsistencyLevel.TWO), params, paramTypes)) .then(PrimeDsl.rows().row("count", 1L).columnTypes("count", "bigint").build()) @@ -230,7 +295,8 @@ private static void primeCountQuery() { } private static void primeUpdateQuery() { - Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); + Map params = + ImmutableMap.of("pk", 
SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData()); Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); SIMULACRON_RULE .cluster() @@ -246,30 +312,44 @@ private static void primeUpdateQuery() { } @Mapper - public interface InventoryMapper { + public interface SimpleMapper { @DaoFactory - ProfileIT.SimpleDao simpleDao(@DaoProfile String executionProfile); + SimpleDao simpleDao(); @DaoFactory - ProfileIT.SimpleDao simpleDao(@DaoProfile DriverExecutionProfile executionProfile); + SimpleDao simpleDao(@DaoProfile String executionProfile); + + @DaoFactory + SimpleDao simpleDao(@DaoProfile DriverExecutionProfile executionProfile); + + static MapperBuilder builder(CqlSession session) { + return new ProfileIT_SimpleMapperBuilder(session); + } } @Dao public interface SimpleDao { @Insert - void save(ProfileIT.Simple simple); + void save(Simple simple); @Delete - void delete(ProfileIT.Simple simple); + void delete(Simple simple); @Select - ProfileIT.Simple findByPk(UUID pk); + Simple findByPk(UUID pk); @Query("SELECT count(*) FROM ks.simple WHERE pk=:pk") long count(UUID pk); @Update - void update(ProfileIT.Simple simple); + void update(Simple simple); + + @Insert + @StatementAttributes(executionProfileName = "cl_one") + void saveWithClOne(Simple simple); + + @Insert + void saveWithCustomAttributes(Simple simple, UnaryOperator attributes); } @Entity(defaultKeyspace = "ks") diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index ef4b8be7a65..005e8472ac3 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -111,6 +111,8 @@ assert dao1 == dao2; ### DAO parameterization +#### Keyspace and table + The mapper allows you to reuse the same DAO interface for different tables. 
For example, given the following definitions: @@ -159,6 +161,24 @@ ProductDao dao3 = inventoryMapper.productDao("keyspace3", "table3"); The DAO's keyspace and table can also be injected into custom query strings; see [Query methods](../daos/query/). +#### Execution profile + +Similarly, a DAO can be parameterized to use a particular [configuration +profile](../../core/configuration/#execution-profiles): + +```java +@Mapper +public interface InventoryMapper { + @DaoFactory + ProductDao productDao(@DaoProfile String profileName); + + @DaoFactory + ProductDao productDao(@DaoProfile DriverExecutionProfile profile); +} +``` + +The mapper will call `setExecutionProfileName` / `setExecutionProfile` on every generated statement. + ### Schema validation The mapper validates entity mappings against the database schema at runtime. This check is performed diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index ed6168287e1..a445066106c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -223,7 +223,6 @@ public Optional generate() { statementName); populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - populateBuilderWithProfile(methodBodyBuilder); int nextParameterIndex = 0; if (hasEntityParameter) { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 2f71b485073..69e2400b7e2 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -140,7 +140,6 @@ public Optional generate() { populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - populateBuilderWithProfile(methodBodyBuilder); warnIfCqlNamePresent(parameters.subList(0, 1)); String entityParameterName = parameters.get(0).getSimpleName().toString(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index fd3ec7b2401..6608c4702b4 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -146,13 +146,6 @@ protected void populateBuilderWithFunction( } } - protected void populateBuilderWithProfile(CodeBlock.Builder builder) { - builder.beginControlFlow("if(context.getExecutionProfileName() != null)"); - builder.addStatement( - "boundStatementBuilder = boundStatementBuilder.setExecutionProfileName(context.getExecutionProfileName())"); - builder.endControlFlow(); - } - protected void populateBuilderWithStatementAttributes( CodeBlock.Builder builder, ExecutableElement methodElement) { StatementAttributes statementAttributes = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java index e0fbbf5f721..0d3250c7282 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java @@ -109,7 +109,6 @@ public Optional generate() { populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - populateBuilderWithProfile(methodBodyBuilder); if (validateCqlNamesPresent(parameters)) { GeneratedCodePatterns.bindParameters( diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 662d5e32b71..5125719e89d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -161,7 +161,6 @@ public Optional generate() { statementName); populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - populateBuilderWithProfile(methodBodyBuilder); if (!primaryKeyParameters.isEmpty()) { List primaryKeyNames = diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index df6e774679b..9babcaee5bb 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -126,7 +126,6 @@ public Optional generate() { 
populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); - populateBuilderWithProfile(methodBodyBuilder); String entityParameterName = parameters.get(0).getSimpleName().toString(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java index 1900547e9c6..93ec4f5acfc 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java @@ -75,7 +75,8 @@ protected JavaFile.Builder getContents() { .addAnnotation(Override.class) .returns(ClassName.get(interfaceElement)) .addStatement( - "$1T context = new $1T(session, defaultKeyspaceId, customState)", + "$1T context = new $1T(session, defaultKeyspaceId, " + + "defaultExecutionProfileName, defaultExecutionProfile, customState)", DefaultMapperContext.class) .addStatement( "return new $T(context)", diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java index 5aec6667ec7..23f37cc51da 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java @@ -105,8 +105,8 @@ public Optional generate() { // Validate the arguments String keyspaceArgumentName = null; String tableArgumentName = null; - String executionProfileName = null; - boolean executionProfileIsClass = 
false; + String profileArgumentName = null; + boolean profileIsClass = false; for (VariableElement parameterElement : methodElement.getParameters()) { if (parameterElement.getAnnotation(DaoKeyspace.class) != null) { @@ -125,15 +125,11 @@ public Optional generate() { } } else if (parameterElement.getAnnotation(DaoProfile.class) != null) { - executionProfileName = - validateExecutionProfile( - parameterElement, executionProfileName, DaoProfile.class, context); - if (context - .getClassUtils() - .isSame(parameterElement.asType(), DriverExecutionProfile.class)) { - executionProfileIsClass = true; - } - if (executionProfileName == null) { + profileArgumentName = + validateExecutionProfile(parameterElement, profileArgumentName, context); + profileIsClass = + context.getClassUtils().isSame(parameterElement.asType(), DriverExecutionProfile.class); + if (profileArgumentName == null) { return Optional.empty(); } } else { @@ -151,22 +147,22 @@ public Optional generate() { return Optional.empty(); } } - boolean isCachedByKeyspaceAndTable = - (keyspaceArgumentName != null || tableArgumentName != null || executionProfileName != null); + boolean isCachedByMethodArguments = + (keyspaceArgumentName != null || tableArgumentName != null || profileArgumentName != null); TypeName returnTypeName = ClassName.get(methodElement.getReturnType()); String suggestedFieldName = methodElement.getSimpleName() + "Cache"; String fieldName = - isCachedByKeyspaceAndTable + isCachedByMethodArguments ? 
enclosingClass.addDaoMapField(suggestedFieldName, returnTypeName) : enclosingClass.addDaoSimpleField( suggestedFieldName, returnTypeName, daoImplementationName, isAsync); MethodSpec.Builder overridingMethodBuilder = GeneratedCodePatterns.override(methodElement); - if (isCachedByKeyspaceAndTable) { - // DaoCacheKey key = new DaoCacheKey(x, y) - // where x, y is either the name of the parameter or "(CqlIdentifier)null" + if (isCachedByMethodArguments) { + // DaoCacheKey key = new DaoCacheKey(, , , ) + // where ,
        is either the name of the parameter or "(CqlIdentifier)null" overridingMethodBuilder.addCode("$1T key = new $1T(", DaoCacheKey.class); if (keyspaceArgumentName == null) { overridingMethodBuilder.addCode("($T)null", CqlIdentifier.class); @@ -180,28 +176,23 @@ public Optional generate() { overridingMethodBuilder.addCode("$L", tableArgumentName); } overridingMethodBuilder.addCode(", "); - if (executionProfileName == null) { - overridingMethodBuilder.addCode("($T)null", String.class); + if (profileArgumentName == null) { + overridingMethodBuilder.addCode("null, null);\n"); } else { - - if (!executionProfileIsClass) { - overridingMethodBuilder.addCode("$L", executionProfileName); + if (profileIsClass) { + overridingMethodBuilder.addCode("null, $L);\n", profileArgumentName); } else { - overridingMethodBuilder.addCode("$L.getName()", executionProfileName); + overridingMethodBuilder.addCode("$L, null);\n", profileArgumentName); } } - overridingMethodBuilder.addCode(");\n"); - overridingMethodBuilder.addCode( + overridingMethodBuilder.addStatement( "return $L.computeIfAbsent(key, " - + "k -> $T.$L(context.withKeyspaceAndTable(k.getKeyspaceId(), k.getTableId())", + + "k -> $T.$L(context.withDaoParameters(k.getKeyspaceId(), k.getTableId(), " + + "k.getExecutionProfileName(), k.getExecutionProfile())))", fieldName, daoImplementationName, isAsync ? 
"initAsync" : "init"); - if (executionProfileName != null) { - overridingMethodBuilder.addCode(".withExecutionProfile($L)", executionProfileName); - } - overridingMethodBuilder.addCode("));\n"); } else { overridingMethodBuilder.addStatement("return $L.get()", fieldName); } @@ -232,8 +223,8 @@ private String validateKeyspaceOrTableParameter( } private String validateExecutionProfile( - VariableElement candidate, String previous, Class annotation, ProcessorContext context) { - if (!isSingleAnnotation(candidate, previous, annotation, context)) { + VariableElement candidate, String previous, ProcessorContext context) { + if (!isSingleAnnotation(candidate, previous, DaoProfile.class, context)) { return null; } TypeMirror type = candidate.asType(); @@ -245,7 +236,7 @@ private String validateExecutionProfile( candidate, processedType, "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s ", - annotation.getSimpleName(), + DaoProfile.class.getSimpleName(), DaoFactory.class.getSimpleName(), String.class.getSimpleName(), DriverExecutionProfile.class.getSimpleName()); diff --git a/mapper-runtime/revapi.json b/mapper-runtime/revapi.json index c9deecd0813..12704ddd974 100644 --- a/mapper-runtime/revapi.json +++ b/mapper-runtime/revapi.json @@ -60,8 +60,13 @@ { "code": "java.method.addedToInterface", "new": "method java.lang.String com.datastax.oss.driver.api.mapper.MapperContext::getExecutionProfileName()", - "justification": "JAVA-2633 Adding execution profile to mapper" + "justification": "JAVA-2633: Add execution profile argument to DAO factory method (accept API break -- it's unlikely that MapperContext will be implemented outside of the driver)" + }, + { + "code": "java.method.addedToInterface", + "new": "method com.datastax.oss.driver.api.core.config.DriverExecutionProfile com.datastax.oss.driver.api.mapper.MapperContext::getExecutionProfile()", + "justification": "JAVA-2633: Add execution profile argument to DAO factory method (accept 
API break -- it's unlikely that MapperContext will be implemented outside of the driver)" } ] } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java index ab87d1bd792..120e399f352 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; @@ -38,6 +39,8 @@ public abstract class MapperBuilder { protected final CqlSession session; protected CqlIdentifier defaultKeyspaceId; protected Map customState; + protected String defaultExecutionProfileName; + protected DriverExecutionProfile defaultExecutionProfile; protected MapperBuilder(CqlSession session) { this.session = session; @@ -95,6 +98,50 @@ public MapperBuilder withDefaultKeyspace(@Nullable String keyspaceName) return withDefaultKeyspace(keyspaceName == null ? null : CqlIdentifier.fromCql(keyspaceName)); } + /** + * Specifies a default execution profile name that will be used for all DAOs built with this + * mapper (unless they specify their own execution profile). + * + *

        This works the same way as the {@linkplain #withDefaultKeyspace(CqlIdentifier) default + * keyspace}. + * + *

        Note that if you had already set a profile with #withDefaultExecutionProfile, this method + * erases it. + * + * @see DaoFactory + */ + @NonNull + public MapperBuilder withDefaultExecutionProfileName( + @Nullable String executionProfileName) { + this.defaultExecutionProfileName = executionProfileName; + if (executionProfileName != null) { + this.defaultExecutionProfile = null; + } + return this; + } + + /** + * Specifies a default execution profile name that will be used for all DAOs built with this + * mapper (unless they specify their own execution profile). + * + *

        This works the same way as the {@linkplain #withDefaultKeyspace(CqlIdentifier) default + * keyspace}. + * + *

        Note that if you had already set a profile name with #withDefaultExecutionProfileName, this + * method erases it. + * + * @see DaoFactory + */ + @NonNull + public MapperBuilder withDefaultExecutionProfile( + @Nullable DriverExecutionProfile executionProfile) { + this.defaultExecutionProfile = executionProfile; + if (executionProfile != null) { + this.defaultExecutionProfileName = null; + } + return this; + } + /** * When the new instance of a class annotated with {@code @Dao} is created an automatic check for * schema validation is performed. It verifies if all {@code @Dao} entity fields are present in diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java index 2a73523ca6c..78858f729e5 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.mapper.entity.naming.NameConverter; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -46,12 +47,25 @@ public interface MapperContext { CqlIdentifier getTableId(); /** - * If this context belongs to a DAO that was built with a execution-profile-parameterized mapper - * method, the value of that parameter. Otherwise null. + * If this context belongs to a DAO that was built with a method that takes an execution profile + * name as parameter, the value of that parameter. Otherwise null. + * + *

        Note that this is mutually exclusive with {@link #getExecutionProfile()}: at most one of the + * two methods returns a non-null value (or both return null if no profile was provided). */ @Nullable String getExecutionProfileName(); + /** + * If this context belongs to a DAO that was built with a method that takes an execution profile + * as parameter, the value of that parameter. Otherwise null. + * + *

        Note that this is mutually exclusive with {@link #getExecutionProfileName()}: at most one of + * the two methods returns a non-null value (or both return null if no profile was provided). + */ + @Nullable + DriverExecutionProfile getExecutionProfile(); + /** * Returns an instance of the given converter class. * diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java index db5cd6b3f27..02009897909 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/DaoProfile.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -26,18 +27,16 @@ * *

        Example: * - *

        * - * *

          *  * @Mapper
          *  * public interface InventoryMapper {
        - *  *   ProductDao productDao(@DaoTable String executionProfile);
        + *  *   ProductDao productDao(@DaoProfile String executionProfile);
          *  * }
          *  * 
        * - * The annotated parameter can be a {@link String}. If it is present, the value will be injected in - * the DAO instance, where it will be used in generated queries. This allows you to reuse the same - * DAO for different execution profiles. + * The annotated parameter can be a {@link String} or {@link DriverExecutionProfile}. If it is + * present, the value will be injected in the DAO instance, where it will be used in generated + * queries. This allows you to reuse the same DAO for different execution profiles. * * @see DaoFactory */ diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index e51282e0705..d12ab19bfd3 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -60,6 +60,11 @@ public class DaoBase { protected static CompletionStage prepare( SimpleStatement statement, MapperContext context) { + if (context.getExecutionProfileName() != null) { + statement = statement.setExecutionProfileName(context.getExecutionProfileName()); + } else if (context.getExecutionProfile() != null) { + statement = statement.setExecutionProfile(context.getExecutionProfile()); + } return context.getSession().prepareAsync(statement); } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java index 535da277341..ee6b8f52ac9 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoCacheKey.java @@ -16,30 +16,49 @@ package com.datastax.oss.driver.internal.mapper; import com.datastax.oss.driver.api.core.CqlIdentifier; +import 
com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import java.util.Objects; public class DaoCacheKey { private final CqlIdentifier keyspaceId; private final CqlIdentifier tableId; - private final String profileName; + private final String executionProfileName; + private final DriverExecutionProfile executionProfile; - public DaoCacheKey(CqlIdentifier keyspaceId, CqlIdentifier tableId, String profileName) { + public DaoCacheKey( + CqlIdentifier keyspaceId, + CqlIdentifier tableId, + String executionProfileName, + DriverExecutionProfile executionProfile) { this.keyspaceId = keyspaceId; this.tableId = tableId; - this.profileName = profileName; + this.executionProfileName = executionProfileName; + this.executionProfile = executionProfile; } - public DaoCacheKey(CqlIdentifier keyspaceId, String tableName, String profileName) { - this(keyspaceId, toId(tableName), profileName); + public DaoCacheKey( + CqlIdentifier keyspaceId, + String tableName, + String executionProfileName, + DriverExecutionProfile executionProfile) { + this(keyspaceId, toId(tableName), executionProfileName, executionProfile); } - public DaoCacheKey(String keyspaceName, CqlIdentifier tableId, String profileName) { - this(toId(keyspaceName), tableId, profileName); + public DaoCacheKey( + String keyspaceName, + CqlIdentifier tableId, + String executionProfileName, + DriverExecutionProfile executionProfile) { + this(toId(keyspaceName), tableId, executionProfileName, executionProfile); } - public DaoCacheKey(String keyspaceName, String tableName, String profileName) { - this(toId(keyspaceName), toId(tableName), profileName); + public DaoCacheKey( + String keyspaceName, + String tableName, + String executionProfileName, + DriverExecutionProfile executionProfile) { + this(toId(keyspaceName), toId(tableName), executionProfileName, executionProfile); } private static CqlIdentifier toId(String name) { @@ -54,6 +73,14 @@ public CqlIdentifier getTableId() { return tableId; } + public String 
getExecutionProfileName() { + return executionProfileName; + } + + public DriverExecutionProfile getExecutionProfile() { + return executionProfile; + } + @Override public boolean equals(Object other) { if (other == this) { @@ -62,7 +89,8 @@ public boolean equals(Object other) { DaoCacheKey that = (DaoCacheKey) other; return Objects.equals(this.keyspaceId, that.keyspaceId) && Objects.equals(this.tableId, that.tableId) - && Objects.equals(this.profileName, that.profileName); + && Objects.equals(this.executionProfileName, that.executionProfileName) + && Objects.equals(this.executionProfile, that.executionProfile); } else { return false; } @@ -70,6 +98,6 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(keyspaceId, tableId, profileName); + return Objects.hash(keyspaceId, tableId, executionProfileName, executionProfile); } } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index 0af237a4b82..7631394d630 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -35,76 +35,66 @@ public class DefaultMapperContext implements MapperContext { private final CqlSession session; private final CqlIdentifier keyspaceId; private final CqlIdentifier tableId; - private final String executionProfile; + private final String executionProfileName; + private final DriverExecutionProfile executionProfile; private final ConcurrentMap, NameConverter> nameConverterCache; private final Map customState; public DefaultMapperContext( @NonNull CqlSession session, @Nullable CqlIdentifier keyspaceId, + @Nullable String executionProfileName, + @Nullable DriverExecutionProfile executionProfile, @NonNull Map customState) { this( session, keyspaceId, 
null, - null, - new ConcurrentHashMap<>(), - NullAllowingImmutableMap.copyOf(customState)); - } - - public DefaultMapperContext( - @NonNull CqlSession session, - @Nullable CqlIdentifier keyspaceId, - @Nullable String executionProfile, - @NonNull Map customState) { - this( - session, - keyspaceId, - null, + executionProfileName, executionProfile, new ConcurrentHashMap<>(), NullAllowingImmutableMap.copyOf(customState)); } - public DefaultMapperContext( - @NonNull CqlSession session, @NonNull Map customState) { - this(session, null, customState); - } - private DefaultMapperContext( CqlSession session, CqlIdentifier keyspaceId, CqlIdentifier tableId, - String executionProfile, + String executionProfileName, + DriverExecutionProfile executionProfile, ConcurrentMap, NameConverter> nameConverterCache, Map customState) { + if (executionProfile != null && executionProfileName != null) { + // the mapper code prevents this, so we should never get here + throw new IllegalArgumentException("Can't provide both a profile and a name"); + } this.session = session; this.keyspaceId = keyspaceId; this.tableId = tableId; this.nameConverterCache = nameConverterCache; this.customState = customState; + this.executionProfileName = executionProfileName; this.executionProfile = executionProfile; } - public DefaultMapperContext withKeyspaceAndTable( - @Nullable CqlIdentifier newKeyspaceId, @Nullable CqlIdentifier newTableId) { + public DefaultMapperContext withDaoParameters( + @Nullable CqlIdentifier newKeyspaceId, + @Nullable CqlIdentifier newTableId, + @Nullable String newExecutionProfileName, + @Nullable DriverExecutionProfile newExecutionProfile) { return (Objects.equals(newKeyspaceId, this.keyspaceId) - && Objects.equals(newTableId, this.tableId)) - ? 
this - : new DefaultMapperContext( - session, newKeyspaceId, newTableId, null, nameConverterCache, customState); - } - - public DefaultMapperContext withExecutionProfile(@Nullable String newExecutionProfile) { - return newExecutionProfile.equals(this.executionProfile) + && Objects.equals(newTableId, this.tableId) + && Objects.equals(newExecutionProfileName, this.executionProfileName) + && Objects.equals(newExecutionProfile, this.executionProfile)) ? this : new DefaultMapperContext( - session, keyspaceId, tableId, newExecutionProfile, nameConverterCache, customState); - } - - public DefaultMapperContext withExecutionProfile( - @Nullable DriverExecutionProfile newExecutionProfile) { - return withExecutionProfile(newExecutionProfile.getName()); + session, + newKeyspaceId, + newTableId, + newExecutionProfileName, + newExecutionProfile, + nameConverterCache, + customState); } @NonNull @@ -128,6 +118,12 @@ public CqlIdentifier getTableId() { @Nullable @Override public String getExecutionProfileName() { + return executionProfileName; + } + + @Nullable + @Override + public DriverExecutionProfile getExecutionProfile() { return executionProfile; } From 5ee71a3477bee2b75580f921bdb42d6e950b544e Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 20 Apr 2020 11:16:47 -0700 Subject: [PATCH 450/979] Add 'add' and 'decommission' commands to CcmBridge Co-authored-by: Alexandre Dutra --- .../datastax/oss/driver/api/testinfra/ccm/CcmBridge.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index b868cbf851e..8e37612df6a 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -298,6 +298,15 @@ public void stop(int n) { execute("node" + n, "stop"); } + public void add(int n, String dc) 
{ + execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n); + start(n); + } + + public void decommission(int n) { + nodetool(n, "decommission"); + } + synchronized void execute(String... args) { String command = "ccm " From 50e4d1bbc8403a4b187a1cde2978ac0068890c44 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 19 Apr 2020 23:42:16 +0200 Subject: [PATCH 451/979] JAVA-2744: Recompute token map when node is added --- changelog/README.md | 1 + .../core/metadata/AddNodeRefresh.java | 4 +- .../core/metadata/FullNodeListRefresh.java | 2 +- .../core/metadata/InitialNodeListRefresh.java | 2 +- .../core/metadata/MetadataManager.java | 2 +- .../internal/core/metadata/NodesRefresh.java | 8 +- .../oss/driver/core/session/AddedNodeIT.java | 86 +++++++++++++++++++ .../driver/core/session/RemovedNodeIT.java | 17 +++- 8 files changed, 108 insertions(+), 14 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/session/AddedNodeIT.java diff --git a/changelog/README.md b/changelog/README.md index 0b7d3125092..967e2765968 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [bug] JAVA-2744: Recompute token map when node is added - [new feature] JAVA-2614: Provide a utility to emulate offset paging on the client side - [new feature] JAVA-2718: Warn when the number of sessions exceeds a configurable threshold - [improvement] JAVA-2664: Add a callback to inject the session in listeners diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java index 19868e8dc04..e8806b7651a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/AddNodeRefresh.java @@ -40,7 +40,7 @@ public Result compute( Node existing = oldNodes.get(newNodeInfo.getHostId()); if 
(existing == null) { DefaultNode newNode = new DefaultNode(newNodeInfo.getEndPoint(), context); - copyInfos(newNodeInfo, newNode, null, context); + copyInfos(newNodeInfo, newNode, context); Map newNodes = ImmutableMap.builder() .putAll(oldNodes) @@ -54,7 +54,7 @@ public Result compute( // an addition, even though the host_id hasn't changed :( // Update the existing instance and emit an UP event to trigger a pool reconnection. if (!existing.getEndPoint().equals(newNodeInfo.getEndPoint())) { - copyInfos(newNodeInfo, ((DefaultNode) existing), null, context); + copyInfos(newNodeInfo, ((DefaultNode) existing), context); assert newNodeInfo.getBroadcastRpcAddress().isPresent(); // always for peer nodes return new Result( oldMetadata, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java index 665d9b264d9..4137a1e2878 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java @@ -72,7 +72,7 @@ public Result compute( if (tokenFactory == null && nodeInfo.getPartitioner() != null) { tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); } - tokensChanged |= copyInfos(nodeInfo, node, tokenFactory, context); + tokensChanged |= copyInfos(nodeInfo, node, context); } Set removed = Sets.difference(oldNodes.keySet(), seen); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java index 676724de327..2fe272e80bd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java @@ -72,7 +72,7 @@ 
public Result compute( if (tokenMapEnabled && tokenFactory == null && nodeInfo.getPartitioner() != null) { tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); } - copyInfos(nodeInfo, node, tokenFactory, context); + copyInfos(nodeInfo, node, context); newNodesBuilder.put(node.getHostId(), node); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 39586b7bfeb..6db1db038bd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -174,7 +174,7 @@ public CompletionStage refreshNode(Node node) { maybeInfo -> { if (maybeInfo.isPresent()) { boolean tokensChanged = - NodesRefresh.copyInfos(maybeInfo.get(), (DefaultNode) node, null, context); + NodesRefresh.copyInfos(maybeInfo.get(), (DefaultNode) node, context); if (tokensChanged) { apply(new TokensChangedRefresh()); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java index 015ad1ad070..868fc53712e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodesRefresh.java @@ -17,7 +17,6 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.Collections; import java.util.Objects; @@ -35,10 +34,7 @@ abstract class NodesRefresh implements MetadataRefresh { * mutate the tokens in-place, so there is no way to check this after the fact). 
*/ protected static boolean copyInfos( - NodeInfo nodeInfo, - DefaultNode node, - TokenFactory tokenFactory, - InternalDriverContext context) { + NodeInfo nodeInfo, DefaultNode node, InternalDriverContext context) { node.setEndPoint(nodeInfo.getEndPoint(), context); node.broadcastRpcAddress = nodeInfo.getBroadcastRpcAddress().orElse(null); @@ -58,7 +54,7 @@ protected static boolean copyInfos( versionString, node.getEndPoint()); } - boolean tokensChanged = tokenFactory != null && !node.rawTokens.equals(nodeInfo.getTokens()); + boolean tokensChanged = !node.rawTokens.equals(nodeInfo.getTokens()); if (tokensChanged) { node.rawTokens = nodeInfo.getTokens(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/AddedNodeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/AddedNodeIT.java new file mode 100644 index 00000000000..0d0edb0c6f7 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/AddedNodeIT.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.session; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.token.TokenRange; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.DefaultSession; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import org.junit.ClassRule; +import org.junit.Test; + +public class AddedNodeIT { + + @ClassRule + public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).build(); + + @Test + public void should_signal_and_create_pool_when_node_gets_added() { + AddListener addListener = new AddListener(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, null, addListener, null, null)) { + assertThat(session.getMetadata().getTokenMap()).isPresent(); + Set tokenRanges = session.getMetadata().getTokenMap().get().getTokenRanges(); + assertThat(tokenRanges).hasSize(3); + CCM_RULE.getCcmBridge().add(4, "dc1"); + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(60, TimeUnit.SECONDS) + .until(() -> addListener.addedNode != null); + Map pools = ((DefaultSession) session).getPools(); + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(60, TimeUnit.SECONDS) + .until(() -> pools.containsKey(addListener.addedNode)); + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(60, TimeUnit.SECONDS) + .until(() -> session.getMetadata().getTokenMap().get().getTokenRanges().size() == 4); + } + } + + static class AddListener implements 
NodeStateListener { + + volatile Node addedNode; + + @Override + public void onRemove(@NonNull Node node) {} + + @Override + public void onAdd(@NonNull Node node) { + addedNode = node; + } + + @Override + public void onUp(@NonNull Node node) {} + + @Override + public void onDown(@NonNull Node node) {} + + @Override + public void close() throws Exception {} + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java index 6fe60ffa74e..ca26e079491 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RemovedNodeIT.java @@ -15,16 +15,20 @@ */ package com.datastax.oss.driver.core.session; +import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.token.TokenRange; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.pool.ChannelPool; import com.datastax.oss.driver.internal.core.session.DefaultSession; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import org.junit.ClassRule; import org.junit.Test; @@ -42,18 +46,25 @@ public class RemovedNodeIT { @Test public void should_signal_and_destroy_pool_when_node_gets_removed() { RemovalListener removalListener = new RemovalListener(); - try (CqlSession session = CqlSession.builder().withNodeStateListener(removalListener).build()) { - CCM_RULE.getCcmBridge().nodetool(2, "decommission"); + try 
(CqlSession session = + SessionUtils.newSession(CCM_RULE, null, removalListener, null, null)) { + assertThat(session.getMetadata().getTokenMap()).isPresent(); + Set tokenRanges = session.getMetadata().getTokenMap().get().getTokenRanges(); + assertThat(tokenRanges).hasSize(4); + CCM_RULE.getCcmBridge().decommission(2); await() .pollInterval(500, TimeUnit.MILLISECONDS) .atMost(60, TimeUnit.SECONDS) .until(() -> removalListener.removedNode != null); - Map pools = ((DefaultSession) session).getPools(); await() .pollInterval(500, TimeUnit.MILLISECONDS) .atMost(60, TimeUnit.SECONDS) .until(() -> !pools.containsKey(removalListener.removedNode)); + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(60, TimeUnit.SECONDS) + .until(() -> session.getMetadata().getTokenMap().get().getTokenRanges().size() == 3); } } From e3dff88cdc763551eba5dd846cc7b28321209395 Mon Sep 17 00:00:00 2001 From: Zain Malik Date: Sat, 18 Apr 2020 18:23:53 +0200 Subject: [PATCH 452/979] JAVA-2740: Extend peer validity check to include datacenter, rack and tokens --- changelog/README.md | 1 + .../internal/core/adminrequest/AdminRow.java | 9 ++++ .../core/metadata/DefaultTopologyMonitor.java | 13 +++-- .../metadata/DefaultTopologyMonitorTest.java | 54 +++++++++++-------- 4 files changed, 50 insertions(+), 27 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 967e2765968..05664bbd7c7 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [bug] JAVA-2740: Extend peer validity check to include datacenter, rack and tokens - [bug] JAVA-2744: Recompute token map when node is added - [new feature] JAVA-2614: Provide a utility to emulate offset paging on the client side - [new feature] JAVA-2718: Warn when the number of sessions exceeds a configurable threshold diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRow.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRow.java index efcb3ce80d9..c4c98a79bd5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRow.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRow.java @@ -100,6 +100,15 @@ public Map getMapOfStringToString(String columnName) { return get(columnName, MAP_OF_STRING_TO_STRING); } + public boolean isNull(String columnName) { + if (!contains(columnName)) { + return true; + } else { + int index = columnSpecs.get(columnName).index; + return data.get(index) == null; + } + } + public boolean contains(String columnName) { return columnSpecs.containsKey(columnName); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index e61f77d67dc..87585199b77 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -539,13 +539,16 @@ protected InetSocketAddress getBroadcastRpcAddress( * node's broadcast RPC address and host ID; otherwise the driver may not work properly. 
*/ protected boolean isPeerValid(AdminRow peerRow) { - boolean hasPeersRpcAddress = peerRow.getInetAddress("rpc_address") != null; + boolean hasPeersRpcAddress = !peerRow.isNull("rpc_address"); boolean hasPeersV2RpcAddress = - peerRow.getInetAddress("native_address") != null - && peerRow.getInteger("native_port") != null; + !peerRow.isNull("native_address") && !peerRow.isNull("native_port"); boolean hasRpcAddress = hasPeersV2RpcAddress || hasPeersRpcAddress; - boolean hasHostId = peerRow.getUuid("host_id") != null; - boolean valid = hasRpcAddress && hasHostId; + boolean valid = + hasRpcAddress + && !peerRow.isNull("host_id") + && !peerRow.isNull("data_center") + && !peerRow.isNull("rack") + && !peerRow.isNull("tokens"); if (!valid) { LOG.warn( "[{}] Found invalid row in {} for peer: {}. " diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java index 1b10a647611..f989ea14de5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java @@ -228,7 +228,7 @@ public void should_refresh_node_from_peers_if_broadcast_address_is_not_present() verify(peer3).getUuid("host_id"); verify(peer3, never()).getString(anyString()); - verify(peer2, times(3)).getUuid("host_id"); + verify(peer2, times(2)).getUuid("host_id"); verify(peer2).getString("data_center"); } @@ -258,7 +258,7 @@ public void should_refresh_node_from_peers_if_broadcast_address_is_not_present_V verify(peer3).getUuid("host_id"); verify(peer3, never()).getString(anyString()); - verify(peer2, times(3)).getUuid("host_id"); + verify(peer2, times(2)).getUuid("host_id"); verify(peer2).getString("data_center"); } @@ -291,7 +291,7 @@ public void should_get_new_node_from_peers() { 
verify(peer2).getInetAddress("rpc_address"); verify(peer2, never()).getString(anyString()); - verify(peer1, times(2)).getInetAddress("rpc_address"); + verify(peer1).getInetAddress("rpc_address"); verify(peer1).getString("data_center"); } @@ -324,7 +324,7 @@ public void should_get_new_node_from_peers_v2() { verify(peer2).getInetAddress("native_address"); verify(peer2, never()).getString(anyString()); - verify(peer1, times(2)).getInetAddress("native_address"); + verify(peer1).getInetAddress("native_address"); verify(peer1).getString("data_center"); } @@ -367,11 +367,7 @@ public void should_skip_invalid_peers_row(String columnToCheck) { topologyMonitor.isSchemaV2 = false; node2.broadcastAddress = ADDRESS2; AdminRow peer2 = mockPeersRow(2, node2.getHostId()); - if (columnToCheck.equals("rpc_address")) { - when(peer2.getInetAddress(columnToCheck)).thenReturn(null); - } else if (columnToCheck.equals("host_id")) { - when(peer2.getUuid(columnToCheck)).thenReturn(null); - } + when(peer2.isNull(columnToCheck)).thenReturn(true); topologyMonitor.stubQueries( new StubbedQuery( "SELECT * FROM system.peers WHERE peer = :address", @@ -397,17 +393,7 @@ public void should_skip_invalid_peers_row_v2(String columnToCheck) { topologyMonitor.isSchemaV2 = true; node2.broadcastAddress = ADDRESS2; AdminRow peer2 = mockPeersV2Row(2, node2.getHostId()); - switch (columnToCheck) { - case "native_address": - when(peer2.getInetAddress(columnToCheck)).thenReturn(null); - break; - case "native_port": - when(peer2.getInteger(columnToCheck)).thenReturn(null); - break; - case "host_id": - when(peer2.getUuid(columnToCheck)).thenReturn(null); - break; - } + when(peer2.isNull(columnToCheck)).thenReturn(true); topologyMonitor.stubQueries( new StubbedQuery( "SELECT * FROM system.peers_v2 WHERE peer = :address and peer_port = :port", @@ -428,12 +414,14 @@ public void should_skip_invalid_peers_row_v2(String columnToCheck) { @DataProvider public static Object[][] columnsToCheckV1() { - return new Object[][] 
{{"rpc_address"}, {"host_id"}}; + return new Object[][] {{"rpc_address"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"}}; } @DataProvider public static Object[][] columnsToCheckV2() { - return new Object[][] {{"native_address"}, {"native_port"}, {"host_id"}}; + return new Object[][] { + {"native_address"}, {"native_port"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"} + }; } @Test @@ -568,18 +556,23 @@ private StubbedQuery(String queryString, AdminResult result) { private AdminRow mockLocalRow(int i, UUID hostId) { try { AdminRow row = mock(AdminRow.class); + when(row.isNull("host_id")).thenReturn(hostId == null); when(row.getUuid("host_id")).thenReturn(hostId); when(row.getInetAddress("broadcast_address")) .thenReturn(InetAddress.getByName("127.0.0." + i)); + when(row.isNull("data_center")).thenReturn(false); when(row.getString("data_center")).thenReturn("dc" + i); when(row.getInetAddress("listen_address")).thenReturn(InetAddress.getByName("127.0.0." + i)); + when(row.isNull("rack")).thenReturn(false); when(row.getString("rack")).thenReturn("rack" + i); when(row.getString("release_version")).thenReturn("release_version" + i); // The driver should not use this column for the local row, because it can contain the // non-broadcast RPC address. Simulate the bug to ensure it's handled correctly. + when(row.isNull("rpc_address")).thenReturn(false); when(row.getInetAddress("rpc_address")).thenReturn(InetAddress.getByName("0.0.0.0")); + when(row.isNull("tokens")).thenReturn(false); when(row.getSetOfString("tokens")).thenReturn(ImmutableSet.of("token" + i)); when(row.contains("peer")).thenReturn(false); return row; @@ -592,14 +585,23 @@ private AdminRow mockLocalRow(int i, UUID hostId) { private AdminRow mockPeersRow(int i, UUID hostId) { try { AdminRow row = mock(AdminRow.class); + when(row.isNull("host_id")).thenReturn(hostId == null); when(row.getUuid("host_id")).thenReturn(hostId); when(row.getInetAddress("peer")).thenReturn(InetAddress.getByName("127.0.0." 
+ i)); + when(row.isNull("data_center")).thenReturn(false); when(row.getString("data_center")).thenReturn("dc" + i); + when(row.isNull("rack")).thenReturn(false); when(row.getString("rack")).thenReturn("rack" + i); when(row.getString("release_version")).thenReturn("release_version" + i); + when(row.isNull("rpc_address")).thenReturn(false); when(row.getInetAddress("rpc_address")).thenReturn(InetAddress.getByName("127.0.0." + i)); + when(row.isNull("tokens")).thenReturn(false); when(row.getSetOfString("tokens")).thenReturn(ImmutableSet.of("token" + i)); when(row.contains("peer")).thenReturn(true); + + when(row.isNull("native_address")).thenReturn(true); + when(row.isNull("native_port")).thenReturn(true); + return row; } catch (UnknownHostException e) { fail("unexpected", e); @@ -610,18 +612,26 @@ private AdminRow mockPeersRow(int i, UUID hostId) { private AdminRow mockPeersV2Row(int i, UUID hostId) { try { AdminRow row = mock(AdminRow.class); + when(row.isNull("host_id")).thenReturn(hostId == null); when(row.getUuid("host_id")).thenReturn(hostId); when(row.getInetAddress("peer")).thenReturn(InetAddress.getByName("127.0.0." + i)); when(row.getInteger("peer_port")).thenReturn(7000 + i); + when(row.isNull("data_center")).thenReturn(false); when(row.getString("data_center")).thenReturn("dc" + i); + when(row.isNull("rack")).thenReturn(false); when(row.getString("rack")).thenReturn("rack" + i); when(row.getString("release_version")).thenReturn("release_version" + i); + when(row.isNull("native_address")).thenReturn(false); when(row.getInetAddress("native_address")).thenReturn(InetAddress.getByName("127.0.0." 
+ i)); + when(row.isNull("native_port")).thenReturn(false); when(row.getInteger("native_port")).thenReturn(9042); + when(row.isNull("tokens")).thenReturn(false); when(row.getSetOfString("tokens")).thenReturn(ImmutableSet.of("token" + i)); when(row.contains("peer")).thenReturn(true); when(row.contains("peer_port")).thenReturn(true); when(row.contains("native_port")).thenReturn(true); + + when(row.isNull("rpc_address")).thenReturn(true); return row; } catch (UnknownHostException e) { fail("unexpected", e); From 434660e522e6bd47e4806981894cedcc64d8f99f Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 22 Apr 2020 15:18:48 -0700 Subject: [PATCH 453/979] Handle duplicate host_ids in node list refreshes --- .../core/metadata/FullNodeListRefresh.java | 28 ++-- .../core/metadata/InitialNodeListRefresh.java | 36 ++-- .../metadata/FullNodeListRefreshTest.java | 46 ++++++ .../metadata/InitialNodeListRefreshTest.java | 154 ++++++++++++++++++ .../core/metadata/TestNodeFactory.java | 10 +- 5 files changed, 248 insertions(+), 26 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java index 4137a1e2878..14496bb3399 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefresh.java @@ -62,17 +62,25 @@ public Result compute( for (NodeInfo nodeInfo : nodeInfos) { UUID id = nodeInfo.getHostId(); - seen.add(id); - DefaultNode node = (DefaultNode) oldNodes.get(id); - if (node == null) { - node = new DefaultNode(nodeInfo.getEndPoint(), context); - LOG.debug("[{}] Adding new node {}", logPrefix, node); - added.put(id, node); - } - if (tokenFactory == null && nodeInfo.getPartitioner() != null) 
{ - tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); + if (seen.contains(id)) { + LOG.warn( + "[{}] Found duplicate entries with host_id {} in system.peers, " + + "keeping only the first one", + logPrefix, + id); + } else { + seen.add(id); + DefaultNode node = (DefaultNode) oldNodes.get(id); + if (node == null) { + node = new DefaultNode(nodeInfo.getEndPoint(), context); + LOG.debug("[{}] Adding new node {}", logPrefix, node); + added.put(id, node); + } + if (tokenFactory == null && nodeInfo.getPartitioner() != null) { + tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); + } + tokensChanged |= copyInfos(nodeInfo, node, context); } - tokensChanged |= copyInfos(nodeInfo, node, context); } Set removed = Sets.difference(oldNodes.keySet(), seen); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java index 2fe272e80bd..e676d9eb2ee 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java @@ -23,6 +23,8 @@ import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.Map; import java.util.Set; import java.util.UUID; import net.jcip.annotations.ThreadSafe; @@ -58,25 +60,33 @@ public Result compute( assert oldMetadata == DefaultMetadata.EMPTY; TokenFactory tokenFactory = null; - ImmutableMap.Builder newNodesBuilder = ImmutableMap.builder(); + Map newNodes = new HashMap<>(); for (NodeInfo nodeInfo : nodeInfos) { - EndPoint endPoint = nodeInfo.getEndPoint(); - DefaultNode node = findIn(contactPoints, endPoint); - if 
(node == null) { - node = new DefaultNode(endPoint, context); - LOG.debug("[{}] Adding new node {}", logPrefix, node); + UUID hostId = nodeInfo.getHostId(); + if (newNodes.containsKey(hostId)) { + LOG.warn( + "[{}] Found duplicate entries with host_id {} in system.peers, " + + "keeping only the first one", + logPrefix, + hostId); } else { - LOG.debug("[{}] Copying contact point {}", logPrefix, node); + EndPoint endPoint = nodeInfo.getEndPoint(); + DefaultNode node = findIn(contactPoints, endPoint); + if (node == null) { + node = new DefaultNode(endPoint, context); + LOG.debug("[{}] Adding new node {}", logPrefix, node); + } else { + LOG.debug("[{}] Copying contact point {}", logPrefix, node); + } + if (tokenMapEnabled && tokenFactory == null && nodeInfo.getPartitioner() != null) { + tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); + } + copyInfos(nodeInfo, node, context); + newNodes.put(hostId, node); } - if (tokenMapEnabled && tokenFactory == null && nodeInfo.getPartitioner() != null) { - tokenFactory = tokenFactoryRegistry.tokenFactoryFor(nodeInfo.getPartitioner()); - } - copyInfos(nodeInfo, node, context); - newNodesBuilder.put(node.getHostId(), node); } - ImmutableMap newNodes = newNodesBuilder.build(); ImmutableList.Builder eventsBuilder = ImmutableList.builder(); for (DefaultNode newNode : newNodes.values()) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java index c4f8ee22e24..1c0185f3882 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/FullNodeListRefreshTest.java @@ -129,4 +129,50 @@ public void should_update_existing_nodes() { assertThat(node2.getSchemaVersion()).isEqualTo(schemaVersion2); assertThat(result.events).isEmpty(); } + + @Test + 
public void should_ignore_duplicate_host_ids() { + // Given + DefaultMetadata oldMetadata = + new DefaultMetadata( + ImmutableMap.of(node1.getHostId(), node1, node2.getHostId(), node2), + Collections.emptyMap(), + null, + null); + + Iterable newInfos = + ImmutableList.of( + DefaultNodeInfo.builder() + .withEndPoint(node1.getEndPoint()) + .withDatacenter("dc1") + .withRack("rack1") + .withHostId(node1.getHostId()) + .build(), + DefaultNodeInfo.builder() + .withEndPoint(node2.getEndPoint()) + .withDatacenter("dc1") + .withRack("rack2") + .withHostId(node2.getHostId()) + .build(), + // Duplicate host id for node 2, should be ignored: + DefaultNodeInfo.builder() + .withEndPoint(node2.getEndPoint()) + .withDatacenter("dc1") + .withRack("rack3") + .withHostId(node2.getHostId()) + .build()); + FullNodeListRefresh refresh = new FullNodeListRefresh(newInfos); + + // When + MetadataRefresh.Result result = refresh.compute(oldMetadata, false, context); + + // Then + assertThat(result.newMetadata.getNodes()) + .containsOnlyKeys(node1.getHostId(), node2.getHostId()); + assertThat(node1.getDatacenter()).isEqualTo("dc1"); + assertThat(node1.getRack()).isEqualTo("rack1"); + assertThat(node2.getDatacenter()).isEqualTo("dc1"); + assertThat(node2.getRack()).isEqualTo("rack2"); + assertThat(result.events).isEmpty(); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java new file mode 100644 index 00000000000..85c29fc3bc1 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java @@ -0,0 +1,154 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.channel.ChannelFactory; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Map; +import java.util.UUID; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class InitialNodeListRefreshTest { + + @Mock private InternalDriverContext context; + @Mock protected MetricsFactory metricsFactory; + @Mock private ChannelFactory channelFactory; + + private DefaultNode contactPoint1; + private DefaultNode contactPoint2; + private EndPoint endPoint3; + private UUID hostId1; + private UUID hostId2; + private UUID hostId3; + + @Before + public void setup() { + when(context.getMetricsFactory()).thenReturn(metricsFactory); + when(context.getChannelFactory()).thenReturn(channelFactory); + + contactPoint1 = TestNodeFactory.newContactPoint(1, context); + contactPoint2 = TestNodeFactory.newContactPoint(2, context); + + endPoint3 = 
TestNodeFactory.newEndPoint(3); + hostId1 = UUID.randomUUID(); + hostId2 = UUID.randomUUID(); + hostId3 = UUID.randomUUID(); + } + + @Test + public void should_copy_contact_points() { + // Given + Iterable newInfos = + ImmutableList.of( + DefaultNodeInfo.builder() + .withEndPoint(contactPoint1.getEndPoint()) + // in practice there are more fields, but hostId is enough to validate the logic + .withHostId(hostId1) + .build(), + DefaultNodeInfo.builder() + .withEndPoint(contactPoint2.getEndPoint()) + .withHostId(hostId2) + .build()); + InitialNodeListRefresh refresh = + new InitialNodeListRefresh(newInfos, ImmutableSet.of(contactPoint1, contactPoint2)); + + // When + MetadataRefresh.Result result = refresh.compute(DefaultMetadata.EMPTY, false, context); + + // Then + // contact points have been copied to the metadata, and completed with missing information + Map newNodes = result.newMetadata.getNodes(); + assertThat(newNodes).containsOnlyKeys(hostId1, hostId2); + assertThat(newNodes.get(hostId1)).isEqualTo(contactPoint1); + assertThat(contactPoint1.getHostId()).isEqualTo(hostId1); + assertThat(newNodes.get(hostId2)).isEqualTo(contactPoint2); + assertThat(contactPoint2.getHostId()).isEqualTo(hostId2); + } + + @Test + public void should_add_other_nodes() { + // Given + Iterable newInfos = + ImmutableList.of( + DefaultNodeInfo.builder() + .withEndPoint(contactPoint1.getEndPoint()) + // in practice there are more fields, but hostId is enough to validate the logic + .withHostId(hostId1) + .build(), + DefaultNodeInfo.builder() + .withEndPoint(contactPoint2.getEndPoint()) + .withHostId(hostId2) + .build(), + DefaultNodeInfo.builder().withEndPoint(endPoint3).withHostId(hostId3).build()); + InitialNodeListRefresh refresh = + new InitialNodeListRefresh(newInfos, ImmutableSet.of(contactPoint1, contactPoint2)); + + // When + MetadataRefresh.Result result = refresh.compute(DefaultMetadata.EMPTY, false, context); + + // Then + // new node created in addition to the contact points + 
Map newNodes = result.newMetadata.getNodes(); + assertThat(newNodes).containsOnlyKeys(hostId1, hostId2, hostId3); + Node node3 = newNodes.get(hostId3); + assertThat(node3.getEndPoint()).isEqualTo(endPoint3); + assertThat(node3.getHostId()).isEqualTo(hostId3); + } + + @Test + public void should_ignore_duplicate_host_ids() { + // Given + Iterable newInfos = + ImmutableList.of( + DefaultNodeInfo.builder() + .withEndPoint(contactPoint1.getEndPoint()) + // in practice there are more fields, but hostId is enough to validate the logic + .withHostId(hostId1) + .withDatacenter("dc1") + .build(), + DefaultNodeInfo.builder() + .withEndPoint(contactPoint1.getEndPoint()) + .withDatacenter("dc2") + .withHostId(hostId1) + .build()); + InitialNodeListRefresh refresh = + new InitialNodeListRefresh(newInfos, ImmutableSet.of(contactPoint1)); + + // When + MetadataRefresh.Result result = refresh.compute(DefaultMetadata.EMPTY, false, context); + + // Then + // only the first nodeInfo should have been copied + Map newNodes = result.newMetadata.getNodes(); + assertThat(newNodes).containsOnlyKeys(hostId1); + assertThat(newNodes.get(hostId1)).isEqualTo(contactPoint1); + assertThat(contactPoint1.getHostId()).isEqualTo(hostId1); + assertThat(contactPoint1.getDatacenter()).isEqualTo("dc1"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java index 3866bbf8ddb..54ab7755c51 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java @@ -22,13 +22,17 @@ public class TestNodeFactory { public static DefaultNode newNode(int lastIpByte, InternalDriverContext context) { - DefaultEndPoint endPoint = newEndPoint(lastIpByte); - DefaultNode node = new DefaultNode(endPoint, context); + DefaultNode node = newContactPoint(lastIpByte, 
context); node.hostId = UUID.randomUUID(); - node.broadcastRpcAddress = endPoint.resolve(); + node.broadcastRpcAddress = ((InetSocketAddress) node.getEndPoint().resolve()); return node; } + public static DefaultNode newContactPoint(int lastIpByte, InternalDriverContext context) { + DefaultEndPoint endPoint = newEndPoint(lastIpByte); + return new DefaultNode(endPoint, context); + } + public static DefaultEndPoint newEndPoint(int lastByteOfIp) { return new DefaultEndPoint(new InetSocketAddress("127.0.0." + lastByteOfIp, 9042)); } From 62668ef458c87a710d11af17ed873be40c0b02a8 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 16 Apr 2020 17:42:30 -0500 Subject: [PATCH 454/979] JAVA-2741: Make keyspace/table metadata impls serializable --- changelog/README.md | 1 + .../schema/DefaultDseEdgeMetadata.java | 5 +- .../schema/DefaultDseKeyspaceMetadata.java | 5 +- .../schema/DefaultDseTableMetadata.java | 5 +- .../schema/DefaultDseVertexMetadata.java | 5 +- .../schema/DefaultDseViewMetadata.java | 5 +- .../metadata/schema/FunctionSignature.java | 6 +- .../schema/DefaultAggregateMetadata.java | 40 +++--- .../schema/DefaultColumnMetadata.java | 6 +- .../schema/DefaultFunctionMetadata.java | 5 +- .../metadata/schema/DefaultIndexMetadata.java | 5 +- .../schema/DefaultKeyspaceMetadata.java | 5 +- .../metadata/schema/DefaultTableMetadata.java | 5 +- .../metadata/schema/DefaultViewMetadata.java | 5 +- .../oss/driver/core/metadata/DescribeIT.java | 123 ++++++++++++++---- .../src/test/resources/DescribeIT/dse/4.8.cql | 3 +- .../src/test/resources/DescribeIT/dse/5.0.cql | 3 +- .../src/test/resources/DescribeIT/dse/5.1.cql | 3 +- .../src/test/resources/DescribeIT/dse/6.8.cql | 3 +- .../src/test/resources/DescribeIT/oss/2.1.cql | 3 +- .../src/test/resources/DescribeIT/oss/2.2.cql | 3 +- .../src/test/resources/DescribeIT/oss/3.0.cql | 3 +- .../test/resources/DescribeIT/oss/3.11.cql | 3 +- .../src/test/resources/DescribeIT/oss/4.0.cql | 3 +- 24 files changed, 193 insertions(+), 60 
deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 05664bbd7c7..af81f98b16d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.0 (in progress) +- [improvement] JAVA-2741: Make keyspace/table metadata impls serializable - [bug] JAVA-2740: Extend peer validity check to include datacenter, rack and tokens - [bug] JAVA-2744: Recompute token map when node is added - [new feature] JAVA-2614: Provide a utility to emulate offset paging on the client side diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java index 1c0f6628e5a..f19a5e9aa72 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseEdgeMetadata.java @@ -19,10 +19,13 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.List; import java.util.Objects; -public class DefaultDseEdgeMetadata implements DseEdgeMetadata { +public class DefaultDseEdgeMetadata implements DseEdgeMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier labelName; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java index 381a666310b..2f47cb5aab4 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseKeyspaceMetadata.java @@ -25,13 +25,16 @@ 
import com.datastax.oss.driver.api.core.type.UserDefinedType; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.Map; import java.util.Objects; import java.util.Optional; import net.jcip.annotations.Immutable; @Immutable -public class DefaultDseKeyspaceMetadata implements DseGraphKeyspaceMetadata { +public class DefaultDseKeyspaceMetadata implements DseGraphKeyspaceMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier name; private final boolean durableWrites; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java index 93ad64b8186..b6bc1249e26 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseTableMetadata.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.Objects; @@ -32,7 +33,9 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultDseTableMetadata implements DseGraphTableMetadata { +public class DefaultDseTableMetadata implements DseGraphTableMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier name; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java index efae93e65b6..16d18a11cab 
100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseVertexMetadata.java @@ -19,9 +19,12 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.Objects; -public class DefaultDseVertexMetadata implements DseVertexMetadata { +public class DefaultDseVertexMetadata implements DseVertexMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier labelName; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java index 31224bb66d3..96c25f99812 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/metadata/schema/DefaultDseViewMetadata.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.Objects; @@ -31,7 +32,9 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultDseViewMetadata implements DseViewMetadata { +public class DefaultDseViewMetadata implements DseViewMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier name; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/FunctionSignature.java 
b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/FunctionSignature.java index 14bb7947a60..0b0638f074a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/FunctionSignature.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/FunctionSignature.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.List; import java.util.Objects; import net.jcip.annotations.Immutable; @@ -30,7 +31,10 @@ * {@code sum(int, int)} are not equal. */ @Immutable -public class FunctionSignature { +public class FunctionSignature implements Serializable { + + private static final long serialVersionUID = 1; + @NonNull private final CqlIdentifier name; @NonNull private final List parameterTypes; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultAggregateMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultAggregateMetadata.java index c1d62ca0d1d..61a325abdd0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultAggregateMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultAggregateMetadata.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.Objects; import java.util.Optional; import net.jcip.annotations.Immutable; @@ -29,18 +30,20 @@ import org.slf4j.LoggerFactory; @Immutable -public class DefaultAggregateMetadata implements AggregateMetadata { +public class DefaultAggregateMetadata implements AggregateMetadata, Serializable { private static final Logger LOG = 
LoggerFactory.getLogger(DefaultAggregateMetadata.class); + private static final long serialVersionUID = 1; + @NonNull private final CqlIdentifier keyspace; @NonNull private final FunctionSignature signature; @Nullable private final FunctionSignature finalFuncSignature; @Nullable private final Object initCond; + @Nullable private final String formattedInitCond; @NonNull private final DataType returnType; @NonNull private final FunctionSignature stateFuncSignature; @NonNull private final DataType stateType; - @NonNull private final TypeCodec stateTypeCodec; public DefaultAggregateMetadata( @NonNull CqlIdentifier keyspace, @@ -55,10 +58,10 @@ public DefaultAggregateMetadata( this.signature = signature; this.finalFuncSignature = finalFuncSignature; this.initCond = initCond; + this.formattedInitCond = computeFormattedInitCond(initCond, stateTypeCodec); this.returnType = returnType; this.stateFuncSignature = stateFuncSignature; this.stateType = stateType; - this.stateTypeCodec = stateTypeCodec; } @NonNull @@ -106,18 +109,7 @@ public DataType getStateType() { @NonNull @Override public Optional formatInitCond() { - if (initCond == null) { - return Optional.empty(); - } - try { - return Optional.of(stateTypeCodec.format(initCond)); - } catch (Throwable t) { - LOG.warn( - String.format( - "Failed to format INITCOND for %s.%s, using toString instead", - keyspace.asInternal(), signature.getName().asInternal())); - return Optional.of(initCond.toString()); - } + return Optional.ofNullable(this.formattedInitCond); } @Override @@ -160,4 +152,22 @@ public String toString() { + signature + ")"; } + + @Nullable + private String computeFormattedInitCond( + @Nullable Object initCond, @NonNull TypeCodec stateTypeCodec) { + + if (initCond == null) { + return null; + } + try { + return stateTypeCodec.format(initCond); + } catch (Throwable t) { + LOG.warn( + String.format( + "Failed to format INITCOND for %s.%s, using toString instead", + keyspace.asInternal(), 
signature.getName().asInternal())); + return initCond.toString(); + } + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultColumnMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultColumnMetadata.java index aecb40e7329..311c5220db4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultColumnMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultColumnMetadata.java @@ -19,11 +19,15 @@ import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.type.DataType; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.Objects; import net.jcip.annotations.Immutable; @Immutable -public class DefaultColumnMetadata implements ColumnMetadata { +public class DefaultColumnMetadata implements ColumnMetadata, Serializable { + + private static final long serialVersionUID = 1; + @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier parent; @NonNull private final CqlIdentifier name; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultFunctionMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultFunctionMetadata.java index bfed800046d..9370ffb4ea2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultFunctionMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultFunctionMetadata.java @@ -21,12 +21,15 @@ import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.List; import java.util.Objects; import net.jcip.annotations.Immutable; @Immutable -public class 
DefaultFunctionMetadata implements FunctionMetadata { +public class DefaultFunctionMetadata implements FunctionMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final FunctionSignature signature; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultIndexMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultIndexMetadata.java index 3fbaeff34b6..ef3a382b071 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultIndexMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultIndexMetadata.java @@ -19,12 +19,15 @@ import com.datastax.oss.driver.api.core.metadata.schema.IndexKind; import com.datastax.oss.driver.api.core.metadata.schema.IndexMetadata; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.Map; import java.util.Objects; import net.jcip.annotations.Immutable; @Immutable -public class DefaultIndexMetadata implements IndexMetadata { +public class DefaultIndexMetadata implements IndexMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier table; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultKeyspaceMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultKeyspaceMetadata.java index cb354b583ed..ca61e89e315 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultKeyspaceMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultKeyspaceMetadata.java @@ -24,12 +24,15 @@ import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; import com.datastax.oss.driver.api.core.type.UserDefinedType; import 
edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.util.Map; import java.util.Objects; import net.jcip.annotations.Immutable; @Immutable -public class DefaultKeyspaceMetadata implements KeyspaceMetadata { +public class DefaultKeyspaceMetadata implements KeyspaceMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier name; private final boolean durableWrites; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java index 479067ce0ba..e877e322993 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.Objects; @@ -30,7 +31,9 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultTableMetadata implements TableMetadata { +public class DefaultTableMetadata implements TableMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier name; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultViewMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultViewMetadata.java index 53d50931546..520e7d416a0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultViewMetadata.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultViewMetadata.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.Objects; @@ -31,7 +32,9 @@ import net.jcip.annotations.Immutable; @Immutable -public class DefaultViewMetadata implements ViewMetadata { +public class DefaultViewMetadata implements ViewMetadata, Serializable { + + private static final long serialVersionUID = 1; @NonNull private final CqlIdentifier keyspace; @NonNull private final CqlIdentifier name; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index 37e38af2b03..ade31eebc24 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -18,24 +18,31 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseKeyspaceMetadata; +import com.datastax.dse.driver.internal.core.metadata.schema.DefaultDseTableMetadata; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import 
com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.SerializationHelper; +import com.datastax.oss.driver.internal.core.metadata.schema.DefaultKeyspaceMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.DefaultTableMetadata; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.google.common.io.Files; import java.io.File; +import java.io.IOException; import java.net.URL; import java.time.Duration; import java.util.List; import java.util.Optional; import java.util.regex.Pattern; -import org.junit.Before; +import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -68,38 +75,32 @@ public class DescribeIT { // Use a regex to ignore semicolons in function scripts Splitter.on(Pattern.compile(";\n")).omitEmptyStrings(); - private Version serverVersion; - private boolean isDse; + private static Version serverVersion; + private static boolean isDse; - @Before - public void setup() { + private static File scriptFile; + private static String scriptContents; + + @BeforeClass + public static void setup() { Optional dseVersion = CCM_RULE.getDseVersion(); isDse = dseVersion.isPresent(); serverVersion = isDse ? 
dseVersion.get().nextStable() : CCM_RULE.getCassandraVersion().nextStable(); + + scriptFile = getScriptFile(); + assertThat(scriptFile).exists(); + assertThat(scriptFile).isFile(); + assertThat(scriptFile).canRead(); + scriptContents = getScriptContents(); + + setupDatabase(); } @Test public void describe_output_should_match_creation_script() throws Exception { - CqlSession session = SESSION_RULE.session(); - - File scriptFile = getScriptFile(); - String scriptContents = - Files.asCharSource(scriptFile, Charsets.UTF_8) - .read() - .trim() - .replaceAll("ks_0", SESSION_RULE.keyspace().asCql(true)); - List statements = STATEMENT_SPLITTER.splitToList(scriptContents); - // Skip the first statement (CREATE KEYSPACE), we already have a keyspace - for (int i = 1; i < statements.size(); i++) { - String statement = statements.get(i); - try { - session.execute(statement); - } catch (Exception e) { - fail("Error executing statement %s (%s)", statement, e); - } - } + CqlSession session = SESSION_RULE.session(); KeyspaceMetadata keyspaceMetadata = session.getMetadata().getKeyspace(SESSION_RULE.keyspace()).orElseThrow(AssertionError::new); @@ -114,11 +115,57 @@ public void describe_output_should_match_creation_script() throws Exception { .isEqualTo(scriptContents); } + private boolean atLeastVersion(Version dseVersion, Version ossVersion) { + Version comparison = isDse ? 
dseVersion : ossVersion; + return serverVersion.compareTo(comparison) >= 0; + } + + @Test + public void keyspace_metadata_should_be_serializable() throws Exception { + + CqlSession session = SESSION_RULE.session(); + + Optional ksOption = + session.getMetadata().getKeyspace(session.getKeyspace().get()); + assertThat(ksOption).isPresent(); + KeyspaceMetadata ks = ksOption.get(); + assertThat(ks).isInstanceOfAny(DefaultKeyspaceMetadata.class, DefaultDseKeyspaceMetadata.class); + + /* Validate that the keyspace metadata is fully populated */ + assertThat(ks.getUserDefinedTypes()).isNotEmpty(); + assertThat(ks.getTables()).isNotEmpty(); + if (atLeastVersion(Version.V5_0_0, Version.V3_0_0)) { + + assertThat(ks.getViews()).isNotEmpty(); + } + if (atLeastVersion(Version.V5_0_0, Version.V2_2_0)) { + + assertThat(ks.getFunctions()).isNotEmpty(); + assertThat(ks.getAggregates()).isNotEmpty(); + } + + /* A table with an explicit compound primary key + specified clustering column */ + Optional tableOption = ks.getTable("rank_by_year_and_name"); + assertThat(tableOption).isPresent(); + TableMetadata table = tableOption.get(); + assertThat(table).isInstanceOfAny(DefaultTableMetadata.class, DefaultDseTableMetadata.class); + + /* Validate that the table metadata is fully populated */ + assertThat(table.getPartitionKey()).isNotEmpty(); + assertThat(table.getClusteringColumns()).isNotEmpty(); + assertThat(table.getColumns()).isNotEmpty(); + assertThat(table.getOptions()).isNotEmpty(); + assertThat(table.getIndexes()).isNotEmpty(); + + KeyspaceMetadata deserialized = SerializationHelper.serializeAndDeserialize(ks); + assertThat(deserialized).isEqualTo(ks); + } + /** * Find a creation script in our test resources that matches the current server version. If we * don't have an exact match, use the closest version below it. 
*/ - private File getScriptFile() { + private static File getScriptFile() { URL logbackTestUrl = DescribeIT.class.getResource("/logback-test.xml"); if (logbackTestUrl == null || logbackTestUrl.getFile().isEmpty()) { fail( @@ -158,4 +205,32 @@ private File getScriptFile() { "Using {} to test against {} {}", bestFile, isDse ? "DSE" : "Cassandra", serverVersion); return bestFile; } + + private static String getScriptContents() { + + try { + + return Files.asCharSource(scriptFile, Charsets.UTF_8) + .read() + .trim() + .replaceAll("ks_0", SESSION_RULE.keyspace().asCql(true)); + } catch (IOException ioe) { + fail("Exception reading script file " + scriptFile, ioe); + return null; + } + } + + private static void setupDatabase() { + List statements = STATEMENT_SPLITTER.splitToList(scriptContents); + + // Skip the first statement (CREATE KEYSPACE), we already have a keyspace + for (int i = 1; i < statements.size(); i++) { + String statement = statements.get(i); + try { + SESSION_RULE.session().execute(statement); + } catch (Exception e) { + fail("Error executing statement %s (%s)", statement, e); + } + } + } } diff --git a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql index b05df71a503..05408ba0924 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql @@ -28,7 +28,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}' AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql index 
fe606992a44..25b42c58d68 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql @@ -53,7 +53,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql index fe606992a44..25b42c58d68 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql @@ -53,7 +53,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} diff --git a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql index 24492c7b176..416c397ba97 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql @@ -55,7 +55,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH additional_write_policy = '99PERCENTILE' +) WITH CLUSTERING ORDER BY (rank DESC) + AND additional_write_policy = '99PERCENTILE' AND bloom_filter_fp_chance = 0.01 AND caching = 
{'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql index b05df71a503..05408ba0924 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql @@ -28,7 +28,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}' AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql index 5749778e71b..5b4442133c3 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql @@ -51,7 +51,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = '{"keys":"ALL", "rows_per_partition":"NONE"}' AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy'} diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql index fe606992a44..25b42c58d68 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql @@ -53,7 +53,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING 
ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql index fe606992a44..25b42c58d68 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql @@ -53,7 +53,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH bloom_filter_fp_chance = 0.01 +) WITH CLUSTERING ORDER BY (rank DESC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} diff --git a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql index 6c741fd90fe..15ff0f5e9dc 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql @@ -53,7 +53,8 @@ CREATE TABLE ks_0.rank_by_year_and_name ( rank int, cyclist_name text, PRIMARY KEY ((race_year, race_name), rank) -) WITH additional_write_policy = '99p' +) WITH CLUSTERING ORDER BY (rank DESC) + AND additional_write_policy = '99p' AND bloom_filter_fp_chance = 0.01 AND caching = {'keys':'ALL','rows_per_partition':'NONE'} AND comment = '' From d54a5108b30b0c2948bd29f0d00f0c934001a76b Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 27 Apr 2020 16:03:09 -0700 Subject: [PATCH 455/979] Bump native-protocol to 1.4.10 --- bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bom/pom.xml b/bom/pom.xml index 
7d8c09a02e3..574ea05db11 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -61,7 +61,7 @@ com.datastax.oss native-protocol - 1.4.10-SNAPSHOT + 1.4.10 com.datastax.oss From 12570ea462f12ce48b474764b73ddb9fa251f55b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Apr 2020 07:52:35 -0700 Subject: [PATCH 456/979] Update version in docs --- README.md | 2 +- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++++----- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 10 +++--- manual/core/detachable_types/README.md | 14 ++++---- manual/core/dse/geotypes/README.md | 6 ++-- manual/core/dse/graph/README.md | 4 +-- manual/core/dse/graph/fluent/README.md | 4 +-- .../core/dse/graph/fluent/explicit/README.md | 12 +++---- manual/core/dse/graph/results/README.md | 6 ++-- manual/core/dse/graph/script/README.md | 6 ++-- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 28 +++++++-------- manual/core/metadata/schema/README.md | 20 +++++------ manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 10 +++--- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reactive/README.md | 34 ++++++++++--------- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 4 +-- manual/core/statements/README.md | 8 ++--- 
manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/delete/README.md | 16 ++++----- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 12 +++---- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 18 +++++----- manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 24 ++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 10 +++--- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 2 +- manual/query_builder/README.md | 10 +++--- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- 76 files changed, 326 insertions(+), 324 deletions(-) diff --git a/README.md b/README.md index 
95a1a62ae60..3286b040d0f 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.5.1](https://github.com/datastax/java-driver/tree/4.5.1).* +[4.6.0](https://github.com/datastax/java-driver/tree/4.6.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index af81f98b16d..bc397412a7c 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.6.0 (in progress) +### 4.6.0 - [improvement] JAVA-2741: Make keyspace/table metadata impls serializable - [bug] JAVA-2740: Extend peer validity check to include datacenter, rack and tokens diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index d88089bb364..f73f80629a1 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 87ab66119a5..a56b2474a44 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 0d608fdc229..fbe154dd12a 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index c2c5b696709..99657e45077 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 4249a4fbbf1..d902071474c 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index f870405d9bf..150f9fd82af 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 57c9ab447e6..1b69b82296e 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few 
options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 6fb6dd7ad8e..0ce833e141a 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -360,8 +360,8 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[MappingCodec]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index e328b5f0428..5264333cc85 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index bc4bd75f0ba..8bbff60db25 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md 
b/manual/core/dse/graph/README.md index f351148fcd0..3660ef4d1be 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. [Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index febdbd833b8..a1df9472934 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 
26c63276cee..dd070a22a1f 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 5da23f42812..26477c52645 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 474004ba4cb..9078a0b5a51 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through 
the script * configuration; * DSE graph schema queries. -[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 6b3f0598be4..9bd63f43e59 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 29cc09e39ee..bd03509bac6 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -517,6 +517,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 0d089a98d37..5ef033a45d0 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 66c5af266f5..5227721dfe2 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index f56302768da..2fd76bcf750 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 34af1f32bb4..cee9acb9906 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 9d277c0a5d1..434e4c31f0d 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 152655dfbcd..8a277a92dea 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -120,6 +120,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 3d1005326c6..c273db60753 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -205,10 +205,10 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/paging/OffsetPager.html [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 69123907244..ce8144dffca 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index d735674bd11..d1bdba33f1c 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -158,5 +158,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 12d754d84b1..06595d3d9aa 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index ae4f3fcfa57..47dae02b1ec 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -361,23 +361,25 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. 
+[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- + +[built-in retry mechanism]: ../retries/ +[request throttling]: ../throttling/ + +[Managing concurrency in asynchronous query execution]: https://docs.datastax.com/en/devapp/doc/devapp/driverManagingConcurrency.html +[Publisher]: https://www.reactive-streams.org/reactive-streams-1.0.2-javadoc/org/reactivestreams/Publisher.html 
[reactive streams]: https://en.wikipedia.org/wiki/Reactive_Streams [Reactive Streams API]: https://github.com/reactive-streams/reactive-streams-jvm -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.2/com/datastax/oss/driver/api/core/cql/Row.html -[Publisher]: https://www.reactive-streams.org/reactive-streams-1.0.2-javadoc/org/reactivestreams/Publisher.html [Reactive Streams Specification rule 2.2]: https://github.com/reactive-streams/reactive-streams-jvm#2.2 [Reactor]: https://projectreactor.io/ -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: 
https://docs.datastax.com/en/drivers/java-dse/2.3/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- -[built-in retry mechanism]: https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/retries/ -[request throttling]: https://docs.datastax.com/en/developer/java-driver/4.3/manual/core/throttling/ -[Managing concurrency in asynchronous query execution]: https://docs.datastax.com/en/devapp/doc/devapp/driverManagingConcurrency.html] \ No newline at end of file diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index e858477d826..2f59c55e0dd 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 5fe96254055..2e417d72826 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? [v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 935b43cf035..9b1f6a77173 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index dcc02fbdc1e..9d8af0890df 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 106aeb047f6..7f79c43b233 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -188,5 +188,5 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 7805d49406f..22e3ccb3046 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 97e22c29a50..71df2039db9 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index aed01d27859..e7c7d5db7ba 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 285666b5fac..e8a5ac1ed60 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 1ff760c59a7..e78bd67b345 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index e79256f9653..7492841b296 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 35877fe2e77..41a0cf5736f 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index a327cbcdefa..b3778cf37fb 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 50b9310b6ea..b9cdd71bb5e 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index d0d431b9af8..6b463054d65 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 83bbd8e5f15..fb3af32d34f 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 78f6b969c97..67f6883a1af 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -142,14 +142,14 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 6617ff364f6..5d6a06a5749 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index a5f013c7676..ad26943b704 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -99,12 +99,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index e05e6c0b4e7..c009a3dfa97 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 
+93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 1b0c8728a58..b83e36288a2 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -106,15 +106,15 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 8e3204dbf2b..92d6484c540 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 1ef821e6424..8981ee144dd 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -133,18 +133,18 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/PagingIterable.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index e25eece6d5e..2c0a0791b36 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. 
-[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index ff56dd625ce..75b0cf079c5 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git 
a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 255e0d6f67a..0132cf97b0e 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -134,12 +134,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index ddda5fa2820..ae93b78e171 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: 
http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 005e8472ac3..e5ca0217e51 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -227,8 +227,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index e0c5108bca3..9fd66969ed8 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -94,5 +94,5 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 196d1b168d7..db91bea1437 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html 
-[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 764de6ad967..da3b1b72c3b 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 0d12d260070..d17ac6cd877 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 6f217480b8f..c3362645c86 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 23a9b3acca1..f428596d7a5 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index c604e8298b7..8aae54d708f 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index aced0084389..bbf31f826b9 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 1d0bea64564..a4f4550515d 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 758b0fd6998..d2b770a9b96 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 8430868f332..70b6063af9f 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 5963f33f5b3..db83986d75d 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 46bde5dc902..4f01daaa0ab 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index cbf27542b08..baca52e0148 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 377cdd22021..f8a9500ffb7 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 265bd37452f..05db4b04eeb 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 3fa0cefead9..3babd52439e 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 7bbdf6ac8b4..435b766c55b 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From 5e543f9650a563f957c9e462adac44636caf1a0e Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Apr 2020 08:09:45 -0700 Subject: [PATCH 457/979] [maven-release-plugin] prepare release 4.6.0 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 574ea05db11..90c2d820976 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss java-driver-core-shaded - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss java-driver-mapper-processor - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss java-driver-mapper-runtime - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss java-driver-query-builder - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss java-driver-test-infra - 4.6.0-SNAPSHOT + 4.6.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 824e4410e5f..74112951e2b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded 
deps diff --git a/core/pom.xml b/core/pom.xml index 7a5c21cafd4..c953e9920fe 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 7859fb8e436..e25acb2e893 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 692247098c1..4ca2ba560d6 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.6.0-SNAPSHOT + 4.6.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index cc43c56dcd4..6a8761767d0 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 412659fd749..4cf2aee2fc0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 75f284cb6da..1d9070f397f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index c4b9d1f2d0d..0753516f471 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 
(CQL3) and Cassandra's native protocol versions 3 and above. @@ -872,7 +872,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.6.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d3f7d3ff252..cee048daf61 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index ecfbe5cc1d3..bddca2c1b52 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0-SNAPSHOT + 4.6.0 java-driver-test-infra bundle From 64a3d93fad9789e6f73cf2e277c81a8bd88ec44b Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Apr 2020 08:09:53 -0700 Subject: [PATCH 458/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 90c2d820976..592197b1948 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.6.0 + 4.7.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.6.0 + 4.7.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.6.0 + 4.7.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.6.0 + 4.7.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.6.0 + 4.7.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.6.0 + 
4.7.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 74112951e2b..1bf114a59a2 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c953e9920fe..2be5cb3b2a1 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e25acb2e893..3bbfcd220d2 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 4ca2ba560d6..151d76cf509 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.6.0 + 4.7.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 6a8761767d0..e7a130640aa 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 4cf2aee2fc0..f9dc9fb3fe1 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 1d9070f397f..761b22dc7a4 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index 0753516f471..dd41cdcc531 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -872,7 +872,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.6.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index cee048daf61..0336eb4cc97 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index bddca2c1b52..6ec58abcf25 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.7.0-SNAPSHOT java-driver-test-infra bundle From 671bc5ebb7d702fc96a19bf19d62dc3b323ed6a0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Apr 2020 08:48:53 -0700 Subject: [PATCH 459/979] Prepare changelog for next iteration --- changelog/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index bc397412a7c..70a190dfffe 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.7.0 (in progress) + + ### 4.6.0 - [improvement] JAVA-2741: Make keyspace/table metadata impls serializable From 5b53b0de17bb7feac2a321090a3f96ba151da879 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 6 May 2020 16:49:09 -0700 Subject: [PATCH 460/979] Prepare for next patch version --- bom/pom.xml | 14 +++++++------- changelog/README.md | 3 +++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 20 insertions(+), 17 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 90c2d820976..f682b9bb6cb 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 
4.6.0 + 4.6.1-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.6.0 + 4.6.1-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index bc397412a7c..dec64bac9c8 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.6.1 (in progress) + + ### 4.6.0 - [improvement] JAVA-2741: Make keyspace/table metadata impls serializable diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 74112951e2b..12bd45b9c2c 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c953e9920fe..294ac64dbd5 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e25acb2e893..32b8756b1a7 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 4ca2ba560d6..cc5e563929d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.6.0 + 4.6.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 6a8761767d0..cf35e3a0332 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 4cf2aee2fc0..24ad788f844 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 1d9070f397f..f383477c58e 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index 0753516f471..fcca4e9824d 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
diff --git a/query-builder/pom.xml b/query-builder/pom.xml index cee048daf61..bfc00d9b7b8 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index bddca2c1b52..6e8abfc74d1 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.0 + 4.6.1-SNAPSHOT java-driver-test-infra bundle From bc95909ce0f51f0c0b1cb04de18e29059193f4a3 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 6 May 2020 16:54:09 -0700 Subject: [PATCH 461/979] JAVA-2676: Don't reschedule write coalescer after empty runs --- changelog/README.md | 1 + .../api/core/config/DefaultDriverOption.java | 7 +-- .../driver/api/core/config/OptionsMap.java | 1 - .../api/core/config/TypedDriverOption.java | 3 +- .../core/channel/DefaultWriteCoalescer.java | 43 ++++++++----------- core/src/main/resources/reference.conf | 7 --- 6 files changed, 24 insertions(+), 38 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index dec64bac9c8..1c2b4e66df2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.6.1 (in progress) +- [bug] JAVA-2676: Don't reschedule write coalescer after empty runs ### 4.6.0 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index adfc1f658bf..eedf8011c59 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -734,11 +734,8 @@ public enum DefaultDriverOption implements DriverOption { */ NETTY_ADMIN_SHUTDOWN_UNIT("advanced.netty.admin-group.shutdown.unit"), - /** - * How many times the coalescer is allowed to reschedule itself when it did no work. - * - *

        Value-type: int - */ + /** @deprecated This option was removed in version 4.6.1. */ + @Deprecated COALESCER_MAX_RUNS("advanced.coalescer.max-runs-with-no-work"), /** * The coalescer reschedule interval. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index db0725b6e29..c8e8bbf4cb4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -353,7 +353,6 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.NETTY_ADMIN_SHUTDOWN_UNIT, "SECONDS"); map.put(TypedDriverOption.NETTY_TIMER_TICK_DURATION, Duration.ofMillis(100)); map.put(TypedDriverOption.NETTY_TIMER_TICKS_PER_WHEEL, 2048); - map.put(TypedDriverOption.COALESCER_MAX_RUNS, 5); map.put(TypedDriverOption.COALESCER_INTERVAL, Duration.of(10, ChronoUnit.MICROS)); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index c4d796e259a..6413a617649 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -509,7 +509,8 @@ public String toString() { /** Units for admin group quiet period and timeout. */ public static final TypedDriverOption NETTY_ADMIN_SHUTDOWN_UNIT = new TypedDriverOption<>(DefaultDriverOption.NETTY_ADMIN_SHUTDOWN_UNIT, GenericType.STRING); - /** How many times the coalescer is allowed to reschedule itself when it did no work. */ + /** @deprecated This option was removed in version 4.6.1. 
*/ + @Deprecated public static final TypedDriverOption COALESCER_MAX_RUNS = new TypedDriverOption<>(DefaultDriverOption.COALESCER_MAX_RUNS, GenericType.INTEGER); /** The coalescer reschedule interval. */ diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DefaultWriteCoalescer.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DefaultWriteCoalescer.java index 29bf2822617..204f1d77b3d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DefaultWriteCoalescer.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DefaultWriteCoalescer.java @@ -37,9 +37,7 @@ * *

        It maintains a queue per event loop, with the writes targeting the channels that run on this * loop. As soon as a write gets enqueued, it triggers a task that will flush the queue (other - * writes can get enqueued before the task runs). Once that task is complete, it re-triggers itself - * as long as new writes have been enqueued, or {@code maxRunsWithNoWork} times if there are no more - * tasks. + * writes may get enqueued before or while the task runs). * *

        Note that Netty provides a similar mechanism out of the box ({@link * io.netty.handler.flush.FlushConsolidationHandler}), but in our experience our approach allows @@ -49,13 +47,11 @@ */ @ThreadSafe public class DefaultWriteCoalescer implements WriteCoalescer { - private final int maxRunsWithNoWork; private final long rescheduleIntervalNanos; private final ConcurrentMap flushers = new ConcurrentHashMap<>(); public DefaultWriteCoalescer(DriverContext context) { DriverExecutionProfile config = context.getConfig().getDefaultProfile(); - maxRunsWithNoWork = config.getInt(DefaultDriverOption.COALESCER_MAX_RUNS); rescheduleIntervalNanos = config.getDuration(DefaultDriverOption.COALESCER_INTERVAL).toNanos(); } @@ -79,9 +75,8 @@ private class Flusher { private final Queue writes = new ConcurrentLinkedQueue<>(); private final AtomicBoolean running = new AtomicBoolean(); - // These variables are accessed only from runOnEventLoop, they don't need to be thread-safe + // This variable is accessed only from runOnEventLoop, it doesn't need to be thread-safe private final Set channels = new HashSet<>(); - private int runsWithNoWork = 0; private Flusher(EventLoop eventLoop) { this.eventLoop = eventLoop; @@ -98,13 +93,11 @@ private void enqueue(Write write) { private void runOnEventLoop() { assert eventLoop.inEventLoop(); - boolean didSomeWork = false; Write write; while ((write = writes.poll()) != null) { Channel channel = write.channel; channels.add(channel); channel.write(write.message, write.writePromise); - didSomeWork = true; } for (Channel channel : channels) { @@ -112,22 +105,24 @@ private void runOnEventLoop() { } channels.clear(); - if (didSomeWork) { - runsWithNoWork = 0; - } else if (++runsWithNoWork > maxRunsWithNoWork) { - // Prepare to stop - running.set(false); - // If no new writes have been enqueued since the previous line, we can return safely - if (writes.isEmpty()) { - return; - } - // Otherwise check if those writes have triggered a new run. 
If not, we need to do that - // ourselves (i.e. not return yet) - if (!running.compareAndSet(false, true)) { - return; - } + // Prepare to stop + running.set(false); + + // enqueue() can be called concurrently with this method. There is a race condition if it: + // - added an element in the queue after we were done draining it + // - but observed running==true before we flipped it, and therefore didn't schedule another + // run + + // If nothing was added in the queue, there were no concurrent calls, we can stop safely now + if (writes.isEmpty()) { + return; } - if (!eventLoop.isShuttingDown()) { + + // Otherwise, check if one of those calls scheduled a run. If so, they flipped the bit back + // on. If not, we need to do it ourselves. + boolean shouldRestartMyself = running.compareAndSet(false, true); + + if (shouldRestartMyself && !eventLoop.isShuttingDown()) { eventLoop.schedule(this::runOnEventLoop, rescheduleIntervalNanos, TimeUnit.NANOSECONDS); } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index c66885f63b1..34514dd1fbf 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -2020,13 +2020,6 @@ datastax-java-driver { # This is exposed mainly to facilitate tuning during development. You shouldn't have to adjust # this. advanced.coalescer { - # How many times the coalescer is allowed to reschedule itself when it did no work. - # - # Required: yes - # Modifiable at runtime: no - # Overridable in a profile: no - max-runs-with-no-work = 5 - # The reschedule interval. 
# # Required: yes From 4e6a5cf0f074e4437f8eaedd473e467e5d3c6e9e Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 11 May 2020 10:18:27 -0700 Subject: [PATCH 462/979] Update version in docs --- README.md | 2 +- changelog/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 3286b040d0f..0fb47bb6848 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.6.0](https://github.com/datastax/java-driver/tree/4.6.0).* +[4.6.1](https://github.com/datastax/java-driver/tree/4.6.1).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index 1c2b4e66df2..1efdbb2e591 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.6.1 (in progress) +### 4.6.1 - [bug] JAVA-2676: Don't reschedule write coalescer after empty runs From 4c9a03853c9260f93887d6e4c9b5e1fe0f14b76c Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 11 May 2020 10:27:27 -0700 Subject: [PATCH 463/979] [maven-release-plugin] prepare release 4.6.1 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f682b9bb6cb..96e7e50d4c4 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 
4.6.1-SNAPSHOT + 4.6.1 java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss java-driver-core-shaded - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss java-driver-mapper-processor - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss java-driver-mapper-runtime - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss java-driver-query-builder - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss java-driver-test-infra - 4.6.1-SNAPSHOT + 4.6.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 12bd45b9c2c..dcf39f7c6bb 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 294ac64dbd5..649dfa9f603 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 32b8756b1a7..9fb7556ce54 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index cc5e563929d..a758d76490f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.6.1-SNAPSHOT + 4.6.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index cf35e3a0332..4bac898120a 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 24ad788f844..4306e0847e0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f383477c58e..b7488e65629 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index fcca4e9824d..b1d5b0f76db 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -872,7 +872,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.6.0 + 4.6.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index bfc00d9b7b8..71fb80dce3a 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 6e8abfc74d1..704bbd03b2d 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1-SNAPSHOT + 4.6.1 java-driver-test-infra bundle From 603eb2d00ea0a8b6cf4e8a4b13cdcc042d9b350c Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 11 May 2020 10:30:38 -0700 Subject: [PATCH 464/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 96e7e50d4c4..8211b71dda1 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.6.1 + 4.6.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml 
b/core-shaded/pom.xml index dcf39f7c6bb..1a1ea0684ba 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 649dfa9f603..783497afa4e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 9fb7556ce54..90141b1daf5 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a758d76490f..445ae4174c7 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.6.1 + 4.6.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 4bac898120a..3f3762c851f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 4306e0847e0..e4a16a5b43d 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index b7488e65629..c286528b58b 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/pom.xml b/pom.xml index b1d5b0f76db..8a5e4f68410 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -872,7 +872,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.6.1 + 4.6.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 71fb80dce3a..79a2f66abcf 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 704bbd03b2d..d5d00cddf68 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.6.1 + 4.6.2-SNAPSHOT java-driver-test-infra bundle From e852545bbeb51f8fee0df4c38e37fff039224382 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Apr 2020 16:17:04 -0700 Subject: [PATCH 465/979] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name --- changelog/README.md | 1 + .../oss/driver/api/core/cql/Statement.java | 7 +- .../core/cql/DefaultBatchStatement.java | 4 +- .../core/cql/DefaultBoundStatement.java | 4 +- .../core/cql/DefaultSimpleStatement.java | 4 +- .../api/core/cql/StatementProfileTest.java | 194 ++++++++++++++++++ 6 files changed, 206 insertions(+), 8 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementProfileTest.java diff --git a/changelog/README.md b/changelog/README.md index ef236f4103c..aef8511caf4 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [improvement] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name ### 4.6.1 diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index 2925cdfd11f..f890870327c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -85,8 +85,8 @@ public interface Statement> extends Request { /** * Sets the name of the execution profile that will be used for this statement. * - *

        For all the driver's built-in implementations, this method has no effect if {@link - * #setExecutionProfile(DriverExecutionProfile)} has been called with a non-null argument. + *

        For all the driver's built-in implementations, calling this method with a non-null argument + * automatically resets {@link #getExecutionProfile()} to null. * *

        All the driver's built-in implementations are immutable, and return a new instance from this * method. However custom implementations may choose to be mutable and return the same instance. @@ -98,6 +98,9 @@ public interface Statement> extends Request { /** * Sets the execution profile to use for this statement. * + *

        For all the driver's built-in implementations, calling this method with a non-null argument + * automatically resets {@link #getExecutionProfileName()} to null. + * *

        All the driver's built-in implementations are immutable, and return a new instance from this * method. However custom implementations may choose to be mutable and return the same instance. */ diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java index 4d6ad45b5b7..94d704c51ad 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBatchStatement.java @@ -389,7 +389,7 @@ public BatchStatement setExecutionProfileName(@Nullable String newConfigProfileN batchType, statements, newConfigProfileName, - executionProfile, + (newConfigProfileName == null) ? executionProfile : null, keyspace, routingKeyspace, routingKey, @@ -418,7 +418,7 @@ public DefaultBatchStatement setExecutionProfile(@Nullable DriverExecutionProfil return new DefaultBatchStatement( batchType, statements, - executionProfileName, + (newProfile == null) ? executionProfileName : null, newProfile, keyspace, routingKeyspace, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java index 35c4aa12a8b..c1b2119cd1c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java @@ -206,7 +206,7 @@ public BoundStatement setExecutionProfileName(@Nullable String newConfigProfileN variableDefinitions, values, newConfigProfileName, - executionProfile, + (newConfigProfileName == null) ? 
executionProfile : null, routingKeyspace, routingKey, routingToken, @@ -237,7 +237,7 @@ public BoundStatement setExecutionProfile(@Nullable DriverExecutionProfile newPr preparedStatement, variableDefinitions, values, - executionProfileName, + (newProfile == null) ? executionProfileName : null, newProfile, routingKeyspace, routingKey, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java index 604fa5c0051..4efc80a7dcc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultSimpleStatement.java @@ -214,7 +214,7 @@ public SimpleStatement setExecutionProfileName(@Nullable String newConfigProfile positionalValues, namedValues, newConfigProfileName, - executionProfile, + (newConfigProfileName == null) ? executionProfile : null, keyspace, routingKeyspace, routingKey, @@ -245,7 +245,7 @@ public SimpleStatement setExecutionProfile(@Nullable DriverExecutionProfile newP query, positionalValues, namedValues, - null, + (newProfile == null) ? executionProfileName : null, newProfile, keyspace, routingKeyspace, diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementProfileTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementProfileTest.java new file mode 100644 index 00000000000..7606979f3a7 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/cql/StatementProfileTest.java @@ -0,0 +1,194 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.TestDataProviders; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.cql.DefaultBoundStatement; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Collections; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class StatementProfileTest { + + private static final DriverExecutionProfile PROFILE = mock(DriverExecutionProfile.class); + private static final String NAME = "mockProfileName"; + + @Test + @UseDataProvider("statements") + public void should_set_profile_and_name_on_statement( + Statement statement, + Operation operation1, + Operation operation2, + String expectedName, + DriverExecutionProfile expectedProfile) { + + statement = operation1.applyTo(statement); + statement = operation2.applyTo(statement); + + assertThat(statement.getExecutionProfileName()).isEqualTo(expectedName); + assertThat(statement.getExecutionProfile()).isEqualTo(expectedProfile); + } + + @Test + @UseDataProvider("builders") + public void should_set_profile_and_name_on_builder( + StatementBuilder builder, + Operation 
operation1, + Operation operation2, + String expectedName, + DriverExecutionProfile expectedProfile) { + + builder = operation1.applyTo(builder); + builder = operation2.applyTo(builder); + + Statement statement = builder.build(); + + assertThat(statement.getExecutionProfileName()).isEqualTo(expectedName); + assertThat(statement.getExecutionProfile()).isEqualTo(expectedProfile); + } + + private static Object[][] scenarios() { + return new Object[][] { + // operation1, operation2, expectedName, expectedProfile + + // only one set: + new Object[] {setProfile(PROFILE), noop(), null, PROFILE}, + new Object[] {setName(NAME), noop(), NAME, null}, + + // last one wins: + new Object[] {setProfile(PROFILE), setName(NAME), NAME, null}, + new Object[] {setName(NAME), setProfile(PROFILE), null, PROFILE}, + + // null does not unset other: + new Object[] {setProfile(PROFILE), setName(null), null, PROFILE}, + new Object[] {setName(NAME), setProfile(null), NAME, null}, + }; + } + + @DataProvider + public static Object[][] statements() { + SimpleStatement simpleStatement = SimpleStatement.newInstance("mock query"); + Object[][] statements = + TestDataProviders.fromList( + simpleStatement, + newBoundStatement(), + BatchStatement.newInstance(BatchType.LOGGED, simpleStatement)); + + return TestDataProviders.combine(statements, scenarios()); + } + + @DataProvider + public static Object[][] builders() { + SimpleStatement simpleStatement = SimpleStatement.newInstance("mock query"); + Object[][] builders = + TestDataProviders.fromList( + SimpleStatement.builder(simpleStatement), + new BoundStatementBuilder(newBoundStatement()), + BatchStatement.builder(BatchType.LOGGED).addStatement(simpleStatement)); + + return TestDataProviders.combine(builders, scenarios()); + } + + private interface Operation { + + Statement applyTo(Statement statement); + + StatementBuilder applyTo(StatementBuilder builder); + } + + private static Operation setProfile(DriverExecutionProfile profile) { + return new 
Operation() { + @Override + public Statement applyTo(Statement statement) { + return statement.setExecutionProfile(profile); + } + + @Override + public StatementBuilder applyTo(StatementBuilder builder) { + return builder.setExecutionProfile(profile); + } + }; + } + + private static Operation setName(String name) { + return new Operation() { + @Override + public Statement applyTo(Statement statement) { + return statement.setExecutionProfileName(name); + } + + @Override + public StatementBuilder applyTo(StatementBuilder builder) { + return builder.setExecutionProfileName(name); + } + }; + } + + private static Operation noop() { + return new Operation() { + @Override + public Statement applyTo(Statement statement) { + return statement; + } + + @Override + public StatementBuilder applyTo(StatementBuilder builder) { + return builder; + } + }; + } + + private static BoundStatement newBoundStatement() { + // Mock the minimum state needed to create a DefaultBoundStatement that can also be used to + // initialize a builder + PreparedStatement preparedStatement = mock(PreparedStatement.class); + ColumnDefinitions variableDefinitions = mock(ColumnDefinitions.class); + when(preparedStatement.getVariableDefinitions()).thenReturn(variableDefinitions); + return new DefaultBoundStatement( + preparedStatement, + variableDefinitions, + new ByteBuffer[0], + null, + null, + null, + null, + null, + Collections.emptyMap(), + null, + false, + Statement.NO_DEFAULT_TIMESTAMP, + null, + 5000, + null, + null, + Duration.ZERO, + null, + null, + null, + Statement.NO_NOW_IN_SECONDS); + } +} From f4ea838b7650ab5bb10d089e1ea8613ba2592b05 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 13 May 2020 11:33:37 -0500 Subject: [PATCH 466/979] JAVA-2663: Pure Graal support for native functions (#1439) --- core/pom.xml | 5 + .../oss/driver/api/core/uuid/Uuids.java | 32 ++++++- .../driver/internal/core/os/GraalGetpid.java | 37 ++++++++ .../driver/internal/core/os/GraalLibc.java | 94 
+++++++++++++++++++ .../oss/driver/internal/core/os/Native.java | 19 ++++ pom.xml | 5 + 6 files changed, 190 insertions(+), 2 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalLibc.java diff --git a/core/pom.xml b/core/pom.xml index 2be5cb3b2a1..47858cd2427 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -127,6 +127,11 @@ com.github.spotbugs spotbugs-annotations + + org.graalvm.sdk + graal-sdk + provided + ch.qos.logback logback-classic diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 3cd122d7f97..35cc37e8e7a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -113,7 +113,35 @@ public final class Uuids { private Uuids() {} private static final long START_EPOCH = makeEpoch(); - private static final long CLOCK_SEQ_AND_NODE = makeClockSeqAndNode(); + + // Lazily initialize clock seq + node value at time of first access. Quarkus will attempt to + // initialize this class at deployment time which prevents us from just setting this value + // directly. The "node" part of the clock seq + node includes the current PID which (for + // GraalVM users) we obtain via the LLVM interop. That infrastructure isn't setup at Quarkus + // deployment time, however, thus we can't just call makeClockSeqAndNode() in an initializer. + // See JAVA-2663 for more detail on this point. + // + // Container impl adapted from Guava's memoized Supplier impl. 
+ private static class ClockSeqAndNodeContainer { + + private volatile boolean initialized = false; + private long val; + + private long get() { + if (!initialized) { + synchronized (ClockSeqAndNodeContainer.class) { + if (!initialized) { + + initialized = true; + val = makeClockSeqAndNode(); + } + } + } + return val; + } + } + + private static final ClockSeqAndNodeContainer CLOCK_SEQ_AND_NODE = new ClockSeqAndNodeContainer(); // The min and max possible lsb for a UUID. // @@ -437,7 +465,7 @@ private static byte[] toBytes(UUID uuid) { */ @NonNull public static UUID timeBased() { - return new UUID(makeMsb(getCurrentTimestamp()), CLOCK_SEQ_AND_NODE); + return new UUID(makeMsb(getCurrentTimestamp()), CLOCK_SEQ_AND_NODE.get()); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java new file mode 100644 index 00000000000..e910f3ce80a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java @@ -0,0 +1,37 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Collections; +import java.util.List; +import org.graalvm.nativeimage.c.CContext; +import org.graalvm.nativeimage.c.function.CFunction; + +@CContext(GraalGetpid.Directives.class) +public class GraalGetpid { + + static class Directives implements CContext.Directives { + + @Override + public List getHeaderFiles() { + + return Collections.unmodifiableList(Collections.singletonList("")); + } + } + + @CFunction + public static native int getpid(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalLibc.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalLibc.java new file mode 100644 index 00000000000..e6dfadb6b7e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalLibc.java @@ -0,0 +1,94 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.os; + +import java.util.Locale; +import java.util.Optional; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class GraalLibc implements Libc { + + private static final Logger LOG = LoggerFactory.getLogger(GraalLibc.class); + + private static final Locale LOCALE = Locale.ENGLISH; + + private static final String MAC_PLATFORM_STR = "mac".toLowerCase(LOCALE); + private static final String DARWIN_PLATFORM_STR = "darwin".toLowerCase(LOCALE); + private static final String LINUX_PLATFORM_STR = "linux".toLowerCase(LOCALE); + + private final boolean available = checkAvailability(); + + /* This method is adapted from of jnr.ffi.Platform.determineOS() in jnr-ffi version 2.1.10. **/ + private boolean checkPlatform() { + + String osName = System.getProperty("os.name").split(" ", -1)[0]; + String compareStr = osName.toLowerCase(Locale.ENGLISH); + return compareStr.startsWith(MAC_PLATFORM_STR) + || compareStr.startsWith(DARWIN_PLATFORM_STR) + || compareStr.startsWith(LINUX_PLATFORM_STR); + } + + private boolean checkAvailability() { + + if (!checkPlatform()) { + return false; + } + + try { + getpidRaw(); + } catch (Throwable t) { + + LOG.debug("Error calling getpid()", t); + return false; + } + + try { + gettimeofdayRaw(); + } catch (Throwable t) { + + LOG.debug("Error calling gettimeofday()", t); + return false; + } + + return true; + } + + @Override + public boolean available() { + return this.available; + } + + /* Substrate includes a substitution for Linux + Darwin which redefines System.nanoTime() to use + * gettimeofday() (unless platform-specific higher-res clocks are available, which is even better). */ + @Override + public Optional gettimeofday() { + return this.available ? Optional.of(gettimeofdayRaw()) : Optional.empty(); + } + + private long gettimeofdayRaw() { + return Math.round(System.nanoTime() / 1_000d); + } + + @Override + public Optional getpid() { + return this.available ? 
Optional.of(getpidRaw()) : Optional.empty(); + } + + private int getpidRaw() { + return GraalGetpid.getpid(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java index 8fcc5ce6260..c2f8b03c740 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/Native.java @@ -25,8 +25,20 @@ public class Native { private static class LibcLoader { + /* These values come from Graal's imageinfo API which aims to offer the ability to detect + * when we're in the Graal build/run time via system props. The maintainers of Graal have + * agreed that this API will not change over time. We reference these props as literals + * to avoid introducing a dependency on Graal code for non-Graal users here. */ + private static final String GRAAL_STATUS_PROP = "org.graalvm.nativeimage.imagecode"; + private static final String GRAAL_BUILDTIME_STATUS = "buildtime"; + private static final String GRAAL_RUNTIME_STATUS = "runtime"; + public Libc load() { try { + if (isGraal()) { + LOG.info("Using Graal-specific native functions"); + return new GraalLibc(); + } return new JnrLibc(); } catch (Throwable t) { LOG.info( @@ -35,6 +47,13 @@ public Libc load() { return new EmptyLibc(); } } + + private boolean isGraal() { + + String val = System.getProperty(GRAAL_STATUS_PROP); + return val != null + && (val.equals(GRAAL_RUNTIME_STATUS) || val.equalsIgnoreCase(GRAAL_BUILDTIME_STATUS)); + } } private static final Libc LIBC = new LibcLoader().load(); diff --git a/pom.xml b/pom.xml index dd41cdcc531..ffbaa5e93d8 100644 --- a/pom.xml +++ b/pom.xml @@ -377,6 +377,11 @@ wiremock 2.25.0 + + org.graalvm.sdk + graal-sdk + 20.0.0 + From 634658c85f3d2748d3034ce93e36392c8d1d734b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 11 May 2020 11:54:00 +0200 Subject: [PATCH 467/979] Replace DseSession by CqlSession 
in manual page on Reactive --- manual/core/reactive/README.md | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 47dae02b1ec..d7bcf18cf8d 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -9,7 +9,7 @@ Notes: * reactive capabilities require the [Reactive Streams API] to be present on the classpath. The driver has a dependency on that library, but if your application does not use reactive queries at all, it is possible to exclude it to minimize the number of runtime dependencies. If the library - cannot be found at runtime, reactive queries won't be available and a warning will be logged, but + cannot be found at runtime, reactive queries won't be available, and a warning will be logged, but the driver will otherwise operate normally (this is also valid for OSGi deployments). * for historical reasons, reactive-related driver types reside in a package prefixed with `dse`; however, reactive queries also work with regular Cassandra. @@ -50,7 +50,7 @@ The following example reads from a table and prints all the returned rows to the error, a `DriverException` is thrown and its stack trace is printed to standard error: ```java -try (DseSession session = ...) { +try (CqlSession session = ...) { Flux.from(session.executeReactive("SELECT ...")) .doOnNext(System.out::println) .blockLast(); @@ -65,7 +65,7 @@ The following example inserts rows into a table after printing the queries to th at the first error, if any. Again, in case of error, a `DriverException` is thrown: ```java -try (DseSession session = ...) { +try (CqlSession session = ...) { Flux.just("INSERT ...", "INSERT ...", "INSERT ...", ...) 
.doOnNext(System.out::println) .flatMap(session::executeReactive) @@ -120,11 +120,12 @@ Publisher getExecutionInfos(); Publisher wasApplied(); ``` -Refer to the javadocs of [getColumnDefinitions], -[getExecutionInfos] and -[wasApplied] for more information on these methods. +Refer to the javadocs of [getColumnDefinitions], [getExecutionInfos] and [wasApplied] for more +information on these methods. -To inspect the contents of the above publishers, simply subscribe to them. Note that these publishers cannot complete before the query itself completes; if the query fails, then these publishers will fail with the same error. +To inspect the contents of the above publishers, simply subscribe to them. Note that these +publishers cannot complete before the query itself completes; if the query fails, then these +publishers will fail with the same error. The following example executes a query, then prints all the available metadata to the console: @@ -141,9 +142,12 @@ System.out.println("Was applied: "); Mono.from(rs.wasApplied()).doOnNext(System.out::println).block(); ``` -Note that it is also possible to inspect query metadata at row level. Each row returned by a reactive query execution implements [`ReactiveRow`][ReactiveRow], the reactive equivalent of a [`Row`][Row]. +Note that it is also possible to inspect query metadata at row level. Each row returned by a +reactive query execution implements [`ReactiveRow`][ReactiveRow], the reactive equivalent of a +[`Row`][Row]. 
-`ReactiveRow` exposes the same kind of query metadata and execution info found in `ReactiveResultSet`, but for each individual row: +`ReactiveRow` exposes the same kind of query metadata and execution info found in +`ReactiveResultSet`, but for each individual row: ```java ColumnDefinitions getColumnDefinitions(); From c626997b7afbd1bfe1edbf22b345b34ecb6161fd Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 3 Jun 2020 13:26:50 -0700 Subject: [PATCH 468/979] Remove outdated mapper test --- .../java/com/datastax/oss/driver/mapper/ProfileIT.java | 7 ------- 1 file changed, 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java index 750f9378349..d03f280704d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java @@ -188,13 +188,6 @@ public void should_use_default_when_no_profile() { assertClForAllQueries(dao, ConsistencyLevel.LOCAL_ONE); } - @Test(expected = IllegalStateException.class) - public void should_fail_if_mapper_provides_both_profile_and_name() { - mapperBuilder - .withDefaultExecutionProfileName("cl_one") - .withDefaultExecutionProfile(clTwoProfile); - } - private void assertClForAllQueries(SimpleDao dao, ConsistencyLevel expectedLevel) { dao.save(SAMPLE_ENTITY); assertServerSideCl(expectedLevel); From 6cb6c2426efd3e2b86d2485e9e15720cc929d877 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 3 Jun 2020 15:14:07 -0700 Subject: [PATCH 469/979] Cover reactive return types in mapper docs --- manual/mapper/daos/delete/README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 67f6883a1af..ae6fb550ae3 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -127,6 +127,13 @@ The method can 
return: @Delete(entityClass = Product.class, customIfClause = "description = :expectedDescription") CompletionStage deleteIfDescriptionMatchesAsync(UUID productId, String expectedDescription); ``` + +* a [ReactiveResultSet]. + + ```java + @Delete + ReactiveResultSet deleteReactive(Product product); + ``` Note that you can also return a boolean or result set for non-conditional queries, but there's no practical purpose for that since those queries always return `wasApplied = true` and an empty result @@ -150,6 +157,8 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). [ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html [ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html + [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file From f4e6f78c5cf2ff3628d285e1149d37f90050a459 Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 3 Jun 2020 15:35:01 -0700 Subject: [PATCH 470/979] Cover reactive return types in mapper docs --- manual/mapper/daos/insert/README.md | 9 ++++++++- manual/mapper/daos/query/README.md | 4 ++++ manual/mapper/daos/select/README.md | 8 ++++++++ manual/mapper/daos/update/README.md | 8 ++++++++ .../oss/driver/api/mapper/annotations/Delete.java | 6 ++++++ .../oss/driver/api/mapper/annotations/Insert.java | 6 ++++++ .../oss/driver/api/mapper/annotations/Query.java | 3 +++ .../oss/driver/api/mapper/annotations/Select.java | 6 ++++++ 
.../oss/driver/api/mapper/annotations/Update.java | 6 ++++++ 9 files changed, 55 insertions(+), 1 deletion(-) diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index ad26943b704..4484d11530f 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -89,6 +89,13 @@ The method can return: CompletableFuture> insertIfNotExists(Product product); ``` +* a [ReactiveResultSet]. + + ```java + @Insert + ReactiveResultSet insertReactive(Product product); + ``` + ### Target keyspace and table If a keyspace was specified [when creating the DAO](../../mapper/#dao-factory-methods), then the @@ -105,7 +112,7 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). [ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- [BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html - +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index b83e36288a2..d24975e28cc 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -62,6 +62,8 @@ The method can return: * a [CompletionStage] or [CompletableFuture] of any of the above. The method will execute the query asynchronously. 
Note that for result sets and iterables, you need to switch to the asynchronous equivalent [AsyncResultSet] and [MappedAsyncPagingIterable] respectively. + +* a [ReactiveResultSet], or a [MappedReactiveResultSet] of the entity class. ### Target keyspace and table @@ -115,6 +117,8 @@ Then: [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html [Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html [BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 8981ee144dd..22fc54e9a27 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -122,6 +122,13 @@ In all cases, the method can return: @Select(customWhereClause = "description LIKE :searchString") CompletionStage> findByDescriptionAsync(String searchString); ``` + +* a [MappedReactiveResultSet] of the entity class. + + ```java + @Select(customWhereClause = "description LIKE :searchString") + MappedReactiveResultSet findByDescriptionReactive(String searchString); + ``` ### Target keyspace and table @@ -145,6 +152,7 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). 
[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 0132cf97b0e..baea6d506fe 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -123,6 +123,13 @@ The method can return: @Update(customIfClause = "description = :expectedDescription") CompletableFuture updateIfDescriptionMatches(Product product, String expectedDescription); ``` + +* a [ReactiveResultSet]. + + ```java + @Update + ReactiveResultSet updateReactive(Product product); + ``` ### Target keyspace and table @@ -143,3 +150,4 @@ entity class and the naming convention). 
[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html [ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html [BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java index b16cd06b4db..aca16abede2 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.BoundStatement; @@ -111,6 +112,11 @@ * @Delete(entityClass = Product.class, customIfClause = "description = :expectedDescription") * CompletionStage<AsyncResultSet> deleteIfDescriptionMatchesAsync(UUID productId, String expectedDescription); * + *

      • a {@link ReactiveResultSet}. + *
        + * @Delete
        + * ReactiveResultSet deleteReactive(Product product);
        + *       
        * * * Note that you can also return a boolean or result set for non-conditional queries, but there's no diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java index 62b9a46dfcb..edf0880f23d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ -110,6 +111,11 @@ * @Insert(ifNotExists = true) * CompletableFuture<Optional<Product>> insertIfNotExists(Product product); * + *
      • a {@link ReactiveResultSet}. + *
        + * @Insert
        + * ReactiveResultSet insertReactive(Product product);
        + *       
        * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java index 3c89453d4a4..bb37e8f2dec 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; import com.datastax.oss.driver.api.core.PagingIterable; @@ -91,6 +93,7 @@ * execute the query asynchronously. Note that for result sets and iterables, you need to * switch to the asynchronous equivalent {@link AsyncResultSet} and {@link * MappedAsyncPagingIterable} respectively. + *
      • a {@link ReactiveResultSet}, or a {@link MappedReactiveResultSet} of the entity class. * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java index ea870d407aa..0a7f6c04864 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; import com.datastax.oss.driver.api.core.PagingIterable; @@ -120,6 +121,11 @@ * @Select(customWhereClause = "description LIKE :searchString") * CompletionStage<MappedAsyncPagingIterable<Product>> findByDescriptionAsync(String searchString); * + *
      • a {@link MappedReactiveResultSet} of the entity class. + *
        + * @Select(customWhereClause = "description LIKE :searchString")
        + * MappedReactiveResultSet<Product> findByDescriptionReactive(String searchString);
        + *       
        * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java index 759723346a8..c242dd1fb07 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.BoundStatement; @@ -123,6 +124,11 @@ * @Update(customIfClause = "description = :expectedDescription") * CompletableFuture<AsyncResultSet> updateIfDescriptionMatches(Product product, String expectedDescription); * + *
      • a {@link ReactiveResultSet}. + *
        + * @Update
        + * ReactiveResultSet updateReactive(Product product);
        + *       
        * * *

        Target keyspace and table

        From d523e9a95e63a740bc6af4645130e438fba248df Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Thu, 4 Jun 2020 09:22:10 -0700 Subject: [PATCH 471/979] JAVA-2792: Allow custom results in the mapper (#1446) --- changelog/README.md | 1 + .../oss/driver/mapper/CustomResultTypeIT.java | 167 ++++++++++ .../mapper/GuavaFutureProducerService.java | 108 +++++++ ....mapper.result.MapperResultProducerService | 1 + manual/mapper/.nav | 3 +- manual/mapper/daos/custom_types/README.md | 244 +++++++++++++++ manual/mapper/daos/delete/README.md | 2 + manual/mapper/daos/insert/README.md | 2 + manual/mapper/daos/query/README.md | 2 + manual/mapper/daos/select/README.md | 2 + manual/mapper/daos/update/README.md | 2 + .../processor/DefaultProcessorContext.java | 10 +- .../mapper/processor/MapperProcessor.java | 35 ++- .../mapper/processor/ProcessorContext.java | 2 + .../dao/DaoDeleteMethodGenerator.java | 27 +- .../dao/DaoInsertMethodGenerator.java | 25 +- .../processor/dao/DaoMethodGenerator.java | 39 ++- .../dao/DaoQueryMethodGenerator.java | 23 +- .../processor/dao/DaoReturnTypeKind.java | 15 +- .../dao/DaoSelectMethodGenerator.java | 30 +- .../dao/DaoUpdateMethodGenerator.java | 29 +- .../dao/DefaultDaoReturnTypeKind.java | 296 +++++++++++++++--- .../dao/DefaultDaoReturnTypeParser.java | 29 ++ .../generation/GeneratedCodePatterns.java | 5 +- .../mapper/processor/MapperProcessorTest.java | 9 +- .../dao/DaoDeleteMethodGeneratorTest.java | 36 +++ mapper-runtime/revapi.json | 5 + .../oss/driver/api/mapper/MapperContext.java | 17 + .../driver/api/mapper/annotations/Delete.java | 2 + .../driver/api/mapper/annotations/Insert.java | 2 + .../driver/api/mapper/annotations/Query.java | 2 + .../driver/api/mapper/annotations/Select.java | 2 + .../driver/api/mapper/annotations/Update.java | 2 + .../mapper/result/MapperResultProducer.java | 107 +++++++ .../result/MapperResultProducerService.java | 36 +++ .../internal/mapper/DefaultMapperContext.java | 37 +++ 36 files changed, 1218 
insertions(+), 138 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java create mode 100644 integration-tests/src/test/resources/META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService create mode 100644 manual/mapper/daos/custom_types/README.md create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.java diff --git a/changelog/README.md b/changelog/README.md index aef8511caf4..e432b087393 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [new feature] JAVA-2792: Allow custom results in the mapper - [improvement] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name ### 4.6.1 diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java new file mode 100644 index 00000000000..6698eac341a --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java @@ -0,0 +1,167 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Query; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.ListenableFuture; +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class CustomResultTypeIT extends InventoryITBase { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private 
static ProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = SESSION_RULE.session(); + + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); + } + + InventoryMapper mapper = InventoryMapper.builder(SESSION_RULE.session()).build(); + dao = mapper.productDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_use_custom_result_for_insert_method() + throws ExecutionException, InterruptedException { + + ListenableFuture insertFuture = dao.insert(FLAMETHROWER); + insertFuture.get(); + + Row row = SESSION_RULE.session().execute("SELECT id FROM product").one(); + UUID insertedId = row.getUuid(0); + assertThat(insertedId).isEqualTo(FLAMETHROWER.getId()); + } + + @Test + public void should_use_custom_result_for_select_method() + throws ExecutionException, InterruptedException { + + dao.insert(FLAMETHROWER).get(); + + ListenableFuture selectFuture = dao.select(FLAMETHROWER.getId()); + Product selectedProduct = selectFuture.get(); + assertThat(selectedProduct).isEqualTo(FLAMETHROWER); + } + + @Test + public void should_use_custom_result_for_update_method() + throws ExecutionException, InterruptedException { + + dao.insert(FLAMETHROWER).get(); + + Product productToUpdate = dao.select(FLAMETHROWER.getId()).get(); + productToUpdate.setDescription("changed description"); + ListenableFuture updateFuture = dao.update(productToUpdate); + updateFuture.get(); + + Product selectedProduct = dao.select(FLAMETHROWER.getId()).get(); + assertThat(selectedProduct.getDescription()).isEqualTo("changed description"); + } + + @Test + public void should_use_custom_result_for_delete_method() + throws ExecutionException, InterruptedException { + dao.insert(FLAMETHROWER).get(); + + ListenableFuture deleteFuture = dao.delete(FLAMETHROWER); + deleteFuture.get(); + + Product selectedProduct = dao.select(FLAMETHROWER.getId()).get(); + 
assertThat(selectedProduct).isNull(); + } + + @Test + public void should_use_custom_result_for_query_method() + throws ExecutionException, InterruptedException { + dao.insert(FLAMETHROWER).get(); + + ListenableFuture deleteFuture = dao.deleteById(FLAMETHROWER.getId()); + deleteFuture.get(); + + Product selectedProduct = dao.select(FLAMETHROWER.getId()).get(); + assertThat(selectedProduct).isNull(); + } + + public interface ListenableFutureDao { + + @Select + ListenableFuture select(UUID id); + + @Update + ListenableFuture update(EntityT entity); + + @Insert + ListenableFuture insert(EntityT entity); + + @Delete + ListenableFuture delete(EntityT entity); + } + + @Dao + public interface ProductDao extends ListenableFutureDao { + + // We could do this easier with @Delete, but the goal here is to test @Query + @Query("DELETE FROM ${keyspaceId}.product WHERE id = :id") + ListenableFuture deleteById(UUID id); + } + + @Mapper + public interface InventoryMapper { + + @DaoFactory + ProductDao productDao(@DaoKeyspace CqlIdentifier keyspace); + + static MapperBuilder builder(CqlSession session) { + return new CustomResultTypeIT_InventoryMapperBuilder(session); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java new file mode 100644 index 00000000000..dc8a42214a8 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java @@ -0,0 +1,108 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducerService; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Futures; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.ListenableFuture; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.SettableFuture; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +public class GuavaFutureProducerService implements MapperResultProducerService { + + @Override + public Iterable getProducers() { + return ImmutableList.of( + // Note that order matters, both producers operate on ListenableFuture, + // the most specific must come first. 
+ new VoidListenableFutureProducer(), new SingleEntityListenableFutureProducer()); + } + + public abstract static class ListenableFutureProducer implements MapperResultProducer { + + @Nullable + @Override + public Object execute( + @NonNull Statement statement, + @NonNull MapperContext context, + @Nullable EntityHelper entityHelper) { + SettableFuture result = SettableFuture.create(); + context + .getSession() + .executeAsync(statement) + .whenComplete( + (resultSet, error) -> { + if (error != null) { + result.setException(error); + } else { + result.set(convert(resultSet, entityHelper)); + } + }); + return result; + } + + protected abstract Object convert( + AsyncResultSet resultSet, EntityHelper entityHelper); + + @Nullable + @Override + public Object wrapError(@NonNull Throwable error) { + return Futures.immediateFailedFuture(error); + } + } + + public static class VoidListenableFutureProducer extends ListenableFutureProducer { + + private static final GenericType> PRODUCED_TYPE = + new GenericType>() {}; + + @Override + public boolean canProduce(@NonNull GenericType resultType) { + return resultType.equals(PRODUCED_TYPE); + } + + @Override + protected Object convert( + AsyncResultSet resultSet, EntityHelper entityHelper) { + // ignore results + return null; + } + } + + public static class SingleEntityListenableFutureProducer extends ListenableFutureProducer { + + @Override + public boolean canProduce(@NonNull GenericType resultType) { + return resultType.getRawType().equals(ListenableFuture.class); + } + + @Override + protected Object convert( + AsyncResultSet resultSet, EntityHelper entityHelper) { + Row row = resultSet.one(); + return (row == null) ? 
null : entityHelper.get(row); + } + } +} diff --git a/integration-tests/src/test/resources/META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService b/integration-tests/src/test/resources/META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService new file mode 100644 index 00000000000..8ad40a9d327 --- /dev/null +++ b/integration-tests/src/test/resources/META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService @@ -0,0 +1 @@ +com.datastax.oss.driver.mapper.GuavaFutureProducerService \ No newline at end of file diff --git a/manual/mapper/.nav b/manual/mapper/.nav index 09b843995be..7bfdb6c0c8e 100644 --- a/manual/mapper/.nav +++ b/manual/mapper/.nav @@ -1,4 +1,5 @@ entities daos mapper -config \ No newline at end of file +config +custom_types \ No newline at end of file diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md new file mode 100644 index 00000000000..82e98c5e475 --- /dev/null +++ b/manual/mapper/daos/custom_types/README.md @@ -0,0 +1,244 @@ +## Custom result types + +The mapper supports a pre-defined set of built-in types for DAO method results. For example, a +[Select](../select/#return-type) method can return a single entity, an asynchronous +`CompletionStage`, a `ReactiveResultSet`, etc. + +Sometimes it's convenient to use your own types. For example if you use a specific Reactive Streams +implementation (RxJava, Reactor, Mutiny...), you probably want your DAOs to return those types +directly, instead of having to wrap every call manually. + +To achieve this, the mapper allows you to plug custom logic that will get invoked when an unknown +type is encountered. + +In the rest of this page, we'll show a simple example that replaces Java's `CompletableFuture` with +Guava's `ListenableFuture`. 
Our goal is to have the mapper implement this interface: + +```java +import com.google.common.util.concurrent.ListenableFuture; + +@Dao +public interface ProductDao { + @Select + ListenableFuture select(UUID id); + + @Update + ListenableFuture update(Product entity); + + @Insert + ListenableFuture insert(Product entity); + + @Delete + ListenableFuture delete(Product entity); +} +``` + +### Writing the producers + +The basic component that encapsulates conversion logic is [MapperResultProducer]. Our DAO has two +different return types: `ListenableFuture` and `ListenableFuture`. So we're going to +write two producers: + +#### Future of void + +```java +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.SettableFuture; + +public class FutureOfVoidProducer implements MapperResultProducer { + private static final GenericType> PRODUCED_TYPE = + new GenericType>() {}; + + @Override + public boolean canProduce(GenericType resultType) { + return resultType.equals(PRODUCED_TYPE); // (1) + } + + @Override + public ListenableFuture execute( + Statement statement, MapperContext context, EntityHelper entityHelper) { + CqlSession session = context.getSession(); // (2) + SettableFuture result = SettableFuture.create(); // (3) + session.executeAsync(statement).whenComplete( + (resultSet, error) -> { + if (error != null) { + result.setException(error); + } else { + result.set(null); + }}); + return result; + } + + @Override + public ListenableFuture wrapError(Throwable error) { + return Futures.immediateFailedFuture(error); // (4) + } +} +``` + +All the producer methods will be invoked at runtime, by the mapper-generated DAO implementation: + +1. `canProduce()` is used to select a producer. All registered producers are tried in the order that + they were added, the first one that returns `true` is used. 
The [GenericType] argument is a + runtime representation of the static type. Here we know exactly the type we're looking for: + `ListenableFuture`. So we can use simple equality. +2. `execute()` is invoked once the statement is ready to be sent. Note that the producer is not only + responsible for converting the result, but also for invoking the appropriate execution method: to + this effect, it receives the [MapperContext], which provides access to the session. The + `entityHelper` argument is not used in this implementation (and in fact it happens to be `null`); + see the next producer for more explanations. +3. We execute the statement asynchronously to obtain a `CompletionStage`, and then convert it into a + `ListenableFuture`. +4. `wrapError()` handles any error thrown throughout the process (either while building the + statement, or while invoking `execute()` in this class). Clients of asynchronous APIs generally + expect to deal with exceptions in future callbacks rather than having to catch them directly, so + we create a failed future. + +Note that we specialized the return types of `execute()` and `wrapError()`, instead of using +`Object` as declared by the parent interface. This is not strictly necessary (the calling code only +knows the parent interface, so there *will* be an unchecked cast), but it makes the code a bit nicer +to read. 
+ +#### Future of entity + +```java +public static class FutureOfEntityProducer implements MapperResultProducer { + @Override + public boolean canProduce(GenericType resultType) { + return resultType.getRawType().equals(ListenableFuture.class); // (1) + } + + @Override + public ListenableFuture execute( + Statement statement, MapperContext context, EntityHelper entityHelper) { + SettableFuture result = SettableFuture.create(); + CqlSession session = context.getSession(); + session + .executeAsync(statement) + .whenComplete( + (resultSet, error) -> { + if (error != null) { + result.setException(error); + } else { + Row row = resultSet.one(); + result.set((row == null) ? null : entityHelper.get(row)); // (2) + } + }); + return result; + } + + @Override + public ListenableFuture wrapError(Throwable error) { + return Futures.immediateFailedFuture(error); // same as other producer + } +} +``` + +1. We could use an exact match with `ListenableFuture` like the previous example, but + that's not very scalable: in a real application, we'll probably have more than one entity, we + don't want to write a separate producer every time. So instead we match `ListenableFuture`. + Note that this would also match `ListenableFuture`, so we'll have to be careful of the order + of the producers (more on that in the "packaging" section below). +2. Whenever a return type references a mapped entity, the mapper processor will detect it and inject + the corresponding [EntityHelper] in the `execute()` method. This is a general-purpose utility + class used throughout the mapper, in this case the method we're more specifically interested in is + `get()`: it allows us to convert CQL rows into entity instances. + +At most one entity class is allowed in the return type. + +#### Matching more complex types + +The two examples above (exact match and matching the raw type) should cover the vast majority of +needs. 
Occasionally you may encounter cases with deeper level of parameterization, such as +`ListenableFuture>`. To match this you'll have to call `getType()` and switch to +the `java.lang.reflect` world: + +```java +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; + +// Matches ListenableFuture> +public boolean canProduce(GenericType genericType) { + if (genericType.getRawType().equals(ListenableFuture.class)) { + Type type = genericType.getType(); + if (type instanceof ParameterizedType) { + Type[] arguments = ((ParameterizedType) type).getActualTypeArguments(); + if (arguments.length == 1) { + Type argument = arguments[0]; + return argument instanceof ParameterizedType + && ((ParameterizedType) argument).getRawType().equals(Optional.class); + } + } + } + return false; +} +``` + +As you can see, this is not the most pleasant API to work with. + +### Packaging the producers in a service + +Once all the producers are ready, we package them in a class that implements +[MapperResultProducerService]: + +```java +public class GuavaFutureProducerService implements MapperResultProducerService { + @Override + public Iterable getProducers() { + return Arrays.asList( + // Order matters, the most specific must come first. + new FutureOfVoidProducer(), new FutureOfEntityProducer()); + } +} +``` + +As hinted previously, the order of the producers matter: they will be tried from left to right. +Since our "future of entity" producer would also match `Void`, it must come last. + +The mapper uses the Java Service Provider mechanism to register producers: create a new file +`META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService`, +containing the name of the implementation: + +``` +some.package.name.GuavaFutureProducerService +``` + +You can put the producers, service and service descriptor directly in your application, or +distribute them as a standalone JAR if you intend to reuse them. 
+ +### Disabling custom types + +Custom types are handled at runtime. This goes a bit against the philosophy of the rest of the +object mapper, where most of the work is done at compile time thanks to annotation processing. There +are ways to extend the mapper processor, but we feel that this would be too complicated for this use +case. + +One downside is that validation can now only be done at runtime: if you use a return type that isn't +supported by any producer, you'll only find out when you call the method. + +**If you don't use custom types at all**, you can disable the feature with an annotation processor +flag: + +```xml + + + + maven-compiler-plugin + + -Acom.datastax.oss.driver.mapper.customResults.enabled=false + + + + +``` + +With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as +a compiler error. + +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index ae6fb550ae3..b1be86f3a23 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -135,6 +135,8 @@ The method can return: ReactiveResultSet deleteReactive(Product product); ``` +* a [custom type](../custom_types). 
+ Note that you can also return a boolean or result set for non-conditional queries, but there's no practical purpose for that since those queries always return `wasApplied = true` and an empty result set. diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 4484d11530f..895952ecf7f 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -96,6 +96,8 @@ The method can return: ReactiveResultSet insertReactive(Product product); ``` +* a [custom type](../custom_types). + ### Target keyspace and table If a keyspace was specified [when creating the DAO](../../mapper/#dao-factory-methods), then the diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index d24975e28cc..d26ead2eeb3 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -65,6 +65,8 @@ The method can return: * a [ReactiveResultSet], or a [MappedReactiveResultSet] of the entity class. +* a [custom type](../custom_types). + ### Target keyspace and table To avoid hard-coding the keyspace and table name, the query string supports 3 additional diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 22fc54e9a27..25bc12a68ad 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -130,6 +130,8 @@ In all cases, the method can return: MappedReactiveResultSet findByDescriptionReactive(String searchString); ``` +* a [custom type](../custom_types). 
+ ### Target keyspace and table If a keyspace was specified [when creating the DAO](../../mapper/#dao-factory-methods), then the diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index baea6d506fe..9bd354eac06 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -131,6 +131,8 @@ The method can return: ReactiveResultSet updateReactive(Product product); ``` +* a [custom type](../custom_types). + ### Target keyspace and table If a keyspace was specified [when creating the DAO](../../mapper/#dao-factory-methods), then the diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java index e6d3062539c..0892892d859 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java @@ -44,6 +44,7 @@ public class DefaultProcessorContext implements ProcessorContext { private final Classes classUtils; private final JavaPoetFiler filer; private final LoggingGenerator loggingGenerator; + private final boolean customResultsEnabled; public DefaultProcessorContext( DecoratedMessager messager, @@ -51,13 +52,15 @@ public DefaultProcessorContext( Elements elementUtils, Filer filer, String indent, - boolean logsEnabled) { + boolean logsEnabled, + boolean customResultsEnabled) { this.messager = messager; this.typeUtils = typeUtils; this.elementUtils = elementUtils; this.classUtils = new Classes(typeUtils, elementUtils); this.filer = new JavaPoetFiler(filer, indent); this.loggingGenerator = new LoggingGenerator(logsEnabled); + this.customResultsEnabled = customResultsEnabled; } protected CodeGeneratorFactory buildCodeGeneratorFactory() { @@ -107,4 +110,9 @@ public 
EntityFactory getEntityFactory() { public LoggingGenerator getLoggingGenerator() { return loggingGenerator; } + + @Override + public boolean areCustomResultsEnabled() { + return customResultsEnabled; + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index c397d972c7a..ffad7dd7163 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -38,12 +38,15 @@ public class MapperProcessor extends AbstractProcessor { private static final boolean DEFAULT_MAPPER_LOGS_ENABLED = true; + private static final boolean DEFAULT_CUSTOM_RESULTS_ENABLED = true; private static final String INDENT_AMOUNT_OPTION = "com.datastax.oss.driver.mapper.indent"; private static final String INDENT_WITH_TABS_OPTION = "com.datastax.oss.driver.mapper.indentWithTabs"; private static final String MAPPER_LOGS_ENABLED_OPTION = "com.datastax.oss.driver.mapper.logs.enabled"; + private static final String CUSTOM_RESULTS_ENABLED_OPTION = + "com.datastax.oss.driver.mapper.customResults.enabled"; private DecoratedMessager messager; private Types typeUtils; @@ -51,6 +54,7 @@ public class MapperProcessor extends AbstractProcessor { private Filer filer; private String indent; private boolean logsEnabled; + private boolean customResultsEnabled; @Override public synchronized void init(ProcessingEnvironment processingEnvironment) { @@ -59,23 +63,26 @@ public synchronized void init(ProcessingEnvironment processingEnvironment) { typeUtils = processingEnvironment.getTypeUtils(); elementUtils = processingEnvironment.getElementUtils(); filer = processingEnvironment.getFiler(); - indent = computeIndent(processingEnvironment.getOptions()); - logsEnabled = 
isLogsEnabled(processingEnvironment.getOptions()); + Map options = processingEnvironment.getOptions(); + indent = computeIndent(options); + logsEnabled = + getBooleanOption(options, MAPPER_LOGS_ENABLED_OPTION, DEFAULT_MAPPER_LOGS_ENABLED); + customResultsEnabled = + getBooleanOption(options, CUSTOM_RESULTS_ENABLED_OPTION, DEFAULT_CUSTOM_RESULTS_ENABLED); } - private boolean isLogsEnabled(Map options) { - String mapperLogsEnabled = options.get(MAPPER_LOGS_ENABLED_OPTION); - if (mapperLogsEnabled != null) { - return Boolean.parseBoolean(mapperLogsEnabled); - } - return DEFAULT_MAPPER_LOGS_ENABLED; + private boolean getBooleanOption( + Map options, String optionName, boolean defaultValue) { + String value = options.get(optionName); + return (value == null) ? defaultValue : Boolean.parseBoolean(value); } @Override public boolean process( Set annotations, RoundEnvironment roundEnvironment) { ProcessorContext context = - buildContext(messager, typeUtils, elementUtils, filer, indent, logsEnabled); + buildContext( + messager, typeUtils, elementUtils, filer, indent, logsEnabled, customResultsEnabled); CodeGeneratorFactory generatorFactory = context.getCodeGeneratorFactory(); processAnnotatedTypes( @@ -93,9 +100,10 @@ protected ProcessorContext buildContext( Elements elementUtils, Filer filer, String indent, - boolean logsEnabled) { + boolean logsEnabled, + boolean customResultsEnabled) { return new DefaultProcessorContext( - messager, typeUtils, elementUtils, filer, indent, logsEnabled); + messager, typeUtils, elementUtils, filer, indent, logsEnabled, customResultsEnabled); } protected void processAnnotatedTypes( @@ -133,7 +141,10 @@ public Set getSupportedAnnotationTypes() { @Override public Set getSupportedOptions() { return ImmutableSet.of( - INDENT_AMOUNT_OPTION, INDENT_WITH_TABS_OPTION, MAPPER_LOGS_ENABLED_OPTION); + INDENT_AMOUNT_OPTION, + INDENT_WITH_TABS_OPTION, + MAPPER_LOGS_ENABLED_OPTION, + CUSTOM_RESULTS_ENABLED_OPTION); } @Override diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/ProcessorContext.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/ProcessorContext.java index fc6fdd46724..78e2b1765c5 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/ProcessorContext.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/ProcessorContext.java @@ -39,4 +39,6 @@ public interface ProcessorContext { EntityFactory getEntityFactory(); LoggingGenerator getLoggingGenerator(); + + boolean areCustomResultsEnabled(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index a445066106c..f1117fad762 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.CUSTOM; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; @@ -71,7 +72,8 @@ protected Set getSupportedReturnTypes() { RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, - REACTIVE_RESULT_SET); + REACTIVE_RESULT_SET, + 
CUSTOM); } @Override @@ -215,14 +217,14 @@ public Optional generate() { generatePrepareRequest( methodBuilder, requestName, helperFieldName, primaryKeyParameterCount)); - CodeBlock.Builder methodBodyBuilder = CodeBlock.builder(); + CodeBlock.Builder createStatementBlock = CodeBlock.builder(); - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$T boundStatementBuilder = $L.boundStatementBuilder()", BoundStatementBuilder.class, statementName); - populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); - populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithStatementAttributes(createStatementBlock, methodElement); + populateBuilderWithFunction(createStatementBlock, boundStatementFunction); int nextParameterIndex = 0; if (hasEntityParameter) { @@ -234,7 +236,7 @@ public Optional generate() { property.getType(), CodeBlock.of("$L.$L()", firstParameter.getSimpleName(), property.getGetterName()), "boundStatementBuilder", - methodBodyBuilder, + createStatementBlock, enclosingClass); } nextParameterIndex = 1; @@ -249,7 +251,7 @@ public Optional generate() { List bindMarkers = parameters.subList(0, primaryKeyParameterCount); warnIfCqlNamePresent(bindMarkers); GeneratedCodePatterns.bindParameters( - bindMarkers, primaryKeyNames, methodBodyBuilder, enclosingClass, context, false); + bindMarkers, primaryKeyNames, createStatementBlock, enclosingClass, context, false); nextParameterIndex = primaryKeyNames.size(); } @@ -269,22 +271,17 @@ public Optional generate() { parameters.subList(nextParameterIndex, parameters.size()); if (validateCqlNamesPresent(bindMarkers)) { GeneratedCodePatterns.bindParameters( - bindMarkers, methodBodyBuilder, enclosingClass, context, false); + bindMarkers, createStatementBlock, enclosingClass, context, false); } else { return Optional.empty(); } } - methodBodyBuilder + createStatementBlock .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", 
BoundStatement.class); - returnType.getKind().addExecuteStatement(methodBodyBuilder, helperFieldName); - - CodeBlock methodBody = returnType.getKind().wrapWithErrorHandling(methodBodyBuilder.build()); - - return Optional.of( - GeneratedCodePatterns.override(methodElement, typeParameters).addCode(methodBody).build()); + return crudMethod(createStatementBlock, returnType, helperFieldName); } private TypeElement getEntityFromAnnotation() { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 69e2400b7e2..bc7e84d7d5e 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.CUSTOM; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; @@ -75,7 +76,8 @@ protected Set getSupportedReturnTypes() { RESULT_SET, BOUND_STATEMENT, FUTURE_OF_ASYNC_RESULT_SET, - REACTIVE_RESULT_SET); + REACTIVE_RESULT_SET, + CUSTOM); } @Override @@ -131,15 +133,15 @@ public Optional generate() { (methodBuilder, requestName) -> generatePrepareRequest(methodBuilder, requestName, helperFieldName)); - CodeBlock.Builder 
methodBodyBuilder = CodeBlock.builder(); + CodeBlock.Builder createStatementBlock = CodeBlock.builder(); - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$T boundStatementBuilder = $L.boundStatementBuilder()", BoundStatementBuilder.class, statementName); - populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); - populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithStatementAttributes(createStatementBlock, methodElement); + populateBuilderWithFunction(createStatementBlock, boundStatementFunction); warnIfCqlNamePresent(parameters.subList(0, 1)); String entityParameterName = parameters.get(0).getSimpleName().toString(); @@ -148,7 +150,7 @@ public Optional generate() { nullSavingStrategyValidation.getNullSavingStrategy( Insert.class, Insert::nullSavingStrategy, methodElement, enclosingClass); - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$1L.set($2L, boundStatementBuilder, $3T.$4L)", helperFieldName, entityParameterName, @@ -160,22 +162,17 @@ public Optional generate() { List bindMarkers = parameters.subList(1, parameters.size()); if (validateCqlNamesPresent(bindMarkers)) { GeneratedCodePatterns.bindParameters( - bindMarkers, methodBodyBuilder, enclosingClass, context, false); + bindMarkers, createStatementBlock, enclosingClass, context, false); } else { return Optional.empty(); } } - methodBodyBuilder + createStatementBlock .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); - returnType.getKind().addExecuteStatement(methodBodyBuilder, helperFieldName); - - CodeBlock methodBody = returnType.getKind().wrapWithErrorHandling(methodBodyBuilder.build()); - - return Optional.of( - GeneratedCodePatterns.override(methodElement, typeParameters).addCode(methodBody).build()); + return crudMethod(createStatementBlock, returnType, helperFieldName); } private void generatePrepareRequest( diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 6608c4702b4..8e8a8509dba 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -15,23 +15,29 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.CUSTOM; + import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; import com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.annotations.StatementAttributes; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; +import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.MethodSpec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.Function; import java.util.function.UnaryOperator; +import java.util.stream.Collectors; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; @@ -77,7 +83,10 @@ protected DaoReturnType parseAndValidateReturnType( processedType, "Invalid 
return type: %s methods must return one of %s", annotationName, - validKinds); + validKinds.stream() + .filter(k -> k != CUSTOM) + .map(Object::toString) + .collect(Collectors.joining(", ", "[", "]"))); return null; } return returnType; @@ -234,4 +243,32 @@ protected boolean isFromClassFile() { TypeElement enclosingElement = (TypeElement) methodElement.getEnclosingElement(); return Reflection.loadClass(null, enclosingElement.getQualifiedName().toString()) != null; } + + /** + * Common pattern for CRUD methods that build a bound statement, execute it and convert the result + * into a target type. + * + * @param createStatementBlock the code that creates the statement. It must store it into a + * variable named "boundStatement". + */ + protected Optional crudMethod( + CodeBlock.Builder createStatementBlock, DaoReturnType returnType, String helperFieldName) { + + MethodSpec.Builder method = GeneratedCodePatterns.override(methodElement, typeParameters); + if (returnType.getKind() == CUSTOM) { + method.addStatement( + "$T producer = context.getResultProducer($L)", + MapperResultProducer.class, + enclosingClass.addGenericTypeConstant( + GeneratedCodePatterns.getTypeName(methodElement.getReturnType(), typeParameters))); + } + returnType + .getKind() + .addExecuteStatement(createStatementBlock, helperFieldName, methodElement, typeParameters); + method.addCode( + returnType + .getKind() + .wrapWithErrorHandling(createStatementBlock.build(), methodElement, typeParameters)); + return Optional.of(method.build()); + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java index 0d3250c7282..ff4718ab8dd 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java @@ -86,7 +86,7 @@ public Optional generate() { (methodBuilder, requestName) -> generatePrepareRequest(methodBuilder, requestName, helperFieldName)); - CodeBlock.Builder methodBodyBuilder = CodeBlock.builder(); + CodeBlock.Builder createStatementBlock = CodeBlock.builder(); List parameters = methodElement.getParameters(); @@ -95,7 +95,7 @@ public Optional generate() { parameters = parameters.subList(0, methodElement.getParameters().size() - 1); } - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$T boundStatementBuilder = $L.boundStatementBuilder()", BoundStatementBuilder.class, statementName); @@ -104,28 +104,21 @@ public Optional generate() { nullSavingStrategyValidation.getNullSavingStrategy( Query.class, Query::nullSavingStrategy, methodElement, enclosingClass); - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$1T nullSavingStrategy = $1T.$2L", NullSavingStrategy.class, nullSavingStrategy); - populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); - populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithStatementAttributes(createStatementBlock, methodElement); + populateBuilderWithFunction(createStatementBlock, boundStatementFunction); if (validateCqlNamesPresent(parameters)) { GeneratedCodePatterns.bindParameters( - parameters, methodBodyBuilder, enclosingClass, context, true); + parameters, createStatementBlock, enclosingClass, context, true); - methodBodyBuilder + createStatementBlock .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); - returnType.getKind().addExecuteStatement(methodBodyBuilder, helperFieldName); - - CodeBlock methodBody = returnType.getKind().wrapWithErrorHandling(methodBodyBuilder.build()); - - return Optional.of( - GeneratedCodePatterns.override(methodElement, typeParameters) - 
.addCode(methodBody) - .build()); + return crudMethod(createStatementBlock, returnType, helperFieldName); } else { return Optional.empty(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnTypeKind.java index 35b27284fe3..b4a7284db5d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoReturnTypeKind.java @@ -16,6 +16,10 @@ package com.datastax.oss.driver.internal.mapper.processor.dao; import com.squareup.javapoet.CodeBlock; +import java.util.Map; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.Name; +import javax.lang.model.element.TypeElement; /** * A "kind" of return type of a DAO method. @@ -33,8 +37,14 @@ public interface DaoReturnTypeKind { * @param methodBuilder the method to add the code to. * @param helperFieldName the name of the helper for entity conversions (might not get used for * certain kinds, in that case it's ok to pass null). + * @param methodElement the return type of the method (in case the result must be cast). + * @param typeParameters */ - void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName); + void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters); /** * Generates a try-catch around the given code block, to translate unchecked exceptions into a @@ -55,7 +65,8 @@ public interface DaoReturnTypeKind { *

        For some kinds, it's fine to let unchecked exceptions bubble up and no try-catch is * necessary; in this case, this method can return {@code innerBlock} unchanged. */ - CodeBlock wrapWithErrorHandling(CodeBlock innerBlock); + CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, ExecutableElement methodElement, Map typeParameters); /** A short description suitable for error messages. */ String getDescription(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 5125719e89d..7707fc84b8c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.CUSTOM; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_PAGING_ITERABLE; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY; @@ -68,7 +69,8 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, FUTURE_OF_ASYNC_PAGING_ITERABLE, - MAPPED_REACTIVE_RESULT_SET); + MAPPED_REACTIVE_RESULT_SET, + CUSTOM); } @Override @@ -153,14 +155,14 @@ public Optional generate() { generateSelectRequest( methodBuilder, requestName, helperFieldName, primaryKeyParameters.size())); - CodeBlock.Builder methodBodyBuilder = CodeBlock.builder(); + CodeBlock.Builder createStatementBlock = CodeBlock.builder(); - 
methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$T boundStatementBuilder = $L.boundStatementBuilder()", BoundStatementBuilder.class, statementName); - populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); - populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithStatementAttributes(createStatementBlock, methodElement); + populateBuilderWithFunction(createStatementBlock, boundStatementFunction); if (!primaryKeyParameters.isEmpty()) { List primaryKeyNames = @@ -169,28 +171,28 @@ public Optional generate() { .collect(Collectors.toList()) .subList(0, primaryKeyParameters.size()); GeneratedCodePatterns.bindParameters( - primaryKeyParameters, primaryKeyNames, methodBodyBuilder, enclosingClass, context, false); + primaryKeyParameters, + primaryKeyNames, + createStatementBlock, + enclosingClass, + context, + false); } if (!freeFormParameters.isEmpty()) { if (validateCqlNamesPresent(freeFormParameters)) { GeneratedCodePatterns.bindParameters( - freeFormParameters, methodBodyBuilder, enclosingClass, context, false); + freeFormParameters, createStatementBlock, enclosingClass, context, false); } else { return Optional.empty(); } } - methodBodyBuilder + createStatementBlock .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); - returnType.getKind().addExecuteStatement(methodBodyBuilder, helperFieldName); - - CodeBlock methodBody = returnType.getKind().wrapWithErrorHandling(methodBodyBuilder.build()); - - return Optional.of( - GeneratedCodePatterns.override(methodElement, typeParameters).addCode(methodBody).build()); + return crudMethod(createStatementBlock, returnType, helperFieldName); } private void generateSelectRequest( diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 9babcaee5bb..14e13fa149e 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.BOUND_STATEMENT; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.CUSTOM; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_BOOLEAN; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; @@ -70,7 +71,8 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_ASYNC_RESULT_SET, BOOLEAN, FUTURE_OF_BOOLEAN, - REACTIVE_RESULT_SET); + REACTIVE_RESULT_SET, + CUSTOM); } @Override @@ -117,15 +119,15 @@ public Optional generate() { (methodBuilder, requestName) -> generatePrepareRequest(methodBuilder, requestName, helperFieldName)); - CodeBlock.Builder methodBodyBuilder = CodeBlock.builder(); + CodeBlock.Builder createStatementBlock = CodeBlock.builder(); - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$T boundStatementBuilder = $L.boundStatementBuilder()", BoundStatementBuilder.class, statementName); - populateBuilderWithStatementAttributes(methodBodyBuilder, methodElement); - populateBuilderWithFunction(methodBodyBuilder, boundStatementFunction); + populateBuilderWithStatementAttributes(createStatementBlock, methodElement); + populateBuilderWithFunction(createStatementBlock, 
boundStatementFunction); String entityParameterName = parameters.get(0).getSimpleName().toString(); @@ -139,14 +141,14 @@ public Optional generate() { if (customWhereClause.isEmpty()) { // We generated an update by primary key (see maybeAddWhereClause), all entity properties are // present as placeholders. - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$1L.set($2L, boundStatementBuilder, $3T.$4L)", helperFieldName, entityParameterName, NullSavingStrategy.class, nullSavingStrategy); } else { - methodBodyBuilder.addStatement( + createStatementBlock.addStatement( "$1T nullSavingStrategy = $1T.$2L", NullSavingStrategy.class, nullSavingStrategy); // Only non-PK properties are present in SET ... clauses. @@ -157,7 +159,7 @@ public Optional generate() { property.getType(), CodeBlock.of("$L.$L()", entityParameterName, property.getGetterName()), "boundStatementBuilder", - methodBodyBuilder, + createStatementBlock, enclosingClass, true); } @@ -169,22 +171,17 @@ public Optional generate() { List bindMarkers = parameters.subList(1, parameters.size()); if (validateCqlNamesPresent(bindMarkers)) { GeneratedCodePatterns.bindParameters( - bindMarkers, methodBodyBuilder, enclosingClass, context, false); + bindMarkers, createStatementBlock, enclosingClass, context, false); } else { return Optional.empty(); } } - methodBodyBuilder + createStatementBlock .add("\n") .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); - returnType.getKind().addExecuteStatement(methodBodyBuilder, helperFieldName); - - CodeBlock methodBody = returnType.getKind().wrapWithErrorHandling(methodBodyBuilder.build()); - - return Optional.of( - GeneratedCodePatterns.override(methodElement, typeParameters).addCode(methodBody).build()); + return crudMethod(createStatementBlock, returnType, helperFieldName); } private void generatePrepareRequest( diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index 59a7454200c..2e9ba5f7e41 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -18,236 +18,440 @@ import com.datastax.dse.driver.internal.core.cql.reactive.FailedReactiveResultSet; import com.datastax.dse.driver.internal.mapper.reactive.FailedMappedReactiveResultSet; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; +import com.datastax.oss.driver.shaded.guava.common.base.Throwables; import com.squareup.javapoet.CodeBlock; +import com.squareup.javapoet.TypeName; +import java.util.Map; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.Name; +import javax.lang.model.element.TypeElement; +import javax.lang.model.type.TypeMirror; public enum DefaultDaoReturnTypeKind implements DaoReturnTypeKind { VOID { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { // Note that the execute* methods in the generated code are defined in DaoBase methodBuilder.addStatement("execute(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, BOOLEAN { @Override - public void addExecuteStatement(CodeBlock.Builder 
methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAndMapWasAppliedToBoolean(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, LONG { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAndMapFirstColumnToLong(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, ROW { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAndExtractFirstRow(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, ENTITY { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return 
executeAndMapToSingleEntity(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, OPTIONAL_ENTITY { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeAndMapToOptionalEntity(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, RESULT_SET { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return execute(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, BOUND_STATEMENT { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return boundStatement"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return 
innerBlock; } }, PAGING_ITERABLE { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeAndMapToEntityIterable(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return innerBlock; } }, FUTURE_OF_VOID { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAsyncAndMapToVoid(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_BOOLEAN { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAsyncAndMapWasAppliedToBoolean(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_LONG { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + 
public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAsyncAndMapFirstColumnToLong(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_ROW { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAsyncAndExtractFirstRow(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_ENTITY { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeAsyncAndMapToSingleEntity(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_OPTIONAL_ENTITY { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String 
helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeAsyncAndMapToOptionalEntity(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_ASYNC_RESULT_SET { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement("return executeAsync(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, FUTURE_OF_ASYNC_PAGING_ITERABLE { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeAsyncAndMapToEntityIterable(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); } }, REACTIVE_RESULT_SET { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map 
typeParameters) { methodBuilder.addStatement("return executeReactive(boundStatement)"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_REACTIVE_RESULT_SET); } }, MAPPED_REACTIVE_RESULT_SET { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { methodBuilder.addStatement( "return executeReactiveAndMap(boundStatement, $L)", helperFieldName); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { return wrapWithErrorHandling(innerBlock, FAILED_MAPPED_REACTIVE_RESULT_SET); } }, + CUSTOM { + @Override + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { + TypeName returnTypeName = + GeneratedCodePatterns.getTypeName(methodElement.getReturnType(), typeParameters); + methodBuilder + .addStatement( + "@$1T(\"unchecked\") $2T result =\n($2T) producer.execute(boundStatement, context, $3L)", + SuppressWarnings.class, + returnTypeName, + helperFieldName) + .addStatement("return result"); + } + + @Override + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { + + TypeName returnTypeName = + GeneratedCodePatterns.getTypeName(methodElement.getReturnType(), typeParameters); + + // We're wrapping the whole DAO method with a catch block that calls producer.wrapError. 
+ // wrapError can itself throw, so it's wrapped in a nested try-catch: + CodeBlock.Builder callWrapError = + CodeBlock.builder() + .beginControlFlow("try") + .addStatement( + "@$1T(\"unchecked\") $2T result =\n($2T) producer.wrapError(t)", + SuppressWarnings.class, + returnTypeName) + .addStatement("return result"); + + // Any exception that is explicitly declared by the DAO method can be rethrown directly. + // (note: manually a multi-catch would be cleaner, but from here it's simpler to generate + // separate clauses) + for (TypeMirror thrownType : methodElement.getThrownTypes()) { + callWrapError.nextControlFlow("catch ($T e)", thrownType).addStatement("throw e"); + } + + // Otherwise, rethrow unchecked exceptions and wrap checked ones. + callWrapError + .nextControlFlow("catch ($T e)", Exception.class) + .addStatement("$T.throwIfUnchecked(e)", Throwables.class) + .addStatement("throw new $T(e)", RuntimeException.class) + .endControlFlow(); + + return wrapWithErrorHandling(innerBlock, callWrapError.build()); + } + }, + UNSUPPORTED() { @Override - public void addExecuteStatement(CodeBlock.Builder methodBuilder, String helperFieldName) { + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { throw new AssertionError("Should never get here"); } @Override - public CodeBlock wrapWithErrorHandling(CodeBlock innerBlock) { + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { throw new AssertionError("Should never get here"); } }, @@ -263,15 +467,19 @@ static CodeBlock wrapWithErrorHandling(CodeBlock innerBlock, CodeBlock catchBloc .beginControlFlow("try") .add(innerBlock) .nextControlFlow("catch ($T t)", Throwable.class) - .addStatement(catchBlock) + .add(catchBlock) .endControlFlow() .build(); } private static final CodeBlock FAILED_FUTURE = - CodeBlock.of("return $T.failedFuture(t)", 
CompletableFutures.class); + CodeBlock.builder() + .addStatement("return $T.failedFuture(t)", CompletableFutures.class) + .build(); private static final CodeBlock FAILED_REACTIVE_RESULT_SET = - CodeBlock.of("return new $T(t)", FailedReactiveResultSet.class); + CodeBlock.builder().addStatement("return new $T(t)", FailedReactiveResultSet.class).build(); private static final CodeBlock FAILED_MAPPED_REACTIVE_RESULT_SET = - CodeBlock.of("return new $T(t)", FailedMappedReactiveResultSet.class); + CodeBlock.builder() + .addStatement("return new $T(t)", FailedMappedReactiveResultSet.class) + .build(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index f059d3139fd..56cbd5f977f 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -204,6 +204,14 @@ public DaoReturnType parse( } } } + + // Otherwise assume a custom type. A MappedResultProducer will be looked up from the + // MapperContext at runtime. + if (context.areCustomResultsEnabled()) { + return new DaoReturnType( + DefaultDaoReturnTypeKind.CUSTOM, + findEntityInCustomType(declaredReturnType, typeParameters)); + } } if (returnTypeMirror.getKind() == TypeKind.TYPEVAR) { @@ -232,4 +240,25 @@ public DaoReturnType parse( return DaoReturnType.UNSUPPORTED; } + + /** + * If we're dealing with a {@link DefaultDaoReturnTypeKind#CUSTOM}, we allow one entity element to + * appear at any level of nesting in the type, e.g. {@code MyCustomFuture>}. 
+ */ + private TypeElement findEntityInCustomType( + TypeMirror typeMirror, Map typeParameters) { + TypeElement entityElement = EntityUtils.asEntityElement(typeMirror, typeParameters); + if (entityElement != null) { + return entityElement; + } else if (typeMirror.getKind() == TypeKind.DECLARED) { + for (TypeMirror typeArgument : ((DeclaredType) typeMirror).getTypeArguments()) { + entityElement = findEntityInCustomType(typeArgument, typeParameters); + if (entityElement != null) { + return entityElement; + } + } + } + // null is a valid result even at the top level, a custom type may not contain any entity + return null; + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java index b50e954dcc4..8be676edb7b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java @@ -88,10 +88,13 @@ public static MethodSpec.Builder override( TypeName type = getTypeName(parameterElement.asType(), typeParameters); result.addParameter(type, parameterElement.getSimpleName().toString()); } + for (TypeMirror thrownType : interfaceMethod.getThrownTypes()) { + result.addException(TypeName.get(thrownType)); + } return result; } - private static TypeName getTypeName(TypeMirror mirror, Map typeParameters) { + public static TypeName getTypeName(TypeMirror mirror, Map typeParameters) { if (mirror.getKind() == TypeKind.TYPEVAR) { TypeVariable typeVariable = (TypeVariable) mirror; Name name = typeVariable.asElement().getSimpleName(); diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessorTest.java 
b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessorTest.java index 3413607b174..41a20be561f 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessorTest.java @@ -17,6 +17,7 @@ import static com.google.testing.compile.CompilationSubject.assertThat; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.google.testing.compile.Compilation; import com.google.testing.compile.Compiler; import com.squareup.javapoet.JavaFile; @@ -55,14 +56,18 @@ protected MapperProcessor getMapperProcessor() { } /** - * Launches an in-process execution of javac with {@link MapperProcessor} enabled. + * Launches an in-process execution of javac with {@link MapperProcessor} enabled, and custom + * result types disabled. * * @param packageName the package of the types to process. Note that it is currently not possible * to process multiple packages (and it's unlikely to be needed in unit tests). * @param typeSpecs the contents of the classes or interfaces to process. */ protected Compilation compileWithMapperProcessor(String packageName, TypeSpec... 
typeSpecs) { - return compileWithMapperProcessor(packageName, Collections.emptyList(), typeSpecs); + return compileWithMapperProcessor( + packageName, + ImmutableList.of("-Acom.datastax.oss.driver.mapper.customResults.enabled=false"), + typeSpecs); } protected void should_fail_with_expected_error( diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java index 7b5bda222b1..fe19afddc5a 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGeneratorTest.java @@ -15,15 +15,23 @@ */ package com.datastax.oss.driver.internal.mapper.processor.dao; +import static com.google.testing.compile.CompilationSubject.assertThat; + import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.google.testing.compile.Compilation; import com.squareup.javapoet.AnnotationSpec; +import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.TypeSpec; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Collections; +import java.util.List; import java.util.UUID; import javax.lang.model.element.Modifier; import org.junit.Test; @@ -163,4 +171,32 @@ public void should_warn_when_non_bind_marker_has_cql_name() { .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) .build()); } + + @Test + 
public void should_not_fail_on_unsupported_result_when_custom_results_enabled() { + + MethodSpec methodSpec = + MethodSpec.methodBuilder("delete") + .addAnnotation(Delete.class) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .addParameter(ENTITY_CLASS_NAME, "entity") + .returns(Integer.class) // not a built-in return type + .build(); + TypeSpec daoSpec = + TypeSpec.interfaceBuilder(ClassName.get("test", "ProductDao")) + .addModifiers(Modifier.PUBLIC) + .addAnnotation(Dao.class) + .addMethod(methodSpec) + .build(); + + for (List compilerOptions : + ImmutableList.of( + ImmutableList.of("-Acom.datastax.oss.driver.mapper.customResults.enabled=true"), + // The option defaults to true, so it should also work without explicit options: + Collections.emptyList())) { + Compilation compilation = + compileWithMapperProcessor("test", compilerOptions, ENTITY_SPEC, daoSpec); + assertThat(compilation).succeededWithoutWarnings(); + } + } } diff --git a/mapper-runtime/revapi.json b/mapper-runtime/revapi.json index 12704ddd974..18d26a7f7e9 100644 --- a/mapper-runtime/revapi.json +++ b/mapper-runtime/revapi.json @@ -67,6 +67,11 @@ "code": "java.method.addedToInterface", "new": "method com.datastax.oss.driver.api.core.config.DriverExecutionProfile com.datastax.oss.driver.api.mapper.MapperContext::getExecutionProfile()", "justification": "JAVA-2633: Add execution profile argument to DAO factory method (accept API break -- it's unlikely that MapperContext will be implemented outside of the driver)" + }, + { + "code": "java.method.addedToInterface", + "new": "method com.datastax.oss.driver.api.mapper.result.MapperResultProducer com.datastax.oss.driver.api.mapper.MapperContext::getResultProducer(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "justification": "JAVA-2792: Allow custom results in the mapper (accept API break -- it's unlikely that MapperContext will be implemented outside of the driver)" } ] } diff --git 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java index 78858f729e5..249eecc0215 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperContext.java @@ -18,7 +18,10 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.mapper.entity.naming.NameConverter; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducerService; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; @@ -83,4 +86,18 @@ public interface MapperContext { */ @NonNull Map getCustomState(); + + /** + * Returns a component that will execute a statement and convert it into a custom result of the + * given type. + * + *

        These components must be registered through the Java Service Provider Interface mechanism, + * see {@link MapperResultProducerService}. + * + *

        The results of this method are cached at the JVM level. + * + * @throws IllegalArgumentException if no producer was registered for this type. + */ + @NonNull + MapperResultProducer getResultProducer(@NonNull GenericType resultToProduce); } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java index aca16abede2..85b5db6a39e 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Delete.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -117,6 +118,7 @@ * @Delete * ReactiveResultSet deleteReactive(Product product); * + *

      • a {@linkplain MapperResultProducer custom type}. * * * Note that you can also return a boolean or result set for non-conditional queries, but there's no diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java index edf0880f23d..43675626568 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Insert.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -116,6 +117,7 @@ * @Insert * ReactiveResultSet insertReactive(Product product); * + *
      • a {@linkplain MapperResultProducer custom type}. * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java index bb37e8f2dec..85be754852b 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Query.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -94,6 +95,7 @@ * switch to the asynchronous equivalent {@link AsyncResultSet} and {@link * MappedAsyncPagingIterable} respectively. *
      • a {@link ReactiveResultSet}, or a {@link MappedReactiveResultSet} of the entity class. + *
      • a {@linkplain MapperResultProducer custom type}. * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java index 0a7f6c04864..0e5635cc9e8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Select.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -126,6 +127,7 @@ * @Select(customWhereClause = "description LIKE :searchString") * MappedReactiveResultSet<Product> findByDescriptionReactive(String searchString); * + *
      • a {@linkplain MapperResultProducer custom type}. * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java index c242dd1fb07..c0d2a697b2d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Update.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; @@ -129,6 +130,7 @@ * @Update * ReactiveResultSet updateReactive(Product product); * + *
      • a {@linkplain MapperResultProducer custom type}. * * *

        Target keyspace and table

        diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java new file mode 100644 index 00000000000..4940d2eaf07 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java @@ -0,0 +1,107 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.result; + +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.data.GettableByName; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.concurrent.CompletionStage; + +/** + * A component that can be plugged into the object mapper, in order to return custom result types + * from DAO methods. + * + *

        For example, this could be used to substitute a 3rd-party future implementation for {@link + * CompletionStage}: + * + *

        + * public class CustomFutureProducer implements MapperResultProducer {
        + *   ...
        + * }
        + * 
        + * + *

        Producers are registered via the Java Service Provider mechanism (see {@link + * MapperResultProducerService}). DAO methods can then use the new type: + * + *

        + * @Dao
        + * public interface ProductDao {
        + *   @Select
        + *   CustomFuture<Product> findById(UUID productId);
        + * }
        + * 
        + * + * See the javadocs of the methods in this interface for more explanations. + */ +public interface MapperResultProducer { + + /** + * Checks if this producer can handle a particular result type. + * + *

        This will be invoked at runtime to select a producer: if a DAO method declares a return type + * that is not supported natively, then the mapper generates an implementation which, for every + * invocation, iterates through all the producers in the order that they were registered, + * and picks the first one where {@code canProduce()} returns true. + * + * @param resultType the DAO method's declared return type. If checking the top-level type is + * sufficient, then {@link GenericType#getRawType()} should do the trick. If you need to + * recurse into the type arguments, call {@link GenericType#getType()} and use the {@code + * java.lang.reflect} APIs. + */ + boolean canProduce(@NonNull GenericType resultType); + + /** + * Executes the statement generated by the mapper, and converts the result to the expected type. + * + *

        This will be executed at runtime, every time the DAO method is called. + * + * @param statement the statement, ready to execute: the mapper has already bound all the values, + * and set all the necessary attributes (consistency, page size, etc). + * @param context the context in which the DAO method is executed. In particular, this is how you + * get access to the {@linkplain MapperContext#getSession() session}. + * @param entityHelper if the type to produce contains a mapped entity (e.g. {@code + * ListenableFuture}), an instance of the helper class to manipulate that entity. In + * particular, {@link EntityHelper#get(GettableByName) entityHelper.get()} allows you to + * convert rows into entity instances. If the type to produce does not contain an entity, this + * will be {@code null}. + * @return the object to return from the DAO method. This must match the type that this producer + * was selected for, there will be an unchecked cast at runtime. + */ + @SuppressWarnings("TypeParameterUnusedInFormals") + @Nullable + Object execute( + @NonNull Statement statement, + @NonNull MapperContext context, + @Nullable EntityHelper entityHelper); + + /** + * Surfaces any error encountered in the DAO method (either in the generated mapper code that + * builds the statement, or during invocation of {@link #execute}). + * + *

        For some result types, it is expected that errors will be wrapped in some sort of container + * instead of thrown directly; for example a failed future or publisher. + * + *

        If rethrowing is the right thing to do, then it is perfectly fine to do so from this method. + * If you throw checked exceptions, they will be propagated directly if the DAO method also + * declares them, or wrapped into a {@link RuntimeException} otherwise. + */ + @Nullable + Object wrapError(@NonNull Throwable error) throws Exception; +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.java new file mode 100644 index 00000000000..6ff193fcf49 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.result; + +/** + * Provides the custom mapper result types that will be used in an application. + * + *

        This class is loaded with the Java Service Provider Interface mechanism, you must reference it + * via a service descriptor: create a file {@code + * META-INF/services/com.datastax.oss.driver.api.mapper.result.MapperResultProducerService}, with + * one or more lines, each referencing the name of an implementing class. + */ +public interface MapperResultProducerService { + + /** + * Returns the producers provided by this service. + * + *

        Note that order matters, the producers will be tried from left to right until one matches. + * If there is some overlap between your producers' {@link MapperResultProducer#canProduce + * canProduce()} implementations, put the most specific ones first. + */ + Iterable getProducers(); +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index 7631394d630..9ba08d8c65d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -18,20 +18,31 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.MapperException; import com.datastax.oss.driver.api.mapper.entity.naming.NameConverter; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; +import com.datastax.oss.driver.api.mapper.result.MapperResultProducerService; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.InvocationTargetException; +import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; public class DefaultMapperContext implements MapperContext { + private static final List RESULT_PRODUCERS = getResultProducers(); + + private static 
final ConcurrentMap, MapperResultProducer> RESULT_PRODUCER_CACHE = + new ConcurrentHashMap<>(); + private final CqlSession session; private final CqlIdentifier keyspaceId; private final CqlIdentifier tableId; @@ -140,6 +151,24 @@ public Map getCustomState() { return customState; } + @NonNull + @Override + public MapperResultProducer getResultProducer(@NonNull GenericType resultToProduce) { + return RESULT_PRODUCER_CACHE.computeIfAbsent( + resultToProduce, + k -> { + for (MapperResultProducer resultProducer : RESULT_PRODUCERS) { + if (resultProducer.canProduce(k)) { + return resultProducer; + } + } + throw new IllegalArgumentException( + String.format( + "Found no registered %s that can produce %s", + MapperResultProducer.class.getSimpleName(), k)); + }); + } + private static NameConverter buildNameConverter(Class converterClass) { try { return converterClass.getDeclaredConstructor().newInstance(); @@ -155,4 +184,12 @@ private static NameConverter buildNameConverter(Class c e); } } + + private static List getResultProducers() { + ImmutableList.Builder result = ImmutableList.builder(); + ServiceLoader loader = + ServiceLoader.load(MapperResultProducerService.class); + loader.iterator().forEachRemaining(provider -> result.addAll(provider.getProducers())); + return result.build(); + } } From 5c370cbc47eb19d0d53762dd408317d9d9f13a62 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 4 Jun 2020 14:41:26 +0200 Subject: [PATCH 472/979] Mention Ec2MultiRegionAddressTranslator in reference.conf --- core/src/main/resources/reference.conf | 3 +++ 1 file changed, 3 insertions(+) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 34514dd1fbf..0e74e87b4a5 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -873,6 +873,9 @@ datastax-java-driver { # # The driver provides the following implementations out of the box: # - PassThroughAddressTranslator: returns all addresses unchanged 
+ # - Ec2MultiRegionAddressTranslator: suitable for an Amazon multi-region EC2 deployment where + # clients are also deployed in EC2. It optimizes network costs by favoring private IPs over + # public ones whenever possible. # # You can also specify a custom class that implements AddressTranslator and has a public # constructor with a DriverContext argument. From bfbf82734c734e311cc9869ec351cbd3f127df4e Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 27 May 2020 21:18:29 -0700 Subject: [PATCH 473/979] JAVA-2793: Add composite config loader --- changelog/README.md | 1 + .../api/core/config/DriverConfigLoader.java | 23 ++ .../core/config/DriverExecutionProfile.java | 131 +++++++++++- .../core/config/DerivedExecutionProfile.java | 196 +++++++++++++++++ .../composite/CompositeDriverConfig.java | 60 ++++++ .../CompositeDriverConfigLoader.java | 98 +++++++++ .../CompositeDriverExecutionProfile.java | 201 ++++++++++++++++++ .../map/MapBasedDriverExecutionProfile.java | 157 +------------- .../TypesafeDriverExecutionProfile.java | 3 +- .../CompositeDriverConfigReloadTest.java | 104 +++++++++ .../composite/CompositeDriverConfigTest.java | 116 ++++++++++ 11 files changed, 933 insertions(+), 157 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/DerivedExecutionProfile.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfig.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigLoader.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java diff --git a/changelog/README.md 
b/changelog/README.md index e432b087393..56688ae47d5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [new feature] JAVA-2793: Add composite config loader - [new feature] JAVA-2792: Allow custom results in the mapper - [improvement] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index e08f17171a8..7b2c6f31562 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.internal.core.config.composite.CompositeDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.map.MapBasedDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; @@ -265,6 +266,28 @@ static DriverConfigLoader fromMap(@NonNull OptionsMap source) { return new MapBasedDriverConfigLoader(source, source.asRawMap()); } + /** + * Composes two existing config loaders to form a new one. + * + *

        When the driver reads an option, the "primary" config will be queried first. If the option + * is missing, then it will be looked up in the "fallback" config. + * + *

        All execution profiles will be surfaced in the new config. If a profile is defined both in + * the primary and the fallback config, its options will be merged using the same precedence rules + * as described above. + * + *

        The new config is reloadable if at least one of the input configs is. If you invoke {@link + * DriverConfigLoader#reload()} on the new loader, it will reload whatever is reloadable, or fail + * if nothing is. If the input loaders have periodic reloading built-in, each one will reload at + * its own pace, and the changes will be reflected in the new config. + */ + @NonNull + static DriverConfigLoader compose( + @NonNull DriverConfigLoader primaryConfigLoader, + @NonNull DriverConfigLoader fallbackConfigLoader) { + return new CompositeDriverConfigLoader(primaryConfigLoader, fallbackConfigLoader); + } + /** * Loads the first configuration that will be used to initialize the driver. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.java index 600b2709065..8d656d0f886 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.api.core.config; +import com.datastax.oss.driver.internal.core.config.DerivedExecutionProfile; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; @@ -182,12 +184,28 @@ default List getDurationList( /** * Returns a representation of all the child options under a given option. * - *

        This is only used to compare configuration sections across profiles, so the actual - * implementation does not matter, as long as identical sections (same options with same values, - * regardless of order) compare as equal and have the same {@code hashCode()}. + *

        This is used by the driver at initialization time, to compare profiles and determine if it + * must create per-profile policies. For example, if two profiles have the same options in the + * {@code basic.load-balancing-policy} section, they will share the same policy instance. But if + * their options differ, two separate instances will be created. + * + *

        The runtime return type does not matter, as long as identical sections (same options with + * same values, regardless of order) compare as equal and have the same {@code hashCode()}. The + * default implementation builds a map based on the entries from {@link #entrySet()}, it should be + * good for most cases. */ @NonNull - Object getComparisonKey(@NonNull DriverOption option); + default Object getComparisonKey(@NonNull DriverOption option) { + // This method is only used during driver initialization, performance is not crucial + String prefix = option.getPath(); + ImmutableMap.Builder childOptions = ImmutableMap.builder(); + for (Map.Entry entry : entrySet()) { + if (entry.getKey().startsWith(prefix)) { + childOptions.put(entry.getKey(), entry.getValue()); + } + } + return childOptions.build(); + } /** * Enumerates all the entries in this profile, including those that were inherited from another @@ -201,4 +219,109 @@ default List getDurationList( */ @NonNull SortedSet> entrySet(); + + @NonNull + @Override + default DriverExecutionProfile withBoolean(@NonNull DriverOption option, boolean value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withBooleanList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withInt(@NonNull DriverOption option, int value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withIntList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withLong(@NonNull DriverOption option, long value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withLongList( + 
@NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withDouble(@NonNull DriverOption option, double value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withDoubleList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withString(@NonNull DriverOption option, @NonNull String value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withStringList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withStringMap( + @NonNull DriverOption option, @NonNull Map value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withBytes(@NonNull DriverOption option, long value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withBytesList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withDuration( + @NonNull DriverOption option, @NonNull Duration value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile withDurationList( + @NonNull DriverOption option, @NonNull List value) { + return DerivedExecutionProfile.with(this, option, value); + } + + @NonNull + @Override + default DriverExecutionProfile without(@NonNull DriverOption option) { + return DerivedExecutionProfile.without(this, option); + } } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/config/DerivedExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/DerivedExecutionProfile.java new file mode 100644 index 00000000000..c5af1aca472 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/DerivedExecutionProfile.java @@ -0,0 +1,196 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Duration; +import java.util.AbstractMap; +import java.util.List; +import java.util.Map; +import java.util.SortedSet; +import java.util.function.BiFunction; + +public class DerivedExecutionProfile implements DriverExecutionProfile { + + private static final Object NO_VALUE = new Object(); + + public static DerivedExecutionProfile with( + DriverExecutionProfile baseProfile, DriverOption option, Object value) { + if (baseProfile instanceof DerivedExecutionProfile) { + // Don't nest derived profiles, use same base and add to overrides + DerivedExecutionProfile previousDerived = (DerivedExecutionProfile) 
baseProfile; + ImmutableMap.Builder newOverrides = ImmutableMap.builder(); + for (Map.Entry override : previousDerived.overrides.entrySet()) { + if (!override.getKey().equals(option)) { + newOverrides.put(override.getKey(), override.getValue()); + } + } + newOverrides.put(option, value); + return new DerivedExecutionProfile(previousDerived.baseProfile, newOverrides.build()); + } else { + return new DerivedExecutionProfile(baseProfile, ImmutableMap.of(option, value)); + } + } + + public static DerivedExecutionProfile without( + DriverExecutionProfile baseProfile, DriverOption option) { + return with(baseProfile, option, NO_VALUE); + } + + private final DriverExecutionProfile baseProfile; + private final Map overrides; + + public DerivedExecutionProfile( + DriverExecutionProfile baseProfile, Map overrides) { + this.baseProfile = baseProfile; + this.overrides = overrides; + } + + @NonNull + @Override + public String getName() { + return baseProfile.getName(); + } + + @Override + public boolean isDefined(@NonNull DriverOption option) { + if (overrides.containsKey(option)) { + return overrides.get(option) != NO_VALUE; + } else { + return baseProfile.isDefined(option); + } + } + + @Override + public boolean getBoolean(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBoolean); + } + + @NonNull + @Override + public List getBooleanList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBooleanList); + } + + @Override + public int getInt(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getInt); + } + + @NonNull + @Override + public List getIntList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getIntList); + } + + @Override + public long getLong(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getLong); + } + + @NonNull + @Override + public List getLongList(@NonNull DriverOption option) { + return get(option, 
DriverExecutionProfile::getLongList); + } + + @Override + public double getDouble(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDouble); + } + + @NonNull + @Override + public List getDoubleList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDoubleList); + } + + @NonNull + @Override + public String getString(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getString); + } + + @NonNull + @Override + public List getStringList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getStringList); + } + + @NonNull + @Override + public Map getStringMap(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getStringMap); + } + + @Override + public long getBytes(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBytes); + } + + @NonNull + @Override + public List getBytesList(DriverOption option) { + return get(option, DriverExecutionProfile::getBytesList); + } + + @NonNull + @Override + public Duration getDuration(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDuration); + } + + @NonNull + @Override + public List getDurationList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDurationList); + } + + @NonNull + @SuppressWarnings("unchecked") + private ValueT get( + @NonNull DriverOption option, + BiFunction getter) { + Object value = overrides.get(option); + if (value == null) { + value = getter.apply(baseProfile, option); + } + if (value == null || value == NO_VALUE) { + throw new IllegalArgumentException("Missing configuration option " + option.getPath()); + } + return (ValueT) value; + } + + @NonNull + @Override + public SortedSet> entrySet() { + ImmutableSortedSet.Builder> builder = + ImmutableSortedSet.orderedBy(Map.Entry.comparingByKey()); + // builder.add() has no effect if the element already exists, so process the overrides first + // 
since they have higher precedence + for (Map.Entry entry : overrides.entrySet()) { + if (entry.getValue() != NO_VALUE) { + builder.add(new AbstractMap.SimpleEntry<>(entry.getKey().getPath(), entry.getValue())); + } + } + builder.addAll(baseProfile.entrySet()); + return builder.build(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfig.java new file mode 100644 index 00000000000..da4dbe645e3 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfig.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.composite; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +public class CompositeDriverConfig implements DriverConfig { + + private final DriverConfig primaryConfig; + private final DriverConfig fallbackConfig; + private final Map profiles = new ConcurrentHashMap<>(); + + public CompositeDriverConfig( + @NonNull DriverConfig primaryConfig, @NonNull DriverConfig fallbackConfig) { + this.primaryConfig = Objects.requireNonNull(primaryConfig); + this.fallbackConfig = Objects.requireNonNull(fallbackConfig); + } + + @NonNull + @Override + public DriverExecutionProfile getProfile(@NonNull String profileName) { + return profiles.compute( + profileName, + (k, v) -> + (v == null) + ? new CompositeDriverExecutionProfile(primaryConfig, fallbackConfig, profileName) + : v.refresh()); + } + + @NonNull + @Override + public Map getProfiles() { + // The map is updated lazily, if we want all the profiles we need to fetch them explicitly + for (String name : + Sets.union(primaryConfig.getProfiles().keySet(), fallbackConfig.getProfiles().keySet())) { + getProfile(name); + } + return Collections.unmodifiableMap(profiles); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigLoader.java new file mode 100644 index 00000000000..1f20683b9bb --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigLoader.java @@ -0,0 +1,98 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.composite; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; + +public class CompositeDriverConfigLoader implements DriverConfigLoader { + + private final DriverConfigLoader primaryConfigLoader; + private final DriverConfigLoader fallbackConfigLoader; + + public CompositeDriverConfigLoader( + @NonNull DriverConfigLoader primaryConfigLoader, + @NonNull DriverConfigLoader fallbackConfigLoader) { + this.primaryConfigLoader = Objects.requireNonNull(primaryConfigLoader); + this.fallbackConfigLoader = Objects.requireNonNull(fallbackConfigLoader); + } + + @NonNull + @Override + public DriverConfig getInitialConfig() { + DriverConfig primaryConfig = primaryConfigLoader.getInitialConfig(); + DriverConfig fallbackConfig = fallbackConfigLoader.getInitialConfig(); + return new CompositeDriverConfig(primaryConfig, fallbackConfig); + } + + @Override + public void onDriverInit(@NonNull DriverContext context) { + fallbackConfigLoader.onDriverInit(context); + 
primaryConfigLoader.onDriverInit(context); + } + + @NonNull + @Override + public CompletionStage reload() { + if (!primaryConfigLoader.supportsReloading() && !fallbackConfigLoader.supportsReloading()) { + return CompletableFutures.failedFuture( + new UnsupportedOperationException( + "Reloading is not supported (this is a composite config, " + + "and neither the primary nor the fallback are reloadable)")); + } else if (!primaryConfigLoader.supportsReloading()) { + return fallbackConfigLoader.reload(); + } else if (!fallbackConfigLoader.supportsReloading()) { + return primaryConfigLoader.reload(); + } else { + CompletionStage primaryFuture = primaryConfigLoader.reload(); + CompletionStage fallbackFuture = fallbackConfigLoader.reload(); + CompletableFuture compositeFuture = new CompletableFuture<>(); + primaryFuture.whenComplete( + (primaryChanged, primaryError) -> + fallbackFuture.whenComplete( + (fallbackChanged, fallbackError) -> { + if (primaryError == null && fallbackError == null) { + compositeFuture.complete(primaryChanged || fallbackChanged); + } else if (fallbackError == null) { + compositeFuture.completeExceptionally(primaryError); + } else if (primaryError == null) { + compositeFuture.completeExceptionally(fallbackError); + } else { + primaryError.addSuppressed(fallbackError); + compositeFuture.completeExceptionally(primaryError); + } + })); + return compositeFuture; + } + } + + @Override + public boolean supportsReloading() { + return primaryConfigLoader.supportsReloading() || fallbackConfigLoader.supportsReloading(); + } + + @Override + public void close() { + primaryConfigLoader.close(); + fallbackConfigLoader.close(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java new file mode 100644 index 00000000000..5d7df7b417f --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java @@ -0,0 +1,201 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.composite; + +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.BiFunction; + +public class CompositeDriverExecutionProfile implements DriverExecutionProfile { + + private final DriverConfig primaryConfig; + private final DriverConfig fallbackConfig; + private final String profileName; + + private volatile DriverExecutionProfile primaryProfile; + private volatile DriverExecutionProfile fallbackProfile; + + public CompositeDriverExecutionProfile( + @NonNull DriverConfig primaryConfig, + @NonNull DriverConfig fallbackConfig, + @NonNull String profileName) { + this.primaryConfig = Objects.requireNonNull(primaryConfig); + this.fallbackConfig = 
Objects.requireNonNull(fallbackConfig); + this.profileName = Objects.requireNonNull(profileName); + refreshInternal(); + } + + /** + * Fetches the underlying profiles again from the two backing configs. This is because some config + * implementations support adding/removing profiles at runtime. + * + *

        For efficiency reasons this is only done when the user fetches the profile again from the + * main config, not every time an option is fetched from the profile. + */ + public CompositeDriverExecutionProfile refresh() { + return refreshInternal(); + } + + // This method only exists to avoid calling its public, overridable variant from the constructor + private CompositeDriverExecutionProfile refreshInternal() { + // There's no `hasProfile()` in the public API because it didn't make sense until now. So + // unfortunately we have to catch the exception. + try { + primaryProfile = primaryConfig.getProfile(profileName); + } catch (IllegalArgumentException e) { + primaryProfile = null; + } + try { + fallbackProfile = fallbackConfig.getProfile(profileName); + } catch (IllegalArgumentException e) { + fallbackProfile = null; + } + + Preconditions.checkArgument( + primaryProfile != null || fallbackProfile != null, + "Unknown profile '%s'. Check your configuration.", + profileName); + return this; + } + + @NonNull + @Override + public String getName() { + return profileName; + } + + @Override + public boolean isDefined(@NonNull DriverOption option) { + return (primaryProfile != null && primaryProfile.isDefined(option)) + || (fallbackProfile != null && fallbackProfile.isDefined(option)); + } + + @Override + public boolean getBoolean(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBoolean); + } + + @NonNull + @Override + public List getBooleanList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBooleanList); + } + + @Override + public int getInt(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getInt); + } + + @NonNull + @Override + public List getIntList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getIntList); + } + + @Override + public long getLong(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getLong); + } + + 
@NonNull + @Override + public List getLongList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getLongList); + } + + @Override + public double getDouble(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDouble); + } + + @NonNull + @Override + public List getDoubleList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDoubleList); + } + + @NonNull + @Override + public String getString(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getString); + } + + @NonNull + @Override + public List getStringList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getStringList); + } + + @NonNull + @Override + public Map getStringMap(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getStringMap); + } + + @Override + public long getBytes(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getBytes); + } + + @NonNull + @Override + public List getBytesList(DriverOption option) { + return get(option, DriverExecutionProfile::getBytesList); + } + + @NonNull + @Override + public Duration getDuration(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDuration); + } + + @NonNull + @Override + public List getDurationList(@NonNull DriverOption option) { + return get(option, DriverExecutionProfile::getDurationList); + } + + private ValueT get( + @NonNull DriverOption option, + BiFunction getter) { + if (primaryProfile != null && primaryProfile.isDefined(option)) { + return getter.apply(primaryProfile, option); + } else if (fallbackProfile != null && fallbackProfile.isDefined(option)) { + return getter.apply(fallbackProfile, option); + } else { + throw new IllegalArgumentException("Unknown option: " + option); + } + } + + @NonNull + @Override + public SortedSet> entrySet() { + SortedSet> result = new TreeSet<>(Map.Entry.comparingByKey()); + 
result.addAll(fallbackProfile.entrySet()); + result.addAll(primaryProfile.entrySet()); + return ImmutableSortedSet.copyOf(result); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java index b295d8d3760..339d27a6687 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverExecutionProfile.java @@ -19,7 +19,6 @@ import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.time.Duration; @@ -32,12 +31,7 @@ /** @see MapBasedDriverConfigLoader */ public class MapBasedDriverExecutionProfile implements DriverExecutionProfile { - private static final Object NO_VALUE = new Object(); - private final String profileName; - // Anything that was overridden in a derived profile with `withXxx` methods. Empty for non-derived - // profiles - private final Map overrides; // The backing map for the current profile private final Map profile; // The backing map for the default profile (if the current one is not the default) @@ -47,7 +41,6 @@ public MapBasedDriverExecutionProfile( Map> optionsMap, String profileName) { this( profileName, - Collections.emptyMap(), optionsMap.get(profileName), profileName.equals(DriverExecutionProfile.DEFAULT_NAME) ? 
Collections.emptyMap() @@ -60,11 +53,9 @@ public MapBasedDriverExecutionProfile( public MapBasedDriverExecutionProfile( String profileName, - Map overrides, Map profile, Map defaultProfile) { this.profileName = profileName; - this.overrides = overrides; this.profile = profile; this.defaultProfile = defaultProfile; } @@ -77,11 +68,7 @@ public String getName() { @Override public boolean isDefined(@NonNull DriverOption option) { - if (overrides.containsKey(option)) { - return overrides.get(option) != NO_VALUE; - } else { - return profile.containsKey(option) || defaultProfile.containsKey(option); - } + return profile.containsKey(option) || defaultProfile.containsKey(option); } // Driver options don't encode the type, everything relies on the user putting the right types in @@ -89,9 +76,8 @@ public boolean isDefined(@NonNull DriverOption option) { @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) @NonNull private T get(@NonNull DriverOption option) { - Object value = - overrides.getOrDefault(option, profile.getOrDefault(option, defaultProfile.get(option))); - if (value == null || value == NO_VALUE) { + Object value = profile.getOrDefault(option, defaultProfile.get(option)); + if (value == null) { throw new IllegalArgumentException("Missing configuration option " + option.getPath()); } return (T) value; @@ -182,20 +168,6 @@ public List getDurationList(@NonNull DriverOption option) { return get(option); } - @NonNull - @Override - public Object getComparisonKey(@NonNull DriverOption option) { - // This method is only used during driver initialization, performance is not crucial - String prefix = option.getPath(); - ImmutableMap.Builder childOptions = ImmutableMap.builder(); - for (Map.Entry entry : entrySet()) { - if (entry.getKey().startsWith(prefix)) { - childOptions.put(entry.getKey(), entry.getValue()); - } - } - return childOptions.build(); - } - @NonNull @Override public SortedSet> entrySet() { @@ -203,130 +175,11 @@ public SortedSet> entrySet() { 
ImmutableSortedSet.orderedBy(Map.Entry.comparingByKey()); for (Map backingMap : // builder.add() ignores duplicates, so process higher precedence backing maps first - ImmutableList.of(overrides, profile, defaultProfile)) { + ImmutableList.of(profile, defaultProfile)) { for (Map.Entry entry : backingMap.entrySet()) { - if (entry.getValue() != NO_VALUE) { - builder.add(new AbstractMap.SimpleEntry<>(entry.getKey().getPath(), entry.getValue())); - } + builder.add(new AbstractMap.SimpleEntry<>(entry.getKey().getPath(), entry.getValue())); } } return builder.build(); } - - private DriverExecutionProfile with(@NonNull DriverOption option, Object value) { - ImmutableMap.Builder newOverrides = ImmutableMap.builder(); - for (Map.Entry override : overrides.entrySet()) { - if (!override.getKey().equals(option)) { - newOverrides.put(override.getKey(), override.getValue()); - } - } - newOverrides.put(option, value); - return new MapBasedDriverExecutionProfile( - profileName, newOverrides.build(), profile, defaultProfile); - } - - @NonNull - @Override - public DriverExecutionProfile withBoolean(@NonNull DriverOption option, boolean value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withBooleanList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withInt(@NonNull DriverOption option, int value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withIntList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withLong(@NonNull DriverOption option, long value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withLongList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public 
DriverExecutionProfile withDouble(@NonNull DriverOption option, double value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withDoubleList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withString(@NonNull DriverOption option, @NonNull String value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withStringList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withStringMap( - @NonNull DriverOption option, @NonNull Map value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withBytes(@NonNull DriverOption option, long value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withBytesList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withDuration( - @NonNull DriverOption option, @NonNull Duration value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile withDurationList( - @NonNull DriverOption option, @NonNull List value) { - return with(option, value); - } - - @NonNull - @Override - public DriverExecutionProfile without(@NonNull DriverOption option) { - return with(option, NO_VALUE); - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java index 31275a4acce..63fe6de2bd8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java @@ -64,6 +64,7 @@ public boolean getBoolean(@NonNull DriverOption option) { return getCached(option.getPath(), getEffectiveOptions()::getBoolean); } + // We override `with*` methods because they can be implemented a bit better with Typesafe config @NonNull @Override public DriverExecutionProfile withBoolean(@NonNull DriverOption option, boolean value) { @@ -269,7 +270,7 @@ public DriverExecutionProfile without(@NonNull DriverOption option) { @NonNull @Override public Object getComparisonKey(@NonNull DriverOption option) { - // No need to cache this, it's only used for policy initialization + // This method has a default implementation in the interface, but here we can do it in one line: return getEffectiveOptions().getConfig(option.getPath()); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java new file mode 100644 index 00000000000..761ecf9cc60 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java @@ -0,0 +1,104 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.config.composite; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class CompositeDriverConfigReloadTest { + + @Mock private DriverConfigLoader primaryLoader; + @Mock private DriverConfigLoader fallbackLoader; + private DriverConfigLoader compositeLoader; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + compositeLoader = DriverConfigLoader.compose(primaryLoader, fallbackLoader); + } + + @Test + @UseDataProvider("reloadabilities") + public void should_be_reloadable_if_either_child_is( + boolean primaryIsReloadable, + boolean fallbackIsReloadable, + boolean compositeShouldBeReloadable) { + when(primaryLoader.supportsReloading()).thenReturn(primaryIsReloadable); + when(fallbackLoader.supportsReloading()).thenReturn(fallbackIsReloadable); + assertThat(compositeLoader.supportsReloading()).isEqualTo(compositeShouldBeReloadable); + } + + @Test + @UseDataProvider("reloadabilities") + public void should_delegate_reloading_to_reloadable_children( + boolean primaryIsReloadable, + boolean fallbackIsReloadable, + boolean compositeShouldBeReloadable) { + 
when(primaryLoader.supportsReloading()).thenReturn(primaryIsReloadable); + when(primaryLoader.reload()) + .thenReturn( + primaryIsReloadable + ? CompletableFuture.completedFuture(true) + : CompletableFutures.failedFuture(new UnsupportedOperationException())); + + when(fallbackLoader.supportsReloading()).thenReturn(fallbackIsReloadable); + when(fallbackLoader.reload()) + .thenReturn( + fallbackIsReloadable + ? CompletableFuture.completedFuture(true) + : CompletableFutures.failedFuture(new UnsupportedOperationException())); + + CompletionStage reloadFuture = compositeLoader.reload(); + + if (compositeShouldBeReloadable) { + assertThat(reloadFuture).isCompletedWithValue(true); + } else { + assertThat(reloadFuture) + .hasFailedWithThrowableThat() + .isInstanceOf(UnsupportedOperationException.class); + } + verify(primaryLoader, primaryIsReloadable ? times(1) : never()).reload(); + verify(fallbackLoader, fallbackIsReloadable ? times(1) : never()).reload(); + } + + @DataProvider + public static Object[][] reloadabilities() { + return new Object[][] { + // primaryIsReloadable, fallbackIsReloadable, compositeShouldBeReloadable + new Object[] {true, true, true}, + new Object[] {true, false, true}, + new Object[] {false, true, true}, + new Object[] {false, false, false}, + }; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java new file mode 100644 index 00000000000..1c6121b1121 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java @@ -0,0 +1,116 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.config.composite; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.api.core.config.TypedDriverOption; +import org.junit.Before; +import org.junit.Test; + +public class CompositeDriverConfigTest { + + private OptionsMap primaryMap; + private OptionsMap fallbackMap; + private DriverConfig compositeConfig; + private DriverExecutionProfile compositeDefaultProfile; + + @Before + public void setup() { + primaryMap = new OptionsMap(); + // We need at least one option so that the default profile exists. Do it now to avoid having to + // do it in every test. We use an option that we won't reuse in the tests so that there are no + // unwanted interactions. 
+ primaryMap.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 1); + + fallbackMap = new OptionsMap(); + fallbackMap.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 1); + + DriverConfigLoader compositeLoader = + DriverConfigLoader.compose( + DriverConfigLoader.fromMap(primaryMap), DriverConfigLoader.fromMap(fallbackMap)); + compositeConfig = compositeLoader.getInitialConfig(); + compositeDefaultProfile = compositeConfig.getDefaultProfile(); + } + + @Test + public void should_use_value_from_primary_config() { + primaryMap.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); + + assertThat(compositeDefaultProfile.isDefined(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isTrue(); + assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(1); + } + + @Test + public void should_ignore_value_from_fallback_config_if_defined_in_both() { + primaryMap.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); + fallbackMap.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 2); + + assertThat(compositeDefaultProfile.isDefined(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isTrue(); + assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(1); + } + + @Test + public void should_use_value_from_fallback_config_if_not_defined_in_primary() { + fallbackMap.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); + + assertThat(compositeDefaultProfile.isDefined(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isTrue(); + assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(1); + } + + @Test + public void should_merge_profiles() { + primaryMap.put("onlyInPrimary", TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); + primaryMap.put("inBoth", TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 2); + fallbackMap.put("inBoth", TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 3); + fallbackMap.put("onlyInFallback", 
TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 4); + + assertThat(compositeConfig.getProfiles()) + .containsKeys( + DriverExecutionProfile.DEFAULT_NAME, + "onlyInPrimary", + "inBoth", + "inBoth", + "onlyInFallback"); + + assertThat( + compositeConfig + .getProfile("onlyInPrimary") + .getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(1); + assertThat( + compositeConfig + .getProfile("inBoth") + .getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(2); + assertThat( + compositeConfig + .getProfile("onlyInFallback") + .getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(4); + } +} From 3adec4f236bd03ea22423b6f97c479fce752e8ed Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 5 Jun 2020 11:18:44 +0200 Subject: [PATCH 474/979] Upgrade shaded guava to 25.1-jre-graal-sub-1 --- bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bom/pom.xml b/bom/pom.xml index 592197b1948..298435e9b20 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -66,7 +66,7 @@ com.datastax.oss java-driver-shaded-guava - 25.1-jre + 25.1-jre-graal-sub-1 From 2997d775a091cfee614e49d20c98f5b29518d7c0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 5 Jun 2020 10:38:20 -0700 Subject: [PATCH 475/979] Stop publishing integration-tests module This was intended for the DSE driver when it was separate. Now that the drivers are unified, it's not needed anymore so don't waste space on Central. 
--- integration-tests/pom.xml | 39 ++++++++++++--------------------------- 1 file changed, 12 insertions(+), 27 deletions(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index e7a130640aa..045a45ebbee 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -190,29 +190,6 @@ - - maven-jar-plugin - - - - com.datastax.oss.driver.tests.integration - - - - - - test-jar - - test-jar - - - - logback-test.xml - - - - - org.apache.maven.plugins maven-surefire-plugin @@ -332,10 +309,18 @@ true - + + maven-install-plugin + + true + + + + maven-deploy-plugin + + true + + From 150c5e03629922bd64d3e0589bb4a10f69da2e88 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 5 Jun 2020 16:10:22 -0700 Subject: [PATCH 476/979] JAVA-2675: Fix javadoc link in root readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0fb47bb6848..edc97ae740d 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.3 +[API docs]: https://docs.datastax.com/en/drivers/java/4.6 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver From e7503a8340fecb5ca1434f5095330bbbdbe31906 Mon Sep 17 00:00:00 2001 From: Ryan Quey Date: Sun, 7 Jun 2020 20:07:15 +0900 Subject: [PATCH 477/979] Fix typo in manual regarding named vs positional setters (#1450) --- manual/core/statements/prepared/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index e8a5ac1ed60..b15a60d4198 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -208,10 +208,10 @@ BoundStatement bound = ps1.bind() .setString("sku", "324378") .setString("description", "LCD screen"); -// Positional: +// Named: bound = bound.unset("description"); -// Named: +// Positional: bound = bound.unset(1); ``` From 529c4eb54308d26c8763d3376b25fc56d6b8a6f5 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 5 Jun 2020 15:44:32 -0700 Subject: [PATCH 478/979] JAVA-2613: Improve connection pooling documentation --- changelog/README.md | 1 + core/src/main/resources/reference.conf | 13 ++++++++++- manual/core/pooling/README.md | 30 ++++++++++++++++++-------- 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 56688ae47d5..f68875c0515 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [documentation] JAVA-2613: Improve connection pooling documentation - [new feature] JAVA-2793: Add composite config loader - [new feature] JAVA-2792: Allow custom results in the mapper - [improvement] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name diff --git 
a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 0e74e87b4a5..5f6fd9afa0c 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -395,6 +395,11 @@ datastax-java-driver { local { # The number of connections in the pool. # + # Each connection can handle many concurrent requests, so 1 is generally a good place to + # start. You should only need higher values in very high performance scenarios, where + # connections might start maxing out their I/O thread (see the driver's online manual for + # more tuning instructions). + # # Required: yes # Modifiable at runtime: yes; when the change is detected, all active pools will be notified # and will adjust their size. @@ -407,7 +412,13 @@ datastax-java-driver { } # The maximum number of requests that can be executed concurrently on a connection. This must be - # between 1 and 32768. + # strictly positive, and less than 32768. + # + # We recommend against changing this value: the default of 1024 is fine for most situations, + # it's a good balance between sufficient concurrency on the client and reasonable pressure on + # the server. If you're looking for a way to limit the global throughput of the session, this is + # not the right way to do it: use a request throttler instead (see the `advanced.throttler` + # section in this configuration). # # Required: yes # Modifiable at runtime: yes, the new value will be used for connections created after the diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index d1bdba33f1c..f0a0cdb5ca0 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -59,6 +59,9 @@ datastax-java-driver.advanced.connection { } ``` +Do not change those values unless informed by concrete performance measurements; see the +[Tuning](#tuning) section at the end of this page. + Unlike previous versions of the driver, pools do not resize dynamically. 
However you can adjust the options at runtime, the driver will detect and apply the changes. @@ -69,8 +72,9 @@ If connections stay idle for too long, they might be dropped by intermediate net keepalive settings might be impractical in some environments. The driver provides application-side keepalive in the form of a connection heartbeat: when a -connection does receive incoming reads for a given amount of time, the driver will simulate activity -by writing a dummy request to it. If that request fails, the connection is trashed and replaced. +connection does not receive incoming reads for a given amount of time, the driver will simulate +activity by writing a dummy request to it. If that request fails, the connection is trashed and +replaced. This feature is enabled by default. Here are the default values in the configuration: @@ -130,20 +134,28 @@ In particular, it's a good idea to keep an eye on those two metrics: connections from opening (either configuration or network issues, or a server-side limitation -- see [CASSANDRA-8086]); * `pool.available-streams`: if this is often close to 0, it's a sign that the pool is getting - saturated. Maybe `max-requests-per-connection` is too low, or more connections should be added. + saturated. Consider adding more connections per node. ### Tuning The driver defaults should be good for most scenarios. +#### Number of requests per connection + In our experience, raising `max-requests-per-connection` above 1024 does not bring any significant improvement: the server is only going to service so many requests at a time anyway, so additional requests are just going to pile up. -Similarly, 1 connection per node is generally sufficient. However, it might become a bottleneck in -very high performance scenarios: all I/O for a connection happens on the same thread, so it's -possible for that thread to max out its CPU core. 
In our benchmarks, this happened with a -single-node cluster and a high throughput (approximately 80K requests / second / connection). +Lowering the value is not a good idea either. If your goal is to limit the global throughput of the +driver, a [throttler](../throttling) is a better solution. + +#### Number of connections per node + +1 connection per node (`pool.local.size` or `pool.remote.size`) is generally sufficient. However, it +might become a bottleneck in very high performance scenarios: all I/O for a connection happens on +the same thread, so it's possible for that thread to max out its CPU core. In our benchmarks, this +happened with a single-node cluster and a high throughput (approximately 80K requests / second / +connection). It's unlikely that you'll run into this issue: in most real-world deployments, the driver connects to more than one node, so the load will spread across more I/O threads. However if you suspect that @@ -152,11 +164,11 @@ you experience the issue, here's what to look out for: * the driver throughput plateaus but the process does not appear to max out any system resource (in particular, overall CPU usage is well below 100%); * one of the driver's I/O threads maxes out its CPU core. You can see that with a profiler, or - OS-level tools like `pidstat -tu` on Linux. With the default configuration, I/O threads are called + OS-level tools like `pidstat -tu` on Linux. By default, I/O threads are named `-io-`. Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that -at runtime and see the effects immediately. +at runtime and see the effects immediately. 
[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file From 361fa82e4d94bd09dd9d6de132e59d00983d13e1 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 5 Jun 2020 11:41:15 -0700 Subject: [PATCH 479/979] Follow up to JAVA-2792: Allow custom results in the mapper - parse supertypes when looking for mapped entities in custom result types - only pass Exception to MapperResultProducer.wrapError - remove from signature of MapperResultProducer.execute: it's rarely useful - update examples This amends d523e9a. --- .../mapper/GuavaFutureProducerService.java | 24 +++++----- manual/mapper/daos/custom_types/README.md | 19 ++++---- .../dao/DefaultDaoReturnTypeKind.java | 18 ++++---- .../dao/DefaultDaoReturnTypeParser.java | 45 ++++++++++++++++--- .../mapper/result/MapperResultProducer.java | 7 ++- 5 files changed, 74 insertions(+), 39 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java index dc8a42214a8..d1a44428aba 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java @@ -44,10 +44,10 @@ public abstract static class ListenableFutureProducer implements MapperResultPro @Nullable @Override - public Object execute( + public ListenableFuture execute( @NonNull Statement statement, @NonNull MapperContext context, - @Nullable EntityHelper entityHelper) { + @Nullable EntityHelper entityHelper) { SettableFuture result = SettableFuture.create(); context .getSession() @@ -63,13 +63,14 @@ public Object execute( return result; } - protected abstract Object convert( - AsyncResultSet resultSet, EntityHelper entityHelper); + 
@Nullable + protected abstract Object convert( + @NonNull AsyncResultSet resultSet, @Nullable EntityHelper entityHelper); @Nullable @Override - public Object wrapError(@NonNull Throwable error) { - return Futures.immediateFailedFuture(error); + public ListenableFuture wrapError(@NonNull Exception e) { + return Futures.immediateFailedFuture(e); } } @@ -83,9 +84,10 @@ public boolean canProduce(@NonNull GenericType resultType) { return resultType.equals(PRODUCED_TYPE); } + @Nullable @Override - protected Object convert( - AsyncResultSet resultSet, EntityHelper entityHelper) { + protected Object convert( + @NonNull AsyncResultSet resultSet, @Nullable EntityHelper entityHelper) { // ignore results return null; } @@ -98,9 +100,11 @@ public boolean canProduce(@NonNull GenericType resultType) { return resultType.getRawType().equals(ListenableFuture.class); } + @Nullable @Override - protected Object convert( - AsyncResultSet resultSet, EntityHelper entityHelper) { + protected Object convert( + @NonNull AsyncResultSet resultSet, @Nullable EntityHelper entityHelper) { + assert entityHelper != null; Row row = resultSet.one(); return (row == null) ? 
null : entityHelper.get(row); } diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 82e98c5e475..a8ce02c9af4 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -57,8 +57,8 @@ public class FutureOfVoidProducer implements MapperResultProducer { } @Override - public ListenableFuture execute( - Statement statement, MapperContext context, EntityHelper entityHelper) { + public ListenableFuture execute( + Statement statement, MapperContext context, EntityHelper entityHelper) { CqlSession session = context.getSession(); // (2) SettableFuture result = SettableFuture.create(); // (3) session.executeAsync(statement).whenComplete( @@ -72,7 +72,7 @@ public class FutureOfVoidProducer implements MapperResultProducer { } @Override - public ListenableFuture wrapError(Throwable error) { + public ListenableFuture wrapError(Exception error) { return Futures.immediateFailedFuture(error); // (4) } } @@ -104,16 +104,17 @@ to read. 
#### Future of entity ```java -public static class FutureOfEntityProducer implements MapperResultProducer { +public class FutureOfEntityProducer implements MapperResultProducer { @Override public boolean canProduce(GenericType resultType) { return resultType.getRawType().equals(ListenableFuture.class); // (1) } @Override - public ListenableFuture execute( - Statement statement, MapperContext context, EntityHelper entityHelper) { - SettableFuture result = SettableFuture.create(); + public ListenableFuture execute( + Statement statement, MapperContext context, EntityHelper entityHelper) { + assert entityHelper != null; + SettableFuture result = SettableFuture.create(); CqlSession session = context.getSession(); session .executeAsync(statement) @@ -130,7 +131,7 @@ public static class FutureOfEntityProducer implements MapperResultProducer { } @Override - public ListenableFuture wrapError(Throwable error) { + public ListenableFuture wrapError(Exception error) { return Futures.immediateFailedFuture(error); // same as other producer } } @@ -176,8 +177,6 @@ public boolean canProduce(GenericType genericType) { } ``` -As you can see, this is not the most pleasant API to work with. 
- ### Packaging the producers in a service Once all the producers are ready, we package them in a class that implements diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index 2e9ba5f7e41..68558c4ddf2 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -414,7 +414,7 @@ public CodeBlock wrapWithErrorHandling( CodeBlock.builder() .beginControlFlow("try") .addStatement( - "@$1T(\"unchecked\") $2T result =\n($2T) producer.wrapError(t)", + "@$1T(\"unchecked\") $2T result =\n($2T) producer.wrapError(e)", SuppressWarnings.class, returnTypeName) .addStatement("return result"); @@ -423,14 +423,14 @@ public CodeBlock wrapWithErrorHandling( // (note: manually a multi-catch would be cleaner, but from here it's simpler to generate // separate clauses) for (TypeMirror thrownType : methodElement.getThrownTypes()) { - callWrapError.nextControlFlow("catch ($T e)", thrownType).addStatement("throw e"); + callWrapError.nextControlFlow("catch ($T e2)", thrownType).addStatement("throw e2"); } // Otherwise, rethrow unchecked exceptions and wrap checked ones. 
callWrapError - .nextControlFlow("catch ($T e)", Exception.class) - .addStatement("$T.throwIfUnchecked(e)", Throwables.class) - .addStatement("throw new $T(e)", RuntimeException.class) + .nextControlFlow("catch ($T e2)", Exception.class) + .addStatement("$T.throwIfUnchecked(e2)", Throwables.class) + .addStatement("throw new $T(e2)", RuntimeException.class) .endControlFlow(); return wrapWithErrorHandling(innerBlock, callWrapError.build()); @@ -466,7 +466,7 @@ static CodeBlock wrapWithErrorHandling(CodeBlock innerBlock, CodeBlock catchBloc return CodeBlock.builder() .beginControlFlow("try") .add(innerBlock) - .nextControlFlow("catch ($T t)", Throwable.class) + .nextControlFlow("catch ($T e)", Exception.class) .add(catchBlock) .endControlFlow() .build(); @@ -474,12 +474,12 @@ static CodeBlock wrapWithErrorHandling(CodeBlock innerBlock, CodeBlock catchBloc private static final CodeBlock FAILED_FUTURE = CodeBlock.builder() - .addStatement("return $T.failedFuture(t)", CompletableFutures.class) + .addStatement("return $T.failedFuture(e)", CompletableFutures.class) .build(); private static final CodeBlock FAILED_REACTIVE_RESULT_SET = - CodeBlock.builder().addStatement("return new $T(t)", FailedReactiveResultSet.class).build(); + CodeBlock.builder().addStatement("return new $T(e)", FailedReactiveResultSet.class).build(); private static final CodeBlock FAILED_MAPPED_REACTIVE_RESULT_SET = CodeBlock.builder() - .addStatement("return new $T(t)", FailedMappedReactiveResultSet.class) + .addStatement("return new $T(e)", FailedMappedReactiveResultSet.class) .build(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index 56cbd5f977f..25f71c4bda6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -26,11 +26,14 @@ import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import javax.lang.model.element.Element; +import javax.lang.model.element.ElementKind; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; @@ -210,7 +213,7 @@ public DaoReturnType parse( if (context.areCustomResultsEnabled()) { return new DaoReturnType( DefaultDaoReturnTypeKind.CUSTOM, - findEntityInCustomType(declaredReturnType, typeParameters)); + findEntityInCustomType(declaredReturnType, typeParameters, new ArrayList<>())); } } @@ -246,17 +249,47 @@ public DaoReturnType parse( * appear at any level of nesting in the type, e.g. {@code MyCustomFuture>}. */ private TypeElement findEntityInCustomType( - TypeMirror typeMirror, Map typeParameters) { - TypeElement entityElement = EntityUtils.asEntityElement(typeMirror, typeParameters); + TypeMirror type, + Map typeParameters, + List alreadyCheckedTypes) { + + // Generic types can be recursive! e.g. Integer implements Comparable. 
Avoid infinite + // recursion: + for (TypeMirror alreadyCheckedType : alreadyCheckedTypes) { + if (context.getTypeUtils().isSameType(type, alreadyCheckedType)) { + return null; + } + } + alreadyCheckedTypes.add(type); + + TypeElement entityElement = EntityUtils.asEntityElement(type, typeParameters); if (entityElement != null) { return entityElement; - } else if (typeMirror.getKind() == TypeKind.DECLARED) { - for (TypeMirror typeArgument : ((DeclaredType) typeMirror).getTypeArguments()) { - entityElement = findEntityInCustomType(typeArgument, typeParameters); + } else if (type.getKind() == TypeKind.DECLARED) { + // Check type arguments, e.g. `Foo` where T = Product + DeclaredType declaredType = (DeclaredType) type; + for (TypeMirror typeArgument : declaredType.getTypeArguments()) { + entityElement = findEntityInCustomType(typeArgument, typeParameters, alreadyCheckedTypes); if (entityElement != null) { return entityElement; } } + Element element = declaredType.asElement(); + if (element.getKind() == ElementKind.CLASS || element.getKind() == ElementKind.INTERFACE) { + // Check interfaces, e.g. `Foo implements Iterable`, where T = Product + TypeElement typeElement = (TypeElement) element; + for (TypeMirror parentInterface : typeElement.getInterfaces()) { + entityElement = + findEntityInCustomType(parentInterface, typeParameters, alreadyCheckedTypes); + if (entityElement != null) { + return entityElement; + } + } + // Check superclass (if there is none then the mirror has TypeKind.NONE and the recursive + // call will return null). 
+ return findEntityInCustomType( + typeElement.getSuperclass(), typeParameters, alreadyCheckedTypes); + } } // null is a valid result even at the top level, a custom type may not contain any entity return null; diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java index 4940d2eaf07..279363ac969 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.java @@ -84,12 +84,11 @@ public interface MapperResultProducer { * @return the object to return from the DAO method. This must match the type that this producer * was selected for, there will be an unchecked cast at runtime. */ - @SuppressWarnings("TypeParameterUnusedInFormals") @Nullable - Object execute( + Object execute( @NonNull Statement statement, @NonNull MapperContext context, - @Nullable EntityHelper entityHelper); + @Nullable EntityHelper entityHelper); /** * Surfaces any error encountered in the DAO method (either in the generated mapper code that @@ -103,5 +102,5 @@ Object execute( * declares them, or wrapped into a {@link RuntimeException} otherwise. 
*/ @Nullable - Object wrapError(@NonNull Throwable error) throws Exception; + Object wrapError(@NonNull Exception e) throws Exception; } From e0d5bc0a4b2e90493685bc6199785f7b7c817d1c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 11 Jun 2020 15:55:37 +0200 Subject: [PATCH 480/979] Upgrade Jackson libraries to 2.11.0 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index ffbaa5e93d8..6df86943c14 100644 --- a/pom.xml +++ b/pom.xml @@ -51,8 +51,8 @@ 1.7.26 1.0.2 20190722 - 2.10.0 - 2.10.0 + 2.11.0 + 2.11.0 1.9.12 1.1.7.3 From a79018cbe08e324cd4bb7347b05a7d464fca32d9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 4 Jun 2020 16:57:10 -0700 Subject: [PATCH 481/979] JAVA-2666: Document BOM and driver modules --- changelog/README.md | 1 + manual/core/bom/README.md | 107 ++++++++++++++++++++++++++++++ manual/core/integration/README.md | 71 ++++++++++++++++++++ manual/core/shaded_jar/README.md | 48 ++++++++++---- manual/mapper/README.md | 28 +------- manual/mapper/config/README.md | 33 +++++++-- 6 files changed, 244 insertions(+), 44 deletions(-) create mode 100644 manual/core/bom/README.md diff --git a/changelog/README.md b/changelog/README.md index f68875c0515..d24f8fe4893 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [documentation] JAVA-2666: Document BOM and driver modules - [documentation] JAVA-2613: Improve connection pooling documentation - [new feature] JAVA-2793: Add composite config loader - [new feature] JAVA-2792: Allow custom results in the mapper diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md new file mode 100644 index 00000000000..70d83292a08 --- /dev/null +++ b/manual/core/bom/README.md @@ -0,0 +1,107 @@ +## Bill of Materials (BOM) + +A "Bill Of Materials" is a special Maven descriptor that defines the versions of a set of related +artifacts. 
+ +To import the driver's BOM, add the following section in your application's own POM: + +```xml + + ... + + + + com.datastax.oss + java-driver-bom + 4.6.1 + pom + import + + + +``` + +This allows you to omit the version when you later reference the driver artifacts: + +```xml + + ... + + + com.datastax.oss + java-driver-query-builder + + +``` + +The advantage is that this also applies to transitive dependencies. For example, if there is a +third-party library X that depends on `java-driver-core`, and you add a dependency to X in this +project, `java-driver-core` will be set to the BOM version, regardless of which version X declares +in its POM. The driver artifacts are always in sync, however they were pulled into the project. + +### BOM and mapper processor + +If you are using the driver's [object mapper](../../mapper), our recommendation is to declare the +mapper processor in the [annotationProcessorPaths](../../mapper/config/#maven) section of the +compiler plugin configuration. Unfortunately, `` versions don't work there, +this is a known Maven issue ([MCOMPILER-391]). + +As a workaround, you can either declare the mapper processor as a regular dependency in the provided +scope: + +```xml + + + com.datastax.oss + java-driver-mapper-processor + provided + + +``` + +Or keep it in the compiler plugin, but repeat the version explicitly. 
In that case, it's probably a +good idea to extract a property to keep it in sync with the BOM: + +```xml + + + 4.6.1 + + + + + com.datastax.oss + java-driver-bom + ${java-driver.version} + pom + import + + + + + + + com.datastax.oss + java-driver-mapper-runtime + + + + + + maven-compiler-plugin + + + + + com.datastax.oss + java-driver-mapper-processor + ${java-driver.version} + + + + + + +``` + +[MCOMPILER-391]: https://issues.apache.org/jira/browse/MCOMPILER-391 \ No newline at end of file diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index bd03509bac6..f4c8fe5a298 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -8,6 +8,77 @@ ----- +### Which artifact(s) should I use? + +There are multiple driver artifacts under the group id +[com.datastax.oss](https://search.maven.org/search?q=g:com.datastax.oss). Here's how to pick the +right dependencies: + +

      • + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
        FeatureArtifact(s)Comments
        + Core functionality: executing queries with CqlSession.execute(), processing the + results with ResultSet, etc. + java‑driver‑core
        + Same as the above, but without explicit dependencies to Netty, + Jackson or ESRI. + java‑driver‑core‑shaded + Replaces java‑driver‑core.
        + See this page. +
        + Query builder: generating CQL query strings programmatically. + java‑driver‑query‑builder
        + Object mapper: generating the boilerplate to execute queries and + convert the results into your own domain classes. + + java‑driver‑mapper‑processor
        + java‑driver‑mapper‑runtime +
        + Both artifacts are needed.
        + See this page. +
        + "Bill Of Materials": can help manage versions if you use multiple driver artifacts. + java‑driver‑bomSee this page.
        + Writing integration tests that run the driver against Cassandra or Simulacron. + java‑driver‑test‑infra + Those APIs are not covered in this manual, but you can look at the driver's contribution + guidelines and internal tests for + guidance. +
        + ### Minimal project structure We publish the driver to [Maven central][central_oss]. Most modern build tools can download the diff --git a/manual/core/shaded_jar/README.md b/manual/core/shaded_jar/README.md index 45ed7fd63af..2f52e44c6a4 100644 --- a/manual/core/shaded_jar/README.md +++ b/manual/core/shaded_jar/README.md @@ -1,16 +1,12 @@ ## Using the shaded JAR -The default driver JAR depends on [Netty](http://netty.io/), which is -used internally for networking. +The default `java-driver-core` JAR depends on a number of [third party +libraries](../integration/#driver-dependencies). This can create conflicts if your application +already uses other versions of those same dependencies. -The driver is compatible with all Netty versions in the range [4.1.7, 4.2.0), -that is, it can work with any version equal to or higher than 4.1.7, and -lesser than 4.2.0. - -This explicit dependency can be a problem if your application already -uses another Netty version. To avoid conflicts, we provide a "shaded" -version of the JAR, which bundles the Netty classes under a different -package name: +To avoid this, we provide an alternative core artifact that shades [Netty](../integration/#netty), +[Jackson](../integration/#jackson) and [ESRI](../integration/#esri). 
To use it, replace the +dependency to `java-driver-core` by: ```xml @@ -20,8 +16,8 @@ package name: ``` -If you also use the query-builder or some other library that depends on java-driver-core, you need to remove its -dependency to the non-shaded JAR: +If you also use the query-builder, mapper or some other library that depends on java-driver-core, +you need to remove its dependency to the non-shaded JAR: ```xml @@ -29,6 +25,7 @@ dependency to the non-shaded JAR: java-driver-core-shaded ${driver.version} + com.datastax.oss java-driver-query-builder @@ -41,3 +38,30 @@ dependency to the non-shaded JAR: ``` + +Notes: + +* the shading process works by moving the libraries under a different package name, and bundling + them directly into the driver JAR. This should be transparent for client applications: the + impacted dependencies are purely internal, their types are not surfaced in the driver's public + API. +* the driver is compatible with all Netty versions in the range `[4.1.7, 4.2.0)` (equal to or higher + than 4.1.7, and lesser than 4.2.0). If you just need a specific version in that range, you can + avoid the need for the shaded JAR by declaring an explicit dependency in your POM: + + ```xml + + com.datastax.oss + java-driver-core + ${driver.version} + + + + io.netty + netty-handler + 4.1.39.Final + + ``` + + This only works with Netty: for Jackson and ESRI, only the exact version declared in the driver POM + is supported. diff --git a/manual/mapper/README.md b/manual/mapper/README.md index 1de64c90df2..8e745bf44f9 100644 --- a/manual/mapper/README.md +++ b/manual/mapper/README.md @@ -3,31 +3,9 @@ The mapper generates the boilerplate to execute queries and convert the results into application-level objects. -It is published as two artifacts: - -* the `java-driver-mapper-processor` module is **only needed in the compile classpath**, your - application doesn't need to depend on it at runtime. 
- - ```xml - - com.datastax.oss - java-driver-mapper-processor - ${driver.version} - - ``` - - See [Configuring the annotation processor](config/) for detailed instructions for different - build tools. - -* the `java-driver-mapper-runtime` module is a regular runtime dependency: - - ```xml - - com.datastax.oss - java-driver-mapper-runtime - ${driver.version} - - ``` +It is published as two artifacts: `com.datastax.oss:java-driver-mapper-processor` and +`com.datastax.oss:java-driver-mapper-runtime`. See [Integration](config/) for detailed instructions +for different build tools. ### Quick start diff --git a/manual/mapper/config/README.md b/manual/mapper/config/README.md index 2b8f6dd702d..0ccb224ba97 100644 --- a/manual/mapper/config/README.md +++ b/manual/mapper/config/README.md @@ -1,13 +1,13 @@ -## Configuring the annotation processor +## Integration -The mapper's annotation processor hooks into the Java compiler, and generates additional source -files from your annotated classes before the main compilation happens. It is contained in the -`java-driver-mapper-processor` artifact. +### Builds tools -As a reminder, there is also a `java-driver-mapper-runtime` artifact, which contains the annotations -and a few utility classes. This one is a regular dependency, and it is required at runtime. +The `java-driver-mapper-processor` artifact contains the annotation processor. It hooks into the +Java compiler, and generates additional source files from your annotated classes before the main +compilation happens. It is only required in the compile classpath. -### Builds tools +The `java-driver-mapper-runtime` artifact contains the annotations and a few utility classes. It is +a regular dependency, required at runtime. #### Maven @@ -48,6 +48,25 @@ configuration (make sure you use version 3.5 or higher): ``` +Alternatively (e.g. 
if you are using the [BOM](../../core/bom/)), you may also declare the processor +as a regular dependency in the "provided" scope: + +```xml + + + com.datastax.oss + java-driver-mapper-processor + ${java-driver.version} + provided + + + com.datastax.oss + java-driver-mapper-runtime + ${java-driver.version} + + +``` + The processor runs every time you execute the `mvn compile` phase. It normally supports incremental builds, but if something looks off you can try a full rebuild with `mvn clean compile`. From 4cd369548e32eabbf772ed4e497a151ca225e28e Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 11 Jun 2020 11:51:40 -0500 Subject: [PATCH 482/979] JAVA-2803: Move Graal substitutions for protocol compression from cassandra-quarkus into java-driver (#1452) --- core/pom.xml | 5 ++ .../internal/core/os/JnrLibcSubstitution.java | 54 ++++++++++++++++++ .../internal/core/protocol/Lz4Missing.java | 33 +++++++++++ .../core/protocol/Lz4Substitution.java | 55 +++++++++++++++++++ .../core/protocol/SnappySubstitution.java | 52 ++++++++++++++++++ pom.xml | 8 ++- 6 files changed, 206 insertions(+), 1 deletion(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibcSubstitution.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java diff --git a/core/pom.xml b/core/pom.xml index 47858cd2427..f690caa1b9c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -132,6 +132,11 @@ graal-sdk provided + + org.graalvm.nativeimage + svm + provided + ch.qos.logback logback-classic diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibcSubstitution.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibcSubstitution.java new file mode 100644 index 
00000000000..1139238f6c3 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/JnrLibcSubstitution.java @@ -0,0 +1,54 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.os; + +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import java.util.Optional; + +/** + * Add an explicit Graal substitution for {@link JnrLibc}. If we don't implement something like this + * the analysis done at Graal native image build time will discover the jnr-posix references in + * JnrLibc even though they won't be used at runtime. By default jnr-ffi (used by jnr-posix to do + * it's work) will use {@link ClassLoader#defineClass(String, byte[], int, int)} which isn't + * supported by Graal. This behaviour can be changed with a system property but the cleanest + * solution is simply to remove the references to jnr-posix code via a Graal substitution. 
+ */ +@TargetClass(JnrLibc.class) +@Substitute +final class JnrLibcSubstitution implements Libc { + + @Substitute + public JnrLibcSubstitution() {} + + @Substitute + @Override + public boolean available() { + return false; + } + + @Substitute + @Override + public Optional gettimeofday() { + return Optional.empty(); + } + + @Substitute + @Override + public Optional getpid() { + return Optional.empty(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java new file mode 100644 index 00000000000..9ecccf6df5d --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import java.util.function.BooleanSupplier; + +public class Lz4Missing implements BooleanSupplier { + + private static final String LZ4_CLZ_NAME = "net.jpountz.lz4.LZ4Compressor"; + + @Override + public boolean getAsBoolean() { + try { + Class.forName(LZ4_CLZ_NAME); + return false; + } catch (ClassNotFoundException e) { + return true; + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java new file mode 100644 index 00000000000..12a55be53bf --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.oracle.svm.core.annotate.Inject; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import io.netty.buffer.ByteBuf; + +@TargetClass( + className = "com.datastax.oss.driver.internal.core.protocol.Lz4Compressor", + onlyWith = Lz4Missing.class) +final class Lz4Substitution { + + @Inject + private final String EXCEPTION_MSG = + "This native image was not built with support for LZ4 compression"; + + @Substitute + public Lz4Substitution(DriverContext context) {} + + @Substitute + protected ByteBuf compressHeap(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf decompressDirect(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf decompressHeap(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf compressDirect(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java new file mode 100644 index 00000000000..ccc1dc74408 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.oracle.svm.core.annotate.Inject; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import io.netty.buffer.ByteBuf; + +/** + * Snappy compression relies on the underlying native library and thus is not supported for native + * images + */ +@TargetClass(className = "com.datastax.oss.driver.internal.core.protocol.SnappyCompressor") +final class SnappySubstitution { + + @Inject + private final String EXCEPTION_MSG = "Snappy compression is not supported for native images"; + + @Substitute + protected ByteBuf compressHeap(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf decompressDirect(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf decompressHeap(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } + + @Substitute + protected ByteBuf compressDirect(ByteBuf input) { + throw new UnsupportedOperationException(EXCEPTION_MSG); + } +} diff --git a/pom.xml b/pom.xml index 6df86943c14..13a3820ff3e 100644 --- a/pom.xml +++ b/pom.xml @@ -73,6 +73,7 @@ 4.0.2 2.0.0-M19 2.22.2 + 20.0.0 false ${skipTests} @@ -380,7 +381,12 @@ org.graalvm.sdk graal-sdk - 20.0.0 + ${graalapi.version} + + + org.graalvm.nativeimage + svm + ${graalapi.version} From 199e6ba6f17eb13471e4b95eb9bd9e0b9bdc4893 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 12 Jun 2020 18:39:29 +0200 Subject: [PATCH 
483/979] Fix build failure when certain IT categories are skipped --- integration-tests/pom.xml | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 045a45ebbee..c39b5fc5221 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -288,16 +288,33 @@ - verify + verify-parallelized verify - - ${project.build.directory}/failsafe-reports/failsafe-summary-parallelized.xml - ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml - ${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml - + ${skipParallelizableITs} + ${project.build.directory}/failsafe-reports/failsafe-summary-parallelized.xml + + + + verify-serial + + verify + + + ${skipSerialITs} + ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml + + + + verify-isolated + + verify + + + ${skipIsolatedITs} + ${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml
        From d45744a60485e9e9326f59125554c618b24ff90e Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 12 Jun 2020 17:38:29 -0700 Subject: [PATCH 484/979] Fix outdated defaults in OptionsMap Also revisit the unit test: it was comparing just the keys, not the values. --- .../driver/api/core/config/OptionsMap.java | 6 +- core/src/main/resources/reference.conf | 2 +- .../map/MapBasedDriverConfigLoaderTest.java | 69 +++++++++++++++---- 3 files changed, 60 insertions(+), 17 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index c8e8bbf4cb4..c148d32ae96 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -260,7 +260,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); map.put(TypedDriverOption.CONNECTION_POOL_REMOTE_SIZE, 1); map.put(TypedDriverOption.CONNECTION_MAX_REQUESTS, 1024); - map.put(TypedDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS, 24576); + map.put(TypedDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS, 256); map.put(TypedDriverOption.CONNECTION_WARN_INIT_ERROR, true); map.put(TypedDriverOption.RECONNECT_ON_INIT, false); map.put(TypedDriverOption.RECONNECTION_POLICY_CLASS, "ExponentialReconnectionPolicy"); @@ -306,12 +306,12 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_INTERVAL, Duration.ofMinutes(5)); map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, - Duration.ofSeconds(3)); + Duration.ofMinutes(2)); map.put(TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, 3); map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); - 
map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(12)); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 5f6fd9afa0c..efb86fe983a 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -966,7 +966,7 @@ datastax-java-driver { # Modifiable at runtime: yes, the new value will be used for connections created after the # change. # Overridable in a profile: no - max-frame-length = 256 MB + max-frame-length = 256 MiB } advanced.request { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java index 24a7df5d1e9..75196b7e539 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java @@ -16,16 +16,21 @@ package com.datastax.oss.driver.internal.core.config.map; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.api.core.config.OptionsMap; +import 
com.datastax.oss.driver.api.core.config.TypedDriverOption; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.config.MockOptions; import com.datastax.oss.driver.internal.core.config.MockTypedOptions; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; -import java.util.Map; -import java.util.SortedSet; +import com.typesafe.config.ConfigException; +import java.util.Optional; import org.junit.Test; public class MapBasedDriverConfigLoaderTest { @@ -49,20 +54,58 @@ public void should_reflect_changes_in_real_time() { */ @Test public void should_fill_default_profile_like_reference_file() { - SortedSet> memoryBased = - DriverConfigLoader.fromMap(OptionsMap.driverDefaults()) - .getInitialConfig() - .getDefaultProfile() - .entrySet(); - SortedSet> fileBased = - new DefaultDriverConfigLoader().getInitialConfig().getDefaultProfile().entrySet(); + OptionsMap optionsMap = OptionsMap.driverDefaults(); + DriverExecutionProfile mapBasedConfig = + DriverConfigLoader.fromMap(optionsMap).getInitialConfig().getDefaultProfile(); + DriverExecutionProfile fileBasedConfig = + new DefaultDriverConfigLoader().getInitialConfig().getDefaultProfile(); - for (Map.Entry entry : fileBased) { - if (entry.getKey().equals(DefaultDriverOption.CONFIG_RELOAD_INTERVAL.getPath())) { + // Make sure we're not missing any options. -1 is for CONFIG_RELOAD_INTERVAL, which is not + // defined by OptionsMap because it is irrelevant for the map-based config. 
+ assertThat(mapBasedConfig.entrySet()).hasSize(fileBasedConfig.entrySet().size() - 1); + + for (TypedDriverOption option : TypedDriverOption.builtInValues()) { + if (option.getRawOption() == DefaultDriverOption.CONFIG_RELOAD_INTERVAL) { continue; } - assertThat(memoryBased).as("Missing entry: " + entry).contains(entry); + Optional fileBasedValue = get(fileBasedConfig, option); + Optional mapBasedValue = get(mapBasedConfig, option); + assertThat(mapBasedValue) + .as("Wrong value for %s in OptionsMap", option.getRawOption()) + .isEqualTo(fileBasedValue); + } + } + + private Optional get(DriverExecutionProfile config, TypedDriverOption typedOption) { + DriverOption option = typedOption.getRawOption(); + GenericType type = typedOption.getExpectedType(); + Object value = null; + if (config.isDefined(option)) { + // This is ugly, we have no other way than enumerating all possible types. + // This kind of bridging code between OptionsMap and DriverConfig is unlikely to exist + // anywhere outside of this test. 
+ if (type.equals(GenericType.listOf(String.class))) { + value = config.getStringList(option); + } else if (type.equals(GenericType.STRING)) { + value = config.getString(option); + } else if (type.equals(GenericType.DURATION)) { + value = config.getDuration(option); + } else if (type.equals(GenericType.INTEGER)) { + value = config.getInt(option); + } else if (type.equals(GenericType.BOOLEAN)) { + value = config.getBoolean(option); + } else if (type.equals(GenericType.LONG)) { + try { + value = config.getLong(option); + } catch (ConfigException.WrongType e) { + value = config.getBytes(option); + } + } else if (type.equals(GenericType.mapOf(GenericType.STRING, GenericType.STRING))) { + value = config.getStringMap(option); + } else { + fail("Unexpected type " + type); + } } - assertThat(memoryBased).hasSize(fileBased.size() - 1); + return Optional.ofNullable(value); } } From 57b9488b82cdb1ca8560cac7931d6a71efccc755 Mon Sep 17 00:00:00 2001 From: olim7t Date: Sun, 14 Jun 2020 20:40:47 -0700 Subject: [PATCH 485/979] Document Reactive Streams dependency in manual --- manual/core/integration/README.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index f4c8fe5a298..37119799afb 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -522,6 +522,27 @@ on: unlike the driver, TinkerPop does not follow semantic versioning, so even a (e.g. 3.3.0 vs 3.3.3) could introduce incompatibilities. So do not declare an explicit dependency in your application, let the driver pull it transitively. +#### Reactive Streams + +[Reactive Streams](https://www.reactive-streams.org/) types are referenced in our [reactive +API](../reactive/). 
+ +If you never call any of the `executeReactive` methods, you can exclude the dependency: + +```xml + + com.datastax.oss + java-driver-core + ${driver.version} + + + org.reactivestreams + reactive-streams + + + +``` + #### Documenting annotations The driver team uses annotations to document certain aspects of the code: From e327fa73f8ef3e798b0bc00806900828bb755bd8 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Jun 2020 10:42:50 +0200 Subject: [PATCH 486/979] Do not fail build if there are test failures (#1453) --- Jenkinsfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 387324e9fba..64d6152919c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -80,6 +80,8 @@ def executeTests() { printenv | sort mvn -B -V ${INTEGRATION_TESTS_FILTER_ARGUMENT} verify \ + -DfailIfNoTests=false \ + -Dmaven.test.failure.ignore=true \ -Dmaven.javadoc.skip=${SKIP_JAVADOCS} \ -Dccm.version=${CCM_CASSANDRA_VERSION} \ -Dccm.dse=${CCM_IS_DSE} \ From 282b5eab57f4f4a09ab13f8f4beade70982a178b Mon Sep 17 00:00:00 2001 From: Ryan Scheidter Date: Mon, 15 Jun 2020 03:58:11 -0500 Subject: [PATCH 487/979] Fix wrong usage example in Javadocs of Metrics.getSessionMetric (#1451) --- .../com/datastax/oss/driver/api/core/metrics/Metrics.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/Metrics.java b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/Metrics.java index 12b39666cb4..34bcc1d9c22 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/Metrics.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/Metrics.java @@ -53,10 +53,10 @@ public interface Metrics { * *
        {@code
            * // Correct:
        -   * Gauge connectedNodes = getNodeMetric(node, DefaultSessionMetric.CONNECTED_NODES);
        +   * Gauge connectedNodes = getSessionMetric(DefaultSessionMetric.CONNECTED_NODES);
            *
            * // Wrong, will throw CCE:
        -   * Counter connectedNodes = getNodeMetric(node, DefaultSessionMetric.CONNECTED_NODES);
        +   * Counter connectedNodes = getSessionMetric(DefaultSessionMetric.CONNECTED_NODES);
            * }
        * * @param profileName the name of the execution profile, or {@code null} if the metric is not From b009651a30dad3f29c548ae32ca7638af73847e8 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Jun 2020 11:05:34 +0200 Subject: [PATCH 488/979] Rename SessionLeakTest to SessionLeakIT --- .../driver/core/{SessionLeakTest.java => SessionLeakIT.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename integration-tests/src/test/java/com/datastax/oss/driver/core/{SessionLeakTest.java => SessionLeakIT.java} (99%) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakTest.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java similarity index 99% rename from integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakTest.java rename to integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java index 62b1bb40d6f..ef8bf329174 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakTest.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java @@ -49,7 +49,7 @@ @Category(IsolatedTests.class) @RunWith(MockitoJUnitRunner.class) -public class SessionLeakTest { +public class SessionLeakIT { @ClassRule public static final SimulacronRule SIMULACRON_RULE = From 590ac020c59286cdee04cb35c05ee7e7b3d788ca Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 29 May 2020 16:57:39 +0200 Subject: [PATCH 489/979] Exclude GraalVM from OSGi bundles --- core-shaded/pom.xml | 9 ++++++--- core/pom.xml | 5 ++++- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 1bf114a59a2..2c477a4dd9e 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -316,7 +316,10 @@ not required at runtime. 
-->!net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, !org.graalvm.*, !com.oracle.svm.*, + jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional,org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.oracle.svm.core.annotate.*,!com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, !reactor.blockhound.*, * + -->!com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, !reactor.blockhound.*, * !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, !org.graalvm.*, !com.oracle.svm.*, + * - - !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, * + + !net.jcip.annotations.*, !edu.umd.cs.findbugs.annotations.*, + org.reactivestreams.*;resolution:=optional, * + com.datastax.*.driver.*.mapper.* diff --git a/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java new file mode 100644 index 00000000000..23c21c6f5f7 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.dse.driver.internal.mapper.reactive; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.mapper.MapperContext; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.internal.mapper.DaoBase; + +public class ReactiveDaoBase extends DaoBase { + + protected ReactiveDaoBase(MapperContext context) { + super(context); + } + + protected ReactiveResultSet executeReactive(Statement statement) { + return context.getSession().executeReactive(statement); + } + + protected MappedReactiveResultSet executeReactiveAndMap( + Statement statement, EntityHelper entityHelper) { + ReactiveResultSet source = executeReactive(statement); + return new DefaultMappedReactiveResultSet<>(source, entityHelper::get); + } +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index d12ab19bfd3..51ed93acaf7 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -15,9 +15,6 @@ */ package com.datastax.oss.driver.internal.mapper; -import 
com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; -import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; -import com.datastax.dse.driver.internal.mapper.reactive.DefaultMappedReactiveResultSet; import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; @@ -284,16 +281,6 @@ CompletableFuture> executeAsyncAndMapToEntity return executeAsync(statement).thenApply(rs -> rs.map(entityHelper::get)); } - protected ReactiveResultSet executeReactive(Statement statement) { - return context.getSession().executeReactive(statement); - } - - protected MappedReactiveResultSet executeReactiveAndMap( - Statement statement, EntityHelper entityHelper) { - ReactiveResultSet source = executeReactive(statement); - return new DefaultMappedReactiveResultSet<>(source, entityHelper::get); - } - protected static void throwIfProtocolVersionV3(MapperContext context) { if (context.getSession().getContext().getProtocolVersion().getCode() <= ProtocolConstants.Version.V3) { From 17497192bfdf85977d222ef1cc9ca2a3bcb2012f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 7 Apr 2020 20:50:06 +0200 Subject: [PATCH 492/979] Consider Simulacron and Commons Exec as optional dependencies This commit changes these dependencies to optional. This allows the OSGi bundle to declare such dependencies as optional package imports, thus limiting the number of bundles that need to be provisioned for it. 
--- integration-tests/pom.xml | 10 +++++++ test-infra/pom.xml | 4 +++ test-infra/revapi.json | 30 +++++++++++++++++++ .../driver/api/testinfra/ccm/CcmBridge.java | 8 +++-- 4 files changed, 50 insertions(+), 2 deletions(-) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index c39b5fc5221..6dc31265e65 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -187,6 +187,16 @@ wiremock test + + com.datastax.oss.simulacron + simulacron-native-server + test + + + org.apache.commons + commons-exec + test + diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 6ec58abcf25..8def9e2628c 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -56,13 +56,17 @@ org.assertj assertj-core + com.datastax.oss.simulacron simulacron-native-server + true + org.apache.commons commons-exec + true org.awaitility diff --git a/test-infra/revapi.json b/test-infra/revapi.json index 7e56d055239..dd6af30dd8c 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -100,6 +100,36 @@ "old": "field com.datastax.oss.driver.api.testinfra.ccm.CcmBridge.DEFAULT_SERVER_TRUSTSTORE_PASSWORD", "new": "field com.datastax.oss.driver.api.testinfra.ccm.CcmBridge.DEFAULT_SERVER_TRUSTSTORE_PASSWORD", "justification": "JAVA-2620: Use clearly dummy passwords in tests" + }, + { + "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec", + "justification":"Dependency was made optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec.Builder", + "justification":"Dependency was made optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.QueryLog", + "justification":"Dependency was made optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.server.BoundCluster", + "justification":"Dependency was made optional" + }, + { 
+ "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.server.BoundTopic", + "justification":"Dependency was made optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class com.datastax.oss.simulacron.server.Server", + "justification":"Dependency was made optional" } ] } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 8e37612df6a..df5f1af05a2 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -15,8 +15,6 @@ */ package com.datastax.oss.driver.api.testinfra.ccm; -import static io.netty.util.internal.PlatformDependent.isWindows; - import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.driver.shaded.guava.common.io.Resources; @@ -32,6 +30,7 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; @@ -167,6 +166,11 @@ private CcmBridge( this.dseWorkloads = dseWorkloads; } + // Copied from Netty's PlatformDependent to avoid the dependency on Netty + private static boolean isWindows() { + return System.getProperty("os.name", "").toLowerCase(Locale.US).contains("win"); + } + public Optional getDseVersion() { return DSE_ENABLEMENT ? 
Optional.of(VERSION) : Optional.empty(); } From c2653fe5f1cff02dc21078fe195feb6eb744f9ec Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 12 Mar 2020 14:22:43 -0500 Subject: [PATCH 493/979] JAVA-2657: Ability to provide a class loader to load configuration resources This commit introduces the possibility for the user to supply a custom class loader to locate classpath configuration resources such as application.conf. This is particularly useful for OSGi deployments since configuration resources typically reside inside the application bundle, and the only loader capable of locating them is the application bundle class loader. --- changelog/README.md | 1 + .../api/core/config/DriverConfigLoader.java | 59 ++++++++++++++++-- .../api/core/session/SessionBuilder.java | 41 ++++++++++--- .../typesafe/DefaultDriverConfigLoader.java | 35 ++++++++++- ...ProgrammaticDriverConfigLoaderBuilder.java | 60 ++++++++++++------- ...rammaticDriverConfigLoaderBuilderTest.java | 10 ++-- manual/osgi/README.md | 56 +++++++++++++++-- 7 files changed, 214 insertions(+), 48 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index d24f8fe4893..a9130e2c297 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.7.0 (in progress) +- [bug] JAVA-2657: Add ability to specify the class loader to use for application-specific classpath resources - [documentation] JAVA-2666: Document BOM and driver modules - [documentation] JAVA-2613: Improve connection pooling documentation - [new feature] JAVA-2793: Add composite config loader diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index 7b2c6f31562..b70e6bab125 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -15,6 +15,7 @@ */ package 
com.datastax.oss.driver.api.core.config; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.internal.core.config.composite.CompositeDriverConfigLoader; @@ -23,6 +24,7 @@ import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; +import com.typesafe.config.ConfigParseOptions; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.net.URL; @@ -36,10 +38,29 @@ */ public interface DriverConfigLoader extends AutoCloseable { + /** + * Builds an instance using the driver's default implementation (based on Typesafe config) except + * that application-specific classpath resources will be located using the provided {@link + * ClassLoader} instead of {@linkplain Thread#getContextClassLoader() the current thread's context + * class loader}. + * + *

        The returned loader will honor the reload interval defined by the option {@code + * basic.config-reload-interval}. + */ + @NonNull + static DriverConfigLoader fromDefaults(@NonNull ClassLoader appClassLoader) { + return new DefaultDriverConfigLoader(appClassLoader); + } + /** * Builds an instance using the driver's default implementation (based on Typesafe config), except * that application-specific options are loaded from a classpath resource with a custom name. * + *

        The class loader used to locate application-specific classpath resources is {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. This might not be + * suitable for OSGi deployments, which should use {@link #fromClasspath(String, ClassLoader)} + * instead. + * *

        More precisely, configuration properties are loaded and merged from the following * (first-listed are higher priority): * @@ -60,13 +81,27 @@ public interface DriverConfigLoader extends AutoCloseable { */ @NonNull static DriverConfigLoader fromClasspath(@NonNull String resourceBaseName) { + return fromClasspath(resourceBaseName, Thread.currentThread().getContextClassLoader()); + } + + /** + * Just like {@link #fromClasspath(java.lang.String)} except that application-specific classpath + * resources will be located using the provided {@link ClassLoader} instead of {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. + */ + @NonNull + static DriverConfigLoader fromClasspath( + @NonNull String resourceBaseName, @NonNull ClassLoader appClassLoader) { return new DefaultDriverConfigLoader( () -> { ConfigFactory.invalidateCaches(); Config config = ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseResourcesAnySyntax(resourceBaseName)) - .withFallback(ConfigFactory.defaultReference()) + .withFallback( + ConfigFactory.parseResourcesAnySyntax( + resourceBaseName, + ConfigParseOptions.defaults().setClassLoader(appClassLoader))) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) .resolve(); return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); }); @@ -125,7 +160,7 @@ static DriverConfigLoader fromFile(@NonNull File file) { Config config = ConfigFactory.defaultOverrides() .withFallback(ConfigFactory.parseFileAnySyntax(file)) - .withFallback(ConfigFactory.defaultReference()) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) .resolve(); return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); }); @@ -159,7 +194,7 @@ static DriverConfigLoader fromUrl(@NonNull URL url) { Config config = ConfigFactory.defaultOverrides() .withFallback(ConfigFactory.parseURL(url)) - .withFallback(ConfigFactory.defaultReference()) + 
.withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) .resolve(); return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); }); @@ -214,6 +249,11 @@ static DriverConfigLoader fromUrl(@NonNull URL url) { * Note that {@code application.*} is entirely optional, you may choose to only rely on the * driver's built-in {@code reference.conf} and programmatic overrides. * + *

        The class loader used to locate application-specific classpath resources is {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. This might not be + * suitable for OSGi deployments, which should use {@link #programmaticBuilder(ClassLoader)} + * instead. + * *

        The resulting configuration is expected to contain a {@code datastax-java-driver} section. * *

        The loader will honor the reload interval defined by the option {@code @@ -228,6 +268,17 @@ static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder() { return new DefaultProgrammaticDriverConfigLoaderBuilder(); } + /** + * Just like {@link #programmaticBuilder()} except that application-specific classpath resources + * will be located using the provided {@link ClassLoader} instead of {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. + */ + @NonNull + static ProgrammaticDriverConfigLoaderBuilder programmaticBuilder( + @NonNull ClassLoader appClassLoader) { + return new DefaultProgrammaticDriverConfigLoaderBuilder(appClassLoader); + } + /** * Builds an instance backed by an {@link OptionsMap}, which holds all options in memory. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 1ad6ab2f864..2562c89f614 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -62,7 +62,6 @@ import java.util.concurrent.Callable; import java.util.concurrent.CompletionStage; import java.util.function.Predicate; -import java.util.function.Supplier; import javax.net.ssl.SSLContext; import net.jcip.annotations.NotThreadSafe; @@ -121,10 +120,20 @@ public SelfT withConfigLoader(@Nullable DriverConfigLoader configLoader) { } @NonNull + @Deprecated protected DriverConfigLoader defaultConfigLoader() { return new DefaultDriverConfigLoader(); } + @NonNull + protected DriverConfigLoader defaultConfigLoader(@Nullable ClassLoader classLoader) { + if (classLoader == null) { + return new DefaultDriverConfigLoader(); + } else { + return new DefaultDriverConfigLoader(classLoader); + } + } + /** * Adds contact points to use for the initial connection to the cluster. 
* @@ -404,8 +413,20 @@ public SelfT withKeyspace(@Nullable String keyspaceName) { /** * The {@link ClassLoader} to use to reflectively load class names defined in configuration. * - *

        If null, the driver attempts to use the same {@link ClassLoader} that loaded the core driver - * classes, which is generally the right thing to do. + *

        Unless you define a custom {@link #configLoader}, this class loader will also be used to + * locate application-specific configuration resources. + * + *

        If you do not provide any custom class loader, the driver will attempt to use the following + * ones: + * + *

          + *
        1. When reflectively loading class names defined in configuration: same class loader that + * loaded the core driver classes. + *
        2. When locating application-specific configuration resources: the current thread's + * {@linkplain Thread#getContextClassLoader() context class loader}. + *
        + * + * This is generally the right thing to do. * *

        Defining a different class loader is typically only needed in web or OSGi environments where * there are complex class loading requirements. @@ -617,7 +638,13 @@ public SessionT build() { @NonNull protected final CompletionStage buildDefaultSessionAsync() { try { - DriverConfigLoader configLoader = buildIfNull(this.configLoader, this::defaultConfigLoader); + + ProgrammaticArguments programmaticArguments = programmaticArgumentsBuilder.build(); + + DriverConfigLoader configLoader = + this.configLoader != null + ? this.configLoader + : defaultConfigLoader(programmaticArguments.getClassLoader()); DriverExecutionProfile defaultConfig = configLoader.getInitialConfig().getDefaultProfile(); if (cloudConfigInputStream == null) { @@ -663,7 +690,7 @@ protected final CompletionStage buildDefaultSessionAsync() { } return DefaultSession.init( - (InternalDriverContext) buildContext(configLoader, programmaticArgumentsBuilder.build()), + (InternalDriverContext) buildContext(configLoader, programmaticArguments), contactPoints, keyspace); @@ -739,8 +766,4 @@ protected DriverContext buildContext( ClassLoader classLoader) { return null; } - - private static T buildIfNull(T value, Supplier builder) { - return (value == null) ? 
builder.get() : value; - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java index 178561b323f..c4bf40eafb6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.config.typesafe; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; @@ -55,7 +56,11 @@ public class DefaultDriverConfigLoader implements DriverConfigLoader { public static final Supplier DEFAULT_CONFIG_SUPPLIER = () -> { ConfigFactory.invalidateCaches(); - return ConfigFactory.load().getConfig(DEFAULT_ROOT_PATH); + // The thread's context class loader will be used for application classpath resources, + // while the driver class loader will be used for reference classpath resources. + return ConfigFactory.defaultApplication() + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .getConfig(DEFAULT_ROOT_PATH); }; private final Supplier configSupplier; @@ -68,11 +73,34 @@ public class DefaultDriverConfigLoader implements DriverConfigLoader { * Builds a new instance with the default Typesafe config loading rules (documented in {@link * SessionBuilder#withConfigLoader(DriverConfigLoader)}) and the core driver options. This * constructor enables config reloading (that is, {@link #supportsReloading} will return true). + * + *

        Application-specific classpath resources will be located using the {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. This might not be + * suitable for OSGi deployments, which should use {@link #DefaultDriverConfigLoader(ClassLoader)} + * instead. */ public DefaultDriverConfigLoader() { this(DEFAULT_CONFIG_SUPPLIER); } + /** + * Builds a new instance with the default Typesafe config loading rules (documented in {@link + * SessionBuilder#withConfigLoader(DriverConfigLoader)}) and the core driver options, except that + * application-specific classpath resources will be located using the provided {@link ClassLoader} + * instead of {@linkplain Thread#getContextClassLoader() the current thread's context class + * loader}. This constructor enables config reloading (that is, {@link #supportsReloading} will + * return true). + */ + public DefaultDriverConfigLoader(@NonNull ClassLoader appClassLoader) { + this( + () -> { + ConfigFactory.invalidateCaches(); + return ConfigFactory.defaultApplication(appClassLoader) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .getConfig(DEFAULT_ROOT_PATH); + }); + } + /** * Builds an instance with custom arguments, if you want to load the configuration from somewhere * else. This constructor enables config reloading (that is, {@link #supportsReloading} will @@ -81,7 +109,7 @@ public DefaultDriverConfigLoader() { * @param configSupplier A supplier for the Typesafe {@link Config}; it will be invoked once when * this object is instantiated, and at each reload attempt, if reloading is enabled. */ - public DefaultDriverConfigLoader(Supplier configSupplier) { + public DefaultDriverConfigLoader(@NonNull Supplier configSupplier) { this(configSupplier, true); } @@ -93,7 +121,8 @@ public DefaultDriverConfigLoader(Supplier configSupplier) { * this object is instantiated, and at each reload attempt, if reloading is enabled. 
* @param supportsReloading Whether config reloading should be enabled or not. */ - public DefaultDriverConfigLoader(Supplier configSupplier, boolean supportsReloading) { + public DefaultDriverConfigLoader( + @NonNull Supplier configSupplier, boolean supportsReloading) { this.configSupplier = configSupplier; this.driverConfig = new TypesafeDriverConfig(configSupplier.get()); this.supportsReloading = supportsReloading; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java index 798566f97a9..802b4080cf9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.config.typesafe; +import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.config.DriverOption; @@ -36,30 +37,50 @@ public class DefaultProgrammaticDriverConfigLoaderBuilder implements ProgrammaticDriverConfigLoaderBuilder { - public static final Supplier DEFAULT_FALLBACK_SUPPLIER = - () -> ConfigFactory.defaultApplication().withFallback(ConfigFactory.defaultReference()); - private final NullAllowingImmutableMap.Builder values = NullAllowingImmutableMap.builder(); + private final Supplier fallbackSupplier; - private final String rootPath; private String currentProfileName = DriverExecutionProfile.DEFAULT_NAME; + /** + * Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} with default + * settings. + * + *

        Fallback configuration for options that haven't been specified programmatically will be + * obtained from standard classpath resources. Application-specific classpath resources will be + * located using the {@linkplain Thread#getContextClassLoader() the current thread's context class + * loader}. This might not be suitable for OSGi deployments, which should use {@link + * #DefaultProgrammaticDriverConfigLoaderBuilder(ClassLoader)} instead. + */ + public DefaultProgrammaticDriverConfigLoaderBuilder() { + this(DefaultDriverConfigLoader.DEFAULT_CONFIG_SUPPLIER); + } + + /** + * Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} with default + * settings but a custom class loader. + * + *

        Fallback configuration for options that haven't been specified programmatically will be + * obtained from standard classpath resources. Application-specific classpath resources will be + * located using the provided {@link ClassLoader} instead of {@linkplain + * Thread#getContextClassLoader() the current thread's context class loader}. + */ + public DefaultProgrammaticDriverConfigLoaderBuilder(@NonNull ClassLoader appClassLoader) { + this( + () -> + ConfigFactory.defaultApplication(appClassLoader) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH)); + } + /** * @param fallbackSupplier the supplier that will provide fallback configuration for options that * haven't been specified programmatically. - * @param rootPath the root path used in non-programmatic sources (fallback reference.conf and - * system properties). */ - public DefaultProgrammaticDriverConfigLoaderBuilder( - Supplier fallbackSupplier, String rootPath) { + public DefaultProgrammaticDriverConfigLoaderBuilder(@NonNull Supplier fallbackSupplier) { this.fallbackSupplier = fallbackSupplier; - this.rootPath = rootPath; - } - - public DefaultProgrammaticDriverConfigLoaderBuilder() { - this(DEFAULT_FALLBACK_SUPPLIER, DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); } private ProgrammaticDriverConfigLoaderBuilder with( @@ -71,9 +92,6 @@ private ProgrammaticDriverConfigLoaderBuilder with(@NonNull String path, @Nullab if (!DriverExecutionProfile.DEFAULT_NAME.equals(currentProfileName)) { path = "profiles." + currentProfileName + "." + path; } - if (!rootPath.isEmpty()) { - path = rootPath + "." 
+ path; - } values.put(path, value); return this; } @@ -210,12 +228,10 @@ public DriverConfigLoader build() { () -> { ConfigFactory.invalidateCaches(); Config programmaticConfig = buildConfig(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(programmaticConfig) - .withFallback(fallbackSupplier.get()) - .resolve(); - return rootPath.isEmpty() ? config : config.getConfig(rootPath); + return ConfigFactory.defaultOverrides() + .withFallback(programmaticConfig) + .withFallback(fallbackSupplier.get()) + .resolve(); }); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java index d992708ac8f..9d51b8228df 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java @@ -32,7 +32,7 @@ public class DefaultProgrammaticDriverConfigLoaderBuilderTest { public void should_override_option_in_default_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + () -> ConfigFactory.parseString(FALLBACK_CONFIG)) .withInt(MockOptions.INT1, 3) .build(); DriverConfig config = loader.getInitialConfig(); @@ -44,7 +44,7 @@ public void should_override_option_in_default_profile() { public void should_override_option_in_existing_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + () -> ConfigFactory.parseString(FALLBACK_CONFIG)) .startProfile("profile1") .withInt(MockOptions.INT1, 3) .build(); @@ -57,7 +57,7 @@ public void should_override_option_in_existing_profile() { 
public void should_override_option_in_new_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + () -> ConfigFactory.parseString(FALLBACK_CONFIG)) .startProfile("profile2") .withInt(MockOptions.INT1, 3) .build(); @@ -72,7 +72,7 @@ public void should_override_option_in_new_profile() { public void should_go_back_to_default_profile_when_profile_ends() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + () -> ConfigFactory.parseString(FALLBACK_CONFIG)) .startProfile("profile2") .withInt(MockOptions.INT1, 3) .endProfile() @@ -86,7 +86,7 @@ public void should_go_back_to_default_profile_when_profile_ends() { public void should_handle_multiple_programmatic_profiles() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + () -> ConfigFactory.parseString(FALLBACK_CONFIG)) .startProfile("profile2") .withInt(MockOptions.INT1, 3) .startProfile("profile3") diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 9fd66969ed8..98ac0e862e2 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -5,6 +5,7 @@ valid OSGi bundles: - `java-driver-core` - `java-driver-query-builder` +- `java-driver-mapper-runtime` - `java-driver-core-shaded` Note: some of the driver dependencies are not valid OSGi bundles. Most of them are optional, and the @@ -48,28 +49,71 @@ and the `ClassLoader` used to reflectively load the class (in this case, `ExponentialReconnectionPolicy`). To overcome these issues, you may specify a `ClassLoader` instance when constructing a `Session` -by using [withClassLoader()]. In a lot of cases, it may be adequate to pass in the `ClassLoader` -from a `Class` that is part of the core driver, i.e.: +by using [withClassLoader()]. 
+ +Alternatively, if you have access to the `BundleContext` (for example, if you are creating the +session in an `Activator` class) you can also obtain the bundle's `ClassLoader` the following way: ```java +BundleContext bundleContext = ...; +Bundle bundle = bundleContext.getBundle(); +BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); +ClassLoader classLoader = bundleWiring.getClassLoader(); CqlSession session = CqlSession.builder() - .withClassLoader(CqlSession.class.getClassLoader()) + .withClassLoader(classLoader) .build(); ``` -Alternatively, if you have access to the `BundleContext` (for example, if you are creating the -session in an `Activator` class) you can also obtain the bundle's `ClassLoader` the following way: +### Using a custom `ClassLoader` for application-bundled configuration resources + +In addition to specifying a `ClassLoader` when constructing a `Session`, you can also specify +a `ClassLoader` instance on certain `DriverConfigLoader` methods for cases when your OSGi +application bundle provides overrides to driver configuration defaults. This is typically done by +including an `application.conf` file in your application bundle. + +For example, you can use [DriverConfigLoader.fromDefaults(ClassLoader)] to use the driver's default +configuration mechanism while specifying a different class loader: ```java BundleContext bundleContext = ...; Bundle bundle = bundleContext.getBundle(); BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); ClassLoader classLoader = bundleWiring.getClassLoader(); + CqlSession session = CqlSession.builder() .withClassLoader(classLoader) + .withConfigLoader(DriverConfigLoader.fromDefaults(classLoader)) .build(); ``` +The above configuration will look for resources named `application.conf` inside the application +bundle, using the right class loader for that. 
+ +Similarly, if you want to use programmatic configuration in your application bundle, but still +want to be able to provide some configuration in an `application.conf` file, you can use +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: + +```java +BundleContext bundleContext = ...; +Bundle bundle = bundleContext.getBundle(); +BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); +ClassLoader classLoader = bundleWiring.getClassLoader(); +DriverConfigLoader loader = + DriverConfigLoader.programmaticBuilder(classLoader) + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(5)) + .startProfile("slow") + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .endProfile() + .build(); +CqlSession session = CqlSession.builder() + .withClassLoader(classLoader) + .withConfigLoader(loader) + .build(); +``` + +The above configuration will honor all programmatic settings, but will look for resources named +`application.conf` inside the application bundle, using the right class loader for that. + ## What does the "Error loading libc" DEBUG message mean? 
The driver is able to perform native system calls through [JNR] in some cases, for example to @@ -96,3 +140,5 @@ starting the driver: [JNR]: https://github.com/jnr/jnr-posix [withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- From 33fc2b8b30cf88706a8e06ba57e0436b59d11a44 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 1 Apr 2020 12:38:42 -0500 Subject: [PATCH 494/979] JAVA-2658 and JAVA-2301: Refactor OSGi tests and introduce OSGi tests for the mapper This commit introduces a new module dedicated to OSGi tests. It also declares a typical "application" bundle containing a few services that rely on the driver. The tests have been refactored to actually interrogate the application bundle services and check that they can operate normally. This commit also validates that application.conf files can be correctly loaded thanks to JAVA-2657. It also solves JAVA-2301 by introducing tests that use the mapper module. 
--- changelog/README.md | 2 + integration-tests/pom.xml | 81 ----- .../osgi/OsgiCustomLoadBalancingPolicyIT.java | 71 ---- .../oss/driver/osgi/OsgiGeoTypesIT.java | 62 ---- .../datastax/oss/driver/osgi/OsgiGraphIT.java | 69 ---- .../datastax/oss/driver/osgi/OsgiLz4IT.java | 64 ---- .../oss/driver/osgi/OsgiReactiveIT.java | 62 ---- .../oss/driver/osgi/OsgiShadedIT.java | 83 ----- .../oss/driver/osgi/OsgiSnappyIT.java | 64 ---- .../oss/driver/osgi/OsgiVanillaIT.java | 134 -------- .../driver/osgi/support/BundleOptions.java | 320 ------------------ .../osgi/support/OsgiGeoTypesTests.java | 70 ---- .../driver/osgi/support/OsgiGraphTests.java | 98 ------ .../osgi/support/OsgiReactiveTests.java | 75 ---- .../driver/osgi/support/OsgiSimpleTests.java | 87 ----- osgi-tests/README.md | 48 +++ osgi-tests/pom.xml | 274 +++++++++++++++ .../driver/api/osgi/CustomRetryPolicy.java | 26 ++ .../api/osgi/service/MailboxException.java | 23 ++ .../api/osgi/service/MailboxMessage.java | 106 ++++++ .../api/osgi/service/MailboxService.java | 41 +++ .../osgi/service/geo/GeoMailboxMessage.java | 106 ++++++ .../osgi/service/geo/GeoMailboxService.java | 28 ++ .../service/graph/GraphMailboxService.java | 27 ++ .../reactive/ReactiveMailboxService.java | 27 ++ .../internal/osgi/MailboxActivator.java | 173 ++++++++++ .../internal/osgi/service/MailboxMapper.java | 28 ++ .../osgi/service/MailboxMessageDao.java | 32 ++ .../osgi/service/MailboxServiceImpl.java | 134 ++++++++ .../osgi/service/geo/GeoMailboxMapper.java | 28 ++ .../service/geo/GeoMailboxMessageDao.java | 33 ++ .../service/geo/GeoMailboxServiceImpl.java | 100 ++++++ .../graph/GraphMailboxServiceImpl.java | 101 ++++++ .../reactive/ReactiveMailboxMapper.java | 28 ++ .../reactive/ReactiveMailboxMessageDao.java | 29 ++ .../reactive/ReactiveMailboxServiceImpl.java | 51 +++ .../src/main/resources/application.conf | 42 +++ .../osgi/OsgiCustomLoadBalancingPolicyIT.java | 64 ++++ .../driver/internal/osgi/OsgiDefaultIT.java | 57 ++++ 
.../driver/internal/osgi/OsgiGeoTypesIT.java | 63 ++++ .../oss/driver/internal/osgi/OsgiGraphIT.java | 63 ++++ .../oss/driver/internal/osgi/OsgiLz4IT.java | 55 +++ .../driver/internal/osgi/OsgiReactiveIT.java | 61 ++++ .../driver/internal/osgi/OsgiShadedIT.java | 53 +++ .../driver/internal/osgi/OsgiSnappyIT.java | 55 +++ .../osgi/checks/DefaultServiceChecks.java | 49 +++ .../osgi/checks/GeoServiceChecks.java | 46 +++ .../osgi/checks/GraphServiceChecks.java | 42 +++ .../osgi/checks/ReactiveServiceChecks.java | 51 +++ .../internal/osgi/support/BundleOptions.java | 210 ++++++++++++ .../osgi/support/CcmExamReactorFactory.java | 30 ++ .../internal/osgi/support/CcmPaxExam.java | 106 ++++++ .../osgi/support/CcmStagedReactor.java | 83 +++++ osgi-tests/src/test/resources/exam.properties | 18 + .../src/test/resources/logback-test.xml | 34 ++ pom.xml | 23 +- 56 files changed, 2645 insertions(+), 1345 deletions(-) delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiCustomLoadBalancingPolicyIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGeoTypesIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGraphIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiLz4IT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiReactiveIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGeoTypesTests.java delete mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiReactiveTests.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java create mode 100644 osgi-tests/README.md create mode 100644 osgi-tests/pom.xml create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/CustomRetryPolicy.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxException.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxMessage.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxService.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxMessage.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxService.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/graph/GraphMailboxService.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/reactive/ReactiveMailboxService.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/MailboxActivator.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMapper.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxServiceImpl.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMapper.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java create mode 100644 
osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxServiceImpl.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/graph/GraphMailboxServiceImpl.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMapper.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java create mode 100644 osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxServiceImpl.java create mode 100644 osgi-tests/src/main/resources/application.conf create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiCustomLoadBalancingPolicyIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiDefaultIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiLz4IT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiReactiveIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/DefaultServiceChecks.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GraphServiceChecks.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/ReactiveServiceChecks.java create mode 100644 
osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmExamReactorFactory.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java create mode 100644 osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java create mode 100644 osgi-tests/src/test/resources/exam.properties create mode 100644 osgi-tests/src/test/resources/logback-test.xml diff --git a/changelog/README.md b/changelog/README.md index a9130e2c297..4a32a5ef064 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.7.0 (in progress) +- [improvement] JAVA-2301: Introduce OSGi tests for the mapper +- [improvement] JAVA-2658: Refactor OSGi tests - [bug] JAVA-2657: Add ability to specify the class loader to use for application-specific classpath resources - [documentation] JAVA-2666: Document BOM and driver modules - [documentation] JAVA-2613: Improve connection pooling documentation diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 6dc31265e65..8efb8086555 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -154,32 +154,6 @@ org.apache.directory.api api-ldap-codec-standalone - 1.0.0-M26 - test - - - org.ops4j.pax.exam - pax-exam-junit4 - test - - - org.ops4j.pax.exam - pax-exam-container-native - test - - - org.ops4j.pax.exam - pax-exam-link-mvn - test - - - org.ops4j.pax.url - pax-url-wrap - test - - - org.apache.felix - org.apache.felix.framework test @@ -200,64 +174,9 @@ - - org.apache.maven.plugins - maven-surefire-plugin - - - - ${project.version} - ${assertj.version} - ${config.version} - ${commons-exec.version} - ${hdrhistogram.version} - ${jackson.version} - ${jackson-databind.version} - ${logback.version} - ${lz4.version} - ${metrics.version} - ${netty.version} - ${simulacron.version} - ${slf4j.version} - 
${snappy.version} - ${esri.version} - ${json.version} - ${legacy-jackson.version} - ${reactive-streams.version} - ${rxjava.version} - ${tinkerpop.version} - - - org.apache.maven.plugins maven-failsafe-plugin - - - - ${project.version} - ${assertj.version} - ${config.version} - ${commons-exec.version} - ${hdrhistogram.version} - ${jackson.version} - ${jackson-databind.version} - ${logback.version} - ${lz4.version} - ${metrics.version} - ${netty.version} - ${simulacron.version} - ${slf4j.version} - ${snappy.version} - ${esri.version} - ${json.version} - ${legacy-jackson.version} - ${reactive-streams.version} - ${rxjava.version} - ${tinkerpop.version} - ${awaitility.version} - - parallelizable-tests diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiCustomLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiCustomLoadBalancingPolicyIT.java deleted file mode 100644 index 4f321fea380..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiCustomLoadBalancingPolicyIT.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi; - -import static org.ops4j.pax.exam.CoreOptions.options; - -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiSimpleTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -/** - * Test that uses a policy from a separate bundle from the core driver to ensure that the driver is - * able to load that policy via Reflection. To support this, the driver uses - * DynamicImport-Package: *. 
- */ -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -public class OsgiCustomLoadBalancingPolicyIT implements OsgiSimpleTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - return options( - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles()); - } - - @Override - public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder() - .withClass( - DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, SortingLoadBalancingPolicy.class); - } - - @Test - public void should_connect_and_query_with_custom_lbp() { - connectAndQuerySimple(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGeoTypesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGeoTypesIT.java deleted file mode 100644 index d69363888d6..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGeoTypesIT.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiGeoTypesTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -@DseRequirement(min = "5.0", description = "Requires geo types") -public class OsgiGeoTypesIT implements OsgiGeoTypesTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles(), - BundleOptions.esriBundles()); - } - - @Test - public void should_connect_and_query_without_geo_types() { - connectAndQuerySimple(); - } - - @Test - public void should_connect_and_query_with_geo_types() { - connectAndQueryGeoTypes(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGraphIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGraphIT.java deleted file mode 100644 index 0161e72133f..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiGraphIT.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright DataStax, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiGraphTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -@DseRequirement(min = "5.0", description = "Requires Graph") -public class OsgiGraphIT implements OsgiGraphTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withNodes(1).withDseWorkloads("graph").build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles(), - BundleOptions.tinkerpopBundles()); - } - - 
@Test - public void should_connect_and_query_without_graph() { - connectAndQuerySimple(); - } - - @Test - public void should_connect_and_query_with_graph() { - connectAndQueryGraph(); - } - - @Override - public Version getDseVersion() { - return CCM_RULE.getDseVersion().orElseThrow(IllegalStateException::new); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiLz4IT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiLz4IT.java deleted file mode 100644 index ead8b8dcd07..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiLz4IT.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiSimpleTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -public class OsgiLz4IT implements OsgiSimpleTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.lz4Bundle(), - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles()); - } - - @Override - public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder() - .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "lz4"); - } - - @Test - public void should_connect_and_query_with_lz4_compression() { - connectAndQuerySimple(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiReactiveIT.java deleted file mode 100644 index 44164c78175..00000000000 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiReactiveIT.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiReactiveTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -@DseRequirement(min = "4.7") -public class OsgiReactiveIT implements OsgiReactiveTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles(), - 
BundleOptions.reactiveBundles()); - } - - @Test - public void should_connect_and_query_without_reactive() { - connectAndQuerySimple(); - } - - @Test - public void should_connect_and_query_with_reactive() { - connectAndQueryReactive(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java deleted file mode 100644 index 662f926f390..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiShadedIT.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiGeoTypesTests; -import com.datastax.oss.driver.osgi.support.OsgiGraphTests; -import com.datastax.oss.driver.osgi.support.OsgiReactiveTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -@DseRequirement(min = "5.0", description = "Requires Graph and geo types") -public class OsgiShadedIT implements OsgiReactiveTests, OsgiGraphTests, OsgiGeoTypesTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withNodes(1).withDseWorkloads("graph").build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.driverCoreShadedBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - // do not include ESRI nor Jackson as they are shaded; include Rx and Tinkerpop because they - // are not shaded - BundleOptions.reactiveBundles(), - BundleOptions.tinkerpopBundles()); - } - - @Override - public Version getDseVersion() { - return CCM_RULE.getDseVersion().orElseThrow(IllegalStateException::new); - } - - @Test - public void should_connect_and_query_shaded_simple() { - connectAndQuerySimple(); - } - - @Test - public void 
should_connect_and_query_shaded_with_geo_types() { - connectAndQueryGeoTypes(); - } - - @Test - public void should_connect_and_query_shaded_with_graph() { - connectAndQueryGraph(); - } - - @Test - public void should_connect_and_query_shaded_with_reactive() { - connectAndQueryReactive(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java deleted file mode 100644 index fd4d206e6c0..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiSnappyIT.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi; - -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiSimpleTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) -public class OsgiSnappyIT implements OsgiSimpleTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - return CoreOptions.options( - BundleOptions.snappyBundle(), - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles()); - } - - @Override - public ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder() - .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "snappy"); - } - - @Test - public void should_connect_and_query_with_snappy_compression() { - connectAndQuerySimple(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java deleted file mode 100644 index 1e492e1d381..00000000000 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/OsgiVanillaIT.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi; - -import static org.assertj.core.api.Assertions.assertThat; - -import ch.qos.logback.classic.Level; -import ch.qos.logback.classic.Logger; -import ch.qos.logback.classic.spi.ILoggingEvent; -import ch.qos.logback.core.AppenderBase; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.categories.IsolatedTests; -import com.datastax.oss.driver.osgi.support.BundleOptions; -import com.datastax.oss.driver.osgi.support.OsgiSimpleTests; -import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.stream.Collectors; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.ops4j.pax.exam.Configuration; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.Option; -import org.ops4j.pax.exam.junit.PaxExam; -import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; -import org.ops4j.pax.exam.spi.reactors.PerMethod; -import org.slf4j.LoggerFactory; - -@RunWith(PaxExam.class) -@ExamReactorStrategy(PerMethod.class) -@Category(IsolatedTests.class) 
-@DseRequirement(min = "4.7") -public class OsgiVanillaIT implements OsgiSimpleTests { - - @ClassRule - public static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(1).build(); - - @Configuration - public Option[] config() { - // this configuration purposely excludes bundles whose resolution is optional: - // ESRI, Reactive Streams and Tinkerpop. This allows to validate that the driver can still - // work properly in an OSGi container as long as the missing packages are not accessed. - return CoreOptions.options( - BundleOptions.driverCoreBundle(), - BundleOptions.driverQueryBuilderBundle(), - BundleOptions.baseOptions(), - BundleOptions.jacksonBundles()); - } - - @Before - public void addTestAppender() { - Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.oss.driver"); - Level oldLevel = logger.getLevel(); - logger.getLoggerContext().putObject("oldLevel", oldLevel); - logger.setLevel(Level.INFO); - TestAppender appender = new TestAppender(); - logger.addAppender(appender); - appender.start(); - } - - @After - public void removeTestAppender() { - Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.oss.driver"); - logger.detachAppender("test"); - Level oldLevel = (Level) logger.getLoggerContext().getObject("oldLevel"); - logger.setLevel(oldLevel); - } - - @Test - public void should_connect_and_query_simple() { - connectAndQuerySimple(); - assertLogMessagesPresent(); - } - - private void assertLogMessagesPresent() { - Logger logger = (Logger) LoggerFactory.getLogger("com.datastax.oss.driver"); - TestAppender appender = (TestAppender) logger.getAppender("test"); - List infoLogs = - appender.events.stream() - .filter(event -> event.getLevel().toInt() == Level.INFO.toInt()) - .map(ILoggingEvent::getFormattedMessage) - .collect(Collectors.toList()); - assertThat(infoLogs) - .anySatisfy( - msg -> - assertThat(msg) - .contains( - "Could not register Geo codecs; this is normal if ESRI was explicitly " - + "excluded from classpath")) 
- .anySatisfy( - msg -> - assertThat(msg) - .contains( - "Could not register Reactive extensions; this is normal if Reactive " - + "Streams was explicitly excluded from classpath")) - .anySatisfy( - msg -> - assertThat(msg) - .contains( - "Could not register Graph extensions; this is normal if Tinkerpop was " - + "explicitly excluded from classpath")); - } - - private static class TestAppender extends AppenderBase { - - private final List events = new CopyOnWriteArrayList<>(); - - private TestAppender() { - name = "test"; - } - - @Override - protected void append(ILoggingEvent event) { - events.add(event); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java deleted file mode 100644 index d5846b20861..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/BundleOptions.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi.support; - -import static org.ops4j.pax.exam.CoreOptions.bundle; -import static org.ops4j.pax.exam.CoreOptions.junitBundles; -import static org.ops4j.pax.exam.CoreOptions.mavenBundle; -import static org.ops4j.pax.exam.CoreOptions.options; -import static org.ops4j.pax.exam.CoreOptions.systemProperty; - -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.shaded.guava.common.base.Charsets; -import com.datastax.oss.driver.shaded.guava.common.base.Splitter; -import com.google.common.io.CharSource; -import com.google.common.io.Files; -import java.io.File; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.List; -import org.ops4j.pax.exam.CoreOptions; -import org.ops4j.pax.exam.options.CompositeOption; -import org.ops4j.pax.exam.options.MavenArtifactProvisionOption; -import org.ops4j.pax.exam.options.UrlProvisionOption; -import org.ops4j.pax.exam.options.WrappedUrlProvisionOption; -import org.ops4j.pax.exam.util.PathUtils; - -public class BundleOptions { - - public static CompositeOption baseOptions() { - // In theory, the options declared here should only include bundles that must be present - // in order for both the non-shaded and shaded driver versions to work properly. - // Bundles that should be present only for the non-shaded driver version should be declared - // elsewhere. - // However we have two exceptions: Netty and FasterXML Jackson; both need to be present in all - // cases because the test bundles requires their presence (see #testBundles method). 
- return () -> - options( - mavenBundle( - "com.datastax.oss", - "java-driver-shaded-guava", - getVersionFromDepsTxt("com.datastax.oss:java-driver-shaded-guava")), - mavenBundle( - "io.dropwizard.metrics", - "metrics-core", - getVersionFromSystemProperty("metrics.version")), - mavenBundle("org.slf4j", "slf4j-api", getVersionFromSystemProperty("slf4j.version")), - mavenBundle( - "org.hdrhistogram", - "HdrHistogram", - getVersionFromSystemProperty("hdrhistogram.version")), - mavenBundle("com.typesafe", "config", getVersionFromSystemProperty("config.version")), - mavenBundle( - "com.datastax.oss", - "native-protocol", - getVersionFromDepsTxt("com.datastax.oss:native-protocol")), - logbackBundles(), - systemProperty("logback.configurationFile") - .value("file:" + PathUtils.getBaseDir() + "/src/test/resources/logback-test.xml"), - testBundles()); - } - - public static UrlProvisionOption driverCoreBundle() { - return bundle( - "reference:file:" - + PathUtils.getBaseDir() - + "/../core/target/java-driver-core-" - + getVersionFromSystemProperty("project.version") - + ".jar"); - } - - public static UrlProvisionOption driverCoreShadedBundle() { - return bundle( - "reference:file:" - + PathUtils.getBaseDir() - + "/../core-shaded/target/java-driver-core-shaded-" - + getVersionFromSystemProperty("project.version") - + ".jar"); - } - - public static UrlProvisionOption driverQueryBuilderBundle() { - return bundle( - "reference:file:" - + PathUtils.getBaseDir() - + "/../query-builder/target/java-driver-query-builder-" - + getVersionFromSystemProperty("project.version") - + ".jar"); - } - - public static UrlProvisionOption driverTestInfraBundle() { - return bundle( - "reference:file:" - + PathUtils.getBaseDir() - + "/../test-infra/target/java-driver-test-infra-" - + getVersionFromSystemProperty("project.version") - + ".jar"); - } - - public static CompositeOption testBundles() { - return () -> - options( - driverTestInfraBundle(), - simulacronBundles(), - awaitilityBundles(), - 
nettyBundles(), // required by the test infra bundle, even for the shaded jar - jacksonBundles(), // required by the Simulacron bundle, even for the shaded jar - mavenBundle( - "org.apache.commons", - "commons-exec", - getVersionFromSystemProperty("commons-exec.version")), - mavenBundle( - "org.assertj", "assertj-core", getVersionFromSystemProperty("assertj.version")), - junitBundles()); - } - - public static CompositeOption nettyBundles() { - String nettyVersion = getVersionFromSystemProperty("netty.version"); - return () -> - options( - mavenBundle("io.netty", "netty-handler", nettyVersion), - mavenBundle("io.netty", "netty-buffer", nettyVersion), - mavenBundle("io.netty", "netty-codec", nettyVersion), - mavenBundle("io.netty", "netty-common", nettyVersion), - mavenBundle("io.netty", "netty-transport", nettyVersion), - mavenBundle("io.netty", "netty-resolver", nettyVersion)); - } - - public static CompositeOption logbackBundles() { - String logbackVersion = getVersionFromSystemProperty("logback.version"); - return () -> - options( - mavenBundle("ch.qos.logback", "logback-classic", logbackVersion), - mavenBundle("ch.qos.logback", "logback-core", logbackVersion)); - } - - public static CompositeOption jacksonBundles() { - String jacksonVersion = getVersionFromSystemProperty("jackson.version"); - String jacksonDatabindVersion = getVersionFromSystemProperty("jackson-databind.version"); - return () -> - options( - mavenBundle("com.fasterxml.jackson.core", "jackson-databind", jacksonDatabindVersion), - mavenBundle("com.fasterxml.jackson.core", "jackson-core", jacksonVersion), - mavenBundle("com.fasterxml.jackson.core", "jackson-annotations", jacksonVersion)); - } - - public static CompositeOption simulacronBundles() { - String simulacronVersion = getVersionFromSystemProperty("simulacron.version"); - return () -> - options( - mavenBundle( - "com.datastax.oss.simulacron", "simulacron-native-server", simulacronVersion), - mavenBundle("com.datastax.oss.simulacron", 
"simulacron-common", simulacronVersion), - mavenBundle( - "com.datastax.oss.simulacron", - "simulacron-native-protocol-json", - simulacronVersion)); - } - - public static CompositeOption awaitilityBundles() { - String awaitilityVersion = getVersionFromSystemProperty("awaitility.version"); - return () -> - options( - mavenBundle("org.awaitility", "awaitility", awaitilityVersion), - mavenBundle("org.hamcrest", "hamcrest", "2.1")); - } - - public static MavenArtifactProvisionOption lz4Bundle() { - return mavenBundle("org.lz4", "lz4-java", getVersionFromSystemProperty("lz4.version")); - } - - public static MavenArtifactProvisionOption snappyBundle() { - return mavenBundle( - "org.xerial.snappy", "snappy-java", getVersionFromSystemProperty("snappy.version")); - } - - public static CompositeOption tinkerpopBundles() { - String mavenVersion = getVersionFromSystemProperty("tinkerpop.version"); - String osgiVersion = toOsgiTinkerpopVersion(mavenVersion); - return () -> - options( - CoreOptions.wrappedBundle( - mavenBundle("org.apache.tinkerpop", "gremlin-core", mavenVersion)) - .exports( - // avoid exporting 'org.apache.tinkerpop.gremlin.*' as other Tinkerpop jars have - // this root package as well - "org.apache.tinkerpop.gremlin.jsr223.*", - "org.apache.tinkerpop.gremlin.process.*", - "org.apache.tinkerpop.gremlin.structure.*", - "org.apache.tinkerpop.gremlin.util.*") - .bundleVersion(osgiVersion) - .bundleSymbolicName("org.apache.tinkerpop.gremlin-core") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - CoreOptions.wrappedBundle( - mavenBundle("org.apache.tinkerpop", "gremlin-driver", mavenVersion)) - .exports("org.apache.tinkerpop.gremlin.driver.*") - .bundleVersion(osgiVersion) - .bundleSymbolicName("org.apache.tinkerpop.gremlin-driver") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - CoreOptions.wrappedBundle( - mavenBundle("org.apache.tinkerpop", "tinkergraph-gremlin", mavenVersion)) - 
.exports("org.apache.tinkerpop.gremlin.tinkergraph.*") - .bundleVersion(osgiVersion) - .bundleSymbolicName("org.apache.tinkerpop.tinkergraph-gremlin") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - CoreOptions.wrappedBundle( - mavenBundle("org.apache.tinkerpop", "gremlin-shaded", mavenVersion)) - .exports("org.apache.tinkerpop.shaded.*") - .bundleVersion(osgiVersion) - .bundleSymbolicName("org.apache.tinkerpop.gremlin-shaded") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - // Note: the versions below are hard-coded because they shouldn't change very often, - // but if the tests fail because of them, we should consider parameterizing them - mavenBundle("commons-configuration", "commons-configuration", "1.10"), - mavenBundle("commons-collections", "commons-collections", "3.2.2"), - mavenBundle("org.apache.commons", "commons-lang3", "3.8.1"), - mavenBundle("commons-lang", "commons-lang", "2.6"), - CoreOptions.wrappedBundle(mavenBundle("org.javatuples", "javatuples", "1.2")) - .exports("org.javatuples.*") - .bundleVersion("1.2") - .bundleSymbolicName("org.javatuples") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL)); - } - - public static CompositeOption esriBundles() { - return () -> - options( - CoreOptions.wrappedBundle( - mavenBundle( - "com.esri.geometry", - "esri-geometry-api", - getVersionFromSystemProperty("esri.version"))) - .exports("com.esri.core.geometry.*") - .imports("org.json", "org.codehaus.jackson") - .bundleVersion(getVersionFromSystemProperty("esri.version")) - .bundleSymbolicName("com.esri.core.geometry") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - mavenBundle("org.json", "json", getVersionFromSystemProperty("json.version")), - mavenBundle( - "org.codehaus.jackson", - "jackson-core-asl", - getVersionFromSystemProperty("legacy-jackson.version"))); - } - - public static CompositeOption reactiveBundles() { - return () -> - options( - mavenBundle( - 
"org.reactivestreams", - "reactive-streams", - getVersionFromSystemProperty("reactive-streams.version")), - mavenBundle( - "io.reactivex.rxjava2", "rxjava", getVersionFromSystemProperty("rxjava.version"))); - } - - private static String getVersionFromSystemProperty(String propertyName) { - String value = System.getProperty(propertyName); - if (value == null) { - throw new IllegalArgumentException(propertyName + " system property is not set"); - } - return value; - } - - /** - * Some versions are not available as system properties because they are hardcoded in the BOM. - * - *

        Rely on the deps.txt file instead. - */ - private static String getVersionFromDepsTxt(String searchString) { - for (String dependency : DepsTxtLoader.lines) { - if (dependency.contains(searchString)) { - List components = Splitter.on(':').splitToList(dependency); - return components.get(components.size() - 2); - } - } - throw new IllegalStateException("Couldn't find version for " + searchString); - } - - private static class DepsTxtLoader { - - private static List lines; - - static { - String path = - PathUtils.getBaseDir() - + "/../core/target/classes/com/datastax/dse/driver/internal/deps.txt"; - CharSource charSource = Files.asCharSource(new File(path), Charsets.UTF_8); - - try { - lines = charSource.readLines(); - } catch (IOException e) { - throw new UncheckedIOException( - "Couldn't load deps.txt for driver core, " - + "make sure you run `mvn generate-resources` before running this test", - e); - } - } - } - - private static String toOsgiTinkerpopVersion(String inVersion) { - - Version inVersionObj = Version.parse(inVersion); - return String.join( - ".", - Integer.toString(inVersionObj.getMajor()), - Integer.toString(inVersionObj.getMinor()), - Integer.toString(inVersionObj.getPatch())); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGeoTypesTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGeoTypesTests.java deleted file mode 100644 index d9b7ad5a8e2..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGeoTypesTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi.support; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; -import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.dse.driver.api.core.data.geometry.Point; -import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.type.DataTypes; - -public interface OsgiGeoTypesTests extends OsgiSimpleTests { - - /** - * Ensures a session can be established and a query using Geo Types can be made when running in an - * OSGi container. 
- */ - default void connectAndQueryGeoTypes() { - - try (CqlSession session = sessionBuilder().build()) { - - session.execute(String.format(CREATE_KEYSPACE, "test_osgi_geo")); - - // test that ESRI is available - session.execute( - // also exercise the DSE query builder - DseSchemaBuilder.createTable("test_osgi_geo", "t1") - .ifNotExists() - .withPartitionKey("pk", DataTypes.INT) - .withColumn("v", DataTypes.custom("PointType")) - .build()); - - Point point = Point.fromCoordinates(-1.0, -5); - - session.execute( - SimpleStatement.newInstance("INSERT INTO test_osgi_geo.t1 (pk, v) VALUES (0, ?)", point)); - - Row row = - session - .execute( - // test that the Query Builder is availabconnectAndQueryle - selectFrom("test_osgi_geo", "t1") - .column("v") - .where(column("pk").isEqualTo(literal(0))) - .build()) - .one(); - - assertThat(row).isNotNull(); - assertThat(row.get(0, Point.class)).isEqualTo(point); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java deleted file mode 100644 index 0e043ab7c6b..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiGraphTests.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.osgi.support; - -import static com.datastax.dse.driver.api.core.graph.DseGraph.g; -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; -import com.datastax.dse.driver.api.core.graph.GraphNode; -import com.datastax.dse.driver.api.core.graph.GraphResultSet; -import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; -import com.datastax.dse.driver.internal.core.graph.GraphProtocol; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import java.util.List; -import org.apache.tinkerpop.gremlin.structure.Vertex; - -public interface OsgiGraphTests extends OsgiSimpleTests { - - @Override - default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder() - .withString(DseDriverOption.GRAPH_NAME, "test_osgi_graph") - .withString( - DseDriverOption.GRAPH_SUB_PROTOCOL, - getDseVersion().compareTo(Version.parse("6.8.0")) >= 0 - ? GraphProtocol.GRAPH_BINARY_1_0.toInternalCode() - : GraphProtocol.GRAPHSON_2_0.toInternalCode()); - } - - Version getDseVersion(); - - /** - * Ensures a session can be established and a query using DSE Graph can be made when running in an - * OSGi container. 
- */ - default void connectAndQueryGraph() { - - try (CqlSession session = sessionBuilder().build()) { - - // Test that Graph + Tinkerpop is available - session.execute( - ScriptGraphStatement.newInstance("system.graph('test_osgi_graph').ifNotExists().create()") - .setSystemQuery(true)); - - if (getDseVersion().compareTo(Version.parse("6.8.0")) >= 0) { - setUpCoreEngineGraph(session); - } else { - setUpClassicEngineGraph(session); - } - - GraphResultSet resultSet = - session.execute(FluentGraphStatement.newInstance(g.V().hasLabel("person"))); - List results = resultSet.all(); - assertThat(results.size()).isEqualTo(1); - Vertex actual = results.get(0).asVertex(); - assertThat(actual.label()).isEqualTo("person"); - } - } - - default void setUpCoreEngineGraph(CqlSession session) { - session.execute( - ScriptGraphStatement.newInstance( - "schema.vertexLabel('person').ifNotExists().partitionBy('pk', Int)" - + ".clusterBy('cc', Int).property('name', Text).create();")); - session.execute( - ScriptGraphStatement.newInstance( - "g.addV('person').property('pk',0).property('cc',0).property('name', 'alice');")); - } - - default void setUpClassicEngineGraph(CqlSession session) { - session.execute( - ScriptGraphStatement.newInstance( - "schema.propertyKey('name').Text().ifNotExists().create();" - + "schema.vertexLabel('person').properties('name').ifNotExists().create();")); - session.execute( - ScriptGraphStatement.newInstance("g.addV('person').property('name', 'alice').next();")); - session.execute( - ScriptGraphStatement.newInstance( - "schema.config().option('graph.allow_scan').set('true');")); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiReactiveTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiReactiveTests.java deleted file mode 100644 index cd7b8119df8..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiReactiveTests.java +++ /dev/null @@ -1,75 +0,0 
@@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi.support; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; -import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.type.DataTypes; -import io.reactivex.Flowable; - -public interface OsgiReactiveTests extends OsgiSimpleTests { - - /** - * Ensures a session can be established and a query using Reactive can be made when running in an - * OSGi container. 
- */ - default void connectAndQueryReactive() { - - try (CqlSession session = sessionBuilder().build()) { - - Flowable.fromPublisher( - session.executeReactive(String.format(CREATE_KEYSPACE, "test_osgi_reactive"))) - .blockingSubscribe(); - - // test that ESRI is available - Flowable.fromPublisher( - session.executeReactive( - // also exercise the DSE query builder - DseSchemaBuilder.createTable("test_osgi_reactive", "t1") - .ifNotExists() - .withPartitionKey("pk", DataTypes.INT) - .withColumn("v", DataTypes.INT) - .build())) - .blockingSubscribe(); - - Flowable.fromPublisher( - session.executeReactive( - SimpleStatement.newInstance( - "INSERT INTO test_osgi_reactive.t1 (pk, v) VALUES (0, 1)"))) - .blockingSubscribe(); - - Row row = - Flowable.fromPublisher( - session.executeReactive( - // test that the Query Builder is available - selectFrom("test_osgi_reactive", "t1") - .column("v") - .where(column("pk").isEqualTo(literal(0))) - .build())) - .blockingFirst(); - - assertThat(row).isNotNull(); - assertThat(row.getInt(0)).isEqualTo(1); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java b/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java deleted file mode 100644 index 9108e0e4c8a..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/osgi/support/OsgiSimpleTests.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.osgi.support; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; -import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; -import static org.assertj.core.api.Assertions.assertThat; - -import com.datastax.dse.driver.api.querybuilder.DseSchemaBuilder; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.CqlSessionBuilder; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; -import java.net.InetSocketAddress; - -public interface OsgiSimpleTests { - - String CREATE_KEYSPACE = - "CREATE KEYSPACE IF NOT EXISTS %s " - + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"; - - /** @return config loader builder to be used to create session. */ - default ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { - return DriverConfigLoader.programmaticBuilder(); - } - - /** @return The session builder to use for the OSGi tests. */ - default CqlSessionBuilder sessionBuilder() { - return CqlSession.builder() - .addContactEndPoint(new DefaultEndPoint(new InetSocketAddress("127.0.0.1", 9042))) - .withConfigLoader(configLoaderBuilder().build()); - } - - /** - * A very simple test that ensures a session can be established and a query made when running in - * an OSGi container. 
- */ - default void connectAndQuerySimple() { - - try (CqlSession session = sessionBuilder().build()) { - - session.execute(String.format(CREATE_KEYSPACE, "test_osgi")); - - session.execute( - // Exercise the DSE query builder - DseSchemaBuilder.createTable("test_osgi", "t1") - .ifNotExists() - .withPartitionKey("pk", DataTypes.INT) - .withColumn("v", DataTypes.INT) - .build()); - - session.execute( - SimpleStatement.newInstance("INSERT INTO test_osgi.t1 (pk, v) VALUES (0, 1)")); - - Row row = - session - .execute( - // test that the Query Builder is available - selectFrom("test_osgi", "t1") - .column("v") - .where(column("pk").isEqualTo(literal(0))) - .build()) - .one(); - - assertThat(row).isNotNull(); - assertThat(row.getInt(0)).isEqualTo(1); - } - } -} diff --git a/osgi-tests/README.md b/osgi-tests/README.md new file mode 100644 index 00000000000..bb8d1e3c5f1 --- /dev/null +++ b/osgi-tests/README.md @@ -0,0 +1,48 @@ +# DataStax Java Driver OSGi Tests + +This module contains OSGi tests for the driver. + +It declares a typical "application" bundle containing a few services that rely +on the driver, see `src/main`. + +The integration tests in `src/tests` interrogate the application bundle services +and check that they can operate normally. They exercise different provisioning +configurations to ensure that the driver is usable in most cases. + +## Running the tests + +In order to run the OSGi tests, all other driver modules must have been +previously compiled, that is, their respective `target/classes` directory must +be up-to-date and contain not only the class files, but also an up-to-date OSGi +manifest. + +Therefore, it is recommended to always compile all modules and run the OSGi +integration tests in one single pass, which can be easily done by running, +from the driver's parent module directory: + + mvn clean verify + +This will however also run other integration tests, and might take a long time +to finish. 
If you prefer to skip other integration tests, and only run the +OSGi ones, you can do so as follows: + + mvn clean verify \ + -DskipParallelizableITs=true \ + -DskipSerialITs=true \ + -DskipIsolatedITs=true + +You can pass the following system properties to your tests: + +1. `ccm.version`: the CCM version to use +2. `ccm.dse`: whether to use DSE +3. `osgi.debug`: whether to enable remote debugging of the OSGi container (see + below). + +## Debugging OSGi tests + +First, you can enable DEBUG logs for the Pax Exam framework by editing the +`src/tests/resources/logback-test.xml` file. + +Alternatively, you can debug the remote OSGi container by passing the system +property `-Dosgi.debug=true`. In this case the framework will prompt for a +remote debugger on port 5005. \ No newline at end of file diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml new file mode 100644 index 00000000000..31d3c0c9b7f --- /dev/null +++ b/osgi-tests/pom.xml @@ -0,0 +1,274 @@ + + + + 4.0.0 + + com.datastax.oss + java-driver-parent + 4.7.0-SNAPSHOT + + java-driver-osgi-tests + jar + DataStax Java driver for Apache Cassandra(R) - OSGi tests + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + + + + com.datastax.oss + java-driver-core + + + com.datastax.oss + java-driver-query-builder + + + com.datastax.oss + java-driver-mapper-processor + + + com.datastax.oss + java-driver-mapper-runtime + + + com.github.spotbugs + spotbugs-annotations + + + ch.qos.logback + logback-classic + + + com.datastax.oss + java-driver-shaded-guava + + + org.xerial.snappy + snappy-java + + + org.lz4 + lz4-java + + + org.reactivestreams + reactive-streams + + + org.osgi + org.osgi.core + provided + + + com.datastax.oss + java-driver-test-infra + test + + + org.ops4j.pax.exam + pax-exam-junit4 + test + + + org.ops4j.pax.exam + pax-exam-container-forked + test + + + org.ops4j.pax.exam + pax-exam-link-mvn + test + + + org.ops4j.pax.url + pax-url-wrap + test + + + org.ops4j.pax.url + 
pax-url-reference + test + + + javax.inject + javax.inject + test + + + org.apache.felix + org.apache.felix.framework + test + + + org.assertj + assertj-core + test + + + org.apache.commons + commons-exec + test + + + io.reactivex.rxjava2 + rxjava + test + + + org.awaitility + awaitility + test + + + + + + org.apache.servicemix.tooling + depends-maven-plugin + 1.4.0 + + + generate-depends-file + + generate-depends-file + + + + + + + org.ops4j + maven-pax-plugin + 1.6.0 + + felix + true + + --platform=felix + --version=${felix.version} + --log=debug + --bootDelegation=sun.misc + + + + + org.apache.felix + maven-bundle-plugin + + + com.datastax.oss.driver.osgi + com.datastax.oss.driver.internal.osgi.MailboxActivator + com.datastax.oss.driver.api.osgi.* + com.datastax.oss.driver.internal.osgi.* + !net.jcip.annotations.*,!edu.umd.cs.findbugs.annotations.*,org.apache.tinkerpop.*;resolution:=optional,* + <_include>-osgi.bnd + + + + + bundle-manifest + process-classes + + manifest + + + + + + maven-surefire-plugin + + + ${project.basedir}/src/test/resources/logback-test.xml + + + + + maven-failsafe-plugin + + + osgi-tests + + integration-test + verify + + + + + + ${project.basedir}/src/test/resources/logback-test.xml + + classes + 1 + + + + org.revapi + revapi-maven-plugin + + true + + + + maven-jar-plugin + + true + + + + maven-javadoc-plugin + + true + + + + maven-source-plugin + + true + + + + maven-install-plugin + + true + + + + maven-deploy-plugin + + true + + + + + diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/CustomRetryPolicy.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/CustomRetryPolicy.java new file mode 100644 index 00000000000..6d73745a8f1 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/CustomRetryPolicy.java @@ -0,0 +1,26 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.osgi; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.retry.DefaultRetryPolicy; + +public class CustomRetryPolicy extends DefaultRetryPolicy { + + public CustomRetryPolicy(DriverContext context, String profileName) { + super(context, profileName); + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxException.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxException.java new file mode 100644 index 00000000000..1075ea8ff3a --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxException.java @@ -0,0 +1,23 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service; + +public class MailboxException extends Exception { + + public MailboxException(Throwable cause) { + super("Failure interacting with Mailbox", cause); + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxMessage.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxMessage.java new file mode 100644 index 00000000000..9dd61760888 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxMessage.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Instant; +import java.util.Objects; + +@Entity +@CqlName("messages_by_recipient") +public class MailboxMessage { + + public static final CqlIdentifier MAILBOX_TABLE = + CqlIdentifier.fromInternal("messages_by_recipient"); + + @PartitionKey private String recipient; + + @ClusteringColumn private Instant timestamp; + + private String sender; + + private String body; + + public MailboxMessage() {} + + public MailboxMessage( + @NonNull String recipient, + @NonNull Instant timestamp, + @NonNull String sender, + @NonNull String body) { + this.recipient = recipient; + this.timestamp = timestamp; + this.sender = sender; + this.body = body; + } + + public String getRecipient() { + return recipient; + } + + public void setRecipient(String recipient) { + this.recipient = recipient; + } + + public Instant getTimestamp() { + return timestamp; + } + + public void setTimestamp(Instant timestamp) { + this.timestamp = timestamp; + } + + public String getSender() { + return sender; + } + + public void setSender(String sender) { + this.sender = sender; + } + + public String getBody() { + return body; + } + + public void setBody(String body) { + this.body = body; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof MailboxMessage)) { + return false; + } + MailboxMessage that = (MailboxMessage) o; + return Objects.equals(recipient, that.recipient) + && Objects.equals(timestamp, that.timestamp) + && Objects.equals(sender, that.sender) + && Objects.equals(body, that.body); + } + + 
@Override + public int hashCode() { + return Objects.hash(recipient, timestamp, sender, body); + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxService.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxService.java new file mode 100644 index 00000000000..8ade73d14a8 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/MailboxService.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.osgi.service; + +public interface MailboxService { + + /** + * Retrieve all messages for a given recipient. + * + * @param recipient User whose mailbox is being read. + * @return All messages in the mailbox. + */ + Iterable getMessages(String recipient) throws MailboxException; + + /** + * Stores the given message in the appropriate mailbox. + * + * @param message Message to send. + */ + void sendMessage(MailboxMessage message) throws MailboxException; + + /** + * Deletes all mail for the given recipient. + * + * @param recipient User whose mailbox will be cleared. 
+ */ + void clearMailbox(String recipient) throws MailboxException; +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxMessage.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxMessage.java new file mode 100644 index 00000000000..63b240fe7f5 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxMessage.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service.geo; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +@Entity +@CqlName("messages_by_location") +public class GeoMailboxMessage { + + public static final CqlIdentifier MAILBOX_TABLE = + CqlIdentifier.fromInternal("messages_by_location"); + + @PartitionKey private String recipient; + + @ClusteringColumn private Point location; + + private String sender; + + private String body; + + public GeoMailboxMessage() {} + + public GeoMailboxMessage( + @NonNull String recipient, + @NonNull Point location, + @NonNull String sender, + @NonNull String body) { + this.location = location; + this.recipient = recipient; + this.sender = sender; + this.body = body; + } + + public String getRecipient() { + return recipient; + } + + public void setRecipient(String recipient) { + this.recipient = recipient; + } + + public Point getLocation() { + return location; + } + + public void setLocation(Point location) { + this.location = location; + } + + public String getSender() { + return sender; + } + + public void setSender(String sender) { + this.sender = sender; + } + + public String getBody() { + return body; + } + + public void setBody(String body) { + this.body = body; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof GeoMailboxMessage)) { + return false; + } + GeoMailboxMessage that = (GeoMailboxMessage) o; + return Objects.equals(recipient, that.recipient) + && Objects.equals(location, that.location) + && Objects.equals(sender, that.sender) + && 
Objects.equals(body, that.body); + } + + @Override + public int hashCode() { + return Objects.hash(recipient, location, sender, body); + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxService.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxService.java new file mode 100644 index 00000000000..d56f193860a --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/geo/GeoMailboxService.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service.geo; + +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxService; + +public interface GeoMailboxService extends MailboxService { + + void sendGeoMessage(GeoMailboxMessage message) throws MailboxException; + + Iterable getGeoMessages(String recipient) throws MailboxException; + + void clearGeoMailbox(String recipient) throws MailboxException; +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/graph/GraphMailboxService.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/graph/GraphMailboxService.java new file mode 100644 index 00000000000..be54ea2e167 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/graph/GraphMailboxService.java @@ -0,0 +1,27 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service.graph; + +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.MailboxService; + +public interface GraphMailboxService extends MailboxService { + + void sendGraphMessage(MailboxMessage message) throws MailboxException; + + Iterable getGraphMessages(String recipient) throws MailboxException; +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/reactive/ReactiveMailboxService.java b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/reactive/ReactiveMailboxService.java new file mode 100644 index 00000000000..833c16904a9 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/api/osgi/service/reactive/ReactiveMailboxService.java @@ -0,0 +1,27 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.osgi.service.reactive; + +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.MailboxService; + +public interface ReactiveMailboxService extends MailboxService { + + MappedReactiveResultSet getMessagesReactive(String recipient) + throws MailboxException; +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/MailboxActivator.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/MailboxActivator.java new file mode 100644 index 00000000000..b4e448adf77 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/MailboxActivator.java @@ -0,0 +1,173 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.internal.core.graph.GraphProtocol; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.internal.osgi.service.MailboxServiceImpl; +import com.datastax.oss.driver.internal.osgi.service.geo.GeoMailboxServiceImpl; +import com.datastax.oss.driver.internal.osgi.service.graph.GraphMailboxServiceImpl; +import com.datastax.oss.driver.internal.osgi.service.reactive.ReactiveMailboxServiceImpl; +import java.net.InetSocketAddress; +import java.util.Dictionary; +import java.util.Hashtable; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.osgi.framework.Bundle; +import org.osgi.framework.BundleActivator; +import org.osgi.framework.BundleContext; +import org.osgi.framework.wiring.BundleWiring; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MailboxActivator implements BundleActivator { + + private static final Logger LOGGER = LoggerFactory.getLogger(MailboxActivator.class); + + private CqlSession session; + private CqlIdentifier keyspace; + private String graphName; + + @Override + public void start(BundleContext context) { + buildSession(context); + registerService(context); + } + + private void buildSession(BundleContext context) { + + Bundle bundle = context.getBundle(); + BundleWiring bundleWiring = bundle.adapt(BundleWiring.class); + ClassLoader classLoader = bundleWiring.getClassLoader(); + + 
LOGGER.info("Application class loader: {}", classLoader); + + // Use the application bundle class loader to load classes by reflection when + // they are located in the application bundle. This is not strictly required + // as the driver has a "Dynamic-Import:*" directive which makes it capable + // of loading classes outside its bundle. + CqlSessionBuilder builder = CqlSession.builder().withClassLoader(classLoader); + + // Use the application bundle class loader to load configuration resources located + // in the application bundle. This is required, otherwise these resources will + // not be found. + ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder = + DriverConfigLoader.programmaticBuilder(classLoader); + + String contactPointsStr = context.getProperty("cassandra.contactpoints"); + if (contactPointsStr == null) { + contactPointsStr = "127.0.0.1"; + } + LOGGER.info("Contact points: {}", contactPointsStr); + + String portStr = context.getProperty("cassandra.port"); + if (portStr == null) { + portStr = "9042"; + } + LOGGER.info("Port: {}", portStr); + int port = Integer.parseInt(portStr); + + List contactPoints = + Stream.of(contactPointsStr.split(",")) + .map((String host) -> InetSocketAddress.createUnresolved(host, port)) + .collect(Collectors.toList()); + builder.addContactPoints(contactPoints); + + String keyspaceStr = context.getProperty("cassandra.keyspace"); + if (keyspaceStr == null) { + keyspaceStr = "mailbox"; + } + LOGGER.info("Keyspace: {}", keyspaceStr); + keyspace = CqlIdentifier.fromCql(keyspaceStr); + + String lbp = context.getProperty("cassandra.lbp"); + if (lbp != null) { + LOGGER.info("Custom LBP: " + lbp); + configLoaderBuilder.withString(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, lbp); + } else { + LOGGER.info("Custom LBP: NO"); + } + + String datacenter = context.getProperty("cassandra.datacenter"); + if (datacenter != null) { + LOGGER.info("Custom datacenter: " + datacenter); + configLoaderBuilder.withString( + 
DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, datacenter); + } else { + LOGGER.info("Custom datacenter: NO"); + } + + String compression = context.getProperty("cassandra.compression"); + if (compression != null) { + LOGGER.info("Compression: {}", compression); + configLoaderBuilder.withString(DefaultDriverOption.PROTOCOL_COMPRESSION, compression); + } else { + LOGGER.info("Compression: NONE"); + } + + graphName = context.getProperty("cassandra.graph.name"); + if (graphName != null) { + LOGGER.info("Graph name: {}", graphName); + configLoaderBuilder.withString(DseDriverOption.GRAPH_NAME, graphName); + configLoaderBuilder.withString( + DseDriverOption.GRAPH_SUB_PROTOCOL, GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); + } else { + LOGGER.info("Graph: NONE"); + } + + builder.withConfigLoader(configLoaderBuilder.build()); + + LOGGER.info("Initializing session"); + session = builder.build(); + LOGGER.info("Session initialized"); + } + + private void registerService(BundleContext context) { + MailboxServiceImpl mailbox; + if ("true".equalsIgnoreCase(context.getProperty("cassandra.reactive"))) { + mailbox = new ReactiveMailboxServiceImpl(session, keyspace); + } else if ("true".equalsIgnoreCase(context.getProperty("cassandra.geo"))) { + mailbox = new GeoMailboxServiceImpl(session, keyspace); + } else if ("true".equalsIgnoreCase(context.getProperty("cassandra.graph"))) { + mailbox = new GraphMailboxServiceImpl(session, keyspace, graphName); + } else { + mailbox = new MailboxServiceImpl(session, keyspace); + } + mailbox.init(); + @SuppressWarnings("JdkObsolete") + Dictionary properties = new Hashtable<>(); + context.registerService(MailboxService.class.getName(), mailbox, properties); + LOGGER.info("Mailbox Service successfully initialized"); + } + + @Override + public void stop(BundleContext context) { + if (session != null) { + LOGGER.info("Closing session"); + session.close(); + session = null; + LOGGER.info("Session closed"); + } + } +} diff --git 
a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMapper.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMapper.java new file mode 100644 index 00000000000..b2552862e45 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMapper.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; + +@Mapper +public interface MailboxMapper { + + @DaoFactory + MailboxMessageDao mailboxMessageDao(@DaoKeyspace CqlIdentifier keyspace); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java new file mode 100644 index 00000000000..76f44e3ae43 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java @@ -0,0 +1,32 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service; + +import com.datastax.oss.driver.api.core.PagingIterable; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; + +@Dao +public interface MailboxMessageDao { + + @Insert + void save(MailboxMessage message); + + @Select + PagingIterable findByRecipient(String recipient); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxServiceImpl.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxServiceImpl.java new file mode 100644 index 00000000000..b4843d89a60 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxServiceImpl.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.service; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; +import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; + +import com.codahale.metrics.Timer; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import java.util.Optional; +import net.jcip.annotations.GuardedBy; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MailboxServiceImpl implements MailboxService { + + private static final Logger LOGGER = LoggerFactory.getLogger(MailboxServiceImpl.class); + + protected final CqlSession session; + protected final CqlIdentifier keyspace; + + @GuardedBy("this") + protected boolean initialized = false; + + private PreparedStatement deleteStatement; + + protected MailboxMessageDao dao; + + public MailboxServiceImpl(CqlSession session, CqlIdentifier keyspace) { + this.session = session; + this.keyspace = keyspace; + } + + public synchronized void init() { + if (initialized) { + return; + } + createSchema(); + prepareStatements(); + createDaos(); + printMetrics(); + initialized = true; + } + + protected void createSchema() { + session.execute("DROP KEYSPACE IF EXISTS test_osgi"); + session.execute( + "CREATE KEYSPACE IF NOT EXISTS test_osgi with replication = {'class': 'SimpleStrategy', 'replication_factor' : 1}"); + session.execute( + "CREATE TABLE " + + 
keyspace + + "." + + MailboxMessage.MAILBOX_TABLE + + " (" + + "recipient text," + + "timestamp timestamp," + + "sender text," + + "body text," + + "PRIMARY KEY (recipient, timestamp))"); + } + + protected void prepareStatements() { + deleteStatement = + session.prepare( + deleteFrom(keyspace, MailboxMessage.MAILBOX_TABLE) + .where(column("recipient").isEqualTo(bindMarker())) + .build()); + } + + protected void createDaos() { + MailboxMapper mapper = new MailboxMapperBuilder(session).build(); + dao = mapper.mailboxMessageDao(keyspace); + } + + protected void printMetrics() { + // Exercise metrics + if (session.getMetrics().isPresent()) { + Metrics metrics = session.getMetrics().get(); + Optional cqlRequests = metrics.getSessionMetric(DefaultSessionMetric.CQL_REQUESTS); + cqlRequests.ifPresent( + counter -> LOGGER.info("Number of CQL requests: {}", counter.getCount())); + } + } + + @Override + public Iterable getMessages(String recipient) throws MailboxException { + try { + return dao.findByRecipient(recipient); + } catch (Exception e) { + throw new MailboxException(e); + } + } + + @Override + public void sendMessage(MailboxMessage message) throws MailboxException { + try { + dao.save(message); + } catch (Exception e) { + throw new MailboxException(e); + } + } + + @Override + public void clearMailbox(String recipient) throws MailboxException { + try { + BoundStatement statement = deleteStatement.bind(recipient); + session.execute(statement); + } catch (Exception e) { + throw new MailboxException(e); + } + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMapper.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMapper.java new file mode 100644 index 00000000000..81d264aefe2 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMapper.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service.geo; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; + +@Mapper +public interface GeoMailboxMapper { + + @DaoFactory + GeoMailboxMessageDao mailboxMessageDao(@DaoKeyspace CqlIdentifier keyspace); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java new file mode 100644 index 00000000000..af3b7750a25 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service.geo; + +import com.datastax.oss.driver.api.core.PagingIterable; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxMessage; +import com.datastax.oss.driver.internal.osgi.service.MailboxMessageDao; + +@Dao +public interface GeoMailboxMessageDao extends MailboxMessageDao { + + @Insert + void save(GeoMailboxMessage message); + + @Select + PagingIterable findGeoByRecipient(String recipient); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxServiceImpl.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxServiceImpl.java new file mode 100644 index 00000000000..4dd59724682 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxServiceImpl.java @@ -0,0 +1,100 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.service.geo; + +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; +import static com.datastax.oss.driver.api.querybuilder.relation.Relation.column; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxMessage; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxService; +import com.datastax.oss.driver.internal.osgi.service.MailboxServiceImpl; + +public class GeoMailboxServiceImpl extends MailboxServiceImpl implements GeoMailboxService { + + private PreparedStatement deleteGeoStatement; + private GeoMailboxMessageDao geoDao; + + public GeoMailboxServiceImpl(CqlSession session, CqlIdentifier keyspace) { + super(session, keyspace); + } + + @Override + protected void createSchema() { + super.createSchema(); + session.execute( + "CREATE TABLE " + + keyspace + + "." 
+ + GeoMailboxMessage.MAILBOX_TABLE + + " (" + + "recipient text," + + "location 'PointType'," + + "sender text," + + "body text," + + "PRIMARY KEY (recipient, location))"); + } + + @Override + protected void prepareStatements() { + super.prepareStatements(); + deleteGeoStatement = + session.prepare( + deleteFrom(keyspace, GeoMailboxMessage.MAILBOX_TABLE) + .where(column("recipient").isEqualTo(bindMarker())) + .build()); + } + + @Override + protected void createDaos() { + super.createDaos(); + GeoMailboxMapper mapper = new GeoMailboxMapperBuilder(session).build(); + geoDao = mapper.mailboxMessageDao(keyspace); + } + + @Override + public void sendGeoMessage(GeoMailboxMessage message) throws MailboxException { + try { + geoDao.save(message); + } catch (Exception e) { + throw new MailboxException(e); + } + } + + @Override + public Iterable getGeoMessages(String recipient) throws MailboxException { + try { + return geoDao.findGeoByRecipient(recipient); + } catch (Exception e) { + throw new MailboxException(e); + } + } + + @Override + public void clearGeoMailbox(String recipient) throws MailboxException { + try { + BoundStatement statement = deleteGeoStatement.bind(recipient); + session.execute(statement); + } catch (Exception e) { + throw new MailboxException(e); + } + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/graph/GraphMailboxServiceImpl.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/graph/GraphMailboxServiceImpl.java new file mode 100644 index 00000000000..4977cbda793 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/graph/GraphMailboxServiceImpl.java @@ -0,0 +1,101 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service.graph; + +import static com.datastax.dse.driver.api.core.graph.DseGraph.g; +import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.unfold; + +import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; +import com.datastax.dse.driver.api.core.graph.GraphNode; +import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.graph.GraphMailboxService; +import com.datastax.oss.driver.internal.osgi.service.MailboxServiceImpl; +import java.time.Instant; +import java.util.stream.Collectors; + +public class GraphMailboxServiceImpl extends MailboxServiceImpl implements GraphMailboxService { + + private final String graphName; + + public GraphMailboxServiceImpl(CqlSession session, CqlIdentifier keyspace, String graphName) { + super(session, keyspace); + this.graphName = graphName; + } + + @Override + protected void createSchema() { + super.createSchema(); + session.execute( + ScriptGraphStatement.newInstance( + String.format("system.graph('%s').ifExists().drop()", graphName)) + .setSystemQuery(true), + ScriptGraphStatement.SYNC); + session.execute( + ScriptGraphStatement.newInstance( + String.format("system.graph('%s').ifNotExists().coreEngine().create()", graphName)) + 
.setSystemQuery(true), + ScriptGraphStatement.SYNC); + session.execute( + ScriptGraphStatement.newInstance( + "schema.vertexLabel('message')" + + ".partitionBy('recipient', Text)" + + ".clusterBy('timestamp', Timestamp)" + + ".property('sender', Text)" + + ".property('body', Text)" + + ".create();")); + } + + @Override + public Iterable getGraphMessages(String recipient) throws MailboxException { + FluentGraphStatement statement = + FluentGraphStatement.newInstance( + g.V().hasLabel("message").has("recipient", recipient).valueMap().by(unfold())); + try { + return session.execute(statement).all().stream() + .map(GraphNode::asMap) + .map( + vertex -> { + Instant timestamp = (Instant) vertex.get("timestamp"); + String sender = (String) vertex.get("sender"); + String body = (String) vertex.get("body"); + return new MailboxMessage(recipient, timestamp, sender, body); + }) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new MailboxException(e); + } + } + + @Override + public void sendGraphMessage(MailboxMessage message) throws MailboxException { + FluentGraphStatement insertVertex = + FluentGraphStatement.newInstance( + g.addV("message") + .property("recipient", message.getRecipient()) + .property("timestamp", message.getTimestamp()) + .property("sender", message.getSender()) + .property("body", message.getBody())); + try { + session.execute(insertVertex); + } catch (Exception e) { + throw new MailboxException(e); + } + } +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMapper.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMapper.java new file mode 100644 index 00000000000..93511f51dfb --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMapper.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service.reactive; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; + +@Mapper +public interface ReactiveMailboxMapper { + + @DaoFactory + ReactiveMailboxMessageDao mailboxMessageDao(@DaoKeyspace CqlIdentifier keyspace); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java new file mode 100644 index 00000000000..e558db59357 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.service.reactive; + +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.internal.osgi.service.MailboxMessageDao; + +@Dao +public interface ReactiveMailboxMessageDao extends MailboxMessageDao { + + @Select + MappedReactiveResultSet findByRecipientReactive(String recipient); +} diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxServiceImpl.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxServiceImpl.java new file mode 100644 index 00000000000..ed349207587 --- /dev/null +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxServiceImpl.java @@ -0,0 +1,51 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.service.reactive; + +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.reactive.ReactiveMailboxService; +import com.datastax.oss.driver.internal.osgi.service.MailboxServiceImpl; + +public class ReactiveMailboxServiceImpl extends MailboxServiceImpl + implements ReactiveMailboxService { + + private ReactiveMailboxMessageDao reactiveDao; + + public ReactiveMailboxServiceImpl(CqlSession session, CqlIdentifier keyspace) { + super(session, keyspace); + } + + @Override + protected void createDaos() { + super.createDaos(); + ReactiveMailboxMapper mapper = new ReactiveMailboxMapperBuilder(session).build(); + reactiveDao = mapper.mailboxMessageDao(keyspace); + } + + @Override + public MappedReactiveResultSet getMessagesReactive(String recipient) + throws MailboxException { + try { + return reactiveDao.findByRecipientReactive(recipient); + } catch (Exception e) { + throw new MailboxException(e); + } + } +} diff --git a/osgi-tests/src/main/resources/application.conf b/osgi-tests/src/main/resources/application.conf new file mode 100644 index 00000000000..8f795524ed2 --- /dev/null +++ b/osgi-tests/src/main/resources/application.conf @@ -0,0 +1,42 @@ +# Configuration overrides for integration tests +datastax-java-driver { + basic { + load-balancing-policy.class = DcInferringLoadBalancingPolicy + request.timeout = 10 seconds + graph.timeout = 10 seconds + } + advanced { + retry-policy.class = com.datastax.oss.driver.api.osgi.CustomRetryPolicy + connection { + init-query-timeout = 5 seconds + set-keyspace-timeout = 5 seconds + } + heartbeat.timeout = 5 seconds + control-connection.timeout = 5 seconds + request { + 
trace.interval = 1 second + warn-if-set-keyspace = false + } + graph { + name = "demo" + } + continuous-paging.timeout { + first-page = 10 seconds + other-pages = 10 seconds + } + metrics { + session.enabled = [cql-requests] + // Raise histogram bounds because the tests execute DDL queries with a higher timeout + session.cql_requests.highest_latency = 30 seconds + } + // adjust quiet period to 0 seconds to speed up tests + netty { + io-group { + shutdown {quiet-period = 0, timeout = 15, unit = SECONDS} + } + admin-group { + shutdown {quiet-period = 0, timeout = 15, unit = SECONDS} + } + } + } +} \ No newline at end of file diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiCustomLoadBalancingPolicyIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiCustomLoadBalancingPolicyIT.java new file mode 100644 index 00000000000..59b0aa6b2e6 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiCustomLoadBalancingPolicyIT.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +/** + * Test that uses a policy from a separate bundle from the core driver to ensure that the driver is + * able to load that policy via Reflection. To support this, the driver uses + * DynamicImport-Package: *. + */ +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiCustomLoadBalancingPolicyIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.testBundles(), + CoreOptions.systemProperty("cassandra.lbp") + // This LBP resides in the test-infra bundle and will be loaded by the driver + // class loader, thanks to the "DynamicImport-Package: *" directive + .value(SortingLoadBalancingPolicy.class.getName())); + } + + @Test + public void test_custom_lbp() throws Exception { + DefaultServiceChecks.checkService(service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiDefaultIT.java 
b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiDefaultIT.java new file mode 100644 index 00000000000..705a5787f2f --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiDefaultIT.java @@ -0,0 +1,57 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiDefaultIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + // this configuration purposely excludes bundles whose resolution should be optional: + // ESRI, Reactive Streams and Tinkerpop. This allows to validate that the driver can still + // work properly in an OSGi container as long as the missing packages are not accessed. 
+ return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.testBundles()); + } + + @Test + public void test_default() throws Exception { + DefaultServiceChecks.checkService(service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java new file mode 100644 index 00000000000..ef18fade1fe --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxService; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.checks.GeoServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +@DseRequirement(min = "5.0", description = "Requires geo types") +public class OsgiGeoTypesIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.esriBundles(), + BundleOptions.testBundles()); + } + + @Test + public void test_geo_types() throws Exception { + DefaultServiceChecks.checkService(service); + assertThat(service).isInstanceOf(GeoMailboxService.class); + GeoServiceChecks.checkServiceGeo((GeoMailboxService) service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java new file mode 100644 index 
00000000000..a34c7946b8b --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java @@ -0,0 +1,63 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.api.osgi.service.graph.GraphMailboxService; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.checks.GraphServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +@DseRequirement(min = "6.8", description = "Requires Core Graph") +public class OsgiGraphIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + 
BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.tinkerpopBundles(), + BundleOptions.testBundles()); + } + + @Test + public void test_graph() throws Exception { + DefaultServiceChecks.checkService(service); + assertThat(service).isInstanceOf(GraphMailboxService.class); + GraphServiceChecks.checkGraphService((GraphMailboxService) service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiLz4IT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiLz4IT.java new file mode 100644 index 00000000000..91ada262ca1 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiLz4IT.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiLz4IT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.lz4Bundle(), + BundleOptions.testBundles()); + } + + @Test + public void test_lz4_compression() throws Exception { + DefaultServiceChecks.checkService(service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiReactiveIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiReactiveIT.java new file mode 100644 index 00000000000..505c01f3ac1 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiReactiveIT.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.api.osgi.service.reactive.ReactiveMailboxService; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.checks.ReactiveServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiReactiveIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.reactiveBundles(), + BundleOptions.testBundles()); + } + + @Test + public void test_reactive() throws Exception { + DefaultServiceChecks.checkService(service); + 
assertThat(service).isInstanceOf(ReactiveMailboxService.class); + ReactiveServiceChecks.checkServiceReactive((ReactiveMailboxService) service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java new file mode 100644 index 00000000000..21d029faa27 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java @@ -0,0 +1,53 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiShadedIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreShadedBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + // Netty and Jackson are shaded + BundleOptions.testBundles()); + } + + @Test + public void test_shaded_reactive() throws Exception { + DefaultServiceChecks.checkService(service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java new file mode 100644 index 00000000000..f524de74fad --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi; + +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; +import com.datastax.oss.driver.internal.osgi.support.BundleOptions; +import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; +import com.datastax.oss.driver.internal.osgi.support.CcmPaxExam; +import javax.inject.Inject; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.ops4j.pax.exam.Configuration; +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.Option; +import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; + +@RunWith(CcmPaxExam.class) +@ExamReactorStrategy(CcmExamReactorFactory.class) +public class OsgiSnappyIT { + + @Inject MailboxService service; + + @Configuration + public Option[] config() { + return CoreOptions.options( + BundleOptions.applicationBundle(), + BundleOptions.driverCoreBundle(), + BundleOptions.driverQueryBuilderBundle(), + BundleOptions.driverMapperRuntimeBundle(), + BundleOptions.commonBundles(), + BundleOptions.nettyBundles(), + BundleOptions.jacksonBundles(), + BundleOptions.snappyBundle(), + BundleOptions.testBundles()); + } + + @Test + public void test_snappy_compression() throws Exception { + DefaultServiceChecks.checkService(service); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/DefaultServiceChecks.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/DefaultServiceChecks.java new file mode 100644 
index 00000000000..9c2fe8cb197 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/DefaultServiceChecks.java @@ -0,0 +1,49 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.checks; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.MailboxService; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +public class DefaultServiceChecks { + + /** + * Exercises an OSGi service provided by an OSGi bundle that depends on the driver. Ensures that + * queries can be made through the service with the current given configuration. + */ + public static void checkService(MailboxService service) throws Exception { + // Insert some data into mailbox for a particular user. 
+ String recipient = "user@datastax.com"; + try { + List insertedMessages = new ArrayList<>(); + for (int i = 0; i < 30; i++) { + Instant timestamp = Instant.ofEpochMilli(i); + MailboxMessage message = new MailboxMessage(recipient, timestamp, "sender" + i, "body" + i); + insertedMessages.add(message); + service.sendMessage(message); + } + Iterable retrievedMessages = service.getMessages(recipient); + assertThat(retrievedMessages).containsExactlyElementsOf(insertedMessages); + } finally { + service.clearMailbox(recipient); + } + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java new file mode 100644 index 00000000000..2d93ed93026 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.checks; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.data.geometry.Point; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxMessage; +import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxService; +import java.util.ArrayList; +import java.util.List; + +public class GeoServiceChecks { + + public static void checkServiceGeo(GeoMailboxService service) throws Exception { + // Insert some data into mailbox for a particular user. + String recipient = "user@datastax.com"; + try { + List insertedMessages = new ArrayList<>(); + for (int i = 0; i < 30; i++) { + Point location = Point.fromCoordinates(i, i); + GeoMailboxMessage message = + new GeoMailboxMessage(recipient, location, "sender" + i, "body" + i); + insertedMessages.add(message); + service.sendGeoMessage(message); + } + Iterable retrievedMessages = service.getGeoMessages(recipient); + assertThat(retrievedMessages).containsExactlyElementsOf(insertedMessages); + } finally { + service.clearGeoMailbox(recipient); + } + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GraphServiceChecks.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GraphServiceChecks.java new file mode 100644 index 00000000000..ef386d96594 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GraphServiceChecks.java @@ -0,0 +1,42 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.checks; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.graph.GraphMailboxService; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +public class GraphServiceChecks { + + public static void checkGraphService(GraphMailboxService service) throws MailboxException { + // Insert some data into mailbox for a particular user. + String recipient = "user@datastax.com"; + List insertedMessages = new ArrayList<>(); + for (int i = 0; i < 30; i++) { + Instant timestamp = Instant.ofEpochMilli(i); + MailboxMessage message = new MailboxMessage(recipient, timestamp, "sender" + i, "body" + i); + insertedMessages.add(message); + service.sendGraphMessage(message); + } + Iterable retrievedMessages = service.getGraphMessages(recipient); + assertThat(retrievedMessages).containsExactlyElementsOf(insertedMessages); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/ReactiveServiceChecks.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/ReactiveServiceChecks.java new file mode 100644 index 00000000000..851237889db --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/ReactiveServiceChecks.java @@ -0,0 +1,51 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.checks; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; +import com.datastax.oss.driver.api.osgi.service.MailboxException; +import com.datastax.oss.driver.api.osgi.service.MailboxMessage; +import com.datastax.oss.driver.api.osgi.service.reactive.ReactiveMailboxService; +import io.reactivex.Flowable; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +public class ReactiveServiceChecks { + + public static void checkServiceReactive(ReactiveMailboxService service) throws MailboxException { + // Insert some data into mailbox for a particular user. 
+ String recipient = "user@datastax.com"; + try { + List insertedMessages = new ArrayList<>(); + for (int i = 0; i < 30; i++) { + Instant timestamp = Instant.ofEpochMilli(i); + MailboxMessage message = new MailboxMessage(recipient, timestamp, "sender" + i, "body" + i); + insertedMessages.add(message); + service.sendMessage(message); + } + MappedReactiveResultSet retrievedMessages = + service.getMessagesReactive(recipient); + List messageList = + Flowable.fromPublisher(retrievedMessages).toList().blockingGet(); + assertThat(messageList).containsExactlyElementsOf(insertedMessages); + } finally { + service.clearMailbox(recipient); + } + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java new file mode 100644 index 00000000000..f12e8fa337a --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -0,0 +1,210 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.support; + +import static org.ops4j.pax.exam.CoreOptions.bundle; +import static org.ops4j.pax.exam.CoreOptions.junitBundles; +import static org.ops4j.pax.exam.CoreOptions.mavenBundle; +import static org.ops4j.pax.exam.CoreOptions.options; +import static org.ops4j.pax.exam.CoreOptions.systemProperty; +import static org.ops4j.pax.exam.CoreOptions.systemTimeout; +import static org.ops4j.pax.exam.CoreOptions.vmOption; + +import org.ops4j.pax.exam.CoreOptions; +import org.ops4j.pax.exam.options.CompositeOption; +import org.ops4j.pax.exam.options.UrlProvisionOption; +import org.ops4j.pax.exam.options.WrappedUrlProvisionOption; + +public class BundleOptions { + + public static CompositeOption commonBundles() { + return () -> + options( + mavenBundle("com.datastax.oss", "java-driver-shaded-guava").versionAsInProject(), + mavenBundle("io.dropwizard.metrics", "metrics-core").versionAsInProject(), + mavenBundle("org.slf4j", "slf4j-api").versionAsInProject(), + mavenBundle("org.hdrhistogram", "HdrHistogram").versionAsInProject(), + mavenBundle("com.typesafe", "config").versionAsInProject(), + mavenBundle("com.datastax.oss", "native-protocol").versionAsInProject(), + logbackBundles(), + debugOptions()); + } + + public static CompositeOption applicationBundle() { + return () -> + options( + systemProperty("cassandra.contactpoints").value("127.0.0.1"), + systemProperty("cassandra.port").value("9042"), + systemProperty("cassandra.keyspace").value("test_osgi"), + bundle("reference:file:target/classes")); + } + + public static UrlProvisionOption driverCoreBundle() { + return bundle("reference:file:../core/target/classes"); + } + + public static UrlProvisionOption driverCoreShadedBundle() { + return bundle("reference:file:../core-shaded/target/classes"); + } + + public static UrlProvisionOption driverQueryBuilderBundle() { + return bundle("reference:file:../query-builder/target/classes"); + } + + public static UrlProvisionOption 
driverMapperRuntimeBundle() { + return bundle("reference:file:../mapper-runtime/target/classes"); + } + + public static UrlProvisionOption driverTestInfraBundle() { + return bundle("reference:file:../test-infra/target/classes"); + } + + public static CompositeOption testBundles() { + return () -> + options( + driverTestInfraBundle(), + mavenBundle("org.apache.commons", "commons-exec").versionAsInProject(), + mavenBundle("org.assertj", "assertj-core").versionAsInProject(), + mavenBundle("org.awaitility", "awaitility").versionAsInProject(), + mavenBundle("org.hamcrest", "hamcrest").versionAsInProject(), + junitBundles()); + } + + public static CompositeOption nettyBundles() { + return () -> + options( + mavenBundle("io.netty", "netty-handler").versionAsInProject(), + mavenBundle("io.netty", "netty-buffer").versionAsInProject(), + mavenBundle("io.netty", "netty-codec").versionAsInProject(), + mavenBundle("io.netty", "netty-common").versionAsInProject(), + mavenBundle("io.netty", "netty-transport").versionAsInProject(), + mavenBundle("io.netty", "netty-resolver").versionAsInProject()); + } + + public static CompositeOption logbackBundles() { + return () -> + options( + mavenBundle("ch.qos.logback", "logback-classic").versionAsInProject(), + mavenBundle("ch.qos.logback", "logback-core").versionAsInProject(), + systemProperty("logback.configurationFile") + .value("file:src/test/resources/logback-test.xml")); + } + + public static CompositeOption jacksonBundles() { + return () -> + options( + mavenBundle("com.fasterxml.jackson.core", "jackson-databind").versionAsInProject(), + mavenBundle("com.fasterxml.jackson.core", "jackson-core").versionAsInProject(), + mavenBundle("com.fasterxml.jackson.core", "jackson-annotations").versionAsInProject()); + } + + public static CompositeOption lz4Bundle() { + return () -> + options( + mavenBundle("org.lz4", "lz4-java").versionAsInProject(), + systemProperty("cassandra.compression").value("LZ4")); + } + + public static CompositeOption 
snappyBundle() { + return () -> + options( + mavenBundle("org.xerial.snappy", "snappy-java").versionAsInProject(), + systemProperty("cassandra.compression").value("SNAPPY")); + } + + public static CompositeOption tinkerpopBundles() { + return () -> + options( + CoreOptions.wrappedBundle( + mavenBundle("org.apache.tinkerpop", "gremlin-core").versionAsInProject()) + .exports( + // avoid exporting 'org.apache.tinkerpop.gremlin.*' as other Tinkerpop jars have + // this root package as well + "org.apache.tinkerpop.gremlin.jsr223.*", + "org.apache.tinkerpop.gremlin.process.*", + "org.apache.tinkerpop.gremlin.structure.*", + "org.apache.tinkerpop.gremlin.util.*") + .bundleSymbolicName("org.apache.tinkerpop.gremlin-core") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + CoreOptions.wrappedBundle( + mavenBundle("org.apache.tinkerpop", "gremlin-driver").versionAsInProject()) + .exports("org.apache.tinkerpop.gremlin.driver.*") + .bundleSymbolicName("org.apache.tinkerpop.gremlin-driver") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + CoreOptions.wrappedBundle( + mavenBundle("org.apache.tinkerpop", "tinkergraph-gremlin").versionAsInProject()) + .exports("org.apache.tinkerpop.gremlin.tinkergraph.*") + .bundleSymbolicName("org.apache.tinkerpop.tinkergraph-gremlin") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + CoreOptions.wrappedBundle( + mavenBundle("org.apache.tinkerpop", "gremlin-shaded").versionAsInProject()) + .exports("org.apache.tinkerpop.shaded.*") + .bundleSymbolicName("org.apache.tinkerpop.gremlin-shaded") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + // Note: the versions below are hard-coded because they shouldn't change very often, + // but if the tests fail because of them, we should consider parameterizing them + mavenBundle("commons-configuration", "commons-configuration", "1.10"), + CoreOptions.wrappedBundle(mavenBundle("commons-logging", "commons-logging", "1.1.1")) + 
.exports("org.apache.commons.logging.*") + .bundleVersion("1.1.1") + .bundleSymbolicName("org.apache.commons.commons-logging") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + mavenBundle("commons-collections", "commons-collections", "3.2.2"), + mavenBundle("org.apache.commons", "commons-lang3", "3.8.1"), + mavenBundle("commons-lang", "commons-lang", "2.6"), + CoreOptions.wrappedBundle(mavenBundle("org.javatuples", "javatuples", "1.2")) + .exports("org.javatuples.*") + .bundleVersion("1.2") + .bundleSymbolicName("org.javatuples") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + systemProperty("cassandra.graph").value("true"), + systemProperty("cassandra.graph.name").value("test_osgi_graph")); + } + + public static CompositeOption esriBundles() { + return () -> + options( + CoreOptions.wrappedBundle( + mavenBundle("com.esri.geometry", "esri-geometry-api").versionAsInProject()) + .exports("com.esri.core.geometry.*") + .imports("org.json", "org.codehaus.jackson") + .bundleSymbolicName("com.esri.core.geometry") + .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), + mavenBundle("org.json", "json").versionAsInProject(), + mavenBundle("org.codehaus.jackson", "jackson-core-asl").versionAsInProject(), + systemProperty("cassandra.geo").value("true")); + } + + public static CompositeOption reactiveBundles() { + return () -> + options( + mavenBundle("org.reactivestreams", "reactive-streams").versionAsInProject(), + mavenBundle("io.reactivex.rxjava2", "rxjava").versionAsInProject(), + systemProperty("cassandra.reactive").value("true")); + } + + private static CompositeOption debugOptions() { + boolean debug = Boolean.getBoolean("osgi.debug"); + if (debug) { + return () -> + options( + vmOption("-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005"), + systemTimeout(Long.MAX_VALUE)); + } else { + return CoreOptions::options; + } + } +} diff --git 
a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmExamReactorFactory.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmExamReactorFactory.java new file mode 100644 index 00000000000..5d0c6d7fdab --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmExamReactorFactory.java @@ -0,0 +1,30 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.support; + +import java.util.List; +import org.ops4j.pax.exam.TestContainer; +import org.ops4j.pax.exam.TestProbeBuilder; +import org.ops4j.pax.exam.spi.StagedExamReactor; +import org.ops4j.pax.exam.spi.StagedExamReactorFactory; + +public class CcmExamReactorFactory implements StagedExamReactorFactory { + + @Override + public StagedExamReactor create(List containers, List mProbes) { + return new CcmStagedReactor(containers, mProbes); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java new file mode 100644 index 00000000000..8697a0d790d --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java @@ -0,0 +1,106 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.osgi.support; + +import static com.datastax.oss.driver.internal.osgi.support.CcmStagedReactor.CCM_BRIDGE; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.Objects; +import java.util.Optional; +import org.junit.AssumptionViolatedException; +import org.junit.runner.Description; +import org.junit.runner.notification.Failure; +import org.junit.runner.notification.RunNotifier; +import org.junit.runners.model.InitializationError; +import org.ops4j.pax.exam.junit.PaxExam; + +public class CcmPaxExam extends PaxExam { + + public CcmPaxExam(Class klass) throws InitializationError { + super(klass); + } + + @Override + public void run(RunNotifier notifier) { + Description description = getDescription(); + CassandraRequirement cassandraRequirement = + description.getAnnotation(CassandraRequirement.class); + if (cassandraRequirement != null) { + if (!cassandraRequirement.min().isEmpty()) { + Version minVersion = Objects.requireNonNull(Version.parse(cassandraRequirement.min())); + if (minVersion.compareTo(CCM_BRIDGE.getCassandraVersion()) > 0) { + fireRequirementsNotMet(notifier, description, cassandraRequirement.min(), false, false); + return; + } + } + if (!cassandraRequirement.max().isEmpty()) { + Version maxVersion = 
Objects.requireNonNull(Version.parse(cassandraRequirement.max())); + if (maxVersion.compareTo(CCM_BRIDGE.getCassandraVersion()) <= 0) { + fireRequirementsNotMet(notifier, description, cassandraRequirement.max(), true, false); + return; + } + } + } + DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); + if (dseRequirement != null) { + Optional dseVersionOption = CCM_BRIDGE.getDseVersion(); + if (!dseVersionOption.isPresent()) { + notifier.fireTestAssumptionFailed( + new Failure( + description, + new AssumptionViolatedException("Test Requires DSE but C* is configured."))); + return; + } else { + Version dseVersion = dseVersionOption.get(); + if (!dseRequirement.min().isEmpty()) { + Version minVersion = Objects.requireNonNull(Version.parse(dseRequirement.min())); + if (minVersion.compareTo(dseVersion) > 0) { + fireRequirementsNotMet(notifier, description, dseRequirement.min(), false, true); + return; + } + } + if (!dseRequirement.max().isEmpty()) { + Version maxVersion = Objects.requireNonNull(Version.parse(dseRequirement.max())); + if (maxVersion.compareTo(dseVersion) <= 0) { + fireRequirementsNotMet(notifier, description, dseRequirement.max(), true, true); + return; + } + } + } + } + super.run(notifier); + } + + private void fireRequirementsNotMet( + RunNotifier notifier, + Description description, + String requirement, + boolean lessThan, + boolean dse) { + AssumptionViolatedException e = + new AssumptionViolatedException( + String.format( + "Test requires %s %s %s but %s is configured. Description: %s", + lessThan ? "less than" : "at least", + dse ? "DSE" : "C*", + requirement, + dse ? 
CCM_BRIDGE.getDseVersion().orElse(null) : CCM_BRIDGE.getCassandraVersion(), + description)); + notifier.fireTestAssumptionFailed(new Failure(description, e)); + } +} diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java new file mode 100644 index 00000000000..5baf7fa6833 --- /dev/null +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.osgi.support; + +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import java.util.List; +import net.jcip.annotations.GuardedBy; +import org.ops4j.pax.exam.TestContainer; +import org.ops4j.pax.exam.TestProbeBuilder; +import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CcmStagedReactor extends AllConfinedStagedReactor { + + private static final Logger LOGGER = LoggerFactory.getLogger(CcmStagedReactor.class); + + public static final CcmBridge CCM_BRIDGE; + + static { + CcmBridge.Builder builder = CcmBridge.builder().withNodes(1); + if (CcmBridge.DSE_ENABLEMENT) { + builder.withDseWorkloads("graph"); + } + CCM_BRIDGE = builder.build(); + } + + @GuardedBy("this") + private boolean running = false; + + public CcmStagedReactor(List containers, List mProbes) { + super(containers, mProbes); + } + + @Override + public synchronized void beforeSuite() { + if (!running) { + boolean dse = CCM_BRIDGE.getDseVersion().isPresent(); + LOGGER.info( + "Starting CCM, running {} version {}", + dse ? "DSE" : "Cassandra", + dse ? CCM_BRIDGE.getDseVersion().get() : CCM_BRIDGE.getCassandraVersion()); + CCM_BRIDGE.create(); + CCM_BRIDGE.start(); + LOGGER.info("CCM started"); + running = true; + Runtime.getRuntime() + .addShutdownHook( + new Thread( + () -> { + try { + afterSuite(); + } catch (Exception e) { + // silently remove as may have already been removed. 
+ } + })); + } + } + + @Override + public synchronized void afterSuite() { + if (running) { + LOGGER.info("Stopping CCM"); + CCM_BRIDGE.stop(); + CCM_BRIDGE.remove(); + running = false; + LOGGER.info("CCM stopped"); + } + } +} diff --git a/osgi-tests/src/test/resources/exam.properties b/osgi-tests/src/test/resources/exam.properties new file mode 100644 index 00000000000..21438ef0edc --- /dev/null +++ b/osgi-tests/src/test/resources/exam.properties @@ -0,0 +1,18 @@ +# +# Copyright DataStax, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +pax.exam.system=test +pax.exam.logging=none \ No newline at end of file diff --git a/osgi-tests/src/test/resources/logback-test.xml b/osgi-tests/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..70b3e3d3b25 --- /dev/null +++ b/osgi-tests/src/test/resources/logback-test.xml @@ -0,0 +1,34 @@ + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + + + diff --git a/pom.xml b/pom.xml index 13a3820ff3e..bfbdb5115ff 100644 --- a/pom.xml +++ b/pom.xml @@ -34,6 +34,7 @@ mapper-processor test-infra integration-tests + osgi-tests distribution examples bom @@ -62,7 +63,9 @@ 1.3 4.12 1.2.3 - 4.12.0 + 6.0.0 + 6.0.3 + 4.13.3 0.8.9 1.0 2.28 @@ -232,6 +235,16 @@ commons-exec ${commons-exec.version} + + org.osgi + org.osgi.core + ${osgi.version} + + + org.apache.felix + org.apache.felix.framework + ${felix.version} + org.ops4j.pax.exam pax-exam-junit4 @@ -239,7 +252,7 @@ org.ops4j.pax.exam - pax-exam-container-native + pax-exam-container-forked ${pax-exam.version} @@ -253,9 +266,9 @@ 2.5.4 - org.apache.felix - org.apache.felix.framework - 6.0.0 + org.ops4j.pax.url + pax-url-reference + 2.6.2 org.glassfish From a8e8795df679e03f1be8a0627845be0a06a72694 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 10:07:52 -0700 Subject: [PATCH 495/979] Add missing changelog entries - JAVA-2663 from f4ea838b7650ab5bb10d089e1ea8613ba2592b05 - JAVA-2803 from 4cd369548e32eabbf772ed4e497a151ca225e28e --- changelog/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 4a32a5ef064..8f3b02187d4 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -7,10 +7,12 @@ - [improvement] JAVA-2301: Introduce OSGi tests for the mapper - [improvement] JAVA-2658: Refactor OSGi tests - [bug] JAVA-2657: Add ability to specify the class loader to use for application-specific classpath resources +- [improvement] JAVA-2803: Add Graal substitutions for protocol compression - 
[documentation] JAVA-2666: Document BOM and driver modules - [documentation] JAVA-2613: Improve connection pooling documentation - [new feature] JAVA-2793: Add composite config loader - [new feature] JAVA-2792: Allow custom results in the mapper +- [improvement] JAVA-2663: Add Graal substitutions for native functions - [improvement] JAVA-2747: Revisit semantics of Statement.setExecutionProfile/Name ### 4.6.1 From 957bfd5cb677c14bd025db4ae7057ca1712e1704 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 10:16:55 -0700 Subject: [PATCH 496/979] Update version in docs --- README.md | 4 +-- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++++----- manual/core/bom/README.md | 4 +-- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 10 +++--- manual/core/detachable_types/README.md | 14 ++++---- manual/core/dse/geotypes/README.md | 6 ++-- manual/core/dse/graph/README.md | 4 +-- manual/core/dse/graph/fluent/README.md | 4 +-- .../core/dse/graph/fluent/explicit/README.md | 12 +++---- manual/core/dse/graph/results/README.md | 6 ++-- manual/core/dse/graph/script/README.md | 6 ++-- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 28 +++++++-------- manual/core/metadata/schema/README.md | 20 +++++------ manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 10 +++--- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reactive/README.md | 24 ++++++------- 
manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 4 +-- manual/core/statements/README.md | 8 ++--- manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/custom_types/README.md | 10 +++--- manual/mapper/daos/delete/README.md | 18 +++++----- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 14 ++++---- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 22 ++++++------ manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 26 +++++++------- manual/mapper/daos/setentity/README.md | 10 +++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +++---- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 6 ++-- manual/query_builder/README.md | 10 +++--- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 
6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- upgrade_guide/README.md | 2 +- 79 files changed, 337 insertions(+), 337 deletions(-) diff --git a/README.md b/README.md index edc97ae740d..1147fde9bde 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.6.1](https://github.com/datastax/java-driver/tree/4.6.1).* +[4.7.0](https://github.com/datastax/java-driver/tree/4.7.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol @@ -77,7 +77,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.6 +[API docs]: https://docs.datastax.com/en/drivers/java/4.7 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index 8f3b02187d4..aaf84cc4d84 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.7.0 (in progress) +### 4.7.0 - [improvement] JAVA-2301: Introduce OSGi tests for the mapper - [improvement] JAVA-2658: Refactor OSGi tests diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index f73f80629a1..5c598b079f2 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index a56b2474a44..dd873833314 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index fbe154dd12a..91fda354405 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 99657e45077..c5ea6e1b159 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index d902071474c..998e5cf7148 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 70d83292a08..e80270c2730 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.6.1 + 4.7.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.6.1 + 4.7.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 150f9fd82af..1e3ebdc9ee9 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 1b69b82296e..9d2b5c46a78 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 0ce833e141a..171d1d59a18 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -360,8 +360,8 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 5264333cc85..41c51a7a2f4 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
8bbff60db25..29f8dc1a05d 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 3660ef4d1be..25605927e22 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index a1df9472934..015c80f20f7 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index dd070a22a1f..1a34cabdbac 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 26477c52645..65ba69c2208 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 9078a0b5a51..d15ba657f1f 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 9bd63f43e59..7d52c1f9ec1 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 37119799afb..d1b2f42a77d 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -609,6 +609,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 5ef033a45d0..948264d06c1 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 5227721dfe2..4c1a6acbf1a 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 2fd76bcf750..f70a2bf393b 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index cee9acb9906..1374e290334 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 434e4c31f0d..4b45370232f 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 8a277a92dea..d3c17bddcbb 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -120,6 +120,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index c273db60753..d97e58200d4 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -205,10 +205,10 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/paging/OffsetPager.html [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index ce8144dffca..5efa6162a16 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index f0a0cdb5ca0..da63efe377c 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 06595d3d9aa..3d873c7d479 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index d7bcf18cf8d..8be59a4096b 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -365,18 +365,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. 
-[CqlSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 2f59c55e0dd..d79022cb264 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 2e417d72826..1ff00ccc7d6 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 9b1f6a77173..bd727d452ae 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 9d8af0890df..22296424c25 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 7f79c43b233..a5c26249af6 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -188,5 +188,5 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 22e3ccb3046..3c75e8cb3b6 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 71df2039db9..6089389e97e 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index e7c7d5db7ba..b17e05a42a9 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index b15a60d4198..029b775f6df 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index e78bd67b345..849413bd230 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 7492841b296..ba7f69d5c30 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 41a0cf5736f..9b95c6d6941 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index b3778cf37fb..3bc2ee35099 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index b9cdd71bb5e..280f4c1366f 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 6b463054d65..ff0fbba155f 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index fb3af32d34f..f5297bee7aa 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index a8ce02c9af4..36aadaff522 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index b1be86f3a23..ff90918b442 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 5d6a06a5749..2f5dec7f085 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 895952ecf7f..0929ae3a83d 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index c009a3dfa97..f701eb5392d 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md 
b/manual/mapper/daos/query/README.md index d26ead2eeb3..53d14af4b19 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -110,17 +110,17 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 92d6484c540..0c1962bcad4 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. 
-[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 25bc12a68ad..54e113fd12f 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -142,19 +142,19 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- 
-[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/PagingIterable.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 2c0a0791b36..a05540140fd 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 75b0cf079c5..54ed6631272 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 9bd354eac06..8c8a3fbc57f 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
ae93b78e171..d3c506fec90 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index e5ca0217e51..499cd7c879e 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -227,8 +227,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 98ac0e862e2..73f87a35b90 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.5/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index db91bea1437..96c11589c25 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child 
pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index da3b1b72c3b..1d02bb1bbaa 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. 
-[Condition]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index d17ac6cd877..a6a3fafd09c 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index c3362645c86..740cd23d99b 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index f428596d7a5..4ea0d68fcf5 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 8aae54d708f..969633f356c 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index bbf31f826b9..ce81cb1718f 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index a4f4550515d..36c4d771bf1 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index d2b770a9b96..754a8fc538b 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 70b6063af9f..ea2fd4c05b7 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index db83986d75d..ce8cc62ce76 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 4f01daaa0ab..98f98bd7795 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index baca52e0148..36f0f99d5c5 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index f8a9500ffb7..3eb4ff9632c 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 05db4b04eeb..c2ade0c7926 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 3babd52439e..80093a29b8b 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 435b766c55b..7fb78aac1f4 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.6/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 27e26067790..8c8f422eb67 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -92,7 +92,7 @@ you can obtain in most web environments by calling `Thread.getContextClassLoader See the javadocs of [SessionBuilder.withClassLoader] for more information. -[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.3/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- ### 4.1.0 From 6babe4b6ab6c5e379b4e7aadea67b875ec6f8ecd Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 10:22:27 -0700 Subject: [PATCH 497/979] [maven-release-plugin] prepare release 4.7.0 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 298435e9b20..48500d21223 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-bom pom @@ -31,32 
+31,32 @@ com.datastax.oss java-driver-core - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss java-driver-core-shaded - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss java-driver-mapper-processor - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss java-driver-mapper-runtime - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss java-driver-query-builder - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss java-driver-test-infra - 4.7.0-SNAPSHOT + 4.7.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 2c477a4dd9e..c482c6314ac 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 035d205373c..faaccbeb34c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 3bbfcd220d2..9557d255728 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 151d76cf509..63f3aaf1f8f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.0-SNAPSHOT + 4.7.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8efb8086555..7d8e4ae3701 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f9dc9fb3fe1..d2cebfe38f3 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index e07a465b743..9895efddb8f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 31d3c0c9b7f..ae583ede5ee 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index bfbdb5115ff..60b434e5d52 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.7.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 0336eb4cc97..6efd2a57739 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 8def9e2628c..3a3ccf774b9 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0-SNAPSHOT + 4.7.0 java-driver-test-infra bundle From ca7da15567b8ab5ae92ff3b37246b88dd44d2421 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 10:22:36 -0700 Subject: [PATCH 498/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 48500d21223..aecef303882 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.7.0 + 4.8.0-SNAPSHOT com.datastax.oss diff --git 
a/core-shaded/pom.xml b/core-shaded/pom.xml index c482c6314ac..bd2b2375332 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index faaccbeb34c..1769109065c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 9557d255728..e43a8ba247b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 63f3aaf1f8f..1c2d2ba7302 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.0 + 4.8.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 7d8e4ae3701..cc612b0b542 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index d2cebfe38f3..cb260f3f173 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 9895efddb8f..f23e0d86432 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index ae583ede5ee..a025bd3b7d1 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 60b434e5d52..3b8376e7e03 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.7.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 6efd2a57739..06e0344477f 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3a3ccf774b9..98422301622 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.0 + 4.8.0-SNAPSHOT java-driver-test-infra bundle From 56454fa1dd121e708e25921e2280212d4022ce82 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 15:25:18 -0700 Subject: [PATCH 499/979] Prepare changelog for next iteration --- changelog/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index aaf84cc4d84..9b909f2e37e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.8.0 (in progress) + + ### 4.7.0 - [improvement] JAVA-2301: Introduce OSGi tests for the mapper From e9b51112658f1c3f69fe989b6d4e2f99912428d3 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 15:26:37 -0700 Subject: [PATCH 500/979] Exclude test plugins from Nexus staging --- integration-tests/pom.xml | 7 +++++++ osgi-tests/pom.xml | 7 +++++++ 2 files changed, 14 insertions(+) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index cc612b0b542..ad223906201 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -267,6 +267,13 @@ true + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index a025bd3b7d1..140217e319f 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -269,6 +269,13 @@ true + + 
org.sonatype.plugins + nexus-staging-maven-plugin + + true + + From e68bd4f76e48e849f45a98addc1707acfff76419 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 15 Jun 2020 15:48:19 -0700 Subject: [PATCH 501/979] Fix revapi issue with optional dependency --- test-infra/revapi.json | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/test-infra/revapi.json b/test-infra/revapi.json index dd6af30dd8c..cf79d3b87f6 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -130,6 +130,42 @@ "code": "java.missing.newClass", "new": "missing-class com.datastax.oss.simulacron.server.Server", "justification":"Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec", + "justification": "Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec.Builder", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.ClusterSpec.Builder", + "justification": "Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class com.datastax.oss.simulacron.common.cluster.QueryLog", + "new": "missing-class com.datastax.oss.simulacron.common.cluster.QueryLog", + "justification": "Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class com.datastax.oss.simulacron.server.BoundCluster", + "new": "missing-class com.datastax.oss.simulacron.server.BoundCluster", + "justification": "Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class com.datastax.oss.simulacron.server.BoundTopic", + "new": "missing-class com.datastax.oss.simulacron.server.BoundTopic", + "justification": "Dependency was made optional" + }, + { + "code": "java.missing.oldClass", + "old": 
"missing-class com.datastax.oss.simulacron.server.Server", + "new": "missing-class com.datastax.oss.simulacron.server.Server", + "justification": "Dependency was made optional" } ] } From e5ae4d25b3768f0188a7baf3073e1af7df5480fd Mon Sep 17 00:00:00 2001 From: Madhavan Date: Wed, 17 Jun 2020 04:49:01 -0400 Subject: [PATCH 502/979] Rebrand Apollo to Astra (#1455) Co-authored-by: Madhavan Sridharan --- README.md | 6 ++-- .../internal/core/channel/ChannelFactory.java | 5 +++- .../AstraReadCassandraVersion.java} | 28 +++++++++-------- manual/cloud/README.md | 30 +++++++++---------- manual/core/integration/README.md | 2 +- 5 files changed, 38 insertions(+), 33 deletions(-) rename examples/src/main/java/com/datastax/oss/driver/examples/{apollo/ApolloReadCassandraVersion.java => astra/AstraReadCassandraVersion.java} (72%) diff --git a/README.md b/README.md index 1147fde9bde..06d2e68b0eb 100644 --- a/README.md +++ b/README.md @@ -8,13 +8,13 @@ documentation for latest version through [DataStax Docs] or via the release tags [4.7.0](https://github.com/datastax/java-driver/tree/4.7.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and -[DataStax Enterprise] \(4.7+), and [DataStax Apollo], using exclusively Cassandra's binary protocol +[DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol and Cassandra Query Language (CQL) v3. [DataStax Docs]: http://docs.datastax.com/en/developer/java-driver/ [Apache Cassandra®]: http://cassandra.apache.org/ [DataStax Enterprise]: https://www.datastax.com/products/datastax-enterprise -[DataStax Apollo]: https://www.datastax.com/constellation/datastax-apollo +[DataStax Astra]: https://www.datastax.com/products/datastax-astra ## Getting the driver @@ -52,7 +52,7 @@ builder](manual/query_builder/), [mapper](manual/mapper)). 
## Compatibility The driver is compatible with Apache Cassandra® 2.1 and higher, DataStax Enterprise 4.7 and -higher, and DataStax Apollo. +higher, and DataStax Astra. It requires Java 8 or higher. diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 6e5e699393b..c6d91f7636d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -60,7 +60,10 @@ public class ChannelFactory { private static final Logger LOG = LoggerFactory.getLogger(ChannelFactory.class); - /** A value for {@link #productType} that indicates that we are connected to Datastax Cloud. */ + /** + * A value for {@link #productType} that indicates that we are connected to DataStax Cloud. This + * value matches the one defined at DSE DB server side at {@code ProductType.java}. + */ private static final String DATASTAX_CLOUD_PRODUCT_TYPE = "DATASTAX_APOLLO"; private static final AtomicBoolean LOGGED_ORPHAN_WARNING = new AtomicBoolean(); diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java similarity index 72% rename from examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java rename to examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java index 37175caf398..d434665552a 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/apollo/ApolloReadCassandraVersion.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.examples.apollo; +package com.datastax.oss.driver.examples.astra; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ -21,28 +21,30 @@ import java.nio.file.Paths; /** - * Connects to a DataStax Apollo cluster and extracts basic information from it. + * Connects to a DataStax Astra cluster and extracts basic information from it. * *

        Preconditions: * *

          - *
        • A DataStax Apollo cluster is running and accessible. - *
        • A DataStax Apollo secure connect bundle for the running cluster. + *
        • A DataStax Astra cluster is running and accessible. + *
        • A DataStax Astra secure connect bundle for the running cluster. *
        * *

        Side effects: none. * * @see - * Creating an Apollo Database - * @see - * Providing access to Apollo databases - * @see - * Obtaining Apollo secure connect bundle - * @see Java driver online + * href="https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudGettingStarted.html#dscloudCreateCluster"> + * Creating an Astra Database (GCP) + * @see + * Providing access to Astra databases (GCP) + * @see + * Obtaining Astra secure connect bundle (GCP) + * @see Java driver online * manual */ -public class ApolloReadCassandraVersion { +public class AstraReadCassandraVersion { public static void main(String[] args) { @@ -52,7 +54,7 @@ public static void main(String[] args) { CqlSession.builder() // Change the path here to the secure connect bundle location (see javadocs above) .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) - // Change the user_name and password here for the Apollo instance + // Change the user_name and password here for the Astra instance .withAuthCredentials("user_name", "fakePasswordForTests") // Uncomment the next line to use a specific keyspace // .withKeyspace("keyspace_name") diff --git a/manual/cloud/README.md b/manual/cloud/README.md index e006e13deb3..161a8308c73 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -1,16 +1,16 @@ -## Connecting to Apollo (Cloud) +## Connecting to Astra (Cloud) -Using the DataStax Java Driver to connect to a DataStax Apollo database is almost identical to using +Using the DataStax Java Driver to connect to a DataStax Astra database is almost identical to using the driver to connect to any normal Apache Cassandra® database. The only differences are in how the driver is configured in an application and that you will need to obtain a *secure connect bundle*. ### Prerequisites 1. [Download][Download Maven] and [install][Install Maven] Maven. -2. 
Create an Apollo database on [GCP][Create an Apollo database - GCP] or - [AWS][Create an Apollo database - AWS]; alternatively, have a team member provide access to their - Apollo database (instructions for [GCP][Access an Apollo database - GCP] and - [AWS][Access an Apollo database - AWS]) to obtain database connection details. +2. Create an Astra database on [GCP][Create an Astra database - GCP] or + [AWS][Create an Astra database - AWS]; alternatively, have a team member provide access to their + Astra database (instructions for [GCP][Access an Astra database - GCP] and + [AWS][Access an Astra database - AWS]) to obtain database connection details. 3. Download the secure connect bundle (instructions for [GCP][Download the secure connect bundle - GCP] and [AWS][Download the secure connect bundle - AWS]), that contains connection information such as @@ -54,10 +54,10 @@ public class Main { } ``` -The path to the secure connect bundle for your Apollo database is specified with +The path to the secure connect bundle for your Astra database is specified with `withCloudSecureConnectBundle()`. The authentication credentials must be specified separately with `withAuthCredentials()`, and match the username and password that were configured when creating the -Apollo database. +Astra database. Note the following: @@ -108,7 +108,7 @@ public class Main { public static void main(String[] args) { // Create the CqlSession object; it will read the configuration file and pick the right - // values to connect to the Apollo database. + // values to connect to the Astra database. 
try (CqlSession session = CqlSession.builder().build()) { ResultSet rs = session.execute("select release_version from system.local"); @@ -125,11 +125,11 @@ public class Main { [Download Maven]: https://maven.apache.org/download.cgi [Install Maven]: https://maven.apache.org/install.html -[Create an Apollo database - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudGettingStarted.html#dscloudCreateCluster -[Create an Apollo database - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudGettingStarted.html#dscloudCreateCluster -[Access an Apollo database - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudShareClusterDetails.html -[Access an Apollo database - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudShareClusterDetails.html -[Download the secure connect bundle - GCP]: https://helpdocs.datastax.com/gcp/dscloud/apollo/dscloudObtainingCredentials.html -[Download the secure connect bundle - AWS]: https://helpdocs.datastax.com/aws/dscloud/apollo/dscloudObtainingCredentials.html +[Create an Astra database - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudGettingStarted.html#dscloudCreateCluster +[Create an Astra database - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudGettingStarted.html#dscloudCreateCluster +[Access an Astra database - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudShareClusterDetails.html +[Access an Astra database - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudShareClusterDetails.html +[Download the secure connect bundle - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudObtainingCredentials.html +[Download the secure connect bundle - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudObtainingCredentials.html [minimal project structure]: ../core/integration/#minimal-project-structure [driver documentation]: ../core/configuration/ diff --git 
a/manual/core/integration/README.md b/manual/core/integration/README.md index d1b2f42a77d..1f5cfb6df8f 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -449,7 +449,7 @@ If all of these metrics are disabled, you can remove the dependency: [Jackson](https://github.com/FasterXML/jackson) is used: -* when connecting to [Datastax Apollo](../../cloud/); +* when connecting to [Datastax Astra](../../cloud/); * when Insights monitoring is enabled. If you don't use either of those features, you can safely exclude the dependency: From 1427be868bcbb115258e87cccb1cffcf54c82e04 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 18 Jun 2020 09:43:31 +0200 Subject: [PATCH 503/979] JAVA-2818: Remove root path only after merging non-programmatic configs (#1456) --- changelog/README.md | 3 +- ...ProgrammaticDriverConfigLoaderBuilder.java | 39 +++++++++++++++---- ...rammaticDriverConfigLoaderBuilderTest.java | 32 ++++++++++++--- 3 files changed, 60 insertions(+), 14 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 9b909f2e37e..0345e845975 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,9 @@ -### 4.8.0 (in progress) +### 4.7.1 (in progress) +- [bug] JAVA-2818: Remove root path only after merging non-programmatic configs ### 4.7.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java index 802b4080cf9..69f71b61643 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java @@ -37,10 +37,17 @@ public class DefaultProgrammaticDriverConfigLoaderBuilder implements 
ProgrammaticDriverConfigLoaderBuilder { + public static final Supplier DEFAULT_FALLBACK_SUPPLIER = + () -> + ConfigFactory.defaultApplication() + // Do not remove root path here, it must be done after merging configs + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())); + private final NullAllowingImmutableMap.Builder values = NullAllowingImmutableMap.builder(); private final Supplier fallbackSupplier; + private final String rootPath; private String currentProfileName = DriverExecutionProfile.DEFAULT_NAME; @@ -55,7 +62,7 @@ public class DefaultProgrammaticDriverConfigLoaderBuilder * #DefaultProgrammaticDriverConfigLoaderBuilder(ClassLoader)} instead. */ public DefaultProgrammaticDriverConfigLoaderBuilder() { - this(DefaultDriverConfigLoader.DEFAULT_CONFIG_SUPPLIER); + this(DEFAULT_FALLBACK_SUPPLIER, DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); } /** @@ -71,16 +78,24 @@ public DefaultProgrammaticDriverConfigLoaderBuilder(@NonNull ClassLoader appClas this( () -> ConfigFactory.defaultApplication(appClassLoader) - .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) - .getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH)); + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())), + DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); } /** + * Creates an instance of {@link DefaultProgrammaticDriverConfigLoaderBuilder} using a custom + * fallback config supplier. + * * @param fallbackSupplier the supplier that will provide fallback configuration for options that * haven't been specified programmatically. + * @param rootPath the root path used in non-programmatic sources (fallback reference.conf and + * system properties). In most cases it should be {@link + * DefaultDriverConfigLoader#DEFAULT_ROOT_PATH}. Cannot be null but can be empty. 
*/ - public DefaultProgrammaticDriverConfigLoaderBuilder(@NonNull Supplier fallbackSupplier) { + public DefaultProgrammaticDriverConfigLoaderBuilder( + @NonNull Supplier fallbackSupplier, @NonNull String rootPath) { this.fallbackSupplier = fallbackSupplier; + this.rootPath = rootPath; } private ProgrammaticDriverConfigLoaderBuilder with( @@ -92,6 +107,9 @@ private ProgrammaticDriverConfigLoaderBuilder with(@NonNull String path, @Nullab if (!DriverExecutionProfile.DEFAULT_NAME.equals(currentProfileName)) { path = "profiles." + currentProfileName + "." + path; } + if (!rootPath.isEmpty()) { + path = rootPath + "." + path; + } values.put(path, value); return this; } @@ -228,10 +246,15 @@ public DriverConfigLoader build() { () -> { ConfigFactory.invalidateCaches(); Config programmaticConfig = buildConfig(); - return ConfigFactory.defaultOverrides() - .withFallback(programmaticConfig) - .withFallback(fallbackSupplier.get()) - .resolve(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(programmaticConfig) + .withFallback(fallbackSupplier.get()) + .resolve(); + // Only remove rootPath after the merge between system properties + // and fallback configuration, since both are supposed to + // contain the same rootPath prefix. + return rootPath.isEmpty() ? 
config : config.getConfig(rootPath); }); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java index 9d51b8228df..7e2b6041ff8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.internal.core.config.MockOptions; @@ -32,7 +33,7 @@ public class DefaultProgrammaticDriverConfigLoaderBuilderTest { public void should_override_option_in_default_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG)) + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") .withInt(MockOptions.INT1, 3) .build(); DriverConfig config = loader.getInitialConfig(); @@ -44,7 +45,7 @@ public void should_override_option_in_default_profile() { public void should_override_option_in_existing_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG)) + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") .startProfile("profile1") .withInt(MockOptions.INT1, 3) .build(); @@ -57,7 +58,7 @@ public void should_override_option_in_existing_profile() { public void should_override_option_in_new_profile() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> 
ConfigFactory.parseString(FALLBACK_CONFIG)) + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") .startProfile("profile2") .withInt(MockOptions.INT1, 3) .build(); @@ -72,7 +73,7 @@ public void should_override_option_in_new_profile() { public void should_go_back_to_default_profile_when_profile_ends() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG)) + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") .startProfile("profile2") .withInt(MockOptions.INT1, 3) .endProfile() @@ -86,7 +87,7 @@ public void should_go_back_to_default_profile_when_profile_ends() { public void should_handle_multiple_programmatic_profiles() { DriverConfigLoader loader = new DefaultProgrammaticDriverConfigLoaderBuilder( - () -> ConfigFactory.parseString(FALLBACK_CONFIG)) + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") .startProfile("profile2") .withInt(MockOptions.INT1, 3) .startProfile("profile3") @@ -96,4 +97,25 @@ public void should_handle_multiple_programmatic_profiles() { assertThat(config.getProfile("profile2").getInt(MockOptions.INT1)).isEqualTo(3); assertThat(config.getProfile("profile3").getInt(MockOptions.INT1)).isEqualTo(4); } + + @Test + public void should_honor_root_path() { + String rootPath = "test-root"; + String propertyKey = rootPath + "." 
+ DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(); + try { + System.setProperty(propertyKey, "42"); + DriverConfigLoader loader = + new DefaultProgrammaticDriverConfigLoaderBuilder( + DefaultProgrammaticDriverConfigLoaderBuilder.DEFAULT_FALLBACK_SUPPLIER, rootPath) + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 1234) + .build(); + DriverConfig config = loader.getInitialConfig(); + assertThat(config.getDefaultProfile().getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) + .isEqualTo(42); + assertThat(config.getDefaultProfile().getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)) + .isEqualTo(1234); + } finally { + System.clearProperty(propertyKey); + } + } } From 112d0d7cb771e8869a6d3d3f35e645c206b694e2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 18 Jun 2020 11:56:32 +0200 Subject: [PATCH 504/979] Update changelog for 4.7.1 release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 0345e845975..18e4e74e2e0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.7.1 (in progress) +### 4.7.1 - [bug] JAVA-2818: Remove root path only after merging non-programmatic configs From a9f0911da102dfd8298d12cf35a28fd295e663d9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 18 Jun 2020 12:00:38 +0200 Subject: [PATCH 505/979] [maven-release-plugin] prepare release 4.7.1 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index aecef303882..ef467c30958 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 
java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss java-driver-core-shaded - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss java-driver-mapper-processor - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss java-driver-mapper-runtime - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss java-driver-query-builder - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss java-driver-test-infra - 4.8.0-SNAPSHOT + 4.7.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index bd2b2375332..eba4278ae79 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 1769109065c..2bc0a8bc49d 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e43a8ba247b..e43181e5c9a 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1c2d2ba7302..2db72c66b80 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.8.0-SNAPSHOT + 4.7.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ad223906201..1444a9167ec 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index cb260f3f173..39a4d21a140 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f23e0d86432..88363fec459 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 140217e319f..ff0b1a124fa 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 3b8376e7e03..90c0573cc7c 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.7.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 06e0344477f..56a1e487088 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 98422301622..dcf61625d14 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.7.1 java-driver-test-infra bundle From 71cab54f087b24d4cd0643c4bed7da261268433d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 18 Jun 2020 12:00:50 +0200 Subject: [PATCH 506/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index ef467c30958..885dfc896df 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.7.1 + 4.7.2-SNAPSHOT com.datastax.oss diff 
--git a/core-shaded/pom.xml b/core-shaded/pom.xml index eba4278ae79..e1bf10ee1c8 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 2bc0a8bc49d..c3c0e943000 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e43181e5c9a..c6abb1b3744 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 2db72c66b80..b92e9ace214 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.1 + 4.7.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 1444a9167ec..f68df66cbec 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 39a4d21a140..bb7de9fc1bc 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 88363fec459..0892b10b496 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index ff0b1a124fa..7a70d7d0b3d 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 90c0573cc7c..6d3e13aa560 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.7.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 56a1e487088..3346432a225 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index dcf61625d14..e8526a0047f 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.1 + 4.7.2-SNAPSHOT java-driver-test-infra bundle From 278388803a86c9aaafa73639653b985420b440e2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 19 Jun 2020 15:55:26 +0200 Subject: [PATCH 507/979] JAVA-2770: Fix BoundStatementCcmIT --- .../com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index abd24d638aa..f25730350fb 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -280,7 +280,6 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { .getDefaultProfile() // Value doesn't matter, we just want a distinct profile .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(10)); - String mockConfigProfileName = "mockConfigProfileName"; ByteBuffer mockPagingState = Bytes.fromHexString("0xaaaa"); CqlIdentifier mockKeyspace = supportsPerRequestKeyspace(session) ? 
CqlIdentifier.fromCql("system") : null; @@ -297,7 +296,6 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { SimpleStatementBuilder simpleStatementBuilder = SimpleStatement.builder("SELECT release_version FROM system.local") .setExecutionProfile(mockProfile) - .setExecutionProfileName(mockConfigProfileName) .setPagingState(mockPagingState) .setKeyspace(mockKeyspace) .setRoutingKeyspace(mockRoutingKeyspace) @@ -326,7 +324,6 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { BoundStatement boundStatement = createMethod.apply(preparedStatement); assertThat(boundStatement.getExecutionProfile()).isEqualTo(mockProfile); - assertThat(boundStatement.getExecutionProfileName()).isEqualTo(mockConfigProfileName); assertThat(boundStatement.getPagingState()).isEqualTo(mockPagingState); assertThat(boundStatement.getRoutingKeyspace()) .isEqualTo(mockKeyspace != null ? mockKeyspace : mockRoutingKeyspace); From 71a352fae2208389ff3f66b3a8431c20eb5241e4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Jun 2020 14:29:31 +0200 Subject: [PATCH 508/979] Fix error when creating session in Astra example This commit can be reverted once JAVA-2813 is implemented. 
--- .../oss/driver/examples/astra/AstraReadCassandraVersion.java | 2 ++ examples/src/main/resources/application-astra.conf | 1 + 2 files changed, 3 insertions(+) create mode 100644 examples/src/main/resources/application-astra.conf diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java index d434665552a..de8ebb82f03 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.examples.astra; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import java.nio.file.Paths; @@ -56,6 +57,7 @@ public static void main(String[] args) { .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) // Change the user_name and password here for the Astra instance .withAuthCredentials("user_name", "fakePasswordForTests") + .withConfigLoader(DriverConfigLoader.fromClasspath("application-astra")) // Uncomment the next line to use a specific keyspace // .withKeyspace("keyspace_name") .build()) { diff --git a/examples/src/main/resources/application-astra.conf b/examples/src/main/resources/application-astra.conf new file mode 100644 index 00000000000..9963cbaed7b --- /dev/null +++ b/examples/src/main/resources/application-astra.conf @@ -0,0 +1 @@ +datastax-java-driver { } \ No newline at end of file From 997ac2b8b7a7afcceedcef2dab8b6f310160b665 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Jun 2020 14:42:30 +0200 Subject: [PATCH 509/979] JAVA-2821: Can't connect to DataStax Astra using driver 4.7.x (#1457) --- changelog/README.md | 4 ++++ 
.../datastax/oss/driver/api/core/session/SessionBuilder.java | 1 + 2 files changed, 5 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 18e4e74e2e0..c4718dd751d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.7.2 + +- [bug] JAVA-2821: Can't connect to DataStax Astra using driver 4.7.x + ### 4.7.1 - [bug] JAVA-2818: Remove root path only after merging non-programmatic configs diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 2562c89f614..c1672084787 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -676,6 +676,7 @@ protected final CompletionStage buildDefaultSessionAsync() { if (cloudConfig.getAuthProvider().isPresent()) { withAuthProvider(cloudConfig.getAuthProvider().get()); } + programmaticArguments = programmaticArgumentsBuilder.build(); } boolean resolveAddresses = From 94422b2276ee08d2fca218c8fa113358b5a076cb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Jun 2020 15:38:15 +0200 Subject: [PATCH 510/979] [maven-release-plugin] prepare release 4.7.2 --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 885dfc896df..2235f97e704 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss 
java-driver-core-shaded - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss java-driver-mapper-processor - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss java-driver-mapper-runtime - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss java-driver-query-builder - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss java-driver-test-infra - 4.7.2-SNAPSHOT + 4.7.2 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index e1bf10ee1c8..05547eb4fd5 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c3c0e943000..ca2c414b467 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index c6abb1b3744..a5ef1099550 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index b92e9ace214..88bf6365354 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.2-SNAPSHOT + 4.7.2 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index f68df66cbec..519f26f4e1c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index bb7de9fc1bc..900eb450fdd 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 0892b10b496..6113db0a24c 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 7a70d7d0b3d..6062ae080c4 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 6d3e13aa560..21fc38b588d 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.7.2 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 3346432a225..58cb7a63118 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e8526a0047f..18c5692d094 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2-SNAPSHOT + 4.7.2 java-driver-test-infra bundle From 745dba3133ab161e2d53deffef9800acfb631f18 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Jun 2020 15:38:30 +0200 Subject: [PATCH 511/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 14 +++++++------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 12 files changed, 19 insertions(+), 19 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 2235f97e704..489f588398a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.7.2 + 4.7.3-SNAPSHOT com.datastax.oss diff 
--git a/core-shaded/pom.xml b/core-shaded/pom.xml index 05547eb4fd5..40e3cdcd552 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index ca2c414b467..6fb9d1cd7e4 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index a5ef1099550..3bfdb2950c5 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 88bf6365354..6ac5ef172a1 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.2 + 4.7.3-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 519f26f4e1c..037f60c41d7 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 900eb450fdd..0739eb52ccc 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 6113db0a24c..92987ed7a6f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 6062ae080c4..fe2860303ff 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 21fc38b588d..74c2ff433ad 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
@@ -896,7 +896,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.7.2 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 58cb7a63118..7afe4210373 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 18c5692d094..aa2e62316d2 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.2 + 4.7.3-SNAPSHOT java-driver-test-infra bundle From 02bdecee8394e4e34a63cc9eef665735b6c54961 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 22 Jun 2020 14:52:38 -0700 Subject: [PATCH 512/979] Fix version --- bom/pom.xml | 14 +++++++------- changelog/README.md | 3 +++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 13 files changed, 21 insertions(+), 18 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 489f588398a..aecef303882 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-bom pom @@ -31,32 +31,32 @@ com.datastax.oss java-driver-core - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.7.3-SNAPSHOT + 
4.8.0-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index c4718dd751d..ab0b99e9d16 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.8.0 (in progress) + + ### 4.7.2 - [bug] JAVA-2821: Can't connect to DataStax Astra using driver 4.7.x diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 40e3cdcd552..bd2b2375332 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 6fb9d1cd7e4..1769109065c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 3bfdb2950c5..e43a8ba247b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 6ac5ef172a1..1c2d2ba7302 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 037f60c41d7..ad223906201 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 0739eb52ccc..cb260f3f173 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 92987ed7a6f..f23e0d86432 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index fe2860303ff..140217e319f 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 74c2ff433ad..3b8376e7e03 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. 
diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 7afe4210373..06e0344477f 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index aa2e62316d2..98422301622 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.7.3-SNAPSHOT + 4.8.0-SNAPSHOT java-driver-test-infra bundle From 05befa5d709ca24596e4eee2769103e4fe5ac499 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 25 Jun 2020 15:42:56 -0700 Subject: [PATCH 513/979] Improve async paging examples --- manual/core/paging/README.md | 53 ++++++++++++++++++++++++++---------- 1 file changed, 39 insertions(+), 14 deletions(-) diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index d97e58200d4..dd953ab3fb5 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -86,27 +86,50 @@ for (Row row : rs) { In previous versions of the driver, the synchronous and asynchronous APIs returned the same `ResultSet` type. This made asynchronous paging very tricky, because it was very easy to accidentally trigger background synchronous queries (which would defeat the whole purpose of async, -or potentially introduce deadlocks). +and potentially introduce deadlocks). To avoid this problem, the driver's asynchronous API now returns a dedicated [AsyncResultSet]; -iteration only yields the current page, and the next page must be explicitly fetched. Here's the -idiomatic way to process a result set asynchronously: +iteration only yields the current page, and the next page must be fetched explicitly. To iterate a +result set in a fully asynchronous manner, you need to compose page futures using the methods of +[CompletionStage]. 
Here's an example that prints each row on the command line: ```java -CompletionStage futureRs = +CompletionStage resultSetFuture = session.executeAsync("SELECT * FROM myTable WHERE id = 1"); -futureRs.whenComplete(this::processRows); +// The returned stage will complete once all the rows have been printed: +CompletionStage printRowsFuture = resultSetFuture.thenCompose(this::printRows); -void processRows(AsyncResultSet rs, Throwable error) { - if (error != null) { - // The query failed, process the error +private CompletionStage printRows(AsyncResultSet resultSet) { + for (Row row : resultSet.currentPage()) { + System.out.println(row.getFormattedContents()); + } + if (resultSet.hasMorePages()) { + return resultSet.fetchNextPage().thenCompose(this::printRows); + } else { + return CompletableFuture.completedFuture(null); + } +} +``` + +If you need to propagate state throughout the iteration, add parameters to the callback. Here's an +example that counts the number of rows (obviously this is contrived, you would use `SELECT COUNT(*)` +instead of doing this client-side, but it illustrates the basic principle): + +```java +CompletionStage resultSetFuture = + session.executeAsync("SELECT * FROM myTable WHERE id = 1"); +CompletionStage countFuture = resultSetFuture.thenCompose(rs -> countRows(rs, 0)); + +private CompletionStage countRows(AsyncResultSet resultSet, int previousPagesCount) { + int count = previousPagesCount; + for (Row row : resultSet.currentPage()) { + count += 1; + } + if (resultSet.hasMorePages()) { + int finalCount = count; // need a final variable to use in the lambda below + return resultSet.fetchNextPage().thenCompose(rs -> countRows(rs, finalCount)); } else { - for (Row row : rs.currentPage()) { - // Process the row... - } - if (rs.hasMorePages()) { - rs.fetchNextPage().whenComplete(this::processRows); - } + return CompletableFuture.completedFuture(count); } } ``` @@ -211,4 +234,6 @@ and offset paging. 
[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [OffsetPager]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html + [driver examples]: https://github.com/datastax/java-driver/tree/4.x/examples/src/main/java/com/datastax/oss/driver/examples/paging From e1dcf9396cb12d6aef0e9b961043c6ca55b2c882 Mon Sep 17 00:00:00 2001 From: Tom van der Woerdt Date: Wed, 1 Jul 2020 19:00:35 +0200 Subject: [PATCH 514/979] JAVA-2837: make StringCodec strict about unicode in ascii Somewhere between 3.x and 4.x the StringCodec implementation started using String.getBytes(charset), which has the caveat of translating unmappable characters into a charset-dependent replacement character, which for ascii is '?'. In other words, if you were to put unicode data into an ascii field, it will just insert a lot of question marks. With this patch the driver will throw an InvalidArgumentException if that happens. 
--- changelog/README.md | 1 + .../internal/core/type/codec/StringCodec.java | 46 +++++++++++++++-- .../core/type/codec/AsciiCodecTest.java | 50 +++++++++++++++++++ ...tringCodecTest.java => TextCodecTest.java} | 6 +-- 4 files changed, 95 insertions(+), 8 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/AsciiCodecTest.java rename core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/{StringCodecTest.java => TextCodecTest.java} (93%) diff --git a/changelog/README.md b/changelog/README.md index ab0b99e9d16..04995fdfa8b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [bug] JAVA-2837: make StringCodec strict about unicode in ascii ### 4.7.2 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/StringCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/StringCodec.java index bffe3a10fd1..40374dcbf69 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/StringCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/StringCodec.java @@ -20,22 +20,47 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.util.Strings; -import com.datastax.oss.protocol.internal.util.Bytes; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import io.netty.util.concurrent.FastThreadLocal; import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.CharacterCodingException; import java.nio.charset.Charset; +import java.nio.charset.CharsetDecoder; +import java.nio.charset.CharsetEncoder; +import java.nio.charset.CodingErrorAction; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class StringCodec implements TypeCodec { private final DataType cqlType; - private final Charset 
charset; + private final FastThreadLocal charsetEncoder; + private final FastThreadLocal charsetDecoder; public StringCodec(@NonNull DataType cqlType, @NonNull Charset charset) { this.cqlType = cqlType; - this.charset = charset; + charsetEncoder = + new FastThreadLocal() { + @Override + protected CharsetEncoder initialValue() throws Exception { + return charset + .newEncoder() + .onMalformedInput(CodingErrorAction.REPORT) + .onUnmappableCharacter(CodingErrorAction.REPORT); + } + }; + charsetDecoder = + new FastThreadLocal() { + @Override + protected CharsetDecoder initialValue() throws Exception { + return charset + .newDecoder() + .onMalformedInput(CodingErrorAction.REPORT) + .onUnmappableCharacter(CodingErrorAction.REPORT); + } + }; } @NonNull @@ -63,7 +88,14 @@ public boolean accepts(@NonNull Class javaClass) { @Nullable @Override public ByteBuffer encode(@Nullable String value, @NonNull ProtocolVersion protocolVersion) { - return (value == null) ? null : ByteBuffer.wrap(value.getBytes(charset)); + if (value == null) { + return null; + } + try { + return charsetEncoder.get().encode(CharBuffer.wrap(value)); + } catch (CharacterCodingException e) { + throw new IllegalArgumentException(e); + } } @Nullable @@ -74,7 +106,11 @@ public String decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protoc } else if (bytes.remaining() == 0) { return ""; } else { - return new String(Bytes.getArray(bytes), charset); + try { + return charsetDecoder.get().decode(bytes.duplicate()).toString(); + } catch (CharacterCodingException e) { + throw new IllegalArgumentException(e); + } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/AsciiCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/AsciiCodecTest.java new file mode 100644 index 00000000000..fda9d72ea40 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/AsciiCodecTest.java @@ -0,0 +1,50 @@ +/* + * Copyright DataStax, 
Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import org.junit.Test; + +public class AsciiCodecTest extends CodecTestBase { + public AsciiCodecTest() { + this.codec = TypeCodecs.ASCII; + } + + @Test + public void should_encode() { + assertThat(encode("hello")).isEqualTo("0x68656c6c6f"); + assertThat(encode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_encode_non_ascii() { + encode("hëllo"); + } + + @Test + public void should_decode() { + assertThat(decode("0x68656c6c6f")).isEqualTo("hello"); + assertThat(decode("0x")).isEmpty(); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_non_ascii() { + decode("0x68c3ab6c6c6f"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/StringCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TextCodecTest.java similarity index 93% rename from core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/StringCodecTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TextCodecTest.java index 77f33c1ae93..f983534de2a 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/StringCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/TextCodecTest.java @@ -21,10 +21,10 @@ import com.datastax.oss.driver.api.core.type.reflect.GenericType; import org.junit.Test; -public class StringCodecTest extends CodecTestBase { +public class TextCodecTest extends CodecTestBase { - public StringCodecTest() { - // We don't test ASCII, since it only differs by the encoding used + public TextCodecTest() { + // We will test edge cases of ASCII in AsciiCodecTest this.codec = TypeCodecs.TEXT; } From b5d2c1b30f8d928e9e430102fccd054d845ea8cf Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 6 Jul 2020 14:32:48 -0700 Subject: [PATCH 515/979] JAVA-2838: Avoid ConcurrentModificationException when closing connection --- changelog/README.md | 1 + .../core/channel/InFlightHandler.java | 12 ++++- .../core/channel/ProtocolInitHandlerTest.java | 48 +++++++++++++++++++ 3 files changed, 59 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 04995fdfa8b..34c0bc4d1a9 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [bug] JAVA-2838: Avoid ConcurrentModificationException when closing connection - [bug] JAVA-2837: make StringCodec strict about unicode in ascii ### 4.7.2 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index b3d1839fe38..f25b621fc66 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.shaded.guava.common.collect.BiMap; import com.datastax.oss.driver.shaded.guava.common.collect.HashBiMap; 
+import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Query; @@ -39,6 +40,7 @@ import io.netty.util.concurrent.Promise; import java.util.HashMap; import java.util.Map; +import java.util.Set; import net.jcip.annotations.NotThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -357,12 +359,18 @@ private void abortAllInFlight(DriverException cause) { */ private void abortAllInFlight(DriverException cause, ResponseCallback ignore) { if (!inFlight.isEmpty()) { - for (ResponseCallback responseCallback : inFlight.values()) { + + // Create a local copy and clear the map immediately. This prevents + // ConcurrentModificationException if aborting one of the handlers recurses back into this + // method. + Set responseCallbacks = ImmutableSet.copyOf(inFlight.values()); + inFlight.clear(); + + for (ResponseCallback responseCallback : responseCallbacks) { if (responseCallback != ignore) { fail(responseCallback, cause); } } - inFlight.clear(); // It's not necessary to release the stream ids, since we always call this method right before // closing the channel } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java index 03974b3911a..48cabc285ec 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java @@ -16,10 +16,16 @@ package com.datastax.oss.driver.internal.core.channel; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; import static 
org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.InvalidKeyspaceException; @@ -50,8 +56,10 @@ import com.datastax.oss.protocol.internal.response.result.SetKeyspace; import com.datastax.oss.protocol.internal.util.Bytes; import io.netty.channel.ChannelFuture; +import java.io.IOException; import java.net.InetSocketAddress; import java.time.Duration; +import java.util.ConcurrentModificationException; import java.util.List; import java.util.Map; import java.util.Optional; @@ -60,6 +68,7 @@ import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; +import org.slf4j.LoggerFactory; public class ProtocolInitHandlerTest extends ChannelHandlerTestBase { @@ -71,6 +80,7 @@ public class ProtocolInitHandlerTest extends ChannelHandlerTestBase { @Mock private InternalDriverContext internalDriverContext; @Mock private DriverConfig driverConfig; @Mock private DriverExecutionProfile defaultProfile; + @Mock private Appender appender; private ProtocolVersionRegistry protocolVersionRegistry = new DefaultProtocolVersionRegistry("test"); @@ -598,4 +608,42 @@ public void should_fail_to_initialize_if_keyspace_is_invalid() { .isInstanceOf(InvalidKeyspaceException.class) .hasMessage("invalid keyspace")); } + + /** + * This covers a corner case where {@code abortAllInFlight} was recursing into itself, causing a + * {@link ConcurrentModificationException}. This was recoverable but caused Netty to generate a + * warning log. 
+ * + * @see JAVA-2838 + */ + @Test + public void should_fail_pending_requests_only_once_if_init_fails() { + Logger logger = + (Logger) LoggerFactory.getLogger("io.netty.channel.AbstractChannelHandlerContext"); + Level levelBefore = logger.getLevel(); + logger.setLevel(Level.WARN); + logger.addAppender(appender); + + channel + .pipeline() + .addLast( + "init", + new ProtocolInitHandler( + internalDriverContext, + DefaultProtocolVersion.V4, + null, + END_POINT, + DriverChannelOptions.DEFAULT, + heartbeatHandler, + false)); + + ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); + channel.pipeline().fireExceptionCaught(new IOException("Mock I/O exception")); + assertThat(connectFuture).isFailed(); + + verify(appender, never()).doAppend(any(ILoggingEvent.class)); + + logger.detachAppender(appender); + logger.setLevel(levelBefore); + } } From a90dca5e5414aac558aa899bd406df0151e194db Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 8 Jul 2020 11:09:08 -0700 Subject: [PATCH 516/979] Add missing `@NonNull` annotation to UserDefinedType.firstIndexOf --- core/revapi.json | 14 ++++++++++++++ .../oss/driver/api/core/type/UserDefinedType.java | 4 ++-- .../metadata/schema/ShallowUserDefinedType.java | 4 ++-- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/core/revapi.json b/core/revapi.json index 98951ab6cf1..478321cce8e 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -4991,6 +4991,20 @@ "old": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", "new": "class org.apache.tinkerpop.gremlin.process.traversal.util.TraversalExplanation", "justification": "Upgrade to Tinkerpop 3.4.4" + }, + { + "code": "java.annotation.added", + "old": "parameter int com.datastax.oss.driver.api.core.type.UserDefinedType::firstIndexOf(===com.datastax.oss.driver.api.core.CqlIdentifier===)", + "new": "parameter int 
com.datastax.oss.driver.api.core.type.UserDefinedType::firstIndexOf(===com.datastax.oss.driver.api.core.CqlIdentifier===)", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "Add missing `@NonNull` annotation to UserDefinedType.firstIndexOf" + }, + { + "code": "java.annotation.added", + "old": "parameter int com.datastax.oss.driver.api.core.type.UserDefinedType::firstIndexOf(===java.lang.String===)", + "new": "parameter int com.datastax.oss.driver.api.core.type.UserDefinedType::firstIndexOf(===java.lang.String===)", + "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", + "justification": "Add missing `@NonNull` annotation to UserDefinedType.firstIndexOf" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java index b032151cc0e..26132bdeebc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java @@ -39,9 +39,9 @@ public interface UserDefinedType extends DataType, Describable { @NonNull List getFieldNames(); - int firstIndexOf(CqlIdentifier id); + int firstIndexOf(@NonNull CqlIdentifier id); - int firstIndexOf(String name); + int firstIndexOf(@NonNull String name); default boolean contains(@NonNull CqlIdentifier id) { return firstIndexOf(id) >= 0; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java index 44852689a68..a51df10e708 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java @@ -85,13 +85,13 @@ public List getFieldNames() { } @Override - public int 
firstIndexOf(CqlIdentifier id) { + public int firstIndexOf(@NonNull CqlIdentifier id) { throw new UnsupportedOperationException( "This implementation should only be used internally, this is likely a driver bug"); } @Override - public int firstIndexOf(String name) { + public int firstIndexOf(@NonNull String name) { throw new UnsupportedOperationException( "This implementation should only be used internally, this is likely a driver bug"); } From 88617da2d866ac95428dad2d292af91f8d91f72b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 13 Jul 2020 16:35:25 +0200 Subject: [PATCH 517/979] Fix documentation of filter.class, mention two-arg constructor --- core/src/main/resources/reference.conf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index efb86fe983a..7847025a499 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -179,8 +179,8 @@ datastax-java-driver { # A custom filter to include/exclude nodes. # # This option is not required; if present, it must be the fully-qualified name of a class that - # implements `java.util.function.Predicate`, and has a public constructor taking a single - # `DriverContext` argument. + # implements `java.util.function.Predicate`, and has a public constructor taking two + # arguments: the DriverContext and a String representing the profile name. # # Alternatively, you can pass an instance of your filter to # CqlSession.builder().withNodeFilter(). In that case, this option will be ignored. From b2b9bee9152fecfdbefda654ccf578325e2a36d8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 13 Jul 2020 10:05:33 -0700 Subject: [PATCH 518/979] JAVA-2835: Correctly handle unresolved addresses in DefaultEndPoint.equals Motivation: In InitialNodeListRefresh, the driver tries to match contact points Nodes with system.peers rows, in order to refresh their data. 
Because the Nodes's hostIds are not known yet, we match by EndPoint (which for the Nodes are the addresses passed by the user, and for the peers rows are the values in rpc_address). The default TopologyMonitor parses rpc_address into *resolved* InetSocketAddress instances. If one of the contact points was passed as an *unresolved* address, the comparison fails. The contact point is considered removed, and the row gets added as a new node. In addition, this will issue the DC mismatch warning ("some contact points are from a different DC"), because the contact point's datacenter was never filled. Modifications: In DefaultEndPoint.equals, if one of the addresses is unresolved but the other is, resolve the former. Result: The comparison in InitialNodeListRefresh succeeds. The contact point Node matches its system.peers row, and gets refreshed properly. Note that: * if both addresses are unresolved, we don't need to resolve. This can happen if the user has a custom AddressTranslator (or a custom TopologyMonitor) that parses system.peers addresses as unresolved addresses. * this EndPoint comparison only happens during the initial refresh. After that, we know the Nodes' hostIds, so we use that instead (see FullNodeListRefresh). 
--- changelog/README.md | 1 + .../internal/core/metadata/DefaultEndPoint.java | 12 ++++++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 34c0bc4d1a9..441119a6fff 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [bug] JAVA-2835: Correctly handle unresolved addresses in DefaultEndPoint.equals - [bug] JAVA-2838: Avoid ConcurrentModificationException when closing connection - [bug] JAVA-2837: make StringCodec strict about unicode in ascii diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java index d2c3d3f44d0..0f657b9bfcf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPoint.java @@ -44,8 +44,16 @@ public boolean equals(Object other) { if (other == this) { return true; } else if (other instanceof DefaultEndPoint) { - DefaultEndPoint that = (DefaultEndPoint) other; - return this.address.equals(that.address); + InetSocketAddress thisAddress = this.address; + InetSocketAddress thatAddress = ((DefaultEndPoint) other).address; + // If only one of the addresses is unresolved, resolve the other. Otherwise (both resolved or + // both unresolved), compare as-is. 
+ if (thisAddress.isUnresolved() && !thatAddress.isUnresolved()) { + thisAddress = new InetSocketAddress(thisAddress.getHostName(), thisAddress.getPort()); + } else if (thatAddress.isUnresolved() && !thisAddress.isUnresolved()) { + thatAddress = new InetSocketAddress(thatAddress.getHostName(), thatAddress.getPort()); + } + return thisAddress.equals(thatAddress); } else { return false; } From 66b6dcd418cb67480fd527874c7e084f2b120801 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 25 Jun 2020 19:00:11 -0700 Subject: [PATCH 519/979] JAVA-2828: Add safe paging state wrapper --- changelog/README.md | 1 + .../driver/api/core/cql/ExecutionInfo.java | 33 ++- .../oss/driver/api/core/cql/PagingState.java | 80 +++++++ .../oss/driver/api/core/cql/Statement.java | 44 ++++ .../driver/internal/core/cql/Conversions.java | 31 +-- .../core/cql/DefaultExecutionInfo.java | 15 ++ .../internal/core/cql/DefaultPagingState.java | 160 ++++++++++++++ .../internal/core/data/ValuesHelper.java | 17 ++ .../oss/driver/internal/core/util/Sizes.java | 8 +- .../oss/driver/core/cql/PagingStateIT.java | 196 ++++++++++++++++++ manual/core/paging/README.md | 26 +++ 11 files changed, 583 insertions(+), 28 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPagingState.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java diff --git a/changelog/README.md b/changelog/README.md index 441119a6fff..459d65f85bd 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] JAVA-2828: Add safe paging state wrapper - [bug] JAVA-2835: Correctly handle unresolved addresses in DefaultEndPoint.equals - [bug] JAVA-2838: Avoid ConcurrentModificationException when closing connection - [bug] JAVA-2837: make StringCodec strict about unicode in ascii diff --git 
a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java index 5ba12decbdf..239851f2731 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ExecutionInfo.java @@ -19,12 +19,14 @@ import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; +import com.datastax.oss.driver.internal.core.cql.DefaultPagingState; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import edu.umd.cs.findbugs.annotations.NonNull; @@ -110,16 +112,45 @@ default Request getRequest() { List> getErrors(); /** - * The paging state of the query. + * The paging state of the query, in its raw form. * *

        This represents the next page to be fetched if this query has multiple page of results. It * can be saved and reused later on the same statement. * + *

        Note that this is the equivalent of driver 3's {@code getPagingStateUnsafe()}. If you're + * looking for the method that returns a {@link PagingState}, use {@link #getSafePagingState()}. + * * @return the paging state, or {@code null} if there is no next page. */ @Nullable ByteBuffer getPagingState(); + /** + * The paging state of the query, in a safe wrapper that checks if it's reused on the right + * statement. + * + *

        This represents the next page to be fetched if this query has multiple page of results. It + * can be saved and reused later on the same statement. + * + * @return the paging state, or {@code null} if there is no next page. + */ + @Nullable + default PagingState getSafePagingState() { + // Default implementation for backward compatibility, but we override it in the concrete class, + // because it knows the attachment point. + ByteBuffer rawPagingState = getPagingState(); + if (rawPagingState == null) { + return null; + } else { + Request request = getRequest(); + if (!(request instanceof Statement)) { + throw new IllegalStateException("Only statements should have a paging state"); + } + Statement statement = (Statement) request; + return new DefaultPagingState(rawPagingState, statement, AttachmentPoint.NONE); + } + } + /** * The server-side warnings for this query, if any (otherwise the list will be empty). * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java new file mode 100644 index 00000000000..f490b23ddbe --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java @@ -0,0 +1,80 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.cql; + +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.cql.DefaultPagingState; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; + +/** + * A safe wrapper around the paging state of a query. + * + *

        This class performs additional checks to fail fast if the paging state is not reused on the + * same query, and it provides utility methods for conversion to/from strings and byte arrays. + * + *

        The serialized form returned by {@link #toBytes()} and {@link #toString()} is an opaque + * sequence of bytes. Note however that it is not cryptographically secure: the contents are + * not encrypted and the checks are performed with a simple MD5 checksum. If you need stronger + * guarantees, you should build your own wrapper around {@link ExecutionInfo#getPagingState()}. + */ +public interface PagingState { + + /** Parses an instance from a string previously generated with {@code toString()}. */ + @NonNull + static PagingState fromString(@NonNull String string) { + return DefaultPagingState.fromString(string); + } + + /** Parses an instance from a byte array previously generated with {@link #toBytes()}. */ + @NonNull + static PagingState fromBytes(byte[] bytes) { + return DefaultPagingState.fromBytes(bytes); + } + + /** Returns a representation of this object as a byte array. */ + byte[] toBytes(); + + /** + * Checks if this paging state can be safely reused for the given statement. Specifically, the + * query string and any bound values must match. + * + *

        Note that, if {@code statement} is a {@link SimpleStatement} with bound values, those values + * must be encoded in order to perform the check. This method uses the default codec registry and + * default protocol version. This might fail if you use custom codecs; in that case, use {@link + * #matches(Statement, Session)} instead. + * + *

        If {@code statement} is a {@link BoundStatement}, it is always safe to call this method. + */ + default boolean matches(@NonNull Statement statement) { + return matches(statement, null); + } + + /** + * Alternative to {@link #matches(Statement)} that specifies the session the statement will be + * executed with. You only need this for simple statements, and if you use custom codecs. + * Bound statements already know which session they are attached to. + */ + boolean matches(@NonNull Statement statement, @Nullable Session session); + + /** + * Returns the underlying "unsafe" paging state (the equivalent of {@link + * ExecutionInfo#getPagingState()}). + */ + @NonNull + ByteBuffer getRawPagingState(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index f890870327c..246260d5b35 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -302,6 +302,50 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { @CheckReturnValue SelfT setPagingState(@Nullable ByteBuffer newPagingState); + /** + * Sets the paging state to send with the statement, or {@code null} if this statement has no + * paging state. + * + *

        This variant uses the "safe" paging state wrapper, it will throw immediately if the + * statement doesn't match the one that the state was initially extracted from (same query string, + * same parameters). The advantage is that it fails fast, instead of waiting for an error response + * from the server. + * + *

        Note that, if this statement is a {@link SimpleStatement} with bound values, those values + * must be encoded in order to perform the check. This method uses the default codec registry and + * default protocol version. This might fail if you use custom codecs; in that case, use {@link + * #setPagingState(PagingState, Session)} instead. + * + * @throws IllegalArgumentException if the given state does not match this statement. + * @see #setPagingState(ByteBuffer) + * @see ExecutionInfo#getSafePagingState() + */ + @NonNull + @CheckReturnValue + default SelfT setPagingState(@Nullable PagingState newPagingState) { + return setPagingState(newPagingState, null); + } + + /** + * Alternative to {@link #setPagingState(PagingState)} that specifies the session the statement + * will be executed with. You only need this for simple statements, and if you use custom + * codecs. Bound statements already know which session they are attached to. + */ + @NonNull + @CheckReturnValue + default SelfT setPagingState(@Nullable PagingState newPagingState, @Nullable Session session) { + if (newPagingState == null) { + return setPagingState((ByteBuffer) null); + } else if (newPagingState.matches(this, session)) { + return setPagingState(newPagingState.getRawPagingState()); + } else { + throw new IllegalArgumentException( + "Paging state mismatch, " + + "this means that either the paging state contents were altered, " + + "or you're trying to apply it to a different statement"); + } + } + /** * Returns the page size to use for the statement. 
* diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 81b7dd0f0c4..2dc6e5ddc46 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -37,7 +37,6 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; -import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.servererrors.AlreadyExistsException; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; @@ -56,15 +55,12 @@ import com.datastax.oss.driver.api.core.servererrors.WriteFailureException; import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedToken; -import com.datastax.oss.driver.internal.core.metadata.token.Murmur3Token; -import com.datastax.oss.driver.internal.core.metadata.token.RandomToken; +import com.datastax.oss.driver.internal.core.data.ValuesHelper; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import 
com.datastax.oss.driver.shaded.guava.common.primitives.Ints; @@ -260,7 +256,10 @@ public static List encode( ByteBuffer[] encodedValues = new ByteBuffer[values.size()]; int i = 0; for (Object value : values) { - encodedValues[i++] = (value == null) ? null : encode(value, codecRegistry, protocolVersion); + encodedValues[i++] = + (value == null) + ? null + : ValuesHelper.encodeToDefaultCqlMapping(value, codecRegistry, protocolVersion); } return NullAllowingImmutableList.of(encodedValues); } @@ -281,30 +280,14 @@ public static Map encode( } else { encodedValues.put( entry.getKey().asInternal(), - encode(entry.getValue(), codecRegistry, protocolVersion)); + ValuesHelper.encodeToDefaultCqlMapping( + entry.getValue(), codecRegistry, protocolVersion)); } } return encodedValues.build(); } } - public static ByteBuffer encode( - Object value, CodecRegistry codecRegistry, ProtocolVersion protocolVersion) { - if (value instanceof Token) { - if (value instanceof Murmur3Token) { - return TypeCodecs.BIGINT.encode(((Murmur3Token) value).getValue(), protocolVersion); - } else if (value instanceof ByteOrderedToken) { - return TypeCodecs.BLOB.encode(((ByteOrderedToken) value).getValue(), protocolVersion); - } else if (value instanceof RandomToken) { - return TypeCodecs.VARINT.encode(((RandomToken) value).getValue(), protocolVersion); - } else { - throw new IllegalArgumentException("Unsupported token type " + value.getClass()); - } - } else { - return codecRegistry.codecFor(value).encode(value, protocolVersion); - } - } - public static void ensureAllSet(BoundStatement boundStatement) { for (int i = 0; i < boundStatement.size(); i++) { if (!boundStatement.isSet(i)) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java index 0d3bd973be3..08ac81154ff 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultExecutionInfo.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.PagingState; import com.datastax.oss.driver.api.core.cql.QueryTrace; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; @@ -129,6 +130,20 @@ public ByteBuffer getPagingState() { return pagingState; } + @Nullable + @Override + public PagingState getSafePagingState() { + if (pagingState == null) { + return null; + } else { + if (!(request instanceof Statement)) { + throw new IllegalStateException("Only statements should have a paging state"); + } + Statement statement = (Statement) request; + return new DefaultPagingState(pagingState, statement, session.getContext()); + } + } + @NonNull @Override public List getWarnings() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPagingState.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPagingState.java new file mode 100644 index 00000000000..f1c9a3fb9b3 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultPagingState.java @@ -0,0 +1,160 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.cql; + +import com.datastax.oss.driver.api.core.cql.BatchStatement; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PagingState; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.data.ValuesHelper; +import com.datastax.oss.protocol.internal.util.Bytes; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; + +public class DefaultPagingState implements PagingState { + + private final ByteBuffer rawPagingState; + private final byte[] hash; + private final int protocolVersion; + + public DefaultPagingState( + ByteBuffer rawPagingState, Statement statement, AttachmentPoint attachmentPoint) { + this( + rawPagingState, + hash(statement, rawPagingState, attachmentPoint), + attachmentPoint.getProtocolVersion().getCode()); + } + + private DefaultPagingState(ByteBuffer rawPagingState, byte[] hash, int protocolVersion) { + this.rawPagingState = rawPagingState; + this.hash = hash; + this.protocolVersion = protocolVersion; + } + + // Same serialized form as in driver 3: + // size of raw state|size of hash|raw state|hash|protocol version + // + // The protocol version might be absent, in which case it defaults to V2 (this is for backward + // compatibility with 2.0.10 where it is always absent). 
+ public static DefaultPagingState fromBytes(byte[] bytes) { + ByteBuffer buffer = ByteBuffer.wrap(bytes); + short rawPagingStateLength = buffer.getShort(); + short hashLength = buffer.getShort(); + int length = rawPagingStateLength + hashLength + 2; + int legacyLength = rawPagingStateLength + hashLength; // without protocol version + if (buffer.remaining() != length && buffer.remaining() != legacyLength) { + throw new IllegalArgumentException( + "Cannot deserialize paging state, invalid format. The serialized form was corrupted, " + + "or not initially generated from a PagingState object."); + } + byte[] rawPagingState = new byte[rawPagingStateLength]; + buffer.get(rawPagingState); + byte[] hash = new byte[hashLength]; + buffer.get(hash); + int protocolVersion = buffer.hasRemaining() ? buffer.getShort() : 2; + return new DefaultPagingState(ByteBuffer.wrap(rawPagingState), hash, protocolVersion); + } + + @Override + public byte[] toBytes() { + ByteBuffer buffer = ByteBuffer.allocate(rawPagingState.remaining() + hash.length + 6); + buffer.putShort((short) rawPagingState.remaining()); + buffer.putShort((short) hash.length); + buffer.put(rawPagingState.duplicate()); + buffer.put(hash); + buffer.putShort((short) protocolVersion); + buffer.rewind(); + return buffer.array(); + } + + public static DefaultPagingState fromString(String string) { + byte[] bytes = Bytes.getArray(Bytes.fromHexString("0x" + string)); + return fromBytes(bytes); + } + + @Override + public String toString() { + return Bytes.toHexString(toBytes()).substring(2); // remove "0x" prefix + } + + @Override + public boolean matches(@NonNull Statement statement, @Nullable Session session) { + AttachmentPoint attachmentPoint = + (session == null) ? 
AttachmentPoint.NONE : session.getContext(); + byte[] actual = hash(statement, rawPagingState, attachmentPoint); + return Arrays.equals(actual, hash); + } + + @NonNull + @Override + public ByteBuffer getRawPagingState() { + return rawPagingState; + } + + // Hashes a statement's query string and parameters. We also include the paging state itself in + // the hash computation, to make the serialized form a bit more resistant to manual tampering. + private static byte[] hash( + @NonNull Statement statement, + ByteBuffer rawPagingState, + @NonNull AttachmentPoint attachmentPoint) { + // Batch statements don't have paging, the driver should never call this method for one + assert !(statement instanceof BatchStatement); + + MessageDigest messageDigest; + try { + messageDigest = MessageDigest.getInstance("MD5"); + } catch (NoSuchAlgorithmException e) { + throw new IllegalStateException( + "It looks like this JVM doesn't support MD5 digests, " + + "can't use the rich paging state feature", + e); + } + if (statement instanceof BoundStatement) { + BoundStatement boundStatement = (BoundStatement) statement; + String queryString = boundStatement.getPreparedStatement().getQuery(); + messageDigest.update(queryString.getBytes(Charset.defaultCharset())); + for (ByteBuffer value : boundStatement.getValues()) { + messageDigest.update(value.duplicate()); + } + } else { + SimpleStatement simpleStatement = (SimpleStatement) statement; + String queryString = simpleStatement.getQuery(); + messageDigest.update(queryString.getBytes(Charset.defaultCharset())); + for (Object value : simpleStatement.getPositionalValues()) { + ByteBuffer encodedValue = + ValuesHelper.encodeToDefaultCqlMapping( + value, attachmentPoint.getCodecRegistry(), attachmentPoint.getProtocolVersion()); + messageDigest.update(encodedValue); + } + for (Object value : simpleStatement.getNamedValues().values()) { + ByteBuffer encodedValue = + ValuesHelper.encodeToDefaultCqlMapping( + value, 
attachmentPoint.getCodecRegistry(), attachmentPoint.getProtocolVersion()); + messageDigest.update(encodedValue); + } + } + messageDigest.update(rawPagingState.duplicate()); + return messageDigest.digest(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/ValuesHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/ValuesHelper.java index e33068621d0..f4556559c7b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/ValuesHelper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/ValuesHelper.java @@ -119,4 +119,21 @@ public static ByteBuffer[] encodePreparedValues( } return encodedValues; } + + public static ByteBuffer encodeToDefaultCqlMapping( + Object value, CodecRegistry codecRegistry, ProtocolVersion protocolVersion) { + if (value instanceof Token) { + if (value instanceof Murmur3Token) { + return TypeCodecs.BIGINT.encode(((Murmur3Token) value).getValue(), protocolVersion); + } else if (value instanceof ByteOrderedToken) { + return TypeCodecs.BLOB.encode(((ByteOrderedToken) value).getValue(), protocolVersion); + } else if (value instanceof RandomToken) { + return TypeCodecs.VARINT.encode(((RandomToken) value).getValue(), protocolVersion); + } else { + throw new IllegalArgumentException("Unsupported token type " + value.getClass()); + } + } else { + return codecRegistry.codecFor(value).encode(value, protocolVersion); + } + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java index eb7ebedc6be..fc0ebefa219 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java @@ -24,7 +24,7 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.session.Request; import 
com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; -import com.datastax.oss.driver.internal.core.cql.Conversions; +import com.datastax.oss.driver.internal.core.data.ValuesHelper; import com.datastax.oss.protocol.internal.FrameCodec; import com.datastax.oss.protocol.internal.PrimitiveSizes; import com.datastax.oss.protocol.internal.request.query.QueryOptions; @@ -83,7 +83,8 @@ public static int sizeOfSimpleStatementValues( List positionalValues = new ArrayList<>(simpleStatement.getPositionalValues().size()); for (Object value : simpleStatement.getPositionalValues()) { - positionalValues.add(Conversions.encode(value, codecRegistry, protocolVersion)); + positionalValues.add( + ValuesHelper.encodeToDefaultCqlMapping(value, codecRegistry, protocolVersion)); } size += Values.sizeOfPositionalValues(positionalValues); @@ -94,7 +95,8 @@ public static int sizeOfSimpleStatementValues( for (Map.Entry value : simpleStatement.getNamedValues().entrySet()) { namedValues.put( value.getKey().asInternal(), - Conversions.encode(value.getValue(), codecRegistry, protocolVersion)); + ValuesHelper.encodeToDefaultCqlMapping( + value.getValue(), codecRegistry, protocolVersion)); } size += Values.sizeOfNamedValues(namedValues); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java new file mode 100644 index 00000000000..ca0b8c34bc0 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java @@ -0,0 +1,196 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PagingState; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.type.codec.IntCodec; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.function.UnaryOperator; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.ExpectedException; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class PagingStateIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); 
+ + @ClassRule public static TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @Rule public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setupSchema() { + CqlSession session = SESSION_RULE.session(); + session.execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS foo (k int, cc int, v int, PRIMARY KEY(k, cc))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + for (int i = 0; i < 20; i++) { + session.execute( + SimpleStatement.newInstance("INSERT INTO foo (k, cc, v) VALUES (1, ?, ?)", i, i)); + } + } + + @Test + public void should_extract_and_reuse() { + should_extract_and_reuse(UnaryOperator.identity()); + } + + @Test + public void should_convert_to_bytes() { + should_extract_and_reuse(pagingState -> PagingState.fromBytes(pagingState.toBytes())); + } + + @Test + public void should_convert_to_string() { + should_extract_and_reuse(pagingState -> PagingState.fromString(pagingState.toString())); + } + + private void should_extract_and_reuse(UnaryOperator transformation) { + CqlSession session = SESSION_RULE.session(); + + BoundStatement boundStatement = + session + .prepare(SimpleStatement.newInstance("SELECT * FROM foo WHERE k = ?").setPageSize(15)) + .bind(1); + + ResultSet resultSet = session.execute(boundStatement); + assertThat(resultSet.getAvailableWithoutFetching()).isEqualTo(15); + assertThat(resultSet.isFullyFetched()).isFalse(); + + PagingState pagingState = + transformation.apply(resultSet.getExecutionInfo().getSafePagingState()); + + assertThat(pagingState.matches(boundStatement)).isTrue(); + resultSet = session.execute(boundStatement.setPagingState(pagingState)); + assertThat(resultSet.getAvailableWithoutFetching()).isEqualTo(5); + assertThat(resultSet.isFullyFetched()).isTrue(); + } + + @Test + public void should_inject_in_simple_statement_with_custom_codecs() { + try (CqlSession session = + (CqlSession) + SessionUtils.baseBuilder() + .addTypeCodecs(new 
IntWrapperCodec()) + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withKeyspace(SESSION_RULE.keyspace()) + .build()) { + + SimpleStatement statement = + SimpleStatement.newInstance("SELECT * FROM foo WHERE k = ?", new IntWrapper(1)) + .setPageSize(15); + + ResultSet resultSet = session.execute(statement); + assertThat(resultSet.getAvailableWithoutFetching()).isEqualTo(15); + assertThat(resultSet.isFullyFetched()).isFalse(); + + PagingState pagingState = resultSet.getExecutionInfo().getSafePagingState(); + + // This is the case where we need the session: simple statements are not attached, so + // setPagingState() cannot find the custom codec. + try { + @SuppressWarnings("unused") + SimpleStatement ignored = statement.setPagingState(pagingState); + fail("Expected a CodecNotFoundException"); + } catch (CodecNotFoundException e) { + // expected + } + + resultSet = session.execute(statement.setPagingState(pagingState, session)); + assertThat(resultSet.getAvailableWithoutFetching()).isEqualTo(5); + assertThat(resultSet.isFullyFetched()).isTrue(); + } + } + + @Test + public void should_fail_if_query_does_not_match() { + should_fail("SELECT * FROM foo WHERE k = ?", 1, "SELECT v FROM FOO WHERE k = ?", 1); + } + + @Test + public void should_fail_if_values_do_not_match() { + should_fail("SELECT * FROM foo WHERE k = ?", 1, "SELECT * FROM foo WHERE k = ?", 2); + } + + private void should_fail(String query1, int value1, String query2, int value2) { + CqlSession session = SESSION_RULE.session(); + + BoundStatement boundStatement1 = + session.prepare(SimpleStatement.newInstance(query1).setPageSize(15)).bind(value1); + + ResultSet resultSet = session.execute(boundStatement1); + PagingState pagingState = resultSet.getExecutionInfo().getSafePagingState(); + + thrown.expect(IllegalArgumentException.class); + @SuppressWarnings("unused") + BoundStatement ignored = + session + .prepare(SimpleStatement.newInstance(query2).setPageSize(15)) + .bind(value2) + 
.setPagingState(pagingState); + } + + static class IntWrapper { + final int value; + + public IntWrapper(int value) { + this.value = value; + } + } + + static class IntWrapperCodec extends MappingCodec { + + protected IntWrapperCodec() { + super(new IntCodec(), GenericType.of(IntWrapper.class)); + } + + @Nullable + @Override + protected IntWrapper innerToOuter(@Nullable Integer value) { + return value == null ? null : new IntWrapper(value); + } + + @Nullable + @Override + protected Integer outerToInner(@Nullable IntWrapper wrapper) { + return wrapper == null ? null : wrapper.value; + } + } +} diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index dd953ab3fb5..b21794f7f15 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -168,6 +168,31 @@ The paging state can only be reused with the exact same statement (same query st parameters). It is an opaque value that is only meant to be collected, stored and re-used. If you try to modify its contents or reuse it with a different statement, the results are unpredictable. +If you want additional safety, the driver also provides a "safe" wrapper around the raw value: +[PagingState]. + +```java +PagingState pagingState = rs.getExecutionInfo().getSafePagingState(); +``` + +It works in the exact same manner, except that it will throw an `IllegalStateException` if you try +to reinject it in the wrong statement. This allows you to detect the error early, without a +roundtrip to the server. + +Note that, if you use a simple statement and one of the bound values requires a [custom +codec](../custom_codecs), you have to provide a reference to the session when reinjecting the paging +state: + +```java +CustomType value = ... 
+SimpleStatement statement = SimpleStatement.newInstance("query", value); +// session required here, otherwise you will get a CodecNotFoundException: +statement = statement.setPagingState(pagingState, session); +``` + +This is a small corner case because checking the state requires encoding the values, and a simple +statement doesn't have a reference to the codec registry. If you don't use custom codecs, or if the +statement is a bound statement, you can use the regular `setPagingState(pagingState)`. ### Offset queries @@ -233,6 +258,7 @@ and offset paging. [AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- [AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- [OffsetPager]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html From 7754f5ad2abadd44023ea4c1329b9a3a93dd71bc Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 15 Jul 2020 15:42:47 -0700 Subject: [PATCH 520/979] JAVA-2844: Mention ProgrammaticSslEngineFactory in the manual --- manual/core/ssl/README.md | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index a5c26249af6..6bb16a127ca 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -169,7 +169,23 @@ CqlSession session = CqlSession.builder() .build(); ``` -There is also a convenience shortcut if you just want to use an existing `javax.net.ssl.SSLContext`: +If you are reusing code that configures SSL programmatically, you can use +[ProgrammaticSslEngineFactory] as an easy way to 
wrap that into a factory instance: + +```java +SSLContext sslContext = ... +String[] cipherSuites = ... +boolean requireHostNameValidation = ... +CqlSession session = + CqlSession.builder() + .withSslEngineFactory( + new ProgrammaticSslEngineFactory( + sslContext, cipherSuites, requireHostNameValidation)) + .build(); +``` + +Finally, there is a convenient shortcut on the session builder if you just need to pass an +`SSLContext`: ```java SSLContext sslContext = ... @@ -190,3 +206,4 @@ the box, but with a bit of custom development it is fairly easy to add. See [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization [SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- [SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html From cd463d838bf0e5c7fbdfbd363a0b6c243b1be07a Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 15 Jul 2020 16:01:24 -0700 Subject: [PATCH 521/979] Fix javadoc error --- .../java/com/datastax/oss/driver/api/core/cql/PagingState.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java index f490b23ddbe..dd5a16db167 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/PagingState.java @@ -27,7 +27,7 @@ *

        This class performs additional checks to fail fast if the paging state is not reused on the * same query, and it provides utility methods for conversion to/from strings and byte arrays. * - *

        The serialized form returned by {@link #toBytes()} and {@link #toString()} is an opaque + *

        The serialized form returned by {@link #toBytes()} and {@link Object#toString()} is an opaque * sequence of bytes. Note however that it is not cryptographically secure: the contents are * not encrypted and the checks are performed with a simple MD5 checksum. If you need stronger * guarantees, you should build your own wrapper around {@link ExecutionInfo#getPagingState()}. From dd48ad7ab5444bac49a7023b6313fce4c22d973b Mon Sep 17 00:00:00 2001 From: Timothy George Lamballais Tessensohn Date: Tue, 14 Jul 2020 14:24:44 +0200 Subject: [PATCH 522/979] Allow injection of CodecRegistry on session builder There are cases where one would want to specify a custom CodecRegistry, this commit adds the option for users to supply their own CodecRegistry while building a Session. --- changelog/README.md | 1 + .../core/session/ProgrammaticArguments.java | 21 +++++++++++++++++-- .../api/core/session/SessionBuilder.java | 13 ++++++++++++ .../core/context/DefaultDriverContext.java | 12 ++++++----- 4 files changed, 40 insertions(+), 7 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 459d65f85bd..08fbc99722f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] Allow injection of CodecRegistry on session builder - [improvement] JAVA-2828: Add safe paging state wrapper - [bug] JAVA-2835: Correctly handle unresolved addresses in DefaultEndPoint.equals - [bug] JAVA-2838: Avoid ConcurrentModificationException when closing connection diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 19c91ec4140..c0c816dfdbc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -22,6 +22,7 @@ import 
com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; @@ -58,6 +59,7 @@ public static Builder builder() { private final UUID startupClientId; private final String startupApplicationName; private final String startupApplicationVersion; + private final MutableCodecRegistry codecRegistry; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -72,7 +74,8 @@ private ProgrammaticArguments( @Nullable InetSocketAddress cloudProxyAddress, @Nullable UUID startupClientId, @Nullable String startupApplicationName, - @Nullable String startupApplicationVersion) { + @Nullable String startupApplicationVersion, + @Nullable MutableCodecRegistry codecRegistry) { this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; @@ -87,6 +90,7 @@ private ProgrammaticArguments( this.startupClientId = startupClientId; this.startupApplicationName = startupApplicationName; this.startupApplicationVersion = startupApplicationVersion; + this.codecRegistry = codecRegistry; } @NonNull @@ -154,6 +158,11 @@ public String getStartupApplicationVersion() { return startupApplicationVersion; } + @Nullable + public MutableCodecRegistry getCodecRegistry() { + return codecRegistry; + } + public static class Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -170,6 +179,7 @@ public static class Builder { private UUID startupClientId; private String startupApplicationName; private String startupApplicationVersion; + private MutableCodecRegistry codecRegistry; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... 
typeCodecs) { @@ -267,6 +277,12 @@ public Builder withStartupApplicationVersion(@Nullable String startupApplication return this; } + @NonNull + public Builder withCodecRegistry(@Nullable MutableCodecRegistry codecRegistry) { + this.codecRegistry = codecRegistry; + return this; + } + @NonNull public ProgrammaticArguments build() { return new ProgrammaticArguments( @@ -282,7 +298,8 @@ public ProgrammaticArguments build() { cloudProxyAddress, startupClientId, startupApplicationName, - startupApplicationVersion); + startupApplicationVersion, + codecRegistry); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index c1672084787..9468ec2d4f5 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -32,6 +32,7 @@ import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.ContactPoints; import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; @@ -488,6 +489,18 @@ public SelfT withCloudSecureConnectBundle(@NonNull Path cloudConfigPath) { return self; } + /** + * Registers a CodecRegistry to use for the session. + * + *

        When both this and {@link #addTypeCodecs(TypeCodec[])} are called, the added type codecs + * will be registered on the provided CodecRegistry. + */ + @NonNull + public SelfT withCodecRegistry(@Nullable MutableCodecRegistry codecRegistry) { + this.programmaticArgumentsBuilder.withCodecRegistry(codecRegistry); + return self; + } + /** * Configures this SessionBuilder for Cloud deployments by retrieving connection information from * the provided {@link URL}. diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 9ef91ea94cf..950dd0a4414 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -251,8 +251,7 @@ public DefaultDriverContext( this.sessionName = "s" + SESSION_NAME_COUNTER.getAndIncrement(); } this.localDatacentersFromBuilder = programmaticArguments.getLocalDatacenters(); - this.codecRegistry = - buildCodecRegistry(this.sessionName, programmaticArguments.getTypeCodecs()); + this.codecRegistry = buildCodecRegistry(programmaticArguments); this.nodeStateListenerFromBuilder = programmaticArguments.getNodeStateListener(); this.nodeStateListenerRef = new LazyReference<>( @@ -563,9 +562,12 @@ protected RequestProcessorRegistry buildRequestProcessorRegistry() { return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); } - protected CodecRegistry buildCodecRegistry(String logPrefix, List> codecs) { - MutableCodecRegistry registry = new DefaultCodecRegistry(logPrefix); - registry.register(codecs); + protected CodecRegistry buildCodecRegistry(ProgrammaticArguments arguments) { + MutableCodecRegistry registry = arguments.getCodecRegistry(); + if (registry == null) { + registry = new DefaultCodecRegistry(this.sessionName); + } + 
registry.register(arguments.getTypeCodecs()); registry.register(DseTypeCodecs.DATE_RANGE); if (DependencyCheck.ESRI.isPresent()) { registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); From 842415ba240c5b5b31bea8ba022b367388ef5243 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 18 Mar 2020 23:12:49 +0100 Subject: [PATCH 523/979] JAVA-2691: Provide driver 4 support for extra codecs --- changelog/README.md | 1 + core/revapi.json | 25 + .../api/core/type/codec/ExtraTypeCodecs.java | 482 ++++++++++++++++++ .../api/core/type/codec/TypeCodecs.java | 143 ++++-- .../api/core/type/reflect/GenericType.java | 32 ++ .../internal/core/type/codec/BlobCodec.java | 6 + .../core/type/codec/SimpleBlobCodec.java | 65 +++ .../core/type/codec/TimestampCodec.java | 2 +- .../core/type/codec/extras/OptionalCodec.java | 70 +++ .../array/AbstractListToArrayCodec.java | 198 +++++++ .../AbstractPrimitiveListToArrayCodec.java | 127 +++++ .../extras/array/BooleanListToArrayCodec.java | 95 ++++ .../extras/array/ByteListToArrayCodec.java | 95 ++++ .../extras/array/DoubleListToArrayCodec.java | 91 ++++ .../extras/array/FloatListToArrayCodec.java | 91 ++++ .../extras/array/IntListToArrayCodec.java | 91 ++++ .../extras/array/LongListToArrayCodec.java | 91 ++++ .../extras/array/ObjectListToArrayCodec.java | 146 ++++++ .../extras/array/ShortListToArrayCodec.java | 90 ++++ .../codec/extras/enums/EnumNameCodec.java | 58 +++ .../codec/extras/enums/EnumOrdinalCodec.java | 58 +++ .../type/codec/extras/json/JsonCodec.java | 95 ++-- .../extras/time/LocalTimestampCodec.java | 82 +++ .../time/PersistentZonedTimestampCodec.java | 104 ++++ .../extras/time/TimestampMillisCodec.java | 115 +++++ .../time}/ZonedTimestampCodec.java | 68 +-- .../core/type/codec/SimpleBlobCodecTest.java | 96 ++++ .../type/codec/extras/OptionalCodecTest.java | 83 +++ .../extras/array/BooleanArrayCodecTest.java | 145 ++++++ .../extras/array/ByteArrayCodecTest.java | 149 ++++++ 
.../extras/array/DoubleArrayCodecTest.java | 149 ++++++ .../extras/array/FloatArrayCodecTest.java | 149 ++++++ .../codec/extras/array/IntArrayCodecTest.java | 149 ++++++ .../extras/array/LongArrayCodecTest.java | 149 ++++++ .../extras/array/ObjectArrayCodecTest.java | 172 +++++++ .../extras/array/ShortArrayCodecTest.java | 149 ++++++ .../codec/extras/enums/EnumNameCodecTest.java | 90 ++++ .../extras/enums/EnumOrdinalCodecTest.java | 95 ++++ .../type/codec/extras/json/JsonCodecTest.java | 115 +++++ .../extras/time/LocalTimestampCodecTest.java | 204 ++++++++ .../PersistentZonedTimestampCodecTest.java | 134 +++++ .../extras/time/TimestampMillisCodecTest.java | 203 ++++++++ .../time}/ZonedTimestampCodecTest.java | 18 +- .../examples/datatypes/CustomCodecs.java | 240 +++++++++ .../json/jackson/JacksonJsonColumn.java | 10 +- .../json/jackson/JacksonJsonFunction.java | 10 +- .../examples/json/jackson/JacksonJsonRow.java | 8 +- .../json/{codecs => jsr}/Jsr353JsonCodec.java | 51 +- .../examples/json/jsr/Jsr353JsonColumn.java | 1 - .../examples/json/jsr/Jsr353JsonFunction.java | 1 - .../examples/json/jsr/Jsr353JsonRow.java | 1 - .../type/codec/registry/CodecRegistryIT.java | 31 +- .../driver/querybuilder/JacksonJsonCodec.java | 122 ----- .../oss/driver/querybuilder/JsonInsertIT.java | 5 +- manual/core/custom_codecs/README.md | 408 +++++++++++++-- 55 files changed, 5296 insertions(+), 362 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractListToArrayCodec.java create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractPrimitiveListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortListToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodec.java rename examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java => core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodec.java (51%) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java rename 
core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/{ => extras/time}/ZonedTimestampCodec.java (59%) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortArrayCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodecTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodecTest.java create mode 100644 
core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodecTest.java rename core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/{ => extras/time}/ZonedTimestampCodecTest.java (92%) create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/datatypes/CustomCodecs.java rename examples/src/main/java/com/datastax/oss/driver/examples/json/{codecs => jsr}/Jsr353JsonCodec.java (80%) delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java diff --git a/changelog/README.md b/changelog/README.md index 08fbc99722f..328430daa2a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [new feature] JAVA-2691: Provide driver 4 support for extra codecs - [improvement] Allow injection of CodecRegistry on session builder - [improvement] JAVA-2828: Add safe paging state wrapper - [bug] JAVA-2835: Correctly handle unresolved addresses in DefaultEndPoint.equals diff --git a/core/revapi.json b/core/revapi.json index 478321cce8e..b42dd7ca74f 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5005,6 +5005,31 @@ "new": "parameter int com.datastax.oss.driver.api.core.type.UserDefinedType::firstIndexOf(===java.lang.String===)", "annotation": "@edu.umd.cs.findbugs.annotations.NonNull", "justification": "Add missing `@NonNull` annotation to UserDefinedType.firstIndexOf" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "new": "class com.fasterxml.jackson.databind.introspect.AnnotatedConstructor.Serialization", + "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "new": "class com.fasterxml.jackson.databind.introspect.AnnotatedField.Serialization", + "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "new": "class 
com.fasterxml.jackson.databind.introspect.AnnotatedMethod.Serialization", + "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "new": "class com.fasterxml.jackson.databind.type.TypeParser.MyTokenizer", + "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "new": "class com.fasterxml.jackson.databind.util.PrimitiveArrayBuilder.Node", + "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java new file mode 100644 index 00000000000..6bf044ebf03 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java @@ -0,0 +1,482 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.type.codec; + +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.SimpleBlobCodec; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.OptionalCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.BooleanListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.ByteListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.DoubleListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.FloatListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.IntListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.LongListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.ObjectListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.ShortListToArrayCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.enums.EnumNameCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.enums.EnumOrdinalCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.json.JsonCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.time.LocalTimestampCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.time.PersistentZonedTimestampCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.time.TimestampMillisCodec; +import com.datastax.oss.driver.internal.core.type.codec.extras.time.ZonedTimestampCodec; +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.umd.cs.findbugs.annotations.NonNull; +import 
java.nio.ByteBuffer; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Optional; + +/** + * Additional codecs that can be registered to handle different type mappings. + * + * @see SessionBuilder#addTypeCodecs(TypeCodec[]) + * @see MutableCodecRegistry#register(TypeCodec) + */ +public class ExtraTypeCodecs { + + /** + * A codec that maps CQL type {@code timestamp} to Java's {@link Instant}, using the UTC time zone + * to parse and format CQL literals. + * + *

        This codec uses {@link ZoneOffset#UTC} as its source of time zone information when + * formatting values as CQL literals, or parsing CQL literals that do not have any time zone + * indication. Note that this only applies to the {@link TypeCodec#format(Object)} and {@link + * TypeCodec#parse(String)} methods; regular encoding and decoding, like setting a value on a + * bound statement or reading a column from a row, are not affected by the time zone. + * + *

        If you need a different time zone, consider other constants in this class, or call {@link + * ExtraTypeCodecs#timestampAt(ZoneId)} instead. + * + * @see TypeCodecs#TIMESTAMP + * @see ExtraTypeCodecs#timestampAt(ZoneId) + */ + public static final TypeCodec TIMESTAMP_UTC = new TimestampCodec(ZoneOffset.UTC); + + /** + * A codec that maps CQL type {@code timestamp} to Java's {@code long}, representing the number of + * milliseconds since the Epoch, using the system's default time zone to parse and format CQL + * literals. + * + *

        This codec uses the system's {@linkplain ZoneId#systemDefault() default time zone} as its + * source of time zone information when formatting values as CQL literals, or parsing CQL literals + * that do not have any time zone indication. Note that this only applies to the {@link + * TypeCodec#format(Object)} and {@link TypeCodec#parse(String)} methods; regular encoding and + * decoding, like setting a value on a bound statement or reading a column from a row, are not + * affected by the time zone. + * + *

        If you need a different time zone, consider other constants in this class, or call {@link + * #timestampMillisAt(ZoneId)} instead. + * + *

        This codec can serve as a replacement for the driver's built-in {@linkplain
+ * TypeCodecs#TIMESTAMP timestamp} codec, when application code prefers to deal with raw
+ * milliseconds rather than with {@link Instant} instances.
+ *
+ * @see #TIMESTAMP_MILLIS_UTC
+ * @see #timestampMillisAt(ZoneId)
+ */
+ public static final PrimitiveLongCodec TIMESTAMP_MILLIS_SYSTEM = new TimestampMillisCodec();
+
+ /**
+ * A codec that maps CQL type {@code timestamp} to Java's {@code long}, representing the number of
+ * milliseconds since the Epoch, using the UTC time zone to parse and format CQL literals.
+ *
+ *

        This codec uses {@link ZoneOffset#UTC} as its source of time zone information when + * formatting values as CQL literals, or parsing CQL literals that do not have any time zone + * indication. Note that this only applies to the {@link TypeCodec#format(Object)} and {@link + * TypeCodec#parse(String)} methods; regular encoding and decoding, like setting a value on a + * bound statement or reading a column from a row, are not affected by the time zone. + * + *

        If you need a different time zone, consider other constants in this class, or call {@link + * #timestampMillisAt(ZoneId)} instead. + * + *

        This codec can serve as a replacement for the driver's built-in {@linkplain
+ * TypeCodecs#TIMESTAMP timestamp} codec, when application code prefers to deal with raw
+ * milliseconds rather than with {@link Instant} instances.
+ *
+ * @see #TIMESTAMP_MILLIS_SYSTEM
+ * @see #timestampMillisAt(ZoneId)
+ */
+ public static final PrimitiveLongCodec TIMESTAMP_MILLIS_UTC =
+ new TimestampMillisCodec(ZoneOffset.UTC);
+
+ /**
+ * A codec that maps CQL type {@code timestamp} to Java's {@link ZonedDateTime}, using the
+ * system's default time zone.
+ *
+ *

        This codec uses the system's {@linkplain ZoneId#systemDefault() default time zone} as its + * source of time zone information when encoding or decoding. If you need a different time zone, + * consider using other constants in this class, or call {@link #zonedTimestampAt(ZoneId)} + * instead. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; this codec is
+ * provided merely as a convenience for users that need to deal with zoned timestamps in their
+ * applications.
+ *
+ * @see #ZONED_TIMESTAMP_UTC
+ * @see #ZONED_TIMESTAMP_PERSISTED
+ * @see #zonedTimestampAt(ZoneId)
+ */
+ public static final TypeCodec<ZonedDateTime> ZONED_TIMESTAMP_SYSTEM = new ZonedTimestampCodec();
+
+ /**
+ * A codec that maps CQL type {@code timestamp} to Java's {@link ZonedDateTime}, using the UTC
+ * time zone.
+ *
+ *

        This codec uses {@link ZoneOffset#UTC} as its source of time zone information when encoding + * or decoding. If you need a different time zone, consider using other constants in this class, + * or call {@link #zonedTimestampAt(ZoneId)} instead. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; this codec is
+ * provided merely as a convenience for users that need to deal with zoned timestamps in their
+ * applications.
+ *
+ * @see #ZONED_TIMESTAMP_SYSTEM
+ * @see #ZONED_TIMESTAMP_PERSISTED
+ * @see #zonedTimestampAt(ZoneId)
+ */
+ public static final TypeCodec<ZonedDateTime> ZONED_TIMESTAMP_UTC =
+ new ZonedTimestampCodec(ZoneOffset.UTC);
+
+ /**
+ * A codec that maps CQL type {@code tuple<timestamp,text>} to Java's {@link ZonedDateTime},
+ * providing a pattern for maintaining timezone information in Cassandra.
+ *
+ *

        Since CQL type {@code timestamp} does not store any time zone, it is persisted separately in + * the {@code text} field of the tuple, and so when the value is read back the original timezone + * it was written with is preserved. + * + * @see #ZONED_TIMESTAMP_SYSTEM + * @see #ZONED_TIMESTAMP_UTC + * @see #zonedTimestampAt(ZoneId) + */ + public static final TypeCodec ZONED_TIMESTAMP_PERSISTED = + new PersistentZonedTimestampCodec(); + + /** + * A codec that maps CQL type {@code timestamp} to Java's {@link LocalDateTime}, using the + * system's default time zone. + * + *

        This codec uses the system's {@linkplain ZoneId#systemDefault() default time zone} as its + * source of time zone information when encoding or decoding. If you need a different time zone, + * consider using other constants in this class, or call {@link #localTimestampAt(ZoneId)} + * instead. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; this codec is provided + * merely as a convenience for users that need to deal with local date-times in their + * applications. + * + * @see #LOCAL_TIMESTAMP_UTC + * @see #localTimestampAt(ZoneId) + */ + public static final TypeCodec LOCAL_TIMESTAMP_SYSTEM = new LocalTimestampCodec(); + + /** + * A codec that maps CQL type {@code timestamp} to Java's {@link LocalDateTime}, using the UTC + * time zone. + * + *

        This codec uses {@link ZoneOffset#UTC} as its source of time zone information when encoding + * or decoding. If you need a different time zone, consider using other constants in this class, + * or call {@link #localTimestampAt(ZoneId)} instead. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; this codec is provided + * merely as a convenience for users that need to deal with local date-times in their + * applications. + * + * @see #LOCAL_TIMESTAMP_SYSTEM + * @see #localTimestampAt(ZoneId) + */ + public static final TypeCodec LOCAL_TIMESTAMP_UTC = + new LocalTimestampCodec(ZoneOffset.UTC); + + /** + * A codec that maps CQL type {@code blob} to Java's {@code byte[]}. + * + *

        If you are looking for a codec mapping CQL type {@code blob} to the Java type {@link + * ByteBuffer}, you should use {@link TypeCodecs#BLOB} instead. + * + *

        If you are looking for a codec mapping CQL type {@code list BLOB_TO_ARRAY = new SimpleBlobCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code boolean[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code boolean[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * boolean} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ + public static final TypeCodec BOOLEAN_LIST_TO_ARRAY = new BooleanListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code byte[]}. + * + *

        This codec is not suitable for reading CQL blobs as byte arrays. If you are looking for a + * codec for the CQL type {@code blob}, you should use {@link TypeCodecs#BLOB} or {@link + * ExtraTypeCodecs#BLOB_TO_ARRAY} instead. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code byte[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * byte} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + * + * @see TypeCodecs#BLOB + * @see ExtraTypeCodecs#BLOB_TO_ARRAY + */ + public static final TypeCodec BYTE_LIST_TO_ARRAY = new ByteListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code short[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code short[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * short} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ + public static final TypeCodec SHORT_LIST_TO_ARRAY = new ShortListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code int[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code int[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code int} + * values; it also instantiates arrays without the need for an intermediary Java {@code List} + * object. + */ + public static final TypeCodec INT_LIST_TO_ARRAY = new IntListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code long[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code long[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * long} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ + public static final TypeCodec LONG_LIST_TO_ARRAY = new LongListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code float[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code float[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * float} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ + public static final TypeCodec FLOAT_LIST_TO_ARRAY = new FloatListToArrayCodec(); + + /** + * A codec that maps CQL type {@code list} to Java's {@code double[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code double[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * double} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ + public static final TypeCodec DOUBLE_LIST_TO_ARRAY = new DoubleListToArrayCodec(); + + /** + * Builds a new codec that maps CQL type {@code timestamp} to Java's {@link Instant}, using the + * given time zone to parse and format CQL literals. + * + *

        This codec uses the supplied {@link ZoneId} as its source of time zone information when + * formatting values as CQL literals, or parsing CQL literals that do not have any time zone + * indication. Note that this only applies to the {@link TypeCodec#format(Object)} and {@link + * TypeCodec#parse(String)} methods; regular encoding and decoding, like setting a value on a + * bound statement or reading a column from a row, are not affected by the time zone. + * + * @see TypeCodecs#TIMESTAMP + * @see ExtraTypeCodecs#TIMESTAMP_UTC + */ + @NonNull + public static TypeCodec timestampAt(@NonNull ZoneId timeZone) { + return new TimestampCodec(timeZone); + } + + /** + * Builds a new codec that maps CQL type {@code timestamp} to Java's {@code long}, representing + * the number of milliseconds since the Epoch, using the given time zone to parse and format CQL + * literals. + * + *

        This codec uses the supplied {@link ZoneId} as its source of time zone information when + * formatting values as CQL literals, or parsing CQL literals that do not have any time zone + * indication. Note that this only applies to the {@link TypeCodec#format(Object)} and {@link + * TypeCodec#parse(String)} methods; regular encoding and decoding, like setting a value on a + * bound statement or reading a column from a row, are not affected by the time zone. + * + *

        This codec can serve as a replacement for the driver's built-in {@linkplain + * TypeCodecs#TIMESTAMP timestamp} codec, when application code prefers to deal with raw + * milliseconds than with {@link Instant} instances. + * + * @see ExtraTypeCodecs#TIMESTAMP_MILLIS_SYSTEM + * @see ExtraTypeCodecs#TIMESTAMP_MILLIS_UTC + */ + @NonNull + public static PrimitiveLongCodec timestampMillisAt(@NonNull ZoneId timeZone) { + return new TimestampMillisCodec(timeZone); + } + + /** + * Builds a new codec that maps CQL type {@code timestamp} to Java's {@link ZonedDateTime}. + * + *

        This codec uses the supplied {@link ZoneId} as its source of time zone information when + * encoding or decoding. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; the codecs created by + * this method are provided merely as a convenience for users that need to deal with zoned + * timestamps in their applications. + * + * @see ExtraTypeCodecs#ZONED_TIMESTAMP_SYSTEM + * @see ExtraTypeCodecs#ZONED_TIMESTAMP_UTC + * @see ExtraTypeCodecs#ZONED_TIMESTAMP_PERSISTED + */ + @NonNull + public static TypeCodec zonedTimestampAt(@NonNull ZoneId timeZone) { + return new ZonedTimestampCodec(timeZone); + } + + /** + * Builds a new codec that maps CQL type {@code timestamp} to Java's {@link LocalDateTime}. + * + *

        This codec uses the supplied {@link ZoneId} as its source of time zone information when + * encoding or decoding. + * + *

        Note that CQL type {@code timestamp} does not store any time zone; the codecs created by + * this method are provided merely as a convenience for users that need to deal with local + * date-times in their applications. + * + * @see ExtraTypeCodecs#LOCAL_TIMESTAMP_UTC + * @see #localTimestampAt(ZoneId) + */ + @NonNull + public static TypeCodec localTimestampAt(@NonNull ZoneId timeZone) { + return new LocalTimestampCodec(timeZone); + } + + /** + * Builds a new codec that maps a CQL list to a Java array. Encoding and decoding of elements in + * the array is delegated to the provided element codec. + * + *

        This method is not suitable for Java primitive arrays. Use {@link + * ExtraTypeCodecs#BOOLEAN_LIST_TO_ARRAY}, {@link ExtraTypeCodecs#BYTE_LIST_TO_ARRAY}, {@link + * ExtraTypeCodecs#SHORT_LIST_TO_ARRAY}, {@link ExtraTypeCodecs#INT_LIST_TO_ARRAY}, {@link + * ExtraTypeCodecs#LONG_LIST_TO_ARRAY}, {@link ExtraTypeCodecs#FLOAT_LIST_TO_ARRAY} or {@link + * ExtraTypeCodecs#DOUBLE_LIST_TO_ARRAY} instead. + */ + @NonNull + public static TypeCodec listToArrayOf(@NonNull TypeCodec elementCodec) { + return new ObjectListToArrayCodec<>(elementCodec); + } + + /** + * Builds a new codec that maps CQL type {@code int} to a Java Enum, according to its constants' + * {@linkplain Enum#ordinal() ordinals} (STRONGLY discouraged, see explanations below). + * + *

        This method is provided for compatibility with driver 3, but we strongly recommend against + * it. Relying on enum ordinals is a bad practice: any reordering of the enum constants, or + * insertion of a new constant before the end, will change the ordinals. The codec will keep + * working, but start inserting different codes and corrupting your data. + * + *

        {@link #enumNamesOf(Class)} is a safer alternative, as it is not dependent on the constant + * order. If you still want to use integer codes for storage efficiency, we recommend implementing + * an explicit mapping (for example with a {@code toCode()} method on your enum type). It is then + * fairly straightforward to implement a codec with {@link MappingCodec}, using {@link + * TypeCodecs#INT} as the "inner" codec. + */ + @NonNull + public static > TypeCodec enumOrdinalsOf( + @NonNull Class enumClass) { + return new EnumOrdinalCodec<>(enumClass); + } + + /** + * Builds a new codec that maps CQL type {@code text} to a Java Enum, according to its constants' + * programmatic {@linkplain Enum#name() names}. + * + * @see #enumOrdinalsOf(Class) + */ + @NonNull + public static > TypeCodec enumNamesOf( + @NonNull Class enumClass) { + return new EnumNameCodec<>(enumClass); + } + + /** + * Builds a new codec that wraps another codec's Java type into {@link Optional} instances + * (mapping CQL null to {@link Optional#empty()}). + */ + @NonNull + public static TypeCodec> optionalOf(@NonNull TypeCodec innerCodec) { + return new OptionalCodec<>(innerCodec); + } + + /** + * Builds a new codec that maps CQL type {@code text} to the given Java type, using JSON + * serialization with a default Jackson mapper. + * + * @see Jackson JSON Library + */ + @NonNull + public static TypeCodec json(@NonNull GenericType javaType) { + return new JsonCodec<>(javaType); + } + + /** + * Builds a new codec that maps CQL type {@code text} to the given Java type, using JSON + * serialization with a default Jackson mapper. + * + * @see Jackson JSON Library + */ + @NonNull + public static TypeCodec json(@NonNull Class javaType) { + return new JsonCodec<>(javaType); + } + + /** + * Builds a new codec that maps CQL type {@code text} to the given Java type, using JSON + * serialization with the provided Jackson mapper. 
+ * + * @see Jackson JSON Library + */ + @NonNull + public static TypeCodec json( + @NonNull GenericType javaType, @NonNull ObjectMapper objectMapper) { + return new JsonCodec<>(javaType, objectMapper); + } + + /** + * Builds a new codec that maps CQL type {@code text} to the given Java type, using JSON + * serialization with the provided Jackson mapper. + * + * @see Jackson JSON Library + */ + @NonNull + public static TypeCodec json( + @NonNull Class javaType, @NonNull ObjectMapper objectMapper) { + return new JsonCodec<>(javaType, objectMapper); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java index ac421f2a046..6cd4b68a042 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java @@ -48,7 +48,6 @@ import com.datastax.oss.driver.internal.core.type.codec.UdtCodec; import com.datastax.oss.driver.internal.core.type.codec.UuidCodec; import com.datastax.oss.driver.internal.core.type.codec.VarIntCodec; -import com.datastax.oss.driver.internal.core.type.codec.ZonedTimestampCodec; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; @@ -60,82 +59,133 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZoneId; -import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; -/** Constants and factory methods to obtain type codec instances. */ +/** + * Constants and factory methods to obtain instances of the driver's default type codecs. + * + *

        See also {@link ExtraTypeCodecs} for additional codecs that you can register with your session + * to handle different type mappings. + */ public class TypeCodecs { + /** The default codec that maps CQL type {@code boolean} to Java's {@code boolean}. */ public static final PrimitiveBooleanCodec BOOLEAN = new BooleanCodec(); + + /** The default codec that maps CQL type {@code tinyint} to Java's {@code byte}. */ public static final PrimitiveByteCodec TINYINT = new TinyIntCodec(); + + /** The default codec that maps CQL type {@code double} to Java's {@code double}. */ public static final PrimitiveDoubleCodec DOUBLE = new DoubleCodec(); + + /** The default codec that maps CQL type {@code counter} to Java's {@code long}. */ public static final PrimitiveLongCodec COUNTER = new CounterCodec(); + + /** The default codec that maps CQL type {@code float} to Java's {@code float}. */ public static final PrimitiveFloatCodec FLOAT = new FloatCodec(); + + /** The default codec that maps CQL type {@code int} to Java's {@code int}. */ public static final PrimitiveIntCodec INT = new IntCodec(); + + /** The default codec that maps CQL type {@code bigint} to Java's {@code long}. */ public static final PrimitiveLongCodec BIGINT = new BigIntCodec(); + + /** The default codec that maps CQL type {@code smallint} to Java's {@code short}. */ public static final PrimitiveShortCodec SMALLINT = new SmallIntCodec(); - public static final TypeCodec TIMESTAMP = new TimestampCodec(); /** - * A codec that handles Apache Cassandra(R)'s timestamp type and maps it to Java's {@link - * ZonedDateTime}, using the system's {@linkplain ZoneId#systemDefault() default time zone} as its - * source of time zone information. + * The default codec that maps CQL type {@code timestamp} to Java's {@link Instant}, using the + * system's default time zone to parse and format CQL literals. + * + *

        This codec uses the system's {@linkplain ZoneId#systemDefault() default time zone} as its + * source of time zone information when formatting values as CQL literals, or parsing CQL literals + * that do not have any time zone indication. Note that this only applies to the {@link + * TypeCodec#format(Object)} and {@link TypeCodec#parse(String)} methods; regular encoding and + * decoding, like setting a value on a bound statement or reading a column from a row, are not + * affected by the time zone. * - *

        Note that Apache Cassandra(R)'s timestamp type does not store any time zone; this codec is - * provided merely as a convenience for users that need to deal with zoned timestamps in their - * applications. + *

        If you need a different time zone, consider other codecs in {@link ExtraTypeCodecs}, or call + * {@link ExtraTypeCodecs#timestampAt(ZoneId)} instead. * - * @see #ZONED_TIMESTAMP_UTC - * @see #zonedTimestampAt(ZoneId) + * @see ExtraTypeCodecs#TIMESTAMP_UTC + * @see ExtraTypeCodecs#timestampAt(ZoneId) */ - public static final TypeCodec ZONED_TIMESTAMP_SYSTEM = new ZonedTimestampCodec(); + public static final TypeCodec TIMESTAMP = new TimestampCodec(); + + /** The default codec that maps CQL type {@code date} to Java's {@link LocalDate}. */ + public static final TypeCodec DATE = new DateCodec(); + + /** The default codec that maps CQL type {@code time} to Java's {@link LocalTime}. */ + public static final TypeCodec TIME = new TimeCodec(); /** - * A codec that handles Apache Cassandra(R)'s timestamp type and maps it to Java's {@link - * ZonedDateTime}, using {@link ZoneOffset#UTC} as its source of time zone information. + * The default codec that maps CQL type {@code blob} to Java's {@link ByteBuffer}. + * + *

        If you are looking for a codec mapping CQL type {@code blob} to the Java type {@code + * byte[]}, you should use {@link ExtraTypeCodecs#BLOB_TO_ARRAY} instead. * - *

        Note that Apache Cassandra(R)'s timestamp type does not store any time zone; this codec is - * provided merely as a convenience for users that need to deal with zoned timestamps in their - * applications. + *

        If you are looking for a codec mapping CQL type {@code list} to the Java type + * {@code byte[]}, you should use {@link ExtraTypeCodecs#BYTE_LIST_TO_ARRAY} instead. * - * @see #ZONED_TIMESTAMP_SYSTEM - * @see #zonedTimestampAt(ZoneId) + * @see ExtraTypeCodecs#BLOB_TO_ARRAY + * @see ExtraTypeCodecs#BYTE_LIST_TO_ARRAY */ - public static final TypeCodec ZONED_TIMESTAMP_UTC = - new ZonedTimestampCodec(ZoneOffset.UTC); - - public static final TypeCodec DATE = new DateCodec(); - public static final TypeCodec TIME = new TimeCodec(); public static final TypeCodec BLOB = new BlobCodec(); + + /** The default codec that maps CQL type {@code text} to Java's {@link String}. */ public static final TypeCodec TEXT = new StringCodec(DataTypes.TEXT, Charsets.UTF_8); + /** The default codec that maps CQL type {@code ascii} to Java's {@link String}. */ public static final TypeCodec ASCII = new StringCodec(DataTypes.ASCII, Charsets.US_ASCII); + /** The default codec that maps CQL type {@code varint} to Java's {@link BigInteger}. */ public static final TypeCodec VARINT = new VarIntCodec(); + /** The default codec that maps CQL type {@code decimal} to Java's {@link BigDecimal}. */ public static final TypeCodec DECIMAL = new DecimalCodec(); + /** The default codec that maps CQL type {@code uuid} to Java's {@link UUID}. */ public static final TypeCodec UUID = new UuidCodec(); + /** The default codec that maps CQL type {@code timeuuid} to Java's {@link UUID}. */ public static final TypeCodec TIMEUUID = new TimeUuidCodec(); + /** The default codec that maps CQL type {@code inet} to Java's {@link InetAddress}. */ public static final TypeCodec INET = new InetCodec(); + /** The default codec that maps CQL type {@code duration} to the driver's {@link CqlDuration}. */ public static final TypeCodec DURATION = new CqlDurationCodec(); + /** + * Builds a new codec that maps a CQL custom type to Java's {@link ByteBuffer}. + * + * @param cqlType the fully-qualified name of the custom type. 
+ */ @NonNull public static TypeCodec custom(@NonNull DataType cqlType) { Preconditions.checkArgument(cqlType instanceof CustomType, "cqlType must be a custom type"); return new CustomCodec((CustomType) cqlType); } + /** + * Builds a new codec that maps a CQL list to a Java list, using the given codec to map each + * element. + */ @NonNull public static TypeCodec> listOf(@NonNull TypeCodec elementCodec) { return new ListCodec<>(DataTypes.listOf(elementCodec.getCqlType()), elementCodec); } + /** + * Builds a new codec that maps a CQL set to a Java set, using the given codec to map each + * element. + */ @NonNull public static TypeCodec> setOf(@NonNull TypeCodec elementCodec) { return new SetCodec<>(DataTypes.setOf(elementCodec.getCqlType()), elementCodec); } + /** + * Builds a new codec that maps a CQL map to a Java map, using the given codecs to map each key + * and value. + */ @NonNull public static TypeCodec> mapOf( @NonNull TypeCodec keyCodec, @NonNull TypeCodec valueCodec) { @@ -143,29 +193,56 @@ public static TypeCodec> mapOf( DataTypes.mapOf(keyCodec.getCqlType(), valueCodec.getCqlType()), keyCodec, valueCodec); } + /** + * Builds a new codec that maps a CQL tuple to the driver's {@link TupleValue}, for the given type + * definition. + * + *

        Note that the components of a {@link TupleValue} are stored in their encoded form. They are + * encoded/decoded on the fly when you set or get them, using the codec registry. + */ @NonNull public static TypeCodec tupleOf(@NonNull TupleType cqlType) { return new TupleCodec(cqlType); } + /** + * Builds a new codec that maps a CQL user defined type to the driver's {@link UdtValue}, for the + * given type definition. + * + *

        Note that the fields of a {@link UdtValue} are stored in their encoded form. They are + * encoded/decoded on the fly when you set or get them, using the codec registry. + */ @NonNull public static TypeCodec udtOf(@NonNull UserDefinedType cqlType) { return new UdtCodec(cqlType); } /** - * Returns a codec that handles Apache Cassandra(R)'s timestamp type and maps it to Java's {@link - * ZonedDateTime}, using the supplied {@link ZoneId} as its source of time zone information. + * An alias for {@link ExtraTypeCodecs#ZONED_TIMESTAMP_SYSTEM}. * - *

        Note that Apache Cassandra(R)'s timestamp type does not store any time zone; the codecs - * created by this method are provided merely as a convenience for users that need to deal with - * zoned timestamps in their applications. + *

        This exists for historical reasons: the constant was originally defined in this class, but + * technically it belongs to {@link ExtraTypeCodecs} because this is not a built-in mapping. + */ + public static final TypeCodec ZONED_TIMESTAMP_SYSTEM = + ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM; + + /** + * An alias for {@link ExtraTypeCodecs#ZONED_TIMESTAMP_UTC}. + * + *

        This exists for historical reasons: the constant was originally defined in this class, but + * technically it belongs to {@link ExtraTypeCodecs} because this is not a built-in mapping. + */ + public static final TypeCodec ZONED_TIMESTAMP_UTC = + ExtraTypeCodecs.ZONED_TIMESTAMP_UTC; + + /** + * An alias for {@link ExtraTypeCodecs#zonedTimestampAt(ZoneId)}. * - * @see #ZONED_TIMESTAMP_SYSTEM - * @see #ZONED_TIMESTAMP_UTC + *

        This exists for historical reasons: the method was originally defined in this class, but + * technically it belongs to {@link ExtraTypeCodecs} because this is not a built-in mapping. */ @NonNull public static TypeCodec zonedTimestampAt(@NonNull ZoneId timeZone) { - return new ZonedTimestampCodec(timeZone); + return ExtraTypeCodecs.zonedTimestampAt(timeZone); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index 10bf2ef928e..a1977e39f23 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -34,10 +34,12 @@ import java.time.Duration; import java.time.Instant; import java.time.LocalDate; +import java.time.LocalDateTime; import java.time.LocalTime; import java.time.ZonedDateTime; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.UUID; import net.jcip.annotations.Immutable; @@ -95,6 +97,7 @@ public class GenericType { public static final GenericType ZONED_DATE_TIME = of(ZonedDateTime.class); public static final GenericType LOCAL_DATE = of(LocalDate.class); public static final GenericType LOCAL_TIME = of(LocalTime.class); + public static final GenericType LOCAL_DATE_TIME = of(LocalDateTime.class); public static final GenericType BYTE_BUFFER = of(ByteBuffer.class); public static final GenericType STRING = of(String.class); public static final GenericType BIG_INTEGER = of(BigInteger.class); @@ -162,6 +165,35 @@ public static GenericType> mapOf( return new GenericType<>(token); } + @NonNull + public static GenericType arrayOf(@NonNull Class componentType) { + TypeToken token = + new TypeToken() {}.where(new TypeParameter() {}, TypeToken.of(componentType)); + return new GenericType<>(token); + } + + @NonNull + public static GenericType 
arrayOf(@NonNull GenericType componentType) { + TypeToken token = + new TypeToken() {}.where(new TypeParameter() {}, componentType.token); + return new GenericType<>(token); + } + + @NonNull + public static GenericType> optionalOf(@NonNull Class componentType) { + TypeToken> token = + new TypeToken>() {}.where( + new TypeParameter() {}, TypeToken.of(componentType)); + return new GenericType<>(token); + } + + @NonNull + public static GenericType> optionalOf(@NonNull GenericType componentType) { + TypeToken> token = + new TypeToken>() {}.where(new TypeParameter() {}, componentType.token); + return new GenericType<>(token); + } + private final TypeToken token; private GenericType(TypeToken token) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BlobCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BlobCodec.java index 4aeed77b00b..7c036e203cc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BlobCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BlobCodec.java @@ -26,6 +26,12 @@ import java.nio.ByteBuffer; import net.jcip.annotations.ThreadSafe; +/** + * A codec that maps the CQL type {@code blob} to the Java type {@link ByteBuffer}. + * + *

        If you are looking for a codec mapping the CQL type {@code blob} to the Java type {@code + * byte[]}, you should use {@link SimpleBlobCodec} instead. + */ @ThreadSafe public class BlobCodec implements TypeCodec { @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodec.java new file mode 100644 index 00000000000..07ace09adb3 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodec.java @@ -0,0 +1,65 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec; + +import com.datastax.oss.driver.api.core.data.ByteUtils; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.extras.array.ByteListToArrayCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import net.jcip.annotations.Immutable; + +/** + * A codec that maps the CQL type {@code blob} to the Java type {@code byte[]}. + * + *

        If you are looking for a codec mapping the CQL type {@code blob} to the Java type {@link + * ByteBuffer}, you should use {@link BlobCodec} instead. + * + *

        If you are looking for a codec mapping the CQL type {@code list { + + public SimpleBlobCodec() { + super(TypeCodecs.BLOB, GenericType.of(byte[].class)); + } + + @Override + public boolean accepts(@NonNull Object value) { + return value instanceof byte[]; + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return byte[].class.equals(javaClass); + } + + @Nullable + @Override + protected byte[] innerToOuter(@Nullable ByteBuffer value) { + return value == null ? null : ByteUtils.getArray(value); + } + + @Nullable + @Override + protected ByteBuffer outerToInner(@Nullable byte[] value) { + return value == null ? null : ByteBuffer.wrap(value); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java index aa7d147581f..3798b68b482 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java @@ -96,7 +96,7 @@ * * * By default, when parsing, timestamp literals that do not include any time zone information will - * be interpreted using the system's {@linkplain TimeZone#getDefault() default time zone}. This is + * be interpreted using the system's {@linkplain ZoneId#systemDefault() default time zone}. This is * intended to mimic Apache Cassandra(R)'s own parsing behavior (see {@code * org.apache.cassandra.serializers.TimestampSerializer}). 
The default time zone can be modified * using the {@linkplain TimestampCodec#TimestampCodec(ZoneId) one-arg constructor} that takes a diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodec.java new file mode 100644 index 00000000000..2361495ad93 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodec.java @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras; + +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import net.jcip.annotations.Immutable; + +/** + * A codec that wraps other codecs around {@link Optional} instances. + * + * @param The wrapped Java type. 
+ */ +@Immutable +public class OptionalCodec extends MappingCodec> { + + public OptionalCodec(@NonNull TypeCodec innerCodec) { + super( + Objects.requireNonNull(innerCodec, "innerCodec must not be null"), + GenericType.optionalOf(innerCodec.getJavaType())); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + if (value instanceof Optional) { + Optional optional = (Optional) value; + return optional.map(innerCodec::accepts).orElse(true); + } + return false; + } + + @Nullable + @Override + protected Optional innerToOuter(@Nullable T value) { + return Optional.ofNullable(isAbsent(value) ? null : value); + } + + @Nullable + @Override + protected T outerToInner(@Nullable Optional value) { + return value != null && value.isPresent() ? value.get() : null; + } + + protected boolean isAbsent(@Nullable T value) { + return value == null + || (value instanceof Collection && ((Collection) value).isEmpty()) + || (value instanceof Map && ((Map) value).isEmpty()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractListToArrayCodec.java new file mode 100644 index 00000000000..ce94f148600 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractListToArrayCodec.java @@ -0,0 +1,198 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.ListType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.ParseUtils; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.lang.reflect.Array; +import java.util.Objects; + +/** + * Base class for all codecs dealing with Java arrays. This class aims to reduce the amount of code + * required to create such codecs. + * + * @param The Java array type this codec handles + */ +public abstract class AbstractListToArrayCodec implements TypeCodec { + + @NonNull protected final ListType cqlType; + @NonNull protected final GenericType javaType; + + /** + * @param cqlType The CQL type. Must be a list type. + * @param arrayType The Java type. Must be an array class. 
+ */ + protected AbstractListToArrayCodec( + @NonNull ListType cqlType, @NonNull GenericType arrayType) { + this.cqlType = Objects.requireNonNull(cqlType, "cqlType cannot be null"); + this.javaType = Objects.requireNonNull(arrayType, "arrayType cannot be null"); + if (!arrayType.isArray()) { + throw new IllegalArgumentException("Expecting Java array class, got " + arrayType); + } + } + + @NonNull + @Override + public GenericType getJavaType() { + return javaType; + } + + @NonNull + @Override + public DataType getCqlType() { + return cqlType; + } + + @NonNull + @Override + public String format(@Nullable ArrayT array) { + if (array == null) { + return "NULL"; + } + int length = Array.getLength(array); + StringBuilder sb = new StringBuilder(); + sb.append('['); + for (int i = 0; i < length; i++) { + if (i != 0) { + sb.append(","); + } + formatElement(sb, array, i); + } + sb.append(']'); + return sb.toString(); + } + + @Nullable + @Override + public ArrayT parse(@Nullable String value) { + if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) { + return null; + } + int idx = skipSpaces(value, 0); + idx = skipOpeningBracket(value, idx); + idx = skipSpaces(value, idx); + if (value.charAt(idx) == ']') { + return newInstance(0); + } + // first pass: determine array length + int length = getArrayLength(value, idx); + // second pass: parse elements + ArrayT array = newInstance(length); + int i = 0; + for (; idx < value.length(); i++) { + int n = skipLiteral(value, idx); + parseElement(value.substring(idx, n), array, i); + idx = skipSpaces(value, n); + if (value.charAt(idx) == ']') { + return array; + } + idx = skipComma(value, idx); + idx = skipSpaces(value, idx); + } + throw new IllegalArgumentException( + String.format("Malformed list value \"%s\", missing closing ']'", value)); + } + + /** + * Creates a new array instance with the given size. + * + * @param size The size of the array to instantiate. + * @return a new array instance with the given size. 
+ */ + @NonNull + protected abstract ArrayT newInstance(int size); + + /** + * Formats the {@code index}th element of {@code array} to {@code output}. + * + * @param output The StringBuilder to write to. + * @param array The array to read from. + * @param index The element index. + */ + protected abstract void formatElement( + @NonNull StringBuilder output, @NonNull ArrayT array, int index); + + /** + * Parses the {@code index}th element of {@code array} from {@code input}. + * + * @param input The String to read from. + * @param array The array to write to. + * @param index The element index. + */ + protected abstract void parseElement(@NonNull String input, @NonNull ArrayT array, int index); + + private int getArrayLength(String value, int idx) { + int length = 1; + for (; idx < value.length(); length++) { + idx = skipLiteral(value, idx); + idx = skipSpaces(value, idx); + if (value.charAt(idx) == ']') { + break; + } + idx = skipComma(value, idx); + idx = skipSpaces(value, idx); + } + return length; + } + + private int skipComma(String value, int idx) { + if (value.charAt(idx) != ',') { + throw new IllegalArgumentException( + String.format( + "Cannot parse list value from \"%s\", at character %d expecting ',' but got '%c'", + value, idx, value.charAt(idx))); + } + return idx + 1; + } + + private int skipOpeningBracket(String value, int idx) { + if (value.charAt(idx) != '[') { + throw new IllegalArgumentException( + String.format( + "cannot parse list value from \"%s\", at character %d expecting '[' but got '%c'", + value, idx, value.charAt(idx))); + } + return idx + 1; + } + + private int skipSpaces(String value, int idx) { + try { + return ParseUtils.skipSpaces(value, idx); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException( + String.format( + "Cannot parse list value from \"%s\", at character %d expecting space but got '%c'", + value, idx, value.charAt(idx)), + e); + } + } + + private int skipLiteral(String value, int idx) { + try { + 
return ParseUtils.skipCQLValue(value, idx); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException( + String.format( + "Cannot parse list value from \"%s\", invalid CQL value at character %d", value, idx), + e); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractPrimitiveListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractPrimitiveListToArrayCodec.java new file mode 100644 index 00000000000..1e81af8edcf --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/AbstractPrimitiveListToArrayCodec.java @@ -0,0 +1,127 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.ListType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.lang.reflect.Array; +import java.nio.ByteBuffer; +import java.util.Objects; + +/** + * Base class for all codecs dealing with Java primitive arrays. 
This class provides a more + * efficient implementation of {@link #encode(Object, ProtocolVersion)} and {@link + * #decode(ByteBuffer, ProtocolVersion)} for primitive arrays. + * + * @param The Java primitive array type this codec handles + */ +public abstract class AbstractPrimitiveListToArrayCodec + extends AbstractListToArrayCodec { + + /** + * @param cqlType The CQL type. Must be a list type. + * @param javaClass The Java type. Must be an array class. + */ + protected AbstractPrimitiveListToArrayCodec( + @NonNull ListType cqlType, @NonNull GenericType javaClass) { + super(cqlType, javaClass); + GenericType componentType = Objects.requireNonNull(javaClass.getComponentType()); + if (!componentType.isPrimitive()) { + throw new IllegalArgumentException( + "Expecting primitive array component type, got " + componentType); + } + } + + @Nullable + @Override + public ByteBuffer encode( + @Nullable PrimitiveArrayT array, @NonNull ProtocolVersion protocolVersion) { + if (array == null) { + return null; + } + int length = Array.getLength(array); + int sizeOfElement = 4 + sizeOfComponentType(); + int totalSize = 4 + length * sizeOfElement; + ByteBuffer output = ByteBuffer.allocate(totalSize); + output.putInt(length); + for (int i = 0; i < length; i++) { + output.putInt(sizeOfComponentType()); + serializeElement(output, array, i, protocolVersion); + } + output.flip(); + return output; + } + + @Nullable + @Override + public PrimitiveArrayT decode( + @Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + if (bytes == null || bytes.remaining() == 0) { + return newInstance(0); + } + ByteBuffer input = bytes.duplicate(); + int length = input.getInt(); + PrimitiveArrayT array = newInstance(length); + for (int i = 0; i < length; i++) { + int elementSize = input.getInt(); + // Null elements can happen on the decode path, but we cannot tolerate them + if (elementSize < 0) { + throw new NullPointerException("Primitive arrays cannot store null elements"); + } else { 
+ deserializeElement(input, array, i, protocolVersion); + } + } + return array; + } + + /** + * Return the size in bytes of the array component type. + * + * @return the size in bytes of the array component type. + */ + protected abstract int sizeOfComponentType(); + + /** + * Write the {@code index}th element of {@code array} to {@code output}. + * + * @param output The ByteBuffer to write to. + * @param array The array to read from. + * @param index The element index. + * @param protocolVersion The protocol version to use. + */ + protected abstract void serializeElement( + @NonNull ByteBuffer output, + @NonNull PrimitiveArrayT array, + int index, + @NonNull ProtocolVersion protocolVersion); + + /** + * Read the {@code index}th element of {@code array} from {@code input}. + * + * @param input The ByteBuffer to read from. + * @param array The array to write to. + * @param index The element index. + * @param protocolVersion The protocol version to use. + */ + protected abstract void deserializeElement( + @NonNull ByteBuffer input, + @NonNull PrimitiveArrayT array, + int index, + @NonNull ProtocolVersion protocolVersion); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanListToArrayCodec.java new file mode 100644 index 00000000000..fbf2e7c0db0 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanListToArrayCodec.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A codec that maps the CQL type {@code list} to the Java type {@code boolean[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code boolean[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * boolean} values; it also instantiates arrays without the need for an intermediary Java {@code + * List} object. + */ +@Immutable +public class BooleanListToArrayCodec extends AbstractPrimitiveListToArrayCodec { + + private static final byte TRUE = (byte) 1; + private static final byte FALSE = (byte) 0; + + public BooleanListToArrayCodec() { + super(DataTypes.listOf(DataTypes.BOOLEAN), GenericType.of(boolean[].class)); + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + Objects.requireNonNull(javaClass); + return boolean[].class.equals(javaClass); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof boolean[]; + } + + @Override + protected int sizeOfComponentType() { + return 1; + } + + @Override + protected void serializeElement( + @NonNull ByteBuffer output, + @NonNull boolean[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + byte element = array[index] ? 
TRUE : FALSE; + output.put(element); + } + + @Override + protected void deserializeElement( + @NonNull ByteBuffer input, + @NonNull boolean[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + array[index] = input.get() == TRUE; + } + + @Override + protected void formatElement(@NonNull StringBuilder output, @NonNull boolean[] array, int index) { + output.append(array[index]); + } + + @Override + protected void parseElement(@NonNull String input, @NonNull boolean[] array, int index) { + array[index] = Boolean.parseBoolean(input); + } + + @NonNull + @Override + protected boolean[] newInstance(int size) { + return new boolean[size]; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteListToArrayCodec.java new file mode 100644 index 00000000000..4f3d1f3fbc2 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteListToArrayCodec.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.SimpleBlobCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A codec that maps the CQL type {@code list} to the Java type {@code byte[]}. + * + *

        Note that this codec is not suitable for reading CQL blobs as byte arrays; you should use + * {@link SimpleBlobCodec} for that. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code byte[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code byte} + * values; it also instantiates arrays without the need for an intermediary Java {@code List} + * object. + */ +@Immutable +public class ByteListToArrayCodec extends AbstractPrimitiveListToArrayCodec { + + public ByteListToArrayCodec() { + super(DataTypes.listOf(DataTypes.SMALLINT), GenericType.of(byte[].class)); + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + Objects.requireNonNull(javaClass); + return byte[].class.equals(javaClass); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof byte[]; + } + + @Override + protected int sizeOfComponentType() { + return 1; + } + + @Override + protected void serializeElement( + @NonNull ByteBuffer output, + @NonNull byte[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + output.put(array[index]); + } + + @Override + protected void deserializeElement( + @NonNull ByteBuffer input, + @NonNull byte[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + array[index] = input.get(); + } + + @Override + protected void formatElement(@NonNull StringBuilder output, @NonNull byte[] array, int index) { + output.append(array[index]); + } + + @Override + protected void parseElement(@NonNull String input, @NonNull byte[] array, int index) { + array[index] = Byte.parseByte(input); + } + + @NonNull + @Override + protected byte[] newInstance(int size) { + return new byte[size]; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleListToArrayCodec.java new file mode 100644 index 00000000000..c025a8fb390 --- /dev/null +++ 
// File: core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleListToArrayCodec.java
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.internal.core.type.codec.extras.array;

import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.nio.ByteBuffer;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * A codec mapping the CQL type {@code list<double>} directly to the Java type {@code double[]}.
 *
 * <p>Designed for performance: elements are read and written as primitive {@code double} values,
 * avoiding boxing/unboxing, and arrays are instantiated without an intermediary Java {@code List}.
 */
@Immutable
public class DoubleListToArrayCodec extends AbstractPrimitiveListToArrayCodec<double[]> {

  public DoubleListToArrayCodec() {
    super(DataTypes.listOf(DataTypes.DOUBLE), GenericType.of(double[].class));
  }

  @Override
  public boolean accepts(@NonNull Class<?> javaClass) {
    Objects.requireNonNull(javaClass);
    return javaClass == double[].class;
  }

  @Override
  public boolean accepts(@NonNull Object value) {
    Objects.requireNonNull(value);
    return value instanceof double[];
  }

  // A CQL double is encoded on Double.BYTES == 8 bytes.
  @Override
  protected int sizeOfComponentType() {
    return Double.BYTES;
  }

  @NonNull
  @Override
  protected double[] newInstance(int size) {
    return new double[size];
  }

  @Override
  protected void serializeElement(
      @NonNull ByteBuffer output,
      @NonNull double[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    output.putDouble(array[index]);
  }

  @Override
  protected void deserializeElement(
      @NonNull ByteBuffer input,
      @NonNull double[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    array[index] = input.getDouble();
  }

  @Override
  protected void formatElement(@NonNull StringBuilder output, @NonNull double[] array, int index) {
    output.append(array[index]);
  }

  @Override
  protected void parseElement(@NonNull String input, @NonNull double[] array, int index) {
    array[index] = Double.parseDouble(input);
  }
}
// ---------------------------------------------------------------------------
// Next file: FloatListToArrayCodec.java
// ---------------------------------------------------------------------------
// File: core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatListToArrayCodec.java
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.internal.core.type.codec.extras.array;

import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.nio.ByteBuffer;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * A codec mapping the CQL type {@code list<float>} directly to the Java type {@code float[]}.
 *
 * <p>Designed for performance: elements are read and written as primitive {@code float} values,
 * avoiding boxing/unboxing, and arrays are instantiated without an intermediary Java {@code List}.
 */
@Immutable
public class FloatListToArrayCodec extends AbstractPrimitiveListToArrayCodec<float[]> {

  public FloatListToArrayCodec() {
    super(DataTypes.listOf(DataTypes.FLOAT), GenericType.of(float[].class));
  }

  @Override
  public boolean accepts(@NonNull Class<?> javaClass) {
    Objects.requireNonNull(javaClass);
    return javaClass == float[].class;
  }

  @Override
  public boolean accepts(@NonNull Object value) {
    Objects.requireNonNull(value);
    return value instanceof float[];
  }

  // A CQL float is encoded on Float.BYTES == 4 bytes.
  @Override
  protected int sizeOfComponentType() {
    return Float.BYTES;
  }

  @NonNull
  @Override
  protected float[] newInstance(int size) {
    return new float[size];
  }

  @Override
  protected void serializeElement(
      @NonNull ByteBuffer output,
      @NonNull float[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    output.putFloat(array[index]);
  }

  @Override
  protected void deserializeElement(
      @NonNull ByteBuffer input,
      @NonNull float[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    array[index] = input.getFloat();
  }

  @Override
  protected void formatElement(@NonNull StringBuilder output, @NonNull float[] array, int index) {
    output.append(array[index]);
  }

  @Override
  protected void parseElement(@NonNull String input, @NonNull float[] array, int index) {
    array[index] = Float.parseFloat(input);
  }
}
// ---------------------------------------------------------------------------
// Next file: IntListToArrayCodec.java
// ---------------------------------------------------------------------------
// File: core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntListToArrayCodec.java
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.internal.core.type.codec.extras.array;

import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.nio.ByteBuffer;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * A codec mapping the CQL type {@code list<int>} directly to the Java type {@code int[]}.
 *
 * <p>Designed for performance: elements are read and written as primitive {@code int} values,
 * avoiding boxing/unboxing, and arrays are instantiated without an intermediary Java {@code List}.
 */
@Immutable
public class IntListToArrayCodec extends AbstractPrimitiveListToArrayCodec<int[]> {

  public IntListToArrayCodec() {
    super(DataTypes.listOf(DataTypes.INT), GenericType.of(int[].class));
  }

  @Override
  public boolean accepts(@NonNull Class<?> javaClass) {
    Objects.requireNonNull(javaClass);
    return javaClass == int[].class;
  }

  @Override
  public boolean accepts(@NonNull Object value) {
    Objects.requireNonNull(value);
    return value instanceof int[];
  }

  // A CQL int is encoded on Integer.BYTES == 4 bytes.
  @Override
  protected int sizeOfComponentType() {
    return Integer.BYTES;
  }

  @NonNull
  @Override
  protected int[] newInstance(int size) {
    return new int[size];
  }

  @Override
  protected void serializeElement(
      @NonNull ByteBuffer output,
      @NonNull int[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    output.putInt(array[index]);
  }

  @Override
  protected void deserializeElement(
      @NonNull ByteBuffer input,
      @NonNull int[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    array[index] = input.getInt();
  }

  @Override
  protected void formatElement(@NonNull StringBuilder output, @NonNull int[] array, int index) {
    output.append(array[index]);
  }

  @Override
  protected void parseElement(@NonNull String input, @NonNull int[] array, int index) {
    array[index] = Integer.parseInt(input);
  }
}
// ---------------------------------------------------------------------------
// Next file: LongListToArrayCodec.java
// ---------------------------------------------------------------------------
// File: core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongListToArrayCodec.java
/*
 * Copyright DataStax, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.oss.driver.internal.core.type.codec.extras.array;

import com.datastax.oss.driver.api.core.ProtocolVersion;
import com.datastax.oss.driver.api.core.type.DataTypes;
import com.datastax.oss.driver.api.core.type.reflect.GenericType;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.nio.ByteBuffer;
import java.util.Objects;
import net.jcip.annotations.Immutable;

/**
 * A codec mapping the CQL type {@code list<bigint>} directly to the Java type {@code long[]}.
 *
 * <p>Designed for performance: elements are read and written as primitive {@code long} values,
 * avoiding boxing/unboxing, and arrays are instantiated without an intermediary Java {@code List}.
 */
@Immutable
public class LongListToArrayCodec extends AbstractPrimitiveListToArrayCodec<long[]> {

  public LongListToArrayCodec() {
    super(DataTypes.listOf(DataTypes.BIGINT), GenericType.of(long[].class));
  }

  @Override
  public boolean accepts(@NonNull Class<?> javaClass) {
    Objects.requireNonNull(javaClass);
    return javaClass == long[].class;
  }

  @Override
  public boolean accepts(@NonNull Object value) {
    Objects.requireNonNull(value);
    return value instanceof long[];
  }

  // A CQL bigint is encoded on Long.BYTES == 8 bytes.
  @Override
  protected int sizeOfComponentType() {
    return Long.BYTES;
  }

  @NonNull
  @Override
  protected long[] newInstance(int size) {
    return new long[size];
  }

  @Override
  protected void serializeElement(
      @NonNull ByteBuffer output,
      @NonNull long[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    output.putLong(array[index]);
  }

  @Override
  protected void deserializeElement(
      @NonNull ByteBuffer input,
      @NonNull long[] array,
      int index,
      @NonNull ProtocolVersion protocolVersion) {
    array[index] = input.getLong();
  }

  @Override
  protected void formatElement(@NonNull StringBuilder output, @NonNull long[] array, int index) {
    output.append(array[index]);
  }

  @Override
  protected void parseElement(@NonNull String input, @NonNull long[] array, int index) {
    array[index] = Long.parseLong(input);
  }
}
// ---------------------------------------------------------------------------
// Next file: ObjectListToArrayCodec.java
// ---------------------------------------------------------------------------
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectListToArrayCodec.java @@ -0,0 +1,146 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.lang.reflect.Array; +import java.nio.ByteBuffer; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * Codec dealing with Java object arrays. Serialization and deserialization of elements in the array + * is delegated to the provided element codec. + * + *

        For example, to create a codec that maps {@code list} to {@code String[]}, declare the + * following: + * + *

        {@code
        + * ObjectListToArrayCodec stringArrayCodec = new ObjectListToArrayCodec<>(TypeCodecs.TEXT);
        + * }
        + * + * @param The Java array component type this codec handles + */ +@Immutable +public class ObjectListToArrayCodec extends AbstractListToArrayCodec { + + private final TypeCodec elementCodec; + + public ObjectListToArrayCodec(@NonNull TypeCodec elementCodec) { + super( + DataTypes.listOf( + Objects.requireNonNull(elementCodec, "elementCodec must not be null").getCqlType()), + GenericType.arrayOf(elementCodec.getJavaType())); + this.elementCodec = elementCodec; + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + Class clazz = value.getClass(); + return clazz.isArray() + && clazz.getComponentType().equals(elementCodec.getJavaType().getRawType()); + } + + @Nullable + @Override + public ByteBuffer encode(@Nullable ElementT[] value, @NonNull ProtocolVersion protocolVersion) { + if (value == null) { + return null; + } + int i = 0; + ByteBuffer[] encodedElements = new ByteBuffer[value.length]; + int toAllocate = 4; // initialize with number of elements + for (ElementT elt : value) { + if (elt == null) { + throw new NullPointerException("Collection elements cannot be null"); + } + ByteBuffer encodedElement; + try { + encodedElement = elementCodec.encode(elt, protocolVersion); + } catch (ClassCastException e) { + throw new IllegalArgumentException( + String.format( + "Invalid type for %s element, expecting %s but got %s", + cqlType, elementCodec.getJavaType(), elt.getClass()), + e); + } + if (encodedElement == null) { + throw new NullPointerException("Collection elements cannot encode to CQL NULL"); + } + encodedElements[i++] = encodedElement; + toAllocate += 4 + encodedElement.remaining(); // the element preceded by its size + } + ByteBuffer result = ByteBuffer.allocate(toAllocate); + result.putInt(value.length); + for (ByteBuffer encodedElement : encodedElements) { + result.putInt(encodedElement.remaining()); + result.put(encodedElement); + } + result.flip(); + return result; + } + + @Nullable + @Override + 
public ElementT[] decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + if (bytes == null || bytes.remaining() == 0) { + return newInstance(0); + } + ByteBuffer input = bytes.duplicate(); + int size = input.getInt(); + ElementT[] result = newInstance(size); + for (int i = 0; i < size; i++) { + ElementT element; + int elementSize = input.getInt(); + // Allow null elements on the decode path, because Cassandra might return such collections + // for some computed values in the future -- e.g. SELECT ttl(some_collection) + if (elementSize < 0) { + element = null; + } else { + ByteBuffer encodedElement = input.slice(); + encodedElement.limit(elementSize); + element = elementCodec.decode(encodedElement, protocolVersion); + input.position(input.position() + elementSize); + } + result[i] = element; + } + return result; + } + + @Override + protected void formatElement( + @NonNull StringBuilder output, @NonNull ElementT[] array, int index) { + output.append(elementCodec.format(array[index])); + } + + @Override + protected void parseElement(@NonNull String input, @NonNull ElementT[] array, int index) { + array[index] = elementCodec.parse(input); + } + + @NonNull + @Override + @SuppressWarnings("unchecked") + protected ElementT[] newInstance(int size) { + return (ElementT[]) Array.newInstance(getJavaType().getRawType().getComponentType(), size); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortListToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortListToArrayCodec.java new file mode 100644 index 00000000000..bbb9a93815c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortListToArrayCodec.java @@ -0,0 +1,90 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A codec that maps the CQL type {@code list} to the Java type {@code short[]}. + * + *

        Note that this codec is designed for performance and converts CQL lists directly to + * {@code short[]}, thus avoiding any unnecessary boxing and unboxing of Java primitive {@code + * short} values; it also instantiates arrays without the need for an intermediary Java {@code List} + * object. + */ +@Immutable +public class ShortListToArrayCodec extends AbstractPrimitiveListToArrayCodec { + + public ShortListToArrayCodec() { + super(DataTypes.listOf(DataTypes.SMALLINT), GenericType.of(short[].class)); + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return short[].class.equals(javaClass); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof short[]; + } + + @Override + protected int sizeOfComponentType() { + return 2; + } + + @Override + protected void serializeElement( + @NonNull ByteBuffer output, + @NonNull short[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + output.putShort(array[index]); + } + + @Override + protected void deserializeElement( + @NonNull ByteBuffer input, + @NonNull short[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + array[index] = input.getShort(); + } + + @Override + protected void formatElement(@NonNull StringBuilder output, @NonNull short[] array, int index) { + output.append(array[index]); + } + + @Override + protected void parseElement(@NonNull String input, @NonNull short[] array, int index) { + array[index] = Short.parseShort(input); + } + + @NonNull + @Override + protected short[] newInstance(int size) { + return new short[size]; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodec.java new file mode 100644 index 00000000000..f98412e21b5 --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodec.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.enums; + +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A codec that serializes {@link Enum} instances as CQL {@code varchar}s representing their + * programmatic names as returned by {@link Enum#name()}. + * + *

        Note that this codec relies on the enum constant names; it is therefore vital that + * enum names never change. + * + * @param The Enum class this codec serializes from and deserializes to. + */ +@Immutable +public class EnumNameCodec> extends MappingCodec { + + private final Class enumClass; + + public EnumNameCodec(@NonNull Class enumClass) { + super( + TypeCodecs.TEXT, + GenericType.of(Objects.requireNonNull(enumClass, "enumClass must not be null"))); + this.enumClass = enumClass; + } + + @Nullable + @Override + protected EnumT innerToOuter(@Nullable String value) { + return value == null || value.isEmpty() ? null : Enum.valueOf(enumClass, value); + } + + @Nullable + @Override + protected String outerToInner(@Nullable EnumT value) { + return value == null ? null : value.name(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodec.java new file mode 100644 index 00000000000..d8a826ba74a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodec.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.enums; + +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A codec that serializes {@link Enum} instances as CQL {@code int}s representing their ordinal + * values as returned by {@link Enum#ordinal()}. + * + *

        Note that this codec relies on the enum constants declaration order; it is therefore + * vital that this order remains immutable. + * + * @param The Enum class this codec serializes from and deserializes to. + */ +@Immutable +public class EnumOrdinalCodec> extends MappingCodec { + + private final EnumT[] enumConstants; + + public EnumOrdinalCodec(@NonNull Class enumClass) { + super( + TypeCodecs.INT, + GenericType.of(Objects.requireNonNull(enumClass, "enumClass must not be null"))); + this.enumConstants = enumClass.getEnumConstants(); + } + + @Nullable + @Override + protected EnumT innerToOuter(@Nullable Integer value) { + return value == null ? null : enumConstants[value]; + } + + @Nullable + @Override + protected Integer outerToInner(@Nullable EnumT value) { + return value == null ? null : value.ordinal(); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodec.java similarity index 51% rename from examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodec.java index ea9c8808b38..f95545f9855 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/JacksonJsonCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodec.java @@ -13,7 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.examples.json.codecs; +package com.datastax.oss.driver.internal.core.type.codec.extras.json; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.type.DataType; @@ -30,14 +30,11 @@ import edu.umd.cs.findbugs.annotations.Nullable; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.Objects; /** - * A JSON codec that uses the Jackson library to - * perform serialization and deserialization of JSON objects. - * - *

        This codec maps a single Java object to a single JSON structure at a time; mapping of arrays - * or collections to root-level JSON arrays is not supported, but such a codec can be easily crafted - * after this one. + * A JSON codec that maps arbitrary Java objects to JSON strings stored as CQL type {@code text}, + * using the Jackson library to perform serialization and deserialization of JSON objects. * *

        Note that this codec requires the presence of Jackson library at runtime. If you use Maven, * this can be done by declaring the following dependency in your project: @@ -46,23 +43,47 @@ * * com.fasterxml.jackson.core * jackson-databind - * 2.9.8 + * LATEST * * } + * + * @see Jackson JSON Library + * @param The Java type that this codec serializes from and deserializes to, from JSON strings. */ -public class JacksonJsonCodec implements TypeCodec { +public class JsonCodec implements TypeCodec { private final ObjectMapper objectMapper; private final GenericType javaType; + private final JavaType jacksonJavaType; /** * Creates a new instance for the provided {@code javaClass}, using a default, newly-allocated * {@link ObjectMapper}. * + *

        The codec created with this constructor can handle all primitive CQL types as well as + * collections thereof, however it cannot handle tuples and user-defined types; if you need + * support for such CQL types, you need to create your own {@link ObjectMapper} and use the + * {@linkplain #JsonCodec(Class, ObjectMapper) two-arg constructor} instead. + * * @param javaClass the Java class this codec maps to. */ - public JacksonJsonCodec(Class javaClass) { - this(javaClass, new ObjectMapper()); + public JsonCodec(@NonNull Class javaClass) { + this(GenericType.of(Objects.requireNonNull(javaClass, "javaClass cannot be null"))); + } + + /** + * Creates a new instance for the provided {@code javaType}, using a default, newly-allocated + * {@link ObjectMapper}. + * + *

        The codec created with this constructor can handle all primitive CQL types as well as + * collections thereof, however it cannot handle tuples and user-defined types; if you need + * support for such CQL types, you need to create your own {@link ObjectMapper} and use the + * {@linkplain #JsonCodec(GenericType, ObjectMapper) two-arg constructor} instead. + * + * @param javaType the Java type this codec maps to. + */ + public JsonCodec(@NonNull GenericType javaType) { + this(javaType, new ObjectMapper()); } /** @@ -70,10 +91,25 @@ public JacksonJsonCodec(Class javaClass) { * ObjectMapper}. * * @param javaClass the Java class this codec maps to. + * @param objectMapper the {@link ObjectMapper} instance to use. */ - public JacksonJsonCodec(Class javaClass, ObjectMapper jsonMapper) { - this.javaType = GenericType.of(javaClass); - this.objectMapper = jsonMapper; + public JsonCodec(@NonNull Class javaClass, @NonNull ObjectMapper objectMapper) { + this( + GenericType.of(Objects.requireNonNull(javaClass, "javaClass cannot be null")), + objectMapper); + } + + /** + * Creates a new instance for the provided {@code javaType}, and using the provided {@link + * ObjectMapper}. + * + * @param javaType the Java type this codec maps to. + * @param objectMapper the {@link ObjectMapper} instance to use. 
+ */ + public JsonCodec(@NonNull GenericType javaType, @NonNull ObjectMapper objectMapper) { + this.javaType = Objects.requireNonNull(javaType, "javaType cannot be null"); + this.objectMapper = Objects.requireNonNull(objectMapper, "objectMapper cannot be null"); + this.jacksonJavaType = TypeFactory.defaultInstance().constructType(javaType.getType()); } @NonNull @@ -97,7 +133,7 @@ public ByteBuffer encode(@Nullable T value, @NonNull ProtocolVersion protocolVer try { return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value)); } catch (JsonProcessingException e) { - throw new IllegalArgumentException(e.getMessage(), e); + throw new IllegalArgumentException("Failed to encode value as JSON", e); } } @@ -108,31 +144,30 @@ public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVer return null; } try { - return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType()); + return objectMapper.readValue(Bytes.getArray(bytes), jacksonJavaType); } catch (IOException e) { - throw new IllegalArgumentException(e.getMessage(), e); + throw new IllegalArgumentException("Failed to decode JSON value", e); } } @NonNull @Override - public String format(T value) { + public String format(@Nullable T value) { if (value == null) { return "NULL"; } String json; try { json = objectMapper.writeValueAsString(value); - } catch (IOException e) { - throw new IllegalArgumentException(e.getMessage(), e); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException("Failed to format value as JSON", e); } return Strings.quote(json); } @Nullable @Override - @SuppressWarnings("unchecked") - public T parse(String value) { + public T parse(@Nullable String value) { if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) { return null; } @@ -141,21 +176,9 @@ public T parse(String value) { } String json = Strings.unquote(value); try { - return (T) objectMapper.readValue(json, toJacksonJavaType()); + return objectMapper.readValue(json, 
jacksonJavaType); } catch (IOException e) { - throw new IllegalArgumentException(e.getMessage(), e); + throw new IllegalArgumentException("Failed to parse value as JSON", e); } } - - /** - * This method acts as a bridge between the driver's {@link - * com.datastax.oss.driver.api.core.type.reflect.GenericType GenericType} API and Jackson's {@link - * JavaType} API. - * - * @return A {@link JavaType} instance corresponding to the codec's {@link #getJavaType() Java - * type}. - */ - private JavaType toJacksonJavaType() { - return TypeFactory.defaultInstance().constructType(getJavaType().getType()); - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodec.java new file mode 100644 index 00000000000..98bc54c5455 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodec.java @@ -0,0 +1,82 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * {@link TypeCodec} that maps {@link LocalDateTime} to CQL {@code timestamp}, allowing the setting + * and retrieval of {@code timestamp} columns as {@link LocalDateTime} instances. + * + *

        This codec shares its logic with {@link TimestampCodec}. See the javadocs of this codec for + * important remarks about implementation notes and accepted timestamp formats. + */ +@Immutable +public class LocalTimestampCodec extends MappingCodec { + + private final ZoneId timeZone; + + /** + * Creates a new {@code LocalTimestampCodec} that converts CQL timestamps into {@link + * LocalDateTime} instances using the system's {@linkplain ZoneId#systemDefault() default time + * zone} as their time zone. The supplied {@code timeZone} will also be used to parse CQL + * timestamp literals that do not include any time zone information. + */ + public LocalTimestampCodec() { + this(ZoneId.systemDefault()); + } + + /** + * Creates a new {@code LocalTimestampCodec} that converts CQL timestamps into {@link + * LocalDateTime} instances using the given {@link ZoneId} as their time zone. The supplied {@code + * timeZone} will also be used to parse CQL timestamp literals that do not include any time zone + * information. + */ + public LocalTimestampCodec(@NonNull ZoneId timeZone) { + super( + new TimestampCodec(Objects.requireNonNull(timeZone, "timeZone cannot be null")), + GenericType.LOCAL_DATE_TIME); + this.timeZone = timeZone; + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof LocalDateTime; + } + + @Nullable + @Override + protected LocalDateTime innerToOuter(@Nullable Instant value) { + return value == null ? null : LocalDateTime.ofInstant(value, timeZone); + } + + @Nullable + @Override + protected Instant outerToInner(@Nullable LocalDateTime value) { + return value == null ? 
null : value.atZone(timeZone).toInstant(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java new file mode 100644 index 00000000000..0cb1681d344 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java @@ -0,0 +1,104 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * {@link TypeCodec} that maps {@link ZonedDateTime} to CQL {@code tuple}, + * providing a pattern for maintaining timezone information in Cassandra. + * + *

        Since Cassandra's timestamp type does not store any time zone, by using a + * tuple<timestamp,varchar> a timezone can be persisted in the varchar + * field of such tuples, and so when the value is deserialized the original timezone is + * preserved. + * + *

        Note: if you want to retrieve CQL timestamps as {@link ZonedDateTime} instances but don't need + * to persist the time zone to the database, you should rather use {@link ZonedTimestampCodec}. + */ +@Immutable +public class PersistentZonedTimestampCodec extends MappingCodec { + + private static final TupleType CQL_TYPE = DataTypes.tupleOf(DataTypes.TIMESTAMP, DataTypes.TEXT); + + public PersistentZonedTimestampCodec() { + super(TypeCodecs.tupleOf(CQL_TYPE), GenericType.ZONED_DATE_TIME); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof ZonedDateTime; + } + + @NonNull + @Override + public TupleType getCqlType() { + return CQL_TYPE; + } + + @NonNull + @Override + public String format(@Nullable ZonedDateTime value) { + if (value == null) { + return "NULL"; + } + // Use TIMESTAMP_UTC for a better-looking format + return "(" + + ExtraTypeCodecs.TIMESTAMP_UTC.format(value.toInstant()) + + "," + + TypeCodecs.TEXT.format(value.getZone().toString()) + + ")"; + } + + @Nullable + @Override + protected ZonedDateTime innerToOuter(@Nullable TupleValue value) { + if (value == null) { + return null; + } else { + Instant instant = Objects.requireNonNull(value.getInstant(0)); + ZoneId zoneId = ZoneId.of(Objects.requireNonNull(value.getString(1))); + return ZonedDateTime.ofInstant(instant, zoneId); + } + } + + @Nullable + @Override + protected TupleValue outerToInner(@Nullable ZonedDateTime value) { + if (value == null) { + return null; + } else { + Instant instant = value.toInstant(); + String zoneId = value.getZone().toString(); + return getCqlType().newValue(instant, zoneId); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java new file mode 100644 index 00000000000..f4b78d4919f --- /dev/null +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.PrimitiveLongCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.time.Instant; +import java.time.ZoneId; +import java.util.Objects; +import net.jcip.annotations.Immutable; + +/** + * A {@link TypeCodec} that maps CQL timestamps to Java primitive longs, representing the number of + * milliseconds since the Epoch. + * + *

        This codec can serve as a replacement for the driver's built-in {@link TypeCodecs#TIMESTAMP + * timestamp} codec, when application code prefers to deal with raw milliseconds than with {@link + * Instant} instances. + * + *

        This codec shares its logic with {@link TimestampCodec}. See the javadocs of this codec for + * important remarks about implementation notes and accepted timestamp formats. + */ +@Immutable +public class TimestampMillisCodec implements PrimitiveLongCodec { + + private final TimestampCodec timestampCodec; + + /** + * Creates a new {@code TimestampMillisCodec} that uses the system's {@linkplain + * ZoneId#systemDefault() default time zone} to parse timestamp literals that do not include any + * time zone information. + */ + public TimestampMillisCodec() { + this(ZoneId.systemDefault()); + } + + /** + * Creates a new {@code TimestampMillisCodec} that uses the given {@link ZoneId} to parse + * timestamp literals that do not include any time zone information. + */ + public TimestampMillisCodec(ZoneId defaultZoneId) { + timestampCodec = new TimestampCodec(defaultZoneId); + } + + @NonNull + @Override + public GenericType getJavaType() { + return GenericType.LONG; + } + + @NonNull + @Override + public DataType getCqlType() { + return DataTypes.TIMESTAMP; + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + return javaClass == Long.class || javaClass == long.class; + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof Long; + } + + @Nullable + @Override + public ByteBuffer encodePrimitive(long value, @NonNull ProtocolVersion protocolVersion) { + return TypeCodecs.BIGINT.encodePrimitive(value, protocolVersion); + } + + @Override + public long decodePrimitive( + @Nullable ByteBuffer value, @NonNull ProtocolVersion protocolVersion) { + return TypeCodecs.BIGINT.decodePrimitive(value, protocolVersion); + } + + @Nullable + @Override + public Long parse(@Nullable String value) { + Instant instant = timestampCodec.parse(value); + return instant == null ? 
null : instant.toEpochMilli(); + } + + @NonNull + @Override + public String format(@Nullable Long value) { + Instant instant = value == null ? null : Instant.ofEpochMilli(value); + return timestampCodec.format(instant); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodec.java similarity index 59% rename from core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodec.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodec.java index 16649fd8daa..46015d25597 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodec.java @@ -13,19 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.internal.core.type.codec; +package com.datastax.oss.driver.internal.core.type.codec.extras.time; -import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import java.nio.ByteBuffer; import java.time.Instant; import java.time.ZoneId; import java.time.ZonedDateTime; +import java.util.Objects; import net.jcip.annotations.ThreadSafe; /** @@ -34,7 +32,8 @@ * *

        Note that Apache Cassandra(R)'s timestamp type does not store any time zone; this codec is * provided merely as a convenience for users that need to deal with zoned timestamps in their - * applications. + * applications. If you need to persist the time zone in the database, consider using {@link + * PersistentZonedTimestampCodec} instead. * *

        This codec shares its logic with {@link TimestampCodec}. See the javadocs of this codec for * important remarks about implementation notes and accepted timestamp formats. @@ -42,9 +41,8 @@ * @see TimestampCodec */ @ThreadSafe -public class ZonedTimestampCodec implements TypeCodec { +public class ZonedTimestampCodec extends MappingCodec { - private final TypeCodec instantCodec; private final ZoneId timeZone; /** @@ -64,63 +62,27 @@ public ZonedTimestampCodec() { * information. */ public ZonedTimestampCodec(ZoneId timeZone) { - instantCodec = new TimestampCodec(timeZone); + super( + new TimestampCodec(Objects.requireNonNull(timeZone, "timeZone cannot be null")), + GenericType.ZONED_DATE_TIME); this.timeZone = timeZone; } - @NonNull - @Override - public GenericType getJavaType() { - return GenericType.ZONED_DATE_TIME; - } - - @NonNull - @Override - public DataType getCqlType() { - return DataTypes.TIMESTAMP; - } - @Override public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); return value instanceof ZonedDateTime; } - @Override - public boolean accepts(@NonNull Class javaClass) { - return javaClass == ZonedDateTime.class; - } - - @Nullable - @Override - public ByteBuffer encode( - @Nullable ZonedDateTime value, @NonNull ProtocolVersion protocolVersion) { - return instantCodec.encode(value != null ? value.toInstant() : null, protocolVersion); - } - @Nullable @Override - public ZonedDateTime decode( - @Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { - Instant instant = instantCodec.decode(bytes, protocolVersion); - if (instant == null) { - return null; - } - return instant.atZone(timeZone); - } - - @NonNull - @Override - public String format(@Nullable ZonedDateTime value) { - return instantCodec.format(value != null ? value.toInstant() : null); + protected ZonedDateTime innerToOuter(@Nullable Instant value) { + return value == null ? 
null : value.atZone(timeZone); } @Nullable @Override - public ZonedDateTime parse(@Nullable String value) { - Instant instant = instantCodec.parse(value); - if (instant == null) { - return null; - } - return instant.atZone(timeZone); + protected Instant outerToInner(@Nullable ZonedDateTime value) { + return value == null ? null : value.toInstant(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodecTest.java new file mode 100644 index 00000000000..c5fd0b88899 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/SimpleBlobCodecTest.java @@ -0,0 +1,96 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.nio.ByteBuffer; +import org.junit.Test; + +public class SimpleBlobCodecTest extends CodecTestBase { + + private static final ByteBuffer BUFFER = Bytes.fromHexString("0xcafebabe"); + private static final byte[] ARRAY = Bytes.getArray(Bytes.fromHexString("0xcafebabe")); + + public SimpleBlobCodecTest() { + this.codec = ExtraTypeCodecs.BLOB_TO_ARRAY; + } + + @Test + public void should_encode() { + assertThat(encode(ARRAY)).isEqualTo("0xcafebabe"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_not_share_position_between_input_and_encoded() { + ByteBuffer encoded = codec.encode(ARRAY, ProtocolVersion.DEFAULT); + assertThat(encoded).isNotNull(); + assertThat(ARRAY).isEqualTo(Bytes.getArray(encoded)); + } + + @Test + public void should_decode() { + assertThat(decode("0xcafebabe")).isEqualTo(ARRAY); + assertThat(decode("0x")).hasSize(0); + assertThat(decode(null)).isNull(); + } + + @Test + public void should_not_share_position_between_decoded_and_input() { + byte[] decoded = codec.decode(BUFFER, ProtocolVersion.DEFAULT); + assertThat(decoded).isEqualTo(ARRAY); + } + + @Test + public void should_format() { + assertThat(format(ARRAY)).isEqualTo("0xcafebabe"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("0xcafebabe")).isEqualTo(ARRAY); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_input() { + parse("not a 
blob"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.of(byte[].class))).isTrue(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(byte[].class)).isTrue(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(ARRAY)).isTrue(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodecTest.java new file mode 100644 index 00000000000..21fc0959177 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/OptionalCodecTest.java @@ -0,0 +1,83 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import java.util.Optional; +import org.junit.Before; +import org.junit.Test; + +public class OptionalCodecTest extends CodecTestBase> { + + @Before + public void setup() { + codec = ExtraTypeCodecs.optionalOf(TypeCodecs.INT); + } + + @Test + public void should_encode() { + // Our codec relies on the JDK's ByteBuffer API. We're not testing the JDK, so no need to try + // a thousand different values. + assertThat(encode(Optional.of(1))).isEqualTo("0x00000001"); + assertThat(encode(Optional.empty())).isNull(); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode("0x00000001")).isPresent().contains(1); + assertThat(decode("0x")).isEmpty(); + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_format() { + assertThat(format(Optional.of(1))).isEqualTo("1"); + assertThat(format(Optional.empty())).isEqualTo("NULL"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("1")).isPresent().contains(1); + assertThat(parse("NULL")).isEmpty(); + assertThat(parse("null")).isEmpty(); + assertThat(parse("")).isEmpty(); + assertThat(parse(null)).isEmpty(); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.optionalOf(Integer.class))).isTrue(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(Optional.class)).isTrue(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(Optional.of(1))).isTrue(); + assertThat(codec.accepts(Optional.empty())).isTrue(); + 
assertThat(codec.accepts(Optional.of("foo"))).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanArrayCodecTest.java new file mode 100644 index 00000000000..c43c88c4f28 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/BooleanArrayCodecTest.java @@ -0,0 +1,145 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class BooleanArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new boolean[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new boolean[] {true, false})) + .isEqualTo( + "0x" + + "00000002" // number of elements + + "00000001" // size of element 1 + + "01" // contents of element 1 + + "00000001" // size of element 2 + + "00" // contents of element 2 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000002" // number of elements + + "00000001" // size of element 1 + + "01" // contents of element 1 + + "00000001" // size of element 2 + + "00" // contents of element 2 + )) + .containsExactly(true, false); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new 
boolean[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new boolean[] {true, false})).isEqualTo("[true,false]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[true,false]")).containsExactly(true, false); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Boolean.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Boolean.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Boolean.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Boolean.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new boolean[] {true, false})).isTrue(); + assertThat(codec.accepts(new Boolean[] {true, false})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteArrayCodecTest.java new file mode 100644 index 00000000000..1d545d3b5a5 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ByteArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class ByteArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.BYTE_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new byte[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new byte[] {1, 2, 3})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000001" // size of element 1 + + "01" // contents of element 1 + + "00000001" // size of element 2 + + "02" // contents of element 2 + + "00000001" // size of element 3 + + "03" // contents of element 3 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + 
"00000003" // number of elements + + "00000001" // size of element 1 + + "01" // contents of element 1 + + "00000001" // size of element 2 + + "02" // contents of element 2 + + "00000001" // size of element 3 + + "03" // contents of element 3 + )) + .containsExactly((byte) 1, (byte) 2, (byte) 3); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new byte[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new byte[] {1, 2, 3})).isEqualTo("[1,2,3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1,2,3]")).containsExactly((byte) 1, (byte) 2, (byte) 3); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Byte.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Byte.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Byte.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Byte.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void 
should_accept_object() { + assertThat(codec.accepts(new byte[] {1, 2, 3})).isTrue(); + assertThat(codec.accepts(new Byte[] {1, 2, 3})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleArrayCodecTest.java new file mode 100644 index 00000000000..1a3ae412cb3 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/DoubleArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class DoubleArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new double[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new double[] {1.1d, 2.2d, 3.3d})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000008" // size of element 1 + + "3ff199999999999a" // contents of element 1 + + "00000008" // size of element 2 + + "400199999999999a" // contents of element 2 + + "00000008" // size of element 3 + + "400a666666666666" // contents of element 3 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000003" // number of elements + + "00000008" // size of element 1 + + "3ff199999999999a" // contents of element 1 + + "00000008" // size of element 2 + + "400199999999999a" // contents of element 2 + + "00000008" // size of element 3 + + "400a666666666666" // contents of element 3 + )) + .containsExactly(1.1d, 2.2d, 3.3d); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" 
// size of element 1 (-1 for null) + ); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new double[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new double[] {1.1d, 2.2d, 3.3d})).isEqualTo("[1.1,2.2,3.3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1.1,2.2,3.3]")).containsExactly(1.1d, 2.2d, 3.3d); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Double.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Double.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Double.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Double.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new double[] {1.1d, 2.2d, 3.3d})).isTrue(); + assertThat(codec.accepts(new Double[] {1.1d, 2.2d, 3.3d})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatArrayCodecTest.java new 
file mode 100644 index 00000000000..cca7604f9ac --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/FloatArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class FloatArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new float[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new float[] {1.1f, 2.2f, 3.3f})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000004" // size of element 1 + + "3f8ccccd" // contents of element 1 + + "00000004" // size of element 2 + + "400ccccd" // contents of element 2 + + "00000004" // size of element 3 + + "40533333" // contents of element 3 + ); + } + + @Test + public void 
should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000003" // number of elements + + "00000004" // size of element 1 + + "3f8ccccd" // contents of element 1 + + "00000004" // size of element 2 + + "400ccccd" // contents of element 2 + + "00000004" // size of element 3 + + "40533333" // contents of element 3 + )) + .containsExactly(1.1f, 2.2f, 3.3f); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new float[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new float[] {1.1f, 2.2f, 3.3f})).isEqualTo("[1.1,2.2,3.3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1.1,2.2,3.3]")).containsExactly(1.1f, 2.2f, 3.3f); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Float.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Float.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + 
assertThat(codec.accepts(GenericType.arrayOf(Float.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Float.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new float[] {1.1f, 2.2f, 3.3f})).isTrue(); + assertThat(codec.accepts(new Float[] {1.1f, 2.2f, 3.3f})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntArrayCodecTest.java new file mode 100644 index 00000000000..c26defda4fe --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/IntArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class IntArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.INT_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new int[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new int[] {1, 2, 3})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000004" // size of element 1 + + "00000001" // contents of element 1 + + "00000004" // size of element 2 + + "00000002" // contents of element 2 + + "00000004" // size of element 3 + + "00000003" // contents of element 3 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000003" // number of elements + + "00000004" // size of element 1 + + "00000001" // contents of element 1 + + "00000004" // size of element 2 + + "00000002" // contents of element 2 + + "00000004" // size of element 3 + + "00000003" // contents of element 3 + )) + .containsExactly(1, 2, 3); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void 
should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new int[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new int[] {1, 2, 3})).isEqualTo("[1,2,3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1,2,3]")).containsExactly(1, 2, 3); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Integer.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Integer.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Integer.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Integer.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new int[] {1, 2, 3})).isTrue(); + assertThat(codec.accepts(new Integer[] {1, 2, 3})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongArrayCodecTest.java new file mode 100644 index 00000000000..67f34c924c9 --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/LongArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class LongArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.LONG_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new long[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new long[] {1, 2, 3})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000008" // size of element 1 + + "0000000000000001" // contents of element 1 + + "00000008" // size of element 2 + + "0000000000000002" // contents of element 2 + + "00000008" // size of element 3 + + "0000000000000003" // contents of element 3 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + 
+ @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000003" // number of elements + + "00000008" // size of element 1 + + "0000000000000001" // contents of element 1 + + "00000008" // size of element 2 + + "0000000000000002" // contents of element 2 + + "00000008" // size of element 3 + + "0000000000000003" // contents of element 3 + )) + .containsExactly(1L, 2L, 3L); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new long[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new long[] {1, 2, 3})).isEqualTo("[1,2,3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1,2,3]")).containsExactly(1L, 2L, 3L); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Long.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Long.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Long.TYPE).getRawType())).isTrue(); + 
assertThat(codec.accepts(GenericType.arrayOf(Long.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new long[] {1, 2, 3})).isTrue(); + assertThat(codec.accepts(new Long[] {1L, 2L, 3L})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectArrayCodecTest.java new file mode 100644 index 00000000000..05912864f37 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ObjectArrayCodecTest.java @@ -0,0 +1,172 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import com.datastax.oss.protocol.internal.util.Bytes; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +public class ObjectArrayCodecTest extends CodecTestBase { + + @Mock private TypeCodec elementCodec; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + when(elementCodec.getCqlType()).thenReturn(DataTypes.TEXT); + when(elementCodec.getJavaType()).thenReturn(GenericType.STRING); + codec = ExtraTypeCodecs.listToArrayOf(elementCodec); + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new String[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + when(elementCodec.encode("hello", ProtocolVersion.DEFAULT)) + .thenReturn(Bytes.fromHexString("0x68656c6c6f")); + when(elementCodec.encode("world", ProtocolVersion.DEFAULT)) + .thenReturn(Bytes.fromHexString("0x776f726c64")); + assertThat(encode(new String[] {"hello", "world"})) + .isEqualTo( + "0x" + + "00000002" // number of elements + + "00000005" // size of element 1 + + "68656c6c6f" // contents of element 1 + + "00000005" // size of element 2 + + "776f726c64" // contents of element 2 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public 
void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + when(elementCodec.decode(Bytes.fromHexString("0x68656c6c6f"), ProtocolVersion.DEFAULT)) + .thenReturn("hello"); + when(elementCodec.decode(Bytes.fromHexString("0x776f726c64"), ProtocolVersion.DEFAULT)) + .thenReturn("world"); + assertThat( + decode( + "0x" + + "00000002" // number of elements + + "00000005" // size of element 1 + + "68656c6c6f" // contents of element 1 + + "00000005" // size of element 2 + + "776f726c64" // contents of element 2 + )) + .containsExactly("hello", "world"); + } + + @Test + public void should_decode_array_with_null_elements() { + when(elementCodec.decode(Bytes.fromHexString("0x68656c6c6f"), ProtocolVersion.DEFAULT)) + .thenReturn("hello"); + assertThat( + decode( + "0x" + + "00000002" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + + "00000005" // size of element 2 + + "68656c6c6f" // contents of element 2 + )) + .containsExactly(null, "hello"); + } + + @Test + public void should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new String[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + when(elementCodec.format("hello")).thenReturn("'hello'"); + when(elementCodec.format("world")).thenReturn("'world'"); + assertThat(format(new String[] {"hello", "world"})).isEqualTo("['hello','world']"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + when(elementCodec.parse("'hello'")).thenReturn("hello"); + when(elementCodec.parse("'world'")).thenReturn("world"); + 
assertThat(parse("['hello','world']")).containsExactly("hello", "world"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Integer.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Integer.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new String[] {"hello", "world"})).isTrue(); + assertThat(codec.accepts(new Integer[] {1, 2, 3})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortArrayCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortArrayCodecTest.java new file mode 100644 index 00000000000..3cf3ec4bd90 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/array/ShortArrayCodecTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.array; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class ShortArrayCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.SHORT_LIST_TO_ARRAY; + } + + @Test + public void should_encode_null() { + assertThat(encode(null)).isNull(); + } + + @Test + public void should_encode_empty_array() { + assertThat(encode(new short[] {})).isEqualTo("0x00000000"); + } + + @Test + public void should_encode_non_empty_array() { + assertThat(encode(new short[] {1, 2, 3})) + .isEqualTo( + "0x" + + "00000003" // number of elements + + "00000002" // size of element 1 + + "0001" // contents of element 1 + + "00000002" // size of element 2 + + "0002" // contents of element 2 + + "00000002" // size of element 3 + + "0003" // contents of element 3 + ); + } + + @Test + public void should_decode_null_as_empty_array() { + assertThat(decode(null)).isEmpty(); + } + + @Test + public void should_decode_empty_array() { + assertThat(decode("0x00000000")).isEmpty(); + } + + @Test + public void should_decode_non_empty_array() { + assertThat( + decode( + "0x" + + "00000003" // number of elements + + "00000002" // size of element 1 + + "0001" // contents of element 1 + + "00000002" // size of element 2 + + "0002" // contents of element 2 + + "00000002" // size of element 3 + + "0003" // contents of element 3 + )) + .containsExactly((short) 1, (short) 2, (short) 3); + } + + @Test(expected = NullPointerException.class) + public void should_not_decode_array_with_null_elements() { + decode( + "0x" + + "00000001" // number of elements + + "FFFFFFFF" // size of element 1 (-1 for null) + ); + } + + @Test + public void 
should_format_null_array() { + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_format_empty_array() { + assertThat(format(new short[] {})).isEqualTo("[]"); + } + + @Test + public void should_format_non_empty_array() { + assertThat(format(new short[] {1, 2, 3})).isEqualTo("[1,2,3]"); + } + + @Test + public void should_parse_null_or_empty_string() { + assertThat(parse(null)).isNull(); + assertThat(parse("")).isNull(); + } + + @Test + public void should_parse_empty_array() { + assertThat(parse("[]")).isEmpty(); + } + + @Test + public void should_parse_non_empty_array() { + assertThat(parse("[1,2,3]")).containsExactly((short) 1, (short) 2, (short) 3); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_malformed_array() { + parse("not an array"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.arrayOf(Short.TYPE))).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Short.class))).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(GenericType.arrayOf(Short.TYPE).getRawType())).isTrue(); + assertThat(codec.accepts(GenericType.arrayOf(Short.class).getRawType())).isFalse(); + assertThat(codec.accepts(GenericType.arrayOf(String.class).getRawType())).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(new short[] {1, 2, 3})).isTrue(); + assertThat(codec.accepts(new Short[] {1, 2, 3})).isFalse(); + assertThat(codec.accepts(new String[] {"hello", "world"})).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodecTest.java new file mode 100644 index 00000000000..d1d5024b5ed --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumNameCodecTest.java @@ -0,0 +1,90 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.enums; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class EnumNameCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.enumNamesOf(DefaultProtocolVersion.class); + } + + @Test + public void should_encode() { + // Our codec relies on the JDK's ByteBuffer API. We're not testing the JDK, so no need to try + // a thousand different values. 
+ assertThat(encode(DefaultProtocolVersion.V3)).isEqualTo("0x5633"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode("0x5633")).isEqualTo(DefaultProtocolVersion.V3); + assertThat(decode("0x")).isNull(); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_invalid_name() { + decode("0x1234"); + } + + @Test + public void should_format() { + assertThat(format(DefaultProtocolVersion.V3)).isEqualTo("'V3'"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("'V3'")).isEqualTo(DefaultProtocolVersion.V3); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_input() { + parse("not a valid enum constant"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.of(DefaultProtocolVersion.class))).isTrue(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(DefaultProtocolVersion.class)).isTrue(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(DefaultProtocolVersion.V3)).isTrue(); + assertThat(codec.accepts(DseProtocolVersion.DSE_V1)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodecTest.java new file mode 100644 index 00000000000..fe4cc91d10d --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/enums/EnumOrdinalCodecTest.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.enums; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.dse.driver.api.core.DseProtocolVersion; +import com.datastax.oss.driver.api.core.DefaultProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import org.junit.Before; +import org.junit.Test; + +public class EnumOrdinalCodecTest extends CodecTestBase { + + @Before + public void setup() { + codec = ExtraTypeCodecs.enumOrdinalsOf(DefaultProtocolVersion.class); + } + + @Test + public void should_encode() { + // Our codec relies on the JDK's ByteBuffer API. We're not testing the JDK, so no need to try + // a thousand different values. 
+ assertThat(encode(DefaultProtocolVersion.values()[0])).isEqualTo("0x00000000"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode("0x00000000")).isEqualTo(DefaultProtocolVersion.values()[0]); + assertThat(decode("0x")).isNull(); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_not_enough_bytes() { + decode("0x0000"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_too_many_bytes() { + decode("0x0000000000000000"); + } + + @Test + public void should_format() { + assertThat(format(DefaultProtocolVersion.values()[0])).isEqualTo("0"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("0")).isEqualTo(DefaultProtocolVersion.values()[0]); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_input() { + parse("not an int"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.of(DefaultProtocolVersion.class))).isTrue(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(DefaultProtocolVersion.class)).isTrue(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(DefaultProtocolVersion.V3)).isTrue(); + assertThat(codec.accepts(DseProtocolVersion.DSE_V1)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodecTest.java new file mode 100644 index 00000000000..f977bded44a --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/json/JsonCodecTest.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.json; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.protocol.internal.util.Bytes; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Collections; +import java.util.Set; +import org.junit.Before; +import org.junit.Test; + +public class JsonCodecTest extends CodecTestBase> { + + private static final InetAddress V4_ADDRESS; + private static final InetAddress V6_ADDRESS; + private static final Set SET_OF_ADDRESSES; + + static { + try { + V4_ADDRESS = InetAddress.getByName("127.0.0.1"); + V6_ADDRESS = InetAddress.getByName("::1"); + SET_OF_ADDRESSES = ImmutableSet.of(V4_ADDRESS, V6_ADDRESS); + } catch (UnknownHostException e) { + fail("unexpected error", e); + throw new 
AssertionError(); // never reached + } + } + + @Before + public void setup() { + this.codec = ExtraTypeCodecs.json(GenericType.setOf(GenericType.INET_ADDRESS)); + } + + @Test + public void should_encode() { + assertThat(encode(SET_OF_ADDRESSES)) + .isEqualTo(encodeJson("[\"127.0.0.1\",\"0:0:0:0:0:0:0:1\"]")); + assertThat(encode(Collections.emptySet())).isEqualTo(encodeJson("[]")); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode(encodeJson("[\"127.0.0.1\",\"0:0:0:0:0:0:0:1\"]"))) + .isEqualTo(SET_OF_ADDRESSES); + assertThat(decode(encodeJson("[]"))).isEqualTo(Collections.emptySet()); + assertThat(decode(null)).isNull(); + } + + @Test + public void should_format() { + assertThat(format(SET_OF_ADDRESSES)).isEqualTo("'[\"127.0.0.1\",\"0:0:0:0:0:0:0:1\"]'"); + assertThat(format(Collections.emptySet())).isEqualTo("'[]'"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse("'[\"127.0.0.1\",\"0:0:0:0:0:0:0:1\"]'")).isEqualTo(SET_OF_ADDRESSES); + assertThat(parse("'[]'")).isEqualTo(Collections.emptySet()); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_parse_invalid_input() { + parse("not a JSON string"); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.setOf(GenericType.INET_ADDRESS))).isTrue(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(Set.class)).isTrue(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(SET_OF_ADDRESSES)).isTrue(); + assertThat(codec.accepts(Collections.emptySet())).isTrue(); + assertThat(codec.accepts(Collections.singletonList(V4_ADDRESS))).isFalse(); + } + + private String encodeJson(String json) { + return 
Bytes.toHexString(TypeCodecs.TEXT.encode(json, ProtocolVersion.DEFAULT)); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodecTest.java new file mode 100644 index 00000000000..d9e235a010b --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/LocalTimestampCodecTest.java @@ -0,0 +1,204 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodecTest; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class LocalTimestampCodecTest extends CodecTestBase { + + @Test + public void should_encode() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC; + assertThat(encode(Instant.EPOCH.atZone(ZoneOffset.UTC).toLocalDateTime())) + .isEqualTo("0x0000000000000000"); + assertThat(encode(Instant.ofEpochMilli(128).atZone(ZoneOffset.UTC).toLocalDateTime())) + .isEqualTo("0x0000000000000080"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC; + assertThat(decode("0x0000000000000000")) + .isEqualTo(Instant.EPOCH.atZone(ZoneOffset.UTC).toLocalDateTime()); + assertThat(decode("0x0000000000000080")) + .isEqualTo(Instant.ofEpochMilli(128).atZone(ZoneOffset.UTC).toLocalDateTime()); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_not_enough_bytes() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + decode("0x0000"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_too_many_bytes() { + codec = 
ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + decode("0x0000000000000000" + "0000"); + } + + @Test + public void should_format() { + codec = ExtraTypeCodecs.localTimestampAt(ZoneOffset.ofHours(2)); + // No need to test various values because the codec delegates directly to SimpleDateFormat, + // which we assume does its job correctly. + assertThat(format(LocalDateTime.parse("2018-08-16T16:59:34.123"))) + .isEqualTo("'2018-08-16T16:59:34.123+02:00'"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + @UseDataProvider(value = "timeZones", location = TimestampCodecTest.class) + public void should_parse(ZoneId defaultTimeZone) { + codec = ExtraTypeCodecs.localTimestampAt(defaultTimeZone); + + // Raw numbers + assertThat(parse("'0'")).isEqualTo(Instant.EPOCH.atZone(defaultTimeZone).toLocalDateTime()); + assertThat(parse("'-1'")) + .isEqualTo(Instant.EPOCH.minusMillis(1).atZone(defaultTimeZone).toLocalDateTime()); + assertThat(parse("1534463100000")) + .isEqualTo(Instant.ofEpochMilli(1534463100000L).atZone(defaultTimeZone).toLocalDateTime()); + + // Date formats + LocalDateTime expected; + + // date without time, without time zone + expected = LocalDate.parse("2017-01-01").atStartOfDay(); + assertThat(parse("'2017-01-01'")).isEqualTo(expected); + + // date without time, with time zone + expected = + ZonedDateTime.parse("2018-08-16T00:00:00+02:00") + .withZoneSameInstant(defaultTimeZone) + .toLocalDateTime(); + assertThat(parse("'2018-08-16+02'")).isEqualTo(expected); + assertThat(parse("'2018-08-16+0200'")).isEqualTo(expected); + assertThat(parse("'2018-08-16+02:00'")).isEqualTo(expected); + assertThat(parse("'2018-08-16 CEST'")).isEqualTo(expected); + + // date with time, without time zone + expected = LocalDateTime.parse("2018-08-16T23:45"); + assertThat(parse("'2018-08-16T23:45'")).isEqualTo(expected); + assertThat(parse("'2018-08-16 23:45'")).isEqualTo(expected); + + // date with time + seconds, without time zone + expected = 
LocalDateTime.parse("2019-12-31T16:08:38"); + assertThat(parse("'2019-12-31T16:08:38'")).isEqualTo(expected); + assertThat(parse("'2019-12-31 16:08:38'")).isEqualTo(expected); + + // date with time + seconds + milliseconds, without time zone + expected = LocalDateTime.parse("1950-02-28T12:00:59.230"); + assertThat(parse("'1950-02-28T12:00:59.230'")).isEqualTo(expected); + assertThat(parse("'1950-02-28 12:00:59.230'")).isEqualTo(expected); + + // date with time, with time zone + expected = + ZonedDateTime.parse("1973-06-23T23:59:00.000+01:00") + .withZoneSameInstant(defaultTimeZone) + .toLocalDateTime(); + assertThat(parse("'1973-06-23T23:59+01'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59+0100'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59+01:00'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59 CET'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+01'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+0100'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+01:00'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59 CET'")).isEqualTo(expected); + + // date with time + seconds, with time zone + expected = + ZonedDateTime.parse("1980-01-01T23:59:59.000-08:00") + .withZoneSameInstant(defaultTimeZone) + .toLocalDateTime(); + assertThat(parse("'1980-01-01T23:59:59-08'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59-0800'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59-08:00'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59 PST'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-08'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-0800'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-08:00'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59 PST'")).isEqualTo(expected); + + // date with time + seconds + milliseconds, with time zone + expected = + 
ZonedDateTime.parse("1999-12-31T23:59:59.999+00:00") + .withZoneSameInstant(defaultTimeZone) + .toLocalDateTime(); + assertThat(parse("'1999-12-31T23:59:59.999+00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31T23:59:59.999+0000'")).isEqualTo(expected); + assertThat(parse("'1999-12-31T23:59:59.999+00:00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31T23:59:59.999 UTC'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+0000'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+00:00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999 UTC'")).isEqualTo(expected); + + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test + public void should_fail_to_parse_invalid_input() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + assertThatThrownBy(() -> parse("not a timestamp")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Alphanumeric timestamp literal must be quoted: \"not a timestamp\""); + assertThatThrownBy(() -> parse("'not a timestamp'")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Cannot parse timestamp value from \"'not a timestamp'\""); + } + + @Test + public void should_accept_generic_type() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + assertThat(codec.accepts(GenericType.LOCAL_DATE_TIME)).isTrue(); + assertThat(codec.accepts(GenericType.INSTANT)).isFalse(); + } + + @Test + public void should_accept_raw_type() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + assertThat(codec.accepts(LocalDateTime.class)).isTrue(); + assertThat(codec.accepts(Instant.class)).isFalse(); + } + + @Test + public void should_accept_object() { + codec = ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM; + assertThat(codec.accepts(LocalDateTime.now(ZoneId.systemDefault()))).isTrue(); + 
assertThat(codec.accepts(Instant.EPOCH)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodecTest.java new file mode 100644 index 00000000000..d8a125e170f --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodecTest.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import org.junit.Before; +import org.junit.Test; + +public class PersistentZonedTimestampCodecTest extends CodecTestBase { + + private static final ZonedDateTime EPOCH_UTC = Instant.EPOCH.atZone(ZoneOffset.UTC); + + private static final ZonedDateTime EPOCH_MILLIS_CET = + Instant.ofEpochMilli(128).atZone(ZoneId.of("CET")); + + private static final ZonedDateTime EPOCH_MILLIS_OFFSET = + Instant.ofEpochMilli(128).atZone(ZoneOffset.ofHours(2)); + + private static final ZonedDateTime EPOCH_MILLIS_EUROPE_PARIS = + Instant.ofEpochMilli(-128).atZone(ZoneId.of("Europe/Paris")); + + private static final String EPOCH_UTC_ENCODED = + "0x" + + ("00000008" + "0000000000000000") // size and contents of timestamp + + ("00000001" + "5a"); // size and contents of zone ID + + private static final String EPOCH_MILLIS_CET_ENCODED = + "0x" + + ("00000008" + "0000000000000080") // size and contents of timestamp + + ("00000003" + "434554"); // size and contents of zone ID + + private static final String EPOCH_MILLIS_OFFSET_ENCODED = + "0x" + + ("00000008" + "0000000000000080") // size and contents of timestamp + + ("00000006" + "2b30323a3030"); // size and contents of zone ID + + private static final String EPOCH_MILLIS_EUROPE_PARIS_ENCODED = + "0x" + + ("00000008" + "ffffffffffffff80") // size and contents of timestamp + + ("0000000c" + "4575726f70652f5061726973"); // size and contents of zone ID + + private static final String EPOCH_UTC_FORMATTED = "('1970-01-01T00:00:00.000Z','Z')"; + + private static final String 
EPOCH_MILLIS_CET_FORMATTED = "('1970-01-01T00:00:00.128Z','CET')"; + + private static final String EPOCH_MILLIS_OFFSET_FORMATTED = + "('1970-01-01T00:00:00.128Z','+02:00')"; + + private static final String EPOCH_MILLIS_EUROPE_PARIS_FORMATTED = + "('1969-12-31T23:59:59.872Z','Europe/Paris')"; + + @Before + public void setup() { + codec = ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED; + } + + @Test + public void should_encode() { + assertThat(encode(EPOCH_UTC)).isEqualTo(EPOCH_UTC_ENCODED); + assertThat(encode(EPOCH_MILLIS_CET)).isEqualTo(EPOCH_MILLIS_CET_ENCODED); + assertThat(encode(EPOCH_MILLIS_OFFSET)).isEqualTo(EPOCH_MILLIS_OFFSET_ENCODED); + assertThat(encode(EPOCH_MILLIS_EUROPE_PARIS)).isEqualTo(EPOCH_MILLIS_EUROPE_PARIS_ENCODED); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode(EPOCH_UTC_ENCODED)).isEqualTo(EPOCH_UTC); + assertThat(decode(EPOCH_MILLIS_CET_ENCODED)).isEqualTo(EPOCH_MILLIS_CET); + assertThat(decode(EPOCH_MILLIS_OFFSET_ENCODED)).isEqualTo(EPOCH_MILLIS_OFFSET); + assertThat(decode(EPOCH_MILLIS_EUROPE_PARIS_ENCODED)).isEqualTo(EPOCH_MILLIS_EUROPE_PARIS); + assertThat(decode(null)).isNull(); + } + + @Test + public void should_format() { + assertThat(format(EPOCH_UTC)).isEqualTo(EPOCH_UTC_FORMATTED); + assertThat(format(EPOCH_MILLIS_CET)).isEqualTo(EPOCH_MILLIS_CET_FORMATTED); + assertThat(format(EPOCH_MILLIS_OFFSET)).isEqualTo(EPOCH_MILLIS_OFFSET_FORMATTED); + assertThat(format(EPOCH_MILLIS_EUROPE_PARIS)).isEqualTo(EPOCH_MILLIS_EUROPE_PARIS_FORMATTED); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + assertThat(parse(EPOCH_UTC_FORMATTED)).isEqualTo(EPOCH_UTC); + assertThat(parse(EPOCH_MILLIS_CET_FORMATTED)).isEqualTo(EPOCH_MILLIS_CET); + assertThat(parse(EPOCH_MILLIS_OFFSET_FORMATTED)).isEqualTo(EPOCH_MILLIS_OFFSET); + assertThat(parse(EPOCH_MILLIS_EUROPE_PARIS_FORMATTED)).isEqualTo(EPOCH_MILLIS_EUROPE_PARIS); + assertThat(parse("NULL")).isNull(); + 
assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.of(ZonedDateTime.class))).isTrue(); + assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(ZonedDateTime.class)).isTrue(); + assertThat(codec.accepts(Integer.class)).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(ZonedDateTime.now(ZoneOffset.systemDefault()))).isTrue(); + assertThat(codec.accepts(Integer.MIN_VALUE)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodecTest.java new file mode 100644 index 00000000000..05e71dfda0d --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodecTest.java @@ -0,0 +1,203 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec.extras.time; + +import static java.time.ZoneOffset.ofHours; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodecTest; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class TimestampMillisCodecTest extends CodecTestBase { + + @Test + public void should_encode() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC; + assertThat(encode(0L)).isEqualTo("0x0000000000000000"); + assertThat(encode(128L)).isEqualTo("0x0000000000000080"); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC; + assertThat(decode("0x0000000000000000")).isEqualTo(0L); + assertThat(decode("0x0000000000000080")).isEqualTo(128L); + assertThat(decode(null)).isNull(); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_not_enough_bytes() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM; + decode("0x0000"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_to_decode_if_too_many_bytes() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM; + decode("0x0000000000000000" + "0000"); + } + + @Test + public void should_format() { + codec = 
ExtraTypeCodecs.timestampMillisAt(ZoneOffset.ofHours(2)); + // No need to test various values because the codec delegates directly to SimpleDateFormat, + // which we assume does its job correctly. + assertThat(format(0L)).isEqualTo("'1970-01-01T02:00:00.000+02:00'"); + assertThat(format(1534435174123L)).isEqualTo("'2018-08-16T17:59:34.123+02:00'"); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + @UseDataProvider(value = "timeZones", location = TimestampCodecTest.class) + public void should_parse(ZoneId defaultTimeZone) { + codec = ExtraTypeCodecs.timestampMillisAt(defaultTimeZone); + + // Raw numbers + assertThat(parse("'0'")).isEqualTo(0L); + assertThat(parse("'-1'")).isEqualTo(-1L); + assertThat(parse("1534463100000")).isEqualTo(1534463100000L); + + // Date formats + long expected; + + // date without time, without time zone + expected = + LocalDate.parse("2017-01-01") + .atStartOfDay() + .atZone(defaultTimeZone) + .toInstant() + .toEpochMilli(); + assertThat(parse("'2017-01-01'")).isEqualTo(expected); + + // date without time, with time zone + expected = + LocalDate.parse("2018-08-16").atStartOfDay().atZone(ofHours(2)).toInstant().toEpochMilli(); + assertThat(parse("'2018-08-16+02'")).isEqualTo(expected); + assertThat(parse("'2018-08-16+0200'")).isEqualTo(expected); + assertThat(parse("'2018-08-16+02:00'")).isEqualTo(expected); + assertThat(parse("'2018-08-16 CEST'")).isEqualTo(expected); + + // date with time, without time zone + expected = + LocalDateTime.parse("2018-08-16T23:45").atZone(defaultTimeZone).toInstant().toEpochMilli(); + assertThat(parse("'2018-08-16T23:45'")).isEqualTo(expected); + assertThat(parse("'2018-08-16 23:45'")).isEqualTo(expected); + + // date with time + seconds, without time zone + expected = + LocalDateTime.parse("2019-12-31T16:08:38") + .atZone(defaultTimeZone) + .toInstant() + .toEpochMilli(); + assertThat(parse("'2019-12-31T16:08:38'")).isEqualTo(expected); + assertThat(parse("'2019-12-31 
16:08:38'")).isEqualTo(expected); + + // date with time + seconds + milliseconds, without time zone + expected = + LocalDateTime.parse("1950-02-28T12:00:59.230") + .atZone(defaultTimeZone) + .toInstant() + .toEpochMilli(); + assertThat(parse("'1950-02-28T12:00:59.230'")).isEqualTo(expected); + assertThat(parse("'1950-02-28 12:00:59.230'")).isEqualTo(expected); + + // date with time, with time zone + expected = ZonedDateTime.parse("1973-06-23T23:59:00.000+01:00").toInstant().toEpochMilli(); + assertThat(parse("'1973-06-23T23:59+01'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59+0100'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59+01:00'")).isEqualTo(expected); + assertThat(parse("'1973-06-23T23:59 CET'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+01'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+0100'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59+01:00'")).isEqualTo(expected); + assertThat(parse("'1973-06-23 23:59 CET'")).isEqualTo(expected); + + // date with time + seconds, with time zone + expected = ZonedDateTime.parse("1980-01-01T23:59:59.000-08:00").toInstant().toEpochMilli(); + assertThat(parse("'1980-01-01T23:59:59-08'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59-0800'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59-08:00'")).isEqualTo(expected); + assertThat(parse("'1980-01-01T23:59:59 PST'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-08'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-0800'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59-08:00'")).isEqualTo(expected); + assertThat(parse("'1980-01-01 23:59:59 PST'")).isEqualTo(expected); + + // date with time + seconds + milliseconds, with time zone + expected = ZonedDateTime.parse("1999-12-31T23:59:59.999+00:00").toInstant().toEpochMilli(); + assertThat(parse("'1999-12-31T23:59:59.999+00'")).isEqualTo(expected); + 
assertThat(parse("'1999-12-31T23:59:59.999+0000'")).isEqualTo(expected); + assertThat(parse("'1999-12-31T23:59:59.999+00:00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31T23:59:59.999 UTC'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+0000'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999+00:00'")).isEqualTo(expected); + assertThat(parse("'1999-12-31 23:59:59.999 UTC'")).isEqualTo(expected); + + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test + public void should_fail_to_parse_invalid_input() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM; + assertThatThrownBy(() -> parse("not a timestamp")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Alphanumeric timestamp literal must be quoted: \"not a timestamp\""); + assertThatThrownBy(() -> parse("'not a timestamp'")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Cannot parse timestamp value from \"'not a timestamp'\""); + } + + @Test + public void should_accept_generic_type() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC; + assertThat(codec.accepts(GenericType.LONG)).isTrue(); + assertThat(codec.accepts(GenericType.INSTANT)).isFalse(); + } + + @Test + public void should_accept_raw_type() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC; + assertThat(codec.accepts(Long.class)).isTrue(); + assertThat(codec.accepts(Long.TYPE)).isTrue(); + assertThat(codec.accepts(Instant.class)).isFalse(); + } + + @Test + public void should_accept_object() { + codec = ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC; + assertThat(codec.accepts(Long.MIN_VALUE)).isTrue(); + assertThat(codec.accepts(Instant.EPOCH)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodecTest.java similarity index 92% rename from core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodecTest.java index 0a93c44bffd..cef8af71d50 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/ZonedTimestampCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/ZonedTimestampCodecTest.java @@ -13,14 +13,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package com.datastax.oss.driver.internal.core.type.codec; +package com.datastax.oss.driver.internal.core.type.codec.extras.time; import static java.time.ZoneOffset.ofHours; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.CodecTestBase; +import com.datastax.oss.driver.internal.core.type.codec.TimestampCodecTest; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Instant; @@ -38,7 +40,7 @@ public class ZonedTimestampCodecTest extends CodecTestBase { @Test @UseDataProvider(value = "timeZones", location = TimestampCodecTest.class) public void should_encode(ZoneId timeZone) { - codec = TypeCodecs.zonedTimestampAt(timeZone); + codec = ExtraTypeCodecs.zonedTimestampAt(timeZone); assertThat(encode(Instant.EPOCH.atZone(timeZone))).isEqualTo("0x0000000000000000"); 
assertThat(encode(Instant.ofEpochMilli(128).atZone(timeZone))).isEqualTo("0x0000000000000080"); assertThat(encode(null)).isNull(); @@ -46,7 +48,7 @@ public void should_encode(ZoneId timeZone) { @Test public void should_decode() { - codec = TypeCodecs.ZONED_TIMESTAMP_UTC; + codec = ExtraTypeCodecs.ZONED_TIMESTAMP_UTC; assertThat(decode("0x0000000000000000").toInstant().toEpochMilli()).isEqualTo(0); assertThat(decode("0x0000000000000080").toInstant().toEpochMilli()).isEqualTo(128); assertThat(decode(null)).isNull(); @@ -54,19 +56,19 @@ public void should_decode() { @Test(expected = IllegalArgumentException.class) public void should_fail_to_decode_if_not_enough_bytes() { - codec = TypeCodecs.ZONED_TIMESTAMP_SYSTEM; + codec = ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM; decode("0x0000"); } @Test(expected = IllegalArgumentException.class) public void should_fail_to_decode_if_too_many_bytes() { - codec = TypeCodecs.ZONED_TIMESTAMP_SYSTEM; + codec = ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM; decode("0x0000000000000000" + "0000"); } @Test public void should_format() { - codec = TypeCodecs.zonedTimestampAt(ZoneOffset.ofHours(2)); + codec = ExtraTypeCodecs.zonedTimestampAt(ZoneOffset.ofHours(2)); // No need to test various values because the codec delegates directly to SimpleDateFormat, // which we assume does its job correctly. 
assertThat(format(Instant.EPOCH.atZone(ZoneOffset.UTC))) @@ -79,7 +81,7 @@ public void should_format() { @Test @UseDataProvider(value = "timeZones", location = TimestampCodecTest.class) public void should_parse(ZoneId timeZone) { - codec = TypeCodecs.zonedTimestampAt(timeZone); + codec = ExtraTypeCodecs.zonedTimestampAt(timeZone); // Raw numbers assertThat(parse("'0'")).isEqualTo(Instant.EPOCH.atZone(timeZone)); diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/CustomCodecs.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/CustomCodecs.java new file mode 100644 index 00000000000..6c31bb02075 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/CustomCodecs.java @@ -0,0 +1,240 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.datatypes; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.ByteUtils; +import com.datastax.oss.driver.api.core.data.TupleValue; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.MappingCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetAddress; +import java.nio.ByteBuffer; +import java.time.ZonedDateTime; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +/** + * Inserts and retrieves values using a few custom codecs. + * + *

        Preconditions: + * + *

          + *
        • An Apache Cassandra(R) cluster is running and accessible through the contacts points + * identified by basic.contact-points (see application.conf). + *
        + * + *

        Side effects: + * + *

          + *
        • creates a new keyspace "examples" in the cluster. If a keyspace with this name already + * exists, it will be reused; + *
        • creates a table "examples.videos". If it already exists, it will be reused; + *
        • inserts data in the table. + *
        + * + * More examples of custom codecs can be found in the following examples: + * + *
          + *
        1. Codecs for tuples and UDTs: + *
            + *
          • {@link TuplesSimple} + *
          • {@link TuplesMapped} + *
          • {@link UserDefinedTypesSimple} + *
          • {@link UserDefinedTypesMapped} + *
          + *
        2. Json codecs: + *
            + *
          • {@link com.datastax.oss.driver.examples.json.jackson.JacksonJsonColumn} + *
          • {@link com.datastax.oss.driver.examples.json.jackson.JacksonJsonFunction} + *
          • {@link com.datastax.oss.driver.examples.json.jackson.JacksonJsonRow} + *
          • {@link com.datastax.oss.driver.examples.json.jsr.Jsr353JsonColumn} + *
          • {@link com.datastax.oss.driver.examples.json.jsr.Jsr353JsonFunction} + *
          • {@link com.datastax.oss.driver.examples.json.jsr.Jsr353JsonRow} + *
          + * + * @see driver + * documentation on custom codecs + */ +public class CustomCodecs { + + public static final GenericType> OPTIONAL_OF_INET = + GenericType.optionalOf(InetAddress.class); + + /** A dummy codec converting CQL ints into Java strings. */ + public static class CqlIntToStringCodec extends MappingCodec { + + public CqlIntToStringCodec() { + super(TypeCodecs.INT, GenericType.STRING); + } + + @Nullable + @Override + protected String innerToOuter(@Nullable Integer value) { + return value == null ? null : value.toString(); + } + + @Nullable + @Override + protected Integer outerToInner(@Nullable String value) { + return value == null ? null : Integer.parseInt(value); + } + } + + public enum WeekDay { + MONDAY, + TUESDAY, + WEDNESDAY, + THURSDAY, + FRIDAY, + SATURDAY, + SUNDAY + } + + public static void main(String[] args) { + CqlSessionBuilder builder = CqlSession.builder(); + builder = registerCodecs(builder); + try (CqlSession session = builder.build()) { + createSchema(session); + insertData(session); + retrieveData(session); + } + } + + private static CqlSessionBuilder registerCodecs(CqlSessionBuilder builder) { + return builder.addTypeCodecs( + ExtraTypeCodecs.BLOB_TO_ARRAY, // blob <-> byte[] + ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED, // tuple <-> ZonedDateTime + ExtraTypeCodecs.listToArrayOf(TypeCodecs.TEXT), // list <-> String[] + ExtraTypeCodecs.enumNamesOf(WeekDay.class), // text <-> MyEnum + ExtraTypeCodecs.optionalOf(TypeCodecs.INET), // uuid <-> Optional + new CqlIntToStringCodec() // custom codec, int <-> String + ); + } + + private static void createSchema(CqlSession session) { + session.execute( + "CREATE KEYSPACE IF NOT EXISTS examples " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + session.execute( + "CREATE TABLE IF NOT EXISTS examples.videos(" + + "pk int PRIMARY KEY, " + + "contents blob, " + + "uploaded tuple, " + + "tags list, " + + "week_day text, " + + "ip inet" + + ")"); + } + + 
private static void insertData(CqlSession session) { + // prepare the INSERT statement + PreparedStatement prepared = + session.prepare( + "INSERT INTO examples.videos (pk, contents, uploaded, tags, week_day, ip) " + + "VALUES (:pk, :contents, :uploaded, :tags, :week_day, :ip)"); + + byte[] contents = new byte[] {1, 2, 3, 4}; + ZonedDateTime uploaded = ZonedDateTime.parse("2020-03-21T15:03:45.123+01:00[Europe/Paris]"); + String[] tags = new String[] {"comedy", "US"}; + WeekDay weekDay = WeekDay.SATURDAY; + Optional maybeIp = Optional.empty(); + + // Create a BoundStatement and set values + BoundStatement boundStatement = + prepared + .bind() + .setString("pk", "1") // will use CqlIntToStringCodec + .set("contents", contents, byte[].class) // will use TypeCodecs.BLOB_SIMPLE + .set( + "uploaded", + uploaded, + ZonedDateTime.class) // will use TypeCodecs.ZONED_TIMESTAMP_PERSISTED + .set("tags", tags, String[].class) // will use TypeCodecs.arrayOf(TypeCodecs.TEXT) + .set( + "week_day", + weekDay, + WeekDay.class) // will use TypeCodecs.enumNamesOf(WeekDay.class) + .set( + "ip", maybeIp, OPTIONAL_OF_INET); // will use TypeCodecs.optionalOf(TypeCodecs.INET) + + // execute the insertion + session.execute(boundStatement); + } + + private static void retrieveData(CqlSession session) { + // Execute the SELECT query and retrieve the single row in the result set + SimpleStatement statement = + SimpleStatement.newInstance( + "SELECT pk, contents, uploaded, tags, week_day, ip FROM examples.videos WHERE pk = ?", + // Here, the primary key must be provided as an int, not as a String, because it is not + // possible to use custom codecs in simple statements, only driver built-in codecs. + // If this is an issue, use prepared statements. 
+ 1); + Row row = session.execute(statement).one(); + assert row != null; + + { + // Retrieve values from row using custom codecs + String pk = row.getString("pk"); // will use CqlIntToStringCodec + byte[] contents = row.get("contents", byte[].class); // will use TypeCodecs.BLOB_SIMPLE + ZonedDateTime uploaded = + row.get("uploaded", ZonedDateTime.class); // will use TypeCodecs.ZONED_TIMESTAMP_PERSISTED + String[] tags = + row.get("tags", String[].class); // will use TypeCodecs.arrayOf(TypeCodecs.TEXT) + WeekDay weekDay = + row.get("week_day", WeekDay.class); // will use TypeCodecs.enumNamesOf(WeekDay.class) + Optional maybeIp = + row.get("ip", OPTIONAL_OF_INET); // will use TypeCodecs.optionalOf(TypeCodecs.INET) + + System.out.println("pk: " + pk); + System.out.println("contents: " + Arrays.toString(contents)); + System.out.println("uploaded: " + uploaded); + System.out.println("tags: " + Arrays.toString(tags)); + System.out.println("week day: " + weekDay); + System.out.println("ip: " + maybeIp); + } + + System.out.println("------------------"); + + { + // It is still possible to retrieve the same values from row using driver built-in codecs + int pk = row.getInt("pk"); + ByteBuffer contents = row.getByteBuffer("contents"); + TupleValue uploaded = row.getTupleValue("uploaded"); + List tags = row.getList("tags", String.class); + String weekDay = row.getString("week_day"); + InetAddress ip = row.getInetAddress("ip"); + + System.out.println("pk: " + pk); + System.out.println("contents: " + ByteUtils.toHexString(contents)); + System.out.println( + "uploaded: " + (uploaded == null ? 
null : uploaded.getFormattedContents())); + System.out.println("tags: " + tags); + System.out.println("week day: " + weekDay); + System.out.println("ip: " + ip); + } + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonColumn.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonColumn.java index 1873ecbfc3e..427059b6b68 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonColumn.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonColumn.java @@ -25,9 +25,9 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.examples.json.PlainTextJson; -import com.datastax.oss.driver.examples.json.codecs.JacksonJsonCodec; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; @@ -35,9 +35,9 @@ * Illustrates how to map a single table column of type {@code VARCHAR}, containing JSON payloads, * into a Java object using the Jackson library. * - *

          This example makes usage of a custom {@link TypeCodec codec}, {@link JacksonJsonCodec}, which - * is implemented in the java-driver-examples module. If you plan to follow this example, make sure - * to include the following Maven dependencies in your project: + *

          This example makes usage of a {@linkplain ExtraTypeCodecs#json(Class) custom codec for JSON}. + * If you plan to follow this example, make sure to include the following Maven dependencies in your + * project: * *

          {@code
            * 
          @@ -70,7 +70,7 @@
           public class JacksonJsonColumn {
           
             // A codec to convert JSON payloads into User instances;
          -  private static final TypeCodec USER_CODEC = new JacksonJsonCodec<>(User.class);
          +  private static final TypeCodec USER_CODEC = ExtraTypeCodecs.json(User.class);
           
             public static void main(String[] args) {
               try (CqlSession session = CqlSession.builder().addTypeCodecs(USER_CODEC).build()) {
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonFunction.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonFunction.java
          index 9e214572067..1cd94cc19d1 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonFunction.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonFunction.java
          @@ -27,10 +27,10 @@
           import com.datastax.oss.driver.api.core.cql.Row;
           import com.datastax.oss.driver.api.core.cql.SimpleStatement;
           import com.datastax.oss.driver.api.core.cql.Statement;
          +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs;
           import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.api.querybuilder.select.Selector;
           import com.datastax.oss.driver.examples.json.PlainTextJson;
          -import com.datastax.oss.driver.examples.json.codecs.JacksonJsonCodec;
           import com.fasterxml.jackson.annotation.JsonCreator;
           import com.fasterxml.jackson.annotation.JsonProperty;
           import com.fasterxml.jackson.databind.JsonNode;
          @@ -42,8 +42,8 @@
            * href="http://wiki.fasterxml.com/JacksonHome">Jackson library, and leveraging the {@code
            * toJson()} and {@code fromJson()} functions introduced in Cassandra 2.2.
            *
          - * 

          This example makes usage of a custom {@link TypeCodec codec}, {@link JacksonJsonCodec}. If you - * plan to follow this example, make sure to include the following Maven dependencies in your + *

          This example makes usage of a {@linkplain ExtraTypeCodecs#json(Class) custom codec for JSON}. + * If you plan to follow this example, make sure to include the following Maven dependencies in your * project: * *

          {@code
          @@ -82,10 +82,10 @@
           public class JacksonJsonFunction {
           
             // A codec to convert JSON payloads into User instances;
          -  private static final TypeCodec USER_CODEC = new JacksonJsonCodec<>(User.class);
          +  private static final TypeCodec USER_CODEC = ExtraTypeCodecs.json(User.class);
           
             // A codec to convert generic JSON payloads into JsonNode instances
          -  private static final TypeCodec JSON_NODE_CODEC = new JacksonJsonCodec<>(JsonNode.class);
          +  private static final TypeCodec JSON_NODE_CODEC = ExtraTypeCodecs.json(JsonNode.class);
           
             public static void main(String[] args) {
               try (CqlSession session =
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonRow.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonRow.java
          index 2ce4ef4abc8..2063e91c07b 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonRow.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jackson/JacksonJsonRow.java
          @@ -25,9 +25,9 @@
           import com.datastax.oss.driver.api.core.cql.ResultSet;
           import com.datastax.oss.driver.api.core.cql.Row;
           import com.datastax.oss.driver.api.core.cql.Statement;
          +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs;
           import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.examples.json.PlainTextJson;
          -import com.datastax.oss.driver.examples.json.codecs.JacksonJsonCodec;
           import com.fasterxml.jackson.annotation.JsonCreator;
           import com.fasterxml.jackson.annotation.JsonProperty;
           
          @@ -36,8 +36,8 @@
            * href="http://wiki.fasterxml.com/JacksonHome">Jackson library, and leveraging the {@code
            * SELECT JSON} and {@code INSERT JSON} syntaxes introduced in Cassandra 2.2.
            *
          - * 

          This example makes usage of a custom {@link TypeCodec codec}, {@link JacksonJsonCodec}. If you - * plan to follow this example, make sure to include the following Maven dependencies in your + *

          This example makes usage of a {@linkplain ExtraTypeCodecs#json(Class) custom codec for JSON}. + * If you plan to follow this example, make sure to include the following Maven dependencies in your * project: * *

          {@code
          @@ -73,7 +73,7 @@
            */
           public class JacksonJsonRow {
             // A codec to convert JSON payloads into User instances;
          -  private static final TypeCodec USER_CODEC = new JacksonJsonCodec<>(User.class);
          +  private static final TypeCodec USER_CODEC = ExtraTypeCodecs.json(User.class);
           
             public static void main(String[] args) {
               try (CqlSession session = CqlSession.builder().addTypeCodecs(USER_CODEC).build()) {
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/Jsr353JsonCodec.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonCodec.java
          similarity index 80%
          rename from examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/Jsr353JsonCodec.java
          rename to examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonCodec.java
          index 1b7eeae4a08..87e408f13cc 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/codecs/Jsr353JsonCodec.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonCodec.java
          @@ -13,7 +13,7 @@
            * See the License for the specific language governing permissions and
            * limitations under the License.
            */
          -package com.datastax.oss.driver.examples.json.codecs;
          +package com.datastax.oss.driver.examples.json.jsr;
           
           import com.datastax.oss.driver.api.core.ProtocolVersion;
           import com.datastax.oss.driver.api.core.type.DataType;
          @@ -105,7 +105,7 @@ public Jsr353JsonCodec() {
              * @param config A map of provider-specific configuration properties. May be empty or {@code
              *     null}.
              */
          -  public Jsr353JsonCodec(Map config) {
          +  public Jsr353JsonCodec(@Nullable Map config) {
               readerFactory = Json.createReaderFactory(config);
               writerFactory = Json.createWriterFactory(config);
             }
          @@ -130,15 +130,11 @@ public ByteBuffer encode(
                 return null;
               }
               try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
          -      try {
          -        JsonWriter writer = writerFactory.createWriter(baos);
          -        writer.write(value);
          -        return ByteBuffer.wrap(baos.toByteArray());
          -      } catch (JsonException e) {
          -        throw new IllegalArgumentException(e.getMessage(), e);
          -      }
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          +      JsonWriter writer = writerFactory.createWriter(baos);
          +      writer.write(value);
          +      return ByteBuffer.wrap(baos.toByteArray());
          +    } catch (JsonException | IOException e) {
          +      throw new IllegalArgumentException("Failed to encode value as JSON", e);
               }
             }
           
          @@ -150,40 +146,33 @@ public JsonStructure decode(
                 return null;
               }
               try (ByteArrayInputStream bais = new ByteArrayInputStream(Bytes.getArray(bytes))) {
          -      try {
          -        JsonReader reader = readerFactory.createReader(bais);
          -        return reader.read();
          -      } catch (JsonException e) {
          -        throw new IllegalArgumentException(e.getMessage(), e);
          -      }
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          +      JsonReader reader = readerFactory.createReader(bais);
          +      return reader.read();
          +    } catch (JsonException | IOException e) {
          +      throw new IllegalArgumentException("Failed to decode JSON value", e);
               }
             }
           
             @NonNull
             @Override
          -  public String format(JsonStructure value) throws IllegalArgumentException {
          +  public String format(@Nullable JsonStructure value) {
               if (value == null) {
                 return "NULL";
               }
               String json;
               try (StringWriter sw = new StringWriter()) {
          -      try {
          -        JsonWriter writer = writerFactory.createWriter(sw);
          -        writer.write(value);
          -        json = sw.toString();
          -      } catch (JsonException e) {
          -        throw new IllegalArgumentException(e.getMessage(), e);
          -      }
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          +      JsonWriter writer = writerFactory.createWriter(sw);
          +      writer.write(value);
          +      json = sw.toString();
          +    } catch (JsonException | IOException e) {
          +      throw new IllegalArgumentException("Failed to format value as JSON", e);
               }
               return Strings.quote(json);
             }
           
          +  @Nullable
             @Override
          -  public JsonStructure parse(String value) throws IllegalArgumentException {
          +  public JsonStructure parse(String value) {
               if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) {
                 return null;
               }
          @@ -195,7 +184,7 @@ public JsonStructure parse(String value) throws IllegalArgumentException {
                 JsonReader reader = readerFactory.createReader(sr);
                 return reader.read();
               } catch (JsonException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          +      throw new IllegalArgumentException("Failed to parse value as JSON", e);
               }
             }
           }
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonColumn.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonColumn.java
          index 6776399699b..0902f160169 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonColumn.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonColumn.java
          @@ -27,7 +27,6 @@
           import com.datastax.oss.driver.api.core.cql.Statement;
           import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.examples.json.PlainTextJson;
          -import com.datastax.oss.driver.examples.json.codecs.Jsr353JsonCodec;
           import javax.json.Json;
           import javax.json.JsonObject;
           import javax.json.JsonStructure;
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonFunction.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonFunction.java
          index 60d58275955..073035f6580 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonFunction.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonFunction.java
          @@ -29,7 +29,6 @@
           import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.api.querybuilder.select.Selector;
           import com.datastax.oss.driver.examples.json.PlainTextJson;
          -import com.datastax.oss.driver.examples.json.codecs.Jsr353JsonCodec;
           import javax.json.Json;
           import javax.json.JsonObject;
           import javax.json.JsonStructure;
          diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonRow.java b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonRow.java
          index c7f389f6017..70df8b8d5d0 100644
          --- a/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonRow.java
          +++ b/examples/src/main/java/com/datastax/oss/driver/examples/json/jsr/Jsr353JsonRow.java
          @@ -27,7 +27,6 @@
           import com.datastax.oss.driver.api.core.cql.Statement;
           import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.examples.json.PlainTextJson;
          -import com.datastax.oss.driver.examples.json.codecs.Jsr353JsonCodec;
           import javax.json.Json;
           import javax.json.JsonObject;
           import javax.json.JsonStructure;
          diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java
          index 14eaba82b87..d8482681e84 100644
          --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java
          +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java
          @@ -34,25 +34,23 @@
           import com.datastax.oss.driver.api.core.type.codec.TypeCodecs;
           import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry;
           import com.datastax.oss.driver.api.core.type.reflect.GenericType;
          -import com.datastax.oss.driver.api.core.type.reflect.GenericTypeParameter;
           import com.datastax.oss.driver.api.testinfra.ccm.CcmRule;
           import com.datastax.oss.driver.api.testinfra.session.SessionRule;
           import com.datastax.oss.driver.api.testinfra.session.SessionUtils;
           import com.datastax.oss.driver.categories.ParallelizableTests;
           import com.datastax.oss.driver.internal.core.type.codec.IntCodec;
           import com.datastax.oss.driver.internal.core.type.codec.UdtCodec;
          +import com.datastax.oss.driver.internal.core.type.codec.extras.OptionalCodec;
           import com.google.common.collect.ImmutableMap;
           import edu.umd.cs.findbugs.annotations.NonNull;
           import edu.umd.cs.findbugs.annotations.Nullable;
           import java.nio.ByteBuffer;
          -import java.util.Collection;
           import java.util.HashMap;
           import java.util.Iterator;
           import java.util.List;
           import java.util.Map;
           import java.util.Objects;
           import java.util.Optional;
          -import java.util.function.Predicate;
           import org.assertj.core.util.Maps;
           import org.junit.BeforeClass;
           import org.junit.ClassRule;
          @@ -264,33 +262,6 @@ public void should_register_custom_codec_at_runtime() {
               }
             }
           
          -  private static class OptionalCodec extends MappingCodec> {
          -
          -    // in cassandra, empty collections are considered null and vise versa.
          -    Predicate isAbsent =
          -        (i) ->
          -            i == null
          -                || (i instanceof Collection && ((Collection) i).isEmpty())
          -                || (i instanceof Map && ((Map) i).isEmpty());
          -
          -    OptionalCodec(TypeCodec innerCodec) {
          -      super(
          -          innerCodec,
          -          new GenericType>() {}.where(
          -              new GenericTypeParameter() {}, innerCodec.getJavaType()));
          -    }
          -
          -    @Override
          -    protected Optional innerToOuter(T value) {
          -      return isAbsent.test(value) ? Optional.empty() : Optional.of(value);
          -    }
          -
          -    @Override
          -    protected T outerToInner(Optional value) {
          -      return value.orElse(null);
          -    }
          -  }
          -
             @Test
             public void should_be_able_to_register_and_use_custom_codec_with_generic_type() {
               // create a cluster with registered codecs using OptionalCodec
          diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java
          deleted file mode 100644
          index f6ab774dfd3..00000000000
          --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JacksonJsonCodec.java
          +++ /dev/null
          @@ -1,122 +0,0 @@
          -/*
          - * Copyright DataStax, Inc.
          - *
          - * Licensed under the Apache License, Version 2.0 (the "License");
          - * you may not use this file except in compliance with the License.
          - * You may obtain a copy of the License at
          - *
          - * http://www.apache.org/licenses/LICENSE-2.0
          - *
          - * Unless required by applicable law or agreed to in writing, software
          - * distributed under the License is distributed on an "AS IS" BASIS,
          - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
          - * See the License for the specific language governing permissions and
          - * limitations under the License.
          - */
          -package com.datastax.oss.driver.querybuilder;
          -
          -import com.datastax.oss.driver.api.core.ProtocolVersion;
          -import com.datastax.oss.driver.api.core.type.DataType;
          -import com.datastax.oss.driver.api.core.type.DataTypes;
          -import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
          -import com.datastax.oss.driver.api.core.type.reflect.GenericType;
          -import com.datastax.oss.driver.internal.core.util.Strings;
          -import com.datastax.oss.protocol.internal.util.Bytes;
          -import com.fasterxml.jackson.core.JsonProcessingException;
          -import com.fasterxml.jackson.databind.JavaType;
          -import com.fasterxml.jackson.databind.ObjectMapper;
          -import com.fasterxml.jackson.databind.type.TypeFactory;
          -import edu.umd.cs.findbugs.annotations.NonNull;
          -import edu.umd.cs.findbugs.annotations.Nullable;
          -import java.io.IOException;
          -import java.nio.ByteBuffer;
          -
          -public class JacksonJsonCodec implements TypeCodec {
          -
          -  private final ObjectMapper objectMapper;
          -  private final GenericType javaType;
          -
          -  JacksonJsonCodec(Class javaClass) {
          -    this(javaClass, new ObjectMapper());
          -  }
          -
          -  private JacksonJsonCodec(Class javaClass, ObjectMapper objectMapper) {
          -    this.javaType = GenericType.of(javaClass);
          -    this.objectMapper = objectMapper;
          -  }
          -
          -  @NonNull
          -  @Override
          -  public GenericType getJavaType() {
          -    return javaType;
          -  }
          -
          -  @NonNull
          -  @Override
          -  public DataType getCqlType() {
          -    return DataTypes.TEXT;
          -  }
          -
          -  @Nullable
          -  @Override
          -  public ByteBuffer encode(@Nullable T value, @NonNull ProtocolVersion protocolVersion) {
          -    if (value == null) {
          -      return null;
          -    }
          -    try {
          -      return ByteBuffer.wrap(objectMapper.writeValueAsBytes(value));
          -    } catch (JsonProcessingException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          -    }
          -  }
          -
          -  @Nullable
          -  @Override
          -  public T decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) {
          -    if (bytes == null) {
          -      return null;
          -    }
          -    try {
          -      return objectMapper.readValue(Bytes.getArray(bytes), toJacksonJavaType());
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          -    }
          -  }
          -
          -  @NonNull
          -  @Override
          -  public String format(T value) {
          -    if (value == null) {
          -      return "NULL";
          -    }
          -    String json;
          -    try {
          -      json = objectMapper.writeValueAsString(value);
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          -    }
          -    return Strings.quote(json);
          -  }
          -
          -  @Nullable
          -  @Override
          -  @SuppressWarnings("unchecked")
          -  public T parse(String value) {
          -    if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) {
          -      return null;
          -    }
          -    if (!Strings.isQuoted(value)) {
          -      throw new IllegalArgumentException("JSON strings must be enclosed by single quotes");
          -    }
          -    String json = Strings.unquote(value);
          -    try {
          -      return (T) objectMapper.readValue(json, toJacksonJavaType());
          -    } catch (IOException e) {
          -      throw new IllegalArgumentException(e.getMessage(), e);
          -    }
          -  }
          -
          -  private JavaType toJacksonJavaType() {
          -    return TypeFactory.defaultInstance().constructType(getJavaType().getType());
          -  }
          -}
          diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java
          index 8e70627462e..9b6ed735d40 100644
          --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java
          +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java
          @@ -28,6 +28,8 @@
           import com.datastax.oss.driver.api.core.cql.SimpleStatement;
           import com.datastax.oss.driver.api.core.cql.Statement;
           import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException;
          +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs;
          +import com.datastax.oss.driver.api.core.type.codec.TypeCodec;
           import com.datastax.oss.driver.api.testinfra.CassandraRequirement;
           import com.datastax.oss.driver.api.testinfra.ccm.CcmRule;
           import com.datastax.oss.driver.api.testinfra.session.SessionRule;
          @@ -62,8 +64,7 @@ public class JsonInsertIT {
             @ClassRule
             public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE);
           
          -  private static final JacksonJsonCodec JACKSON_JSON_CODEC =
          -      new JacksonJsonCodec<>(User.class);
          +  private static final TypeCodec JACKSON_JSON_CODEC = ExtraTypeCodecs.json(User.class);
           
             @BeforeClass
             public static void setup() {
          diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md
          index 171d1d59a18..ea9315d797f 100644
          --- a/manual/core/custom_codecs/README.md
          +++ b/manual/core/custom_codecs/README.md
          @@ -4,7 +4,7 @@
           
           Define custom Java to CQL mappings.
           
          -* implement the [TypeCodec] interface.
          +* implement the [TypeCodec] interface, or use one of the alternative codecs in `ExtraTypeCodecs`.
           * registering a codec:
             * at init time: [CqlSession.builder().addTypeCodecs()][SessionBuilder.addTypeCodecs]
             * at runtime:
          @@ -39,8 +39,227 @@ Sometimes you might want to use different mappings, for example:
           
           Custom codecs allow you to define those dedicated mappings, and plug them into your session.
           
          +### Using alternative codecs provided by the driver
          +
          +The first thing you can do is use one of the many alternative codecs shipped with the driver. They
          +are exposed on the [ExtraTypeCodecs] class. In this section we are going to introduce these codecs,
          +then you will see how to register and use them in the next sections.
          +
          +#### Mapping CQL blobs to Java arrays
          +
          +The driver default is [TypeCodecs.BLOB], which maps CQL `blob` to Java's [java.nio.ByteBuffer].
          +Check out our [CQL blob example] to understand how to manipulate the `ByteBuffer` API correctly.
          +
          +If the `ByteBuffer` API is too cumbersome for you, an alternative is to use
          +[ExtraTypeCodecs.BLOB_TO_ARRAY] which maps CQL blobs to Java's `byte[]`.
          +
          +#### Mapping CQL lists to Java arrays
          +
          +By default, the driver maps CQL `list` to Java's [java.util.List]. If you prefer to deal with 
          +arrays, the driver offers the following codecs:
          +
          +1. For primitive types:
          +
          +    | Codec | CQL type | Java type |
          +    |---|---|---|
          +    | [ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY] | `list` | `boolean[]` |
          +    | [ExtraTypeCodecs.BYTE_LIST_TO_ARRAY] | `list` | `byte[]` |
          +    | [ExtraTypeCodecs.SHORT_LIST_TO_ARRAY] | `list` | `short[]` |
          +    | [ExtraTypeCodecs.INT_LIST_TO_ARRAY] | `list` | `int[]` |
          +    | [ExtraTypeCodecs.LONG_LIST_TO_ARRAY] | `list` | `long[]` |
          +    | [ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY] | `list` | `float[]` |
          +    | [ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY] | `list` | `double[]` |
          +    
          +2. For other types, you should use [ExtraTypeCodecs.listToArrayOf(TypeCodec)]; for example, to map
          +   CQL `list` to `String[]`:
          +
          +    ```java
          +    TypeCodec stringArrayCodec = ExtraTypeCodecs.listToArrayOf(TypeCodecs.TEXT);
          +    ```
          +
          +#### Mapping CQL timestamps to Java "instant" types
          +
          +By default, the driver maps CQL `timestamp` to Java's [java.time.Instant] \(using
          +[TypeCodecs.TIMESTAMP]). This is the most natural mapping, since neither type contains any time zone
          +information: they just represent absolute points in time.
          +
          +The driver also provides codecs to map to a Java `long` representing the number of milliseconds
+since the epoch (this is the raw form returned by `Instant.toEpochMilli`, and also how Cassandra
          +stores the value internally).
          +
          +In either case, you can pick the time zone that the codec will use for its [format()] and [parse()]
          +methods. Note that this is only relevant for these two methods (follow the links for more
          +explanations on how the driver uses them); for regular encoding and decoding, like setting a value
          +on a bound statement or reading a column from a row, the time zone does not matter.
          +
          +| Codec | CQL type | Java type | Time zone used by `format()` and `parse()` |
          +|---|---|---|---|
          +| [TypeCodecs.TIMESTAMP] | `timestamp` | `Instant` | System default |
          +| [ExtraTypeCodecs.TIMESTAMP_UTC] | `timestamp` | `Instant` | UTC |
          +| [ExtraTypeCodecs.timestampAt(ZoneId)] | `timestamp` | `Instant` | User-provided |
          +| [ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM] | `timestamp` | `long` | System default |
          +| [ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC] | `timestamp` | `long` | UTC |
          +| [ExtraTypeCodecs.timestampMillisAt(ZoneId)] | `timestamp` | `long` | User-provided |
          +
          +For example, given the schema:
          +
          +```
          +CREATE TABLE example (k int PRIMARY KEY, ts timestamp);
          +INSERT INTO example(k, ts) VALUES (1, 0);
          +```
          +
          +When reading column `ts`, all `Instant` codecs return `Instant.ofEpochMilli(0)`. But if asked to
          +format it, they behave differently:
          +
          +* `ExtraTypeCodecs.TIMESTAMP_UTC` returns `'1970-01-01T00:00:00.000Z'`
+* `ExtraTypeCodecs.timestampAt(ZoneId.of("Europe/Paris"))` returns `'1970-01-01T01:00:00.000+01:00'`
          +
          +#### Mapping CQL timestamps to `ZonedDateTime`
          +
          +If your application works with one single, pre-determined time zone, then you probably would like
          +the driver to map `timestamp` to [java.time.ZonedDateTime] with a fixed zone. Use one of the
          +following codecs:
          +
          +| Codec | CQL type | Java type | Time zone used by all codec operations |
          +|---|---|---|---|
          +| [ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM] | `timestamp` | `ZonedDateTime` | System default |
          +| [ExtraTypeCodecs.ZONED_TIMESTAMP_UTC] | `timestamp` | `ZonedDateTime` | UTC |
          +| [ExtraTypeCodecs.zonedTimestampAt(ZoneId)] | `timestamp` | `ZonedDateTime` | User-provided |
          +
          +This time, the zone matters for all codec operations, including encoding and decoding. For example,
          +given the schema:
          +                                                                                       
          +```
          +CREATE TABLE example (k int PRIMARY KEY, ts timestamp);
          +INSERT INTO example(k, ts) VALUES (1, 0);
          +```
          +
          +When reading column `ts`:
          +
          +* `ExtraTypeCodecs.ZONED_TIMESTAMP_UTC` returns the same value as
          +  `ZonedDateTime.parse("1970-01-01T00:00Z")`
          +* `ExtraTypeCodecs.zonedTimestampAt(ZoneId.of("Europe/Paris"))` returns the same value as
          +  `ZonedDateTime.parse("1970-01-01T01:00+01:00[Europe/Paris]")`
          +
          +These are two distinct `ZonedDateTime` instances: although they represent the same absolute point in
          +time, they do not compare as equal.
          +
          +#### Mapping CQL timestamps to `LocalDateTime` 
          + 
          +If your application works with one single, pre-determined time zone, but only exposes local
          +date-times, then you probably would like the driver to map timestamps to [java.time.LocalDateTime]
          +obtained from a fixed zone. Use one of the following codecs:
          +
          +| Codec | CQL type | Java type | Time zone used by all codec operations |
          +|---|---|---|---|
          +| [ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM] | `timestamp` | `LocalDateTime` | System default |
          +| [ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC] | `timestamp` | `LocalDateTime` | UTC |
          +| [ExtraTypeCodecs.localTimestampAt(ZoneId)] | `timestamp` | `LocalDateTime` | User-provided |
          +
          +
          +Again, the zone matters for all codec operations, including encoding and decoding. For example,
          +given the schema:
          +                                                                                       
          +```
          +CREATE TABLE example (k int PRIMARY KEY, ts timestamp);
          +INSERT INTO example(k, ts) VALUES (1, 0);
          +```
          +
          +When reading column `ts`:
          +
          +* `ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC` returns `LocalDateTime.of(1970, 1, 1, 0, 0)`
          +* `ExtraTypeCodecs.localTimestampAt(ZoneId.of("Europe/Paris"))` returns `LocalDateTime.of(1970, 1,
          +  1, 1, 0)`
          +
          +#### Storing the time zone in Cassandra
          +
          +If your application needs to remember the time zone that each date was entered with, you need to
          +store it in the database. We suggest using a `tuple`, where the second component
          +holds the [zone id][java.time.ZoneId].
          +
          +If you follow this guideline, then you can use [ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED] to map
          +the CQL tuple to [java.time.ZonedDateTime].
          +
          +For example, given the schema:
          +
          +```
          +CREATE TABLE example(k int PRIMARY KEY, zts tuple);
          +INSERT INTO example (k, zts) VALUES (1, (0, 'Z'));
          +INSERT INTO example (k, zts) VALUES (2, (-3600000, 'Europe/Paris'));
          +```
          +
          +When reading column `zts`, `ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED` returns:
          +
          +* `ZonedDateTime.parse("1970-01-01T00:00Z")` for the first row
          +* `ZonedDateTime.parse("1970-01-01T00:00+01:00[Europe/Paris]")` for the second row
          +
          +Each value is read back in the time zone that it was written with. But note that you can still
+compare rows on an absolute timeline with the `timestamp` component of the tuple.
          +
          +#### Mapping to `Optional` instead of `null` 
          +
          +If you prefer to deal with [java.util.Optional] in your application instead of nulls, then you can 
          +use [ExtraTypeCodecs.optionalOf(TypeCodec)]:
          +
          +```java
          +TypeCodec> optionalUuidCodec = ExtraTypeCodecs.optionalOf(TypeCodecs.UUID);
          +```
          +
          +Note that because the CQL native protocol does not distinguish empty collections from null 
          +collection references, this codec will also map empty collections to [Optional.empty()].
          +
          +#### Mapping Java Enums
          +
          +Java [Enums] can be mapped to CQL in two ways:
          +
          +1. By name: [ExtraTypeCodecs.enumNamesOf(Class)] will create a codec for a given `Enum` class that
          +maps its constants to their [programmatic names][Enum.name()]. The corresponding CQL column must be
          +of type `text`. Note that this codec relies on the enum constant names; it is therefore vital that
          +enum names never change.
          +1. By ordinal: [ExtraTypeCodecs.enumOrdinalsOf(Class)] will create a codec for a given `Enum` class
          +that maps its constants to their [ordinal value][Enum.ordinal()]. The corresponding CQL column must
          +be of type `int`.
          +
          +    **We strongly recommend against this approach.** It is provided for compatibility with driver 3,
          +    but relying on ordinals is a bad practice: any reordering of the enum constants, or insertion
          +    of a new constant before the end, will change the ordinals. The codec won't fail, but it will
          +    insert different codes and corrupt your data.
          +
          +    If you really want to use integer codes for storage efficiency, implement an explicit mapping
          +    (for example with a `toCode()` method on your enum type). It is then fairly straightforward to
          +    implement a codec with [MappingCodec](#creating-custom-java-to-cql-mappings-with-mapping-codec),
          +    using `TypeCodecs#INT` as the "inner" codec.
          +
          +For example, assuming the following enum:
          +
          +```java
          +public enum WeekDay {
          +  MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY 
          +}
          +```
          +
          +You can define codecs for it the following ways:
          +
          +```java
+// MONDAY will be persisted as "MONDAY", TUESDAY as "TUESDAY", etc.
+TypeCodec<WeekDay> weekDaysByNameCodec = ExtraTypeCodecs.enumNamesOf(WeekDay.class);
+
+// MONDAY will be persisted as 0, TUESDAY as 1, etc.
+TypeCodec<WeekDay> weekDaysByOrdinalCodec = ExtraTypeCodecs.enumOrdinalsOf(WeekDay.class);
          +```
          +
          +#### Mapping Json
          +
          +The driver provides out-of-the-box support for mapping Java objects to CQL `text` using the popular
          +Jackson library. The method [ExtraTypeCodecs.json(Class)] will create a codec for a given Java class
          +that maps instances of that class to Json strings, using a newly-allocated, default [ObjectMapper].
          +It is also possible to pass a custom `ObjectMapper` instance using [ExtraTypeCodecs.json(Class,
          +ObjectMapper)] instead.
          +
           ### Writing codecs
           
          +If none of the driver built-in codecs above suits you, it is also possible to roll your own.
          +
           To write a custom codec, implement the [TypeCodec] interface. Here is an example that maps a CQL
           `int` to a Java string containing its textual representation:
           
          @@ -99,11 +318,26 @@ important points:
           
           ### Using codecs
           
          -Once you have your codec, register it when building your session:
          +Once you have your codec, register it when building your session. The following example registers
          +`CqlIntToStringCodec` along with a few driver-supplied alternative codecs:
           
           ```java
          -CqlSession session = CqlSession.builder()
          -    .addTypeCodecs(new CqlIntToStringCodec())
          +enum WeekDay { MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY };
          +class Price {
          +  ... // a custom POJO that will be serialized as JSON
          +}
          +
          +CqlSession session =
          +  CqlSession.builder()
          +    .addTypeCodecs(
          +      new CqlIntToStringCodec(),                  // user-created codec
          +      ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED,  // tuple <-> ZonedDateTime
          +      ExtraTypeCodecs.BLOB_TO_ARRAY,              // blob <-> byte[]
          +      ExtraTypeCodecs.arrayOf(TypeCodecs.TEXT),   // list <-> String[]
          +      ExtraTypeCodecs.enumNamesOf(WeekDay.class), // text <-> WeekDay
+      ExtraTypeCodecs.json(Price.class),          // text <-> Price
+      ExtraTypeCodecs.optionalOf(TypeCodecs.UUID) // uuid <-> Optional<UUID>
          +    )
               .build();
           ```
           
          @@ -111,15 +345,14 @@ You may also add codecs to an existing session at runtime:
           
           ```java
           // The cast is required for backward compatibility reasons (registry mutability was introduced in
          -// 4.3.0). It is safe as long as you didn't hack the driver internals to plug a custom registry
          -// implementation.
          +// 4.3.0). It is safe as long as you didn't write a custom registry implementation.
           MutableCodecRegistry registry =
               (MutableCodecRegistry) session.getContext().getCodecRegistry();
           
           registry.register(new CqlIntToStringCodec());
           ```
           
          -You can now use the new mapping in your code:
          +You can now use the new mappings in your code:
           
           ```java
           // cqlsh:ks> desc table test2;
          @@ -135,35 +368,79 @@ session.execute(
                   .build());
           ```
           
          -Custom codecs are used not only for their base type, but also recursively in collections, tuples and
          -UDTs. For example, once your `int <-> String` codec is registered, you can also read a CQL
          -`list` as a Java `List`:
          +In the above example, the driver will look up in the codec registry a codec for CQL `int` and Java
          +String, and will transparently pick `CqlIntToStringCodec` for that.
          +
          +So far our examples have used a Java type with dedicated accessors in the driver: `getString` and
          +`setString`. But sometimes you won't find suitable accessor methods; for example, there is no 
          +accessor for `ZonedDateTime` or for `Optional`, and yet we registered codecs for these types. 
          +
          +When you want to retrieve such objects, you need a way to tell the driver which Java type you want.
          +You do so by using one of the generic `get` and `set` methods:
           
           ```java
          -// cqlsh:ks> desc table test3;
          -// CREATE TABLE ks.test2 (k int PRIMARY KEY, v list)...
          -ResultSet rs = session.execute("SELECT * FROM ks.test3 WHERE k = 1");
          -List v = rs.one().getList("v", String.class);
          -``` 
          +// Assuming that ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED was registered
          +// Assuming that ExtraTypeCodecs.BLOB_TO_ARRAY was registered
          +// Assuming that ExtraTypeCodecs.arrayOf(TypeCodecs.TEXT) was registered
          +
          +// Reading
          +ZonedDateTime v1 = row.get("v1", ZonedDateTime.class); // assuming column is of type timestamp
          +byte[] v2        = row.get("v2", byte[].class);        // assuming column is of type blob
+String[] v3      = row.get("v3", String[].class);      // assuming column is of type list<text>
           
          -So far our examples have used a Java type with dedicated accessors in the driver: `getString` and
          -`setString`. But you can also map your own Java types. For example, let's assume you have a `Price`
          -class, and have registered a codec that maps it to a particular CQL type. When reading or writing
          -values, you need a way to tell the driver which Java type you want; this is done with the generic
          -`get` and `set` methods with an extra *type token* arguments:
          +
          +// Writing
          +boundStatement.set("v1", v1, ZonedDateTime.class);
          +boundStatement.set("v2", v2, byte[].class);
          +boundStatement.set("v3", v3, String[].class);
          +```
          +
          +This is also valid for arbitrary Java types. This is particularly useful when dealing with Enums and 
          +JSON mappings, for example our `WeekDay` and `Price` types:
           
           ```java
          -GenericType priceType = GenericType.of(Price.class);
+// Assuming that ExtraTypeCodecs.enumNamesOf(WeekDay.class) was registered
+// Assuming that ExtraTypeCodecs.json(Price.class) was registered
           
           // Reading
          -Price price = row.get("v", priceType);
          +WeekDay v1 = row.get("v1", WeekDay.class); // assuming column is of type text
          +Price v2   = row.get("v2", Price.class);   // assuming column is of type text
           
           // Writing
          -boundStatement.set("v", price, priceType);
          +boundStatement.set("v1", v1, WeekDay.class);
          +boundStatement.set("v2", v2, Price.class);
          +```
          +
          +Note that, because the underlying CQL type is `text` you can still retrieve the column's contents
          +as a plain string:
          +
          +```java
          +// Reading
          +String enumName = row.getString("v1");
          +String priceJson = row.getString("v2");
          +
          +// Writing
          +boundStatement.setString("v1", enumName);
          +boundStatement.setString("v2", priceJson);
          +```
          +
          +And finally, for `Optional`, you will need the `get` and `set` methods with an extra *type 
          +token* argument, because `Optional` is a parameterized type:
          +
          +```java
+// Assuming that ExtraTypeCodecs.optionalOf(TypeCodecs.UUID) was registered
          +
          +// Reading
+Optional<UUID> opt = row.get("v", GenericType.optionalOf(UUID.class));
          +
          +// Writing
          +boundStatement.set("v", opt, GenericType.optionalOf(UUID.class));
           ```
           
           Type tokens are instances of [GenericType]. They are immutable and thread-safe, you should store
          -them as reusable constants. Generic Java types are fully supported, using the following pattern:
          +them as reusable constants. The `GenericType` class itself has constants and factory methods to help
          +creating `GenericType` objects for common types. If you don't see the type you are looking for, a
          +type token for any Java type can be created using the following pattern:
           
           ```java
           // Notice the '{}': this is an anonymous inner class
          @@ -172,6 +449,25 @@ GenericType> fooBarType = new GenericType>(){};
           Foo v = row.get("v", fooBarType);
           ```
           
          +Custom codecs are used not only for their base type, but also recursively in collections, tuples and
          +UDTs. For example, once your Json codec for the `Price` class is registered, you can also read a CQL
+`list<text>` as a Java `List<Price>`:
          +
          +```java
+// Assuming that ExtraTypeCodecs.json(Price.class) was registered
          +// Assuming that each element of the list column is a valid Json string
          +
          +// Reading
+List<Price> prices1 = row.getList("v", Price.class);
+// alternative method using the generic get method with type token argument:
+List<Price> prices2 = row.get("v", GenericType.listOf(Price.class));
          +
          +// Writing
          +boundStatement.setList("v", prices1, Price.class);
          +// alternative method using the generic set method with type token argument:
          +boundStatement.set("v", prices2, GenericType.listOf(Price.class));
          +``` 
          +
           Whenever you read or write a value, the driver tries all the built-in mappings first, followed by
           custom codecs. If two codecs can process the same mapping, the one that was registered first is
           used. Note that this means that built-in mappings can't be overridden.
          @@ -191,6 +487,10 @@ String s1 = row.getString("anIntColumn");         // int -> String, will decode
           String s2 = row.get("anIntColumn", specialCodec); // int -> String, will decode with specialCodec
           ``` 
           
          +By doing so, you bypass the codec registry completely and instruct the driver to use the given 
          +codec. Note that it is your responsibility to ensure that the codec can handle the underlying CQL
          +type (this cannot be enforced at compile-time).
          +
           ### Creating custom Java-to-CQL mappings with `MappingCodec`
           
           The above example, `CqlIntToStringCodec`, could be rewritten to leverage [MappingCodec], an abstract 
          @@ -360,8 +660,58 @@ private static String formatRow(Row row) {
           }
           ```
           
          -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html
          -[GenericType]:   https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html
          -[TypeCodec]:     https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html
          -[MappingCodec]:     https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html
          -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...-
          +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html
          +[GenericType]:   https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html
          +[TypeCodec]:     https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html
          +[format()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT-
          +[parse()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String-
          +[MappingCodec]:     https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html
          +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...-
          +
          +[Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html
          +[Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name--
          +[Enum.ordinal()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#ordinal--
          +[java.nio.ByteBuffer]: https://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html
          +[java.util.List]: https://docs.oracle.com/javase/8/docs/api/java/util/List.html
          +[java.util.Optional]: https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html
          +[Optional.empty()]: https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html#empty--
          +[java.time.Instant]: https://docs.oracle.com/javase/8/docs/api/java/time/Instant.html
          +[java.time.ZonedDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/ZonedDateTime.html
          +[java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html
          +[java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html
          +
          +[ExtraTypeCodecs]:                           https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html
          +[ExtraTypeCodecs.BLOB_TO_ARRAY]:             https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY
          +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]:     https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY
          +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]:        https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY
          +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]:       https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY
          +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]:         https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY
          +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]:        https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY
          +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]:       https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY
          +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]:      https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY
          +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]:  https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec-
          +[ExtraTypeCodecs.TIMESTAMP_UTC]:             https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC
          +[ExtraTypeCodecs.timestampAt(ZoneId)]:       https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId-
          +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]:   https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM
          +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]:      https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC
          +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId-
          +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]:    https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM
          +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]:       https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC
          +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]:  https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId-
          +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]:    https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM
          +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]:       https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC
          +[ExtraTypeCodecs.localTimestampAt(ZoneId)]:  https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId-
          +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED
          +[ExtraTypeCodecs.optionalOf(TypeCodec)]:     https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec-
          +[ExtraTypeCodecs.enumNamesOf(Class)]:        https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class-
          +[ExtraTypeCodecs.enumOrdinalsOf(Class)]:     https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class-
          +[ExtraTypeCodecs.json(Class)]:               https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-
          +[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper-
          +
          +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB
          +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP
          +
          +
          +[ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html
          +
          +[CQL blob example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java
          \ No newline at end of file
          
          From 4b828cce7fcbff18c4d4bd3b6933f422b755913a Mon Sep 17 00:00:00 2001
          From: tomekl007 
          Date: Thu, 16 Jul 2020 09:35:47 +0200
          Subject: [PATCH 524/979] JAVA-2846: Give system properties the highest
           precedence in DefaultDriverConfigLoader
          
          ---
           changelog/README.md                           |  1 +
           .../typesafe/DefaultDriverConfigLoader.java   |  8 ++-
           .../map/MapBasedDriverConfigLoaderTest.java   | 11 ++-
           .../DefaultDriverConfigLoaderTest.java        | 70 +++++++++++++++++++
           core/src/test/resources/application.conf      |  3 +
           5 files changed, 90 insertions(+), 3 deletions(-)
           create mode 100644 core/src/test/resources/application.conf
          
          diff --git a/changelog/README.md b/changelog/README.md
          index 328430daa2a..53fc37c5ddd 100644
          --- a/changelog/README.md
          +++ b/changelog/README.md
          @@ -4,6 +4,7 @@
           
           ### 4.8.0 (in progress)
           
          +- [bug] JAVA-2846: Give system properties the highest precedence in DefaultDriverConfigLoader
           - [new feature] JAVA-2691: Provide driver 4 support for extra codecs
           - [improvement] Allow injection of CodecRegistry on session builder
           - [improvement] JAVA-2828: Add safe paging state wrapper
          diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java
          index c4bf40eafb6..9b068443874 100644
          --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java
          +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java
          @@ -58,8 +58,10 @@ public class DefaultDriverConfigLoader implements DriverConfigLoader {
                   ConfigFactory.invalidateCaches();
                   // The thread's context class loader will be used for application classpath resources,
                   // while the driver class loader will be used for reference classpath resources.
          -        return ConfigFactory.defaultApplication()
          +        return ConfigFactory.defaultOverrides()
          +            .withFallback(ConfigFactory.defaultApplication())
                       .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader()))
          +            .resolve()
                       .getConfig(DEFAULT_ROOT_PATH);
                 };
           
          @@ -95,8 +97,10 @@ public DefaultDriverConfigLoader(@NonNull ClassLoader appClassLoader) {
               this(
                   () -> {
                     ConfigFactory.invalidateCaches();
          -          return ConfigFactory.defaultApplication(appClassLoader)
          +          return ConfigFactory.defaultOverrides()
          +              .withFallback(ConfigFactory.defaultApplication(appClassLoader))
                         .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader()))
          +              .resolve()
                         .getConfig(DEFAULT_ROOT_PATH);
                   });
             }
          diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java
          index 75196b7e539..0b4cd07458e 100644
          --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java
          +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/map/MapBasedDriverConfigLoaderTest.java
          @@ -15,6 +15,7 @@
            */
           package com.datastax.oss.driver.internal.core.config.map;
           
          +import static com.typesafe.config.ConfigFactory.defaultReference;
           import static org.assertj.core.api.Assertions.assertThat;
           import static org.assertj.core.api.Assertions.fail;
           
          @@ -30,6 +31,7 @@
           import com.datastax.oss.driver.internal.core.config.MockTypedOptions;
           import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader;
           import com.typesafe.config.ConfigException;
          +import com.typesafe.config.ConfigFactory;
           import java.util.Optional;
           import org.junit.Test;
           
          @@ -58,7 +60,14 @@ public void should_fill_default_profile_like_reference_file() {
               DriverExecutionProfile mapBasedConfig =
                   DriverConfigLoader.fromMap(optionsMap).getInitialConfig().getDefaultProfile();
               DriverExecutionProfile fileBasedConfig =
          -        new DefaultDriverConfigLoader().getInitialConfig().getDefaultProfile();
          +        new DefaultDriverConfigLoader(
          +                () -> {
          +                  // Only load reference.conf since we are focusing on driver defaults
          +                  ConfigFactory.invalidateCaches();
          +                  return defaultReference().getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH);
          +                })
          +            .getInitialConfig()
          +            .getDefaultProfile();
           
               // Make sure we're not missing any options. -1 is for CONFIG_RELOAD_INTERVAL, which is not
               // defined by OptionsMap because it is irrelevant for the map-based config.
          diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java
          index ba8c160b8cd..68463413ce2 100644
          --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java
          +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java
          @@ -210,6 +210,25 @@ public void should_load_from_file() {
                   .isEqualTo(DefaultConsistencyLevel.SERIAL.name());
             }
           
          +  @Test
          +  public void should_load_from_file_with_system_property() {
          +    File file = new File("src/test/resources/config/customApplication.conf");
          +    assertThat(file).exists();
          +    System.setProperty("config.file", file.getAbsolutePath());
          +    try {
          +      DriverConfigLoader loader = new DefaultDriverConfigLoader();
          +      DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile();
          +      // From customApplication.conf:
          +      assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT))
          +          .isEqualTo(Duration.ofMillis(500));
          +      // From reference.conf:
          +      assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY))
          +          .isEqualTo(DefaultConsistencyLevel.SERIAL.name());
          +    } finally {
          +      System.clearProperty("config.file");
          +    }
          +  }
          +
             @Test
             public void should_return_failed_future_if_reloading_not_supported() {
               DefaultDriverConfigLoader loader =
          @@ -224,4 +243,55 @@ public void should_return_failed_future_if_reloading_not_supported() {
                               .hasMessage(
                                   "This instance of DefaultDriverConfigLoader does not support reloading"));
             }
          +
          +  /** Test for JAVA-2846. */
          +  @Test
          +  public void should_load_setting_from_system_property_when_application_conf_is_also_provided() {
          +    System.setProperty("datastax-java-driver.basic.request.timeout", "1 millisecond");
          +    try {
          +      assertThat(
          +              new DefaultDriverConfigLoader()
          +                  .getInitialConfig()
          +                  .getDefaultProfile()
          +                  .getDuration(DefaultDriverOption.REQUEST_TIMEOUT))
          +          .isEqualTo(Duration.ofMillis(1));
          +    } finally {
          +      System.clearProperty("datastax-java-driver.basic.request.timeout");
          +    }
          +  }
          +
          +  /** Test for JAVA-2846. */
          +  @Test
          +  public void
          +      should_load_and_resolve_setting_from_system_property_when_application_conf_is_also_provided() {
          +    System.setProperty(
          +        "datastax-java-driver.advanced.connection.init-query-timeout", "1234 milliseconds");
          +    try {
          +      assertThat(
          +              new DefaultDriverConfigLoader()
          +                  .getInitialConfig()
          +                  .getDefaultProfile()
          +                  .getDuration(DefaultDriverOption.REQUEST_TIMEOUT))
          +          .isEqualTo(Duration.ofMillis(1234));
          +    } finally {
          +      System.clearProperty("datastax-java-driver.advanced.connection.init-query-timeout");
          +    }
          +  }
          +
          +  /** Test for JAVA-2846. */
          +  @Test
          +  public void
          +      should_load_setting_from_system_property_when_application_conf_is_also_provided_for_custom_classloader() {
          +    System.setProperty("datastax-java-driver.basic.request.timeout", "1 millisecond");
          +    try {
          +      assertThat(
          +              new DefaultDriverConfigLoader(Thread.currentThread().getContextClassLoader())
          +                  .getInitialConfig()
          +                  .getDefaultProfile()
          +                  .getDuration(DefaultDriverOption.REQUEST_TIMEOUT))
          +          .isEqualTo(Duration.ofMillis(1));
          +    } finally {
          +      System.clearProperty("datastax-java-driver.basic.request.timeout");
          +    }
          +  }
           }
          diff --git a/core/src/test/resources/application.conf b/core/src/test/resources/application.conf
          new file mode 100644
          index 00000000000..75cd8820639
          --- /dev/null
          +++ b/core/src/test/resources/application.conf
          @@ -0,0 +1,3 @@
          +datastax-java-driver {
          +  basic.request.timeout = ${datastax-java-driver.advanced.connection.init-query-timeout}
          +}
          \ No newline at end of file
          
          From b98d789cba987372122e6a5f2d6ea5b7e273b634 Mon Sep 17 00:00:00 2001
          From: olim7t 
          Date: Fri, 17 Jul 2020 15:37:03 -0700
Subject: [PATCH 525/979] Remove explicit dependency on javatuples
          
          It is a transitive dependency of Tinkerpop. We do reference it directly
          from the driver code (CqlCollectionPredicate), so in theory we should
          redeclare it, but it's only used to deserialize graph stuff and not
          exposed in our public API.
          
          The advantage of not declaring it explicitly is that it's one less
          thing to exclude manually if you're not using graph.
          ---
           core-shaded/pom.xml | 4 ----
           core/pom.xml        | 4 ----
           pom.xml             | 5 -----
           3 files changed, 13 deletions(-)
          
          diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml
          index bd2b2375332..4aa22203da7 100644
          --- a/core-shaded/pom.xml
          +++ b/core-shaded/pom.xml
          @@ -100,10 +100,6 @@
                 org.apache.tinkerpop
                 tinkergraph-gremlin
               
          -    
          -      org.javatuples
          -      javatuples
          -    
               
                 org.reactivestreams
                 reactive-streams
          diff --git a/core/pom.xml b/core/pom.xml
          index 1769109065c..99f3f266b34 100644
          --- a/core/pom.xml
          +++ b/core/pom.xml
          @@ -65,10 +65,6 @@
                 com.github.jnr
                 jnr-posix
               
          -    
          -      org.javatuples
          -      javatuples
          -    
               
                 org.xerial.snappy
                 snappy-java
          diff --git a/pom.xml b/pom.xml
          index 3b8376e7e03..859b6a47e21 100644
          --- a/pom.xml
          +++ b/pom.xml
          @@ -190,11 +190,6 @@
                   spotbugs-annotations
                   3.1.12
                 
          -      
          -        org.javatuples
          -        javatuples
          -        1.2
          -      
                 
                   com.squareup
                   javapoet
          
          From ca871adf50f7e3848c37f31c6ebe983ae4458e01 Mon Sep 17 00:00:00 2001
          From: olim7t 
          Date: Wed, 15 Jul 2020 15:55:34 -0700
          Subject: [PATCH 526/979] Bump Netty to 4.1.51
          
          ---
           pom.xml | 2 +-
           1 file changed, 1 insertion(+), 1 deletion(-)
          
          diff --git a/pom.xml b/pom.xml
          index 859b6a47e21..10eace81f4d 100644
          --- a/pom.xml
          +++ b/pom.xml
          @@ -46,7 +46,7 @@
               1.3.4
               2.1.11
               4.0.5
          -    4.1.45.Final
          +    4.1.51.Final
               1.2.1
               3.4.5
               1.7.26
          
          From 3fb67c6e28e16c4f2df22544aae533ecd631106f Mon Sep 17 00:00:00 2001
          From: olim7t 
          Date: Thu, 16 Jul 2020 17:20:31 -0700
          Subject: [PATCH 527/979] JAVA-2829: Log protocol negotiation messages at DEBUG
           level
          
          ---
           changelog/README.md                                   |  1 +
           .../driver/internal/core/channel/ChannelFactory.java  |  2 +-
           manual/core/native_protocol/README.md                 | 11 +++++++----
           3 files changed, 9 insertions(+), 5 deletions(-)
          
          diff --git a/changelog/README.md b/changelog/README.md
          index 53fc37c5ddd..165faae66c1 100644
          --- a/changelog/README.md
          +++ b/changelog/README.md
          @@ -4,6 +4,7 @@
           
           ### 4.8.0 (in progress)
           
          +- [improvement] JAVA-2829: Log protocol negotiation messages at DEBUG level
           - [bug] JAVA-2846: Give system properties the highest precedence in DefaultDriverConfigLoader
           - [new feature] JAVA-2691: Provide driver 4 support for extra codecs
           - [improvement] Allow injection of CodecRegistry on session builder
          diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java
          index c6d91f7636d..a9fbaf18d81 100644
          --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java
          +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java
          @@ -224,7 +224,7 @@ private void connect(
                         Optional downgraded =
                             context.getProtocolVersionRegistry().downgrade(currentVersion);
                         if (downgraded.isPresent()) {
          -                LOG.info(
          +                LOG.debug(
                               "[{}] Failed to connect with protocol {}, retrying with {}",
                               logPrefix,
                               currentVersion,
          diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md
          index d3c17bddcbb..649a89b71b7 100644
          --- a/manual/core/native_protocol/README.md
          +++ b/manual/core/native_protocol/README.md
          @@ -86,14 +86,17 @@ force the protocol version manually anymore.
           
           ### Debugging protocol negotiation
           
          -The main steps are [logged](../logging/) at level `INFO`. If the driver downgrades while negotiating
          -with the first node, you should see logs such as:
          +You can observe the negotiation process in the [logs](../logging/).
          + 
          +The versions tried while negotiating with the first node are logged at level `DEBUG` in the category
          +`com.datastax.oss.driver.internal.core.channel.ChannelFactory`:
           
           ```
          -INFO ChannelFactory - Failed to connect with protocol v4, retrying with v3
          +DEBUG ChannelFactory - Failed to connect with protocol v4, retrying with v3
           ```
           
          -If it then detects a mixed cluster with lower versions, it will log: 
          +If a mixed cluster renegotiation happens, it is logged at level `INFO` in the category
          +`com.datastax.oss.driver.internal.core.session.DefaultSession`:
           
           ```
           INFO DefaultSession - Negotiated protocol version v4 for the initial contact point, but other nodes
          
          From 67da538ead8dc0303439bde205fef605287b2652 Mon Sep 17 00:00:00 2001
          From: olim7t 
          Date: Wed, 8 Jul 2020 10:32:22 -0700
          Subject: [PATCH 528/979] JAVA-2431: Set all occurrences when bound variables
           are used multiple times
          
          ---
           changelog/README.md                           |   1 +
           .../core/cql/reactive/DefaultReactiveRow.java |  12 +
           .../api/core/cql/BoundStatementBuilder.java   |  21 ++
           .../api/core/cql/ColumnDefinitions.java       |  50 +++-
           .../driver/api/core/data/AccessibleById.java  |  22 ++
           .../api/core/data/AccessibleByName.java       |  22 ++
           .../driver/api/core/data/SettableById.java    | 253 +++++++++++++----
           .../driver/api/core/data/SettableByName.java  | 255 ++++++++++++++----
           .../driver/api/core/type/UserDefinedType.java |  34 +++
           .../core/cql/DefaultBoundStatement.java       |  20 ++
           .../core/cql/DefaultColumnDefinitions.java    |  12 +
           .../driver/internal/core/cql/DefaultRow.java  |  20 ++
           .../core/cql/EmptyColumnDefinitions.java      |  13 +
           .../internal/core/data/DefaultUdtValue.java   |  21 ++
           .../internal/core/data/IdentifierIndex.java   |  48 ++--
           .../schema/ShallowUserDefinedType.java        |  14 +
           .../core/type/DefaultUserDefinedType.java     |  12 +
           .../driver/internal/core/util/Loggers.java    |  11 +
           .../internal/core/cql/reactive/MockRow.java   |  14 +
           .../core/data/IdentifierIndexTest.java        |  37 ++-
           .../driver/core/cql/BoundStatementCcmIT.java  |  39 +++
           .../driver/api/mapper/reactive/MockRow.java   |  14 +
           pom.xml                                       |   5 +
           23 files changed, 811 insertions(+), 139 deletions(-)
          
          diff --git a/changelog/README.md b/changelog/README.md
          index 165faae66c1..8088cd126fd 100644
          --- a/changelog/README.md
          +++ b/changelog/README.md
          @@ -4,6 +4,7 @@
           
           ### 4.8.0 (in progress)
           
          +- [improvement] JAVA-2431: Set all occurrences when bound variables are used multiple times
           - [improvement] JAVA-2829: Log protocol negotiation messages at DEBUG level
           - [bug] JAVA-2846: Give system properties the highest precedence in DefaultDriverConfigLoader
           - [new feature] JAVA-2691: Provide driver 4 support for extra codecs
          diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java
          index b2708a83633..64382440ca5 100644
          --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java
          +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/DefaultReactiveRow.java
          @@ -381,6 +381,12 @@ public TupleValue getTupleValue(@NonNull String name) {
               return row.getTupleValue(name);
             }
           
          +  @NonNull
          +  @Override
          +  public List allIndicesOf(@NonNull String name) {
          +    return row.allIndicesOf(name);
          +  }
          +
             @Override
             public int firstIndexOf(@NonNull String name) {
               return row.firstIndexOf(name);
          @@ -538,6 +544,12 @@ public TupleValue getTupleValue(@NonNull CqlIdentifier id) {
               return row.getTupleValue(id);
             }
           
          +  @NonNull
          +  @Override
          +  public List allIndicesOf(@NonNull CqlIdentifier id) {
          +    return row.allIndicesOf(id);
          +  }
          +
             @Override
             public int firstIndexOf(@NonNull CqlIdentifier id) {
               return row.firstIndexOf(id);
          diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java
          index e0634f78248..5cdd07f2a61 100644
          --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java
          +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java
          @@ -27,6 +27,7 @@
           import edu.umd.cs.findbugs.annotations.Nullable;
           import java.nio.ByteBuffer;
           import java.time.Duration;
          +import java.util.List;
           import java.util.Map;
           import net.jcip.annotations.NotThreadSafe;
           
          @@ -98,6 +99,16 @@ public BoundStatementBuilder(@NonNull BoundStatement template) {
               this.node = template.getNode();
             }
           
          +  @NonNull
          +  @Override
          +  public List allIndicesOf(@NonNull CqlIdentifier id) {
          +    List indices = variableDefinitions.allIndicesOf(id);
          +    if (indices.isEmpty()) {
          +      throw new IllegalArgumentException(id + " is not a variable in this bound statement");
          +    }
          +    return indices;
          +  }
          +
             @Override
             public int firstIndexOf(@NonNull CqlIdentifier id) {
               int indexOf = variableDefinitions.firstIndexOf(id);
          @@ -107,6 +118,16 @@ public int firstIndexOf(@NonNull CqlIdentifier id) {
               return indexOf;
             }
           
          +  @NonNull
          +  @Override
          +  public List allIndicesOf(@NonNull String name) {
          +    List indices = variableDefinitions.allIndicesOf(name);
          +    if (indices.isEmpty()) {
          +      throw new IllegalArgumentException(name + " is not a variable in this bound statement");
          +    }
          +    return indices;
          +  }
          +
             @Override
             public int firstIndexOf(@NonNull String name) {
               int indexOf = variableDefinitions.firstIndexOf(name);
          diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ColumnDefinitions.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ColumnDefinitions.java
          index 15b206c0a6f..176884db998 100644
          --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/ColumnDefinitions.java
          +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/ColumnDefinitions.java
          @@ -18,7 +18,10 @@
           import com.datastax.oss.driver.api.core.CqlIdentifier;
           import com.datastax.oss.driver.api.core.data.AccessibleByName;
           import com.datastax.oss.driver.api.core.detach.Detachable;
          +import com.datastax.oss.driver.internal.core.util.Loggers;
           import edu.umd.cs.findbugs.annotations.NonNull;
          +import java.util.Collections;
          +import java.util.List;
           
           /**
            * Metadata about a set of CQL columns.
          @@ -97,22 +100,61 @@ default ColumnDefinition get(@NonNull CqlIdentifier name) {
             /** Whether there is a definition using the given CQL identifier. */
             boolean contains(@NonNull CqlIdentifier id);
           
          +  /**
          +   * Returns the indices of all columns that use the given name.
          +   *
          +   * 

          Because raw strings are ambiguous with regard to case-sensitivity, the argument will be + * interpreted according to the rules described in {@link AccessibleByName}. + * + * @return the indices, or an empty list if no column uses this name. + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(String)} in a singleton list, which is not entirely correct, as it + * will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull String name) { + Loggers.COLUMN_DEFINITIONS.warn( + "{} should override allIndicesOf(String), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(name)); + } + /** * Returns the index of the first column that uses the given name. * *

          Because raw strings are ambiguous with regard to case-sensitivity, the argument will be * interpreted according to the rules described in {@link AccessibleByName}. * - *

          Also, note that if multiple columns use the same name, there is no way to find the index for - * the next occurrences. One way to avoid this is to use aliases in your CQL queries. + * @return the index, or -1 if no column uses this name. */ int firstIndexOf(@NonNull String name); + /** + * Returns the indices of all columns that use the given identifier. + * + * @return the indices, or an empty list if no column uses this identifier. + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(CqlIdentifier)} in a singleton list, which is not entirely correct, + * as it will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull CqlIdentifier id) { + Loggers.COLUMN_DEFINITIONS.warn( + "{} should override allIndicesOf(CqlIdentifier), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(id)); + } + /** * Returns the index of the first column that uses the given identifier. * - *

          Note that if multiple columns use the same identifier, there is no way to find the index for - * the next occurrences. One way to avoid this is to use aliases in your CQL queries. + * @return the index, or -1 if no column uses this identifier. */ int firstIndexOf(@NonNull CqlIdentifier id); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleById.java index f4cedb77c31..7d39b66c861 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleById.java @@ -17,7 +17,10 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.util.Loggers; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.List; /** * A data structure where the values are accessible via a CQL identifier. @@ -26,6 +29,25 @@ */ public interface AccessibleById extends AccessibleByIndex { + /** + * Returns all the indices where a given identifier appears. + * + * @throws IllegalArgumentException if the id is invalid. + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(CqlIdentifier)} in a singleton list, which is not entirely correct, + * as it will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull CqlIdentifier id) { + Loggers.ACCESSIBLE_BY_ID.warn( + "{} should override allIndicesOf(CqlIdentifier), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(id)); + } + /** * Returns the first index where a given identifier appears (depending on the implementation, * identifiers may appear multiple times). diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleByName.java index ed7359b9c3e..edd82a40c8e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/AccessibleByName.java @@ -17,7 +17,10 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.internal.core.util.Loggers; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; +import java.util.List; /** * A data structure where the values are accessible via a name string. @@ -42,6 +45,25 @@ */ public interface AccessibleByName extends AccessibleByIndex { + /** + * Returns all the indices where a given identifier appears. + * + * @throws IllegalArgumentException if the name is invalid. + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(String)} in a singleton list, which is not entirely correct, as it + * will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull String name) { + Loggers.ACCESSIBLE_BY_NAME.warn( + "{} should override allIndicesOf(String), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(name)); + } + /** * Returns the first index where a given identifier appears (depending on the implementation, * identifiers may appear multiple times). diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index de9a906ca49..29927a4a42f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -41,7 +41,7 @@ public interface SettableById> extends SettableByIndex, AccessibleById { /** - * Sets the raw binary representation of the value for the first occurrence of {@code id}. + * Sets the raw binary representation of the value for all occurrences of {@code id}. * *

          This is primarily for internal use; you'll likely want to use one of the typed setters * instead, to pass a higher-level Java representation. @@ -59,7 +59,12 @@ public interface SettableById> @NonNull @CheckReturnValue default SelfT setBytesUnsafe(@NonNull CqlIdentifier id, @Nullable ByteBuffer v) { - return setBytesUnsafe(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setBytesUnsafe(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } @NonNull @@ -69,7 +74,7 @@ default DataType getType(@NonNull CqlIdentifier id) { } /** - * Sets the value for the first occurrence of {@code id} to CQL {@code NULL}. + * Sets the value for all occurrences of {@code id} to CQL {@code NULL}. * *

          If you want to avoid the overhead of building a {@code CqlIdentifier}, use the variant of * this method that takes a string argument. @@ -79,12 +84,16 @@ default DataType getType(@NonNull CqlIdentifier id) { @NonNull @CheckReturnValue default SelfT setToNull(@NonNull CqlIdentifier id) { - return setToNull(firstIndexOf(id)); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setToNull(i); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id}, using the given codec for the - * conversion. + * Sets the value for all occurrences of {@code id}, using the given codec for the conversion. * *

          This method completely bypasses the {@link #codecRegistry()}, and forces the driver to use * the given codec instead. This can be useful if the codec would collide with a previously @@ -102,11 +111,16 @@ default SelfT setToNull(@NonNull CqlIdentifier id) { @CheckReturnValue default SelfT set( @NonNull CqlIdentifier id, @Nullable ValueT v, @NonNull TypeCodec codec) { - return set(firstIndexOf(id), v, codec); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).set(i, v, codec); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id}, converting it to the given Java type. + * Sets the value for all occurrences of {@code id}, converting it to the given Java type. * *

          The {@link #codecRegistry()} will be used to look up a codec to handle the conversion. * @@ -123,11 +137,16 @@ default SelfT set( @CheckReturnValue default SelfT set( @NonNull CqlIdentifier id, @Nullable ValueT v, @NonNull GenericType targetType) { - return set(firstIndexOf(id), v, targetType); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).set(i, v, targetType); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Returns the value for the first occurrence of {@code id}, converting it to the given Java type. + * Returns the value for all occurrences of {@code id}, converting it to the given Java type. * *

          The {@link #codecRegistry()} will be used to look up a codec to handle the conversion. * @@ -143,11 +162,16 @@ default SelfT set( @CheckReturnValue default SelfT set( @NonNull CqlIdentifier id, @Nullable ValueT v, @NonNull Class targetClass) { - return set(firstIndexOf(id), v, targetClass); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).set(i, v, targetClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive boolean. + * Sets the value for all occurrences of {@code id} to the provided Java primitive boolean. * *

          By default, this works with CQL type {@code boolean}. * @@ -162,11 +186,16 @@ default SelfT set( @NonNull @CheckReturnValue default SelfT setBoolean(@NonNull CqlIdentifier id, boolean v) { - return setBoolean(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setBoolean(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive byte. + * Sets the value for all occurrences of {@code id} to the provided Java primitive byte. * *

          By default, this works with CQL type {@code tinyint}. * @@ -181,11 +210,16 @@ default SelfT setBoolean(@NonNull CqlIdentifier id, boolean v) { @NonNull @CheckReturnValue default SelfT setByte(@NonNull CqlIdentifier id, byte v) { - return setByte(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setByte(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive double. + * Sets the value for all occurrences of {@code id} to the provided Java primitive double. * *

          By default, this works with CQL type {@code double}. * @@ -200,11 +234,16 @@ default SelfT setByte(@NonNull CqlIdentifier id, byte v) { @NonNull @CheckReturnValue default SelfT setDouble(@NonNull CqlIdentifier id, double v) { - return setDouble(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setDouble(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive float. + * Sets the value for all occurrences of {@code id} to the provided Java primitive float. * *

          By default, this works with CQL type {@code float}. * @@ -219,11 +258,16 @@ default SelfT setDouble(@NonNull CqlIdentifier id, double v) { @NonNull @CheckReturnValue default SelfT setFloat(@NonNull CqlIdentifier id, float v) { - return setFloat(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setFloat(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive integer. + * Sets the value for all occurrences of {@code id} to the provided Java primitive integer. * *

          By default, this works with CQL type {@code int}. * @@ -238,11 +282,16 @@ default SelfT setFloat(@NonNull CqlIdentifier id, float v) { @NonNull @CheckReturnValue default SelfT setInt(@NonNull CqlIdentifier id, int v) { - return setInt(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setInt(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive long. + * Sets the value for all occurrences of {@code id} to the provided Java primitive long. * *

          By default, this works with CQL types {@code bigint} and {@code counter}. * @@ -257,11 +306,16 @@ default SelfT setInt(@NonNull CqlIdentifier id, int v) { @NonNull @CheckReturnValue default SelfT setLong(@NonNull CqlIdentifier id, long v) { - return setLong(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setLong(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java primitive short. + * Sets the value for all occurrences of {@code id} to the provided Java primitive short. * *

          By default, this works with CQL type {@code smallint}. * @@ -276,11 +330,16 @@ default SelfT setLong(@NonNull CqlIdentifier id, long v) { @NonNull @CheckReturnValue default SelfT setShort(@NonNull CqlIdentifier id, short v) { - return setShort(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setShort(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java instant. + * Sets the value for all occurrences of {@code id} to the provided Java instant. * *

          By default, this works with CQL type {@code timestamp}. * @@ -292,11 +351,16 @@ default SelfT setShort(@NonNull CqlIdentifier id, short v) { @NonNull @CheckReturnValue default SelfT setInstant(@NonNull CqlIdentifier id, @Nullable Instant v) { - return setInstant(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setInstant(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java local date. + * Sets the value for all occurrences of {@code id} to the provided Java local date. * *

          By default, this works with CQL type {@code date}. * @@ -308,11 +372,16 @@ default SelfT setInstant(@NonNull CqlIdentifier id, @Nullable Instant v) { @NonNull @CheckReturnValue default SelfT setLocalDate(@NonNull CqlIdentifier id, @Nullable LocalDate v) { - return setLocalDate(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setLocalDate(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java local time. + * Sets the value for all occurrences of {@code id} to the provided Java local time. * *

          By default, this works with CQL type {@code time}. * @@ -324,11 +393,16 @@ default SelfT setLocalDate(@NonNull CqlIdentifier id, @Nullable LocalDate v) { @NonNull @CheckReturnValue default SelfT setLocalTime(@NonNull CqlIdentifier id, @Nullable LocalTime v) { - return setLocalTime(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setLocalTime(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java byte buffer. + * Sets the value for all occurrences of {@code id} to the provided Java byte buffer. * *

          By default, this works with CQL type {@code blob}. * @@ -340,11 +414,16 @@ default SelfT setLocalTime(@NonNull CqlIdentifier id, @Nullable LocalTime v) { @NonNull @CheckReturnValue default SelfT setByteBuffer(@NonNull CqlIdentifier id, @Nullable ByteBuffer v) { - return setByteBuffer(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setByteBuffer(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java string. + * Sets the value for all occurrences of {@code id} to the provided Java string. * *

          By default, this works with CQL types {@code text}, {@code varchar} and {@code ascii}. * @@ -356,11 +435,16 @@ default SelfT setByteBuffer(@NonNull CqlIdentifier id, @Nullable ByteBuffer v) { @NonNull @CheckReturnValue default SelfT setString(@NonNull CqlIdentifier id, @Nullable String v) { - return setString(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setString(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java big integer. + * Sets the value for all occurrences of {@code id} to the provided Java big integer. * *

          By default, this works with CQL type {@code varint}. * @@ -372,11 +456,16 @@ default SelfT setString(@NonNull CqlIdentifier id, @Nullable String v) { @NonNull @CheckReturnValue default SelfT setBigInteger(@NonNull CqlIdentifier id, @Nullable BigInteger v) { - return setBigInteger(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setBigInteger(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java big decimal. + * Sets the value for all occurrences of {@code id} to the provided Java big decimal. * *

          By default, this works with CQL type {@code decimal}. * @@ -388,11 +477,16 @@ default SelfT setBigInteger(@NonNull CqlIdentifier id, @Nullable BigInteger v) { @NonNull @CheckReturnValue default SelfT setBigDecimal(@NonNull CqlIdentifier id, @Nullable BigDecimal v) { - return setBigDecimal(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setBigDecimal(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java UUID. + * Sets the value for all occurrences of {@code id} to the provided Java UUID. * *

          By default, this works with CQL types {@code uuid} and {@code timeuuid}. * @@ -404,11 +498,16 @@ default SelfT setBigDecimal(@NonNull CqlIdentifier id, @Nullable BigDecimal v) { @NonNull @CheckReturnValue default SelfT setUuid(@NonNull CqlIdentifier id, @Nullable UUID v) { - return setUuid(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setUuid(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java IP address. + * Sets the value for all occurrences of {@code id} to the provided Java IP address. * *

          By default, this works with CQL type {@code inet}. * @@ -420,11 +519,16 @@ default SelfT setUuid(@NonNull CqlIdentifier id, @Nullable UUID v) { @NonNull @CheckReturnValue default SelfT setInetAddress(@NonNull CqlIdentifier id, @Nullable InetAddress v) { - return setInetAddress(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setInetAddress(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided duration. + * Sets the value for all occurrences of {@code id} to the provided duration. * *

          By default, this works with CQL type {@code duration}. * @@ -436,11 +540,16 @@ default SelfT setInetAddress(@NonNull CqlIdentifier id, @Nullable InetAddress v) @NonNull @CheckReturnValue default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) { - return setCqlDuration(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setCqlDuration(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided token. + * Sets the value for all occurrences of {@code id} to the provided token. * *

          This works with the CQL type matching the partitioner in use for this cluster: {@code * bigint} for {@code Murmur3Partitioner}, {@code blob} for {@code ByteOrderedPartitioner}, and @@ -454,11 +563,16 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) @NonNull @CheckReturnValue default SelfT setToken(@NonNull CqlIdentifier id, @NonNull Token v) { - return setToken(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setToken(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java list. + * Sets the value for all occurrences of {@code id} to the provided Java list. * *

          By default, this works with CQL type {@code list}. * @@ -476,11 +590,16 @@ default SelfT setList( @NonNull CqlIdentifier id, @Nullable List v, @NonNull Class elementsClass) { - return setList(firstIndexOf(id), v, elementsClass); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setList(i, v, elementsClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java set. + * Sets the value for all occurrences of {@code id} to the provided Java set. * *

          By default, this works with CQL type {@code set}. * @@ -498,11 +617,16 @@ default SelfT setSet( @NonNull CqlIdentifier id, @Nullable Set v, @NonNull Class elementsClass) { - return setSet(firstIndexOf(id), v, elementsClass); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setSet(i, v, elementsClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided Java map. + * Sets the value for all occurrences of {@code id} to the provided Java map. * *

          By default, this works with CQL type {@code map}. * @@ -521,11 +645,16 @@ default SelfT setMap( @Nullable Map v, @NonNull Class keyClass, @NonNull Class valueClass) { - return setMap(firstIndexOf(id), v, keyClass, valueClass); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setMap(i, v, keyClass, valueClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided user defined type value. + * Sets the value for all occurrences of {@code id} to the provided user defined type value. * *

          By default, this works with CQL user-defined types. * @@ -537,11 +666,16 @@ default SelfT setMap( @NonNull @CheckReturnValue default SelfT setUdtValue(@NonNull CqlIdentifier id, @Nullable UdtValue v) { - return setUdtValue(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setUdtValue(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code id} to the provided tuple value. + * Sets the value for all occurrences of {@code id} to the provided tuple value. * *

          By default, this works with CQL tuples. * @@ -553,6 +687,11 @@ default SelfT setUdtValue(@NonNull CqlIdentifier id, @Nullable UdtValue v) { @NonNull @CheckReturnValue default SelfT setTupleValue(@NonNull CqlIdentifier id, @Nullable TupleValue v) { - return setTupleValue(firstIndexOf(id), v); + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setTupleValue(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index 0ebd95b22cc..0d4fed0688d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -40,7 +40,7 @@ public interface SettableByName> extends SettableByIndex, AccessibleByName { /** - * Sets the raw binary representation of the value for the first occurrence of {@code name}. + * Sets the raw binary representation of the value for all occurrences of {@code name}. * *

          This is primarily for internal use; you'll likely want to use one of the typed setters * instead, to pass a higher-level Java representation. @@ -58,7 +58,12 @@ public interface SettableByName> @NonNull @CheckReturnValue default SelfT setBytesUnsafe(@NonNull String name, @Nullable ByteBuffer v) { - return setBytesUnsafe(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setBytesUnsafe(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } @NonNull @@ -68,7 +73,7 @@ default DataType getType(@NonNull String name) { } /** - * Sets the value for the first occurrence of {@code name} to CQL {@code NULL}. + * Sets the value for all occurrences of {@code name} to CQL {@code NULL}. * *

          This method deals with case sensitivity in the way explained in the documentation of {@link * AccessibleByName}. @@ -78,12 +83,16 @@ default DataType getType(@NonNull String name) { @NonNull @CheckReturnValue default SelfT setToNull(@NonNull String name) { - return setToNull(firstIndexOf(name)); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setToNull(i); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name}, using the given codec for the - * conversion. + * Sets the value for all occurrences of {@code name}, using the given codec for the conversion. * *

          This method completely bypasses the {@link #codecRegistry()}, and forces the driver to use * the given codec instead. This can be useful if the codec would collide with a previously @@ -101,11 +110,16 @@ default SelfT setToNull(@NonNull String name) { @CheckReturnValue default SelfT set( @NonNull String name, @Nullable ValueT v, @NonNull TypeCodec codec) { - return set(firstIndexOf(name), v, codec); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).set(i, v, codec); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name}, converting it to the given Java type. + * Sets the value for all occurrences of {@code name}, converting it to the given Java type. * *

          The {@link #codecRegistry()} will be used to look up a codec to handle the conversion. * @@ -122,12 +136,16 @@ default SelfT set( @CheckReturnValue default SelfT set( @NonNull String name, @Nullable ValueT v, @NonNull GenericType targetType) { - return set(firstIndexOf(name), v, targetType); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).set(i, v, targetType); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Returns the value for the first occurrence of {@code name}, converting it to the given Java - * type. + * Returns the value for all occurrences of {@code name}, converting it to the given Java type. * *

          The {@link #codecRegistry()} will be used to look up a codec to handle the conversion. * @@ -143,11 +161,16 @@ default SelfT set( @CheckReturnValue default SelfT set( @NonNull String name, @Nullable ValueT v, @NonNull Class targetClass) { - return set(firstIndexOf(name), v, targetClass); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).set(i, v, targetClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive boolean. + * Sets the value for all occurrences of {@code name} to the provided Java primitive boolean. * *

          By default, this works with CQL type {@code boolean}. * @@ -162,11 +185,16 @@ default SelfT set( @NonNull @CheckReturnValue default SelfT setBoolean(@NonNull String name, boolean v) { - return setBoolean(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setBoolean(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive byte. + * Sets the value for all occurrences of {@code name} to the provided Java primitive byte. * *

          By default, this works with CQL type {@code tinyint}. * @@ -181,11 +209,16 @@ default SelfT setBoolean(@NonNull String name, boolean v) { @NonNull @CheckReturnValue default SelfT setByte(@NonNull String name, byte v) { - return setByte(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setByte(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive double. + * Sets the value for all occurrences of {@code name} to the provided Java primitive double. * *

          By default, this works with CQL type {@code double}. * @@ -200,11 +233,16 @@ default SelfT setByte(@NonNull String name, byte v) { @NonNull @CheckReturnValue default SelfT setDouble(@NonNull String name, double v) { - return setDouble(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setDouble(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive float. + * Sets the value for all occurrences of {@code name} to the provided Java primitive float. * *

          By default, this works with CQL type {@code float}. * @@ -219,11 +257,16 @@ default SelfT setDouble(@NonNull String name, double v) { @NonNull @CheckReturnValue default SelfT setFloat(@NonNull String name, float v) { - return setFloat(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setFloat(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive integer. + * Sets the value for all occurrences of {@code name} to the provided Java primitive integer. * *

          By default, this works with CQL type {@code int}. * @@ -238,11 +281,16 @@ default SelfT setFloat(@NonNull String name, float v) { @NonNull @CheckReturnValue default SelfT setInt(@NonNull String name, int v) { - return setInt(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setInt(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive long. + * Sets the value for all occurrences of {@code name} to the provided Java primitive long. * *

          By default, this works with CQL types {@code bigint} and {@code counter}. * @@ -257,11 +305,16 @@ default SelfT setInt(@NonNull String name, int v) { @NonNull @CheckReturnValue default SelfT setLong(@NonNull String name, long v) { - return setLong(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setLong(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java primitive short. + * Sets the value for all occurrences of {@code name} to the provided Java primitive short. * *

          By default, this works with CQL type {@code smallint}. * @@ -276,11 +329,16 @@ default SelfT setLong(@NonNull String name, long v) { @NonNull @CheckReturnValue default SelfT setShort(@NonNull String name, short v) { - return setShort(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setShort(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java instant. + * Sets the value for all occurrences of {@code name} to the provided Java instant. * *

          By default, this works with CQL type {@code timestamp}. * @@ -292,11 +350,16 @@ default SelfT setShort(@NonNull String name, short v) { @NonNull @CheckReturnValue default SelfT setInstant(@NonNull String name, @Nullable Instant v) { - return setInstant(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setInstant(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java local date. + * Sets the value for all occurrences of {@code name} to the provided Java local date. * *

          By default, this works with CQL type {@code date}. * @@ -308,11 +371,16 @@ default SelfT setInstant(@NonNull String name, @Nullable Instant v) { @NonNull @CheckReturnValue default SelfT setLocalDate(@NonNull String name, @Nullable LocalDate v) { - return setLocalDate(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setLocalDate(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java local time. + * Sets the value for all occurrences of {@code name} to the provided Java local time. * *

          By default, this works with CQL type {@code time}. * @@ -324,11 +392,16 @@ default SelfT setLocalDate(@NonNull String name, @Nullable LocalDate v) { @NonNull @CheckReturnValue default SelfT setLocalTime(@NonNull String name, @Nullable LocalTime v) { - return setLocalTime(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setLocalTime(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java byte buffer. + * Sets the value for all occurrences of {@code name} to the provided Java byte buffer. * *

          By default, this works with CQL type {@code blob}. * @@ -340,11 +413,16 @@ default SelfT setLocalTime(@NonNull String name, @Nullable LocalTime v) { @NonNull @CheckReturnValue default SelfT setByteBuffer(@NonNull String name, @Nullable ByteBuffer v) { - return setByteBuffer(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setByteBuffer(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java string. + * Sets the value for all occurrences of {@code name} to the provided Java string. * *

          By default, this works with CQL types {@code text}, {@code varchar} and {@code ascii}. * @@ -356,11 +434,16 @@ default SelfT setByteBuffer(@NonNull String name, @Nullable ByteBuffer v) { @NonNull @CheckReturnValue default SelfT setString(@NonNull String name, @Nullable String v) { - return setString(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setString(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java big integer. + * Sets the value for all occurrences of {@code name} to the provided Java big integer. * *

          By default, this works with CQL type {@code varint}. * @@ -372,11 +455,16 @@ default SelfT setString(@NonNull String name, @Nullable String v) { @NonNull @CheckReturnValue default SelfT setBigInteger(@NonNull String name, @Nullable BigInteger v) { - return setBigInteger(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setBigInteger(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java big decimal. + * Sets the value for all occurrences of {@code name} to the provided Java big decimal. * *

          By default, this works with CQL type {@code decimal}. * @@ -388,11 +476,16 @@ default SelfT setBigInteger(@NonNull String name, @Nullable BigInteger v) { @NonNull @CheckReturnValue default SelfT setBigDecimal(@NonNull String name, @Nullable BigDecimal v) { - return setBigDecimal(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setBigDecimal(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java UUID. + * Sets the value for all occurrences of {@code name} to the provided Java UUID. * *

          By default, this works with CQL types {@code uuid} and {@code timeuuid}. * @@ -404,11 +497,16 @@ default SelfT setBigDecimal(@NonNull String name, @Nullable BigDecimal v) { @NonNull @CheckReturnValue default SelfT setUuid(@NonNull String name, @Nullable UUID v) { - return setUuid(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setUuid(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java IP address. + * Sets the value for all occurrences of {@code name} to the provided Java IP address. * *

          By default, this works with CQL type {@code inet}. * @@ -420,11 +518,16 @@ default SelfT setUuid(@NonNull String name, @Nullable UUID v) { @NonNull @CheckReturnValue default SelfT setInetAddress(@NonNull String name, @Nullable InetAddress v) { - return setInetAddress(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setInetAddress(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided duration. + * Sets the value for all occurrences of {@code name} to the provided duration. * *

          By default, this works with CQL type {@code duration}. * @@ -436,11 +539,16 @@ default SelfT setInetAddress(@NonNull String name, @Nullable InetAddress v) { @NonNull @CheckReturnValue default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { - return setCqlDuration(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setCqlDuration(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided token. + * Sets the value for all occurrences of {@code name} to the provided token. * *

          This works with the CQL type matching the partitioner in use for this cluster: {@code * bigint} for {@code Murmur3Partitioner}, {@code blob} for {@code ByteOrderedPartitioner}, and @@ -454,11 +562,16 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { @NonNull @CheckReturnValue default SelfT setToken(@NonNull String name, @NonNull Token v) { - return setToken(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setToken(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java list. + * Sets the value for all occurrences of {@code name} to the provided Java list. * *

          By default, this works with CQL type {@code list}. * @@ -474,11 +587,16 @@ default SelfT setToken(@NonNull String name, @NonNull Token v) { @CheckReturnValue default SelfT setList( @NonNull String name, @Nullable List v, @NonNull Class elementsClass) { - return setList(firstIndexOf(name), v, elementsClass); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setList(i, v, elementsClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java set. + * Sets the value for all occurrences of {@code name} to the provided Java set. * *

          By default, this works with CQL type {@code set}. * @@ -494,11 +612,16 @@ default SelfT setList( @CheckReturnValue default SelfT setSet( @NonNull String name, @Nullable Set v, @NonNull Class elementsClass) { - return setSet(firstIndexOf(name), v, elementsClass); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setSet(i, v, elementsClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided Java map. + * Sets the value for all occurrences of {@code name} to the provided Java map. * *

          By default, this works with CQL type {@code map}. * @@ -517,12 +640,16 @@ default SelfT setMap( @Nullable Map v, @NonNull Class keyClass, @NonNull Class valueClass) { - return setMap(firstIndexOf(name), v, keyClass, valueClass); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setMap(i, v, keyClass, valueClass); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided user defined type - * value. + * Sets the value for all occurrences of {@code name} to the provided user defined type value. * *

          By default, this works with CQL user-defined types. * @@ -534,11 +661,16 @@ default SelfT setMap( @NonNull @CheckReturnValue default SelfT setUdtValue(@NonNull String name, @Nullable UdtValue v) { - return setUdtValue(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setUdtValue(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } /** - * Sets the value for the first occurrence of {@code name} to the provided tuple value. + * Sets the value for all occurrences of {@code name} to the provided tuple value. * *

          By default, this works with CQL tuples. * @@ -550,6 +682,11 @@ default SelfT setUdtValue(@NonNull String name, @Nullable UdtValue v) { @NonNull @CheckReturnValue default SelfT setTupleValue(@NonNull String name, @Nullable TupleValue v) { - return setTupleValue(firstIndexOf(name), v); + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setTupleValue(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java index 26132bdeebc..50171470611 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/UserDefinedType.java @@ -21,9 +21,11 @@ import com.datastax.oss.driver.api.core.metadata.schema.Describable; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.metadata.schema.ScriptBuilder; +import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; import java.util.List; public interface UserDefinedType extends DataType, Describable { @@ -39,8 +41,40 @@ public interface UserDefinedType extends DataType, Describable { @NonNull List getFieldNames(); + /** + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(CqlIdentifier)} in a singleton list, which is not entirely correct, + * as it will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull CqlIdentifier id) { + Loggers.USER_DEFINED_TYPE.warn( + "{} should override allIndicesOf(CqlIdentifier), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(id)); + } + int firstIndexOf(@NonNull CqlIdentifier id); + /** + * @apiNote the default implementation only exists for backward compatibility. It wraps the result + * of {@link #firstIndexOf(String)} in a singleton list, which is not entirely correct, as it + * will only return the first occurrence. Therefore it also logs a warning. + *

          Implementors should always override this method (all built-in driver implementations + * do). + */ + @NonNull + default List allIndicesOf(@NonNull String name) { + Loggers.USER_DEFINED_TYPE.warn( + "{} should override allIndicesOf(String), the default implementation is a " + + "workaround for backward compatibility, it only returns the first occurrence", + getClass().getName()); + return Collections.singletonList(firstIndexOf(name)); + } + int firstIndexOf(@NonNull String name); default boolean contains(@NonNull CqlIdentifier id) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java index c1b2119cd1c..4e0b8af3fac 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java @@ -117,6 +117,16 @@ public DataType getType(int i) { return variableDefinitions.get(i).getType(); } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + List indices = variableDefinitions.allIndicesOf(id); + if (indices.isEmpty()) { + throw new IllegalArgumentException(id + " is not a variable in this bound statement"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { int indexOf = variableDefinitions.firstIndexOf(id); @@ -126,6 +136,16 @@ public int firstIndexOf(@NonNull CqlIdentifier id) { return indexOf; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + List indices = variableDefinitions.allIndicesOf(name); + if (indices.isEmpty()) { + throw new IllegalArgumentException(name + " is not a variable in this bound statement"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull String name) { int indexOf = variableDefinitions.firstIndexOf(name); diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java index 87cc99066bd..d77b8e884b2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultColumnDefinitions.java @@ -74,11 +74,23 @@ public boolean contains(@NonNull CqlIdentifier id) { return index.firstIndexOf(id) >= 0; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + return index.allIndicesOf(name); + } + @Override public int firstIndexOf(@NonNull String name) { return index.firstIndexOf(name); } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + return index.allIndicesOf(id); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { return index.firstIndexOf(id); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java index 4a1190a599f..68fa9842269 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultRow.java @@ -68,6 +68,16 @@ public DataType getType(int i) { return definitions.get(i).getType(); } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + List indices = definitions.allIndicesOf(id); + if (indices.isEmpty()) { + throw new IllegalArgumentException(id + " is not a column in this row"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { int indexOf = definitions.firstIndexOf(id); @@ -83,6 +93,16 @@ public DataType getType(@NonNull CqlIdentifier id) { return definitions.get(firstIndexOf(id)).getType(); } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + List indices = 
definitions.allIndicesOf(name); + if (indices.isEmpty()) { + throw new IllegalArgumentException(name + " is not a column in this row"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull String name) { int indexOf = definitions.firstIndexOf(name); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/EmptyColumnDefinitions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/EmptyColumnDefinitions.java index fde195ad74a..e6c84a4dfa5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/EmptyColumnDefinitions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/EmptyColumnDefinitions.java @@ -22,6 +22,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; import java.util.Iterator; +import java.util.List; /** * The singleton that represents no column definitions (implemented as an enum which provides the @@ -51,11 +52,23 @@ public boolean contains(@NonNull CqlIdentifier id) { return false; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + return Collections.emptyList(); + } + @Override public int firstIndexOf(@NonNull String name) { return -1; } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + return Collections.emptyList(); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { return -1; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java index 9d8ed828241..572269687bf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/DefaultUdtValue.java @@ -29,6 +29,7 @@ import java.io.ObjectInputStream; import java.io.Serializable; import java.nio.ByteBuffer; +import java.util.List; import java.util.Objects; import 
net.jcip.annotations.NotThreadSafe; @@ -75,6 +76,16 @@ public int size() { return values.length; } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + List indices = type.allIndicesOf(id); + if (indices.isEmpty()) { + throw new IllegalArgumentException(id + " is not a field in this UDT"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { int indexOf = type.firstIndexOf(id); @@ -84,6 +95,16 @@ public int firstIndexOf(@NonNull CqlIdentifier id) { return indexOf; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + List indices = type.allIndicesOf(name); + if (indices.isEmpty()) { + throw new IllegalArgumentException(name + " is not a field in this UDT"); + } + return indices; + } + @Override public int firstIndexOf(@NonNull String name) { int indexOf = type.firstIndexOf(name); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 24454f6e625..17411c4e6d8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -20,9 +20,9 @@ import com.datastax.oss.driver.api.core.data.GettableById; import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.internal.core.util.Strings; -import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import com.datastax.oss.driver.shaded.guava.common.collect.LinkedListMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; import java.util.List; -import java.util.Map; import net.jcip.annotations.Immutable; /** @@ -34,39 +34,51 @@ @Immutable public class IdentifierIndex { - private final Map byId; - private final Map byCaseSensitiveName; - private final Map byCaseInsensitiveName; + private final ListMultimap byId; + 
private final ListMultimap byCaseSensitiveName; + private final ListMultimap byCaseInsensitiveName; public IdentifierIndex(List ids) { - this.byId = Maps.newHashMapWithExpectedSize(ids.size()); - this.byCaseSensitiveName = Maps.newHashMapWithExpectedSize(ids.size()); - this.byCaseInsensitiveName = Maps.newHashMapWithExpectedSize(ids.size()); + this.byId = LinkedListMultimap.create(ids.size()); + this.byCaseSensitiveName = LinkedListMultimap.create(ids.size()); + this.byCaseInsensitiveName = LinkedListMultimap.create(ids.size()); int i = 0; for (CqlIdentifier id : ids) { - byId.putIfAbsent(id, i); - byCaseSensitiveName.putIfAbsent(id.asInternal(), i); - byCaseInsensitiveName.putIfAbsent(id.asInternal().toLowerCase(), i); + byId.put(id, i); + byCaseSensitiveName.put(id.asInternal(), i); + byCaseInsensitiveName.put(id.asInternal().toLowerCase(), i); i += 1; } } + /** + * Returns all occurrences of a given name, given the matching rules described in {@link + * AccessibleByName}. + */ + public List allIndicesOf(String name) { + return Strings.isDoubleQuoted(name) + ? byCaseSensitiveName.get(Strings.unDoubleQuote(name)) + : byCaseInsensitiveName.get(name.toLowerCase()); + } + /** * Returns the first occurrence of a given name, given the matching rules described in {@link * AccessibleByName}, or -1 if it's not in the list. */ public int firstIndexOf(String name) { - Integer index = - Strings.isDoubleQuoted(name) - ? byCaseSensitiveName.get(Strings.unDoubleQuote(name)) - : byCaseInsensitiveName.get(name.toLowerCase()); - return (index == null) ? -1 : index; + List indices = allIndicesOf(name); + return indices.isEmpty() ? -1 : indices.get(0); + } + + /** Returns all occurrences of a given identifier. */ + public List allIndicesOf(CqlIdentifier id) { + return byId.get(id); } /** Returns the first occurrence of a given identifier, or -1 if it's not in the list. */ public int firstIndexOf(CqlIdentifier id) { - Integer index = byId.get(id); - return (index == null) ? 
-1 : index; + List indices = allIndicesOf(id); + return indices.isEmpty() ? -1 : indices.get(0); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java index a51df10e708..2d3c86c069f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/ShallowUserDefinedType.java @@ -84,12 +84,26 @@ public List getFieldNames() { "This implementation should only be used internally, this is likely a driver bug"); } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + throw new UnsupportedOperationException( + "This implementation should only be used internally, this is likely a driver bug"); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { throw new UnsupportedOperationException( "This implementation should only be used internally, this is likely a driver bug"); } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + throw new UnsupportedOperationException( + "This implementation should only be used internally, this is likely a driver bug"); + } + @Override public int firstIndexOf(@NonNull String name) { throw new UnsupportedOperationException( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultUserDefinedType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultUserDefinedType.java index 92cfe72fe14..8c2f91eba5c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultUserDefinedType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultUserDefinedType.java @@ -109,11 +109,23 @@ public List getFieldNames() { return fieldNames; } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + 
return index.allIndicesOf(id); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { return index.firstIndexOf(id); } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + return index.allIndicesOf(name); + } + @Override public int firstIndexOf(@NonNull String name) { return index.firstIndexOf(name); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Loggers.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Loggers.java index eeb753830bc..0d9d9ab0602 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Loggers.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Loggers.java @@ -15,7 +15,12 @@ */ package com.datastax.oss.driver.internal.core.util; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; +import com.datastax.oss.driver.api.core.data.AccessibleById; +import com.datastax.oss.driver.api.core.data.AccessibleByName; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class Loggers { @@ -38,4 +43,10 @@ public static void warnWithException(Logger logger, String format, Object... arg } } } + + // Loggers for API interfaces, declared here in order to keep them internal. 
+ public static Logger COLUMN_DEFINITIONS = LoggerFactory.getLogger(ColumnDefinitions.class); + public static Logger ACCESSIBLE_BY_ID = LoggerFactory.getLogger(AccessibleById.class); + public static Logger ACCESSIBLE_BY_NAME = LoggerFactory.getLogger(AccessibleByName.class); + public static Logger USER_DEFINED_TYPE = LoggerFactory.getLogger(UserDefinedType.class); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java index 6e2e7196d6d..d629380a1ff 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/MockRow.java @@ -29,6 +29,8 @@ import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; class MockRow implements Row { @@ -61,11 +63,23 @@ public ColumnDefinitions getColumnDefinitions() { return EmptyColumnDefinitions.INSTANCE; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + return Collections.singletonList(0); + } + @Override public int firstIndexOf(@NonNull String name) { return 0; } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + return Collections.singletonList(0); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { return 0; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java index 504b5a17740..183e4c17fab 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java @@ -25,7 +25,8 @@ public class IdentifierIndexTest { private static 
final CqlIdentifier Foo = CqlIdentifier.fromInternal("Foo"); private static final CqlIdentifier foo = CqlIdentifier.fromInternal("foo"); private static final CqlIdentifier fOO = CqlIdentifier.fromInternal("fOO"); - private IdentifierIndex index = new IdentifierIndex(ImmutableList.of(Foo, foo, fOO)); + private IdentifierIndex index = + new IdentifierIndex(ImmutableList.of(Foo, foo, fOO, Foo, foo, fOO)); @Test public void should_find_first_index_of_existing_identifier() { @@ -60,4 +61,38 @@ public void should_find_first_index_of_case_sensitive_name() { public void should_not_find_index_of_nonexistent_case_sensitive_name() { assertThat(index.firstIndexOf("\"FOO\"")).isEqualTo(-1); } + + @Test + public void should_find_all_indices_of_existing_identifier() { + assertThat(index.allIndicesOf(Foo)).containsExactly(0, 3); + assertThat(index.allIndicesOf(foo)).containsExactly(1, 4); + assertThat(index.allIndicesOf(fOO)).containsExactly(2, 5); + } + + @Test + public void should_not_find_indices_of_nonexistent_identifier() { + assertThat(index.allIndicesOf(CqlIdentifier.fromInternal("FOO"))).isEmpty(); + } + + @Test + public void should_all_indices_of_case_insensitive_name() { + assertThat(index.allIndicesOf("foo")).containsExactly(0, 1, 2, 3, 4, 5); + } + + @Test + public void should_not_find_indices_of_nonexistent_case_insensitive_name() { + assertThat(index.allIndicesOf("bar")).isEmpty(); + } + + @Test + public void should_find_all_indices_of_case_sensitive_name() { + assertThat(index.allIndicesOf("\"Foo\"")).containsExactly(0, 3); + assertThat(index.allIndicesOf("\"foo\"")).containsExactly(1, 4); + assertThat(index.allIndicesOf("\"fOO\"")).containsExactly(2, 5); + } + + @Test + public void should_not_find_indices_of_nonexistent_case_sensitive_name() { + assertThat(index.allIndicesOf("\"FOO\"")).isEmpty(); + } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index f25730350fb..09e27f657ec 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; @@ -365,6 +366,44 @@ public void should_compute_routing_key_when_indices_randomly_distributed() { } } + @Test + public void should_set_all_occurrences_of_variable() { + CqlSession session = sessionRule.session(); + PreparedStatement ps = session.prepare("INSERT INTO test3 (pk1, pk2, v) VALUES (:i, :i, :i)"); + + CqlIdentifier id = CqlIdentifier.fromCql("i"); + ColumnDefinitions variableDefinitions = ps.getVariableDefinitions(); + assertThat(variableDefinitions.allIndicesOf(id)).containsExactly(0, 1, 2); + + should_set_all_occurrences_of_variable(ps.bind().setInt(id, 12)); + should_set_all_occurrences_of_variable(ps.boundStatementBuilder().setInt(id, 12).build()); + } + + private void should_set_all_occurrences_of_variable(BoundStatement bs) { + assertThat(bs.getInt(0)).isEqualTo(12); + assertThat(bs.getInt(1)).isEqualTo(12); + assertThat(bs.getInt(2)).isEqualTo(12); + + // Nothing should be shared internally (this would be a bug if the client later retrieves a + // buffer with getBytesUnsafe and modifies it) + ByteBuffer bytes0 = bs.getBytesUnsafe(0); + ByteBuffer bytes1 = bs.getBytesUnsafe(1); + assertThat(bytes0).isNotNull(); + assertThat(bytes1).isNotNull(); + // Not the same instance + 
assertThat(bytes0).isNotSameAs(bytes1); + // Contents are not shared + bytes0.putInt(0, 11); + assertThat(bytes1.getInt(0)).isEqualTo(12); + bytes0.putInt(0, 12); + + CqlSession session = sessionRule.session(); + session.execute(bs); + Row row = session.execute("SELECT * FROM test3 WHERE pk1 = 12 AND pk2 = 12").one(); + assertThat(row).isNotNull(); + assertThat(row.getInt("v")).isEqualTo(12); + } + private static void verifyUnset( CqlSession session, BoundStatement boundStatement, String valueName) { session.execute(boundStatement.unset(1)); diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java index d223989cdd4..b3aeddfada6 100644 --- a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/MockRow.java @@ -29,6 +29,8 @@ import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.ByteBuffer; +import java.util.Collections; +import java.util.List; class MockRow implements Row { @@ -61,11 +63,23 @@ public ColumnDefinitions getColumnDefinitions() { return EmptyColumnDefinitions.INSTANCE; } + @NonNull + @Override + public List allIndicesOf(@NonNull String name) { + return Collections.singletonList(0); + } + @Override public int firstIndexOf(@NonNull String name) { return 0; } + @NonNull + @Override + public List allIndicesOf(@NonNull CqlIdentifier id) { + return Collections.singletonList(0); + } + @Override public int firstIndexOf(@NonNull CqlIdentifier id) { return 0; diff --git a/pom.xml b/pom.xml index 10eace81f4d..106e856c7e2 100644 --- a/pom.xml +++ b/pom.xml @@ -709,6 +709,11 @@ limitations under the License.]]> all,-missing com.datastax.*.driver.internal* + + apiNote + a + API note: + + + org.slf4j + slf4j-api + + com.datastax.oss diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java index 0892892d859..52c4891115b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultProcessorContext.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.mapper.processor; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; import com.datastax.oss.driver.internal.mapper.processor.dao.LoggingGenerator; import com.datastax.oss.driver.internal.mapper.processor.entity.DefaultEntityFactory; @@ -29,14 +28,11 @@ /** This follows the same principles as {@link DefaultDriverContext}. 
*/ public class DefaultProcessorContext implements ProcessorContext { - private final CycleDetector cycleDetector = - new CycleDetector("Detected cycle in context initialization"); - private final LazyReference codeGeneratorFactoryRef = - new LazyReference<>("codeGeneratorFactory", this::buildCodeGeneratorFactory, cycleDetector); + new LazyReference<>(this::buildCodeGeneratorFactory); private final LazyReference entityFactoryRef = - new LazyReference<>("entityFactory", this::buildEntityFactory, cycleDetector); + new LazyReference<>(this::buildEntityFactory); private final DecoratedMessager messager; private final Types typeUtils; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java index effad305a1b..4e0e4c603c0 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java @@ -21,10 +21,14 @@ import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; import javax.lang.model.element.Modifier; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class LoggingGenerator { + + // Reference these types by name to avoid a compile-time dependency to SFL4J + private static final ClassName LOGGER_FACTORY_CLASS_NAME = + ClassName.get("org.slf4j", "LoggerFactory"); + private static final ClassName LOGGER_CLASS_NAME = ClassName.get("org.slf4j", "Logger"); + private final boolean logsEnabled; public LoggingGenerator(boolean logsEnabled) { @@ -45,12 +49,8 @@ public void addLoggerField(TypeSpec.Builder classBuilder, ClassName className) { if (logsEnabled) { classBuilder.addField( FieldSpec.builder( - ClassName.get(Logger.class), - "LOG", - Modifier.PRIVATE, - Modifier.FINAL, - Modifier.STATIC) - 
.initializer("$T.getLogger($T.class)", LoggerFactory.class, className) + LOGGER_CLASS_NAME, "LOG", Modifier.PRIVATE, Modifier.FINAL, Modifier.STATIC) + .initializer("$T.getLogger($T.class)", LOGGER_FACTORY_CLASS_NAME, className) .build()); } } From 6379b475a9cef4a94f02687450237edea0da136b Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Tue, 21 Jul 2020 13:22:28 +0200 Subject: [PATCH 531/979] JAVA-2847: Store Fallout scripts in-tree (#1477) --- performance/README.md | 27 +++++++ performance/ctool-cluster-info.png | Bin 0 -> 20505 bytes performance/graphite-setup.yaml | 102 +++++++++++++++++++++++++ performance/metrics-dashboards.png | Bin 0 -> 80419 bytes performance/oss-performance-setup.yaml | 79 +++++++++++++++++++ 5 files changed, 208 insertions(+) create mode 100644 performance/README.md create mode 100644 performance/ctool-cluster-info.png create mode 100644 performance/graphite-setup.yaml create mode 100644 performance/metrics-dashboards.png create mode 100644 performance/oss-performance-setup.yaml diff --git a/performance/README.md b/performance/README.md new file mode 100644 index 00000000000..c4b6ec2e929 --- /dev/null +++ b/performance/README.md @@ -0,0 +1,27 @@ +# How to run the Fallout performance tests + +## Setup Graphite + +1. Create a new test based on the [graphite-setup.yaml](graphite-setup.yaml) template. +2. Modify the parameters to match the scenario. + * Change the dse_version to match one of the versions you plan on testing against + * Change driver_oss_branch to be whatever branch of the driver you are using + * Driver_examples_branch should stay java-driver-4.x +3. Run the graphite-setup test (wait for its successful setup) and get the graphite server address: + * Navigate to running test, and obtain the ip from the observer node this will be used as a graphite server in the other tests. 
+ The ip can be found in the `ctool-cluster-info.txt` file: + ![ctool-cluster-info](ctool-cluster-info.png) + * Login to the graphite server; the address should match the observer’s, the web interface is on port 3000. + The username/password is Graphite's default: `admin/admin` + * Dashboards will be loaded automatically. + + +## Start performance tests + +1. Create a new test based on the [oss-performance-setup.yaml](oss-performance-setup.yaml) template. +2. Change the parameters in the same manner as the first test, with the addition of the graphite_host parameter. +Use the IP from the first cluster’s observer node (the previous setup step). +3. Monitor the performance on the graphite observer host IP (taken from the Setup Graphite step) + The performance tests will report metrics in the two dashboards: + ![metrics-dashboards](metrics-dashboards.png) + diff --git a/performance/ctool-cluster-info.png b/performance/ctool-cluster-info.png new file mode 100644 index 0000000000000000000000000000000000000000..041203cbf42c5ed92bcda0fb4bc34b97a0baa51f GIT binary patch literal 20505 zcmd?Rbx<7L0w>xyK_|g2!6CT23==#^aEIXT?hxE9xVyW1NFW4v2@>4h9o~H3y?6KC z-L2ZUZ(qIIs@MPZRCiC!bf0s6+F_sMB+yX^Q2+oymy#4!1OQkp0DzH2hJ}8@S?OU8 z0P?F+q9V#}8Ar>$PFOpc%+K<*f^{Rb_%S3LYauFM5CZiL>0_l|NiOvcQsD|lb?h~I z${(7DET>oJm~L5DH7rNSEm9ntY!z89*XJB#1}&##)L+}o8OXn0Qd&9iIxBPYJukd3AWfO15my*g9aGa{QS#IiI+Kq!Ha4cF zJ6>x3AWb6{UTSeruU9gQmSlX!$r}(P0+SjEf}A`3*?}|=B#lnp|FsViRdoKQuO{9> z$&i){@4?v8nNH~<;}C@`@J-KVO5Hcs5N=|UyZ40Ge9;Ko^;rtH;LOqv<-$*iyJ%dP zA@2h~X$pH6I^Wi*L<4J4^NuZo9(rPo{0BeRCrODpYT*KNW~?&_!D)z`B!-!|5fd8Y zHF{igDx`r)OoxucY4#puubUMbPeO~ISGxusbj;0LOIdlAm{+|eBDV_Z5~Wo%Z3Nag zf|GtiY%GmSr3>jQ1?KTPR>u6*e;KlWO-?FNrs4}25#-RsJ#U$Z?QD(AeJ@?cwXf3A zuAl1^41#=wrS7-#D9~byBn1|h-a<}zW>+n0SSpq$uwnc@bq*|vs`4)BR`=)9EN=%~ z5d(qRxLP?mZ9N;#zZ>;k*@dU289rAxAvCANw07zytGuqNY(g>sSsMPprpOO=rHUUw z&FdlM#tVr6dC07q&0M}lphEnRC{C8INRUFl$ycci4;Y9u1+;;XzIiTNBW#5{*0ATgaN8-4CazFt@W!wwfM)$jnm_kp%Y?wAv 
zzi=YR2G4-Y$hRDjUDn_MR#{Q~!XH&z8Jr#;{XQ=J0CI*ht1L_o*;qN>aKOWmLZ9w; zn>l`6zQM)D94$YH?bd8^3MeA_5I9~508WnDUo9u;wetxR9pgf7GApP9L1|@F>BJH- zZ!-frH5BMp?&$xL^&uynp^~s z1)2;_%0#DnERjW~gAIOut}Z%=3O+LkSTbnPm`>ogpJF2vSOfWC1Hg9?3^N3D>`c^W z5E3aOu-a}H35Fq`I_`N_T_=5^^ZfXGg zrV=%`Ca1Nk(uYN!4UvXfi|SZem-`Q8leob;a`Lo@Ak@}Oo2-d4^BkOg*s9XeAAO`h zIr)hKKEzDC85rU)=$@Jw?gPks#6KFhHuA!f&Zx4iwJnGzMVyMnEbkT5JDu;nV}c+@ zecMc7V|^{4cp$&9Op1z~$Q2^?UM^S4Tr8@Ra-YLYnJy~#EuBS$F$W+y#Q2lZy2+tY z+VmV-)od@9j6QBj-7+rZD3LHo+S2K@ic5qh8t`>iQwavW$RTQUbrWYcjjkgVj*KIr ztCq4m+YoVNRn410=HoN)8p4k8K#WWls2DKd4}#Egru=h=W2eC<}{=8c90R zm?9z(Btk#8&jJT(&D=V5qc^@v7fKgu5@PC#I-cxKpkA}W9UoAzW%oQWzCB=}ebfJs`F?;_*sNbO9u_M;Zz2%Ql z&E0L_qUeH9_2XTC*}~Xw%BZ?`St?IMBlv)JYsvjx;y1JzK5JJCOJOb@f{f`lZ*H{N zmA89oTj7dWW#!5Qgjg?%AWcyg-XtcnLpB<;n3fga%w)DIIHJ_wi=K0`H}4GU=N`Xc zqqbH)I8o6Bl{HK%kBJxkCf>2Uv}aeuhp?kblqYdO&n5bH%rVrLpY$i<60T81#&~!= zD*&&G52?wl`+ZuUHv>zpUuPRvNgbc15&5VgJ3*!LhbWW%ji8RPJ>=b?ua4d^@U?m1 zrmNKAIYnhv7rtI{!ACU`k!yoflNDmTFSsl+(S?$$tv1e4bJ@F%UH87{zxj_vLgbx& z8aG0SLX%V%_o|w*U`VCznyLe=5Ea z^Q`?C3ooXob7I(Priv5u3$4$bW7*7a!nJ#U&3fr>iJ}qtkJmoEZG1;GRqlNEL6?`X zdu{uIimmv-dc{fI6~`D%5B7i)TxBDimi)eI(Y0#KLRj|%*3>lqSH;(WihhNi+6?za zTAf551tE`tQyXjs7{?+iWZ0r)MeR7ApXhohSQm3fMV0g95o-d;*H;RtAFQrx!z|7B z=UaTOmJ@~d3sy|5u~k|Q2AI>fbZ+@=hy4py+_FZwycmz&oIXpntK&mbLQ>YBo2?Te zzU7Or=iN3Vp3)gN1x^=^B-uVrX`QKdX4rx*b`2I%ZANqN-+oF>d)M(*<&1_oZI*$4 zsq*sKj$+Dr#9_V3>84pg_<{KtCET`c_m)j6LFcIGzWNu*SSus5`yXw2-Ph4{4&-Sd2?jutA|ug|zQj~e*Ie1yC;m9H=XWNLnb{5- zCuID=*($M}0nwFj`Sj55yIx}{RoA<1%~ybHABCF`QBvaU-kEh%*2|Po7zY>XW>KlT zFur>BAlQ$F;A2^7$_Hm;E-%!+)40>u6EkWbAc!ZYH~c(X{^EQvP6La7$aAy!c8|Wg zQ#qD1N-(DoF2S&r6noa>1^;ci!FPV}`5gbsbCS7Z67poPWwzqZtF_VI1gzXyB5U`9 zYdmH=xX^Q|`@CVUCd;yb)1l2pGjl}1h((e0a%n5R=^biKCkMq$g-ifioWoq&8gckV z6oZ)y-e8|OKoFS+US8QcmaJiP_wA64nJpZM^e!|SP{iQWUy%Y2)Zn1n$c5Vn6co~IsKUE z==1#tu6V{^N#;gp$I$>v9OJ^X9ihilhwD+b?85l#qwx(m=v0EzW2->V@ZOed+(pQ3 z^ZSwA~=D?A6zU!;p^Q2Sc(3N>rw$Ol5~9n-y0cHz=B(Y_vVBlY}A7Bo7%A 
z66O2$O%TRUe#ys|2Sr(T`*gBm%Zd6F03GezKcr{KRw+Eh-Son|ky#eCxsn(dM#@ff zOq#OepN4A{gpEt!bqF3cIZtY(apXxzcGC{YLET&P2@5THk9?{CqIYsgAHiW^J3rnw zk<-m3!2bC$dFMM8FFU)V!G33dNrT02oca?p(7T%?ersDOB7pYU-D z$IC0rOJ?@2T!wsh;C|nC?_$_ZpMLLyt-=Qq0{c;pMZ!djz2Awpoi)O~4vXjlb(<5y zk9*H*q>g>#xKRL_@9|Fl>o4Rqhw!0Lu!E9GSaA%F#PIxnj?(u-E6nfHZvoEzkN}ac z7eO_#@s-0fE?)FezRdipv{X$u(eBT!c{_>nE4Q~1X1hvFAxwlb8~C^T-%Zy*Fn+^D zQ$cp7#KBr`Hz{st?bj{)dfiive0^ImQ*v!2#&j_1j3`3~oO0{4k#YdQhl-Jt78&icrMAiFIeaoL#Z-Q$C|CK|#VmA>-ncVk+Z}(EK6Tit z=R04nj4O3fM|M7ghv(Hr?=_pYqxbvDX><5&>~>&TJ|KxBV@I8?h+|A-3k0DVyN0B19qxcx=mw&(96p+ z?|_7kVhX7Tjd0oD#6;&yZv)-*F7~2V$0af&6OCosUM^?07XyH^RT`_UwV@GjfaRBu zL*e!MGOKL0w{(JId(|E;9@1{RlJ7nJe1m2jLJ*N^JPC=jZzx=^#tNscQ`!rj=s8N| zuMInnkXvNDlKOV?vUAb7P4TCzs2*f6!>+|?fHor@x^<;WSQ?3QhGIGX9w#VpoMfB4 zHbpt#kS94`2p=ZYzDx5%Yb_akEm*d-sx52F5Mb?M{3iC^gKrECzr+2w_e|cfPH=_Z z)3`k4<&gX3`PuWt+wSo)5&7z|eC5HGzjR#U<=Gyec|;7D-%;l)n%;?hZ`3w!{G)yD zl|zur7`v)@`I_iZSH{syOjTNURS~&~;8p4NEns9qk3FW@YzhO6SWP$Uax9D~8sXMk zH*)P@AO!&C=XomA)#{briAP73`vXfn@de(c1`)Lvc}J%&^(OIUI*Qh##x~IDWV1bj zu~_rSch2gq-%`h888=sHJ+P|VlIWNazh5~d!P)u!(^;flUgLT7L(t{$cUhvt_H1l8 z@EYO}SJQR?d)1}P9hb&T@S=pG*UejU1GGlm_L-_V2Z&AX;Aie~tVosD=MIdf)pYvq z{svB}7f({w;#v^S7J^DIN}eXO1_bJMxt(T?`JsyIlFHcSUg@i!1)+QB1`Hb7T6cOcqb`Nahyh-U zjrXA3`LBX{jlGa8!gf7P71r`v|8bd~zs@OjamPYq%6&C)P}z>KSh`Yrfb=VaX1YC_ zvm!H_ALmPB8=aa}#HeO|pRJ)-1ROPzhgIP~ldZe0r^Vv%Prt~Dg{}Tmi3pTvRYt-u zelcheRAraz4)n`hcmIJ}9!bhKk6ES-pN&mT|4C5(M&5t(6a8i=C?_Bw=Ryp|QDHB7fqJ35g8wJ3Kt>=;(kV`zRvrNe;OaVewO+=)S;>mZT(H z;wA`ch$#7cCZ?bBZcG2vG@@0>{+2fWL+AK^XQyDy6$Z=9*49+xy7oyNbqoWj5XmQxqscs5<$+@M{ORPfND4P8FIxG z$Oc`OsqZMI;wj(9Noq4Ci<0+bAwkUi)mtuz8HVT*2m86Wjwz5KrOHhW4a2Tv$YdN= zYt$0|l6i0gmzI|LU5b#%8UV(nO#nEwLvu&laRWh6e}9qti;kg!{-N|R{M-x;B9z^t z3fHIFD*E}!DovK_opRy=EW08J1ewt}f|VDxpZJfR(`K>R`~goejW<*WU`M_2iQWth zWpNRi)~U1;zKvt}L|`?`R%&K51SOySy#|MlGDb_fk_?-*1cM)y=~Sf53rm^4t@w%| zw^k+=LTO3ClJXcvl&}3rWAOQ#L`W{(d+up2#}Ag`^Wq0x=aX*|p-1r4eJ!t6COBA{ zbx0iCy#JhC_9%JkGu1<}Rpx!Q^eNz{v`(0`O5UPtO)~YOSmt{lsTN5eU-E 
zNiuK%f3X>;!9QTpfe(NL;99-jQYTH=u4W&UwPcYF>I5*E6 zf;2hNdnm*${kV9bLeWqWP6oB5BRl|x_o`2)cWue=_-)nA3B5l$5%vxHLU}p8pGzib zji!x|ilBOvT-QoYkr+U(A(mJ)x_d3bp$ULcoIIkG(MVO*_EE|Op2vNxDd|8QVxfSa zYC6$00CX&F71Qkx8W{jNU+?L}4=&+vhI#?mp06q_*g)t^8YEy*mK(hKXG?L2H9(ykW=7=(9c^%e0uwR8T`H<0cqn)_%po`Kw?p)Xf1V?Z zL7b$F=)kn`17oyDI^B8|CcsGV_ety42M}`;T3e=U&{UeAogU`$XW%E?=M z@bt$YlXznt1|49F1~M3}P@$it&myZ6M_Pv~z0D>W8d#a_J{s zj8E7>{)MSHCI;rT%dm>2@{^>Yf0~1iv*1uaVBn?Y7oo_3V34Rv<`&HZCjgE#Js95d z@FZ1m8Mm<3AJ;j+VAsUq?6`T8Ca8LV(FYii$wzf9E&Uq#rS$~_aWrF%Rt*cwshrJj zXcxhybwa%GDdd@mi0ms5c6Sz%L7QIV{%O5|7aMcZZ+t`J>A zEWZLA&GJUHZbvw>hFylG6LX=@7horLMHJS zmeCodm^d>IzB6g)pi#rMarkSUI6op=US77GtI9l~13}E@zsq6->w5{kbK!>unqmDYlfx2wJcPu|5V2RZ><<329Iyg z-N*kDwNULaW$s{$N3K-4Ou4N8v?s#M22F5v^5AM6j%nh1$N;PV z^|ed%h7%u&2o>EGYJg!BXDNOW>AZA1Cnm|9idXfrVB0iCs4G2D0nNCYn zbGscB_%i@{_UNcwm?+zdN>0Bd^e-4K$wk0OFf^E713SLjnH-0Hf&W{HzjR!v#*-zX4X%z?z{9h?{n|-ShB> z2BX=2k;>vZy71G$0nXh<#f0zn=AD+7?#r&N3!ZF|NbZz(+s1*tYRjMfHjY+WS*D+y zbA&@5l~MjyvG76#>JV4VKJ8fqVESBZ41FB5?m`COW4_WKDdwSw}W=QODGTr+8qW;HDY>)ifcaKP2Ku1vVxl847)-P4nWB3nveQ zX6|pzBjo5gSg3I_>Nrg6ztc9usq0%eXUMJPdp1O^M)Yn-SLJ*mD?2Hk#@FSxSPf}y z`GaXwx6kz$m$!HBUjK<6&)f{=Z{D$!g{S(LE`3e_{1TKv3a>J{1MGV6Uv&34qdZORi zi&?O7hG}gz>yNow3{Qg7zMZ+gVfQWQa=uklSk8qLj*l+eI1Va%vA@<>^nX`*?7rf3 zJ6tuF-R*R}8I`5wI*_?i?Y1N4_F@{c(#nUdhWQd_s-<&uJQEh<-CKyo|L5*d^|4-U zrNwS_EH%N`Q-4SP&wU&R3F-cQ`Hv7s-R3QhqbE#VKi}v4i^Y8m`6{dn+N==R%-@P+ zcyN9TpQv2t5EsA{^5=3$2a(j6);=e(9@D+JA_~}IFu*4#mBPBw;UGg&q8+^8af1Nw z=v3!{Dy2xmN*2x-_r{A;{{AtXLtdXtU++ud)Pon*mnqa#9AWnl$&7T~534l3O;S$F zzNAL>~cjjqpu@e2|Apb?f>EQOv7i}xD6rIoaZ1l|Wj>_5B_w*)&-s$@uT=Z-; z3^18nJTp^R!e1dwD*MOh=PIxK!T!lX%v{&ez0mJXD?W-%uNRu1**21kmrror)yq0x zH|O8Y*A=fll!twJxnD!FeJa&H8+DyLcsYA?_Nx}=y^0tQe&_4^qv@r}*Hmj^Uv1^_ z>8l|eKBMfD9IOAb=9-N_$0(=%ee|a`wx0xO)r_w7Pr`*_FrwTV zkHfTy;^5H!lE&M2nL@VwC;Rcwx9yWN@Zj9_%x1@`ok;hm1sc36fx_E#9d~))o5l9G zs;TNH(}OF;Z5hYiS6AsP$tJL1pNXmyyoHxDr+&Sz)YO)&u$qqSWM73kz`JF(Fe?$Mn*&t5L_eglx|(dm%b zcFw}_XJ?$wa=TgLm;Rd<)Y4;`9v0VC0r#6<( 
z0eadRYSiAexPc#|9TE+9vtbo-8hIMr3Q05#Sa^$MPqn5|hA34Z?>)xmc-_lG#_abF znpqC|tLy}q;L073v+33FHoxs@&E@WB;#H{bL@`jS?G(E@iy7Jp^YXov-JcvlDP@I^ zbN0tHGiFyBI-7`f39&2Ty9?Ks+)r&62u15bvIqTH5#3#X96N6!;1ZL?8MT}{kpO_r zL3IZJzRCLDWxuS9tplW!Pq)l}-V;wj{v=Rvw-^@)-@g_ZsQnPX36N4E`F!heIb9K6 zyzZxIrg=4C9tt0gkI6MLgHf-Yv1;sp;emSf^G)&(#d0Ur7>y8G2G{AY3Qz+u%bDLQ z@#n7Rb?;rk_s_iQbqZ>2|&4 zyU;k08Vj3yK6)I(=*%UWq49%)Oo5%p5Wz9wX$T(p=P$j91TkO3MHVNrI38c8jr#S2 zbiuetj@|tZDwc#y!))SF$8mKcRrQRSLCQ{qFS8n!Whp5T7JLj%Bf|jyoRYX5oXJZc z)OWPimS`3?Lnd{if!j0jveAFL)cjDLso;TFzGBopuXzX4HP?(6_|_=@xKo!J&WNe1 zt`iH_-RX?}xrEt)aS85@NcF?$w%0AK6Y6Jl01&NqMc>5Rj0`Wkq{$lgWI@WZW1d8! zk6;IfVoH2Gv~k+}p6g$I@*S(ZaA8K#x^(^N#rgK>La=R8p-(t9d1cYP5-DqpfN$SS zEo0c%Et1ypYG2{qi;`3{Wq;Pv@~rEh6R*1??zxvmBw-;9r^m()`&rpnC-sCD?L$YS z;b%0BuK>_MnZ>YG;f%W0YK6%gXE^|X2L%RZQ-Cxpplj#?a!})M;o$x<$=la0H6YvDQT2 zjDE8>H&T4?;R%476E^qgW~}y7I}c%@{yl{3;4GxwzI4V3APdsS2M624v>e~c7K^}Wo-9VZYWwuS~PgEe|$Os0CKIa{`dnUH zvhI|gzu40xbz1DW()+XtVttNsQTy|J^D;Js*X4N5j1#gF{e1i6`{#Mc=hx_>hUOWn zEC#UIeS06r-~X~-{pYD%*Nt3H^WGH4mrmB68FQPL?PFK_!{!U`*?BfsWL^Hk3Wxc^GcG)-}y0-FJWreKqsa24~>TuKDW-@a++Nc zDNTX>7LSbx*UcSSKihDSHjUhmh?x&L{Oyy1v(nv|Njo)1LE{?Dg$^v_BLOt3!8*fa z>r6+qx`#&Az_E(P>p{Gx4uVYi53sU!6ucQJG;?Z;c@Q&e(sz&nlgF(lL-JdvmzO(T zju!5!U~|ttJIMIy<7bbBEMstR{{vP3_)A%cLNfhDQ)9Etm$jqAl&n}fJTgKh<*;rI zlGgdJDu&hO8Wl3*C#by)^@)=tDH$0Vd8~p=5=F@sN|m7)bq(qbW2Y6Jy|6=&(R==% zyTkueN7-xezM!GGxt<{cngPLO3lmk;){fLe^4kWe`+GZdnfvXf3l$3$*IZ%qg3Nd6 z5ju6N|K)9uzxnTwH$*B9s=*Y9lH0dxzX^l)d~2rq@5oz$^uH=FW?|5u2>zD<14^q{ zTGGyl{ZNno59A$zEZ3LI>o?&4N%3FEJM_OGZ>*mT6#rowBaiSwS8sx`Gf3Xnnnyy< z_J(P&H7YE41wG)ZS)$FPyXs7~J`%wJ8(HuV3$KYk$R$+? 
z8MCVLhpVS5DlFi~FnGB=TbJ>BMFFD2z3dx00w%cd#Ln+)*3@34hgb7Q>Cqc%%Osr;>e7YU-xZVsFPr6iEl5}I9=KVLPlO1p|64m5 z0B2k^E{wVaJ@}KVnMrdIG&A8*Eh2$8F|Cs}=HL`^W-GX?TDUtJPJ-8#$CH|M=d&^6 zm=~>S7M_~#^TAg7Tq!pKR73a+L&gW@S#V$8$L}F(S|T-NO;;KT3s^=jKKoyLiT;X3AYeK8HYWFvWg1`UU4NykA^8o#JP`$^ zJiM-@zF|Jz-Kr$P9MOb;ewEF8u3IAK5@Yw6bfl&yjy=}uS_M@7P<<(keeRvdvEkpZ z4o+Q1e3GtvNrKL?wpZ0WO^)t|L*+?D?cw2gQHm{_-ByD902%qID4NqFf#IAlJZSGr@_Jl@Nghn>7~s4&HnNmuks@cqcUeC~KV|bjfAb*F>=U zToCGVSDHwSI##Kvfmfp9_z>Y}gvyUJ424g%*S=YMKAn`OC(R}IVp&xUOS`3n&C1AY ztK75sm89!zyD5>4gg$5sIZmmi4Hg&q2o)tlgFf*6OgK_A#*=Hl5?_uzYy(Tb()^KP zP&egpM;*q9VB71%7FgVUH5I=9<$ZG4I0H02#|6>9n&-@K2VTc0Se^;k>^B2AiFT(>t=b*2z;lNpIjl%Ia#c;iEtg}Alk@VS8 zm!bek1I-hYXuz+}1qM8x$#5x!^5HrwN<_cGUkit z*1#mAo}hN(%MedLrop!w40(=8XM(l!0V_HM*S5ZMcBoJt)v7uHS%K~0Sq8{V78CeN z%rZ~XX2QRWb4UOJK7R(dmf(Oc2~r0(2_e9g-F;~3rkOM>E>cUp0PdS$=yeDZ8z(OX zoDhKqhkJFUDZobS9X5Yk&R*zFpGb86v1wv3Q3^3A?`aJN!aiA*GCBnSMsh9}f&9{{ zH~^SW41<|w5(e>@s)NNw{8xF*jGDc57GKB!tqV_?5oYFn5Lb+z2@CB$G15Uwh#s}d zw&9Ee;HOTBT~beUnX#-9wdaqU1P$Ujx?;|4LSYt*9rMJb`QU7K?lcrAhqAG+t!220 z&LtP=IpcJw9xk+L!+-N}n;lOuYYcnlh+k6ArdGnuW(dF4bZ1_);U!k=er6on9?%Po zv%3hyS`VB3N?C3p7e=coJ)x*U7TDMn{80t>ZhkHd(z&cCB4O0=$E*pL2eVWHQ$5U4 zB1=_rN)c6iGVP!vm=IkYNmDY(%HeRi-Bo-cmB*iYc?Yy(9RKvRCiJ-u03d1$aH-{k zU3Z@mG~-`c*&>6Xk=wOVpPMu2+WEm@vlDJ`c{DP=nx?gISr;z{$K zw24!oqrU2*3NYN}B`0bAVbtaB7A=T-h)s$GfMtqMGoJ(i1Ymr~hDw)O6>Ssbc&d3p)ek~;_TD`CAz6naf_I^HeGA2vZoml%tj z$RI=So}y=`P?Tlhzcq)Kx}(sf+s#vNE*rr{AW~ejwrv_U^Fs4)jeAFo63?Ir7_eSo zCntE@=rsTjE+~#UA@Z_&JEC5?+%UZ8#A(O>w$*#c{?XwB4M>VB<}r8KWqUS-pXg0h zYDF0V9@KFkbDW?>X3g4g&x!%SFiNzyG4q6Q#AA*k@=;Urbjrg#Ba%ZExX#T#v^PBM zwyY@!&jiLU1n>+5T|)3S2@!NPgK$%*7I#wk6OJ8$3KnGniVWoal_NYhslF=Hk#u$- zYaH7+UGAy)c%L0T@xX%4U{)#HA{rBH(gZoHaQ4InK53<$4{MaVKm9JV z@A4u$7}%dv(~|QlJ7dnDtKd3T-+Sj7SF@XbP@Xzw@ly3WF6;eDZQp&`{N_gq;m27*h&=nT-y)OC<0m;&Pm*6muA{oZL= zFn_JzHC;tUg~(vthVXlw^2X{qk``gMgxn=1D*>Oq)wOLK(k7O4D=KnULayI~R~`|q;~H8baNudn33nL4kJQmTb<&L6;!J_dEcZ@aKxpcv9M>;jB4%{e 
zOeMZ3O_r|S_0fDbq2bUh%J_41YAv5-rVTwSMr?y?lizE3nSOMtJWqj_9X6z*%{ z$~3m{BI=ZADq0U#Z_Ukx!Iyj2CVFpH{LRIxnGoc@qQkJ}G_AN%WXwHmm&Dg;YPWiJ zgDDot%^qD5DN>gn2R1z}`>y-)9Qrc_#!Sl94Uu4&lsz}^TF(yz==ca z*MC|7ShfM<8UN-1LJbeQ{piMxJ3 zfftSV{ufWIX@vUSJv}|!+uKmT8y9M2d>%53`wk@@(5v*@^JPYphTc^n%AKOR5|5R+;nX? zmDmHFk6)u6=sU_W~K(G00WUWpfwr9C_U| z(I&5!r%0<@m{>$X(f#;$?jMtpiF=mIG3L?4V1!yFY=*lmmm-m>E#NH%a4R0L4x?h; z+OzOW+~naDl!hOzI)d>(&%+A*H0a?A_$_N47u`z1v0a}|oQvZ5!xIhX6_;kDNyPNl zxMD^A!XTX$dZRQ7Pt43`Rt4w&vVx5UU0bfUQj31Fky8>Z_{w_TOEy^W>JOpf#-B#T zI=EYxzU}53ms48YL$eim4!z{~yHV%dwq9pT>0n?Kf zBod*SyP4>BHl&)G*2V1$9*BOFIQ(2%U+H_pxS;5za>OGX^%4ziQq&=7GP^ke`0IJf zK8f-MzJ7Lf!itGUO#_Se(DDmB*{DRA210Iwa zIW;D}EYGRf3Kx^?vdr_N+@RqIfGA3zs%;4u<~COf6(mF~Tt5y>S-B;*nOIJWQ&WnT zvT0?i)+BpKT)l5?`rH66*QW(QLmZl^ok{2cNyyD-U$u)1L;k{s3W*&V00t zvyNP7m;dNtee^p~iYO{H*D_M^(Peq}3_JbChf*kY?@~o&G&II zwU@?#0(ZO2OHp2~rn!b=|DQ`0O81A{3q0%Gtr?_gUBlYz*{JQ^PxJx)N{mgTG{d$> zmzuf_HoKKir1+t5m&+BEFKpkj+g}Wdbzbwk++Y7fnllC+Iown@<2N6euR?j*9?TID zzaXyjkF3LQXJf?L=QnKL1mNm)ouaw!)%)zh50*BI(R%W29(DPrdJQ@{y58U;-kR3f z&i(z=;}Gb8FHaP7_>)A3?KD4yziJ4?#m-?Bd@X|jwH<+Bw>m;hh@g$Mbl9A6mN5By z)~>spTrNfZ>PokUyEPS_?8SHQr)4hoQP201bv?KCezn&3m@zob_6v8GUm}=ak6Q|= zjcSu+JzSTMDJqSrUaCVqQz%=qzK&U|rK9eEqx9{(II>OUG7Te@?YMsLoCP|T5#W7o z(K(USY~y?8!0Jb(0 zq*4#QNj}n5Vqxjywjr;3YO*LB$WfukP7Bx6wPdC11Hj1|j56OUl<2W7GJ{mqN74d| zz5m#9tEU;HJ#AqkMSSDjRrxT@SKpnpke48rKgzLxr{)sceS0cA8jM&s(DkRHxVdV? 
z=d>;bAI<0SfIZGsR{_sR){xt5;0H8_fc1#m_&xsI1kQRVK!RxrL*fL+Z``*UbWEik z^N*zN*sDA$4ZF4Dg7^)WhprzzAK`%5DsRJNF%)1AoA*&aUJ?nvg$6hcTRg#(@E;r^ z)=g{77=?ceK=HY+Z$kY5r-kr&mK!|0|8P-=JePSL=TKO5T=G=??wWqfvmNHR*p0k0 zIiD%P3RXl9&rXR9pXuh5t%)krsWvw0M2>E}t2qK5!u_*C1@@4nq}RkEM;~nCHibgw z4;`lBz??-BE{%fzLrp$)(>eqkF15^f#H!ymb@&Iw{ZQs)`}WH7dm>`yihd(qM#yyj zkDwf9uKw|1$w*9(OqWiwb+2j&UmwKWq<%L7b8MLyKJ;4DF& z+-($tX>_~@0C}ovb$PXg)pkQqV5avUZsKdn4ok#)feM+ClF%r ziobd{i-l;J@De++6O^-EGGUUN%Y{qCz4Cvnze!8pLpwamnD&K~G9?rAS!U@KD?q0I z7zTz1pbanM^5N|?NU>OIha4H#RYVoiontre!(_AGlib4IO2)%_zTS@u{_F(7)C8{i z9|*p#+uCli@tv207wbdsvXBk!a_mBa0tM-q(~EhdM{t%JYrXb<5&|L{pU{8hD%+gt8n4C9>56zWjU^ zM6Fh9>3VRcG8S} zlS_-?D)*sV$bZKzYsWnc6_L<-6et~+Ik3su#e4VeWdx6x`SHwmvEK0L8=k4p6Z1*; z&2x8_0G_n3xXyK_8o}C9wY=BQjYbtS?T<}ktgiou1cSi_e;glHOXWFahfugGxR2Fe zp9(J^!CKHOVRdRh^bmLo$MVv1Rtsy1!GVJt)b zpW)A1Wc7r(WiS$bqpD}Gs^xpvbJ*AhZOih~cv!zlBksi`&MiV+;EM@IlIDka%X0Bh zBOK0caA9LxQO+ni9EeA)(Z)A2cUhs)q`z3DUPt73ER<3GlT|)Low0veV1aCGY*6Y$ zV1nDNjmU@#HMZ27Z8Bx39b2`Jj*ceGoYv7nnhtgK^%v*o&_lUwmUzw?urJkr5S)iW zBW52d-j|F=Gw9zNoc~eh{NLS?*U0_~ z&5$J-Cr{ouO8%W~^Kue^n4v&>q1#s;ywOT@iFAqKlmmB6rih~X6UWCk)pPU-15p0C z(@q>XPX1Sn?B7FZLy=_AFtSvb=(}0qLl*dnJcU>ZmuLGlIG;jT?1yfD?KiJkc9eG|?1{-` z*kx}rd$!kDj6o1~l#`tlHZt-^9gwpc{Su}RC8{!FdZP?o+F~8hA|6qi#WBrMvWI`r zpdLB;CHSeXwF;K3!3pZH&PGn5hP6bMc}oafA>F7X9Y}v1l8x$F^MN_xH(+2^fu{8) z9X4LS<8I)!s6wY=zSZ+{^~ndL7xFeex1&L{kZyv&>Nt0zF1Ei9cXz}XO;$m% zQK5Q$86CBd9U4l012yNSD3;^(;bF$NuR$LDwTNl~LHbA0MaV$lzU(%2-Qe1_haiUF z9@o2y9e{MRZXOn42K{aIP!)Q6`dzN70gPuOV@sdc@X=LUMYx@HqaY|iortIipMbN2 zK2Am#gpSzS%A*Y}^ukJ%7sb%hvf`IojohO++v*Oz78pwx7muK@Fh=lRYM!ACqfaS!(T2@3iDN2;Pqr z8Ssh?_<}SRoA>`w*Zd#drVYy26xR9jJ&%0-j(O5%d~4jRO}V9pNJM!acv1ww&n^~D zQv^ygv{sIvW6i2nk(=@0(y+1a^MB zsB!Wj_RRNMKatqNOh$Be;Ej)l_gJu5EalZ4e+(Otifn(6eP4$YI z+YqqZ%G=*jgJoAfXjm?-e42MMwo^Q~US?7E^(?SVqKE9(LVNd4uk@37_1$`(o)0>{ z+LO%x8vgUA-KOmYl-{|GSAeYLTT=&p-N)at0r}mD4a|9I6>)i^&3^ToKc5sWtk9r| 
zbZD|=T}UO&W~HYMyuK-6*0p9f6HgteV*)pyY0~O8J?RviGPojwovCqxPtys!1`79JhDqof0`Nqx2l2ul%8_N^6Aa(KajVkO z*|?(dsH}d@_PUPt9A8t))H}LeL(D6jacL`udV7+JwTAU#t>nMM!vD|jru+E1De7}5 zmm5Qsggwsw%VhPpMp8v-w8bbkyeIus{xNrS9CWJpylvi34bQn}k`I znM!PL-0^Bjq8MSXN{8rT)@C?Ao5_P|^Pz$w7zyea7XZ+FkL;fA;{m<)E)wSyUAU2YdO&9|`-A0b( z#O~+3ouea<^R@tzl^OVyTR=axWlmFPb_7nGgeM)d~f{hrO}q+ z6#m{4j&e(3ik=?k9{j~DPYCk?nb`QjdHcN3liT3!*xWCE_#~aJ6N*ytB?UQvoJeN7 z_BhS56|r?f-RtQqldRNdfSIcOh1w?eKqeu)5N>5{Smsz%Yz|bkeTDSgqQx1K9pUxW z6ABw$cRf*L5yN(CiJWInC=JJ2LYth*59_MkO@$OVcu48-)w4D;9J)h2H@Tg78}?u@ z*tgWYzXm^N2t%D6y)~*KD~+*ImC&z5uWEi>Ss~kGMH1|!ke9PNmsBmX)OOs%8bF%t z@7*u1@3rY$9?n##Y2}gt&G{k2gf=}_TBXLUnF#eb`J)3*b)@tOSD>=6O@K}9Li#Zk zeW2K5>X6KWda0t_qH3~5j#vvU#k79og2aB;?Ng)SEB22w-1$z#@$=OG^OOz-z@ zW|f*DV-p4A7X8q-HeneyV+2Z?PQV9Ko{A`tZ^wL7M$hr+6$Z#(U)*(c_WpRHV`l5| zNb_^Tt7rFTFCTfh1JG2Fm!5tlV;xBJbg{JlIojg%6 z=6lw%nl57EY0FWatprQ6s5kSl12G#R&z&m^l%E}CrgPzEk-moh#eSgCC^{F zWg`jXIp0#uAGoG)!#=C`ENU2(0~z)G16LYO{?5Fkex) zn7C*i@W8jBcPuBrQ046-qOTGFWWB%}^VPwiu*kZ5Ms+|NRD`zSuebKoXSqSseXyp| zh4(|1%`b7+Uy(+nd-HeTO;63Gy8O0uF_zFwIecy}EB1`Q@k}t%Cd2su(ao-1+lJZX zxzEm@XZ|=NIjyEKxfkc3BOO|bT@YW}nsUn4`;->XpBROvxj9XA-D#v$GEnuq4Wfml zp`WILxVcTp++;JZ1Aqpz|1h20$qTwy=(6*<}xHJ(|w>C?sR6(ZWh-TlixE{B^_0NVi1%j_T=H35p>e!^*4X!ETslTCfbu63I z=g}vV6s9N%KBIOJ3D^bnX#=EslE@Vg%Ex$6B8De8oHjxtNx~No!QCNjvMEAE_EX92 zX#tDLoD>QJu6nIPw^c>?oic*{&bvot5lnK+8XBzC+L2_g(Gw1b!)CLY%;%3HN13gz zifU4#i6c&XC?P>X&3=X!ed}vY9WstGR4MRB9UM^w&bYY-gF{9bxsp$J|I$G>wo&1u zE6uJrIHj)t=GtbE3JhEnlcD8&c4Wcl7;z&zPl9`*s;;hYVva?q5C{YxYTXH?_>lz! 
zxB-QrJexWLLH|lLHc$LehSpI^VUJ${13Giyn3aP9u_!*ktHhn2g&*B}Q4@6g*X6V# z#^XT*_10%SU8w_y6ZM@3H50xuRDdG@-!A}SGzcsKf;~6s$u(Kns~m2ryfGV``~wcy LAF`uid@ue1$?cJO literal 0 HcmV?d00001 diff --git a/performance/graphite-setup.yaml b/performance/graphite-setup.yaml new file mode 100644 index 00000000000..be6a55b7919 --- /dev/null +++ b/performance/graphite-setup.yaml @@ -0,0 +1,102 @@ +dse_version: 6.0.11 +# Driver branch to use +driver_oss_branch: 4.x +# Driver dse branch to use +driver_dse_branch: 4.x +# Driver version identifier (used as part of graphite prefix) +driver_version: 4.8.0 +# Driver examples branch to use +driver_examples_branch: java-driver-4.x +# How long to run test for +duration: 2d + +--- + +ensemble: + observer: + node.count: 1 + provisioner: + name: ctool + properties: + mark_for_reuse: true + cloud.provider: openstack + cloud.tenant: performance + cloud.instance.type: ms1.small + configuration_manager: + - name: ctool_monitoring + properties: + graphite.create_server: true + server: + node.count: 3 + provisioner: + name: ctool + properties: + mark_for_reuse: true + cloud.provider: openstack + cloud.tenant: performance + cloud.instance.type: ms1.small + configuration_manager: + - name: ctool + properties: + product.type: dse + product.install.type: tarball + product.version: {{dse_version}} + cassandra.yaml: + hinted_handoff_enabled: false + datacenters: + datacenter1: + size: 3 + workload: cassandra + client: + node.count: 1 + provisioner: + name: ctool + properties: + mark_for_reuse: true + cloud.provider: openstack + cloud.tenant: performance + cloud.instance.type: ms1.small + configuration_manager: + - name: ctool + properties: + java.version: openjdk8 + install.maven: true + - name: java_driver + properties: + oss.git.repository: git@github.com:datastax/java-driver.git + oss.git.branch: {{driver_oss_branch}} + dse.git.branch: {{driver_dse_branch}} + type: FOUR_X_OSS + - name: java_driver_duration_test + properties: + git.branch: 
{{driver_examples_branch}} +workload: + phases: + - upload-dashboards-to-grafana: + module: bash + properties: + script: | + echo "BASH SCRIPT FALLOUT HOST IS..." + echo "${FALLOUT_OBSERVER_NODE0_GRAPHITE_HOST}" + + git clone git@github.com:riptano/testeng-devtools.git ${FALLOUT_SCRATCH_DIR}/dashboard + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/aggregate.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/aggregate4.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/focus.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/focus4.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + target.group: client + - run-endurance: + module: java_driver_duration_test + properties: + duration: {{duration}} + is.four: true + graphite.prefix: endurance-test-java-{{driver_version}}-DSE-{{dse_version}} + - kill_nodes: + module: killnode_rhino + properties: + target.strategy: whitelist + target.number_of_nodes: 1 + target.selector: "*:*" + repeat.delay: 120 + repeat.iterations: 0 + graceful: true diff --git a/performance/metrics-dashboards.png b/performance/metrics-dashboards.png new file mode 100644 index 0000000000000000000000000000000000000000..6ffb85b9f180c63ee80376283250828ff27c790e GIT binary patch literal 80419 zcmd?RcUV*J)-4Kz-jUuBREqS_q<2)Li!|va^q^EBbfgz6(yIbW7Z4N(AWfPyDbh)# 
z*H8llI4k(u``h>2bH8)$bN{?NPsk!!b*}fFbBr;^B2Hgdi-MGe6bA=~;@(|#LmV6e zI$*him=O4_l@k6K2M1TpMNLiro|+n${!^%vi@PHZ4&O_A8=Gr)1^Is1+S=Is7`V+( z`qbYrHa5}FCiHvjPp{EhkPX3CUB!&y_g8@p_o!_?3@nxh!}|?#Z*qQPeTw%cv#^KGqqaAeMRS zqyNg>0Iwi>pG+V@d=z}2jxGH9n@DGQIF+zwl4nQ^CP{UdLA3z zHj)t@z6Mqmy#6NK){Ptls+3ZF;HC(Qw7CUpx4nqyC{S+zF|}0F6*H?L!d@1~hKC<^ zUtZ=qGBZv3i-}@{PA@NyXD%-LVYm7F4tf=hLj?SF z4R{6Q;r(lE0=hiVzm{<-fbVcrjMVPk1Ky47pE^2vdOJgXj07K?10Rrh-8J*Z!C|!)Xq^jz{3l>9~}7rSzyt_(Z`l6z{B0sTQ)#} z`)UnYU>W)I^^F?jlCV8szE(~EqxUKE9(Dj{GVU` z&l_GH)9gQvDJ}7zhy0Ik{@GGq1bgZK5sJTYzWNjhv?8gz$iHuzBB}Y@E$p3Sb5YlO z2)qNM?CQq^_{|5ru)h>6>p7!fIk5lE z+-$Jie$dX`$BOm!PwQZBh^t(`xzF%#J-VLiAyu+x>o2u9h2r0UwWCHpzqhi5(GNvD zBwp#y70lQ&3>xED9waw!o^Y<3=pS_EPrFIVaw0e%epDd2N0WjGmp#BYDOs$+lX)Zc z1Cu|>_DqKB3%Im6h5N`QaL6O8h*qoHGJ5*m(n@;|EKzTv414WA{XfiL=hru8j zhwK3TC-#AcdEoqRzm($j<1{nwwmoJJGdN+2F1^$Ts`R@GXwon=*=h^vj*;EvQM>s!?!|926S{AFdyeQ5M z@rySnuQh5cmvO*ObOX(}D+XP|A3tb>=0Yoa9%L$m|Lta#Yzydt93&gNCuB|RDRU#oZ@x^XQ22G-LPw$w+~;>M-zXY zO8yg;$I#nvYha{Z(=IrOc`^id)j2;RoM3V$y%sW+hw2QwmtW^G`9p!*@7OwH zY5(w=2%ltC;&GP9Qx+V*rsCpz__ZMVm!hnW5xn2+7gUNeau*f8S{FRx$z4j)^f0mR zcf{;%y>aP+-n3Q=kCtF_VeUP2MwC9joTbL4s3LsRS&HT}dbaFe$ef~seuO`Lo`vFH z&h)jTM0!k0#6%u~l?v}{dMT^*G%;(qQP*nyezvT~FyQyG*_CP^RQ@-_u-r_XlK5v3lP8ZL&r%M8yJ@pmq^@fh2jJJ9(WSTxw^V(GMeE_JUwogU7)+eMH6?5V)|dZGrY{dx z9NrAj(|b+)%eQ19X_pCVI))+NfhpYzcEmJJ+36-vQ2(q(!#|CQ^nu}-o`Zt9zD18Y z&d=o%9(3b%ahgCbyj;3ACee>vnxN%Pxy(Y!8QwcyYQ=dRM@ zu`?G?f8~3lhP#f@jP3j*D)ulHQqr`+fW$pJgR%vB36z=j5t;VsNLuW3!}l~jH&0*n zq=X$MBVG#Gg=kKb84OXQ^%@#K3^C8&YY0tjL5(Duj6>?7-pcahGyLphr<}ZHnJA<)pB`7 z;yzuAJ#TGZd+G~~{0SScok)QSUMUe*t zM!a|<)8mcq%=`x3wie~jH*@kQr^LVM7(mq(l2}x zRu1dC1SyHuL)*Etdg+5`&NRwY=EnrNfz6Wy0%`pVnTrNG=?Ba|vN zd`1~3ju|LcsCj+}PcF(lVTYP>{@YpAM%y}>b-z7KYIsw)519OF`>5CLjCan>`0*U& z-APN&f@^^eX{A-(D`yx1y9^B6J4|Rf)78AFhgDIu=S6IyPoN(>B(}4OlEty4W z2W@zZZlabEoKF`a{f3zTR$K%ekiC?|+FsS*P<}ss@Ym?e5N{$cBoHmiJie*`pQRd@ z_P0;%FyoJdn7cK~NuX*xvIjKg{i`jq8hJ&>Iz(6iut|7Bvb&?D)PCd?ZmdT#J)&NiVh>h%&&vC;-x 
zI34uy*F@>%r+EI863Fz!AUJq<_XZ)0d~F=xzX$rh z?S}If8^+EfQKcbwA#LDXKjcM0O`Ih5d1Jq9)0{I8B-fEqO=Lt!$o1b64~6;U>c2rV zx89tK=3S<`3&Ht9*q~DMH8K+|sC3_aIwEN^Z*n%&H$F`BVPVhP^xR=X(I;q1iVntp zoBTS)++C2peg-Ov5H_0LeT_RQZST%gpj_{)#l-?C;E1rk4Re2ro@Guwe7*0Qc)7KH zWOOYY+E^4K!DY%+NmM<_y883E!?UBtm$}BPWJPxPWsPo$eb#qD>aM8UI3qN{{EEZs zuKFDgy-|w6UC_2 z>+-oaaRhC022O<@)2#9gY2~FTbA~J81>>I*AN;RA5LM&RZ|nni7wYjnRS*33zgZi* zzt;W)<&7vrKbg+XlmB+t|30`cOAm;m3;8 zN%@LF%$ka~&+l~s#ooy?b91^$sqK-l9&o5~2F2$ibxt71u|q)Rs)(?O*)O<8X?(3~ zRqF{;7o|q{GN+Hf#;1HXgnq+o&*T$@^EnSdPBg!n+oVdMJ7~u+&Q-g895*S)cFl4? z6F&w84hUXThkt2y3t(VPVt_Hvwff(u*a48PE?pp5S@SrWq5beSOUcdiV~Hdj(Xs#s zMyB~YN_YFOQRw~HsUA+8xTV-v|9${hOs2cu)RVH!UW;!a*+sbvF@Z8Q(bEHT2HM*8 z2L9!DE?sW;>Ouc7P1CLG?g&^18gyaNMSG{R7QMbi7(QD=~VPdX)0x4Gar#Mz5Vwa%yJds`?~^5s)3 z(a91q1uz0#T$oRTFJERSMtmUP;2vG^_iKnmND*`HLSty665LX1Ka#UxZa%qr$xd+t zeEb|ZOe)7b+e?yyuWAj<%T(_wzWY~&?R4i$xLJ~tL+#*gF()7Zjkt+`@0Kxw4o<0S z5bv0o67EA5O1H1s7tyfbQ%;t+B+J!XcbfQg*NddO9_ZqeLkn8XAs6_=#^__6dqFvs zSj6R!(p_Z1OUD^p8yHnEmAB>XjjiKpt6|<@WzGFuC}sBhCxMhS%QeWZiTmcQ5&vsi zE7}wX)O{t>F31UGEo1r1!CiBJ1BI925EfI2hptL!aZtwA^slewYbf)Sd4=1(;UB4)(%(3e<)!FOFNzUO!t-p9q~u z*3UwbbW&HMd(N5fEcT;k+5~@G;WzIvW^nxST-ZJza`o3pL91u$HHFuZi!cdRhpjlp zWvvX>GBMFdHsLhGBM&YmM~Ef97k@mq_@01bS<4Hfz40I>Djuwv^CAN+IDZ#nP=8wn zfLx=zc!|(2@`_b47fvVW?;l}pr86Sn5DG=+$_HRYZ@8&2VZ@H!VNy7|TLhFg_wexG zO(G6b8vcB2!FX;&Lgn>heVjFbpE#cLAf@MaA|EfvZPbwIe(y)S7Zf|}^y3#cfPO;& ziWL*HV`JiOSwYoav#Y_?y@5t!0tQa+xB}29*EsDXL3MX2Pb+44gl68IefQ%GT9K6B z)VW?wY+@H5*ZUMR{=g;>Qzw%urh_*Ycc#zgF4CoKzZm$Y;jCb1&gT0Nn8 zDC**;)vm=}Caqyav3loA7a50FfJmPblAo`1KbAo%1eGx9l4943V_`h5XQBz6vy z%o8;4)JpFKx3CTbByka+mZI~07g^zvvt}EHtpTi31B*82Sd=L@V+2W)E_9}dKqO^D zE*ys4-%XWbF;=hR+O@4Ha=n5sbU`bcRvm2hRRQ2?jZjKad%Lw5PR~24PcS4Je(!ti zcj+k~`=1W2;|5JC0Jjw;~5Bbo~FsVpa@>;@%oi6t4o?glsTXr#N^P?WzqT6$h z^4f*tasUkVem?iw~$K9(1k+ZTt)cIMu1?3D*^*|1j}-svVLaSRI2kp+dDIu+n9&SH)WG!g^zpm8czH`cIXmlP#B))ZDCWwOvR>$UaiTuolN>z#ux-O@Su=VDeGcuT$Pc`N2|iO=S^5${QW>{~%Gc2f`%AJ|nf@ 
z^)FjLTRT+eabMt-UaGiUUs(6W)lFgPtC8w(ojCDA^Ao%46ZIE%R8u2toJnEh5cse_ zEB&p=M;110^-#jTd%z7S<}JrERx&JO#Y0x#9+>Ss+&FQ^@rNd9=}>q@s^yO{OqM9T zN`10(a1g+gH$3KCqqq#IrDJ*wAp8W87vV2QRa9VY}SM{{2)z)lD_qwA*_N5W#zc(RW(IAd*6>Wxu-UhdyT{*0MX z-q+JBB#tuqndZfx`ESZP-+`2ybo8Q!tsmMkme`0#LZhLm8p?J-Yf5dsBtg6>m8t8_ zOpf=#Ef2sUNS)iipnvM&p)q0T@!1$lsz=snI}T(`oRhy<4p3V%QgYegKh_5C)9@ib zcI>Isb2_pu^yKrv3zxFXXpfnov?^ysu2XL05Fe4OCk^RJ6GPWT$*ASJu*Icf(T6%{ zR5oAcf`4$cm8&)*Wx?w_j)cjLU9y~`w0=hMAKFR!&>E&pp4fF-R(W$^mLLc=R@TM> z(ovypF}Qx4iAHGP08j;6oBtB5A(G3ATY&Q|IbrHMxyU`kK|sE;SI~be^;GKtet)oRCVaMKX8E0*kKn)=f|f+9zzL z*h2hL^MwT0W(%SImDcPI2!aFSpXsnsB8TcP^d>q1z-nBf03g`aev6Cd8XNqBuXFVs zrZeRUtqZ$-i>b;vO6uM3Ty4-=pnSLn(r{DUaJvV=kpfL}75y#*KzYim@h>(!a(w?t ztz*tl9kFBd0UKNmgu+!cb>4DhWMcjG%1J&S*V=d;D_1^cF(@CzlD-Pq z-W;N$P`%hJJ>#MPzrzjSQRzoUom$NPtqVRwk1TUhS(w%!HRQe;z*UVg%yS(IZaM-? z02^v=!)|5wE;v2>k+_;(gVr7LD^%$^5$-PJW*efZjf?d7o^n5Mw}18Q{P_j z#v)Eciujk})a5X3(5*oXu8UNx+#8_M`~7~$?0j>HcDK3Qu8Kd3sn@j4VYKSD!eq`H zY44w3k7Y*<+jei|*Q^2pY{sI)mhVMpbokHMk}#p18|E2XKqmk=Hi=xG<{a|ZY8HnIlLH5c2fp~IR%9z?l9e7ltP)a=@-wk>v2%?Oe{GXZ6=0?XL^G)Gx(Q1B5&{_PJ`C`+63;6+Ot6wY;>Rj=BgTpvzC11hs}K4jCx zo@xTWj6cuvWCr)K(yoQxGtW%@GM{<+FN6{?5Io#1V-uFm#FAu_L|5AR8`Dy)q`VQ> zEW05cx%3{;>b-$1i*&9n(og{=dcQNEx_J9*o|nC7lwLd=jfyV0KzM<8idS@DIs}zN z$au&Xtsxtrp+b?xl#3z&o9Yizkru*yK{}XCJO`>mT()cZl6EXFS*qayhCF%Ah0UE^ z5`P)h9x02I?-TK0)t5Z8qqMZ))ii9@cMnO4Z$|6sQj2?#wCT@J=vERfPmHt$1!q-l z^!zz@yv2F4Pcxqb&WL24b=1{Mk-)P@j>^h!=x?{Wej}Z4YsRmv$YaL!%!_8gXT*PwXj8o6(43u9v z4hjcdYyrBvGd&xtQ;8_--_}j6C()K!3WzBWR8WtMAVLKi!eaJr@KBTfQn8(WcgOkW zdC!|fteDNj+Jg~ixBfw34KxWdP}bT+jZs^km5Gq$cWZ3(_WNdvcX@MJgekPXuj|Dy zUr~QwaEn|LO9r)$QI2yQDjP*n`5ziUq-hi*fd2}=4~5k+QrLm|Y9}47SvVgdo#Rrr9oBTJdLEq z=eShx+pJOxV4O@`ck0niTUN#OTF)?gU1}lMvr1i>lC|K`wkv{J^Gm`~4PKMk%o!?6 zK>@^{f#TgdBt(yjACJX`LZ;L)MEaYU?cIQZ-9a?%{;w(Wlm$6j=1?SnTCY;Htt3&} z@aPh;Pdh6T5&(ozUa1wMTyw~$!R#L@Bm@YE|DXT~nFdy+-}yh%MGDeo(PUw!J`Y5tJrcms+<2DHKMi1z(ta z0@Pjw1!;{uf$KO?EQ;+$UO=Lbf(0swwxm)T(EByzJb+{5Id<8#M3>O1R;2GN<{TB^ 
z`)LpE#{qgZF>4#f(}y0eM)ow+BWXwf_7~wpT9_&~!sO2fJRt8nVf>b3mjCIpcoBxM zr#M0+%2h_fo{FYn=YQtbH5dyVq*S8|RQUqUW_G&4qLjmlA*}qB6qDqX$0TpxBEiS@ zq9<=GRf@giXw09h_LGFROF}Q8qUg(i$=Q?Nj4F8c!i-P2nrPF7cK**-T>Bp+K;zG; z(Om8aeKC7dY-znarXfg;j&DI>O%cEmVYPSkv4Kt{eZ)mG6t*goj4}&(C~^EikBSFx zCaCN+Hq4?jbCGzud7=Z14*~d#WY6oep?u({-~dSVbg?+dw>qB?g@SVz5mAfx=zi^M zSVB4`F0DGq4F7E6mBl&HwiKCL z%Wo3j#R?-K*oNt#SVHQ;>_>?vGrVXN+h%c>t%GXZ)&N<+m>u}^wr z4DTpx<_Pb0&>NW>SY;m35Nw8D_FgGHkj8nsaVArf3{DR98s?X50wi2ngVjjqt-3Dh zPDPfxi8)!GG&s@pDeiJexfPI>$UQ0+4BjxW2rmGCtM`(d8xtkF@08_Uta0U&m5;bmjXo-;!~!503dg z7v3c_LpW}&W_R{uw82&zD0U$3?N%lhq?;>N{U%VL&E}ui?3^Tz9NVOMmMqZczYL*Y z9>=4|J1mirWv-pVqiqOI4qecKft7|ny{uyv0(!0nMtu+f6Kq!e9$i(go2NCN3hedH z)n_XsYiKbe+m{2-63#6Oa@JiWNFwoJf-AWLoZyqUdPDg2VJH32k?X(+eEb$Gu#}^O zDEOwP6o#TCnG7V~K|pv(L?NN6Cd3SBZHDu^mgOQ=oRWJ86O1^Oy+Nqd61VlPcLuC8k+kx?(n;q9~O_YpG9v^8v&|_2y$-6oK+8`DMPw=)|ksf3;3z ztgHHW*7&c5J4F$yeN`jij55hpe?l+OmbUozPYun;Lh(f8qO|FChbAk)6R|VaOH@%W zuG9{0WbHsgXqE0CL6GsS=v^1~xyiPf{BS4CXE;#%KiAPz3Y`>S2lEvgSW!fHJw%O} zEf4USN4Da!N)D*39F?Nu=g@wghDmc`RvO`TKQSZ)rMp|1ua4xcu`<994n4tH*?ecC zoKB9Kcc94*$VsXMQOR2L_q}Y;Ps$9{ZR#zJ(pDNKngPFRh5lS-iDSy3zY=IBUlovC z*=j4zjpi440hh{i7LfgOE}WPV!gM8{44xF?PvHKIHCwE58t~leuW}mEhT^2Fy@wNU zFDRUpVV%tEDB6LrI$Y9+K_Bo0g^7Gi8yhvcg*T-(mFb?QXxN3X{6ha(=etFt)0sbR zyUu$;CyCY-oampdk%^B=fi?E)&0vovcR0078u(Z+9v8Z{*jviHYd$uo|CQl&XO!|KK1$d--mxKKR& ze_{E-d%osJ!U);S)6+NmwMF_nAwqDq0J`KmbEW^i+vnXz z`>r62CU%fFAJ!zHWL2Y>%yKnBE<^$UrgFX3OcQvRYM^nXrYrUlTX25 zW)b?B;p5>??Jgy`Ibbs6_5He|6P8qZjg z&BaWgSuqp+(LGdA6JMg0MW3M@QTSjfeLl&lNs}7?+!!e`xmpYN8K)Szi826W#frHPQY5pKGGU#D6m)(Po%nn6S;xvz?%L<40y60!~MB z=a`bMXO0pOdwn7z7pFtk&6^Q}W>uzc<#qAz;F}+kd^v+Qs}X1OXCbKPgQ0Cbu9Z(y zbfgA6tAN}o=e$jE6pzI~t&C`{S`*{K6cqcRHvwws+kdN`ink{$(yir*{YyK2R?YWl zm-MfEdNbJNL5C{;NbH|$!99Ppl!LTTKNtXsZ|gaqGcl|_?$wOQZCX6tSgbYzh#s3q z9+a`&8MZdvl3W6S`jT@CFq&Z5eR(B1+T?zi|n+k*d9v*mR>n`J}v9zWyGcE7IABb%=;*LqyD`nPU95;%+!q)w%tM|Gck% zb$6CRQ@^bi+ap0Z8e!iH-v4yFQa~8K%FX}fMdAyFmEeRKMT==*X0P3IQ~zag6mfMw z0ygi1L%xP6vyqNnk=Lo{)g5oJg 
z!%pZ>E1FGYfJ$O$#ljFH&X&1L_RKdma%6Or4{z};_}1gzqHadRHeMuS?HL5^pok2$ zk-d^$UB7n;>$f=&&Xdn0Aneg*aJJMO7Bj3hOKnKB_l;8szzVuDAC&+WsV1Q7jo(qf zt+*Ljb^4ugpD9-dsBT!Z#SjehXIDcv^ijMrv);{A-cM*gB#s^>JBnRs$056+J0O#I zO4@kT%3h{0Xfd&U0P{6EHOwq*QtZkAM^PL($?ADiQ3Ri|58UFbDprtOXHH#Nm8Xj{JJp?Tti{$AL-$+79S49X?`sCi?dwwV*?jxCdj2 z%r9X4d^Slo^p{jlSGE?F1hW2ncEVD9!1UZFfoufNvQ%C-=eBwh3IXnk|L1Aw>qZqz zjKc|_dCO{H^;=;;zeNVkM@x-c7cmbC?wnM_FWw`U2#Edm|Jh~4B0U?}yN{-gceD`s zx;gSxMSH!$KE<~>%XqV4UsuTOO()zN9YSRz7Y2ldhCr`-OsaKv}He zlH~>-i$T(@4FE}LhhMdZ(KsATvui`jK$jt~ta+Q%4A+9~1isdfbbZF3WX%w6`YSg{ z9)Q>GIR6rH#~UMBVh8i`ldXae#}Yj1&rF~E{0(8WKJPWrCeg62(X^JxGY?&mv$EkO zbi;k-2{=UVHSuQh?7rt!_}H-eDj~tb^Xmth%@=A3m*+d718=+d^P7tU9DGm6T*6{| zsov=OB#kf1jhaM4Dh8ESfLjP@VLdT>4!3#A-AcTz35Ls!3ZZ)HwYwlcsfOteeIR&S01BkFV^2_>Z;>sVQIaZdP`ZmuaO%x=6_P9*)qYWU{ zQW9+dP$9wc$dS;zhg@NiV@Xu>N@lPm`dnA`97`t>>kUSkb24Y32yd=vY&&i04a8*< zXs1b|G%jBC2{ZqAj!ahg*~viC$xigCWqWEPI&fJziRDW6J>>gi&_#MHGum>k?dW~e zTIj{EV#PZ2am(fGD)F%u&}W;ZP&cprUrWH*VU=G@)5401o9_Qh2=2ZTg0J782~knG zh@h+B3bt_s9Nz#AP>3h|b@0-%O%h`&Z@GK8T;KFz^0d$*s6BKyl9Nu0{fUE*9ZN2* z%9O=sbgMTxd_nJqsTB|kGNn`9Z;5)ydR^)_8(=H5+&eae@!f%!{>d%QFB$2(bG;&{ z+MuEPnRYAL{;3eUWT1akNt`!YN;CJLim{qHeR85Td{*3Jn;HHg#?XAQZN%%nqv3~3 z7DvDwU8~mM@$cUIe^!$KhZ9Z37@hWygeIts071Dz7m8PeZz9|NHsDS2Sj%Z}0iDwT ziaa+xsK&fl)r4iLxaSRlk4md!-0;TfhALp&IDo~eZ54n(kjJXSh&y6oGMHGN_!SWh zOsUtVMn}rG{0WiCuq#=jJ)BvP7Z^>PO3jE;6G27z`B6IegXSRg4i)3^#o8R=)9ngD z&zAF}2SwU#$#Po{%Ra9Q5Qu(VTDCxc4E8;gv|~#eujS*L9kNUx27FDL{=%1gO9_^K zzdIZE>RZmsCl`^^)5L|7c>%7JQG`@Vd$D$l&l!Jy52J{?IGt!=kUsxYgt*Um7J7MR zU1p>1<20pD>i3lAt%;ehamsTcscF~)LGll6$@jlZ1#TdGD?=^2%~fEH9!ihoF}v+a zzTr5}L1biXN%QHcq=x}icv@_}?-!}0+Wpv2;fkiF8aw{27oFF%1s9ht4+ldTEE?Q0 ztW9|?+{o{EgJVjnNegQUPUbI@Xb2+edWdf-Q%06lPi0)a8yRse2>VUfX zCz1Um68l=(T1}MB*}y{ww2~}I0ZI%|z@$<8Txobk@A(l-A%-6-hna-7J@HLrAIsyz z&hLaR+mImHL%6Rhjhi^dewu($4r`+czPa_#wHq746(R>_D_O?!8)k!hBO7IPx+}+y z?a)GyGoGsJOQ`s(KrK6X><6Xc{1SMZbj#8}=OYWKqvVDz2f7c`2|wGmm9M$9T&d9Nqolb}38e^Te2OO9crAvM(Q|S5lT$b<8 
z-_gb2?%!R8ZP(=97H(R=qjgDy6W4Yr^`*SFS~RW{NVV`ZgY1+cMzlqQI3{kyF&(7i zlf~eW2|rn<0ylKE--26Gna``<;tf<7Lq#&Za_keTXEX8C2=yr~*ZO9~BHFMRBYn{G zPHv~E6us>aAMMp8w$*vuoExrqlx82#+7iU0+bv&pYo=INX}}?D*mvA~dbNl)34Ww0 zaQV}}xdteu$Csp-AHmz71_K%Tf~JC**wk?k2Bha<1Qoa0T;I(J>sXNr+Da0#PwnN3 zwY|N4uvHCrwka7rScAa61^o2RL27wGo0lGS&P&pipEi$Q*uS9(Xd|=IZ#pT5qIBkV zw^-vFcfOT+?~hFTNk>G`Dm^pAot5#Mb6G)1d)C^;hyF5<)n4e^DZlxisS;~e15EJ4 zTu{Fvtheb?r=GVc!y&{-49}QYcgrCpJz3;We!Cpt4mOvad|%(kwu5cMPN2`Y;Nn*4 z3}R=lh)kPMaX9fCRO*IZh(C+uK}g?@T;fV1G=L(ta~zi%khaL7h_EYrn*#i170+e7 z0!Ky;pyWwMiK#iaK~#-NqyoWQR$GBghQVE-nl;`!i?%2H+TK*v`HmqQ{DZ-&V)&og z6#ca!t*ZTRC2<3NNHo5f;@E~^dm#7?szTYf=)?kdw;OkkKJO1Ig`%2P6eBK$bTyRP9^nfOWEt=J)$?p>t zw7)U4e!I?dy?*oaT|S1+ipEXznf(oGBC?LW@@s;y{-pS`Tvmz=DzGV?cut!hs{`;T zKpkq7n>s<@=iKR+diqMsr8;KF#}BCjITr`TXM5Iix#){{b)2Fy#S1jjY&K4y^_JP) zaU^;R8e##%GZL`i$kKPWxJ1giTieVkDM&mi_8FBv_7u2G-yhr$4QQ>az4*5AjG@nD zt|j~Z(Y8T1n`INaQ$#ofQ^-lL#TKF-(!xk^LgqY_N5X!gVsr8cLsv-xH!-z+ZdVz6 zbaY_}8`kJtdS{t)iSr?RZBXl4xPQxvLg%eaPsH>oFzu{i%URxX^?d=)BOu?D)N__D z&{09MnrAy=kmUB#1s!vJjKBpsbg_tW$pSI4PQ?dhd?SM&jQi7^E4rrrKG`(9U*-0i zUXcS(!1ShkrZtq0IqhA+Pb|)kUG%nr5j7%q!fT-c0ABowQjp4BiuBq0 zxHz(Vv+Tkr?XQ$7jWw@M+=4-1UAbw+VnO+{gSWH} zz1#JtwTZgB(Xt;E^{%YM3#<(LJlPT@U2UR%Tnu3wl(c*d)-V*Jiifg3#?DJk^$Ezo z=zgI*+9go}S_yg}2 z4s?wj&KYQN8u#ZYOeYNf4o*(8>izQ}3^kcUNxJ#Gk3&DQGsCNuwnUFFWce|46dHOq zZTBu)X;EwWBqEydPJXPcg?qIIsy^moy>x{$|C~$&Ue(I-Jg_5*8_hzo{}d zIxB0VC#5LQNc&wrO^>CT;G^mKfX>@ii)8h)PMHX}(!2@7>c@O_Y^x}iNpXq8SR1T( zedj{#B4ceK`McP=ca0wwq<6CqRaeldyL`EYV|eqAKNRzFU)gSE2!WAdjHVQdIs(Q( z^xs|)&?Tlwp@lUjQ<+L0GS8Rg7Iy8uh56pHKOzvrV|gFf27wViJfE4H^yzS`mB|u= ztGN{EkYX)$WwwAHgxP8!tbXSGHF1@oTyT&L)4Ukz7#-3BW*B_rT3S|E!PSC!f!B=+wIOe!ZM@8Goo7yG%^GmDxb zHc^hfG0g$Yt-ppK{j{1EcBH}xiJ&99PVqirqnXovi*q^&?~?oBCUhdy4@@50gcCGb z+T@GGWa~%G8C%!8h9B}i;UeTb+12b+n$7SH?{Qjz;KV?6?Md$vR%F{`Ohr@ZrxLKjO>t=b?*(3Y8C^T7!G^x|xLg*N`K z@ z{_`H9{kI%9@`+)>mXUglpKMYj@+7L_qYe?;g4+{f=Sw1?YaUwePq;iAMC51JG%s`4 z$H_ZTF-n;HNw*XQg?a0kPx`0_KE|r3jfvo9nZq|mO>m6;qv~bWaY0dWX$6}>Q==Tl 
z$6jp+$38)${*6 z_;g%qebFnbQDFb7!J6IYN1N3$854oOCzGwYNzY|YS%V!~;mL8sr}@{l6$sz!(%;!8 zHg_HuFf*wtwLxbppLlB1H1w7f`b&3gzpaEhqTHqLpWk9MDqD$1ra!H0+o3-tIw$;H zti+Qzb-seUJUEVG9L(9$akvqwRUbvuswA;EcER{BuE^OSf zSD0HQvwJeC7jn)0se1}h@!b5S50Rsb$V1_#!P`xw$HK>Qz+mizZujh1o?0aEA8(X% zrj|FHmYOg!W`&0M?L2HocNN`km{7QwM0V;y;lFK5LekSeWX)H2Fuua(_xJ8l!%o-HRgCNVQVy$#jJ$@wf8}TIa2^_-tkGb`IVOd9yiqO9F2s zf82J430po^ypA-7i{xd;Kaz0qVqoTmFkg{?a3(c=t-H{ zd%V%K&+mEYwB7jPF}FNF{j!*;$PRGPrn&(dl-(~wSj=OJL^NaU-wYb%?9pA?eQ671 zMKj8SVwuu<%UZTi{nTIsy|)=TXO|1V_~m|i{;tWb0$wC@R#sD2g7Kid2l|Qh=>==@ zaXgdpV@|^4EyUR|zoVYKYz4{3y{k`>!c@sok*3@^TD7A&wo3+i@KBzLdiQj`qceFN z@(gwF-HQ%7SGEjT!!9D>6;l~h3BNkHgTHr`?(S;Xg{A-W0B>Tg&>Q69WsvloDO>0a z>VnNtMrzIhiLl%WsaLuzv&h_H-+{&GSZjwf{XjqCqPd-&T66URGwYVqzdtU6(Z-io}?(r4-q75p+1v09kCx8$;8 zTP}CQlA4j-nCwEVn*;fpzoa$TPi(4TlQgCNt@YWISe0L0C0X!YVq1@nZ%D=Zqh-Ho z&yC-S1aRC7rTYA?9AW;G0>(%co{2T?5+}=btF0;<*e*!VPqrIn!6^${wL`nz78RO67|Kvpmml>YJ zDo8YL%z24K3|wMXkexBot#y{cSzaS?(Hb}OMBmbeJX~Z94V(1dX(oq!qB;IJm=)xS zS`rEV&Dgwexg#Jo>u%%-nV$$nvUc|5%6~O=ukMWJndLKJYi%R5Mys}*kUG!2R%~jE zmJt$e#)$9q%|GTQ&+K%5Q&(SokMP=ItbZUE|Fid*+`%b{C1Yp3ud`;fx>w)nWygn{ zgde6Km5xJLG8ehT;_RnAzWDNtedo+F4H+GfzCCw3UjOJKC+k@kbLZiF!`wy{)mM&3 zW{&jrG(V0$1Xkfrws9I;UF^L(6L1UOReMCwi=O>{OYClK5m87BHKPq>_pv7?XV%TD zvb#i)&BXGVad;HlD{q?O_09Z!=c%g^)c-W<$Fq9fv08iqRpqQidb zNX@;M!X@b!unLasAwu4?0Vjq=-6f08c4+p7~GN)9>qf z_2MT(SN4FLuBW4M$zgsGeoXbF9Duas$xkm6kKF)S#~Y^2X6S#& zCI~!sb<6m)(pFO$RE7(g?dKF82q^FNue$w(GRV?{pSodc%doizIOf&=nMgFL(fV`e z)87X#EXX9n#tRXv4nwYF4;GH@T!eF?1~AH{n&VEj#7sHrI)EELM!lxdNv{wz7NR#c za%7YPpb*zK5Lcm&)gfQAg>t&}G_FZOshguVOdG*!7j zM!e%G%dbBv%A^lq?$ka|zezVLgBiZr={FNX1pRe;hOeOc@pnR^Yb;vq>AZA$a_0wj zZ7YJS);6P2h$r2gCewiaO7{v72aI!`Zm||n*=*g2sVj;?oFC8c_*ZKjR$q$`_^5R9 z%jokB_!wGw#QtC!+)%zPu7g*KkZ9$6@ACo*yEzC?bk1Ka;# z>@B#We86{II;5mK1QZk`BnGJwkdhXZW=H{PX&4wvx>Z1t5-Dlv9J(6>q=xPo8ioPR zi@*O_XP>>+UTc4aH=exj>&XSmc}pcu=8eR(H>nMaGvL!ojmthIXLBmf+`VPoXv)Z1 z7Pdl_hd}7NKAocF$~aL@-h?H?r3)=Jb3f3Pdv_$%g4YyhLzYP;_B9**tNwhzeT@YX 
z)-NKA4>pGL&X4fx6&@;riF+3=0C#rhi8t-{14T3Tgz9pYCzp)Pc97Y25Jg^v^)OB0 zFE|n(hx2^b`E=pD`J9^3&ys_OVTV(Vo82XBG>Z;%+HEn8w&g*r8fbRl9P%bjIIFgH zmK;q`Q`m&+ISL)tw{UwkbqY3KLuis5vdq?Ob2V$< zpmzAEW`KaS<9_*SaE3TFM5zH;; z9Zk`wI~zEGfE?hzOO!3hq0fl-yrqNfdu)-BJGEMSm%f$Ggb#qu`}e;>uGfh*)d~Cfi-5Rmt?6lR`J0d{@t~i+@6~nz#8} z-_hmBEC$4UTQ76HdIOc`-=F%UnC%JJ8YxA7@h_BOTl}zbWh?K_$R*wUPe*Ed^hZ1^ zTus!On8BHJgj|tX>#hyk**z-X;qo|b?fTqW(p?Jki-WcDOpTkYvV7O8cO>ZqMxlGS z3ZE8RY+{FD`^TMAC3Rg7`;&O2FIveAevmh+N&hZS_U+_>EGd|Mw7gGai?A+j@~iV_ z(+YWWHCJmjQ?q9Do`TLx(&g%?(Qopfe%BnpRZ(iWB zGtcyz_sx*R%df37+)$gMfllv5M{Z4}jG?x_m>An%l{+|u4AeQwTm$v93d=2>a`32- z4;$to4~QL)ze$PvX0;n^d-}rbhsq=Nf^du5>ob1GhP~?LM&RRVHk8w1UL+#$R3e)X zK;(*0osymsgL3ZkH;Or{IYn}YNF0SMk2V`I9B!W4Y} z;!tt9c;N+$xOhoL*^@q=61Rv-r`q}4;xEXBQSm(DbEGZL?12 zE>2|oT*y?)$o#mibFe2PjTEv4nj$@yrH*)QJcWGY`0J3xH~0F3{9%lf5rp?(f3J3<%b(uYGS5jzdZPJ%X60cidFUGW z3iCcoO&x!md$93BY2EsJ`0)i$Nhn1G7_VGbpEtsN3nrX_|*Elf7M&JnzJhjw&&<Mwm_z6nMu}0c_GW|{b z3j@Eymp1;Ay_?GWVP9t@J?|?9UC!H1(2+J%&uc-Xq=Q-UG|&OHcN~Piso)Jeo00fR z-PFqW%W{gaS^^tQ5mP3r^|++BFV`nOCi?gzIoh*p^ZqxK(>{$@rg3XyUF2J?s_sQ5 zV1Euc44K@jCBu zH;Zc#>mOadjrf?*5*2Q9U9F`pfe7 zis3Wy@ep<>Vzd(@%!EZu!@*FIB63l;^M%titr3+)Syt*0fp@g296Juac`$j9UK9~N z-~Gl#*1g0{}u*;5jX$eYl!#WLa>L zZ$a0nc$U(=*+*0^5_fx8zaJQSqXme0%E$)^M5*IAOo>h@kG`{PRXUoI5C)tmeF{=S znQhmwsHOh?={}y|pk%bU`Gm7WpiQ{xk-!XzcCCnDpmAyWl}PMF1_JO7cMb)~R1v}k zT4o%@_!>^0o!`EOEv#j$$&z1mR4OpF&?C=lRempG_y=kj@keTKhGCXE%snER#%1oV zX)(u=>f^&PKwns1*{~Td~B|xXr)uQRs-7QYw53#U|&DJclj%b%wIul!0&SX6KDO z`4utu@s)PjdJDwPgZ6_dBg5)!e}i3iwhprhCDM+H&=WVzbKDTlxiJp4plxpXXl~z? z)P>bfS@`#qw}jX1*(hiJG1iSlP1(gb@><&E779ALw4rXn?)t4Oeo{CWT}!0A8SY6MzuO<7@!o%+iuh0@5iWNaZD#`W z1PVX}Mpy?I(*xg5YF#Q?W9J9zg9o4H8M1~AH@%yQ`w5_f*X1uS4}69jqSQqHc8ve! z8zybO+GPa2yLc2532p4)Kdmy#7Ap=OGopFok?pXV$Pn9rwn( zfG!cA7b3+-Q?c$8qE|V~4GF_X3SnXdwij>E$H%X1hN~V8^PJRXtyf5#o~(Mb6Fa6! 
z#LEFncj1);%$7S^L!`G4ytLc;z7BN#Vz0{@`oE|PF;`+%{`sy;?P>QBeK9q1O0BXD zvr%=STm-e*NyC80#UgmDnJ#mR>{v)#Cp9o*3{(F_5&pwR*_nT6gnRc~rT}nK^S4PF z0Mrno;b;970JAl<=|}O28@gWveWWqyfmys?EF&v2tP1iKY;6M&HVXdpyOZ^=$av3g zD83pbCmml}I4+&1`X5;&e<$Mia~{EBZ>tiAV^KNR>t z$RRWA$>DsVG{w{*43S*?sh!R*c8dkt-l#BgP@RcWbI|%WehJxIg-GzX*sjkzBd+F16 zzOT8bRh{!2yZfM5m@$^@iIH1@3yrH|D(I>XW%#dt=H!6_a2~ z>1?Pr{v?A9obQqfUgk3*q?}v)@ZArveefz)>Tq#+*bdpA>FPt)u!i}yQ;?fR%~@fI zRCro81Q|r@b=XcOF`(9ZL#}&D=xtmrn-8lj;TQ(1#KMSgM z>^r{PNP|Gneihcf4+M9ZO|T_)C>b3mIKBh_#dF6N*CDgXMX7)dpJW#pZ+rDP(JXGEBsyK2VX#& z=7C@`?Q+Ym!#4x>O}D;$&q?ZqoO%7G(~r@vf05i867SN%kW%3#_9J zaJx=7{GxsfUt!-2{~Z%uK`6Oh!QONTj`CMNe9_l;!!hWWmAG~WS47ozwi36=_h?Nc zr}iwv-NAO8RoV%Ol9%G>spTC$NtFprIgMwn%%OlwvEVqT}Q&4}57_?z_%gA42 zP9zK)CNP2c1iZD5X9_9H9~Bf?QTKZB1-c03RrMEClXhEJBg-&JSsf@M-*6n=x7Z^? zW3EepLcrg0P!B4)zS&){tNo7Y&eZ`aSe2k>zRa@<(0oW0{)p&3S@`oXODvziKuQe0 z8`*y?5=!1e_owe7hX3bM#7Bs!kiSEl{5Dxn4N)I4F^boW&vGBbcW!)~2WxW4+~eyz z$r%zAjCLStO5O|jMHyRCEu4WU4feJ;^m|hXNwS7s&6ZMX`7J}y=8cN#Vs>{VgsGRA z(KSZl{~tP{k^$rX!7xDK`VV^O*a2GkvL%W1i-^NG=MMY$&l}|q?^r_lJtuCLm|rqyFX=CwpyP~l>fL6m6q81CcEeIRe_Fr?L2lYfBtd-q zS*Sve8OgQh!q}>2-#AE%3UYTE*0j@)MP3yuaLZ~uCuE|Cph|$sCbCIwC-zUvv+$E#$Pp;ePcpyxpmy_9!rWq&MpSETVael{FHwVJcg(Z( zUD3SStNjM(A03wLj{2ozv1Cia-mN#f?hVKFc7}Q0zl>}7%GOu(9XrZzv7?s9JNvk( ztRh7;yoZpg7n9_XHr-sAOt&q;q`W5U3PnpJgZg8GdIARScAzSnut&E@j5KxQDV#*w z_lF6O;5uAf3|WuTjs~=I#@|(5yPa?(ZR@>J3~umkBY8$4Uaafc9J!OsJQqG%@Be%} zW=JjX;nbGQ^GPqicEJiBRs>EG>IS3HcsWqqA} zzv1TA`-7tAgzCnrqj5Cu60@mNLASiU1&n86H%E?gKbeg)K=TdtoK(sB9{Mmp9R?Gv z{AK~7e%(ySp`y0&P>7X}PZ7aS&p9J?2w>98pnR)VxfpdgnMPV2QBMr_2#l0)^BsZ& zoCLi+|9c9c49-t*0OPBYL`bTczk2q|LqmUaeCky@z;~1wcfRpY>z;b}&p~*8vF^|M zkomsOy!ro+6w>$zsxF)XSYmh67x;u52Qmt6#0p3&EEq7>?x3uIjmk-}0%X*GJRi2C zjZ@^VHS%rv=w(gDk>qN~@?z@m-DSC;D!c}kb2KX5{CwVHCEg?Rp-*$DyK)S@#qkbB z_Z92ax5JUIS1#4>% z3bx>e(3&$}hiKWLkSinw7cC6m>9#~XWD`mR{IH!tlAQKKVmrj(Z}+y#6QWA=a|g|4 zI7~!MiE@LN0q}hPso!j?wxC{@?rey;__f6RQ*U<4$SngH5?@d5^$uoDIbJTM1<1Uy 
za)r)AmOR33-m44rf>j$(WRRQABfX;=V%xYK7a6s81ao|;Yg{^@l`DJ{QPYG|3f^`1 zVhH8Qn%+ZoA5xVk&CB4ku5y&@u;xl|tdJ3=^u=IvxH>RM=a%$m5lY2Fi}R!XZx3L% z6*$lP9zDrQK?%HN*VtQjI_WpCD}6J|KTY@Ry|w-1Cq8QD*2@%jv_8$_F*W^`j$Dxo zxh^>}sZEbb|02C(5<7#}WlyrguYQy~@9!(5h)lJwwmszgtB0E!Vv55>s*+htJEq$} z^3khL=t2!h$)s~uQ^rlwbAa=v%$k#xt~lzHXM8@}{e3OGTlVe#usdNVZPVsQ|0&NZ z*3_oWgVlBzYl)ak7bS(qULD88@rphT`t9QzoF1+_|Q+~;_M~|Ah{ad!5n@N*x+v+ zvwV~P4~E&Dz)6)x_+-|*Xf=P?N>K;?)gUH(7tL}CB;JW=W9e>na5Np=YKyp+F)6^oTB;wwj|mwc zPd}>!R17SY?!~q3LqNG&W`+^#KF2Mw#v%AKmHzvx2`?~1E+fr6I(Or^a?|qRq=G%P z;|_}3(yixMz)feEMe>UfVy; z4(roS*J@{GxG!JCW7807E-peJL1N#Bgx`J)jgykzKQc-`vL~5T^(IAMuKOlItcf0( zZ)kHCo4~(yv^elCT43Z#kUl?&H&;#;>&zPeDm97}-2A)&Y;)%)JM3(JbdcJqc3C<{ zJbsz3S(NzXDtb_(jPPFUYuQn?mCo2cvDZ-D7)=~RY1U?L>dHmg>@+4nYSK_0L&9gr zC$xou;?ZN)2K${Z#ukJ5&l>_#XDo5k&$$PQ`Jv?0~^2A{$^8SF~7 zCPkp7d(ap=e&aAlI_-3`_}I7Yb?Q>xmv0U2w?L5A@F$fq97ZrRDKF; zT~C^q=~x|rANsTP+dM)wW?(*Hv0-9-d-}TO0hvcZ$(~=UA+%MNNMo94N@(3%p*;5> z`TWgU)J+1%B$+Pf<`3eKzlM1BLUXea1D!y_6?ai}(kTSK6)@?kZ zZstxqipCN2*BJ4i$;kR}QS|KyoeP5Z7Bkls(;`LdVqRVJBziiA@W%=t>WZvAH*R>H zCseWj#79zeuL3eBhFdO2JkBw1TL=72nvk(-7yrvIx)Pdg zd6rkgY%5hZ9#WtjFdO)6khH|MwG&L1w}Wz59Lg5(@SJ0!)U_t@NsHrNhQ_Whhp;oW zxvANE1k-v%Z_0c)0<=SJQ7f>C*(~FuEur=k8!A-CJp4v~8Gg;|R8v(Jnx-kaK5Y>x zZ^>oHd3`o(4Vm(28NZ;2#B}OTbB4MT&y{3tm4BHPz);p?N-@ckP|JJvW+NDouU=O? zCXXFe@iC>uV4~|h&EfY~j^csO0?AulAzG%lnsvS_POt5`DC+qMvfX)GvDy(n-+f(Y z(z?l9WQ~7?RU&83f(BNW^w#N1d0BF1Z`#Q9+Di~(pVYZ{=1^#_q?!yIhi4YQ#92-<~ULOXp*n)B5@heg={MRbb)u! 
z4(W^jpopx3yl$R)deP~GUn0-z8n}|1F{i_{?178t@3Kk6KRMlgn16M>;04URG=9@H zBK)uZ5;O>Bd7%lp^j6ATkbks=ur;Lj$3vY7qPC z;A}_jq+cQItL}p@P9A&YOb!;c6iReiP*N<{D?GkIc4o694u?EzeWz%ew}6?m4qZp` zM7Qwch#aw4sr8i4d&AUmsLC#^|7W6acj5%SU&@SP9s|7$ZK4$`$3DKnM{{S}wWkUN}9vT1Bj>H#RJ-xG#-H}#?kaMg4 zlo{B9@|%-@WTe*F5|o+5+=bb+`dbC>9%gRJNAgGsQXMeog{koQG}5Ng#@2ju&UMJW z_X%h4kDI4+J@A@F5zc$a#Nu8?NlptjPCvbyX;D^J!i=lI;i;m=59P;P9-`6|55c#W z!!dD`jf-qcUs!ZaT&!FO6Cd^_sGrA(;$f;5D40`77%gAa8yekKMs~c6kIi^v$F1N< zClfL636Z?&Y^DQyAf=c^Qq;UXvge&UHtR&Fj1y%%&;A%kYD=4!*{Tual$fx3+)6IC zU^n`6nu7NgTy8pJY)xhepB*@NSMD4?`X^68Ppc-(+@Ul}abjM*YO{ zFjbiI`|-D_Wd>I_GN4A2>LbcqRxn0sN%X=yHRE-@U5(iuoa36iL-V#k!|$7+;UQKj zrD$o*kWYy%5(~_19C76K(`o!$@Yck<>+tiu&rGRSf+x)L;W$(t zTk2#iS^1Jhj5V#Hj6M|Ok~OZ`+U~+k_XJV*JhtjTq%e2`+1B^+5zLFElfx?HOX=Lv zN{=N;r<6yA_ME$}o}6HDEY1n0BeN`13m0@GM!(WCxpU?-X-3|6=g6YgRq*jhEP546g8( zxzr1b&Fml$iC$DA>tKRqj!koTZ0br0e7vA+*foc$nFqoD+a%K7(BVm#IWd5vq21r| zeKxrjDnB+)>|B<~kv=V3iaw(eMaw4Y9U!K)Sk50HdGaL3iNO2RbW5?=5*0g+C)*rU zOCY@NEJshzImK29U~e?==rh7?T+IYex;6JiI*4;aOEL7PP5tBhH;X5o{Ls;tv`3yd8X1u%sF%Gk~rt5*HAMVni ze>c|Avf%${cwD@Z|7%I9KdA4<^(Wvb9wA-oy0fuMZXf@^3b>?q@FMZ(oJ>XX9KDuV zEQzBeMOaC%lnZ6F=Pv047hbqaCZ&y5;wfYIwE~=lt#I9Ho{^Zz@PH+?iTs*Fe_oUx zKvdO5S3s-Ig|aHk!|D{d!q@c}gHi-_>PLOstO4(YtA?yro}_CSahw7>He(_12QlXs znZT|=^F*7UqCA_A(>ejgX|FhQ6cXY(D57BT5+{*atju)=vMi z3~&7?B8a^3Wc3L~OvQ}od(>Gq5h~4?&X+O*=GBi7Bou7Sl*+=Ev}lI?33k1MpiI_u z^Y|iaMK6#QUbW?HN*VaK!kRYxDrh)@gZ1`o3VummhChWVv86cnV7 zdfFWaE54u|+aKowO>()(OyOJAvULyZroC#z`OjCn7slHCdQlCo?fAQ@h1s;Q^H~H< zUdGMVvP%u#ij)5_W0?xTfEFS{O8Y4=(MJ>K18)wzdU z2s;~iC?dt}7Z<48NK`u(zb?bIETU z1BRW*1gh4VOdr#|A%_~mar0A>Ounq}hG%X&&+d~+7!Er#_L=IQttCN*;shqiXO*CS zs;BinwhdDdSQ8@VdMS+c@xJGW6=m+WIN_yG|L3=j$9J|Si}D|atVrequdtZ?b}1&o zuE4Y`!6sIGk7fn9(H_bw@X{DwRPJL1_gWH^cB6WFKr$@ghm$37>@4i-1?Q$gd8T`|QL+#$A;kB?! zQE>htCTstr-KGr_u~V?Xv6OG~vVC$$N1(e!`MWL1eABr{jRzmgG-T1wr5{|Yx1tFt zJ6m~DSYpt~*vxw(IjrJ$h`W#7`;wE|g-sbnsUJ2?d&0^Q`J$E;MOthl*c=}!-F5Kv zJ+Y)LY1KTqGkO7*x@G!(u9;`sx9SM<+Ujx_SEyclsmfE6N~hv#OiSL>9%>K<@(}R! 
z$^>*ul7;IP&<>}3>f$e`v&IMiky&?H`awr^4Ps1y9*PrdQor;dh#;Dqml}6bq|r5QH*j1;C-}#|+5g>fku4W* z{v}N)VAeISnJ{A!uc|2=IxNXbiLuEJuyi)_S3&*!r17Nbr_LpP?$ti6UJzO1=9c;1 z8~3qi(IaAKHfifWF?b@e$)w9#LJ7of7EROz7`ROYS}{3k)vqTCMEkmkrl5!%~7vz!CIR-B+4w81B+}Z=;-HP{d{hau8YQ4b#agN|6=bWht6n~8Z;B+1{D#BOhkad3Rz z;AeP5vp^PK;;U5U#PwBe^=h298CHW?7ZLUfBPudG1yH{k&=H zi757pNlpC5LYEe$)n~!{T~pO+e3iM+9Ut=D&MEBxRr_9>2ALo**I&#mLE|rwdkGWx_maW(TA~tia zSY0PZ-$q-C({lZdtH8*gbqcg zkw1TLG(;(!3Z~=dTyCV_9c}vKeVv5BZ@{$izLn}^&-Ap}6A+ENv=Z;+dGC??>8iN& z4q}#u-Ei~)tKu6q^a*#^kCGGQ9$*okL8x_2^iY3=j2y8e2M^X6EkX8^*-cxD?7cmc zIsgTG6G%#L0h4Xpvd3Zj2-`iX13K@jGjY3FqQ`DB-DEJN9O%jZrT?DFh%Mk0Hs?bv z%llR6kx^9_O7|?MG)vXCFz zF&#}M<=uqxJwp-)lK%HrDfso-kGIxSojk6V|5y@fgZW|}8j7Q%=rtCRi#bPB^R(K_%3t91G#lQA50z38z#m3pA$!lyi>HfyW zd~HTi*L;3Cct~;HjWvg;@}q87YGJ+JRO}?|{&*FUwBma|Rg>(zWZq+TP=V+Nad*c4 zwYE#Gp(l-YvI~HaFKtK%PsM?1f9_U){;Xz!tHfKks`8!R^_QOuqb1<)(30HdxCtWS zwP7sF$)=@;xBnozY9&Z-O~{oU25r8BhKAP3&5TCtUXpu0k-Juq zUeWkrS}e5Cay}(@Uk81-^5XwJ`e9z<_Z9o?ScVR8P~y41FyqwSVI3P8`l*r}d4N^? 
zKS!bP$Y@wfL~v~h!*K)DEN?D0NeMd@`;!EM`sPB+v9axtB2-~j{jQ$?{%uERsCMIt zDQJl+{Q`2>4?&N?bwqRy?KvT3{XDrx^Zr+R4!?*wgC9@oY45>rFP#EM#k~Z_f_+PY zgD1Oq?2%9F0W3oGPEM{gU;)dQ1x#ef_>(B{SeV#>PYmWtLsL|De@=fcdTJRX4oWUk_ZTA4X zUYCmJyti)Ps0S}A$NkCAhJD4iVOM0i=C`e#{WIklwjIK_I ztWDhzWFGBHs1YpIcUJD-sZosKarIl>jC!(%0OII_cG3!gW#`vrV_=?mbl7yFl6Xh-EZ=(Hx6RHg3dXu}&7%^ux zZ_udWYWLZ^-j(@X0!Hacj_ejoO61;+P3Q&GGCSr6g8H{mUH4X(Upq`&9zB0P<-mw9`@8sJ(^)D`-z$4v{bR&Zf)rVR2Wxf{-S0dO3uFinIr?h^VBJnqO^8+&6ghEvHg` z?T(kvA|D9cGzzkD#|ZXw27n8VH@=fUqK4k4`URFeJ?Y`_lO4skA{DvBctJEpOiU@c<0F;q+);>OT0HM<|4WPlRd~7{rp+dvXhFB)+ zi=VW2ci;quBn1(izn!2-#wjAMrlM0VtD(^3hzZhUdr1I)B>Pkc>pYPlLW(Y$v)W{$ z1B;TE_rOz`H_ak8qi#}YQqr#Y8b17$3vy<10PSXSMK{`|H%-sMDoHZ`{| zmTq=GWfbkN&VC2KUHHfa{(#9g^JzHu`oik> zH$7Lbi4cE9UmSiorTXr}+ylRZ1~M@0dSj0^{V#v#Pg17?ts7VQn|?fl3P{Vg)tVdUm6HrgbpGV%4~Kr?H*wv%>w^OXusilH&yCa@(sK{;avp zhV5KF{Csky#(s8Q4ySd}1>H?4K5OWQ&0Wiw06rc@4##HS$3QlcM78b12kMYAMmOykK8 zueV%RO);*=ok=*0>`nXnddz7;88WgqVZXLwDo|ch+hx)6asy>i`ld6V!1skR@5f(q zWR!JMYb4xfR;9?Ny0lvghcfNvV>EOYs5C}@l(dwkCusT_yg1nUF8!>~>o1!Lo724D z2E7`^v)fh5>W-DiPwsQ9ES^0nt7_PNXS$#nPZc#jB5{d**kSf`*~AaFw{v8TIw&~M z`;_W?Yq-#A07IiSD=!a?4}5g5U0ovNn&a0}VON7FE6(M`VE9!l3RmdHdI52H=E>r( z*WA(zlfM26hh7vP#p>1ly`Cv7O-lWKs|UZ>06N+3U}$edBes$M>hPzvsFGYn^S-qD zH8e`lVNgqW*XQ$d$j!Dci;4f`ngIgL3k&6k#e&!J{I3%WeX(PFs9lmX%D7-`u^4+H z1E}i!n5p9|jBbVQPh4m^#7bMQm>3MVk#IqvM9Dw40P%?6R8iV&yQQ8Q2-=p|*od!q^Ag zIMV;%Bpo+rXg%ub*+9pFy`}d^jzsefr^u-v5+{ z_0eu4)7%iq)b&1=5F(ET?Glv4CiO!-U^QDoQ8%23C7dJEr7Rn3mnAO|_;N&dW@((O z9DHT658fvh{7uK?PNGGha%+qbu$!i`qD;=FhT?!TSbx&l+eL+A9ZRs99Dn;7+oRso zrhGI1lw2}$(T*$!QLP!cwj)m&uvcB9c>_i5oLoktFT;PgV* z^JVFU_m8+nV5n{wh3E&Q#2RDf?dA$=RMZFhPs3ln`%4!`Up7##UCe=N=*E-27a-sl zCwa?;d20&$;OQ8nDSp36+LO+R^P;AfDf->5CmBOw4nD)P2T;EwLLot5CHK;Qk+C2E z)Wg_S)-x=4s^|2Biq@q)6+{qGpu5x$7h?^YswH+@v~`-gN8^8WZF=d0IzF*a{HvN_ z177PoB;;uc9TB-4Uw-Ikhw~($Ysbc59#7VZ#=j>F9hh?EQk0DKd-Evu&;b-&a8rv?y=d$`_;WSpx59@R|KXUHrB}D0n3GdHA2+3_|`bIG@$dn}xWCT?Fy; 
z2*$IYk`KDT8#M?`-V8q-n7zNS*M6x3R`bp0^A*7O3PX;Dk=-tY(a&dJ!2345{kDg} zZyavwp>*|7-_7(4*2c5oBP(v0lXemoRbTL2zYxU`*R7;LVQ*0{mpyhC$o2l8hcKKH zZHeP?yit-&N;YOC*>T+745o^s#DIq??-xJ4Y2n%ZK;HdqJq6k(>RfyCN2WPoj35_8 z^~=i0bnx=S#~hJE-EiOXp>)aLSd_n5?j8JL10i4Iu2zVEZ$I)+L@mLdLe>v+jy}qD z-$0Az=vI?4BQ!<_flhWM%;}30MzSZJ56G|S{IjP@A4rqmPofUpeJA=~LwUDpU%!D* z9@mJ4aui__bf8An2tL7c zh*`y`t*(VB8ZO;?(EV&h80B#af;qsZYUN^cOSP9Sa4olvq4|x;3bOj+=M1G=(cNA# z>$n!mjhjOdng({iQ<}8c6YnZjlra82{~`0WR-CWkY2W24rDMcS0_S2e+xgEjt}W(I ztCjgMKgCg$ajr2l-ku{XV3&W}kgRN0}B#4U15)wp>v$25bY3Ak-zLkQ6B;!HmpS(KelqSv4; z4k%bb?mK zEc>duH59n#RsmV8mk!RmR*?Kt*m!mHC9&VPpG5DnEots${jHmB|_;qqOSN@u_ixTei3ltj=m`kZJE<*~yMEnPcSG6oxrMeT2{2G3)(uE3F z^U!ZdBm8{OWFXqgWt?Xvk!0krht!j|6lV5uG>)6f#C;mcdDjunh9i?T7-%SaMwz;A zXxwCgPW)!(pH+%!i-d=DEQS?b2+^*xiQ!zWL?H8GtX``tvVD3iu_)(){+Q(RT4QDZ(e=_RYnk8szy_(VEy z8E})?8kA)46vA<|S%@vj10Q%~A^sz759eEK?21HTfajDaW9UJ$unVRmo@2&|FOuZ( zDNt10uW`WG z-oeb9{$SAG7i6Ym)mSb4$iepLh4~^gwOD=XI@?Llemo-PxeRUi>2lEcP_3MxM%jFd ze6km>$7}42O03sb2&b)SiLx(weu&hsOIGMJnQo~0?rh`OjA^tgKETtG$W%M#m%rFj1%>yHiXfwRrSaA>*U!j3P@fT^eOx*yLpRWq+i2^pNwSEYldvDAb@t7=#|h> zUgyNl3d+OIyU7AY=4UteSp45})IHr>Q^WkdfIZC95;Um4z2=kRQogiGEz}U=z;^Q9 zu}$JqXh{uqwg_aI5v=ub292xK&U9f)ak2#$xlw7kQKD`ysH7A`G^<@h53l*TjCv!_ zz`lUjQA4>}ILQWqQJ%-TYp~6ZOOO@ao`^IP0i^32wt6i^wN>}HGEdpW^%mAD?V%2z z=+~nk-x=Piy@b3_0|Bya!A9(vfPqk?bVvk4&uwwEgj<2Kb*pngi;-^O9`N(a!Ts`#P zeAA)tFKV%UYg*(|$24`4S`E>N+Y*0!zL#(oL$3%#H6Y8kb@p2Ga+K(rAT$vXc_sz7 ziy|e}^pT&*#C*RKDCpDasPDE!aAikQ)* zGSA`w9R0ACb~xi~$bEF+G_GA~*bLz$VSaHp z?%#zs_}`rscFTJx7815no=T;wzr`#q(M^s0pWF?6fVe*?=nI2A0t-Br_%-tJ2GcP| z5Fw>ul)UM4;L}O0k$^D^%ajyBX9?;uM@3J1`pXbC)~uR++=A^aP8C6AvI|TvIaUp7{u56c!wVY<3y(=tqi-At+8}y8&H#2 zUNQ*9zN1dru~m3e*3nN#1ZAE(nP*2(r{yR%rD0Fyi0>39RpjI;@`5h8f{RVH*d8g| z7^JwQ<;o0%7FJFX^^N$0oJh~M!87Au574oUkyDq%@-$xj@36%(pl!UF&k3IIt=YoK zfiQ6D+|>L_+d=v=)OJKUQ!TSK#+1lHa_`rRysM>PpqRPO7(WQrxwTTCcJ*%)A2Dg! 
z#&Yog_9ECJ1?5O2kWE^>zYOL_W9~4XUWUr>}h*WAk4sl1@xKb;tep=8vVCiJcii$~WsjtM8W{A_5vIY;REz zeRSY#{@pkENUJx+;>cCi@1QI7^w*zf#~-iL&`d&UmDeRXh5IucdyGP{9=fK#K!9C2 zhSzB^ok4r}1ae7XzS)aK?5%f^>OJGs;d}b@P-ki#{6~aBX+Vf6p50p}rI~gFs4%)Z zy?S`wRaOGoiDcl!xC`qOm@@V(H&boG(*B#HT>f&2e@1r`uh&gbJJYJ2Pl&V`Gvhp8!6R|sVa$MLktEp30B@^ z2Gi%FO1|q&eR+fyi!?m<_x5p;`IX+V8()75U}?(Iqsh66?VMBJii5TdfGIcykq&PH z?k$8B9L(2NAYQ}DZwRt>}9oL_#GFB$kyYveITi zH06u=ls+{ES`F zV>z$_6xn6gFz3@j8GE+3)kwvg7pm7?J`FGnm4Jug>jC5t!r9*|TR*4;p1(Gk6+Gn~ zcT2x>9rnYnayi>D5z?GKO}%h@(A`1F-(Q^{c^*hSUMUTqZf~iVv-ay8Z97VuG-54KIT2t3MNx)2f}3uAJG7obS}c{&@-MdK-Y|v{88Lw$>(A>v8*@p< z#ugI6firrpzVxdmQjN(RE^L1XZ){=6P=E1m8p~9f*tfHhow-5~`sbdPXQLtUyEJ~9 zHOS`w!`@rPRr!5;qjX4j3L+BH-66A`E8l;i#5Lm#% zMZR-^KllH?-`M-+obz6s8$VnK^Lgf+V~lT%XUbel^-XQI7+?tk?h-MXp&llrSU{e4>R%a$+Pd(NaZYi z%C?#3(;u$R6&|vtTHo|}FP$>}z7oFN=g&a|LoQP7JaN(^IwB7-0jUhI zofTx33>DBPI8~TH8c5p=p1Z_}MWzjtyG#ev#C+w&6zoXd6pNZ1lx>$Zxrpu%+{j#SP85nM6P=<8})r6{fH@vVE3tq-=cVlB=xU$$Q^%2aBeU z7d3iC2k%0gGP+NE)AA$>%C3??$&KElB%j#17_*Q$!t`0_+sgdi_E2BJvZ}9X1ZGuu z!M>w~-GF4*i2gt~ZXhQ_O*4sL&+r!CgUdz+d7#(Ja7=vMm-yVxj!PW!IFqEpwB}tp zm70BW=MTSD<>(y-KVisC<9Pu71dAxR^?w9aHy+s@!N+IMe+4_a^2+pYG6C5zb#p+r|WXSFI$-tXH`Z2w+#4aIjny%OzGCYb&#N?30x*H4sFy zq9l$u4)So~RUwHHH)1J&2a=WiQmC#GfCB`!Y>NNBZa+|8VHoFkdx4yOER>>E*4w4e zACG_kVSd|)?|f>ZzuUl3*R90N`lVsqnOXbw*R#$e>}wHQ+LCBZPX@epme4ALO?@RJ zNxq{9XTW;B5>-h$n_bwNL-=kqd|dQY7Ji;T&ZL`!B^f_gXc5J}Y$AXRfB z44i((G|_p2Qn=q*9LcoC^G+<}suo6RLI$me{nj;e3@aWNlQd=!gxI1S)ldLnDam~a z5wl~k8KFON7BOGd&8#u0@e_id>-4x!l5K^H?V<9WhcZm{g7 z%SVT5@!8+au%MCZK|>as$~`~x`3qgYsByA$47o+|lPLJj49|}~@r%;eZ$J?^R- zZiM)=3LL(nL8(wZAv@iG4-T;(Jrd>GKkEHN;MeA76H+%8nf@NgVaS3nY40R}TG?f72;UTSPTH)K?=9x+af& zKAjg|yud~*z@gm2oL840CVEp|Pf|7v#T`LV~nV=mv z_M%(Z2B*_sy%}e*$uThR{3DMz|K44;=3hfB@ShXRYBAHLVqFwjY-)Zj10>WX<1AncJS$F{LxCZ2woWv~dED{eN6vhX zK85~C$Xhs?hD!**ZZj9rq77kNprY{BRRN$g&#!K16`IZ^4~Sj{g*_{Ia&~nMvYe!~ zvh5MRJ;skU>eUP7Q>I3)739ZC%-{Z3;@!$Ow zdBWb>4UU&KkVHM}*gZP&HKA?tUtf4V?}LJkb^Z?ZIQdZZno57#4g0ozHE)h``y~Tv 
zhp+K{VBYo3hX5NurW1gu`QE=urWfeD<}#)FH^Pv4OrXv@3+(b2FKQCLQTkS(Y?DVN zyHnwue-Gv(^yJ5ENe$|g*wNfCldYGT{c6(B|L%_I1%TskzCC+>>p^GBj=G_>+hkvJ zZt-C12d%V_Sx~%K@U4kV8v0Hi?CAL4!l59gDq{XhVMgPa zFSL)4AdZj3ap-NYzZJ7D8Qq8>Wpbr2gPDvzPYT#7#lCkQrwZ}h+0?0 z=?jX|Jg%Y!-4R~BR}vW&DEfxnBIyazoVog^0K89a65FTPqV8|awjSFM|1&q^U~aV4 zr*>CIg7d`Y(IcZ@ydQ6-iwJH$()L?PZR}=ujX5Nrz`dusn%gsSaUZhrK%xMfW6ei8 zM>#ewK^kAW7MwucV?=V#(1Q3%IzA`eMtdmbTwZrucmBQz3N@xU*maI&y&-Y=fv)Lve$z5<3MtUa<$Z z{VbCFdV=;b*52i`3JU7;B0dk=G--N6*m8Y0iud2$$+7kP*_DdA>4e8GIaLhZCh93| z_bR(mDM}2JJ2?p2qTJluej))HOeqat z+C$(+Hb3aL^&@4vIcFPc>pi=9)Mt#M%zm7`H4^$Up=Srp84a32+K3&xNC6Rb@Y|pO zX2AT*sZv$X9gIQjSo}NL8-dHJ(b^u5n&X)S_qy5U$y{s0OsJfI0!AaEd%0=YgXjN} zib9P#+aUHM`v$f7J+ON{uaku)fb*?2IyU3xT^O)%{QK0w73TEXqCz3AG_-p#G*@)8NvtWcuSD z6)a?1C-?;$?^~>bHV?`IcdNf`Opnf;!7oydK0OhZqz_Hu8ZW3%ewt(~>I{d~bPD%a z_%6#I1(wn-b4TdKiHhCaIC!TpocD3q)#v`kh+oTtb)(mIeACvjTrhYv3%-9)jbFVnbr-z+lr8nHQm9s4ivw zbB>jJv(m_=nTEA}NBDc0B>bm{x@7-+dt|`rC#0jku4x?C9pRm6^t^*y>1l2f`{Mae zbAmvs$e1qQ6C`djHZ-<|>Md>9gK~~^G+opx=BYkPDoUk#4aHX+MV~g=Hff5lC>?GD zm-9W#wEoRpcR(9xjGDp^_xbI76}Uf|_RV9DJZUqJcWb5P6BIUm?v1tyX21r>tE0J- z{Soy=I41!^e7Z=XL-BSbOs6JWlb2N*XpJnbwz4{NC=>NfTNM_+E%6MIivuOet>jqq zM-l`Z`SIEsZ#SUlO4<4%bj*@ulW%w(ZBlY^=yBd=9K`0xVJUk+7WyMd&S3MT$v%e@WIkAV|F*2iO>Y-6e72GWFOG5>U zxwxtZ)I|+cQ<-*WLvM+=Ch};12-2GXx99WgcJG1)8SCVHg;&w^ZjD(N?q}Ub@JCn+^CPv6GNygM6H9*VT^WT{-dnza>JR2*@tujQM!x0`#5MOq69t z%93S9F2O@W|67G^&^mtu?~e-Ee@=O^dcs}KnO3ODxn|;>PJOTj`*A!4Q$GEnEH2r3 zkEOff@o6`ce%fRGCG>ziv5A?u~|B)bKmx!OG)R5 z!3iFY-^e?yV4-EQZgP|56SBQuHFQ)&@ZhDuhmx!YjrY4@J#h4`?)7o_ID)*>2aGLS}emaP27wZiAI`H8q=55`*~Blcr~ zm4s)Z-Y5~Q-*-zx81qUR8p?Ls(yyN6Npm_`nt`uj}mK3%qd#&5^vh zk~btj1X7k$4{beQ0?MZz+8AH8(CcMq3AMFAHOMsCth9Y0=9jqUJM*)t@|MdHlyFV1 zS9rEHM{1mBbcT@s7OUB)lW$}fQ;P8v$1GBE8Pnmk zP*RQ2vV*E(=(N0-QI15!yx8L)2%e@@F~!^LbGupE?NPsqJbu4K3*&U?msvGuX~aqT z=X>u5Y>>bKojf>}`0wew5SpA4UcvuY%Hw21w&98X(g-|@=xK4gW8jN>))G5w+GNWc zo&oVKlr>R^O=(1&7%9pu{q4uGUA=bk3r>E6iMj3p-jPSsyDz?O7D)~_@-A7rq33!o 
zE!WO=K{0-P;yqHnOyp(=$>3;?4Sn5|0{psq%{gLK7u$5^czL<0*nSypYoo z)iyE|P(&qmDE4%-@|+n54=@}oW8kWgH@55q7C x8lZ~IYNX}Q`Y1l< z7VE8COjW<&>Q7n%Mkm?|I9qoC)#Q< z)+9H@oO*c5Cm}xN>0!k9-idci@zAay%y zXxyemHHo03Xec*TR?;WumKX6CYPC*NroKTsrUQ2Ct-2WPuqY`4Enb2djIKbU3|*o@ zv{$XeJOjH^HOsy|2Ql`^qfkP#^jQhNNxB}!-bYJ&CEmd#jLiozb$AImQ6z)3nAUBl z2i{y~{>S$ZPOawqY+NoTy%QEvgf|SKA&|Bfm=>MUf*JjT!50^DO%4x4aXB(sd+UdZ zyKE+J^e}SZNz9#e4tW<*`zEHL@>=m)pxqzpM76gKtU z*t9-Aw8n~(gB5w3C%pN;_~9({f#GwtGlIuwmJ|QI-w=2s^Gde#&Hj_jCuk3dDCG@F z1OAI2em7ACeCHgv6b_Z79{n$#=kGIvVVaMDE~ntB`2( z=W4XnNl%vkd&6B$66NcuooyBP;SsTW;JL5pADR2vmngOOpCA*Pzp1N5+=RZkpT`JK!+zii;;FO3%HNWFN zyj&f*=YAkq*mQCaS4&DlWlFA6+xi`=R%6!u6mGdU?zTJ=G^~i2*k^T=8v~oEJtHye zdtK|cFrH=^(Q9Tn8frQ{c?Pi;kq*A*he1tLg|PE7Zwal_R)rTx6`?Q3%~>#}`t6x~ z2sNO}Uo{Kw2n6aG(UG2-cu*Iavfr>>OWu*U@#^7L@J}fLuA=V6+3^09n^ZTi>{Er+tXjS_i!+9MJs{*325nLL+ipMq&;l)T z7_vAyRup=VSU6FG8ENB4?A+zsMwxhu_benp_w=(~*U`)=6OU##b=xuME4MQDh`>#S z%WUGH+{`f1CL(j=1dJXe##VBMXf3&3RP$2~)wYZVaqUKkp7BMY-y5k=%#%&=C@aku z_a?Af38q9E^_I++XgV7$?|zk%x6C0zRfiYEYlaj&{5-y!NVFK{9Gr^hN$FMLk}lm} z?UrxKL&`X4qq}qT^r>>bZExhHRL@9>#V%OJ4f8QGcV_!AQ3D(9C|imbe76|8q_k$q z%$uwrlMdY6ZY&Yitj_5MIC;dT#tlYHS&Zi2IPAkGi%y<-frp&Z`Qli-zEMw+}+5*jm?7YCx;_;uH_8$yj^?PqP)*+*Q@p+k?j`;)R&1339G|tUrL-SCL3T^u*`QKF&C4?!#&E(1KRQZ$vS<;RcV9;jMTlelgUMZX-w<8Zw z-2G=oo2X8uJk=lMe>{U~@fiFN?int9!J6)Kb9Fpb14TInyR=>%mv}Q4k6kv=?e(O+ zxchtG#>JDPxM7D%{xl4YxBja!kN*iES?-DrOV7UIcw}rwB%Zj?_^0=JU#~poomTgI z8M9)gOfnpAziUeqDYW;>-bpdVOC(wj z6Kt2*u0T>RQ|gUbX}X`8M|0Tlnb`6{rk9I|!BZA5}JWBjuX9%ekdF$W>+@oGyXB4$>uv7NC2`ii)AeTr=;Tz#$& zay+Vp(o9m}GSlbIHtF_uj#WZJBIAO?9?uDFg6C?_+zg5fiSRl)IUwurX>DuwY*P-q zS$>tj9GGaHte;)_Pn!sg4a;(Zq6bJSPnEb-)c?)zCTfl=B!>b44RCJNHa6HSkhMUjolxpYii0!t+v z8L5rWhbf|49C{zKL1H#{1YC;%xu)`YMR*ISDheK(iB8&;1pK8FUwi^59uF~u6Gb#UY zr0~wyetM84@aQ?qO}?y$;GmCxvb)II^?4)r%RszIx#Ah_Ftw`d9aO%nDz)$@<9mRv zR^x(&=Yete4Db|XuuBb+Vlb@@eyjO)2*J|lTA!y1R#blnKn#>Tk@r3LDU{2KA!ZH6 zOfA9Fof53q>(H2U20aU#fF&73Bk>#kt%CJ4=^V4~4a*g9A@vMJKIeQxD!dpU8O;^= 
z3={rYC-#%e1n2xzRXV9-3yB}4-C7TymW?t-8TICw&_M#Ai{bU|dI%)>;1eQKjd zEVeW$-19A{lG{7K$(ZJ}U+%+H;euCPLWd6N^QM9=;W(+0bfm)l$}orE7p)Qkuy0U* z5HT)^(y83aS-AYAvpgim->$*|?DPj$b6^ky7As=o)1UHOO!50cs&SHVhXgZX9eQmB zt^FHV9i%pm@iC8&K|vzk!6iuQ^kBWq{6on6-PP8QZa@bWTuIY}n+pIS1e_3C;u7Fx zy~#VBjGX$PVA9}_>(PPm4GNMiJQn$`(o*oKFAa}gbJ?i&{y0;Mc!~FOD7U8IY|`5Z z#jIwvX6hL?qbU)w=2Ay5WT#XDpg0P25Q1r{kmiU}1sh%oFf-FOYAn>MRejL?g=NkG zTXIra?NAE66294|wUNmr8m}AVqR6}A9QB=aPL^Y(lD*9&r=%|neJgl2xP?j6! zdbmIOShjTBA+g5HLRKZj71zsPc)biShRhq}uT~W!$l%xRiXsNvgInvWGun^tGVoA^ z#i~6M-Ku__D&On{FotkBnddbMDMnP>ina&%D1*R+Gn-Ew0sqV8WR?W7GUdfjZh&8M^@l2SL; zud}a32}4O>*&p5#k{JEPWz=J{Hhm>m=+3(pP2XmBj-@G)D4|&gjf8hC;&#MZ6 z5OX2C11p$fD^d5zx@%Sk+5(C#?VPK_E7|?x;_v*VDVfIj=)V)YwW^;1AZ@rEiI`X# z9xszG*_RBC>F%w$h9GRpd{@ql0p4>$t80}oOjq5ESwSv%i>Svds6Ta6jgn#l8uIVc zmGYy+nZ^rt`LnUG66AAn(F;-L3yys@)r^n%3Kv8%!%u+_Yxp247!t`Q4FNq5$XMCO z26?929Wp(CELcsGz7pP8*yoduDajTRJbanQu&1$dw>w0l;I{VRh~ z=nph&zl`0P%#|y|eZ2yW0p$|&ieP266Pq6P9}cfO6-V;~@`&0`q)Fo=A^0;$+Y_xM zfaL^&3fqHa-8lWWfV16yhae^are-@fj3|LO{-Xrm*TjA!b+AdmjttnC_SIV=CjP@R z0bLGUZP6c0mSEbH^waeMS!`AYSPuffewMEM1{El57^OaD>!Dc7yK!*TeZmpL118-m z_-m`gt-i1mz{#$+DIz=dQyj-k1p3EE1p5fFVn>9D*wY+9hEbZgC&H6KeoUQ=Ku!>D zN6dQZYzb}`R;kI~vR?4zlpBhuX&D6MQV4{66p|=2J9C6NupS$&Hq-V>s5WbupDT7lw^VDe*ew4TOzg(+^v6^qOu-#j#t@fRkBp< zL1^j2;yW{`5?ADorsEO(R?OO?R{RLiiDW=05-`|XstyEuGfrCPwWTC(A*gLZ_0}_N z9)=D;8Vk|h-V(na6H#dm$L-~MN20*&5vlws4|;3K-#%RYkiG(%ID^gsZSORuCApV^1mJu2fGgJ)_ zE1bec9{s904vg9N0IhaJYEvYGoR-2hZQa=zrH^1%Lf6~jzs;=5GDDN>dDUKv8UT3< zv6MUtmNseiE4Al=#xL%_4^|XTM~ai+ZBfU5J(D{8`{PhL5+_qWkPd!q(0{_zgGdqG{ z|0hbuwcF(I5jHZR5=_3a{`{0x5 zd{En{jXUSzHGh3>WZ+ZIakJp{)2jE)<2ri1@I`4ZkP0PE;Q7m)wdi&MA92oJFSNpAC} zW>%Ylmqmuv%Sv?CrAt)8gg{kjO~9csNr++h6(5kW}y zeoOsT^bMi$U!ip>hZQ7u8QY}=M&gcmMd}A@|?*Le%e8r^sept0U-muly zgbr`|I`dvhBP{)@H(-tJ3|r&<+$1}=nhzdIIMq*g_F+KUXYXoDxeGAC`Ofddn>2lS z^NS9?!NkAY8~c8Sb}tzEBq`KP?!8!NLJq@nJge*M3whezSbWCk6Rgx-ib&$`=>IK= z1EruY<9^G7D^W=2_Xoc#Ac_t0&J`9e!76Wip~d-G@R!8NC5b{ldvDx$WWID#p3zR< 
z7;eP9>2Vf7LZ|~|yL-5smTAl(Mh`AU&0v#jgS@{(Ce2g({9JX^f|bnHf0C&vDy--& z1opIQ!ZZls#d6$QEWB05{v^*S)-YZNfHa)|h+tj&OB&vXII$o>AKt$r(?<31rHfgx z*Z-AuUm+j%$uN|)9IjPw$ICFz)<4Mb{ni;2@+0#EOy<9eHf}D!ZBH3rZb5A4mW8$!QQfj05auA zW;qXDJU#*h5jH#1z1U|H9NkT0jH8;}bjc1w#+9VHRyKBbYP*K zzY%A@%haxXtx8RlI|50{8HDbHAM5b?aaIRBP^m&YIJnKd6jDl1Wf2&_-8Ml+28h#1 zzddQj?N5)6(gFNgm1S(&j4;~}`6~6XppVYc2o6NA%#yXoQGQq(K{r2tLbGY74anG2 zdzBc~OLue7JckIqCsT_fvE27I3}1e{jt6tmp;hYbTVmte|B_u1KnX#wz}oSSn0XOB zVU#6()X-$+$OA_qC(nf1HpwaSaxif7o#8KIW2>P{g z?h#=Wj&&NR;CXsmWrupmw+@(AMo5ZJ@W*QrhRv@L2on{zB0rJI->*oScjryGdJ0)w z&Lb&SM{*H^_gN=#2y9P8cz#tNNr0TAjTZc9Tx;gXSw4U=u0Bvpty#iThr3k2=N(3A zHN7?!!?7!)Qh2uuc6F0<)wW%GoK*5Jgl|5dMmF{8wQ-lxE{IQOoj@NVJV3MX>Sg~b zbNjUv4Uc7%@adPo`@h*H>Myt@&p=4jX+n}pV%3wn58UhF)MExd0n$C8F^|0Rg{G54shA>L@Du=D3SY*n7}E)1iM{DTZBmd(S)aFSCJG` zfn-TATGXd<#kdrl6Jr5JNRJr`330TIB2s-*j>{s8ntc7FLt43Ij4Ou%_s|DPA`zruqW zkD2gmoAB3Gl9@aBNkws8ctR`Kbwi&O6+9|I3wy{S^L8&jSo@N=mif7TSz#vsJDWKX z1AtHdP%F;A3U$Bm!8-{EBGQR;5NdD=Ua)03kWS#X2)H9TZQ>CuSB)(~5TO=_0Z@#5 z=EmH=CVpOK$R9ABzQ3LNSB^z4cwVXfXYM}FjSgu3`!H%as?U9SGk>^owtiuEYMe&` zgs{l5JOIzcpn_B{^LE8h7;;@B&Ej_lvPVZJ4Se@#tw8>ntnkYxe?#}#Md=dx{&s~+=S%t$%6-#7F|2JoVJJu0s_iKOg>u1<} z0OS)7g0%!fhK=U=CrLzI{W4agd3c~Mrqy^wVhbc{vwm+D6yMOK3pA!vY-Z!crvwlT zIU?n0E#)$@y3wv$cXKFU#|bdGNBj#_yqNww2xjDpchCV}_Qp$Or&3eOk@i;mQpE<+ zapyIZ{hN;Oj~5RLpWp9=-}Cz~#d*L_8()WF-$mFS?6MJ93i(XH^zh>#fJ|nS*^cQT zgc?u@f3IBpi|+lF7$qS93tJE*WStsqG`0q?*RPAV`S$6$I#qTOw8x~y`^g-K(zln{ z&M+5{KONC=sb)X{Mfbjzw1VYS}Q0{A?h0T-QxW9+YCq6d3^$nG4J1{{1@8#5b^lk*g?5Z7tm6CTjv1o2mcCKE)qZqGsr*z!c(K zto3Lw*qY`b%Md|}zJ1QNdG+Xb?y+hQ#XCKgHsf39UZce4Q*g8Q=|&8?kJK0^5`D{& zHNZ1AM_DU%=V@{t9vk4)uzouZ2x=3&wM~Mx(0?%Pt!$XTV9cJltpt4{-1H!;mx1J2 z>*oRJScw=A-XMRGC$;CLi=x=nLoN*Q0d5kqGiVRV1+F~4l_rGI|7|{Z(u$TKN5E}9 zjvFo{T_whVw#pwpjQ+F|}qFBV^kyHLUJP6-d!uuEf|CUDe z+Z%oEqn^_F<8aUpnT0CjzG7-C(=WYdQKW!H0SkQYiLZz6{r|HAKHlh?VwrbUOBw$! 
zsHJ`av`in+QnawI{aAz0cWUtu)G8uXyOFXG=lohIaW*&$oga=6RDOvX(S4rj4ej1u zjNtlyv3Dx{dsVn;rEfBX#`bL=|9}Kn&E{h-T;E3MclJq1x48s8mzBhzK>b+r`=YLI zMujjsFG2`30K%yl%oTYzI4mod!8U-0LHCEuuj@u)J(~w23}6dIri|C3QpY)C6~s`H zix@VUb;r50-2Pv^lF)~h+HJ=`MyO`Z_uV|QI)9D76jpRa>8!nhl@IyW2SYFglP^Wk zGxk#%@a}GGC1#V6Yb@)B(h64`oDHK_;`tk-VND z_6IHgl_-5@ii(aS>HwkN*!Bus3kzO22*BmGD>-6QTvI(dJDYy@?FSCwMmbDD~Hz$Bvrta(FA+cZWX5$~^rC zUAONPbw;G{0!RYpa$@D0;^3U}mZ&vs45!Y*G<1CQjS*_9SV|SeEFmi~A5d zyRYI3-)x`I9EXlu!%;{%o0ZRMibWY}lY@UUo#$_($uy^a?f0Vf@G>RtJ}!~Vz=hWS z7j=py@-@L8i4x8Qz-#vo80~-uWBb-NA&l=2Lj;{-k_dj)|DX8P|Jwz<-VOK<{5shd zpr`4e1NN?THzyevf;lL*T58ysZ=dwhq*6`nlzNr_JH4p?%$4nas6E4lf$h?jblvk( zeN*Czv;q6UbpIZ4p+jR0&OfRiu+#eAI z0#~XT`dO6cJ75<+P#r2>?0R_~BB#b0X++b+7HFCF zhdK|Bvo>o2#!sp$d0t`DNoApIqCF_KJ=o^WdMrJU^Lp&CSjV>XHS74}W(WGVVZ9%; zC-lVQZ&+-u6&lU{;{V{PP8ZsAz^d-Y{8}r7kZ^J*GUmhdsL%6tF7d>Wm}i8z=&2~d zc^99-g5HG(?W}tiJ2yw7u^a6eydU!^CpNt2v$Viy2IQs{Inu}m%5_QLUdl`KaFm;Z zlU(S_5&syI1^pl@X9Qj{=(xF3Sm8n1qKZM*##8O?ex#IPb{e1WDoU0nPGuP1LLsH3m?lITH;1PCt1YkwUS z6kZO$J*_1BetNLuuYBk8kndP;9x>}u@bmu~xbpD+u_LdVV}J;4q`gHM4T3`VQT|_p zZRiN&%Aoyx$m#u$K><)TNz%C>4Z@x%IRgcbzZZHX_cFqOJH#hmkNn{WgEf}Xm3b;g zN8v@K0xEH~{$=7q+`=7u54*Z7cUbM(2Zf3(kF)WZ11E5hHjK3**Drl9(6|lF>7GYm ziA3uL!=`q50gjgat(Tdh=pHE?`bi>wS|*!r ziUA$I3CL%wST3(UWOmT9;%(c!Up2BHh}*a;Mqe4%`*!~cZmh2SGKCYOkM3@sta$Sm zUpPDsroDNE>}PD+6Sw7CpyK%3bxDWCxT@s~m4ixHn#8pBzA0?iIS^b`^%cIl=K%D# zkUZebrs8ehU+S*<54Ai=L5mvUV6O?c-s7mv(wc$Np5*pnNz=KNA%`uM4^XMfIx1G3 z($zWHBY9T?Nn-3 zb&#i8*t@sU)Z=ULa8(T6z7SuFK)jr?P5eB#_vy^$0>0+v2ua$h7-1TW#o^;zkk48VkCY!D8&uA1`AbM3E{;4t7Enim)^SI=mj5MZe6mSM5`+ zY0??=pu43PzhTgPOg^Bv`m2aArMsgawJC8HYcuA;u2(nzR8UUC6GysBDxhL)rEqjr>_oAcZgBjn30q6=Em=Mqv^vN01 zNg+7*NXSS4InaA)^I7aYqH)5lOz2POjmM9q8?xNNOPZGtNO{4)DZKpPh{zSa`-dn? 
zHC3X>Kgj<${(ftkww76;i^jg1x{e*&U5qhdk@FPe(q{_7vqW)&PJ28fEOMS1mpn!q-ktkCB8yY>r0TbhV6LCyGIE5T%7AiC_`LAgc~I#J}bl z(}Q0c20oA7ex5Zxb>l!E=tQ)36z1I3Zz2@zpr~o>qvgBw-PKR41S5H=zEqvTKNmUZ z9q}2$yjK{kKzAyRaX@zVp7Z|t$yRaC>`{@M+40<^4D5}n&C-;%G}%EI2Py|;vyG3; zx__tJeg{PlxJRJ;tO50SYE(n#r|rRcLbs|EJarP&C!g#u>P_QCL__L4V9966}dITJjP5^1MdhQdy3TB zSM-{wuU>G(IIIgHuYJeDW{J=O&cPtiq5@ZfCd=y?VDRUBdo2quh3d)wh#a4<+bBlw z-PHj}S_q+VQ1F`HL$uF;map5&HC(Iv4t_~_3+op>HZ+KeKLq@(vAvH&J2Fbxtg^2E zM%PpF&sNcfJBQLWfcac0U&ydZR00H$1x46jbLc;LI8&Vg_W>=0C#@}@ik<4(eEaN2 zdwEn`XKM6%(Y;y%b1oxYd=iaFk=!oZ9-tAHiJj_89Gx=Sf&Xvnk=%rPBXE@pPW*a#fZ@NfKckE1=9N(Wn z690}hO?KX%(+P2>O1T7()W$Np#(=1Q@`pDq66UIrU~DEvR6ek>wUfc7U{NC+pDQ|0z@}H z9dMTz+Ue;+kCFnbXFUtH!AXu5u+zCqDe;}dksyMbt+#v*>DDu}6*Y z{crRGLQ|F`mkpT-ghKbW6aSJ|L@Ta$c;IUT1c$HMdvy7)5_0!HhQbSjkWy~yup)yE zHQ`J^8-Nu{0o1&7Mu8_alUeMOLcJF17q>-6z8$K+;;_Ei7N^hsZ#d;ozci2nftp7y zwLxzLimUm(ZOF<}%M^taN9%S8IikHvT;Zy{>M?W!(QrnOr+|G|7W~g^^z^4Y5um3Q z_$QFg$NoSkXwq6KPIy>2pDBHtoS4;(@HDmT;+!{JmSd{z3qaIV-=4#pJOhS%ZqsP4IY*@7rLSue;+H{%vrRs#5 z>W0~8JWj1Aplj2BhLdTVjX^yK@lkxN?vAgB_ovEg(XUi-Xb!Fn~SwXr1GOK|) ziASIYuwU(1Ku!+dpM|WR;hs`|trtA0s`W}e|6EW+T*+P5b-U{5@C{4G9`_I{TYzv? 
zq4DdDiO~4Luo9u3p8a0_CSJKxx^K@^&jqFpgYbR|j~@5EPB_7N#l;$OL8E0q%0;I1>ORa768sG0A(36iNxG14t$1C1+5O-oV@fC)a-hMO}7oq z!j>E(;CkOwG}vv$5}?uaYIuw2`Q`K$2|rV4dJ$B{-#njwX;v8q=Md^cE2eKmzT)OYN|?I{0f%>Fn{&rX8L+Qla+N0 zKqu!*o0Ac^jUIRT8BIN7$?F$(fukkDmhb zBCBktYWj`d5qGTA^Ut%7Xnzf;APk<*bBCUSURNmgQBV(-POykE9xnkUY!vjYJHD0P zc)8tyj_`;!qAz9-e}~3OO=Q1QY<|#7w2n={q&eBOJ%ns4ME#`1`%TAF(JdC^T{Y(h z9H(3OEZcIRagVw6x+Qe8c%rkhpcH0q9Ri^JG$FqpsNjMqaUa$Ilf$M5OzxoMnlB?h zGBBUM;>!}T1-k;ddRDx$u(rQ_r;mROXf|e~X!?PsRbbszy8DQYyyW!gIq{9}piv1dy^PaDmd(}&#J z>17bK7O6;Ver+_l14*ESY;)RsSi_d+rB^jrC0BMnrP((}8kK0N)lZUum$~qMy9qXm?U>sVH$)I>WxR`yEk2g}(+8L!(B!F3;Aaz3W!#;prDg48Np0 zi@Uyy<{2dMONv_fA#sEjO58Hp+F3X_%hU%PewBJVV?Hh zOTClf>_zj9sy)9Nj-{%_VAVeivV3key_}}rQPP%u;*}+gA0-XtUy9`aYP%OvYmIzx zU?eRa@oEe+!|v<&*TAFtM$2=rV~NgCI`1yqoC||XdV$@Zbce_D7yi7n4s8U3oL;a= zi=Q|WtvH3?>}S$Os}w)lTIDIqq^yQ=a-mH?-z-*$%e|~ z{%wGuDxC?4+FW;1%-;=FKJK=ArSpl1zc2)U;8WjB!@@vWxJ$TKJ<{rgfN=7~YSoR) zpY~MX_|wE0N!a)W?+qRa@Z7(vZ_5=q;qP>y9{7Pz#J;a)oSyrR9yT1$vfHcC_0peM z`WmbmM@!%772mD@b@Dr?XDvvITODo+ziqd&GHKyAJ|a3ob|c0SU5*yQUF=k!>vkC} zbz+%Prwywe*jedxtsB4FvqkIFyO8mv#=q&?A`>%&`x9m-|870~;qiT477t$x&fp1) zUl#}A62}6|s*D2XhxQKR3tMOWA#La0YSCxLTpVB5TQ6qe6DBdITMP7;gUM?pzDauE zRVHuaet>uu|XtUBTk5r?8KR2L6nYRCH;ls^N9{8jCm_!qv&t0s-}?GWf5J}FA?xxC@lM<4F*$xAf)8%4fq)96~HSGWDXe- z&7aCx(L?4$gv@#-j?WHq0G&LHH((0lb*On6T_EXl)j899ozY5Ps8w~Nca&C3r{`7w(1Z1-MFy_#GGE_rMr5DSX6GLJ_C0&57~f#{(kvh<4zX-{#`rR$;_H= zwhfK_*6Camj}8zTbR8`)3|eJxV}_HuZ=gkkOey3w;5EC`#66$U@C{QxfB%cU z&j=auDm7s?tLL|yZYQ>dtRhxZadS%CcuOrJ-JAMB!O!&f>Msn3{ z-yQJZpXDk9j_~?PLDhrtMXdo3T>Ebg&y;cLv+O(`4B;Jf=qj+1bC%wpDAGSHo-3}0 z-Vml(J0z42yVz>F=rM5Wkoa8fR%5}L;SH@mgPH8as4o9n&xvuAuPpaD7}K#&->IH! 
zlur!UMbvHBYi`>4h(gxQ#b$o3Czh?cEdgG`ixJ!lP0}apj8eW@R<&nP_7fq$cyD^2 zA93JA&qCh4hsa&~p_xEo_({=oppxw#QJ)17B#QnSwdHV)JtW z9_b#TL1e+UmVE=BuF?5jBGqm+<#T9q(=yI^hZO6)AHYxXUw*JrrR{~CT1b`WHMxDl zka~jKu3HCwyU=6OEZLDeOl)+C#;&HN*p$9vRima(x~zBgRkXE3Bb=PFMyk{p4&JXq zTJe+7`o%ovxW(!2CA#o&E4_q+2Hqj!RxBjj9KpfMqmv1qTq2&1=%)Mt+ zQ|tOKDqT8A6RGM_qm+m!MFIg48`3O*^df>%0@4X2fFex<3q_DFg3<*EJ#X`3ecPdYyrIDCH-&k$`)nN zi!xwgM;kHy9}=`s9h7cd17wOrfMUB#wa_U3>)6jc8VzM+%KVjN5AE(Lj>Y*wd+xQHL`3Lrp{l! zP*HVs1I$$)sTN+9U^GRBI@oROXJ7Ku;4K5?=i+DD=Et$6K2!8a;X%gotsjvk#G2KB z+GIUj0e(#m{e%uP4LZ5{$BKl3j+MjSQk(eD#Q3mvR9ayxC)lK(V>e4kbBPLD<|hc$@cKM;fqpXAM=$^neBwKTSUZ$wde?<*hum9y|@KpCzv03VYvp>Ten~W9Q zhX>b#MN~nsT>4h+zt?jVI%J=RBducl$D#T$=6(xy%y$Ya&p-#b4l-Je!FOj~i(Ahw zdSj7>cV?ssjcmJMsBtNA=~P-(Xx?kFS+-nTvE33edF;jAH@9`yf7KS@TO8rg=C*vy zh!eURBFGG@7P#Kb1;rgP`xuDN9!VhjNSqb~V{22{ETg4V-*Y+fMWu0I+7rgdFAu>>Dkj@1D1XsId;O;!2#o?I;gCInVW3C(AZRr4KX?WOfc*=!oxR@zS>e{C0DW z=Krk;T^g)Alz#w8OP*0voEi32Kw&)ombH#5igH+%$WG{Ts5K5S5~J;|ml zmnIGEDIyp;j!5n8o)MEC4+nrB`JTA-NyFgR>=Bi-!d_>YIn~cwx!pMve*i4!n_LlU zQ@*T1wzb5Va4;ntKZ6Pqe!!a>Q9BW#Ll!?b0katk7>vWqlnrzU+$C&YG>-C_dpl0R z?o0_$#Joqk1&+Wc>;}rJJI=LNuJ(-J&!Gea83)*Q*Q3!50VNzeu_xN4H?6};Y6yGP zQgh_G!tw?~Q&HpmkX|>R7vmiA9SP1Vp2JJFxdp%Ml&p~hV1}X+o#`)h@_ZR=I^?vH z%S##DC(E?yS&*G01g>+z_rsP(HotH3vFA1dll_WnE?r~cr}Hfn4ql z5s2`i+@C9Rr9cC7*za#-C&`V(c68|~U0MjnY9AdL#v3zTSblV9bR z*PqD&AC4T8G@P)$n;{NShwV%}5snDo$s&cq_tW`i*SY=VTA=%oGgAo{J&|}&j@#{B zHPml<)Z_ZaH&#v$GoFH-y=UZ{@kB3LWr6cPGlpqPocQ3rDCClKNylRf>2do}l&=Oa zg4CS8tBKAt`x$KGvtl1(=_UL)PAEw8J7@x4yfgRxLUdouS41MM3j*LO_;;K8UOztG z&aRxYQtLOg*v%XSRx75KPFLtAq%4(N1QG<-0*XV#hcn&GC5@DdOfFcaAC53~xN6z) z+Ph}aJkVvf5Ahj_0zn8qQ0c!`&$7E*6l0C75*!Tjg(bKfYwT{O!tq;WYtfQjv3bKT zD!uspi{nM_B${P#kc+YT`mj0y8^w5gOPdbTQb{2(w6`==+%KmSm-*u2P$zD6&p*=# z_2c^Z+IM}P<}-Kr13=0?t%xi8(wk__H2JD_Z4dAsz6x1*_KHWG18~c0%zD!Jz8(R3 zFJ0v18^;d|{@E~1-rPY@H1@bK4SlU$y{)&! 
zfTMqLe-pR5l8Z;GzB>zj$pEwD&eg>!5NOfx!L*ym~37LCz2VI&Ov`>8AP~gadI&q5RU|wb_i=YuIS^tS=^C1diI)%G;|0Vai5mr$| zgHC_x(PWxHE%(gA;kM|k(Zy1?06$AD`aLoFKk9y)OF)!yoJJ=iERBBMI5FB$$$|R= ztYsB1U(#m2&p658EJ)`zbU{#2KqBY#gt{1O>l<(@HLp&H6fGYN739C`u$5JRXyOZd zqg};BR-DC|;ehBJA=U-8t!Y0@^1`rv%7Wc(Oj2V0Er=iIvU*f&h2^<5c3W&HN5V<< zdm6k_@ncj95g9S?im+6IJ2-k8C3Tcd7ZNv77r_tX<7gc%xb_|kv#lD#QnFC%9T?<( zBsYbC!43*G;VSaWhMo8#T(ZlT?N3!M7b=zZf9apmsshKg-6e7^L_?M>Zk4{^wmxhk zknG%i1jX8$-`gB~j7=Hf&9y{R7^f8EQbDuQ{^#$e*wrd%bp2E$x-F@ZO<_McoT2z0 zI8y<|^@`q@q3T?3K2sChu`Ul z-9Kz~Ph-8FMPiL;yJn7D_B1Qr#VemCa!S1id;%^ar>V2zX7e{_z=AdiDs*u!!ImshSDo=LrV1qPkxa0s4LL~L zbS=fNSzqCzvEB;jV!D&8xJNz217<*+W-rO@V?vaJUjR~6x%y1=jY8vq$;Tqv|)s>IKGfgB26_cjr2960pAIOfTchaf`l>498+;mB-Z*cbx+u98*Fc zthu9F<0_a&88+RVYPL`9M_l#?J!$4h*~pria-L1YBVo}=-;iw5sT7A-Np_2Dj!HMrWvh6h83DduK9q*DH^a{L2V? zgnS8Afi#>2#I;FJK>))nSyK1=>*DP5o+^{KtcIl)!m~@ts`RRF9)M!&3{5(8_|F+6 zG`X$Tc#|5=qTINp9Q@s9A5LUR+t!}&PLz5#x@?5Eez9;vvpOa_3X})>A@M~sbcY=a zj$#t*t5!ZoN=*B@Wc^AHefBPG^0|DGm?D3m;+W6tC05_|5)-@(&QH~rDX-w$-pna) zFPwJu=&+IU26osb^}E{cSG_&LVj)&-cvHUMz`{WR4D7gK<&hs!l|5d=I-XF>!R8y) zkb_`Ofqd`eYvDcWIr0tTI!eQDUB1j29al862;WuCMzjVnGQAr09v_X8%9FjU;W6rI z9)xjya>;CMgG1DZV3mNJJaJ-g>6^dZ(Q|hK=Hu);PUpomOGxva5(USqSc*-22HQBq zyjX~U+E#!^HuCGUm^tc_94AxdF5a?>?d+*{uEC~9Yd)`5QT9a2mw&>y$6vxl+fGHG z%HxV6^i$mrD$i92@(Wq`ba!cd->j0sjmOL2Kzo-~75~gWgA1a(HekCh&u^dPY)CzJ zV%|n;N6>U8H}(tliiq=k>eYjJxqE~G+q2NxHH)2a{*4U{1U>pbt8e#Cdm#IsWB!*o zL4 zC`ShaCm`Uhz{m96(mZdsm$eD7Qfd4!5k*%<<0t>Zhi!m=9L>VO9R=o+F_wS$!4s_g z3UeA+Oo4;Nu~SrU$<4hLnzX0Cwx|(+?~i@JW@WAyU6&^xJShYgT5;UvZqCo{sWB5W z4*51w;Idc)##bUdw>X8oU%4j18fd;JJxZ}&Ir!Le;xd>d6)+~uZ?w3kIrs6UC2~*>FyK25c zSI5NBn@n27_722IyXY|mPQEr)oc*;miLc>?U@YHjF>QEsYWJCspfFqEz^gC@xyEne zqRqimq>)D#`KMiB^J6m+%Lt_xDl3jFLL1RLt6Bl$lG(HIma?SztMFgW*k$LE9f26* z`Vh?8)OM4#o`tz!$IK1r1*&m}Z& z4mcb)`U^G1N<5UJvJ7gH;A1K4Vf5l$uv2aW+!FvN?!m^eYWc{B9>acQ(rWVh^>ev~ zTMd*{HofoFhUlPD_3oE$bLA1RyAQs%JEOiFD--61-*4F9J{$_C?$+!JdTp_5sf}E< 
zAs4en^ypFjybJy7s#^N2^U9>fA32+162*v4qC3;=09Tp@CN(WeMdtpmbrhm%BI#xr zST+<>f{%GY<~LpB`cYGqx5#ATwHqohf+y1MMxleQA6xGZXK_?nGqHi{0no!9$1;xxI_P@5Eh3( z9|NW=G0yU2lfSW=cwWHd6Fdt^r}2>i&B~1O@18&ZP%wA_mHt;>21NY?L*5bB*D^T8 z^1GMqD`PJzeQ?1fgM@e^zetq0Jw38H-2hcz5W8^vUHbauq-n~N!sU^HqD%Vhp>}5L zc}=13+t?I*P97V(5_#;KO=O6SlX1W~fG+LHyadCkkviy%@yDRpD3gxUo73zOq3vwV zul#K<*|u-iZ*VjxAzor&jyr8ihK&pIoPcd6wW3owzJ4Sz6x9sg#Ba>v=jGJxdUtWErQT#}av0bDxKgZ{T zFc%C_ytAZMIw|Clr?2J4sQhy+7Ian?>23X~Q~g+!pQ+pyVBriRET4|s#k)i8gr|ig zx-?$iNKG%FxWqmTU22wT(3UQq>3WxbFp&PlcVf+M}m0pEoF* z70&G|5;zPsudj|q2R;o`+h_v~r@&U6w_nfxCbPl27CB={&U!9H6PKxif&PdyW+V6* z9w*|NJr#%788uCjK<~F&-1&(G9JwXoq#;C}9!z38(eaX--*zv2&S=SI)?&S`h7R`& z_A_6ph2W87rTcL1?Iy+%FU22O(~ez?vv)|744YONuQwcO-uIprv}ghL^Us7P^Qt$l zlD(a_TOT#($C$NYCf}RB$WNcr_8Aj@^P$}F^gz>>^L=snqPvOg5!R?2bRMcH;-P4( zI5IE)X^ZOQr`*D_RJ=vH>r`sI&)crzR`F${JAyy=RfqA-on-qIUxhvHP~{lb;6>qK zq~d@T17gm7ZzE}nF}P{c+BBtCkuu;h72b*r!=x;_%ik?X^*PN`pe6bV^NF+@3GbVz zyP*l2KChM#Q~M^Xd{($0HzBqX*Pwkb0;lg!UbMth0ykBu95)Hu4eZp#6i>ebq}p*q zIM>kaQBbaRQk|G6l_Y9lt>~56{jW>F)x-}y>z{4&p`DNr-*0s8Wf6|~MEd$O#2+s- z+uvO64(1x>fj=@kS63<|6=X=6gSL;{fFsFGdOCeeP|uD9gHe7~Zio?0DpAmZxH!x8 z+X*S@znqZb;09gYAI8x6Sf%@B!s7oQY6%jt6M8jIrXw6^-qHkV4uLf5ETEAPCq`eh z{cnv#67W(c0xH)JA3!m6$pQwDqPdOHum)`JYgR++>NiYF(N1f(;_xHx1qAQhL%+ms zn4G$-q9hApHuM>LQ~QQV#yIwA17Fz*>ereureVo-ZDlT52#QdW+F3U0B?27=O-za9 z{!~0gOUj{+?FC@`4X{Pf*r!O@DJ=r*N;PbPLXudc6g9iQ)DrY+{*YCj4_qRy6r}mA z1=Z@^)hIUZ820SfYu{{N6YSg1lGb617>M{JR|h3wU7_(x)csA^sU~%dVJ01}E6KC3 zXpc2FN5A9nV%?cE0E5+lK{hUjG|7$F&iy#SVtT+AIr&2yy6jFbJHQn&4OpBWW{9=% z*!0dT53k8UY)ms;s9Xqbi_u{*VC(v!H@)Ww1X8*D#qW*)zrnkznL{a##C9`Mrs>_X zBBZ#3TK^DewwXiixdkba$4WIBS=gfi-r!mKwgmIM=1xX?uuYiv-hNZkc<~`#7MHaF zlW3XI+!{HleV5kUJuu=kUrB0N*&V`i#3+a&(%xI zsFsCKcFUm4?vazkVJaHVXcY zt9un$z~i{(95Y`|{^lJD0EtELu&@rF)D88k9SZ;0Z>9+id7Q7AGx{3%l=nbF8;yT6 z*YNLdK{OkX`Kdid&1AS*>J8@q=Yi9y|JQ-j1vua~CUz}eolZ8>pK(TOW!j=8bQhM~ zal%!m;afgL7^LHSCDg;fou*wuZKpO4&YlrHTezxwr0&|veV>P|B4cL*o(U+8?>{gO 
zrwdqSUmTbU6;M{%m*=-ro18r|5D{)GK|B`%gvZ>YM9!5vW`nJ!3B@|XrmAkRHFXGA zLB)fe*#J-Ctq991nQ^~&kH2A-gx!tIJ!6MkRE!-)0qVG9cd$0i0jGb5eQ)9Ftooij zf28SagMqpmuz>K6tauA21P5p?jDEfh5X8WQ#vM39c%Bpm@W|~*7{!>k+Z0_PNQKii zM8(Pu+fc?%$i!0SW$Q9ozVIQ5JvVA&p9>0k@g;-1@53%WYhjb4u=g#k2yyJX_pP6F3_yt+X1l2cu5%dh z6WpvKVQgFFDWNXD-xv0RZD!-}!L{D%*>7ZUJ{F(m&N$hl67$~I#b5!i?E$ql?Bi|B z7e&kIp&omOV-k(*5x$fsn90MYt>arC6n$f-BF>Haan;|D%3mtWw!o(b2^ktdcj#6Gi_wF0C7ls(kbF?Ge+T{VHPuiwm5snaQljkSL*%NQ} zr}3zH_^A;b_BomXm{PAw_0%g2Q@k^6*~5Uo58!A|a-SV+h@NHRmVd#@1X}&EUUP2< zJ{28g*6xOC8QR#7!&DEZKqKycT&%*IK5IO-G|8pnv}%FkOA(2%@67NCkDc%%!}jKF z39s_srSEsRb80!F+DioZ5s#xe24t{m-GiJ#sp(46Sc~3;De>A)1qpAcg~=#I{h7VK zI5_%oi1{g{^(MZhbs@)cMIfITOeGP*RorTUU(s*(Kz{iKwVq;=JB}l_3l-ZX_|5($ zi=`pHTY!B=)Ag3gMTY@-1?UT*U$5Q(#+i5)d^8Yy{9j;g*7H}YRZc(J&c{dThzsYb zc5PR{`}Jav==oeS+V}os>r)|qzC-&HPlzP@pVrOK)p~Lh^kGIz>7nATLS+8Ip~|P` z&}g+oDSY8KRzJ5_2;6?759_cnatE1Z{j%JBfO2k`&KG5{G+*aefbVtRWogJaebTMu zbVAbb#tN2-s?{3?P_nYGi!b3s1ZvXxoi%ebChcHDyd~=1bq>Q~*M?y*Qk=m&%)}9E zy}i7C8eAP@QtD%Z<)s;SK$qOmrPd@oD2bjFX0aGT7lQ77R4R&CtQse$C~%W+o`%R) z&S!l8eyow%hxW5V%$TuA0KhA+?)3=(U#KzqW8-4K}ok6~L27H~S! z2=eYjd8#MCZmbD={WaeOSkVb$%PBSGF;)HZpWH?q4NaY+=25$i5@R+9GH|4Et(j^* zXX;d(g+c8DAP7!Bzx|xF`qLTHNf;w-S^&}v<=dcn=L-JD0w7JTOJxM%lMxh{*{3ej zDSRaBZ@lDU>c7xm?ae3qlMWG5=-M-MC*LcTrv{cB$~mTFq0AlixAkSn_sw<>;Cb0f z(`8TiV1W7TLpm!jr)FSg@-OZE+#=9T*yXdhn4SRxA!d^IIHgXno30 zv%x!5BrKBx99RG4eEVBbMmypEX{-5<=j?xI5IHxK%=AlTa8t%T!&}dK{X?eQDB0sH zD^YKCUgXaRy*e4tPLfp(m<;ah^TTQLMlfCF*`M>XtOKvYF{r6JXwOcw*F-&~Kn!tM?VEEU zysIWl(3H%R{s5=nvB`~Y-i?`FW1ia=jwt9dbL;OD;V~@x#a2A^NhgX5YNKD6ADOO! 
z({o8LQ%9GDVAb#F5KhfO)qsa+L%B^Vta)K-ei|WqMZ>_TOhe71-QBtzq{S!BGVotH znJf{R364P2b6k)$hwzHbyk(n8&yqSsZWFvKwstT=Xo6Iwh$DX zH?UEFJI#rc^B)P%Wq%JgM+q@6C7(6KO_uC%Gy_~<1pCuQ|2Tgd?&6~K`19Rx{HiLo zfn7Y;lRxl<@*KolPq_=xdR1+$m0o5)V`mdok{78V^f|0bh6M%csu4X0{9NF6ehOK+ntbH?Hlw)_ZnqG4$sa z*0u-?X&UH;Zh~pS)^?MgyAc*K(@Lr7*eq5 z2<)4Gdro>Zfgti9X6!4mdz;-GP-bEG+D|iixaz$|(RASbc1=EmVyq?#T265R{!o?} zv~`dmoA9u+Vy|}{^fG`l!=)#g4PFMhdn*7G?u}LWjb7%?6%5n+71*7EXB| z5d85ZE4vIqk)y3pd;528%(X*7`0Jzi2zrR_R3O!EMEf*lDdlAj%ZE0_k-kCzv(zh+ z!Ed?aZdJ8xSB(oG;s^-;zDw-$iiQ6=kw7I@jr7^&J$klu4Rh=e4nZEor4;z1tidIsbXAt`pe^gY zMSZ;RpIkhh)*)atuf{xVJQ&IllF>LaEmY*kEyVs_!Hw+?wRTKrB~-oY~HF?>wYP>B2@msGGO~Ib^c%+gOLFLx}Y;G^DfxA zI+%Yd#pjjf;0uxjTbbYTG`?3E|8B80um`s3Uq|+@8dzfz@*k(xA3#F}P^6Ejl?>=y z&kt?%K#i#M$dkxixtGyBdhkZ9IxAdc{khI|D9S{i5gRIj>auXyEJ7X?MwN{%q>y)t zqXRm~&-b1}2ZB3_1d<^4e@QHzWih%pYpd(Nvn?zecJ3`_zu#gnH>=-i@i^}Jw*2$o z?BAkXcxt98e}tv>QR!D0`I(RX^$s{AIZr;&v|r@~0)PKM)^7f1Xx47w;}rOBrPOBI zZzG&P{-8AfFWPbon~tZ)LTZnkIsVt>1XuHp=}8KZLwA$k(A0Zx;2G?oQ=in%91e;* zq#wK#{7)I-m#%YF-e9+r^?g*PNYrO#w^4b<{NO8gMEM^iyfnW^YI^ z`C~4}xRi^Civ?bxHnp?&lAj~g`M%8hYGa(aSoCtvi0YgbS8Lu$bS(J19Prnc!0bq{5P+l!(w7L)VE}fmL5z6~O+4TybMk zGv4VTf*xAk@+@p|7NE0mpH>*KYP_WV74*c+Y!R_Ied&2_(#7&wUwOC*&knhL|Zqb|^lrGEv7+qCIkE);jdN zIR7!vkR^Wre}2k%PF?E+z||lTiw^RH@2?# z4WM1q@(-@HvF52Ym)vusHt)p5SIm9-)*cc9_yE?CIpqg;N_%wInOx4VfPX(n9D(#p z)XoZD^s2Z&>_5dv8N@a(N;}Np<#GFE<7T5&Mx5{ej-8}bP$`yevQ_(! 
z-FMcb5zf&++b&Ea%KNR_OTKCFZSxy`Q6$ZQFT{w102gs$ z#2vSZeg`3lP?D!{CjWAoqB+dX-sX2A{MN08EF6XbL-C)Aadg5OJ6A>c0PoZd`21#1 zS}#SaZc~PSleNnu-usci^Cm(@oVsG2e%rshQ_9gz;yp%RwHnGqCax6DDiXa3&wb8>gY6m8>bip3y&K4aA$s9t26i?PlYPEJ zsDgmKo+=VvFoSdY%F!BVg& z&H18t+buVs+u?ZpX01{xPA_VQAM4C7rCuCc;j{lW|7SB9uQI+{;Alur3yOW~Q=42c zcG=NRqx)^0Jj{~#dA4-b1BD&iAN|SyFvx`^(e9CZw1|FM>oQQA>yV$$W=4OEuXmVpitp8L$H}f-F zGd-Vf&KE!|R&w@*8P(B}^a-6J>ESivb~Qd%WxZD-F~9)_eoE2#-gbMvZogF?hTZEe zj4Q}##ECxFHhPr-e;j`G+EQ_={YVLPw)QxeTfj7DukpgYYT0gliwI&%xlxs=+*tqV zD3f5qIM$2_Rsl3gV~cFDEF7>a3<0iI%@+)Jr)z7Q4(;W&NlL+mj-S6734E2UYjcMa zIQ`N(AA~{%$c8unH^(HJcdBpTA2Y2?p`nh--)j?STZ#x*cD0%=h=jeglVr@B9Q<8= z56CUjd~e@;7UlrXZ<>_p)fUFVWB*fsqpEisfoe65D^9Vg+XHO3$w2@M@E6xKJEk9< z`b|a$+r$2l(GkDRiw!Q`01Eir2A~q3wh9n8f3tcmJ)c=zZdmM1wh;fQp&t4TW6Y>) zps|{7O1jeFn0+{tmp0F@&h_DOiUL*Ar~2#R+(X67va{;xi1ZMl@2KUn4b_dd##-&B zvcDu?M_U_wQbuf6LXJ{*TE|NyMm;gmBK7ek!_d=DPhGys`UsW4*xuTbDCN|sS?}el z=J5BL7u5mNtak4Zj0mXe5^n$r3Fp^ZCmN_8nnj8NlZfG3mDb#Roa&0F#E6(HxT!c4dhzcx6HgEJ> zc<)|+!hb~Z8p2-dJVY$&#KlP2hPZ3N{P%0=m@E%m62MCxI26J2_>8_JlUoM=W!(k6 zz!SGZS{{eUSXW8f6e0`96W=;^qE_G~tH|%|ReHttDB_-X{ue@rvN#r}jNVkF_A^8i zZC5W4SS**YKghY*y;K%T9u`m9wj3YNe!hMczGtxe^bE-XdbD z(Q&9&w>@Vph)2DO}3%jhh|RyS^PdJ3|!mw7pdTnX*iym^Wec7HiJj!H0M> z9MD=)eUEa+$ij~OUozgxl&UnLST4A|g}!7~|17PRqub_ta3pt7aQY#lCj?d1RDE5g z9M|T3R=gzFOJqSBO8=Q_x#av}*~1@af0>V4?)shVzu3vz`D1CmsUII_GME?PDVa=I zTwdQkHA>M-N%c=Ne(Z?4@|Y@cnHY+}TV`?dBh2AwFRZ%X`;bb_=a|nE{OAdE)f3~u z%Mv<^%s($bUqxkD~@`+(b!4QkAw>WiWoRYNcosBBl7m6mRgitZmR6lJZ zGTPK}wLFVekb#edq)4eCD6Id8lBf_58<>6f6)jo{LHlx_aWOp zIvI8P;~F=+bN(jv@TgLmFZ$EKb3x;WY?z4r_qKxkpmY31a8865W0HE}{1DDcvE zl2`dt(mrzJY;#5{Dhb6r@cvGY`SC(*fag5#gTuqCGM0|8#g)YFtqj$N@Ch|P&*!HE zBwsFamy92`S|;7BdOXOHK;3=Ee-z=D)t{>Dq)|hXr`~F3Kc_ayb}f+6^b_YLb!ln! 
zLHmb&&OLXJc>n$rcuC8Q+45J7pBQw*JRhP9mW{3tzj$UyAQ4R7DM>~bJjyQPFX zFXrL{7mYSG?Gl%ylbo3jdPCgh(H3IU+DzAG{0331m&J!HyCc3FskSu6=tyOLmOE2J zO>7kWSq0yOeyNt6zO6!kNV5Lq0s+0@zrhkgrEpWy_6y3#YEjgxahVcQJeE{dlRjH` zjk38+Y#cP|AX#ac8S9K6S~1=l*Mm>p^?rBq(v=}_`J)$dyL111!=E~S8pwt<#P9D& zIy)qCvrg`)u9JR=^;~v9|ND~LDv|Ro-UeCjtNG zQC%Q5?_73LOqh7(HX9`A(OROgjQc{q5eTPl!llrGZ7BO+i2WA$26{*qJ$JX}IB5l% zTm3fc&n3)ISBRD*^S1<{-evFTAu4jO6G3U*(4!8v!a7;M|5%b#P}ht{^`n+ix0Yom zdXg)kQdaiwR-IJ)2ITihHy5@STIO5$nii4zMy;OQU$%TbFTHE~OSNxH-cKc|20q4} zm1dO}d*JZmVm0K?+dh|Fqa(Dpd}xzU)V8u55}@yH+0lPRrLkRvO}#HIAnw7ag3ggE z0wk|+->SwXa*GnvwIVx{1HTeN9!m6fT3b zsgzB0CHYHtnU%(3+}o_{jee!8l{cojs_Bpy5c)0d*e$d!cIpkEjSlSrI`@P;0MS-e zdWEZ%law}BE1miX^Rl+RUE~FGn+7T8!Pvk7PVU5B@#jg`R8Mcy_V7*73*S$7HQim3KApxsqTsnxq`ci z-7d7`oYmiSPz!@$fRDa z;D1G@i=LlHy9=ED6&=%N&5Nz!WZ#7S-C*%vI@g~RPT~{Q<3*FBdT$5@on^KR=V1

          bnVTFOv2T?6xU(cLB5{_Ng@Kst3_{9z?jL^G`_6{aL2v zsvdc3LOGQj0u`6q{d8~N_4A&n_4E7^P}UmHu$=u+H!%G7Q!*EM+0vY|wHc$*1|O`c zSs9A&O{ zqgx*(-Ea@1@3E8~VD%_FnMnIeL!N% zzJN}1Kp*v>+JqY}PpXvO;=tu%eHRW22t+BKIdgkG~Xt<_^AX zUj4HC$!Y7;j|y5OW|+v4tY?%)55+&E<8kONNz@mWL!2aHYAlrG>=I7qUqy^PdFo_1E7V4T+=Nq1H{C!T-)PlButdQbn? zV`ozAYEXNkt;X6j71NTWGP;--_kNH*{#$~qMPeC~L3Z(8C}z$r9SMyc{9c|aXnspB zb@4K87_OXmkw-G_<^Z3`LoWBv4o`$5Btt!L7LJFfln`8P&2KC^Ch9Zbt$`TMZg3rYbC8t&~KcQ=A+4F~&J_wg zI$_OBr)hts>cIuS?vzpqSBaX7Ew9;P+O|UnS9#Z-2Yia_4BSn{UFY5SVC2_$Ayg*E z8?cZc`D(PBGNS{*NBfJ$!LSe}$nQ&`MYoZwg)i1cXS7R2jUF3&*0C1LE*Cm9n?#k- zK`xDrdi;JcdgZjV*MYj9xq3(DAngb+ZuX3Q$lY403)p_N62BhqHHcECdqq21zY}Xj z00@nvM_0u4cnBJu#4f7iL-yCs%C*PCIMEWZyU)vAL3mFNB+d8A)!qrbToAt;uRfDb z7RWcwttW+e4ws%gBa-=pRQPYtQ0>ccP?vPj_OCwoI$l6W>{9E&je%lx#VIe=%^W#SLY@?1{o?!g9`sjDkb^MQ{ zOS^fTy;X4A<>9ayo#;a~<~Cz1lGJ;J<5&ErYBd~d<#=`H|LW{BjwYDDeqm8U5F)bE zy*OVV_mIB$?7t2%?YkFVZ8>F9rOFilT#B19y+5+IO#b1})eA@))X?zxhtj`-#>7DJ z(&=`#c7Vy22VZLc7MN7>gZHqhbpMl%X_c&S87QDMpNwfg{gYjFVU0cDDRiv<&r|r5 zU!PPaZ<7_HBCXmzdY&pICLBEVGyQJ+r374$hZ-dF^S$!q>$oAy5h$b5kK}-4Jc^N@ zZ{t?sqeJo%%g&YHbqDWI?dVw*p=rMSFQ>r6+Emv%uQvw@`3Ix+LgOzF91!Wgep%zr z|0MJ$mCFI-GU{u4lDdJ1;rTO%)-xY!@a;Ht*1k<8m>VsO92Hc{sesN&dDR~_5ut2; z&{Y+96PWPscNy=IJ}4`P;J&R`106VyUTOdJ5i~kg)#cE~v;?K7px*f8!55b4lRwl( zj($c44^Y$QP_Iu8gm%B1knVi;JFx#q=>C3eNbLKV#Aa-8uw2-Iv+LiX8mw)`;wNWZ zKlf0jRTZAxqP@e{7ky~Or_g3pcVXDZv%$KB#F_ze#-BpHn#4?eO9-WZ00jx={!Wj7 zfxjYcAn_xY(yZq&}DZz<|u?RT_m4 z#)}7fcRZs54r23fpx+^njFj zcrcq6_>e5O`G*tU5pE4pp8R?WI}wDu@b$1Bso?rm=+J8d!%C)+JN7zX#}FTfbGD<@ zd*5>vlK21I|7Qk%0nN1?*6eJ{tQNg;YV@?;Ot5Z=ybhM{7C!?KjfYfnZup+V5Ii$- zp)O?}_Yj&DfU?p*bB&vYixxDO_fGgB?C+;u0n{r`nA6qiKCrr6C%4X`h@1+8`=|N9 zr&^z4R*x;s-zEFg=jlRaW|l(ESrQvq_Y`QQv{Tm#OK-;VhgmltVmU1-R=``v!}jlq zpxBb!tu(uy8mC{>;MU*IfrDOda9#`f163EW_3NH56|OAat9~@d;eb{H z+hda~7RddgWocvNx9E?vOv0(IOn<2d%kq8k|NS|qOrmZFezoomI>6Xe z`ET!<$>8Bw3#@n@`KN~`mR>T37LDkB9kRTC#6lBB) zls&lZ%pfUN!VfBRa=B&~GPsEQ(Td_j@X?^k`#Y4;h7Nf|%VFA;`NEra{(huxSCO+N 
z7er`*%p>+s6eG*2b%Q^y-6GF@=kn%#mp>);du#sX>m)kbT^fJSO87ulK&S0!Q^g{# zB3zwf7;Lc;+QWVDIZm9&8+R!JXePW^Jl#tR*NF5N#HNpe@1gcw(CX$>zKaIAaas3O zp-)(}+C=7d(2z%@kAvZw=phgn&Y362ymkoQf@?=#Fw0+|1vAh~q|Q5=fvbsS3GSj8 zCxMEv`ct-l>8gpLZ_nRt+YGvf`WT!YMLv@xx?c18~RBo%*lF`N67U z%jw^5f_^JFWjPNldT*-@4VPxbMj2ck=e8YBN&8R%-RnuAJ#AfwxCfd;VZXY^J_vL= zrH^Wm94M!7$0Z^K6=?oDf9|^vOJu$NQ0}IlXEL{-^yLtvf z_wwm~#`N~u$+F4X`bg3_M$jiH_v8-sG%;+3Oiq54uSABH!WFN0>G6;xe)hVJXa?lq zX(9cCGs7pXVV!nO(z(dw>)B*K^;ISZdisnCsyvaKX0d43Jaf<%58-4oufHXwx-rTF6}> z$CvUEO>a5+JD2lSGp6}|X8ik>8^1~ZnB=`oWbW!~g%$pKTJ3QYwC@ampB-QZ1!f#J z{>AJqUATvZ&IePsf2ZlEMgNti+s5{Trl3=)F!pn|P;oEDJY)&W$0YZ%6-B)dQm-Vt z6e-Xui0p7B29bpPRE+LLzs?~EBDI+4c>4$H$J2dvum4rqna4x9{(XEHC3$R3DOuBr zex)Q^))`x+vYnEWOq*pahmzee=-8!mRF-6SpYEKF<8&5ifMn4vg#pdYjCj}n-Ti^+`May;2kM-<#qGbe(TP3K|j#3{&A zNhQDMpi9vI^p*=8=p7@k#UPLDUc+Nd0N@mPr34r}HMu+tG1T)Ra`>-8h^s+U_Xzu8 zoQ`R&MkA3IjR_jcPZIOwCOqw1-$;Vy9g5wH&KAL4(3!^}&IFOs?QfSp29gEi9P*gb zU+!bAbD8+Dt{LD|%hRj2_34bFsS#=^u=ngi zf1ooC8h@{jp<>|bnZYXtsU$SxUeH6(Yn*OqlXx=Mh(mC!?$ds1cy$D5LqD07a&VFr z&-xQ~0LXid@Dr)r`yA-D*UkUXyrbVo|8D^RDB#=GKmci31jQE2hkW#GoRyH6_)vtjx(H$Istp(Ty5$+Q(gj-L2tC(AX9{c-YS}Kiev++; ztXQ`E9KdA0V?UjV77u_Dq95EUW20K5s=-^&i~shML|lrpE6I%&91*UoOLn=;6a_f` zI9)rDBKO(EfC8pM%adih3v#vCAlE(vM@0rQ@2M5AzN7);V1kvwDsfpK{`i@r<)62V z?W$?cmIh2Zu)#!+E$#cC(W)HN^^*}^!Q$6(YmC2L7LX`VdE5eAbZk>dtOo3Frzs;} z<7Sf~$G4>bK;7Cs1186UT#Mab6*-l9x}^7@OQ6Ix-(~q1WkX-!yzW+i+QqH72b8M} z%Q{{=+ZMpbh-j~*C)MC?B(C+R3o?hvnPEbP*3Xm&ZUsIA^NKu9vO8`g{0vz}!Q26U z;)4GoIRYbwyUP$_oYe>yp>i3#7Wd|}%YSM@t-HHbZz8gSYPaY3*0uUWp{Olfc2V&I zyGZ%8^MQc32^lXD!*H_4@fSb$Q?g*G7<7397NB87FK=S%bv0kfJIRU@lHYUdbcvV) zP5=5^{auHG#zN^A;WaS>Y+s~A@tX7+7xymDZhVgaVgNRXoRy0O(sjF}EB*Y{TV@C|^JBZ|x(4 z9>C+hA6-%EoZ7xtwiGIDw@df=i2EjFu@PnuS8ozXhlr?e!- zJ!YY{1Kwg(E|PI_p~{){1u!tkK7)>{SSo$Q`&IcgO1xtDvr?lQh{01I^C#B-ZX#tIj_Xs5ybAma`l@?jb5$L#Lfc1HC-Esi(D;+n+$``D!k_H$efGcI+3Sk{Va5UG1 z*5`=r^F+<3XudxMvK_-y$?@};L0{nC0^kg{fvg18Upi~`!NOyei8H4i`!?#h0^BYQ 
z^)v!G@*)Bg2BB;tYM6Cy_xbe-tq0m630Bp++H`Z%ITwiuQ5SmBe3b*eS`0B=+gPuJ zvFe?#VJnf`s&Ds=_qCfoo9SU)IIyw^^)6O#D}luv`3alOCm~bJF%A@>`ZY5cS^nrU zTuL^rmEo0o~=m ze})-0XbXuwg2}Ivhy1fHYJ94?r-IX(qg`g0|7jaf?!Qd?l8aryk@2+6Ov>m4@#_b}Ptbrwb@tfWhc~VN#NZ1;s z!o`o;o*Tr*WRwRK432OPu-1k($dAIjgWI(G#25)RQ7$fOTPRrD;PikqReSXHAGCo& z-oz{92`0ZTA$1F;PgQN~vVo_XAO@2_ppm8FoHij zi|+DSc+dvYvCjPb34*}@jflUm!>7b0Hh!L_n>XHVdv^fpGEByOdP-Ki2J?k6@m(8O z-1!jmo+jqfQ_5{{QJv|C;sw9`HFlb;bHH0@j3qN7-_x^3_Nk!Fb0X>uat)kiTljwi zb=Q?Qn%G^M-(dD+$8BiQ>%y+CU3a8r zIu{FZzT@K*5rEjnW=BPh=-hOY#$Gq$2^EvZ%>`}VVaJ{IhC}Tyo4*KojKKDxUKP)w zaQx>2A@51Tb55f z2FjEUi;{%zAvdN|FQ_Zjww5|b$0*hR6LB3Pc&7=QNh?9AIS5J@>Zr^$0S$(>P0lro z?^o!(}DbtmvvLsw3uFzkru z9_{ait=~fbL)WUM(rST6shyFK8m%jU2B;R(t_Ix`QT7ew*;&}<+mMk0^ zlrO=>9rk8o&-~e;<|AypEZ8@#AQ&_>mrabr3e!9%Jj8xp;b|`g{PYh9hAaVEHW+en ztBjNGToqei2GaYRPi)~ueEByuUxy>T93`f;R6&5IO%$ArDi{L+S|o?4Oo}Fr1tBFA zy#ZDjFNMOK zI&FDF!RX3gx9L&oYrf|)p-S7xFEiF$)$D@=TQUM4*lEUs_Rdo}ckQ?wbSk!si~v&< zn5CGm?8C!bAje<%k|6e_!GGMA?y6MJEwSNdiKatyw@AD@G|bYd=Rgg@=cd)vN) zy_0yxW!jxAfqE)$#R$IzF>`QgMcQmcNf7Ar3%gZm@iGu0OB89D%sM&2g**N+)Jz3n zBHG7+r5e49vGl&_Jm?8mOmr$D#P~%67aW3q%p8msnjboUE}57+5wd-}7oh64M`SN$ zrmnwy&BRs870Wj2OWH0BMeGz-&mAkaCsy0p6I zrG$2-Uevoh?a|Kyw_|~-Y)|g+ z?58}W_!*S!%MVNMcc=+pTv-+M9F++_$#llM(Q~?m0BW|gld(Mz)O=2?&fegzd5q3U zC>ghJeccd~`($v}=nQ);4h3xxS!E3m>1>XI!cZWfqyNHg4yTC?Tz!+`3US^EB;Y+q z*Ol8PCrlCP>6gFid`;#pjoJc(+7`cxQsZ>x0d#(Ss`>Zgy<*-XFv49t}vwq0YLpT-}F$&7a5 z+B^kBJ13C+iG^xqjh)`J@inMiBN@m9<h=8%Cp~W?kxTT0M2G#01J52Sxw@Z*)Sn z#5_T3alIC$CMrv)$k;bRE^E#v4D2)^w8no;%QB&~4EZKB35V|QCIoaAOSN!9z&lE{ zNi!m93@6c3)~;49+41AyknLb4yWf_?;ZCIS5FMx*0RC@iXM0=Y=GQ%HcrXXZ5(>r- z(9-w%D?yjMk$JQ|1lxvZK#j)-fE($y{l~c52AqW_z5-TP5fo3Gt4XMamO5Ll(B#16 z0R)s-0fg_7#)#sN`Ez-DLX?dQX^G4^yi4^ZYfHlc1<1Pa?_xV+lrPNq+X76Hn_5yqU-BMDfV^X)kV<87bxe|E4M0$;8 zxI6z(e-UD)>^*0fsgpDgH?#cM_S4V&aFnVbgewO%m9L$iKm30j^@qQ>sx-M1y|nOT z4F8w>yaVOmdm3^rRihiGZFsl1& literal 0 HcmV?d00001 diff --git a/performance/oss-performance-setup.yaml b/performance/oss-performance-setup.yaml 
new file mode 100644 index 00000000000..46eafb238b6 --- /dev/null +++ b/performance/oss-performance-setup.yaml @@ -0,0 +1,79 @@ +cassandra_version: cassandra-3.11.4 +cassandra_install_type: git +# Driver branch to use +driver_oss_branch: 4.x +# Driver dse branch to use +driver_dse_branch: 4.x +# Driver version identifier (used as part of graphite prefix) +driver_version: 4.8.0 +# Driver examples branch to use +driver_examples_branch: java-driver-4.x +# How long to run test for +duration: 2d +# The ip of the observer node from graphite-setup.yaml step +graphite_host: {provide-graphite-host-ip} + +--- + +ensemble: + server: + node.count: 3 + provisioner: + name: ctool + properties: + mark_for_reuse: true + cloud.provider: openstack + cloud.tenant: performance + cloud.instance.type: ms1.small + cluster_ttl: 2d + configuration_manager: + - name: ctool + properties: + java.version: openjdk8 + product.type: cassandra + product.install.type: {{cassandra_install_type}} + product.version: {{cassandra_version}} + cassandra.yaml: + hinted_handoff_enabled: false + client: + node.count: 1 + provisioner: + name: ctool + properties: + mark_for_reuse: true + cluster_ttl: 2d + cloud.provider: openstack + cloud.tenant: performance + cloud.instance.type: ms1.small + configuration_manager: + - name: ctool + properties: + java.version: openjdk8 + install.maven: true + - name: java_driver + properties: + oss.git.repository: git@github.com:datastax/java-driver.git + oss.git.branch: {{driver_oss_branch}} + dse.git.branch: {{driver_dse_branch}} + type: FOUR_X_OSS + - name: java_driver_duration_test + properties: + git.branch: {{driver_examples_branch}} +workload: + phases: + - run-endurance: + module: java_driver_duration_test + properties: + duration: {{duration}} + is.four: true + graphite.host: {{graphite_host}} + graphite.prefix: endurance-test-java-{{driver_version}}-OSS-{{cassandra_version}} + kill_nodes: + module: killnode_rhino + properties: + target.strategy: whitelist + 
target.number_of_nodes: 1 + target.selector: "*:*" + repeat.delay: 120 + repeat.iterations: 0 + graceful: true From eb93f72929820740e00e32461b31669ab4a40a7d Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 22 Jul 2020 11:15:15 -0700 Subject: [PATCH 532/979] Cover line width and Awaitility in contribution guidelines --- CONTRIBUTING.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c87a3de1d98..3546b3b86ea 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,6 +28,11 @@ mvn xml-format:xml-format The formatter does not enforce a maximum line length, but please try to keep it below 100 characters to keep files readable across all mediums (IDE, terminal, Github...). +### Other text files (markdown, etc) + +Similarly, enforce a right margin of 100 characters in those files. Editors and IDEs generally have +a way to configure this (for IDEA, install the "Wrap to column" plugin). + ## Coding style -- production code Do not use static imports. They make things harder to understand when you look at the code @@ -213,6 +218,10 @@ Static imports are permitted in a couple of places: when(codecRegistry.codecFor(DataTypes.INT)).thenReturn(codec); verify(codec).decodePrimitive(any(ByteBuffer.class), eq(ProtocolVersion.DEFAULT)); ``` +* All Awaitility methods, e.g.: + ```java + await().until(() -> somethingBecomesTrue()); + ``` Test methods names use lower snake case, generally start with `should`, and clearly indicate the purpose of the test, for example: `should_fail_if_key_already_exists`. 
If you have trouble coming From 83eec1736d59e9244ef70eee8a256bdb08647e54 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Thu, 23 Jul 2020 09:59:03 +0200 Subject: [PATCH 533/979] JAVA-2813: Don't fail when secure bundle is specified together with other options (#1476) --- changelog/README.md | 1 + .../core/session/ProgrammaticArguments.java | 6 + .../api/core/session/SessionBuilder.java | 45 +++- .../astra/AstraReadCassandraVersion.java | 2 - .../src/main/resources/application-astra.conf | 1 - .../oss/driver/api/core/cloud/CloudIT.java | 200 +++++++++++++++--- 6 files changed, 219 insertions(+), 36 deletions(-) delete mode 100644 examples/src/main/resources/application-astra.conf diff --git a/changelog/README.md b/changelog/README.md index 4528e0f8e3f..59b7965cda1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] JAVA-2813: Don't fail when secure bundle is specified together with other options - [bug] JAVA-2800: Exclude SLF4J from mapper-processor dependencies - [new feature] JAVA-2819: Add DriverConfigLoader.fromString - [improvement] JAVA-2431: Set all occurrences when bound variables are used multiple times diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index c0c816dfdbc..a28c76d5d0c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -212,6 +212,12 @@ public Builder withLocalDatacenter( return this; } + @NonNull + public Builder clearDatacenters() { + this.localDatacentersBuilder = ImmutableMap.builder(); + return this; + } + @NonNull public Builder withLocalDatacenters(Map localDatacenters) { for (Map.Entry entry : localDatacenters.entrySet()) { diff --git 
a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 9468ec2d4f5..cf7c171ec09 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; @@ -65,6 +66,8 @@ import java.util.function.Predicate; import javax.net.ssl.SSLContext; import net.jcip.annotations.NotThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base implementation to build session instances. @@ -77,6 +80,8 @@ @NotThreadSafe public abstract class SessionBuilder { + private static final Logger LOG = LoggerFactory.getLogger(SessionBuilder.class); + @SuppressWarnings("unchecked") protected final SelfT self = (SelfT) this; @@ -87,7 +92,8 @@ public abstract class SessionBuilder { protected ProgrammaticArguments.Builder programmaticArgumentsBuilder = ProgrammaticArguments.builder(); - private boolean sslConfigured = false; + private boolean programmaticSslFactory = false; + private boolean programmaticLocalDatacenter = false; /** * Sets the configuration loader to use. 
@@ -314,7 +320,7 @@ public SelfT withAuthCredentials( */ @NonNull public SelfT withSslEngineFactory(@Nullable SslEngineFactory sslEngineFactory) { - this.sslConfigured = true; + this.programmaticSslFactory = true; this.programmaticArgumentsBuilder.withSslEngineFactory(sslEngineFactory); return self; } @@ -352,6 +358,7 @@ public SelfT withSslContext(@Nullable SSLContext sslContext) { * if you use a third-party implementation, refer to their documentation. */ public SelfT withLocalDatacenter(@NonNull String profileName, @NonNull String localDatacenter) { + this.programmaticLocalDatacenter = true; this.programmaticArgumentsBuilder.withLocalDatacenter(profileName, localDatacenter); return self; } @@ -671,18 +678,29 @@ protected final CompletionStage buildDefaultSessionAsync() { defaultConfig.getStringList(DefaultDriverOption.CONTACT_POINTS, Collections.emptyList()); if (cloudConfigInputStream != null) { if (!programmaticContactPoints.isEmpty() || !configContactPoints.isEmpty()) { - throw new IllegalStateException( - "Can't use withCloudSecureConnectBundle and addContactPoint(s). They are mutually exclusive."); + LOG.info( + "Both a secure connect bundle and contact points were provided. These are mutually exclusive. The contact points from the secure bundle will have priority."); + // clear the contact points provided in the setting file and via addContactPoints + configContactPoints = Collections.emptyList(); + programmaticContactPoints = new HashSet<>(); } - String configuredSSLFactory = - defaultConfig.getString(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, null); - if (sslConfigured || configuredSSLFactory != null) { - throw new IllegalStateException( - "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. They are mutually exclusive."); + + if (programmaticSslFactory + || defaultConfig.isDefined(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS)) { + LOG.info( + "Both a secure connect bundle and SSL options were provided. 
They are mutually exclusive. The SSL options from the secure bundle will have priority."); } CloudConfig cloudConfig = new CloudConfigFactory().createCloudConfig(cloudConfigInputStream.call()); addContactEndPoints(cloudConfig.getEndPoints()); + + boolean localDataCenterDefined = + anyProfileHasDatacenterDefined(configLoader.getInitialConfig()); + if (programmaticLocalDatacenter || localDataCenterDefined) { + LOG.info( + "Both a secure connect bundle and a local datacenter were provided. They are mutually exclusive. The local datacenter from the secure bundle will have priority."); + programmaticArgumentsBuilder.clearDatacenters(); + } withLocalDatacenter(cloudConfig.getLocalDatacenter()); withSslEngineFactory(cloudConfig.getSslEngineFactory()); withCloudProxyAddress(cloudConfig.getProxyAddress()); @@ -715,6 +733,15 @@ protected final CompletionStage buildDefaultSessionAsync() { } } + private boolean anyProfileHasDatacenterDefined(DriverConfig driverConfig) { + for (DriverExecutionProfile driverExecutionProfile : driverConfig.getProfiles().values()) { + if (driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) { + return true; + } + } + return false; + } + /** * Returns URL based on the configUrl setting. If the configUrl has no protocol provided, the * method will fallback to file:// protocol and return URL that has file protocol specified. 
diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java index de8ebb82f03..d434665552a 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/astra/AstraReadCassandraVersion.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.examples.astra; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import java.nio.file.Paths; @@ -57,7 +56,6 @@ public static void main(String[] args) { .withCloudSecureConnectBundle(Paths.get("/path/to/secure-connect-database_name.zip")) // Change the user_name and password here for the Astra instance .withAuthCredentials("user_name", "fakePasswordForTests") - .withConfigLoader(DriverConfigLoader.fromClasspath("application-astra")) // Uncomment the next line to use a specific keyspace // .withKeyspace("keyspace_name") .build()) { diff --git a/examples/src/main/resources/application-astra.conf b/examples/src/main/resources/application-astra.conf deleted file mode 100644 index 9963cbaed7b..00000000000 --- a/examples/src/main/resources/application-astra.conf +++ /dev/null @@ -1 +0,0 @@ -datastax-java-driver { } \ No newline at end of file diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java index ce5429a68c9..9ef642976e0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -15,22 +15,27 @@ */ package com.datastax.oss.driver.api.core.cloud; +import static 
com.datastax.oss.driver.internal.core.util.LoggerTest.setupTestLogger; import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.any; import static com.github.tomakehurst.wiremock.client.WireMock.stubFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.spi.ILoggingEvent; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; import com.datastax.oss.driver.internal.core.ssl.DefaultSslEngineFactory; +import com.datastax.oss.driver.internal.core.util.LoggerTest; import com.github.tomakehurst.wiremock.junit.WireMockRule; import java.io.IOException; import java.io.InputStream; @@ -39,6 +44,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.security.NoSuchAlgorithmException; +import java.util.Collections; import javax.net.ssl.SSLContext; import org.junit.ClassRule; import org.junit.Rule; @@ -210,41 +216,109 @@ public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the } @Test - public void should_error_when_contact_points_and_secure_bundle_used() { + public void + 
should_connect_and_log_info_when_contact_points_and_secure_bundle_used_programmatic() { // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); - CqlSessionBuilder builder = + + try (CqlSession session = CqlSession.builder() .withCloudSecureConnectBundle(bundle) .addContactPoint(new InetSocketAddress("127.0.0.1", 9042)) - .withAuthCredentials("cassandra", "cassandra"); - assertThatThrownBy(() -> builder.build()) - .isInstanceOf(IllegalStateException.class) - .hasMessage( - "Can't use withCloudSecureConnectBundle and addContactPoint(s). They are mutually exclusive."); + .withAuthCredentials("cassandra", "cassandra") + .build(); ) { + + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and contact points were provided. These are mutually exclusive. 
The contact points from the secure bundle will have priority."); + + } finally { + logger.close(); + } + } + + @Test + public void should_connect_and_log_info_when_contact_points_and_secure_bundle_used_config() { + // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); + + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.CONTACT_POINTS, Collections.singletonList("localhost:9042")) + .build(); + + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + + try (CqlSession session = + CqlSession.builder() + .withConfigLoader(loader) + .withCloudSecureConnectBundle(bundle) + .withAuthCredentials("cassandra", "cassandra") + .build(); ) { + + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and contact points were provided. These are mutually exclusive. 
The contact points from the secure bundle will have priority."); + + } finally { + logger.close(); + } } @Test - public void should_error_when_ssl_context_and_secure_bundle_used_programatic() + public void should_connect_and_log_info_when_ssl_context_and_secure_bundle_used_programmatic() throws NoSuchAlgorithmException { // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); - CqlSessionBuilder builder = + + try (CqlSession session = CqlSession.builder() .withCloudSecureConnectBundle(bundle) .withAuthCredentials("cassandra", "cassandra") - .withSslContext(SSLContext.getInstance("SSL")); - // then - assertThatThrownBy(() -> builder.build()) - .isInstanceOf(IllegalStateException.class) - .hasMessage( - "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. They are mutually exclusive."); + .withSslContext(SSLContext.getInstance("SSL")) + .build()) { + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and SSL options were provided. They are mutually exclusive. 
The SSL options from the secure bundle will have priority."); + } finally { + logger.close(); + } } @Test public void should_error_when_ssl_context_and_secure_bundle_used_config() throws NoSuchAlgorithmException { // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() @@ -253,16 +327,94 @@ public void should_error_when_ssl_context_and_secure_bundle_used_config() .build(); Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); - CqlSessionBuilder builder = + + try (CqlSession session = + CqlSession.builder() + .withConfigLoader(loader) + .withCloudSecureConnectBundle(bundle) + .withAuthCredentials("cassandra", "cassandra") + .build()) { + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and SSL options were provided. They are mutually exclusive. 
The SSL options from the secure bundle will have priority."); + } finally { + logger.close(); + } + } + + @Test + public void + should_connect_and_log_info_when_local_data_center_and_secure_bundle_used_programmatic() { + // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); + + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, "dc-ignore") + .build(); + + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + + try (CqlSession session = CqlSession.builder() + .withCloudSecureConnectBundle(bundle) .withConfigLoader(loader) + .withAuthCredentials("cassandra", "cassandra") + .build(); ) { + + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and a local datacenter were provided. They are mutually exclusive. 
The local datacenter from the secure bundle will have priority."); + + } finally { + logger.close(); + } + } + + @Test + public void should_connect_and_log_info_when_local_data_center_and_secure_bundle_used_config() { + // given + LoggerTest.LoggerSetup logger = setupTestLogger(SessionBuilder.class, Level.INFO); + + Path bundle = proxyRule.getProxy().getBundleWithoutCredentialsPath(); + + try (CqlSession session = + CqlSession.builder() .withCloudSecureConnectBundle(bundle) - .withAuthCredentials("cassandra", "cassandra"); + .withLocalDatacenter("dc-ignored") + .withAuthCredentials("cassandra", "cassandra") + .build(); ) { + + // when + ResultSet set = session.execute("select * from system.local"); + // then + assertThat(set).isNotNull(); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "Both a secure connect bundle and a local datacenter were provided. They are mutually exclusive. The local datacenter from the secure bundle will have priority."); - // then - assertThatThrownBy(() -> builder.build()) - .isInstanceOf(IllegalStateException.class) - .hasMessage( - "Can't use withCloudSecureConnectBundle and explicitly specify ssl configuration. 
They are mutually exclusive."); + } finally { + logger.close(); + } } } From 571fcfca73a6282ad0571a34335b6f4916e1a5ba Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 24 Jul 2020 09:06:07 +0200 Subject: [PATCH 534/979] JAVA-2850: Ignore credentials in secure connect bundle [DataStax Astra] (#1481) --- changelog/README.md | 1 + .../api/core/session/SessionBuilder.java | 3 - .../core/config/cloud/CloudConfig.java | 13 +-- .../core/config/cloud/CloudConfigFactory.java | 25 ++--- .../oss/driver/api/core/cloud/CloudIT.java | 92 +++++++++++++++++-- 5 files changed, 95 insertions(+), 39 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 59b7965cda1..574c62179b2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] JAVA-2850: Ignore credentials in secure connect bundle [DataStax Astra] - [improvement] JAVA-2813: Don't fail when secure bundle is specified together with other options - [bug] JAVA-2800: Exclude SLF4J from mapper-processor dependencies - [new feature] JAVA-2819: Add DriverConfigLoader.fromString diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index cf7c171ec09..98aa8aa884d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -704,9 +704,6 @@ protected final CompletionStage buildDefaultSessionAsync() { withLocalDatacenter(cloudConfig.getLocalDatacenter()); withSslEngineFactory(cloudConfig.getSslEngineFactory()); withCloudProxyAddress(cloudConfig.getProxyAddress()); - if (cloudConfig.getAuthProvider().isPresent()) { - withAuthProvider(cloudConfig.getAuthProvider().get()); - } programmaticArguments = programmaticArgumentsBuilder.build(); } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java index c85d7a33b8c..e2207e3db95 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfig.java @@ -15,15 +15,12 @@ */ package com.datastax.oss.driver.internal.core.config.cloud; -import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.net.InetSocketAddress; import java.util.List; -import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -33,19 +30,16 @@ public class CloudConfig { private final List endPoints; private final String localDatacenter; private final SslEngineFactory sslEngineFactory; - @Nullable private final AuthProvider authProvider; CloudConfig( @NonNull InetSocketAddress proxyAddress, @NonNull List endPoints, @NonNull String localDatacenter, - @NonNull SslEngineFactory sslEngineFactory, - @Nullable AuthProvider authProvider) { + @NonNull SslEngineFactory sslEngineFactory) { this.proxyAddress = proxyAddress; this.endPoints = ImmutableList.copyOf(endPoints); this.localDatacenter = localDatacenter; this.sslEngineFactory = sslEngineFactory; - this.authProvider = authProvider; } @NonNull @@ -67,9 +61,4 @@ public String getLocalDatacenter() { public SslEngineFactory getSslEngineFactory() { return sslEngineFactory; } - - @NonNull - public Optional getAuthProvider() { - return Optional.ofNullable(authProvider); - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java index d0c3368f500..f7386dcc390 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java @@ -15,9 +15,7 @@ */ package com.datastax.oss.driver.internal.core.config.cloud; -import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; import com.datastax.oss.driver.internal.core.metadata.SniEndPoint; import com.datastax.oss.driver.internal.core.ssl.SniSslEngineFactory; import com.datastax.oss.driver.shaded.guava.common.io.ByteStreams; @@ -26,7 +24,6 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -53,10 +50,12 @@ import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManagerFactory; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @ThreadSafe public class CloudConfigFactory { - + private static final Logger LOG = LoggerFactory.getLogger(CloudConfigFactory.class); /** * Creates a {@link CloudConfig} with information fetched from the specified Cloud configuration * URL. 
@@ -138,9 +137,8 @@ public CloudConfig createCloudConfig(@NonNull InputStream cloudConfig) List endPoints = getEndPoints(proxyMetadataJson, sniProxyAddress); String localDatacenter = getLocalDatacenter(proxyMetadataJson); SniSslEngineFactory sslEngineFactory = new SniSslEngineFactory(sslContext); - AuthProvider authProvider = getAuthProvider(configJson); - return new CloudConfig( - sniProxyAddress, endPoints, localDatacenter, sslEngineFactory, authProvider); + validateIfBundleContainsUsernamePassword(configJson); + return new CloudConfig(sniProxyAddress, endPoints, localDatacenter, sslEngineFactory); } @NonNull @@ -176,16 +174,11 @@ protected URL getMetadataServiceUrl(JsonNode configFile) throws MalformedURLExce } } - @Nullable - protected AuthProvider getAuthProvider(JsonNode configFile) { - if (configFile.has("username")) { - String username = configFile.get("username").asText(); - if (configFile.has("password")) { - String password = configFile.get("password").asText(); - return new ProgrammaticPlainTextAuthProvider(username, password); - } + protected void validateIfBundleContainsUsernamePassword(JsonNode configFile) { + if (configFile.has("username") || configFile.has("password")) { + LOG.info( + "The bundle contains config.json with username and/or password. 
Providing it in the bundle is deprecated and ignored."); } - return null; } @NonNull diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java index 9ef642976e0..7fbcd631e86 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -27,13 +27,16 @@ import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; +import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.internal.core.config.cloud.CloudConfigFactory; import com.datastax.oss.driver.internal.core.ssl.DefaultSslEngineFactory; import com.datastax.oss.driver.internal.core.util.LoggerTest; import com.github.tomakehurst.wiremock.junit.WireMockRule; @@ -45,6 +48,7 @@ import java.nio.file.Path; import java.security.NoSuchAlgorithmException; import java.util.Collections; +import java.util.List; import javax.net.ssl.SSLContext; import org.junit.ClassRule; import org.junit.Rule; @@ -67,12 +71,57 @@ public class CloudIT { public void should_connect_to_proxy_using_path() { ResultSet set; Path bundle = proxyRule.getProxy().getDefaultBundlePath(); - try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + try (CqlSession session = + CqlSession.builder() + 
.withAuthCredentials("cassandra", "cassandra") + .withCloudSecureConnectBundle(bundle) + .build()) { set = session.execute("select * from system.local"); } assertThat(set).isNotNull(); } + @Test + public void should_connect_and_log_info_that_config_json_with_username_password_was_provided() { + ResultSet set; + Path bundle = proxyRule.getProxy().getDefaultBundlePath(); + LoggerTest.LoggerSetup logger = setupTestLogger(CloudConfigFactory.class, Level.INFO); + + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withCloudSecureConnectBundle(bundle) + .build()) { + set = session.execute("select * from system.local"); + verify(logger.appender, timeout(500).atLeast(1)) + .doAppend(logger.loggingEventCaptor.capture()); + assertThat( + logger.loggingEventCaptor.getAllValues().stream() + .map(ILoggingEvent::getFormattedMessage)) + .contains( + "The bundle contains config.json with username and/or password. Providing it in the bundle is deprecated and ignored."); + } + assertThat(set).isNotNull(); + } + + @Test + public void + should_fail_with_auth_error_when_connecting_using_bundle_with_username_password_in_config_json() { + Path bundle = proxyRule.getProxy().getDefaultBundlePath(); + + // fails with auth error because username/password from config.json is ignored + AllNodesFailedException exception = null; + try { + CqlSession.builder().withCloudSecureConnectBundle(bundle).build(); + } catch (AllNodesFailedException ex) { + exception = ex; + } + assertThat(exception).isNotNull(); + List errors = exception.getAllErrors().values().iterator().next(); + Throwable firstError = errors.get(0); + assertThat(firstError).isInstanceOf(AuthenticationException.class); + } + @Test public void should_connect_to_proxy_without_credentials() { ResultSet set; @@ -91,7 +140,11 @@ public void should_connect_to_proxy_without_credentials() { public void should_connect_to_proxy_using_non_normalized_path() { Path bundle = 
proxyRule.getProxy().getBundlesRootPath().resolve("../bundles/creds-v1.zip"); ResultSet set; - try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withCloudSecureConnectBundle(bundle) + .build()) { set = session.execute("select * from system.local"); } assertThat(set).isNotNull(); @@ -101,7 +154,11 @@ public void should_connect_to_proxy_using_non_normalized_path() { public void should_connect_to_proxy_using_input_stream() throws IOException { InputStream bundle = Files.newInputStream(proxyRule.getProxy().getDefaultBundlePath()); ResultSet set; - try (CqlSession session = CqlSession.builder().withCloudSecureConnectBundle(bundle).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withCloudSecureConnectBundle(bundle) + .build()) { set = session.execute("select * from system.local"); } assertThat(set).isNotNull(); @@ -124,7 +181,10 @@ public void should_connect_to_proxy_using_URL() throws IOException { // when ResultSet set; try (CqlSession session = - CqlSession.builder().withCloudSecureConnectBundle(bundleUrl).build()) { + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withCloudSecureConnectBundle(bundleUrl) + .build()) { // then set = session.execute("select * from system.local"); @@ -142,7 +202,11 @@ public void should_connect_to_proxy_using_absolute_path_provided_in_the_session_ .build(); // when ResultSet set; - try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withConfigLoader(loader) + .build()) { // then set = session.execute("select * from system.local"); @@ -161,7 +225,11 @@ public void should_connect_to_proxy_using_non_normalized_path_provided_in_the_se .build(); // when ResultSet set; - try 
(CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withConfigLoader(loader) + .build()) { // then set = session.execute("select * from system.local"); @@ -180,7 +248,11 @@ public void should_connect_to_proxy_using_non_normalized_path_provided_in_the_se .build(); // when ResultSet set; - try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withConfigLoader(loader) + .build()) { // then set = session.execute("select * from system.local"); @@ -207,7 +279,11 @@ public void should_connect_to_proxy_using_url_with_http_protocol_provided_in_the .build(); // when ResultSet set; - try (CqlSession session = CqlSession.builder().withConfigLoader(loader).build()) { + try (CqlSession session = + CqlSession.builder() + .withAuthCredentials("cassandra", "cassandra") + .withConfigLoader(loader) + .build()) { // then set = session.execute("select * from system.local"); From 743c0b8cb58792eb8681b9e13f940228ee8204c3 Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 24 Jul 2020 10:22:09 +0200 Subject: [PATCH 535/979] JAVA-2331: Unregister old metrics when a node gets removed or changes RPC address (#1478) --- changelog/README.md | 1 + .../api/core/config/DefaultDriverOption.java | 8 + .../driver/api/core/config/OptionsMap.java | 1 + .../api/core/config/TypedDriverOption.java | 4 + .../core/context/DefaultDriverContext.java | 3 +- .../metrics/DropwizardMetricsFactory.java | 57 ++++- .../metrics/DropwizardNodeMetricUpdater.java | 42 +++- core/src/main/resources/reference.conf | 13 +- .../metrics/DropwizardMetricsFactoryTest.java | 93 ++++++++ .../oss/driver/core/metrics/FakeTicker.java | 40 ++++ .../core/metrics/MetricsSimulacronIT.java | 206 ++++++++++++++++++ 11 files changed, 461 insertions(+), 7 deletions(-) create mode 
100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java diff --git a/changelog/README.md b/changelog/README.md index 574c62179b2..2452b268ded 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [bug] JAVA-2331: Unregister old metrics when a node gets removed or changes RPC address - [improvement] JAVA-2850: Ignore credentials in secure connect bundle [DataStax Astra] - [improvement] JAVA-2813: Don't fail when secure bundle is specified together with other options - [bug] JAVA-2800: Exclude SLF4J from mapper-processor dependencies diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index eedf8011c59..929886aad8a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -808,6 +808,14 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: int */ SESSION_LEAK_THRESHOLD("advanced.session-leak.threshold"), + /** + * The period of inactivity after which the node level metrics will be evicted. The eviction will + * happen only if none of the enabled node-level metrics is updated for a given node within this + * time window. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_EXPIRE_AFTER("advanced.metrics.node.expire-after"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index c148d32ae96..5106c9bfefa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -321,6 +321,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, Duration.ofSeconds(3)); map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, 3); map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)); map.put(TypedDriverOption.SOCKET_TCP_NODELAY, true); map.put(TypedDriverOption.HEARTBEAT_INTERVAL, Duration.ofSeconds(30)); map.put(TypedDriverOption.HEARTBEAT_TIMEOUT, Duration.ofMillis(500)); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 6413a617649..7ef5e022c1c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -701,6 +701,10 @@ public String toString() { new TypedDriverOption<>( DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL, GenericType.DURATION); + /** The time after which the node level metrics will be evicted. 
*/ + public static final TypedDriverOption METRICS_NODE_EXPIRE_AFTER = + new TypedDriverOption<>(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, GenericType.DURATION); + private static Iterable> introspectBuiltInValues() { try { ImmutableList.Builder> result = ImmutableList.builder(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 950dd0a4414..90915293d00 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -97,6 +97,7 @@ import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; import com.datastax.oss.protocol.internal.Compressor; import com.datastax.oss.protocol.internal.FrameCodec; import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; @@ -600,7 +601,7 @@ protected PoolManager buildPoolManager() { } protected MetricsFactory buildMetricsFactory() { - return new DropwizardMetricsFactory(this); + return new DropwizardMetricsFactory(this, Ticker.systemTicker()); } protected RequestThrottler buildRequestThrottler() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 351ba28959d..9064a7433bb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -27,7 +27,13 @@ import com.datastax.oss.driver.api.core.metrics.NodeMetric; import 
com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Duration; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -41,6 +47,8 @@ public class DropwizardMetricsFactory implements MetricsFactory { private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); + static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); + static final Duration DEFAULT_EXPIRE_AFTER = Duration.ofHours(1); private final String logPrefix; private final InternalDriverContext context; @@ -48,17 +56,35 @@ public class DropwizardMetricsFactory implements MetricsFactory { private final MetricRegistry registry; @Nullable private final Metrics metrics; private final SessionMetricUpdater sessionUpdater; + private final Cache metricsCache; - public DropwizardMetricsFactory(InternalDriverContext context) { + public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { this.logPrefix = context.getSessionName(); this.context = context; DriverExecutionProfile config = context.getConfig().getDefaultProfile(); Set enabledSessionMetrics = parseSessionMetricPaths(config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)); + Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); + this.enabledNodeMetrics = parseNodeMetricPaths(config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED)); + metricsCache = + CacheBuilder.newBuilder() + .ticker(ticker) + .expireAfterAccess(evictionTime) 
+ .removalListener( + (RemovalNotification notification) -> { + LOG.debug( + "[{}] Removing metrics for node: {} from cache after {}", + logPrefix, + notification.getKey(), + evictionTime); + notification.getValue().cleanupNodeMetrics(); + }) + .build(); + if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { LOG.debug("[{}] All metrics are disabled, Session.getMetrics will be empty", logPrefix); this.registry = null; @@ -73,6 +99,23 @@ public DropwizardMetricsFactory(InternalDriverContext context) { } } + @VisibleForTesting + static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { + Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); + + if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { + LOG.warn( + "[{}] Value too low for {}: {} (It should be higher than {}). Forcing to {} instead.", + logPrefix, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + evictionTime, + LOWEST_ACCEPTABLE_EXPIRE_AFTER, + DEFAULT_EXPIRE_AFTER); + } + + return evictionTime; + } + @Override public Optional getMetrics() { return Optional.ofNullable(metrics); @@ -85,9 +128,15 @@ public SessionMetricUpdater getSessionUpdater() { @Override public NodeMetricUpdater newNodeUpdater(Node node) { - return (registry == null) - ? 
NoopNodeMetricUpdater.INSTANCE - : new DropwizardNodeMetricUpdater(node, enabledNodeMetrics, registry, context); + if (registry == null) { + return NoopNodeMetricUpdater.INSTANCE; + } else { + DropwizardNodeMetricUpdater dropwizardNodeMetricUpdater = + new DropwizardNodeMetricUpdater( + node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + metricsCache.put(node, dropwizardNodeMetricUpdater); + return dropwizardNodeMetricUpdater; + } } protected Set parseSessionMetricPaths(List paths) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java index 2c3546c5fee..7961d102659 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metrics; import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; @@ -28,6 +29,7 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.pool.ChannelPool; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.function.Function; import net.jcip.annotations.ThreadSafe; @@ -36,13 +38,16 @@ public class DropwizardNodeMetricUpdater extends DropwizardMetricUpdater enabledMetrics, MetricRegistry registry, - InternalDriverContext context) { + InternalDriverContext context, + Runnable signalMetricUpdated) { super(enabledMetrics, registry); + this.signalMetricUpdated = signalMetricUpdated; this.metricNamePrefix = buildPrefix(context.getSessionName(), node.getEndPoint()); 
DriverExecutionProfile config = context.getConfig().getDefaultProfile(); @@ -101,6 +106,37 @@ private String buildPrefix(String sessionName, EndPoint endPoint) { return sessionName + ".nodes." + endPoint.asMetricPrefix() + "."; } + @Override + public void incrementCounter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.incrementCounter(metric, profileName, amount); + } + + @Override + public void updateHistogram(NodeMetric metric, String profileName, long value) { + signalMetricUpdated.run(); + super.updateHistogram(metric, profileName, value); + } + + @Override + public void markMeter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.markMeter(metric, profileName, amount); + } + + @Override + public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { + signalMetricUpdated.run(); + super.updateTimer(metric, profileName, duration, unit); + } + + @Override + @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + public T getMetric(NodeMetric metric, String profileName) { + signalMetricUpdated.run(); + return super.getMetric(metric, profileName); + } + private void initializePoolGauge( NodeMetric metric, Node node, @@ -116,4 +152,8 @@ private void initializePoolGauge( }); } } + + public void cleanupNodeMetrics() { + registry.removeMatching((name, metric) -> name.startsWith(metricNamePrefix)); + } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 7847025a499..c7faab0a0bc 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1611,6 +1611,17 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes } + + # The time after which the node level metrics will be evicted. + # The eviction will happen only if none of the enabled node-level + # metrics is updated for a given node for a given time. 
+ # When this interval elapses, all metrics for the idle node are removed. + # If you set it to a value lower than 5 minutes, it will be forced to the default of 1 hour. + # + # Required: no (defaults to 1 hour) + # Modifiable at runtime: no + # Overridable in a profile: no + expire-after = 1 hour } } @@ -2063,4 +2074,4 @@ datastax-java-driver { // } // } } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java new file mode 100644 index 00000000000..c949e573b62 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java @@ -0,0 +1,93 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.DEFAULT_EXPIRE_AFTER; +import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DropwizardMetricsFactoryTest { + + private static final String LOG_PREFIX = "prefix"; + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(DropwizardMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + DropwizardMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + 
assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[%s] Value too low for %s: %s (It should be higher than %s). Forcing to %s instead.", + LOG_PREFIX, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + LOWEST_ACCEPTABLE_EXPIRE_AFTER, + DEFAULT_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(DropwizardMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + DropwizardMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java new file mode 100644 index 00000000000..0ad0e3b31d5 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.metrics; + +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import java.time.Duration; +import java.util.concurrent.atomic.AtomicLong; + +/** A Ticker whose value can be advanced programmatically in test. */ +public class FakeTicker extends Ticker { + + private final AtomicLong nanos = new AtomicLong(); + + public FakeTicker advance(long nanoseconds) { + nanos.addAndGet(nanoseconds); + return this; + } + + public FakeTicker advance(Duration duration) { + return advance(duration.toNanos()); + } + + @Override + public long read() { + return nanos.get(); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java new file mode 100644 index 00000000000..ecb549382f9 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java @@ -0,0 +1,206 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +import com.codahale.metrics.Meter; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.google.common.collect.Lists; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.time.Duration; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class MetricsSimulacronIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new 
SimulacronRule(ClusterSpec.builder().withNodes(1)); + + @Before + public void clearPrimes() { + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + } + + @Test + public void should_remove_node_metrics_and_not_remove_session_metrics_after_eviction_time() { + + // given + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + Lists.newArrayList("bytes-sent", "bytes-received")) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Lists.newArrayList("bytes-sent", "bytes-received")) + .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) + .build(); + FakeTicker fakeTicker = new FakeTicker(); + try (CqlSession session = + new MetricsTestContextBuilder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withConfigLoader(loader) + .withTicker(fakeTicker) + .build()) { + for (int i = 0; i < 10; i++) { + session.execute("SELECT release_version FROM system.local"); + } + + // when + fakeTicker.advance(Duration.ofHours(2)); + + // then session metrics are not evicted + assertThat(session.getMetrics()) + .hasValueSatisfying( + metrics -> { + assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_SENT)) + .hasValueSatisfying( + bytesSent -> assertThat(bytesSent.getCount()).isGreaterThan(0)); + assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_RECEIVED)) + .hasValueSatisfying( + bytesReceived -> assertThat(bytesReceived.getCount()).isGreaterThan(0)); + }); + + // and node metrics are evicted + await() + .until( + () -> { + // get only node in a cluster and evaluate its metrics. 
+ Node node = session.getMetadata().getNodes().values().iterator().next(); + Metrics metrics = session.getMetrics().get(); + return !metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_SENT).isPresent() + && !metrics + .getNodeMetric(node, DefaultNodeMetric.BYTES_RECEIVED) + .isPresent(); + }); + } + } + + @Test + public void + should_not_evict_not_updated_node_metric_if_any_other_node_level_metric_was_updated() { + // given + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + Lists.newArrayList("bytes-sent", "errors.request.aborted")) + .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) + .build(); + FakeTicker fakeTicker = new FakeTicker(); + try (CqlSession session = + new MetricsTestContextBuilder() + .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) + .withConfigLoader(loader) + .withTicker(fakeTicker) + .build()) { + for (int i = 0; i < 10; i++) { + session.execute("SELECT release_version FROM system.local"); + } + + // when advance time to before eviction + fakeTicker.advance(Duration.ofMinutes(59)); + // execute query that update only bytes-sent + session.execute("SELECT release_version FROM system.local"); + // advance time to after eviction + fakeTicker.advance(Duration.ofMinutes(2)); + + // then all node-level metrics should not be evicted + await() + .until( + () -> { + // get only node in a cluster and evaluate its metrics. 
+ Node node = session.getMetadata().getNodes().values().iterator().next(); + Metrics metrics = session.getMetrics().get(); + return metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_SENT).isPresent() + && metrics + .getNodeMetric(node, DefaultNodeMetric.ABORTED_REQUESTS) + .isPresent(); + }); + } + } + + private static class MetricsTestContextBuilder + extends SessionBuilder { + + private Ticker ticker; + + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + return defaultSession; + } + + public MetricsTestContextBuilder withTicker(Ticker ticker) { + this.ticker = ticker; + return this; + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new MetricsTestContext(configLoader, programmaticArguments, ticker); + } + } + + private static class MetricsTestContext extends DefaultDriverContext { + private final Ticker ticker; + + public MetricsTestContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments, + @NonNull Ticker ticker) { + super(configLoader, programmaticArguments); + this.ticker = ticker; + } + + @Override + protected MetricsFactory buildMetricsFactory() { + return new DropwizardMetricsFactoryCustomTicker(this, ticker); + } + + private static class DropwizardMetricsFactoryCustomTicker extends DropwizardMetricsFactory { + + public DropwizardMetricsFactoryCustomTicker(InternalDriverContext context, Ticker ticker) { + super(context, ticker); + } + } + } +} From c2efde334f1f829dd8449cd2c45ac25e14925f7f Mon Sep 17 00:00:00 2001 From: Tomasz Lelek Date: Fri, 24 Jul 2020 15:21:19 +0200 Subject: [PATCH 536/979] JAVA-2331: fix null pointer in tests that are using MockedDriverContextFactory (#1483) --- .../internal/core/context/MockedDriverContextFactory.java | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index 91f315461b9..f36e8b33804 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -27,6 +27,7 @@ import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import java.time.Duration; import java.util.Optional; public class MockedDriverContextFactory { @@ -45,6 +46,8 @@ public static DefaultDriverContext defaultDriverContext( DriverExecutionProfile blankProfile = mock(DriverExecutionProfile.class); when(blankProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) .thenReturn("none"); + when(blankProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofMinutes(5)); return blankProfile; }); From 29a6c9a7f1b3523d70e60b06e981e59f012d392b Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 24 Jul 2020 11:04:34 -0700 Subject: [PATCH 537/979] JAVA-2331: Use minimum eviction time if set too low --- .../core/metrics/DropwizardMetricsFactory.java | 6 ++---- core/src/main/resources/reference.conf | 14 +++++++++----- .../core/metrics/DropwizardMetricsFactoryTest.java | 6 ++---- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 9064a7433bb..e859f62d9b5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -48,7 +48,6 @@ public class DropwizardMetricsFactory implements MetricsFactory { private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); - static final Duration DEFAULT_EXPIRE_AFTER = Duration.ofHours(1); private final String logPrefix; private final InternalDriverContext context; @@ -105,12 +104,11 @@ static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { LOG.warn( - "[{}] Value too low for {}: {} (It should be higher than {}). Forcing to {} instead.", + "[{}] Value too low for {}: {}. Forcing to {} instead.", logPrefix, DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), evictionTime, - LOWEST_ACCEPTABLE_EXPIRE_AFTER, - DEFAULT_EXPIRE_AFTER); + LOWEST_ACCEPTABLE_EXPIRE_AFTER); } return evictionTime; diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index c7faab0a0bc..9c82608139c 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1613,12 +1613,16 @@ datastax-java-driver { } # The time after which the node level metrics will be evicted. - # The eviction will happen only if none of the enabled node-level - # metrics is updated for a given node for a given time. - # When this interval elapses, all metrics for the idle node are removed. - # If you set it to a value lower than 5 minutes, it will be forced to a default 1 hour. # - # Required: no (defaults to 1 hour) + # This is used to unregister stale metrics if a node leaves the cluster or gets a new address. + # The eviction will happen only if none of the enabled node-level metrics is updated for a + # given node for a given time. When this interval elapses, all metrics for the idle node are + # removed. 
+ # + # The lowest allowed value is 5 minutes. If you try to set it lower, the driver will log a + # warning and use 5 minutes. + # + # Required: yes # Modifiable at runtime: no # Overridable in a profile: no expire-after = 1 hour diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java index c949e573b62..7599b4e15cc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java @@ -15,7 +15,6 @@ */ package com.datastax.oss.driver.internal.core.metrics; -import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.DEFAULT_EXPIRE_AFTER; import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; @@ -58,12 +57,11 @@ public void should_log_warning_when_provided_eviction_time_setting_is_too_low() assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) .contains( String.format( - "[%s] Value too low for %s: %s (It should be higher than %s). Forcing to %s instead.", + "[%s] Value too low for %s: %s. 
Forcing to %s instead.", LOG_PREFIX, DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), expireAfter, - LOWEST_ACCEPTABLE_EXPIRE_AFTER, - DEFAULT_EXPIRE_AFTER)); + LOWEST_ACCEPTABLE_EXPIRE_AFTER)); } @Test From 34b1a5c3c4fe0d001fa475f9ce98ab096dbfb0db Mon Sep 17 00:00:00 2001 From: tomekl007 Date: Wed, 22 Jul 2020 12:45:50 +0200 Subject: [PATCH 538/979] JAVA-2841: Raise timeouts during connection initialization --- changelog/README.md | 1 + .../oss/driver/api/core/config/OptionsMap.java | 12 +++++++----- core/src/main/resources/reference.conf | 2 +- .../typesafe/DefaultDriverConfigLoaderTest.java | 6 +++--- 4 files changed, 12 insertions(+), 9 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 2452b268ded..fd74bcac72f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] JAVA-2841: Raise timeouts during connection initialization - [bug] JAVA-2331: Unregister old metrics when a node gets removed or changes RPC address - [improvement] JAVA-2850: Ignore credentials in secure connect bundle [DataStax Astra] - [improvement] JAVA-2813: Don't fail when secure bundle is specified together with other options diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 5106c9bfefa..b4d7680e685 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -242,6 +242,8 @@ private void readObject(ObjectInputStream stream) throws InvalidObjectException } protected static void fillWithDriverDefaults(OptionsMap map) { + Duration initQueryTimeout = Duration.ofSeconds(5); + // Sorted by order of appearance in reference.conf: // Skip CONFIG_RELOAD_INTERVAL because the map-based config doesn't need periodic reloading @@ -255,8 +257,8 @@ protected static void 
fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true); map.put(TypedDriverOption.SESSION_LEAK_THRESHOLD, 4); map.put(TypedDriverOption.CONNECTION_CONNECT_TIMEOUT, Duration.ofSeconds(5)); - map.put(TypedDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, Duration.ofMillis(500)); - map.put(TypedDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.CONNECTION_INIT_QUERY_TIMEOUT, initQueryTimeout); + map.put(TypedDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT, initQueryTimeout); map.put(TypedDriverOption.CONNECTION_POOL_LOCAL_SIZE, 1); map.put(TypedDriverOption.CONNECTION_POOL_REMOTE_SIZE, 1); map.put(TypedDriverOption.CONNECTION_MAX_REQUESTS, 1024); @@ -324,7 +326,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)); map.put(TypedDriverOption.SOCKET_TCP_NODELAY, true); map.put(TypedDriverOption.HEARTBEAT_INTERVAL, Duration.ofSeconds(30)); - map.put(TypedDriverOption.HEARTBEAT_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.HEARTBEAT_TIMEOUT, initQueryTimeout); map.put(TypedDriverOption.METADATA_TOPOLOGY_WINDOW, Duration.ofSeconds(1)); map.put(TypedDriverOption.METADATA_TOPOLOGY_MAX_EVENTS, 20); map.put(TypedDriverOption.METADATA_SCHEMA_ENABLED, true); @@ -333,7 +335,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(1)); map.put(TypedDriverOption.METADATA_SCHEMA_MAX_EVENTS, 20); map.put(TypedDriverOption.METADATA_TOKEN_MAP_ENABLED, true); - map.put(TypedDriverOption.CONTROL_CONNECTION_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.CONTROL_CONNECTION_TIMEOUT, initQueryTimeout); map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_INTERVAL, Duration.ofMillis(200)); map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_TIMEOUT, Duration.ofSeconds(10)); 
map.put(TypedDriverOption.CONTROL_CONNECTION_AGREEMENT_WARN, true); @@ -342,7 +344,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.REPREPARE_CHECK_SYSTEM_TABLE, false); map.put(TypedDriverOption.REPREPARE_MAX_STATEMENTS, 0); map.put(TypedDriverOption.REPREPARE_MAX_PARALLELISM, 100); - map.put(TypedDriverOption.REPREPARE_TIMEOUT, Duration.ofMillis(500)); + map.put(TypedDriverOption.REPREPARE_TIMEOUT, initQueryTimeout); map.put(TypedDriverOption.NETTY_DAEMON, false); map.put(TypedDriverOption.NETTY_IO_SIZE, 0); map.put(TypedDriverOption.NETTY_IO_SHUTDOWN_QUIET_PERIOD, 2); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 9c82608139c..674cad7cba0 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -377,7 +377,7 @@ datastax-java-driver { # Modifiable at runtime: yes, the new value will be used for connections created after the # change. # Overridable in a profile: no - init-query-timeout = 500 milliseconds + init-query-timeout = 5 seconds # The timeout to use when the driver changes the keyspace on a connection at runtime (this # happens when the client issues a `USE ...` query, and all connections belonging to the current diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java index 8b85a556138..aa4ff4a2287 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoaderTest.java @@ -185,7 +185,7 @@ public void should_load_from_other_classpath_resource() { DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); // From customApplication.conf: 
assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); + .isEqualTo(Duration.ofSeconds(5)); // From customApplication.json: assertThat(config.getInt(DefaultDriverOption.REQUEST_PAGE_SIZE)).isEqualTo(2000); // From customApplication.properties: @@ -204,7 +204,7 @@ public void should_load_from_file() { DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); // From customApplication.conf: assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); + .isEqualTo(Duration.ofSeconds(5)); // From reference.conf: assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); @@ -220,7 +220,7 @@ public void should_load_from_file_with_system_property() { DriverExecutionProfile config = loader.getInitialConfig().getDefaultProfile(); // From customApplication.conf: assertThat(config.getDuration(DefaultDriverOption.REQUEST_TIMEOUT)) - .isEqualTo(Duration.ofMillis(500)); + .isEqualTo(Duration.ofSeconds(5)); // From reference.conf: assertThat(config.getString(DefaultDriverOption.REQUEST_SERIAL_CONSISTENCY)) .isEqualTo(DefaultConsistencyLevel.SERIAL.name()); From ef161bae4659e21bad78238a73cb7dedb2e95c69 Mon Sep 17 00:00:00 2001 From: tomekl007 Date: Fri, 24 Jul 2020 10:16:37 +0200 Subject: [PATCH 539/979] OptionsMap - refactor settings that are used multiple times to a variable --- .../driver/api/core/config/OptionsMap.java | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index b4d7680e685..941712541fc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -243,13 +243,18 @@ private void 
readObject(ObjectInputStream stream) throws InvalidObjectException protected static void fillWithDriverDefaults(OptionsMap map) { Duration initQueryTimeout = Duration.ofSeconds(5); + Duration requestTimeout = Duration.ofSeconds(2); + int requestPageSize = 5000; + int continuousMaxPages = 0; + int continuousMaxPagesPerSecond = 0; + int continuousMaxEnqueuedPages = 4; // Sorted by order of appearance in reference.conf: // Skip CONFIG_RELOAD_INTERVAL because the map-based config doesn't need periodic reloading - map.put(TypedDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(2)); + map.put(TypedDriverOption.REQUEST_TIMEOUT, requestTimeout); map.put(TypedDriverOption.REQUEST_CONSISTENCY, "LOCAL_ONE"); - map.put(TypedDriverOption.REQUEST_PAGE_SIZE, 5000); + map.put(TypedDriverOption.REQUEST_PAGE_SIZE, requestPageSize); map.put(TypedDriverOption.REQUEST_SERIAL_CONSISTENCY, "SERIAL"); map.put(TypedDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, false); map.put(TypedDriverOption.GRAPH_TRAVERSAL_SOURCE, "g"); @@ -287,15 +292,18 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.REQUEST_TRACE_CONSISTENCY, "ONE"); map.put(TypedDriverOption.REQUEST_LOG_WARNINGS, true); map.put(TypedDriverOption.GRAPH_PAGING_ENABLED, "AUTO"); - map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, 5000); - map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES, 0); - map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0); - map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 4); - map.put(TypedDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 5000); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_PAGE_SIZE, requestPageSize); + map.put(TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES, continuousMaxPages); + map.put( + TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, + continuousMaxPagesPerSecond); + map.put( + TypedDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, continuousMaxEnqueuedPages); 
+ map.put(TypedDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, requestPageSize); map.put(TypedDriverOption.CONTINUOUS_PAGING_PAGE_SIZE_BYTES, false); - map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES, 0); - map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 0); - map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 4); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES, continuousMaxPages); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, continuousMaxPagesPerSecond); + map.put(TypedDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, continuousMaxEnqueuedPages); map.put(TypedDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE, Duration.ofSeconds(2)); map.put(TypedDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofSeconds(1)); map.put(TypedDriverOption.MONITOR_REPORTING_ENABLED, true); @@ -330,8 +338,8 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METADATA_TOPOLOGY_WINDOW, Duration.ofSeconds(1)); map.put(TypedDriverOption.METADATA_TOPOLOGY_MAX_EVENTS, 20); map.put(TypedDriverOption.METADATA_SCHEMA_ENABLED, true); - map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, Duration.ofSeconds(2)); - map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, 5000); + map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, requestTimeout); + map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, requestPageSize); map.put(TypedDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(1)); map.put(TypedDriverOption.METADATA_SCHEMA_MAX_EVENTS, 20); map.put(TypedDriverOption.METADATA_TOKEN_MAP_ENABLED, true); From 4e7ba5494eb411e3f1d4b3a9b63698db889e1a90 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 27 Jul 2020 15:15:07 +0200 Subject: [PATCH 540/979] Remove version from java-driver-core test-jar dependency declarations --- mapper-processor/pom.xml | 1 - mapper-runtime/pom.xml | 1 - query-builder/pom.xml | 1 - 3 files changed, 3 deletions(-) diff 
--git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 7ee5a5f44db..6352765c993 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -105,7 +105,6 @@ com.datastax.oss java-driver-core test - ${project.version} test-jar diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f23e0d86432..2942cca93f7 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -88,7 +88,6 @@ com.datastax.oss java-driver-core test - ${project.version} test-jar diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 06e0344477f..040d4b8de30 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -73,7 +73,6 @@ com.datastax.oss java-driver-core test - ${project.version} test-jar From ab52296c67c0f305acd4abb9ca3590ac76c198fb Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 23 Jun 2020 21:47:44 -0700 Subject: [PATCH 541/979] Extract constants for pipeline handler names --- .../internal/core/channel/ChannelFactory.java | 28 ++++++++++++---- .../core/channel/InFlightHandler.java | 2 +- .../core/channel/ProtocolInitHandler.java | 6 +++- .../core/channel/ChannelFactoryTestBase.java | 5 ++- .../core/channel/ProtocolInitHandlerTest.java | 33 ++++++++++--------- 5 files changed, 48 insertions(+), 26 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index a9fbaf18d81..9db68676282 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -74,6 +74,16 @@ public class ChannelFactory { */ private static final String UNKNOWN_PRODUCT_TYPE = "UNKNOWN"; + // The names of the handlers on the pipeline: + public static final String SSL_HANDLER_NAME = "ssl"; + public static final String INBOUND_TRAFFIC_METER_NAME = "inboundTrafficMeter"; + public static final String 
OUTBOUND_TRAFFIC_METER_NAME = "outboundTrafficMeter"; + public static final String FRAME_TO_BYTES_ENCODER_NAME = "encoder"; + public static final String BYTES_TO_FRAME_DECODER_NAME = "decoder"; + public static final String HEARTBEAT_HANDLER_NAME = "heartbeat"; + public static final String INFLIGHT_HANDLER_NAME = "inflight"; + public static final String INIT_HANDLER_NAME = "init"; + private final String logPrefix; protected final InternalDriverContext context; @@ -312,7 +322,7 @@ protected void initChannel(Channel channel) { context .getSslHandlerFactory() .map(f -> f.newSslHandler(channel, endPoint)) - .map(h -> pipeline.addLast("ssl", h)); + .map(h -> pipeline.addLast(SSL_HANDLER_NAME, h)); // Only add meter handlers on the pipeline if metrics are enabled. SessionMetricUpdater sessionMetricUpdater = @@ -320,23 +330,27 @@ protected void initChannel(Channel channel) { if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_RECEIVED, null) || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_RECEIVED, null)) { pipeline.addLast( - "inboundTrafficMeter", + INBOUND_TRAFFIC_METER_NAME, new InboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); } if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_SENT, null) || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_SENT, null)) { pipeline.addLast( - "outboundTrafficMeter", + OUTBOUND_TRAFFIC_METER_NAME, new OutboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); } pipeline - .addLast("encoder", new FrameEncoder(context.getFrameCodec(), maxFrameLength)) - .addLast("decoder", new FrameDecoder(context.getFrameCodec(), maxFrameLength)) + .addLast( + FRAME_TO_BYTES_ENCODER_NAME, + new FrameEncoder(context.getFrameCodec(), maxFrameLength)) + .addLast( + BYTES_TO_FRAME_DECODER_NAME, + new FrameDecoder(context.getFrameCodec(), maxFrameLength)) // Note: HeartbeatHandler is inserted here once init completes - .addLast("inflight", inFlightHandler) - .addLast("init", initHandler); + 
.addLast(INFLIGHT_HANDLER_NAME, inFlightHandler) + .addLast(INIT_HANDLER_NAME, initHandler); context.getNettyOptions().afterChannelInitialized(channel); } catch (Throwable t) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index f25b621fc66..eb6bce925c9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -198,7 +198,7 @@ private void startGracefulShutdown(ChannelHandlerContext ctx) { ctx.channel().close(); } else { // remove heartbeat handler from pipeline if present. - ChannelHandler heartbeatHandler = ctx.pipeline().get("heartbeat"); + ChannelHandler heartbeatHandler = ctx.pipeline().get(ChannelFactory.HEARTBEAT_HANDLER_NAME); if (heartbeatHandler != null) { ctx.pipeline().remove(heartbeatHandler); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index f958e0b6477..eea5223b518 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -123,7 +123,11 @@ protected boolean setConnectSuccess() { boolean result = super.setConnectSuccess(); if (result) { // add heartbeat to pipeline now that protocol is initialized. 
- ctx.pipeline().addBefore("inflight", "heartbeat", heartbeatHandler); + ctx.pipeline() + .addBefore( + ChannelFactory.INFLIGHT_HANDLER_NAME, + ChannelFactory.HEARTBEAT_HANDLER_NAME, + heartbeatHandler); } return result; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java index 71b2636e1a5..d28e10d0d9d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryTestBase.java @@ -269,7 +269,10 @@ protected void initChannel(Channel channel) throws Exception { options, heartbeatHandler, productType == null); - channel.pipeline().addLast("inflight", inFlightHandler).addLast("init", initHandler); + channel + .pipeline() + .addLast(ChannelFactory.INFLIGHT_HANDLER_NAME, inFlightHandler) + .addLast(ChannelFactory.INIT_HANDLER_NAME, initHandler); } catch (Throwable t) { resultFuture.completeExceptionally(t); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java index 48cabc285ec..127b4ef6ec4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandlerTest.java @@ -102,7 +102,7 @@ public void setup() { channel .pipeline() .addLast( - "inflight", + ChannelFactory.INFLIGHT_HANDLER_NAME, new InFlightHandler( DefaultProtocolVersion.V4, new StreamIdGenerator(100), @@ -120,7 +120,7 @@ public void should_initialize() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -154,7 +154,7 @@ public void 
should_query_supported_options() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -207,12 +207,12 @@ public void should_add_heartbeat_handler_to_pipeline_on_success() { heartbeatHandler, false); - channel.pipeline().addLast("init", protocolInitHandler); + channel.pipeline().addLast(ChannelFactory.INIT_HANDLER_NAME, protocolInitHandler); ChannelFuture connectFuture = channel.connect(new InetSocketAddress("localhost", 9042)); // heartbeat should initially not be in pipeline - assertThat(channel.pipeline().get("heartbeat")).isNull(); + assertThat(channel.pipeline().get(ChannelFactory.HEARTBEAT_HANDLER_NAME)).isNull(); // It should send a STARTUP message Frame requestFrame = readOutboundFrame(); @@ -231,7 +231,8 @@ public void should_add_heartbeat_handler_to_pipeline_on_success() { assertThat(connectFuture).isSuccess(); // should have added heartbeat handler to pipeline. - assertThat(channel.pipeline().get("heartbeat")).isEqualTo(heartbeatHandler); + assertThat(channel.pipeline().get(ChannelFactory.HEARTBEAT_HANDLER_NAME)) + .isEqualTo(heartbeatHandler); // should have removed itself from pipeline. 
assertThat(channel.pipeline().last()).isNotEqualTo(protocolInitHandler); } @@ -241,7 +242,7 @@ public void should_fail_to_initialize_if_init_query_times_out() throws Interrupt channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -267,7 +268,7 @@ public void should_initialize_with_authentication() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -332,7 +333,7 @@ public void should_invoke_auth_provider_when_server_does_not_send_challenge() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -366,7 +367,7 @@ public void should_fail_to_initialize_if_server_sends_auth_error() throws Throwa channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -413,7 +414,7 @@ public void should_check_cluster_name_if_provided() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -444,7 +445,7 @@ public void should_fail_to_initialize_if_cluster_name_does_not_match() throws Th channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -478,7 +479,7 @@ public void should_initialize_with_keyspace() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -510,7 +511,7 @@ public void should_initialize_with_events() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -545,7 +546,7 @@ public void 
should_initialize_with_keyspace_and_events() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, @@ -580,7 +581,7 @@ public void should_fail_to_initialize_if_keyspace_is_invalid() { channel .pipeline() .addLast( - "init", + ChannelFactory.INIT_HANDLER_NAME, new ProtocolInitHandler( internalDriverContext, DefaultProtocolVersion.V4, From 57234900c5b221f87d497e00ce8f78d925ce964d Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 22 Jun 2020 16:19:52 -0700 Subject: [PATCH 542/979] JAVA-2773: Support new protocol v5 message format https://issues.apache.org/jira/browse/CASSANDRA-15299 --- bom/pom.xml | 2 +- changelog/README.md | 1 + .../TinkerpopBufferPrimitiveCodec.java | 26 +++ .../core/connection/CrcMismatchException.java | 39 ++++ .../internal/core/DefaultProtocolFeature.java | 8 + .../core/DefaultProtocolVersionRegistry.java | 3 +- .../internal/core/channel/ChannelFactory.java | 8 +- .../core/channel/ProtocolInitHandler.java | 44 +++++ .../core/context/DefaultDriverContext.java | 28 ++- .../core/context/InternalDriverContext.java | 8 + .../core/protocol/ByteBufCompressor.java | 30 ++- .../core/protocol/ByteBufPrimitiveCodec.java | 26 +++ .../core/protocol/ByteBufSegmentBuilder.java | 182 ++++++++++++++++++ .../core/protocol/BytesToSegmentDecoder.java | 95 +++++++++ .../core/protocol/FrameToSegmentEncoder.java | 67 +++++++ .../internal/core/protocol/Lz4Compressor.java | 43 +++-- .../core/protocol/SegmentToBytesEncoder.java | 45 +++++ .../core/protocol/SegmentToFrameDecoder.java | 116 +++++++++++ .../core/protocol/SnappyCompressor.java | 24 ++- .../oss/driver/internal/core/util/Sizes.java | 2 +- .../protocol/BytesToSegmentDecoderTest.java | 149 ++++++++++++++ .../protocol/SegmentToFrameDecoderTest.java | 95 +++++++++ .../core/protocol/SliceWriteListenerTest.java | 73 +++++++ 23 files changed, 1084 insertions(+), 30 deletions(-) create mode 100644 
core/src/main/java/com/datastax/oss/driver/api/core/connection/CrcMismatchException.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufSegmentBuilder.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoder.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/FrameToSegmentEncoder.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToBytesEncoder.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoderTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SliceWriteListenerTest.java diff --git a/bom/pom.xml b/bom/pom.xml index aecef303882..0f9ed52b414 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -61,7 +61,7 @@ com.datastax.oss native-protocol - 1.4.10 + 1.4.11 com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index fd74bcac72f..9309f45cfdf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [new feature] JAVA-2773: Support new protocol v5 message format - [improvement] JAVA-2841: Raise timeouts during connection initialization - [bug] JAVA-2331: Unregister old metrics when a node gets removed or changes RPC address - [improvement] JAVA-2850: Ignore credentials in secure connect bundle [DataStax Astra] diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java index 978dd62a790..43a9fd23f24 100644 --- 
a/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodec.java @@ -23,6 +23,7 @@ import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.zip.CRC32; import org.apache.tinkerpop.gremlin.structure.io.Buffer; /** @@ -87,6 +88,16 @@ public Buffer concat(Buffer left, Buffer right) { return rv; } + @Override + public void markReaderIndex(Buffer source) { + throw new UnsupportedOperationException(); + } + + @Override + public void resetReaderIndex(Buffer source) { + throw new UnsupportedOperationException(); + } + @Override public byte readByte(Buffer source) { return source.readByte(); @@ -97,6 +108,11 @@ public int readInt(Buffer source) { return source.readInt(); } + @Override + public int readInt(Buffer source, int offset) { + throw new UnsupportedOperationException(); + } + @Override public InetAddress readInetAddr(Buffer source) { int length = readByte(source) & 0xFF; @@ -148,6 +164,16 @@ public String readLongString(Buffer source) { return readString(source, length); } + @Override + public Buffer readRetainedSlice(Buffer source, int sliceLength) { + throw new UnsupportedOperationException(); + } + + @Override + public void updateCrc(Buffer source, CRC32 crc) { + throw new UnsupportedOperationException(); + } + @Override public void writeByte(byte b, Buffer dest) { dest.writeByte(b); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/CrcMismatchException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/CrcMismatchException.java new file mode 100644 index 00000000000..e1115b6e38f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/CrcMismatchException.java @@ -0,0 +1,39 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.connection; + +import com.datastax.oss.driver.api.core.DriverException; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Thrown when the checksums in a server response don't match (protocol v5 or above). + * + *

          This indicates a data corruption issue, either due to a hardware issue on the client, or on + * the network between the server and the client. It is not recoverable: the driver will drop the + * connection. + */ +public class CrcMismatchException extends DriverException { + + public CrcMismatchException(@NonNull String message) { + super(message, null, null, true); + } + + @NonNull + @Override + public DriverException copy() { + return new CrcMismatchException(getMessage()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java index 78c0338075b..4a756820faa 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolFeature.java @@ -56,5 +56,13 @@ public enum DefaultProtocolFeature implements ProtocolFeature { * @see CASSANDRA-14664 */ NOW_IN_SECONDS, + + /** + * The new protocol framing format introduced in Cassandra 4: wrapping multiple frames into a + * single "segment" to checksum (and possibly compress) them together. 
+ * + * @see CASSANDRA-15299 + */ + MODERN_FRAMING, ; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java index 63381653a73..2dff6ff22f8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java @@ -240,7 +240,8 @@ public boolean supports(ProtocolVersion version, ProtocolFeature feature) { return (DefaultProtocolVersion.V5.getCode() <= code && code < DseProtocolVersion.DSE_V1.getCode()) || DseProtocolVersion.DSE_V2.getCode() <= code; - } else if (DefaultProtocolFeature.NOW_IN_SECONDS.equals(feature)) { + } else if (DefaultProtocolFeature.NOW_IN_SECONDS.equals(feature) + || DefaultProtocolFeature.MODERN_FRAMING.equals(feature)) { // OSS only, V5+ return DefaultProtocolVersion.V5.getCode() <= code && code < DseProtocolVersion.DSE_V1.getCode(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 9db68676282..4e2defdce49 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -78,8 +78,12 @@ public class ChannelFactory { public static final String SSL_HANDLER_NAME = "ssl"; public static final String INBOUND_TRAFFIC_METER_NAME = "inboundTrafficMeter"; public static final String OUTBOUND_TRAFFIC_METER_NAME = "outboundTrafficMeter"; - public static final String FRAME_TO_BYTES_ENCODER_NAME = "encoder"; - public static final String BYTES_TO_FRAME_DECODER_NAME = "decoder"; + public static final String FRAME_TO_BYTES_ENCODER_NAME = "frameToBytesEncoder"; + public static final String FRAME_TO_SEGMENT_ENCODER_NAME = 
"frameToSegmentEncoder"; + public static final String SEGMENT_TO_BYTES_ENCODER_NAME = "segmentToBytesEncoder"; + public static final String BYTES_TO_FRAME_DECODER_NAME = "bytesToFrameDecoder"; + public static final String BYTES_TO_SEGMENT_DECODER_NAME = "bytesToSegmentDecoder"; + public static final String SEGMENT_TO_FRAME_DECODER_NAME = "segmentToFrameDecoder"; public static final String HEARTBEAT_HANDLER_NAME = "heartbeat"; public static final String INFLIGHT_HANDLER_NAME = "inflight"; public static final String INIT_HANDLER_NAME = "init"; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index eea5223b518..9d4969040d6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -26,7 +26,12 @@ import com.datastax.oss.driver.api.core.connection.ConnectionInitException; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.protocol.BytesToSegmentDecoder; +import com.datastax.oss.driver.internal.core.protocol.FrameToSegmentEncoder; +import com.datastax.oss.driver.internal.core.protocol.SegmentToBytesEncoder; +import com.datastax.oss.driver.internal.core.protocol.SegmentToFrameDecoder; import com.datastax.oss.driver.internal.core.util.ProtocolUtils; import com.datastax.oss.driver.internal.core.util.concurrent.UncaughtExceptions; import com.datastax.oss.protocol.internal.Message; @@ -46,6 +51,7 @@ import com.datastax.oss.protocol.internal.response.result.Rows; import com.datastax.oss.protocol.internal.response.result.SetKeyspace; 
import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPipeline; import java.nio.ByteBuffer; import java.util.List; import java.util.Objects; @@ -200,10 +206,12 @@ void onResponse(Message response) { step = Step.STARTUP; send(); } else if (step == Step.STARTUP && response instanceof Ready) { + maybeSwitchToModernFraming(); context.getAuthProvider().ifPresent(provider -> provider.onMissingChallenge(endPoint)); step = Step.GET_CLUSTER_NAME; send(); } else if (step == Step.STARTUP && response instanceof Authenticate) { + maybeSwitchToModernFraming(); Authenticate authenticate = (Authenticate) response; authenticator = buildAuthenticator(endPoint, authenticate.authenticator); authenticator @@ -366,6 +374,42 @@ public String toString() { } } + /** + * Rearranges the pipeline to deal with the new framing structure in protocol v5 and above. The + * first messages still use the legacy format, we only do this after a successful response to the + * first STARTUP message. + */ + private void maybeSwitchToModernFraming() { + if (context + .getProtocolVersionRegistry() + .supports(initialProtocolVersion, DefaultProtocolFeature.MODERN_FRAMING)) { + + ChannelPipeline pipeline = ctx.pipeline(); + + // We basically add one conversion step in the middle: frames <-> *segments* <-> bytes + // Outbound: + pipeline.replace( + ChannelFactory.FRAME_TO_BYTES_ENCODER_NAME, + ChannelFactory.FRAME_TO_SEGMENT_ENCODER_NAME, + new FrameToSegmentEncoder( + context.getPrimitiveCodec(), context.getFrameCodec(), logPrefix)); + pipeline.addBefore( + ChannelFactory.FRAME_TO_SEGMENT_ENCODER_NAME, + ChannelFactory.SEGMENT_TO_BYTES_ENCODER_NAME, + new SegmentToBytesEncoder(context.getSegmentCodec())); + + // Inbound: + pipeline.replace( + ChannelFactory.BYTES_TO_FRAME_DECODER_NAME, + ChannelFactory.BYTES_TO_SEGMENT_DECODER_NAME, + new BytesToSegmentDecoder(context.getSegmentCodec())); + pipeline.addAfter( + ChannelFactory.BYTES_TO_SEGMENT_DECODER_NAME, + 
ChannelFactory.SEGMENT_TO_FRAME_DECODER_NAME, + new SegmentToFrameDecoder(context.getFrameCodec(), logPrefix)); + } + } + private String getString(List row, int i) { return TypeCodecs.TEXT.decode(row.get(i), DefaultProtocolVersion.DEFAULT); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 90915293d00..04d0ab21dd7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -100,8 +100,10 @@ import com.datastax.oss.driver.shaded.guava.common.base.Ticker; import com.datastax.oss.protocol.internal.Compressor; import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.PrimitiveCodec; import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; +import com.datastax.oss.protocol.internal.SegmentCodec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; @@ -166,8 +168,12 @@ public class DefaultDriverContext implements InternalDriverContext { new LazyReference<>("eventBus", this::buildEventBus, cycleDetector); private final LazyReference> compressorRef = new LazyReference<>("compressor", this::buildCompressor, cycleDetector); + private final LazyReference> primitiveCodecRef = + new LazyReference<>("primitiveCodec", this::buildPrimitiveCodec, cycleDetector); private final LazyReference> frameCodecRef = new LazyReference<>("frameCodec", this::buildFrameCodec, cycleDetector); + private final LazyReference> segmentCodecRef = + new LazyReference<>("segmentCodec", this::buildSegmentCodec, cycleDetector); private final LazyReference protocolVersionRegistryRef = new LazyReference<>( 
"protocolVersionRegistry", this::buildProtocolVersionRegistry, cycleDetector); @@ -439,9 +445,13 @@ protected Compressor buildCompressor() { } } + protected PrimitiveCodec buildPrimitiveCodec() { + return new ByteBufPrimitiveCodec(getNettyOptions().allocator()); + } + protected FrameCodec buildFrameCodec() { return new FrameCodec<>( - new ByteBufPrimitiveCodec(getNettyOptions().allocator()), + getPrimitiveCodec(), getCompressor(), new ProtocolV3ClientCodecs(), new ProtocolV4ClientCodecsForDse(), @@ -450,6 +460,10 @@ protected FrameCodec buildFrameCodec() { new DseProtocolV2ClientCodecs()); } + protected SegmentCodec buildSegmentCodec() { + return new SegmentCodec<>(getPrimitiveCodec(), getCompressor()); + } + protected ProtocolVersionRegistry buildProtocolVersionRegistry() { return new DefaultProtocolVersionRegistry(getSessionName()); } @@ -782,12 +796,24 @@ public Compressor getCompressor() { return compressorRef.get(); } + @NonNull + @Override + public PrimitiveCodec getPrimitiveCodec() { + return primitiveCodecRef.get(); + } + @NonNull @Override public FrameCodec getFrameCodec() { return frameCodecRef.get(); } + @NonNull + @Override + public SegmentCodec getSegmentCodec() { + return segmentCodecRef.get(); + } + @NonNull @Override public ProtocolVersionRegistry getProtocolVersionRegistry() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index b596149db0d..3d68ec9a5f3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -39,6 +39,8 @@ import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.protocol.internal.Compressor; import com.datastax.oss.protocol.internal.FrameCodec; +import 
com.datastax.oss.protocol.internal.PrimitiveCodec; +import com.datastax.oss.protocol.internal.SegmentCodec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; @@ -57,9 +59,15 @@ public interface InternalDriverContext extends DriverContext { @NonNull Compressor getCompressor(); + @NonNull + PrimitiveCodec getPrimitiveCodec(); + @NonNull FrameCodec getFrameCodec(); + @NonNull + SegmentCodec getSegmentCodec(); + @NonNull ProtocolVersionRegistry getProtocolVersionRegistry(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufCompressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufCompressor.java index a8e4960ff49..b3f3619a76c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufCompressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufCompressor.java @@ -25,21 +25,39 @@ public abstract class ByteBufCompressor implements Compressor { @Override public ByteBuf compress(ByteBuf uncompressed) { - return uncompressed.isDirect() ? compressDirect(uncompressed) : compressHeap(uncompressed); + return uncompressed.isDirect() + ? compressDirect(uncompressed, true) + : compressHeap(uncompressed, true); } - protected abstract ByteBuf compressDirect(ByteBuf input); + @Override + public ByteBuf compressWithoutLength(ByteBuf uncompressed) { + return uncompressed.isDirect() + ? compressDirect(uncompressed, false) + : compressHeap(uncompressed, false); + } - protected abstract ByteBuf compressHeap(ByteBuf input); + protected abstract ByteBuf compressDirect(ByteBuf input, boolean prependWithUncompressedLength); + + protected abstract ByteBuf compressHeap(ByteBuf input, boolean prependWithUncompressedLength); @Override public ByteBuf decompress(ByteBuf compressed) { - return compressed.isDirect() ? 
decompressDirect(compressed) : decompressHeap(compressed); + return decompressWithoutLength(compressed, readUncompressedLength(compressed)); + } + + protected abstract int readUncompressedLength(ByteBuf compressed); + + @Override + public ByteBuf decompressWithoutLength(ByteBuf compressed, int uncompressedLength) { + return compressed.isDirect() + ? decompressDirect(compressed, uncompressedLength) + : decompressHeap(compressed, uncompressedLength); } - protected abstract ByteBuf decompressDirect(ByteBuf input); + protected abstract ByteBuf decompressDirect(ByteBuf input, int uncompressedLength); - protected abstract ByteBuf decompressHeap(ByteBuf input); + protected abstract ByteBuf decompressHeap(ByteBuf input, int uncompressedLength); protected static ByteBuffer inputNioBuffer(ByteBuf buf) { // Using internalNioBuffer(...) as we only hold the reference in this method and so can diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java index 73b92f479de..b7fc6350636 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java @@ -24,6 +24,7 @@ import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.zip.CRC32; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -66,6 +67,16 @@ public ByteBuf concat(ByteBuf left, ByteBuf right) { } } + @Override + public void markReaderIndex(ByteBuf source) { + source.markReaderIndex(); + } + + @Override + public void resetReaderIndex(ByteBuf source) { + source.resetReaderIndex(); + } + @Override public byte readByte(ByteBuf source) { return source.readByte(); @@ -76,6 +87,11 @@ public int readInt(ByteBuf source) { return source.readInt(); } + @Override + public int readInt(ByteBuf source, int 
offset) { + return source.getInt(source.readerIndex() + offset); + } + @Override public InetAddress readInetAddr(ByteBuf source) { int length = readByte(source) & 0xFF; @@ -127,6 +143,16 @@ public String readLongString(ByteBuf source) { return readString(source, length); } + @Override + public ByteBuf readRetainedSlice(ByteBuf source, int sliceLength) { + return source.readRetainedSlice(sliceLength); + } + + @Override + public void updateCrc(ByteBuf source, CRC32 crc) { + crc.update(source.internalNioBuffer(source.readerIndex(), source.readableBytes())); + } + @Override public void writeByte(byte b, ByteBuf dest) { dest.writeByte(b); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufSegmentBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufSegmentBuilder.java new file mode 100644 index 00000000000..b7911de701c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufSegmentBuilder.java @@ -0,0 +1,182 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.PrimitiveCodec; +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.SegmentBuilder; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelPromise; +import io.netty.util.concurrent.Future; +import io.netty.util.concurrent.GenericFutureListener; +import java.util.ArrayList; +import java.util.List; +import net.jcip.annotations.NotThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@NotThreadSafe +public class ByteBufSegmentBuilder extends SegmentBuilder { + + private static final Logger LOG = LoggerFactory.getLogger(ByteBufSegmentBuilder.class); + + private final ChannelHandlerContext context; + private final String logPrefix; + + public ByteBufSegmentBuilder( + @NonNull ChannelHandlerContext context, + @NonNull PrimitiveCodec primitiveCodec, + @NonNull FrameCodec frameCodec, + @NonNull String logPrefix) { + super(primitiveCodec, frameCodec); + this.context = context; + this.logPrefix = logPrefix; + } + + @Override + @NonNull + protected ChannelPromise mergeStates(@NonNull List framePromises) { + if (framePromises.size() == 1) { + return framePromises.get(0); + } + // We concatenate multiple frames into one segment. When the segment is written, all the frames + // are written. 
+ ChannelPromise segmentPromise = context.newPromise(); + ImmutableList dependents = ImmutableList.copyOf(framePromises); + segmentPromise.addListener( + future -> { + if (future.isSuccess()) { + for (ChannelPromise framePromise : dependents) { + framePromise.setSuccess(); + } + } else { + Throwable cause = future.cause(); + for (ChannelPromise framePromise : dependents) { + framePromise.setFailure(cause); + } + } + }); + return segmentPromise; + } + + @Override + @NonNull + protected List splitState(@NonNull ChannelPromise framePromise, int sliceCount) { + // We split one frame into multiple slices. When all slices are written, the frame is written. + List slicePromises = new ArrayList<>(sliceCount); + for (int i = 0; i < sliceCount; i++) { + slicePromises.add(context.newPromise()); + } + GenericFutureListener> sliceListener = + new SliceWriteListener(framePromise, slicePromises); + for (int i = 0; i < sliceCount; i++) { + slicePromises.get(i).addListener(sliceListener); + } + return slicePromises; + } + + @Override + protected void processSegment( + @NonNull Segment segment, @NonNull ChannelPromise segmentPromise) { + context.write(segment, segmentPromise); + } + + @Override + protected void onLargeFrameSplit(@NonNull Frame frame, int frameLength, int sliceCount) { + LOG.trace( + "[{}] Frame {} is too large ({} > {}), splitting into {} segments", + logPrefix, + frame.streamId, + frameLength, + Segment.MAX_PAYLOAD_LENGTH, + sliceCount); + } + + @Override + protected void onSegmentFull( + @NonNull Frame frame, int frameLength, int currentPayloadLength, int currentFrameCount) { + LOG.trace( + "[{}] Current self-contained segment is full ({}/{} bytes, {} frames), processing now", + logPrefix, + currentPayloadLength, + Segment.MAX_PAYLOAD_LENGTH, + currentFrameCount); + } + + @Override + protected void onSmallFrameAdded( + @NonNull Frame frame, int frameLength, int currentPayloadLength, int currentFrameCount) { + LOG.trace( + "[{}] Added frame {} to current 
self-contained segment " + + "(bringing it to {}/{} bytes, {} frames)", + logPrefix, + frame.streamId, + currentPayloadLength, + Segment.MAX_PAYLOAD_LENGTH, + currentFrameCount); + } + + @Override + protected void onLastSegmentFlushed(int currentPayloadLength, int currentFrameCount) { + LOG.trace( + "[{}] Flushing last self-contained segment ({}/{} bytes, {} frames)", + logPrefix, + currentPayloadLength, + Segment.MAX_PAYLOAD_LENGTH, + currentFrameCount); + } + + @NotThreadSafe + static class SliceWriteListener implements GenericFutureListener> { + + private final ChannelPromise parentPromise; + private final List slicePromises; + + // All slices are written to the same channel, and the segment is built from the Flusher which + // also runs on the same event loop, so we don't need synchronization. + private int remainingSlices; + + SliceWriteListener(@NonNull ChannelPromise parentPromise, List slicePromises) { + this.parentPromise = parentPromise; + this.slicePromises = slicePromises; + this.remainingSlices = slicePromises.size(); + } + + @Override + public void operationComplete(@NonNull Future future) { + if (!parentPromise.isDone()) { + if (future.isSuccess()) { + remainingSlices -= 1; + if (remainingSlices == 0) { + parentPromise.setSuccess(); + } + } else { + // If any slice fails, we can immediately mark the whole frame as failed: + parentPromise.setFailure(future.cause()); + // Cancel any remaining slice, Netty will not send the bytes. 
+ for (ChannelPromise slicePromise : slicePromises) { + slicePromise.cancel(/*Netty ignores this*/ false); + } + } + } + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoder.java new file mode 100644 index 00000000000..6a816c202ca --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoder.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.driver.api.core.connection.CrcMismatchException; +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.SegmentCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.LengthFieldBasedFrameDecoder; +import java.nio.ByteOrder; +import net.jcip.annotations.NotThreadSafe; + +/** + * Decodes {@link Segment}s from a stream of bytes. + * + *

          This works like a regular length-field-based decoder, but we override {@link + * #getUnadjustedFrameLength} to handle two peculiarities: the length is encoded on 17 bits, and we + * also want to check the header CRC before we use it. So we parse the whole segment header ahead of + * time, and store it until we're ready to build the segment. + */ +@NotThreadSafe +public class BytesToSegmentDecoder extends LengthFieldBasedFrameDecoder { + + private final SegmentCodec segmentCodec; + private SegmentCodec.Header header; + + public BytesToSegmentDecoder(@NonNull SegmentCodec segmentCodec) { + super( + // max length (Netty wants this to be the overall length including everything): + segmentCodec.headerLength() + + SegmentCodec.CRC24_LENGTH + + Segment.MAX_PAYLOAD_LENGTH + + SegmentCodec.CRC32_LENGTH, + // offset and size of the "length" field: that's the whole header + 0, + segmentCodec.headerLength() + SegmentCodec.CRC24_LENGTH, + // length adjustment: add the trailing CRC to the declared length + SegmentCodec.CRC32_LENGTH, + // bytes to skip: the header (we've already parsed it while reading the length) + segmentCodec.headerLength() + SegmentCodec.CRC24_LENGTH); + this.segmentCodec = segmentCodec; + } + + @Override + protected Object decode(ChannelHandlerContext ctx, ByteBuf in) throws Exception { + try { + ByteBuf payloadAndCrc = (ByteBuf) super.decode(ctx, in); + if (payloadAndCrc == null) { + return null; + } else { + assert header != null; + try { + Segment segment = segmentCodec.decode(header, payloadAndCrc); + header = null; + return segment; + } catch (com.datastax.oss.protocol.internal.CrcMismatchException e) { + throw new CrcMismatchException(e.getMessage()); + } + } + } catch (Exception e) { + // Don't hold on to a stale header if we failed to decode the rest of the segment + header = null; + throw e; + } + } + + @Override + protected long getUnadjustedFrameLength(ByteBuf buffer, int offset, int length, ByteOrder order) { + // The parent class calls 
this repeatedly for the same "frame" if there weren't enough + // accumulated bytes the first time. Only decode the header the first time: + if (header == null) { + try { + header = segmentCodec.decodeHeader(buffer.slice(offset, length)); + } catch (com.datastax.oss.protocol.internal.CrcMismatchException e) { + throw new CrcMismatchException(e.getMessage()); + } + } + return header.payloadLength; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/FrameToSegmentEncoder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/FrameToSegmentEncoder.java new file mode 100644 index 00000000000..1e69247c85d --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/FrameToSegmentEncoder.java @@ -0,0 +1,67 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.PrimitiveCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelOutboundHandlerAdapter; +import io.netty.channel.ChannelPromise; +import net.jcip.annotations.NotThreadSafe; + +@NotThreadSafe +public class FrameToSegmentEncoder extends ChannelOutboundHandlerAdapter { + + private final PrimitiveCodec primitiveCodec; + private final FrameCodec frameCodec; + private final String logPrefix; + + private ByteBufSegmentBuilder segmentBuilder; + + public FrameToSegmentEncoder( + @NonNull PrimitiveCodec primitiveCodec, + @NonNull FrameCodec frameCodec, + @NonNull String logPrefix) { + this.primitiveCodec = primitiveCodec; + this.frameCodec = frameCodec; + this.logPrefix = logPrefix; + } + + @Override + public void handlerAdded(@NonNull ChannelHandlerContext ctx) { + segmentBuilder = new ByteBufSegmentBuilder(ctx, primitiveCodec, frameCodec, logPrefix); + } + + @Override + public void write( + @NonNull ChannelHandlerContext ctx, @NonNull Object msg, @NonNull ChannelPromise promise) + throws Exception { + if (msg instanceof Frame) { + segmentBuilder.addFrame(((Frame) msg), promise); + } else { + super.write(ctx, msg, promise); + } + } + + @Override + public void flush(@NonNull ChannelHandlerContext ctx) throws Exception { + segmentBuilder.flush(); + super.flush(ctx); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java index 3dd47d17320..e3b2ce1a344 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import io.netty.buffer.ByteBuf; import java.nio.ByteBuffer; import net.jcip.annotations.ThreadSafe; @@ -35,9 +36,14 @@ public class Lz4Compressor extends ByteBufCompressor { private final LZ4FastDecompressor decompressor; public Lz4Compressor(DriverContext context) { + this(context.getSessionName()); + } + + @VisibleForTesting + Lz4Compressor(String sessionName) { if (DependencyCheck.LZ4.isPresent()) { LZ4Factory lz4Factory = LZ4Factory.fastestInstance(); - LOG.info("[{}] Using {}", context.getSessionName(), lz4Factory.toString()); + LOG.info("[{}] Using {}", sessionName, lz4Factory.toString()); this.compressor = lz4Factory.fastCompressor(); this.decompressor = lz4Factory.fastDecompressor(); } else { @@ -54,17 +60,20 @@ public String algorithm() { } @Override - protected ByteBuf compressDirect(ByteBuf input) { + protected ByteBuf compressDirect(ByteBuf input, boolean prependWithUncompressedLength) { int maxCompressedLength = compressor.maxCompressedLength(input.readableBytes()); // If the input is direct we will allocate a direct output buffer as well as this will allow us // to use LZ4Compressor.compress and so eliminate memory copies. - ByteBuf output = input.alloc().directBuffer(4 + maxCompressedLength); + ByteBuf output = + input.alloc().directBuffer((prependWithUncompressedLength ? 4 : 0) + maxCompressedLength); try { ByteBuffer in = inputNioBuffer(input); // Increase reader index. 
input.readerIndex(input.writerIndex()); - output.writeInt(in.remaining()); + if (prependWithUncompressedLength) { + output.writeInt(in.remaining()); + } ByteBuffer out = outputNioBuffer(output); int written = @@ -81,7 +90,7 @@ protected ByteBuf compressDirect(ByteBuf input) { } @Override - protected ByteBuf compressHeap(ByteBuf input) { + protected ByteBuf compressHeap(ByteBuf input, boolean prependWithUncompressedLength) { int maxCompressedLength = compressor.maxCompressedLength(input.readableBytes()); // Not a direct buffer so use byte arrays... @@ -93,9 +102,12 @@ protected ByteBuf compressHeap(ByteBuf input) { // Allocate a heap buffer from the ByteBufAllocator as we may use a PooledByteBufAllocator and // so can eliminate the overhead of allocate a new byte[]. - ByteBuf output = input.alloc().heapBuffer(4 + maxCompressedLength); + ByteBuf output = + input.alloc().heapBuffer((prependWithUncompressedLength ? 4 : 0) + maxCompressedLength); try { - output.writeInt(len); + if (prependWithUncompressedLength) { + output.writeInt(len); + } // calculate the correct offset. int offset = output.arrayOffset() + output.writerIndex(); byte[] out = output.array(); @@ -112,11 +124,15 @@ protected ByteBuf compressHeap(ByteBuf input) { } @Override - protected ByteBuf decompressDirect(ByteBuf input) { + protected int readUncompressedLength(ByteBuf compressed) { + return compressed.readInt(); + } + + @Override + protected ByteBuf decompressDirect(ByteBuf input, int uncompressedLength) { // If the input is direct we will allocate a direct output buffer as well as this will allow us // to use LZ4Compressor.decompress and so eliminate memory copies. int readable = input.readableBytes(); - int uncompressedLength = input.readInt(); ByteBuffer in = inputNioBuffer(input); // Increase reader index. 
input.readerIndex(input.writerIndex()); @@ -124,7 +140,7 @@ protected ByteBuf decompressDirect(ByteBuf input) { try { ByteBuffer out = outputNioBuffer(output); int read = decompressor.decompress(in, in.position(), out, out.position(), out.remaining()); - if (read != readable - 4) { + if (read != readable) { throw new IllegalArgumentException("Compressed lengths mismatch"); } @@ -139,11 +155,10 @@ protected ByteBuf decompressDirect(ByteBuf input) { } @Override - protected ByteBuf decompressHeap(ByteBuf input) { + protected ByteBuf decompressHeap(ByteBuf input, int uncompressedLength) { // Not a direct buffer so use byte arrays... byte[] in = input.array(); int len = input.readableBytes(); - int uncompressedLength = input.readInt(); int inOffset = input.arrayOffset() + input.readerIndex(); // Increase reader index. input.readerIndex(input.writerIndex()); @@ -153,9 +168,9 @@ protected ByteBuf decompressHeap(ByteBuf input) { ByteBuf output = input.alloc().heapBuffer(uncompressedLength); try { int offset = output.arrayOffset() + output.writerIndex(); - byte out[] = output.array(); + byte[] out = output.array(); int read = decompressor.decompress(in, inOffset, out, offset, uncompressedLength); - if (read != len - 4) { + if (read != len) { throw new IllegalArgumentException("Compressed lengths mismatch"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToBytesEncoder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToBytesEncoder.java new file mode 100644 index 00000000000..6a4a35fddbd --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToBytesEncoder.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.SegmentCodec; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import io.netty.channel.ChannelHandler; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageEncoder; +import java.util.List; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +@ChannelHandler.Sharable +public class SegmentToBytesEncoder extends MessageToMessageEncoder> { + + private final SegmentCodec segmentCodec; + + public SegmentToBytesEncoder(@NonNull SegmentCodec segmentCodec) { + this.segmentCodec = segmentCodec; + } + + @Override + protected void encode( + @NonNull ChannelHandlerContext ctx, + @NonNull Segment segment, + @NonNull List out) { + segmentCodec.encode(segment, out); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java new file mode 100644 index 00000000000..13564e47bff --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java @@ -0,0 +1,116 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.Segment; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.CompositeByteBuf; +import io.netty.channel.ChannelHandlerContext; +import io.netty.handler.codec.MessageToMessageDecoder; +import java.util.ArrayList; +import java.util.List; +import net.jcip.annotations.NotThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Converts the segments decoded by {@link BytesToSegmentDecoder} into legacy frames understood by + * the rest of the driver. 
+ */ +@NotThreadSafe +public class SegmentToFrameDecoder extends MessageToMessageDecoder> { + + private static final Logger LOG = LoggerFactory.getLogger(SegmentToFrameDecoder.class); + + private static final int UNKNOWN_LENGTH = Integer.MIN_VALUE; + + private final FrameCodec frameCodec; + private final String logPrefix; + + // Accumulated state when we are reading a sequence of slices + private int targetLength = UNKNOWN_LENGTH; + private final List accumulatedSlices = new ArrayList<>(); + private int accumulatedLength; + + public SegmentToFrameDecoder(@NonNull FrameCodec frameCodec, @NonNull String logPrefix) { + this.logPrefix = logPrefix; + this.frameCodec = frameCodec; + } + + @Override + protected void decode( + @NonNull ChannelHandlerContext ctx, + @NonNull Segment segment, + @NonNull List out) { + if (segment.isSelfContained) { + decodeSelfContained(segment, out); + } else { + decodeSlice(segment, ctx.alloc(), out); + } + } + + private void decodeSelfContained(Segment segment, List out) { + ByteBuf payload = segment.payload; + int frameCount = 0; + do { + Frame frame = frameCodec.decode(payload); + LOG.trace( + "[{}] Decoded response frame {} from self-contained segment", logPrefix, frame.streamId); + out.add(frame); + frameCount += 1; + } while (payload.isReadable()); + payload.release(); + LOG.trace("[{}] Done processing self-contained segment ({} frames)", logPrefix, frameCount); + } + + private void decodeSlice(Segment segment, ByteBufAllocator allocator, List out) { + assert targetLength != UNKNOWN_LENGTH ^ (accumulatedSlices.isEmpty() && accumulatedLength == 0); + ByteBuf slice = segment.payload; + if (targetLength == UNKNOWN_LENGTH) { + // First slice, read ahead to find the target length + targetLength = FrameCodec.V3_ENCODED_HEADER_SIZE + frameCodec.decodeBodySize(slice); + } + accumulatedSlices.add(slice); + accumulatedLength += slice.readableBytes(); + LOG.trace( + "[{}] Decoded slice {}, {}/{} bytes", + logPrefix, + accumulatedSlices.size(), + 
accumulatedLength, + targetLength); + assert accumulatedLength <= targetLength; + if (accumulatedLength == targetLength) { + // We've received enough data to reassemble the whole message + CompositeByteBuf encodedFrame = allocator.compositeBuffer(accumulatedSlices.size()); + encodedFrame.addComponents(true, accumulatedSlices); + Frame frame = frameCodec.decode(encodedFrame); + LOG.trace( + "[{}] Decoded response frame {} from {} slices", + logPrefix, + frame.streamId, + accumulatedSlices.size()); + out.add(frame); + // Reset our state + targetLength = UNKNOWN_LENGTH; + accumulatedSlices.clear(); + accumulatedLength = 0; + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java index 9461a1a0a41..fbfd3eff9b2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java @@ -23,6 +23,13 @@ import net.jcip.annotations.ThreadSafe; import org.xerial.snappy.Snappy; +/** + * @implNote The Snappy protocol already encodes the uncompressed length in the compressed payload, + * so {@link #compress(ByteBuf)} and {@link #compressWithoutLength(ByteBuf)} produce the same + * output for this compressor. The corresponding parameters {@code + * prependWithUncompressedLength} and {@code uncompressedLength} are ignored by their respective + * methods. 
+ */ @ThreadSafe public class SnappyCompressor extends ByteBufCompressor { @@ -41,7 +48,8 @@ public String algorithm() { } @Override - protected ByteBuf compressDirect(ByteBuf input) { + protected ByteBuf compressDirect( + ByteBuf input, /*ignored*/ boolean prependWithUncompressedLength) { int maxCompressedLength = Snappy.maxCompressedLength(input.readableBytes()); // If the input is direct we will allocate a direct output buffer as well as this will allow us // to use Snappy.compress(ByteBuffer, ByteBuffer) and so eliminate memory copies. @@ -64,7 +72,7 @@ protected ByteBuf compressDirect(ByteBuf input) { } @Override - protected ByteBuf compressHeap(ByteBuf input) { + protected ByteBuf compressHeap(ByteBuf input, /*ignored*/ boolean prependWithUncompressedLength) { int maxCompressedLength = Snappy.maxCompressedLength(input.readableBytes()); int inOffset = input.arrayOffset() + input.readerIndex(); byte[] in = input.array(); @@ -92,7 +100,15 @@ protected ByteBuf compressHeap(ByteBuf input) { } @Override - protected ByteBuf decompressDirect(ByteBuf input) { + protected int readUncompressedLength(ByteBuf compressed) { + // Since compress methods don't actually prepend with a length, we have nothing to read here. + // Return a bogus length (it will be ignored by the decompress methods, so the actual value + // doesn't matter). + return -1; + } + + @Override + protected ByteBuf decompressDirect(ByteBuf input, /*ignored*/ int uncompressedLength) { ByteBuffer in = inputNioBuffer(input); // Increase reader index. input.readerIndex(input.writerIndex()); @@ -122,7 +138,7 @@ protected ByteBuf decompressDirect(ByteBuf input) { } @Override - protected ByteBuf decompressHeap(ByteBuf input) throws RuntimeException { + protected ByteBuf decompressHeap(ByteBuf input, /*ignored*/ int uncompressedLength) { // Not a direct buffer so use byte arrays... 
int inOffset = input.arrayOffset() + input.readerIndex(); byte[] in = input.array(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java index fc0ebefa219..6ab1cbe10ac 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Sizes.java @@ -44,7 +44,7 @@ public static int minimumRequestSize(Request request) { // Frame header has a fixed size of 9 for protocol version >= V3, which includes Frame flags // size - int size = FrameCodec.headerEncodedSize(); + int size = FrameCodec.V3_ENCODED_HEADER_SIZE; if (!request.getCustomPayload().isEmpty()) { // Custom payload is not supported in v3, but assume user won't have a custom payload set if diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoderTest.java new file mode 100644 index 00000000000..f4376a3f2ee --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BytesToSegmentDecoderTest.java @@ -0,0 +1,149 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; + +import com.datastax.oss.driver.api.core.connection.CrcMismatchException; +import com.datastax.oss.protocol.internal.Compressor; +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.SegmentCodec; +import com.google.common.base.Strings; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufUtil; +import io.netty.buffer.Unpooled; +import io.netty.buffer.UnpooledByteBufAllocator; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.DecoderException; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class BytesToSegmentDecoderTest { + + // Hard-coded test data, the values were generated with our encoding methods. + // We're not really testing the decoding itself here, only that our subclass calls the + // LengthFieldBasedFrameDecoder parent constructor with the right parameters. 
+ private static final ByteBuf REGULAR_HEADER = byteBuf("04000201f9f2"); + private static final ByteBuf REGULAR_PAYLOAD = byteBuf("00000001"); + private static final ByteBuf REGULAR_TRAILER = byteBuf("1fd6022d"); + private static final ByteBuf REGULAR_WRONG_HEADER = byteBuf("04000202f9f2"); + private static final ByteBuf REGULAR_WRONG_TRAILER = byteBuf("1fd6022e"); + + private static final ByteBuf MAX_HEADER = byteBuf("ffff03254047"); + private static final ByteBuf MAX_PAYLOAD = + byteBuf(Strings.repeat("01", Segment.MAX_PAYLOAD_LENGTH)); + private static final ByteBuf MAX_TRAILER = byteBuf("a05c2f13"); + + private static final ByteBuf LZ4_HEADER = byteBuf("120020000491c94f"); + private static final ByteBuf LZ4_PAYLOAD_UNCOMPRESSED = + byteBuf("00000001000000010000000100000001"); + private static final ByteBuf LZ4_PAYLOAD_COMPRESSED = + byteBuf("f00100000001000000010000000100000001"); + private static final ByteBuf LZ4_TRAILER = byteBuf("2bd67f90"); + + private static final Compressor LZ4_COMPRESSOR = new Lz4Compressor("test"); + + private EmbeddedChannel channel; + + @Before + public void setup() { + channel = new EmbeddedChannel(); + } + + @Test + public void should_decode_regular_segment() { + channel.pipeline().addLast(newDecoder(Compressor.none())); + channel.writeInbound(Unpooled.wrappedBuffer(REGULAR_HEADER, REGULAR_PAYLOAD, REGULAR_TRAILER)); + Segment segment = channel.readInbound(); + assertThat(segment.isSelfContained).isTrue(); + assertThat(segment.payload).isEqualTo(REGULAR_PAYLOAD); + } + + @Test + public void should_decode_max_length_segment() { + channel.pipeline().addLast(newDecoder(Compressor.none())); + channel.writeInbound(Unpooled.wrappedBuffer(MAX_HEADER, MAX_PAYLOAD, MAX_TRAILER)); + Segment segment = channel.readInbound(); + assertThat(segment.isSelfContained).isTrue(); + assertThat(segment.payload).isEqualTo(MAX_PAYLOAD); + } + + @Test + public void should_decode_segment_from_multiple_incoming_chunks() { + 
channel.pipeline().addLast(newDecoder(Compressor.none())); + // Send the header in two slices, to cover the case where the length can't be read the first + // time: + ByteBuf headerStart = REGULAR_HEADER.slice(0, 3); + ByteBuf headerEnd = REGULAR_HEADER.slice(3, 3); + channel.writeInbound(headerStart); + channel.writeInbound(headerEnd); + channel.writeInbound(REGULAR_PAYLOAD.duplicate()); + channel.writeInbound(REGULAR_TRAILER.duplicate()); + Segment segment = channel.readInbound(); + assertThat(segment.isSelfContained).isTrue(); + assertThat(segment.payload).isEqualTo(REGULAR_PAYLOAD); + } + + @Test + public void should_decode_compressed_segment() { + channel.pipeline().addLast(newDecoder(LZ4_COMPRESSOR)); + // We need a contiguous buffer for this one, because of how our decompressor operates + ByteBuf buffer = Unpooled.wrappedBuffer(LZ4_HEADER, LZ4_PAYLOAD_COMPRESSED, LZ4_TRAILER).copy(); + channel.writeInbound(buffer); + Segment segment = channel.readInbound(); + assertThat(segment.isSelfContained).isTrue(); + assertThat(segment.payload).isEqualTo(LZ4_PAYLOAD_UNCOMPRESSED); + } + + @Test + public void should_surface_header_crc_mismatch() { + try { + channel.pipeline().addLast(newDecoder(Compressor.none())); + channel.writeInbound( + Unpooled.wrappedBuffer(REGULAR_WRONG_HEADER, REGULAR_PAYLOAD, REGULAR_TRAILER)); + fail("Expected a " + DecoderException.class.getSimpleName()); + } catch (DecoderException exception) { + assertThat(exception).hasCauseInstanceOf(CrcMismatchException.class); + } + } + + @Test + public void should_surface_trailer_crc_mismatch() { + try { + channel.pipeline().addLast(newDecoder(Compressor.none())); + channel.writeInbound( + Unpooled.wrappedBuffer(REGULAR_HEADER, REGULAR_PAYLOAD, REGULAR_WRONG_TRAILER)); + fail("Expected a " + DecoderException.class.getSimpleName()); + } catch (DecoderException exception) { + assertThat(exception).hasCauseInstanceOf(CrcMismatchException.class); + } + } + + private BytesToSegmentDecoder 
newDecoder(Compressor compressor) { + return new BytesToSegmentDecoder( + new SegmentCodec<>( + new ByteBufPrimitiveCodec(UnpooledByteBufAllocator.DEFAULT), compressor)); + } + + private static ByteBuf byteBuf(String hex) { + return Unpooled.unreleasableBuffer( + Unpooled.wrappedBuffer(ByteBufUtil.decodeHexDump(hex)).asReadOnly()); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java new file mode 100644 index 00000000000..2bb93f0901b --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java @@ -0,0 +1,95 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.shaded.guava.common.base.Strings; +import com.datastax.oss.protocol.internal.Compressor; +import com.datastax.oss.protocol.internal.Frame; +import com.datastax.oss.protocol.internal.FrameCodec; +import com.datastax.oss.protocol.internal.Message; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; +import com.datastax.oss.protocol.internal.ProtocolV5ServerCodecs; +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.request.AuthResponse; +import com.datastax.oss.protocol.internal.response.result.Void; +import com.datastax.oss.protocol.internal.util.Bytes; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.UnpooledByteBufAllocator; +import io.netty.channel.embedded.EmbeddedChannel; +import java.util.Collections; +import org.junit.Before; +import org.junit.Test; + +public class SegmentToFrameDecoderTest { + + private static final FrameCodec FRAME_CODEC = + new FrameCodec<>( + new ByteBufPrimitiveCodec(UnpooledByteBufAllocator.DEFAULT), + Compressor.none(), + new ProtocolV5ClientCodecs(), + new ProtocolV5ServerCodecs()); + + private EmbeddedChannel channel; + + @Before + public void setup() { + channel = new EmbeddedChannel(); + channel.pipeline().addLast(new SegmentToFrameDecoder(FRAME_CODEC, "test")); + } + + @Test + public void should_decode_self_contained() { + ByteBuf payload = UnpooledByteBufAllocator.DEFAULT.buffer(); + payload.writeBytes(encodeFrame(Void.INSTANCE)); + payload.writeBytes(encodeFrame(new AuthResponse(Bytes.fromHexString("0xabcdef")))); + + channel.writeInbound(new Segment<>(payload, true)); + + Frame frame1 = channel.readInbound(); + assertThat(frame1.message).isInstanceOf(Void.class); + Frame frame2 = channel.readInbound(); + 
assertThat(frame2.message).isInstanceOf(AuthResponse.class); + } + + @Test + public void should_decode_sequence_of_slices() { + ByteBuf encodedFrame = + encodeFrame(new AuthResponse(Bytes.fromHexString("0x" + Strings.repeat("aa", 1011)))); + int sliceLength = 100; + do { + ByteBuf payload = encodedFrame.readSlice(Math.min(sliceLength, encodedFrame.readableBytes())); + channel.writeInbound(new Segment<>(payload, false)); + } while (encodedFrame.isReadable()); + + Frame frame = channel.readInbound(); + assertThat(frame.message).isInstanceOf(AuthResponse.class); + } + + private static ByteBuf encodeFrame(Message message) { + Frame frame = + Frame.forResponse( + ProtocolConstants.Version.V5, + 1, + null, + Collections.emptyMap(), + Collections.emptyList(), + message); + return FRAME_CODEC.encode(frame); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SliceWriteListenerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SliceWriteListenerTest.java new file mode 100644 index 00000000000..6e2d0655195 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SliceWriteListenerTest.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import io.netty.channel.ChannelPromise; +import io.netty.channel.embedded.EmbeddedChannel; +import org.junit.Before; +import org.junit.Test; + +public class SliceWriteListenerTest { + + private final EmbeddedChannel channel = new EmbeddedChannel(); + + private ChannelPromise framePromise, slicePromise1, slicePromise2, slicePromise3; + + @Before + public void setup() { + framePromise = channel.newPromise(); + slicePromise1 = channel.newPromise(); + slicePromise2 = channel.newPromise(); + slicePromise3 = channel.newPromise(); + + ByteBufSegmentBuilder.SliceWriteListener listener = + new ByteBufSegmentBuilder.SliceWriteListener( + framePromise, ImmutableList.of(slicePromise1, slicePromise2, slicePromise3)); + slicePromise1.addListener(listener); + slicePromise2.addListener(listener); + slicePromise3.addListener(listener); + + assertThat(framePromise.isDone()).isFalse(); + } + + @Test + public void should_succeed_frame_if_all_slices_succeed() { + slicePromise1.setSuccess(); + assertThat(framePromise.isDone()).isFalse(); + slicePromise2.setSuccess(); + assertThat(framePromise.isDone()).isFalse(); + slicePromise3.setSuccess(); + + assertThat(framePromise.isSuccess()).isTrue(); + } + + @Test + public void should_fail_frame_and_cancel_remaining_slices_if_one_slice_fails() { + slicePromise1.setSuccess(); + assertThat(framePromise.isDone()).isFalse(); + Exception failure = new Exception("test"); + slicePromise2.setFailure(failure); + + assertThat(framePromise.isDone()).isTrue(); + assertThat(framePromise.isSuccess()).isFalse(); + assertThat(framePromise.cause()).isEqualTo(failure); + + assertThat(slicePromise3.isCancelled()).isTrue(); + } +} From 2fe22ea6401ee5fa7ed7c5b11d5ce4a6cc4a2808 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 9 Jul 2020 14:08:48 -0500 Subject: 
[PATCH 543/979] JAVA-2808: Provide metrics bindings for Micrometer and MicroProfile --- bom/pom.xml | 10 + changelog/README.md | 1 + .../metrics/DropwizardMetricsFactory.java | 39 +--- .../internal/core/metrics/MetricPaths.java | 66 ++++++ integration-tests/pom.xml | 15 ++ .../common/AbstractMetricsTestBase.java | 87 ++++++++ .../micrometer/MicrometerMetricsIT.java | 193 ++++++++++++++++ .../microprofile/MicroProfileMetricsIT.java | 211 ++++++++++++++++++ manual/developer/common/.nav | 3 +- manual/developer/common/README.md | 17 +- manual/developer/common/metrics/README.md | 117 ++++++++++ metrics/micrometer/pom.xml | 100 +++++++++ .../micrometer/MicrometerDriverContext.java | 45 ++++ .../micrometer/MicrometerMetricUpdater.java | 85 +++++++ .../micrometer/MicrometerMetricsFactory.java | 134 +++++++++++ .../MicrometerNodeMetricUpdater.java | 139 ++++++++++++ .../MicrometerSessionMetricUpdater.java | 144 ++++++++++++ .../MicrometerMetricsFactoryTest.java | 91 ++++++++ metrics/microprofile/pom.xml | 100 +++++++++ .../MicroProfileDriverContext.java | 47 ++++ .../MicroProfileMetricUpdater.java | 86 +++++++ .../MicroProfileMetricsFactory.java | 133 +++++++++++ .../MicroProfileNodeMetricUpdater.java | 140 ++++++++++++ .../MicroProfileSessionMetricUpdater.java | 136 +++++++++++ .../MicroProfileMetricsFactoryTest.java | 91 ++++++++ pom.xml | 17 ++ 26 files changed, 2201 insertions(+), 46 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java create mode 100644 manual/developer/common/metrics/README.md create mode 100644 metrics/micrometer/pom.xml create mode 
100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java create mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java create mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java create mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java create mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java create mode 100644 metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java create mode 100644 metrics/microprofile/pom.xml create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java create mode 100644 metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java diff --git a/bom/pom.xml b/bom/pom.xml index 0f9ed52b414..52200847707 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -58,6 +58,16 @@ java-driver-test-infra 4.8.0-SNAPSHOT + + com.datastax.oss + java-driver-metrics-micrometer + 4.8.0-SNAPSHOT + + + com.datastax.oss + 
java-driver-metrics-microprofile + 4.8.0-SNAPSHOT + com.datastax.oss native-protocol diff --git a/changelog/README.md b/changelog/README.md index 9309f45cfdf..272f62aa4d6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [new feature] JAVA-2808: Provide metrics bindings for Micrometer and MicroProfile - [new feature] JAVA-2773: Support new protocol v5 message format - [improvement] JAVA-2841: Raise timeouts during connection initialization - [bug] JAVA-2331: Unregister old metrics when a node gets removed or changes RPC address diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index e859f62d9b5..6598596b423 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -16,13 +16,9 @@ package com.datastax.oss.driver.internal.core.metrics; import com.codahale.metrics.MetricRegistry; -import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; -import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; @@ -34,8 +30,6 @@ import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; -import java.util.Collections; 
-import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; @@ -49,7 +43,6 @@ public class DropwizardMetricsFactory implements MetricsFactory { private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); - private final String logPrefix; private final InternalDriverContext context; private final Set enabledNodeMetrics; private final MetricRegistry registry; @@ -58,14 +51,12 @@ public class DropwizardMetricsFactory implements MetricsFactory { private final Cache metricsCache; public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { - this.logPrefix = context.getSessionName(); this.context = context; - + String logPrefix = context.getSessionName(); DriverExecutionProfile config = context.getConfig().getDefaultProfile(); Set enabledSessionMetrics = parseSessionMetricPaths(config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)); Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); - this.enabledNodeMetrics = parseNodeMetricPaths(config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED)); @@ -138,34 +129,10 @@ public NodeMetricUpdater newNodeUpdater(Node node) { } protected Set parseSessionMetricPaths(List paths) { - Set result = new HashSet<>(); - for (String path : paths) { - try { - result.add(DefaultSessionMetric.fromPath(path)); - } catch (IllegalArgumentException e) { - try { - result.add(DseSessionMetric.fromPath(path)); - } catch (IllegalArgumentException e1) { - LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path); - } - } - } - return Collections.unmodifiableSet(result); + return MetricPaths.parseSessionMetricPaths(paths, context.getSessionName()); } protected Set parseNodeMetricPaths(List paths) { - Set result = new HashSet<>(); - for (String path : paths) { - try { - result.add(DefaultNodeMetric.fromPath(path)); - } catch 
(IllegalArgumentException e) { - try { - result.add(DseNodeMetric.fromPath(path)); - } catch (IllegalArgumentException e1) { - LOG.warn("[{}] Unknown node metric {}, skipping", logPrefix, path); - } - } - } - return Collections.unmodifiableSet(result); + return MetricPaths.parseNodeMetricPaths(paths, context.getSessionName()); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java new file mode 100644 index 00000000000..b95edc74f73 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java @@ -0,0 +1,66 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MetricPaths { + + private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); + + public static Set parseSessionMetricPaths(List paths, String logPrefix) { + Set result = new HashSet<>(); + for (String path : paths) { + try { + result.add(DefaultSessionMetric.fromPath(path)); + } catch (IllegalArgumentException e) { + try { + result.add(DseSessionMetric.fromPath(path)); + } catch (IllegalArgumentException e1) { + LOG.warn("[{}] Unknown session metric {}, skipping", logPrefix, path); + } + } + } + return Collections.unmodifiableSet(result); + } + + public static Set parseNodeMetricPaths(List paths, String logPrefix) { + Set result = new HashSet<>(); + for (String path : paths) { + try { + result.add(DefaultNodeMetric.fromPath(path)); + } catch (IllegalArgumentException e) { + try { + result.add(DseNodeMetric.fromPath(path)); + } catch (IllegalArgumentException e1) { + LOG.warn("[{}] Unknown node metric {}, skipping", logPrefix, path); + } + } + } + return Collections.unmodifiableSet(result); + } +} diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ad223906201..3bc2ab59dee 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -71,6 +71,16 @@ test-jar test + + com.datastax.oss + java-driver-metrics-micrometer + test + + + com.datastax.oss + 
java-driver-metrics-microprofile + test + com.github.spotbugs spotbugs-annotations @@ -171,6 +181,11 @@ commons-exec test + + io.smallrye + smallrye-metrics + test + diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java new file mode 100644 index 00000000000..c17ff8ad80a --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java @@ -0,0 +1,87 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.metrics.common; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.junit.ClassRule; +import org.junit.Test; + +public abstract class AbstractMetricsTestBase { + + @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final List ENABLED_SESSION_METRICS = + Stream.of(DefaultSessionMetric.values()) + .map(DefaultSessionMetric::getPath) + .collect(Collectors.toList()); + private static final List ENABLED_NODE_METRICS = + Stream.of(DefaultNodeMetric.values()) + .map(DefaultNodeMetric::getPath) + .collect(Collectors.toList()); + + protected abstract SessionBuilder getSessionBuilder(); + + protected abstract void assertMetrics(CqlSession session); + + protected abstract Collection getRegistryMetrics(); + + @Test + public void should_expose_metrics() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, ENABLED_SESSION_METRICS) + .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, ENABLED_NODE_METRICS) + .build(); + SessionBuilder builder = + getSessionBuilder().addContactEndPoints(CCM_RULE.getContactPoints()); + try (CqlSession session 
= (CqlSession) builder.withConfigLoader(loader).build()) { + for (int i = 0; i < 10; i++) { + session.execute("SELECT release_version FROM system.local"); + } + + // Should have 10 requests. Assert all applicable metrics. + assertMetricsSize(getRegistryMetrics()); + assertMetrics(session); + } + } + + protected String buildSessionMetricPattern(SessionMetric metric, CqlSession s) { + return s.getContext().getSessionName() + "\\." + metric.getPath(); + } + + protected String buildNodeMetricPattern(NodeMetric metric, CqlSession s) { + return s.getContext().getSessionName() + "\\.nodes\\.\\S*\\." + metric.getPath(); + } + + private void assertMetricsSize(Collection metrics) { + assertThat(metrics).hasSize(ENABLED_SESSION_METRICS.size() + ENABLED_NODE_METRICS.size()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java new file mode 100644 index 00000000000..a1e63d4b1f2 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -0,0 +1,193 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.metrics.micrometer; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerDriverContext; +import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.Gauge; +import io.micrometer.core.instrument.Meter; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Timer; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import java.util.Collection; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.regex.Pattern; +import org.assertj.core.api.Condition; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class MicrometerMetricsIT extends AbstractMetricsTestBase { + + private static final MeterRegistry METER_REGISTRY = new SimpleMeterRegistry(); + + @Override + protected void assertMetrics(CqlSession session) { + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(5, TimeUnit.SECONDS) + .untilAsserted( + () -> + assertThat(METER_REGISTRY.getMeters()) + .haveExactly( + 1, + buildTimerCondition( + "CQL_REQUESTS should be a SESSION Timer with count 10", + 
buildSessionMetricPattern(DefaultSessionMetric.CQL_REQUESTS, session), + a -> a == 10)) + .haveExactly( + 1, + buildTimerCondition( + "CQL_MESSAGESS should be a NODE Timer with count 10", + buildNodeMetricPattern(DefaultNodeMetric.CQL_MESSAGES, session), + a -> a == 10)) + .haveExactly( + 1, + buildGaugeCondition( + "CONNECTED_NODES should be a SESSION Gauge with count 1", + buildSessionMetricPattern( + DefaultSessionMetric.CONNECTED_NODES, session), + a -> a == 1)) + .haveExactly( + 1, + buildCounterCondition( + "RETRIES should be a NODE Counter with count 0", + buildNodeMetricPattern(DefaultNodeMetric.RETRIES, session), + a -> a == 0)) + .haveExactly( + 1, + buildCounterCondition( + "BYTES_SENT should be a SESSION Counter with count > 0", + buildSessionMetricPattern(DefaultSessionMetric.BYTES_SENT, session), + a -> a > 0)) + .haveExactly( + 1, + buildCounterCondition( + "BYTES_SENT should be a SESSION Counter with count > 0", + buildNodeMetricPattern(DefaultNodeMetric.BYTES_SENT, session), + a -> a > 0)) + .haveExactly( + 1, + buildCounterCondition( + "BYTES_RECEIVED should be a SESSION Counter with count > 0", + buildSessionMetricPattern(DefaultSessionMetric.BYTES_RECEIVED, session), + a -> a > 0)) + .haveExactly( + 1, + buildGaugeCondition( + "AVAILABLE_STREAMS should be a NODE Gauge with count 1024", + buildNodeMetricPattern(DefaultNodeMetric.AVAILABLE_STREAMS, session), + a -> a == 1024)) + .haveExactly( + 1, + buildCounterCondition( + "BYTES_RECEIVED should be a NODE Counter with count > 0", + buildNodeMetricPattern(DefaultNodeMetric.BYTES_RECEIVED, session), + a -> a > 0))); + } + + @Override + protected SessionBuilder getSessionBuilder() { + return new MicrometerSessionBuilder(METER_REGISTRY); + } + + @Override + protected Collection getRegistryMetrics() { + return METER_REGISTRY.getMeters(); + } + + private Condition buildTimerCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition(description) { + 
@Override + public boolean matches(Meter obj) { + if (!(obj instanceof Timer)) { + return false; + } + Timer timer = (Timer) obj; + return Pattern.matches(metricPattern, timer.getId().getName()) + && verifyFunction.apply(timer.count()); + } + }; + } + + private Condition buildCounterCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition(description) { + @Override + public boolean matches(Meter obj) { + if (!(obj instanceof Counter)) { + return false; + } + Counter counter = (Counter) obj; + return Pattern.matches(metricPattern, counter.getId().getName()) + && verifyFunction.apply(counter.count()); + } + }; + } + + private Condition buildGaugeCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition(description) { + @Override + public boolean matches(Meter obj) { + if (!(obj instanceof Gauge)) { + return false; + } + Gauge gauge = (Gauge) obj; + return Pattern.matches(metricPattern, gauge.getId().getName()) + && verifyFunction.apply(gauge.value()); + } + }; + } + + private static class MicrometerSessionBuilder + extends SessionBuilder { + + private final MeterRegistry registry; + + MicrometerSessionBuilder(@NonNull MeterRegistry registry) { + this.registry = registry; + } + + @Override + @NonNull + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + return defaultSession; + } + + @Override + @NonNull + protected DriverContext buildContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments) { + return new MicrometerDriverContext(configLoader, programmaticArguments, registry); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java new file mode 100644 index 00000000000..97dd5d2ba47 --- /dev/null +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -0,0 +1,211 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.metrics.microprofile; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileDriverContext; +import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.smallrye.metrics.MetricsRegistryImpl; +import java.util.Collection; +import java.util.Map.Entry; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.regex.Pattern; +import org.assertj.core.api.Condition; +import org.eclipse.microprofile.metrics.Counter; +import org.eclipse.microprofile.metrics.Gauge; +import 
org.eclipse.microprofile.metrics.Meter; +import org.eclipse.microprofile.metrics.Metric; +import org.eclipse.microprofile.metrics.MetricID; +import org.eclipse.microprofile.metrics.MetricRegistry; +import org.eclipse.microprofile.metrics.Timer; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class MicroProfileMetricsIT extends AbstractMetricsTestBase { + + private static final MetricRegistry METRIC_REGISTRY = new MetricsRegistryImpl(); + + @Override + protected void assertMetrics(CqlSession session) { + await() + .pollInterval(500, TimeUnit.MILLISECONDS) + .atMost(5, TimeUnit.SECONDS) + .untilAsserted( + () -> + assertThat(METRIC_REGISTRY.getMetrics()) + .hasEntrySatisfying( + buildTimerCondition( + "CQL_REQUESTS should be a SESSION Timer with count 10", + buildSessionMetricPattern(DefaultSessionMetric.CQL_REQUESTS, session), + a -> a == 10)) + .hasEntrySatisfying( + buildGaugeCondition( + "CONNECTED_NODES should be a SESSION Gauge with count 1", + buildSessionMetricPattern( + DefaultSessionMetric.CONNECTED_NODES, session), + a -> a == 1)) + .hasEntrySatisfying( + buildMeterCondition( + "BYTES_SENT should be a SESSION Meter with count > 0", + buildSessionMetricPattern(DefaultSessionMetric.BYTES_SENT, session), + a -> a > 0)) + .hasEntrySatisfying( + buildMeterCondition( + "BYTES_SENT should be a SESSION Meter with count > 0", + buildNodeMetricPattern(DefaultNodeMetric.BYTES_SENT, session), + a -> a > 0)) + .hasEntrySatisfying( + buildMeterCondition( + "BYTES_RECEIVED should be a SESSION Meter with count > 0", + buildSessionMetricPattern(DefaultSessionMetric.BYTES_RECEIVED, session), + a -> a > 0)) + .hasEntrySatisfying( + buildMeterCondition( + "BYTES_RECEIVED should be a NODE Meter with count > 0", + buildNodeMetricPattern(DefaultNodeMetric.BYTES_RECEIVED, session), + a -> a > 0)) + .hasEntrySatisfying( + buildTimerCondition( + "CQL_MESSAGESS should be a NODE Timer with count 10", + 
buildNodeMetricPattern(DefaultNodeMetric.CQL_MESSAGES, session), + a -> a == 10)) + .hasEntrySatisfying( + buildGaugeCondition( + "AVAILABLE_STREAMS should be a NODE Gauge with count 1024", + buildNodeMetricPattern(DefaultNodeMetric.AVAILABLE_STREAMS, session), + a -> a == 1024)) + .hasEntrySatisfying( + buildCounterCondition( + "RETRIES should be a NODE Counter with count 0", + buildNodeMetricPattern(DefaultNodeMetric.RETRIES, session), + a -> a == 0))); + } + + @Override + protected SessionBuilder getSessionBuilder() { + return new MicroProfileSessionBuilder(METRIC_REGISTRY); + } + + @Override + protected Collection getRegistryMetrics() { + return METRIC_REGISTRY.getMetrics().entrySet(); + } + + private Condition> buildTimerCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition>(description) { + @Override + public boolean matches(Entry metric) { + if (!(metric.getValue() instanceof Timer)) { + // Metric is not a Timer + return false; + } + final Timer timer = (Timer) metric.getValue(); + final MetricID id = metric.getKey(); + return verifyFunction.apply(timer.getCount()) + && Pattern.matches(metricPattern, id.getName()); + } + }; + } + + private Condition> buildCounterCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition>(description) { + @Override + public boolean matches(Entry metric) { + if (!(metric.getValue() instanceof Counter)) { + // Metric is not a Counter + return false; + } + final Counter counter = (Counter) metric.getValue(); + final MetricID id = metric.getKey(); + return verifyFunction.apply(counter.getCount()) + && Pattern.matches(metricPattern, id.getName()); + } + }; + } + + private Condition> buildMeterCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition>(description) { + @Override + public boolean matches(Entry metric) { + if (!(metric.getValue() instanceof Meter)) { + // Metric is not a Meter 
+ return false; + } + final Meter meter = (Meter) metric.getValue(); + final MetricID id = metric.getKey(); + return verifyFunction.apply(meter.getCount()) + && Pattern.matches(metricPattern, id.getName()); + } + }; + } + + private Condition> buildGaugeCondition( + String description, String metricPattern, Function verifyFunction) { + return new Condition>(description) { + @Override + public boolean matches(Entry metric) { + if (!(metric.getValue() instanceof Gauge)) { + // Metric is not a Gauge + return false; + } + final Gauge gauge = (Gauge) metric.getValue(); + final Number gaugeValue = (Number) gauge.getValue(); + final MetricID id = metric.getKey(); + return verifyFunction.apply(gaugeValue.doubleValue()) + && Pattern.matches(metricPattern, id.getName()); + } + }; + } + + private static class MicroProfileSessionBuilder + extends SessionBuilder { + + private final MetricRegistry registry; + + MicroProfileSessionBuilder(@NonNull MetricRegistry registry) { + this.registry = registry; + } + + @NonNull + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + return defaultSession; + } + + @NonNull + @Override + protected DriverContext buildContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments) { + return new MicroProfileDriverContext(configLoader, programmaticArguments, registry); + } + } +} diff --git a/manual/developer/common/.nav b/manual/developer/common/.nav index 16c3d54dcb9..f52fddf980c 100644 --- a/manual/developer/common/.nav +++ b/manual/developer/common/.nav @@ -1,3 +1,4 @@ context concurrency -event_bus \ No newline at end of file +event_bus +metrics \ No newline at end of file diff --git a/manual/developer/common/README.md b/manual/developer/common/README.md index 53db9ff6f31..fdc3b7144c1 100644 --- a/manual/developer/common/README.md +++ b/manual/developer/common/README.md @@ -1,11 +1,10 @@ ## Common infrastructure -This covers utilities or concept that are shared throughout 
the codebase. - -The [context](context/) is what glues everything together, and your primary entry point to extend -the driver. - -We then explain the two major approaches to deal with [concurrency](concurrency/) in the driver. - -Lastly, we briefly touch on the [event bus](event_bus/), which is used to decouple some of the -internal components through asynchronous messaging. \ No newline at end of file +This covers utilities or concept that are shared throughout the codebase: + +* the [context](context/) is what glues everything together, and your primary entry point to extend + the driver. +* we explain the two major approaches to deal with [concurrency](concurrency/) in the driver. +* the [event bus](event_bus/) is used to decouple some of the internal components through + asynchronous messaging. +* [metrics](metrics/) can be switched to a different implementation framework. \ No newline at end of file diff --git a/manual/developer/common/metrics/README.md b/manual/developer/common/metrics/README.md new file mode 100644 index 00000000000..05c133ded54 --- /dev/null +++ b/manual/developer/common/metrics/README.md @@ -0,0 +1,117 @@ +## Metrics + +[Driver Metrics](../../../core/metrics/) are reported via [Dropwizard Metrics] by default. + +With a bit of custom code, it is possible to switch to a different framework: we provide +alternative implementations for [Micrometer] and [Eclipse MicroProfile Metrics]. + +### Adding Metrics Framework Dependency + +Each implementation lives in a dedicated driver module. Add the following dependency to use +Micrometer: + +```xml + + com.datastax.oss + java-driver-metrics-micrometer + ${driver.version} + +``` + +or the following for MicroProfile: + +```xml + + com.datastax.oss + java-driver-metrics-microprofile + ${driver.version} + +``` + +### Enabling Metrics Framework On A Session + +Once the dependency has been added, you need to +[override the context component](../context/#overriding-a-context-component) `MetricsFactory`. 
If +this is the only customization you have, we provide context classes out of the box, so you just need +to write a custom session builder. + +For Micrometer: + +```java +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerDriverContext; +import io.micrometer.core.instrument.MeterRegistry; + +public class CustomSessionBuilder extends SessionBuilder { + + private final MeterRegistry registry; + + public CustomSessionBuilder(MeterRegistry registry){ + this.registry = registry; + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new MicrometerDriverContext(configLoader, programmaticArguments, registry); + } + + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + // Nothing to do here, nothing changes on the session type + return defaultSession; + } +} +``` + +Or for MicroProfile: + +```java +import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileDriverContext; +import org.eclipse.microprofile.metrics.MetricRegistry; + +public class CustomSessionBuilder extends SessionBuilder { + + private final MetricRegistry registry; + + public CustomSessionBuilder(MetricRegistry registry){ + this.registry = registry; + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new MicroProfileDriverContext(configLoader, programmaticArguments, registry); + } + + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + // Nothing to do here, nothing changes on the session type + return defaultSession; + } +} +``` + +Use the new builder class to create your driver session: + +```java +CqlSession session = new CustomSessionBuilder() + .addContactPoint(new InetSocketAddress("1.2.3.4", 9042)) + .withLocalDatacenter("datacenter1") + .build(); +``` + +Notes: + +* For both Micrometer and MicroProfile metrics, your application 
will need to provide a Registry + implementation to which driver metrics will be registered. Some environments may provide access to + available instances of the registry + ([Spring](https://micrometer.io/docs/ref/spring/1.5#_configuring), for example, provides many + implementations of Micrometer MeterRegistry instances) that can be used. +* `Session.getMetrics()` will only work with the built-in implementation. Our `Metrics` interface + references DropWizard types directly, we didn't want to make it generic because it would + over-complicate the driver API. If you use another framework and need programmatic access to the + metrics, you'll need to find your own way to expose the registry. + +[Dropwizard Metrics]: http://metrics.dropwizard.io/4.0.0/manual/index.html +[Micrometer]: https://micrometer.io/ +[Eclipse MicroProfile Metrics]: https://projects.eclipse.org/projects/technology.microprofile/releases/metrics-2.3 \ No newline at end of file diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml new file mode 100644 index 00000000000..6ad67a69429 --- /dev/null +++ b/metrics/micrometer/pom.xml @@ -0,0 +1,100 @@ + + + + 4.0.0 + + com.datastax.oss + java-driver-parent + 4.8.0-SNAPSHOT + ../../ + + java-driver-metrics-micrometer + bundle + DataStax Java driver for Apache Cassandra(R) - Metrics - Micrometer + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + + + + io.micrometer + micrometer-core + + + com.datastax.oss + java-driver-core + + + io.dropwizard.metrics + metrics-core + + + + + ch.qos.logback + logback-classic + test + + + junit + junit + test + + + com.tngtech.java + junit-dataprovider + test + + + org.assertj + assertj-core + test + + + org.mockito + mockito-core + test + + + com.datastax.oss + java-driver-core + test + test-jar + + + + + + org.revapi + revapi-maven-plugin + + + true + + + + + diff --git 
a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java new file mode 100644 index 00000000000..41deba431eb --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.micrometer.core.instrument.MeterRegistry; + +/** Implementation of {@link DriverContext} that provides for a Micrometer {@link MeterRegistry}. 
*/ +public class MicrometerDriverContext extends DefaultDriverContext { + + private final MeterRegistry registry; + + public MicrometerDriverContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments, + @NonNull MeterRegistry registry) { + super(configLoader, programmaticArguments); + this.registry = registry; + } + + @Override + @NonNull + protected MetricsFactory buildMetricsFactory() { + return new MicrometerMetricsFactory(this, registry, Ticker.systemTicker()); + } +} diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java new file mode 100644 index 00000000000..b5214c9e4f8 --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -0,0 +1,85 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.metrics.MetricUpdater; +import io.micrometer.core.instrument.MeterRegistry; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public abstract class MicrometerMetricUpdater implements MetricUpdater { + protected final Set enabledMetrics; + protected final MeterRegistry registry; + + protected MicrometerMetricUpdater(Set enabledMetrics, MeterRegistry registry) { + this.enabledMetrics = enabledMetrics; + this.registry = registry; + } + + protected abstract String buildFullName(MetricT metric, String profileName); + + @Override + public void incrementCounter(MetricT metric, String profileName, long amount) { + if (isEnabled(metric, profileName)) { + registry.counter(buildFullName(metric, profileName)).increment(amount); + } + } + + @Override + public void updateHistogram(MetricT metric, String profileName, long value) { + if (isEnabled(metric, profileName)) { + registry.summary(buildFullName(metric, profileName)).record(value); + } + } + + @Override + public void markMeter(MetricT metric, String profileName, long amount) { + if (isEnabled(metric, profileName)) { + registry.counter(buildFullName(metric, profileName)).increment(amount); + } + } + + @Override + public void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit) { + if (isEnabled(metric, profileName)) { + registry.timer(buildFullName(metric, profileName)).record(duration, unit); + } + } + + @Override + public boolean isEnabled(MetricT metric, String profileName) { + return enabledMetrics.contains(metric); + } + + protected void initializeDefaultCounter(MetricT metric, String profileName) { + if (isEnabled(metric, profileName)) { + // Just initialize eagerly so that the metric appears even when it has no data yet + 
registry.counter(buildFullName(metric, profileName)); + } + } + + protected void initializeTimer(MetricT metric, DriverExecutionProfile config) { + String profileName = config.getName(); + if (isEnabled(metric, profileName)) { + String fullName = buildFullName(metric, profileName); + + registry.timer(fullName); + } + } +} diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java new file mode 100644 index 00000000000..9e997045139 --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricPaths; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; +import io.micrometer.core.instrument.MeterRegistry; +import java.time.Duration; +import java.util.Optional; +import java.util.Set; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class MicrometerMetricsFactory implements MetricsFactory { + + private static final Logger LOG = LoggerFactory.getLogger(MicrometerMetricsFactory.class); + static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); + + private final InternalDriverContext context; + private final Set enabledNodeMetrics; + private final MeterRegistry registry; + private final SessionMetricUpdater 
sessionUpdater; + private final Cache metricsCache; + + public MicrometerMetricsFactory( + InternalDriverContext context, MeterRegistry registry, Ticker ticker) { + this.context = context; + String logPrefix = context.getSessionName(); + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + Set enabledSessionMetrics = + MetricPaths.parseSessionMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED), logPrefix); + this.enabledNodeMetrics = + MetricPaths.parseNodeMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED), logPrefix); + + Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); + + metricsCache = + CacheBuilder.newBuilder() + .ticker(ticker) + .expireAfterAccess(evictionTime) + .removalListener( + (RemovalNotification notification) -> { + LOG.debug( + "[{}] Removing metrics for node: {} from cache after {}", + logPrefix, + notification.getKey(), + evictionTime); + notification.getValue().cleanupNodeMetrics(); + }) + .build(); + + if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { + LOG.debug("[{}] All metrics are disabled, Session.getMetrics will be empty", logPrefix); + this.registry = null; + this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; + } else { + this.registry = registry; + this.sessionUpdater = + new MicrometerSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + } + } + + @VisibleForTesting + static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { + Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); + + if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { + LOG.warn( + "[{}] Value too low for {}: {}. 
Forcing to {} instead.", + logPrefix, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + evictionTime, + LOWEST_ACCEPTABLE_EXPIRE_AFTER); + } + + return evictionTime; + } + + @Override + public Optional getMetrics() { + throw new UnsupportedOperationException( + "getMetrics() is not supported with Micrometer. The driver publishes its metrics directly to the global MeterRegistry."); + } + + @Override + public SessionMetricUpdater getSessionUpdater() { + return sessionUpdater; + } + + @Override + public NodeMetricUpdater newNodeUpdater(Node node) { + if (registry == null) { + return NoopNodeMetricUpdater.INSTANCE; + } + MicrometerNodeMetricUpdater updater = + new MicrometerNodeMetricUpdater( + node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + metricsCache.put(node, updater); + return updater; + } +} diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java new file mode 100644 index 00000000000..f15a5366144 --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java @@ -0,0 +1,139 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import io.micrometer.core.instrument.MeterRegistry; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +public class MicrometerNodeMetricUpdater extends MicrometerMetricUpdater + implements NodeMetricUpdater { + + private final String metricNamePrefix; + private final Runnable signalMetricUpdated; + + public MicrometerNodeMetricUpdater( + Node node, + Set enabledMetrics, + MeterRegistry registry, + DriverContext driverContext, + Runnable signalMetricUpdated) { + super(enabledMetrics, registry); + this.signalMetricUpdated = signalMetricUpdated; + InternalDriverContext context = (InternalDriverContext) driverContext; + this.metricNamePrefix = buildPrefix(driverContext.getSessionName(), node.getEndPoint()); + + DriverExecutionProfile config = driverContext.getConfig().getDefaultProfile(); + + if (enabledMetrics.contains(DefaultNodeMetric.OPEN_CONNECTIONS)) { + this.registry.gauge( + buildFullName(DefaultNodeMetric.OPEN_CONNECTIONS, null), node.getOpenConnections()); + } + initializePoolGauge( + DefaultNodeMetric.AVAILABLE_STREAMS, node, ChannelPool::getAvailableIds, context); + initializePoolGauge(DefaultNodeMetric.IN_FLIGHT, node, ChannelPool::getInFlight, context); + initializePoolGauge( + 
DefaultNodeMetric.ORPHANED_STREAMS, node, ChannelPool::getOrphanedIds, context); + initializeTimer(DefaultNodeMetric.CQL_MESSAGES, config); + initializeDefaultCounter(DefaultNodeMetric.UNSENT_REQUESTS, null); + initializeDefaultCounter(DefaultNodeMetric.ABORTED_REQUESTS, null); + initializeDefaultCounter(DefaultNodeMetric.WRITE_TIMEOUTS, null); + initializeDefaultCounter(DefaultNodeMetric.READ_TIMEOUTS, null); + initializeDefaultCounter(DefaultNodeMetric.UNAVAILABLES, null); + initializeDefaultCounter(DefaultNodeMetric.OTHER_ERRORS, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, null); + initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); + initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); + initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); + initializeTimer(DseNodeMetric.GRAPH_MESSAGES, driverContext.getConfig().getDefaultProfile()); + } + + @Override + public String buildFullName(NodeMetric metric, String profileName) { + return metricNamePrefix + metric.getPath(); + } + + private String buildPrefix(String sessionName, EndPoint endPoint) { + return sessionName + ".nodes." 
+ endPoint.asMetricPrefix() + "."; + } + + @Override + public void incrementCounter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.incrementCounter(metric, profileName, amount); + } + + @Override + public void updateHistogram(NodeMetric metric, String profileName, long value) { + signalMetricUpdated.run(); + super.updateHistogram(metric, profileName, value); + } + + @Override + public void markMeter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.markMeter(metric, profileName, amount); + } + + @Override + public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { + signalMetricUpdated.run(); + super.updateTimer(metric, profileName, duration, unit); + } + + private void initializePoolGauge( + NodeMetric metric, + Node node, + Function reading, + InternalDriverContext context) { + if (enabledMetrics.contains(metric)) { + final String metricName = buildFullName(metric, null); + registry.gauge( + metricName, + context, + c -> { + ChannelPool pool = c.getPoolManager().getPools().get(node); + return (pool == null) ? 0 : reading.apply(pool); + }); + } + } + + public void cleanupNodeMetrics() { + registry.getMeters().removeIf(metric -> metric.getId().getName().startsWith(metricNamePrefix)); + } +} diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java new file mode 100644 index 00000000000..e1ff7bb7122 --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java @@ -0,0 +1,144 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; +import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.micrometer.core.instrument.MeterRegistry; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MicrometerSessionMetricUpdater extends MicrometerMetricUpdater + implements SessionMetricUpdater { + + private static final Logger LOG = LoggerFactory.getLogger(MicrometerSessionMetricUpdater.class); + + private final String metricNamePrefix; + + public 
MicrometerSessionMetricUpdater( + Set enabledMetrics, MeterRegistry registry, DriverContext driverContext) { + super(enabledMetrics, registry); + InternalDriverContext context = (InternalDriverContext) driverContext; + this.metricNamePrefix = context.getSessionName() + "."; + + if (enabledMetrics.contains(DefaultSessionMetric.CONNECTED_NODES)) { + registerConnectedNodeGauge(context); + } + if (enabledMetrics.contains(DefaultSessionMetric.THROTTLING_QUEUE_SIZE)) { + registerThrottlingQueueGauge(context); + } + if (enabledMetrics.contains(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) { + registerPreparedCacheGauge(context); + } + initializeTimer(DefaultSessionMetric.CQL_REQUESTS, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, null); + initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); + initializeTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); + initializeTimer(DseSessionMetric.GRAPH_REQUESTS, context.getConfig().getDefaultProfile()); + } + + private void registerConnectedNodeGauge(InternalDriverContext context) { + this.registry.gauge( + buildFullName(DefaultSessionMetric.CONNECTED_NODES, null), + context, + c -> { + int count = 0; + for (Node node : c.getMetadataManager().getMetadata().getNodes().values()) { + if (node.getOpenConnections() > 0) { + ++count; + } + } + return count; + }); + } + + private void registerThrottlingQueueGauge(InternalDriverContext context) { + this.registry.gauge( + buildFullName(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, null), + context, + c -> { + RequestThrottler requestThrottler = c.getRequestThrottler(); + String logPrefix = c.getSessionName(); + if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { + return 
((ConcurrencyLimitingRequestThrottler) requestThrottler).getQueueSize(); + } + if (requestThrottler instanceof RateLimitingRequestThrottler) { + return ((RateLimitingRequestThrottler) requestThrottler).getQueueSize(); + } + LOG.warn( + "[{}] Metric {} does not support {}, it will always return 0", + logPrefix, + DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), + requestThrottler.getClass().getName()); + return 0; + }); + } + + private void registerPreparedCacheGauge(InternalDriverContext context) { + this.registry.gauge( + buildFullName(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, null), + context, + c -> { + Cache cache = getPreparedStatementCache(c); + if (cache == null) { + LOG.warn( + "[{}] Metric {} is enabled in the config, " + + "but it looks like no CQL prepare processor is registered. " + + "The gauge will always return 0", + context.getSessionName(), + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); + return 0L; + } + return cache.size(); + }); + } + + @Override + public String buildFullName(SessionMetric metric, String profileName) { + return metricNamePrefix + metric.getPath(); + } + + @Nullable + private static Cache getPreparedStatementCache(InternalDriverContext context) { + // By default, both the sync processor and the async one are registered and they share the same + // cache. But with a custom processor registry, there could be only one of the two present. 
+ for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { + if (processor instanceof CqlPrepareAsyncProcessor) { + return ((CqlPrepareAsyncProcessor) processor).getCache(); + } else if (processor instanceof CqlPrepareSyncProcessor) { + return ((CqlPrepareSyncProcessor) processor).getCache(); + } + } + return null; + } +} diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java new file mode 100644 index 00000000000..697083ee1e5 --- /dev/null +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java @@ -0,0 +1,91 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import static com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class MicrometerMetricsFactoryTest { + + private static final String LOG_PREFIX = "prefix"; + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(MicrometerMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + MicrometerMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[%s] Value too low for %s: %s. 
Forcing to %s instead.", + LOG_PREFIX, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + LOWEST_ACCEPTABLE_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(MicrometerMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + MicrometerMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml new file mode 100644 index 00000000000..b926fbdc34e --- /dev/null +++ b/metrics/microprofile/pom.xml @@ -0,0 +1,100 @@ + + + + 4.0.0 + + com.datastax.oss + java-driver-parent + 4.8.0-SNAPSHOT + ../../ + + java-driver-metrics-microprofile + bundle + DataStax Java driver for Apache Cassandra(R) - Metrics - Microprofile + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + + + + org.eclipse.microprofile.metrics + microprofile-metrics-api + + + com.datastax.oss + java-driver-core + + + io.dropwizard.metrics + metrics-core + + + + + ch.qos.logback + logback-classic + test + + + junit + junit + test + + + com.tngtech.java + junit-dataprovider + test + + + org.assertj + assertj-core + test + + + org.mockito + mockito-core + test + + + com.datastax.oss + java-driver-core + test + test-jar + + + + + + org.revapi + 
revapi-maven-plugin + + + true + + + + + diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java new file mode 100644 index 00000000000..97afff035c2 --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.eclipse.microprofile.metrics.MetricRegistry; + +/** + * Implementation of {@link DriverContext} that provides for a Micrometer {@link MetricRegistry}. 
+ */ +public class MicroProfileDriverContext extends DefaultDriverContext { + + private final MetricRegistry registry; + + public MicroProfileDriverContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments, + @NonNull MetricRegistry registry) { + super(configLoader, programmaticArguments); + this.registry = registry; + } + + @Override + @NonNull + protected MetricsFactory buildMetricsFactory() { + return new MicroProfileMetricsFactory(this, registry, Ticker.systemTicker()); + } +} diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java new file mode 100644 index 00000000000..b712d1db8bd --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.metrics.MetricUpdater; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import net.jcip.annotations.ThreadSafe; +import org.eclipse.microprofile.metrics.MetricRegistry; + +@ThreadSafe +public abstract class MicroProfileMetricUpdater implements MetricUpdater { + + protected final Set enabledMetrics; + protected final MetricRegistry registry; + + protected MicroProfileMetricUpdater(Set enabledMetrics, MetricRegistry registry) { + this.enabledMetrics = enabledMetrics; + this.registry = registry; + } + + protected abstract String buildFullName(MetricT metric, String profileName); + + @Override + public void incrementCounter(MetricT metric, String profileName, long amount) { + if (isEnabled(metric, profileName)) { + registry.counter(buildFullName(metric, profileName)).inc(amount); + } + } + + @Override + public void updateHistogram(MetricT metric, String profileName, long value) { + if (isEnabled(metric, profileName)) { + registry.histogram(buildFullName(metric, profileName)).update(value); + } + } + + @Override + public void markMeter(MetricT metric, String profileName, long amount) { + if (isEnabled(metric, profileName)) { + registry.meter(buildFullName(metric, profileName)).mark(amount); + } + } + + @Override + public void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit) { + if (isEnabled(metric, profileName)) { + registry.timer(buildFullName(metric, profileName)).update(duration, unit); + } + } + + @Override + public boolean isEnabled(MetricT metric, String profileName) { + return enabledMetrics.contains(metric); + } + + protected void initializeDefaultCounter(MetricT metric, String profileName) { + if (isEnabled(metric, profileName)) { + // Just initialize eagerly so that the metric appears even when it has no data yet + 
registry.counter(buildFullName(metric, profileName)); + } + } + + protected void initializeTimer(MetricT metric, DriverExecutionProfile config) { + String profileName = config.getName(); + if (isEnabled(metric, profileName)) { + String fullName = buildFullName(metric, profileName); + + registry.timer(fullName); + } + } +} diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java new file mode 100644 index 00000000000..a0f4136e977 --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java @@ -0,0 +1,133 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricPaths; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; +import java.time.Duration; +import java.util.Optional; +import java.util.Set; +import net.jcip.annotations.ThreadSafe; +import org.eclipse.microprofile.metrics.MetricRegistry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class MicroProfileMetricsFactory implements MetricsFactory { + private static final Logger LOG = LoggerFactory.getLogger(MicroProfileMetricsFactory.class); + static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); + + private final InternalDriverContext context; + private final Set enabledNodeMetrics; + private final MetricRegistry registry; + private final SessionMetricUpdater 
sessionUpdater; + private final Cache metricsCache; + + public MicroProfileMetricsFactory( + InternalDriverContext context, MetricRegistry registry, Ticker ticker) { + this.context = context; + String logPrefix = context.getSessionName(); + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + Set enabledSessionMetrics = + MetricPaths.parseSessionMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED), logPrefix); + this.enabledNodeMetrics = + MetricPaths.parseNodeMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED), logPrefix); + + Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); + + metricsCache = + CacheBuilder.newBuilder() + .ticker(ticker) + .expireAfterAccess(evictionTime) + .removalListener( + (RemovalNotification notification) -> { + LOG.debug( + "[{}] Removing metrics for node: {} from cache after {}", + logPrefix, + notification.getKey(), + evictionTime); + notification.getValue().cleanupNodeMetrics(); + }) + .build(); + + if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { + LOG.debug("[{}] All metrics are disabled.", logPrefix); + this.registry = null; + this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; + } else { + this.registry = registry; + this.sessionUpdater = + new MicroProfileSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + } + } + + @VisibleForTesting + static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { + Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); + + if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { + LOG.warn( + "[{}] Value too low for {}: {}. 
Forcing to {} instead.", + logPrefix, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + evictionTime, + LOWEST_ACCEPTABLE_EXPIRE_AFTER); + } + + return evictionTime; + } + + @Override + public Optional getMetrics() { + throw new UnsupportedOperationException( + "getMetrics() is not supported with MicroProfile. The driver publishes its metrics directly to the MetricRegistry."); + } + + @Override + public SessionMetricUpdater getSessionUpdater() { + return sessionUpdater; + } + + @Override + public NodeMetricUpdater newNodeUpdater(Node node) { + if (registry == null) { + return NoopNodeMetricUpdater.INSTANCE; + } + MicroProfileNodeMetricUpdater updater = + new MicroProfileNodeMetricUpdater( + node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + metricsCache.put(node, updater); + return updater; + } +} diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java new file mode 100644 index 00000000000..d622210250e --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java @@ -0,0 +1,140 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import org.eclipse.microprofile.metrics.Gauge; +import org.eclipse.microprofile.metrics.MetricRegistry; + +public class MicroProfileNodeMetricUpdater extends MicroProfileMetricUpdater + implements NodeMetricUpdater { + + private final String metricNamePrefix; + private final Runnable signalMetricUpdated; + + public MicroProfileNodeMetricUpdater( + Node node, + Set enabledMetrics, + MetricRegistry registry, + DriverContext driverContext, + Runnable signalMetricUpdated) { + super(enabledMetrics, registry); + this.signalMetricUpdated = signalMetricUpdated; + InternalDriverContext context = (InternalDriverContext) driverContext; + this.metricNamePrefix = buildPrefix(driverContext.getSessionName(), node.getEndPoint()); + + DriverExecutionProfile config = driverContext.getConfig().getDefaultProfile(); + + if (enabledMetrics.contains(DefaultNodeMetric.OPEN_CONNECTIONS)) { + this.registry.register( + buildFullName(DefaultNodeMetric.OPEN_CONNECTIONS, null), + (Gauge) node::getOpenConnections); + } + initializePoolGauge( + DefaultNodeMetric.AVAILABLE_STREAMS, node, ChannelPool::getAvailableIds, context); + initializePoolGauge(DefaultNodeMetric.IN_FLIGHT, node, 
ChannelPool::getInFlight, context); + initializePoolGauge( + DefaultNodeMetric.ORPHANED_STREAMS, node, ChannelPool::getOrphanedIds, context); + initializeTimer(DefaultNodeMetric.CQL_MESSAGES, config); + initializeDefaultCounter(DefaultNodeMetric.UNSENT_REQUESTS, null); + initializeDefaultCounter(DefaultNodeMetric.ABORTED_REQUESTS, null); + initializeDefaultCounter(DefaultNodeMetric.WRITE_TIMEOUTS, null); + initializeDefaultCounter(DefaultNodeMetric.READ_TIMEOUTS, null); + initializeDefaultCounter(DefaultNodeMetric.UNAVAILABLES, null); + initializeDefaultCounter(DefaultNodeMetric.OTHER_ERRORS, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, null); + initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, null); + initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, null); + initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); + initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); + initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); + initializeTimer(DseNodeMetric.GRAPH_MESSAGES, driverContext.getConfig().getDefaultProfile()); + } + + @Override + public String buildFullName(NodeMetric metric, String profileName) { + return metricNamePrefix + metric.getPath(); + } + + private String buildPrefix(String sessionName, 
EndPoint endPoint) { + return sessionName + ".nodes." + endPoint.asMetricPrefix() + "."; + } + + @Override + public void incrementCounter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.incrementCounter(metric, profileName, amount); + } + + @Override + public void updateHistogram(NodeMetric metric, String profileName, long value) { + signalMetricUpdated.run(); + super.updateHistogram(metric, profileName, value); + } + + @Override + public void markMeter(NodeMetric metric, String profileName, long amount) { + signalMetricUpdated.run(); + super.markMeter(metric, profileName, amount); + } + + @Override + public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { + signalMetricUpdated.run(); + super.updateTimer(metric, profileName, duration, unit); + } + + private void initializePoolGauge( + NodeMetric metric, + Node node, + Function reading, + InternalDriverContext context) { + if (enabledMetrics.contains(metric)) { + registry.register( + buildFullName(metric, null), + (Gauge) + () -> { + ChannelPool pool = context.getPoolManager().getPools().get(node); + return (pool == null) ? 0 : reading.apply(pool); + }); + } + } + + public void cleanupNodeMetrics() { + registry.removeMatching((id, metric) -> id.getName().startsWith(metricNamePrefix)); + } +} diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java new file mode 100644 index 00000000000..e46286b453c --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java @@ -0,0 +1,136 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; +import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Set; +import org.eclipse.microprofile.metrics.Gauge; +import org.eclipse.microprofile.metrics.MetricRegistry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class MicroProfileSessionMetricUpdater extends MicroProfileMetricUpdater + 
implements SessionMetricUpdater { + + private static final Logger LOG = LoggerFactory.getLogger(MicroProfileSessionMetricUpdater.class); + + private final String metricNamePrefix; + + public MicroProfileSessionMetricUpdater( + Set enabledMetrics, MetricRegistry registry, DriverContext driverContext) { + super(enabledMetrics, registry); + InternalDriverContext context = (InternalDriverContext) driverContext; + this.metricNamePrefix = driverContext.getSessionName() + "."; + if (enabledMetrics.contains(DefaultSessionMetric.CONNECTED_NODES)) { + this.registry.register( + buildFullName(DefaultSessionMetric.CONNECTED_NODES, null), + (Gauge) + () -> { + int count = 0; + for (Node node : context.getMetadataManager().getMetadata().getNodes().values()) { + if (node.getOpenConnections() > 0) { + count += 1; + } + } + return count; + }); + } + ; + if (enabledMetrics.contains(DefaultSessionMetric.THROTTLING_QUEUE_SIZE)) { + this.registry.register( + buildFullName(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, null), + buildQueueGauge(context.getRequestThrottler(), context.getSessionName())); + } + if (enabledMetrics.contains(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) { + this.registry.register( + buildFullName(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, null), + createPreparedStatementsGauge(context)); + } + initializeTimer(DefaultSessionMetric.CQL_REQUESTS, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, null); + initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); + initializeTimer( + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, context.getConfig().getDefaultProfile()); + initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); + initializeTimer(DseSessionMetric.GRAPH_REQUESTS, context.getConfig().getDefaultProfile()); + } + + private Gauge 
createPreparedStatementsGauge(InternalDriverContext context) { + Cache cache = getPreparedStatementCache(context); + Gauge gauge; + if (cache == null) { + LOG.warn( + "[{}] Metric {} is enabled in the config, " + + "but it looks like no CQL prepare processor is registered. " + + "The gauge will always return 0", + context.getSessionName(), + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); + gauge = () -> 0L; + } else { + gauge = cache::size; + } + return gauge; + } + + @Override + public String buildFullName(SessionMetric metric, String profileName) { + return metricNamePrefix + metric.getPath(); + } + + private Gauge buildQueueGauge(RequestThrottler requestThrottler, String logPrefix) { + if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { + return ((ConcurrencyLimitingRequestThrottler) requestThrottler)::getQueueSize; + } else if (requestThrottler instanceof RateLimitingRequestThrottler) { + return ((RateLimitingRequestThrottler) requestThrottler)::getQueueSize; + } else { + LOG.warn( + "[{}] Metric {} does not support {}, it will always return 0", + logPrefix, + DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), + requestThrottler.getClass().getName()); + return () -> 0; + } + } + + @Nullable + private static Cache getPreparedStatementCache(InternalDriverContext context) { + // By default, both the sync processor and the async one are registered and they share the same + // cache. But with a custom processor registry, there could be only one of the two present. 
+ for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { + if (processor instanceof CqlPrepareAsyncProcessor) { + return ((CqlPrepareAsyncProcessor) processor).getCache(); + } else if (processor instanceof CqlPrepareSyncProcessor) { + return ((CqlPrepareSyncProcessor) processor).getCache(); + } + } + return null; + } +} diff --git a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java new file mode 100644 index 00000000000..5c450b32248 --- /dev/null +++ b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java @@ -0,0 +1,91 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import static com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class MicroProfileMetricsFactoryTest { + + private static final String LOG_PREFIX = "prefix"; + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(MicroProfileMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + MicroProfileMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[%s] Value too low for %s: %s. 
Forcing to %s instead.", + LOG_PREFIX, + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + LOWEST_ACCEPTABLE_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(MicroProfileMetricsFactory.class, Level.WARN); + DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); + + // when + when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + MicroProfileMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); + + // then + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} diff --git a/pom.xml b/pom.xml index 106e856c7e2..fb0b54eee5f 100644 --- a/pom.xml +++ b/pom.xml @@ -33,6 +33,8 @@ mapper-runtime mapper-processor test-infra + metrics/micrometer + metrics/microprofile integration-tests osgi-tests distribution @@ -396,6 +398,21 @@ svm ${graalapi.version} + + io.micrometer + micrometer-core + 1.5.0 + + + org.eclipse.microprofile.metrics + microprofile-metrics-api + 2.3 + + + io.smallrye + smallrye-metrics + 2.4.2 + From 3d2ec671465cdc99987bfc5a74cb776d507ee7d9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 16 Jul 2020 15:35:20 -0700 Subject: [PATCH 544/979] JAVA-2811: Add aliases for driver 3 method names --- changelog/README.md | 1 + .../core/cql/reactive/ReactiveSession.java | 42 +++++++++++++ .../driver/api/core/cql/AsyncCqlSession.java | 43 +++++++++++++ .../oss/driver/api/core/cql/Statement.java | 62 +++++++++++++++++++ .../driver/api/core/cql/StatementBuilder.java | 40 
++++++++++++ .../driver/api/core/cql/SyncCqlSession.java | 57 +++++++++++++++++ .../driver/api/core/data/GettableById.java | 9 +++ .../driver/api/core/data/GettableByIndex.java | 9 +++ .../driver/api/core/data/GettableByName.java | 9 +++ .../driver/api/core/data/SettableById.java | 11 ++++ .../driver/api/core/data/SettableByIndex.java | 11 ++++ .../driver/api/core/data/SettableByName.java | 11 ++++ .../api/core/session/SessionBuilder.java | 21 +++++++ 13 files changed, 326 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 272f62aa4d6..2e5c699eed0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.8.0 (in progress) +- [improvement] JAVA-2811: Add aliases for driver 3 method names - [new feature] JAVA-2808: Provide metrics bindings for Micrometer and MicroProfile - [new feature] JAVA-2773: Support new protocol v5 message format - [improvement] JAVA-2841: Raise timeouts during connection initialization diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java index fa0644c13c6..56adf116840 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.session.Session; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; import java.util.Objects; import org.reactivestreams.Publisher; @@ -39,14 +40,55 @@ public interface ReactiveSession extends Session { * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all * the results. * + *

          This is an alias for {@link #executeReactive(Statement)} + * executeReactive(SimpleStatement.newInstance(query))}. + * * @param query the query to execute. * @return The {@link Publisher} that will publish the returned results. + * @see SimpleStatement#newInstance(String) */ @NonNull default ReactiveResultSet executeReactive(@NonNull String query) { return executeReactive(SimpleStatement.newInstance(query)); } + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all + * the results. + * + *

          This is an alias for {@link #executeReactive(Statement)} + * executeReactive(SimpleStatement.newInstance(query, values))}. + * + * @param query the query to execute. + * @param values the values for placeholders in the query string. Individual values can be {@code + * null}, but the vararg array itself can't. + * @return The {@link Publisher} that will publish the returned results. + * @see SimpleStatement#newInstance(String,Object...) + */ + @NonNull + default ReactiveResultSet executeReactive(@NonNull String query, @NonNull Object... values) { + return executeReactive(SimpleStatement.newInstance(query, values)); + } + + /** + * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all + * the results. + * + *

          This is an alias for {@link #executeReactive(Statement)} + * executeReactive(SimpleStatement.newInstance(query,values))}. + * + * @param query the query to execute. + * @param values the values for named placeholders in the query string. Individual values can be + * {@code null}, but the map itself can't. + * @return The {@link Publisher} that will publish the returned results. + * @see SimpleStatement#newInstance(String,Map) + */ + @NonNull + default ReactiveResultSet executeReactive( + @NonNull String query, @NonNull Map values) { + return executeReactive(SimpleStatement.newInstance(query, values)); + } + /** * Returns a {@link Publisher} that, once subscribed to, executes the given query and emits all * the results. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java index dcd52f251cb..2f554361ea8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; import java.util.Objects; import java.util.concurrent.CompletionStage; @@ -45,14 +46,56 @@ default CompletionStage executeAsync(@NonNull Statement state * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, * generally before the result is available). * + *

          This is an alias for {@link #executeAsync(Statement)} + * executeAsync(SimpleStatement.newInstance(query))}. + * * @param query the CQL query to execute. * @return a {@code CompletionStage} that, once complete, will produce the async result set. + * @see SimpleStatement#newInstance(String) */ @NonNull default CompletionStage executeAsync(@NonNull String query) { return executeAsync(SimpleStatement.newInstance(query)); } + /** + * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, + * generally before the result is available). + * + *

          This is an alias for {@link #executeAsync(Statement)} + * executeAsync(SimpleStatement.newInstance(query, values))}. + * + * @param query the CQL query to execute. + * @param values the values for placeholders in the query string. Individual values can be {@code + * null}, but the vararg array itself can't. + * @return a {@code CompletionStage} that, once complete, will produce the async result set. + * @see SimpleStatement#newInstance(String, Object...) + */ + @NonNull + default CompletionStage executeAsync( + @NonNull String query, @NonNull Object... values) { + return executeAsync(SimpleStatement.newInstance(query, values)); + } + + /** + * Executes a CQL statement asynchronously (the call returns as soon as the statement was sent, + * generally before the result is available). + * + *

          This is an alias for {@link #executeAsync(Statement)} + * executeAsync(SimpleStatement.newInstance(query, values))}. + * + * @param query the CQL query to execute. + * @param values the values for named placeholders in the query string. Individual values can be + * {@code null}, but the map itself can't. + * @return a {@code CompletionStage} that, once complete, will produce the async result set. + * @see SimpleStatement#newInstance(String, Map) + */ + @NonNull + default CompletionStage executeAsync( + @NonNull String query, @NonNull Map values) { + return executeAsync(SimpleStatement.newInstance(query, values)); + } + /** * Prepares a CQL statement asynchronously (the call returns as soon as the prepare query was * sent, generally before the statement is prepared). diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index 246260d5b35..52023d3a6b7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -232,6 +232,28 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { @CheckReturnValue SelfT setTracing(boolean newTracing); + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setTracing(boolean) setTracing(true)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT enableTracing() { + return setTracing(true); + } + + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setTracing(boolean) setTracing(false)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT disableTracing() { + return setTracing(false); + } + /** * Returns the query timestamp, in microseconds, to send with the statement. * @@ -243,6 +265,15 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... 
newRoutingKeyComponents) { */ long getQueryTimestamp(); + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #getQueryTimestamp()}. + */ + @Deprecated + default long getDefaultTimestamp() { + return getQueryTimestamp(); + } + /** * Sets the query timestamp, in microseconds, to send with the statement. * @@ -259,6 +290,17 @@ default SelfT setRoutingKey(@NonNull ByteBuffer... newRoutingKeyComponents) { @CheckReturnValue SelfT setQueryTimestamp(long newTimestamp); + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setQueryTimestamp(long)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT setDefaultTimestamp(long newTimestamp) { + return setQueryTimestamp(newTimestamp); + } + /** * Sets how long to wait for this request to complete. This is a global limit on the duration of a * session.execute() call, including any retries the driver might do. @@ -355,6 +397,15 @@ default SelfT setPagingState(@Nullable PagingState newPagingState, @Nullable Ses */ int getPageSize(); + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #getPageSize()}. + */ + @Deprecated + default int getFetchSize() { + return getPageSize(); + } + /** * Configures how many rows will be retrieved simultaneously in a single network roundtrip (the * goal being to avoid loading too many results in memory at the same time). @@ -367,6 +418,17 @@ default SelfT setPagingState(@Nullable PagingState newPagingState, @Nullable Ses @CheckReturnValue SelfT setPageSize(int newPageSize); + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setPageSize(int)}. 
+ */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT setFetchSize(int newPageSize) { + return setPageSize(newPageSize); + } + /** * Returns the {@link ConsistencyLevel} to use for the statement. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java index 98d555b4f04..e93bb8e5c96 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/StatementBuilder.java @@ -186,6 +186,26 @@ public SelfT setTracing(boolean tracing) { return self; } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setTracing(boolean) setTracing(true)}. + */ + @Deprecated + @NonNull + public SelfT enableTracing() { + return setTracing(true); + } + + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setTracing(boolean) setTracing(false)}. + */ + @Deprecated + @NonNull + public SelfT disableTracing() { + return setTracing(false); + } + /** @see Statement#setQueryTimestamp(long) */ @NonNull public SelfT setQueryTimestamp(long timestamp) { @@ -193,6 +213,16 @@ public SelfT setQueryTimestamp(long timestamp) { return self; } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setQueryTimestamp(long)}. + */ + @Deprecated + @NonNull + public SelfT setDefaultTimestamp(long timestamp) { + return setQueryTimestamp(timestamp); + } + /** @see Statement#setPagingState(ByteBuffer) */ @NonNull public SelfT setPagingState(@Nullable ByteBuffer pagingState) { @@ -207,6 +237,16 @@ public SelfT setPageSize(int pageSize) { return self; } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setPageSize(int)}. 
+ */ + @Deprecated + @NonNull + public SelfT setFetchSize(int pageSize) { + return this.setPageSize(pageSize); + } + /** @see Statement#setConsistencyLevel(ConsistencyLevel) */ @NonNull public SelfT setConsistencyLevel(@Nullable ConsistencyLevel consistencyLevel) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java index 1aaf298d6bc..d691bf41b37 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SyncCqlSession.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.internal.core.cql.DefaultPrepareRequest; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; import java.util.Objects; /** @@ -57,6 +58,9 @@ default ResultSet execute(@NonNull Statement statement) { * Executes a CQL statement synchronously (the calling thread blocks until the result becomes * available). * + *

          This is an alias for {@link #execute(Statement) + * execute(SimpleStatement.newInstance(query))}. + * * @param query the CQL query to execute. * @return the result of the query. That result will never be null but can be empty (and will be * for any non SELECT query). @@ -67,12 +71,65 @@ default ResultSet execute(@NonNull Statement statement) { * consistency level successfully. * @throws QueryValidationException if the query if invalid (syntax error, unauthorized or any * other validation problem). + * @see SimpleStatement#newInstance(String) */ @NonNull default ResultSet execute(@NonNull String query) { return execute(SimpleStatement.newInstance(query)); } + /** + * Executes a CQL statement synchronously (the calling thread blocks until the result becomes + * available). + * + *

          This is an alias for {@link #execute(Statement) execute(SimpleStatement.newInstance(query, + * values))}. + * + * @param query the CQL query to execute. + * @param values the values for placeholders in the query string. Individual values can be {@code + * null}, but the vararg array itself can't. + * @return the result of the query. That result will never be null but can be empty (and will be + * for any non SELECT query). + * @throws AllNodesFailedException if no host in the cluster can be contacted successfully to + * execute this query. + * @throws QueryExecutionException if the query triggered an execution exception, i.e. an + * exception thrown by Cassandra when it cannot execute the query with the requested + * consistency level successfully. + * @throws QueryValidationException if the query if invalid (syntax error, unauthorized or any + * other validation problem). + * @see SimpleStatement#newInstance(String, Object...) + */ + @NonNull + default ResultSet execute(@NonNull String query, @NonNull Object... values) { + return execute(SimpleStatement.newInstance(query, values)); + } + + /** + * Executes a CQL statement synchronously (the calling thread blocks until the result becomes + * available). + * + *

          This is an alias for {@link #execute(Statement) execute(SimpleStatement.newInstance(query, + * values))}. + * + * @param query the CQL query to execute. + * @param values the values for named placeholders in the query string. Individual values can be + * {@code null}, but the map itself can't. + * @return the result of the query. That result will never be null but can be empty (and will be + * for any non SELECT query). + * @throws AllNodesFailedException if no host in the cluster can be contacted successfully to + * execute this query. + * @throws QueryExecutionException if the query triggered an execution exception, i.e. an + * exception thrown by Cassandra when it cannot execute the query with the requested + * consistency level successfully. + * @throws QueryValidationException if the query if invalid (syntax error, unauthorized or any + * other validation problem). + * @see SimpleStatement#newInstance(String, Map) + */ + @NonNull + default ResultSet execute(@NonNull String query, @NonNull Map values) { + return execute(SimpleStatement.newInstance(query, values)); + } + /** * Prepares a CQL statement synchronously (the calling thread blocks until the statement is * prepared). diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java index bf0ccfe1f2b..a6c46e4abe8 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java @@ -196,6 +196,15 @@ default boolean getBoolean(@NonNull CqlIdentifier id) { return getBoolean(firstIndexOf(id)); } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #getBoolean(CqlIdentifier)}. 
+ */ + @Deprecated + default boolean getBool(@NonNull CqlIdentifier id) { + return getBoolean(id); + } + /** * Returns the value for the first occurrence of {@code id} as a Java primitive byte. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java index 177fd654507..9e3502732c9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java @@ -175,6 +175,15 @@ default boolean getBoolean(int i) { } } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #getBoolean(int)}. + */ + @Deprecated + default boolean getBool(int i) { + return getBoolean(i); + } + /** * Returns the {@code i}th value as a Java primitive byte. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java index c1aca1576c6..abbb16aeb75 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java @@ -196,6 +196,15 @@ default boolean getBoolean(@NonNull String name) { return getBoolean(firstIndexOf(name)); } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #getBoolean(String)}. + */ + @Deprecated + default boolean getBool(@NonNull String name) { + return getBoolean(name); + } + /** * Returns the value for the first occurrence of {@code name} as a Java primitive byte. 
* diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index 29927a4a42f..0d3cba5601d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -194,6 +194,17 @@ default SelfT setBoolean(@NonNull CqlIdentifier id, boolean v) { return result; } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setBoolean(CqlIdentifier, boolean)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT setBool(@NonNull CqlIdentifier id, boolean v) { + return setBoolean(id, v); + } + /** * Sets the value for all occurrences of {@code id} to the provided Java primitive byte. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java index 2ff700cc3fa..7cd0b5671ff 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java @@ -153,6 +153,17 @@ default SelfT setBoolean(int i, boolean v) { : set(i, v, codec); } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #setBoolean(int, boolean)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT setBool(int i, boolean v) { + return setBoolean(i, v); + } + /** * Sets the {@code i}th value to the provided Java primitive byte. 
* diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index 0d4fed0688d..d93f4ebf5b2 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -193,6 +193,17 @@ default SelfT setBoolean(@NonNull String name, boolean v) { return result; } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias + * for{@link #setBoolean(String, boolean)}. + */ + @Deprecated + @NonNull + @CheckReturnValue + default SelfT setBool(@NonNull String name, boolean v) { + return setBoolean(name, v); + } + /** * Sets the value for all occurrences of {@code name} to the provided Java primitive byte. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 98aa8aa884d..10fd7bca988 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -310,6 +310,27 @@ public SelfT withAuthCredentials( new ProgrammaticPlainTextAuthProvider(username, password, authorizationId)); } + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #withAuthCredentials(String, String)}. + */ + @Deprecated + @NonNull + public SelfT withCredentials(@NonNull String username, @NonNull String password) { + return withAuthCredentials(username, password); + } + + /** + * @deprecated this method only exists to ease the transition from driver 3, it is an alias for + * {@link #withAuthCredentials(String, String,String)}. 
+ */ + @Deprecated + @NonNull + public SelfT withCredentials( + @NonNull String username, @NonNull String password, @NonNull String authorizationId) { + return withAuthCredentials(username, password, authorizationId); + } + /** * Registers an SSL engine factory for the session. * From 534dbc1e94018265ec210e3756b26243f3f47354 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Jul 2020 10:39:24 -0700 Subject: [PATCH 545/979] Generate empty javadoc JARs for metric modules --- metrics/micrometer/pom.xml | 22 ++++++++++++++++++++++ metrics/microprofile/pom.xml | 22 ++++++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 6ad67a69429..8b4c4ff3596 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -87,6 +87,28 @@ + + maven-jar-plugin + + + + javadoc-jar + package + + jar + + + javadoc + + ** + + + + + org.revapi revapi-maven-plugin diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index b926fbdc34e..022adbd178f 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -87,6 +87,28 @@ + + maven-jar-plugin + + + + javadoc-jar + package + + jar + + + javadoc + + ** + + + + + org.revapi revapi-maven-plugin From 255bdf7fbcff3fb834239c75950647364e22313f Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Jul 2020 10:45:00 -0700 Subject: [PATCH 546/979] Update version in docs --- README.md | 4 +-- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +++--- manual/core/README.md | 26 +++++++------- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++++----- manual/core/bom/README.md | 4 +-- manual/core/configuration/README.md | 20 +++++------ manual/core/control_connection/README.md | 2 +- manual/core/detachable_types/README.md | 14 ++++---- manual/core/dse/geotypes/README.md | 6 ++-- manual/core/dse/graph/README.md | 4 +-- 
manual/core/dse/graph/fluent/README.md | 4 +-- .../core/dse/graph/fluent/explicit/README.md | 12 +++---- manual/core/dse/graph/results/README.md | 6 ++-- manual/core/dse/graph/script/README.md | 6 ++-- manual/core/idempotence/README.md | 4 +-- manual/core/integration/README.md | 6 ++-- manual/core/load_balancing/README.md | 10 +++--- manual/core/metadata/README.md | 6 ++-- manual/core/metadata/node/README.md | 28 +++++++-------- manual/core/metadata/schema/README.md | 20 +++++------ manual/core/metadata/token/README.md | 4 +-- manual/core/native_protocol/README.md | 6 ++-- manual/core/paging/README.md | 12 +++---- manual/core/performance/README.md | 10 +++--- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +-- manual/core/reactive/README.md | 24 ++++++------- manual/core/reconnection/README.md | 8 ++--- manual/core/request_tracker/README.md | 4 +-- manual/core/retries/README.md | 34 +++++++++---------- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 ++-- manual/core/statements/README.md | 8 ++--- manual/core/statements/batch/README.md | 6 ++-- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 ++--- manual/core/statements/simple/README.md | 6 ++-- manual/core/temporal_types/README.md | 8 ++--- manual/core/throttling/README.md | 6 ++-- manual/core/tracing/README.md | 12 +++---- manual/core/tuples/README.md | 4 +-- manual/core/udts/README.md | 4 +-- manual/mapper/daos/README.md | 8 ++--- manual/mapper/daos/custom_types/README.md | 10 +++--- manual/mapper/daos/delete/README.md | 18 +++++----- manual/mapper/daos/getentity/README.md | 16 ++++----- manual/mapper/daos/insert/README.md | 14 ++++---- manual/mapper/daos/null_saving/README.md | 10 +++--- manual/mapper/daos/query/README.md | 22 ++++++------ manual/mapper/daos/queryprovider/README.md | 16 ++++----- manual/mapper/daos/select/README.md | 26 +++++++------- manual/mapper/daos/setentity/README.md | 10 
+++--- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +++---- manual/mapper/entities/README.md | 34 +++++++++---------- manual/mapper/mapper/README.md | 10 +++--- manual/osgi/README.md | 6 ++-- manual/query_builder/README.md | 10 +++--- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +-- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +-- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +-- manual/query_builder/schema/table/README.md | 6 ++-- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +-- manual/query_builder/term/README.md | 4 +-- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +-- upgrade_guide/README.md | 2 +- 78 files changed, 334 insertions(+), 334 deletions(-) diff --git a/README.md b/README.md index 06d2e68b0eb..1800e5ed0d4 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.7.0](https://github.com/datastax/java-driver/tree/4.7.0).* +[4.8.0](https://github.com/datastax/java-driver/tree/4.8.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -77,7 +77,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.7 +[API docs]: https://docs.datastax.com/en/drivers/java/4.8 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index 2e5c699eed0..aaa937b1386 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.8.0 (in progress) +### 4.8.0 - [improvement] JAVA-2811: Add aliases for driver 3 method names - [new feature] JAVA-2808: Provide metrics bindings for Micrometer and MicroProfile diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 5c598b079f2..421e8381dc0 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index dd873833314..8c2ca80d419 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -310,18 +310,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html 
-[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 91fda354405..c973d9d7c77 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index c5ea6e1b159..44bb113f559 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 998e5cf7148..6e63371538f 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index e80270c2730..5ab0faf2722 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.7.0 + 4.8.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.7.0 + 4.8.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 1e3ebdc9ee9..8378a395fe0 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 9d2b5c46a78..ca40d1ec5ca 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 41c51a7a2f4..2c3bb302927 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
29f8dc1a05d..8dcfbc02c0c 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 25605927e22..46a71335ef2 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 015c80f20f7..7e4e53ef6f3 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 1a34cabdbac..2e866b0ca41 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 65ba69c2208..9492152b28c 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index d15ba657f1f..0240461484f 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 7d52c1f9ec1..bb064e93744 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 1f5cfb6df8f..001128fe55b 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -609,6 +609,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 948264d06c1..3cbb0daee41 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 4c1a6acbf1a..94d0980bdde 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index f70a2bf393b..b0099f09f71 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 1374e290334..dedc0333cb3 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 4b45370232f..3be4ae48780 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 649a89b71b7..aca8a2f833a 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -123,6 +123,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index b21794f7f15..3c34da404de 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 5efa6162a16..a81443acc5c 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index da63efe377c..36ffa75a54d 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 3d873c7d479..f7ef481767e 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 8be59a4096b..89c1b5dc56b 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -365,18 +365,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. 
-[CqlSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index d79022cb264..86b0f08d280 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -66,7 +66,7 @@ is the exponential one with the default values, and the control connection is in * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back to its expected size, node2's reconnection stops. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 1ff00ccc7d6..1b4bb9cb717 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index bd727d452ae..199f6415d83 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 22296424c25..1f785d93675 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 6bb16a127ca..48b4e051ade 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 3c75e8cb3b6..5fefa47229f 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 6089389e97e..c844e7a1806 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index b17e05a42a9..a6630a363b4 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 029b775f6df..d3a1a338932 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 849413bd230..5c1afdb64aa 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index ba7f69d5c30..77c0aa3e09d 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 9b95c6d6941..fe5ae0a1ce1 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 3bc2ee35099..5fdee51fea1 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 280f4c1366f..1c6119801e6 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index ff0fbba155f..1d22e2317d4 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index f5297bee7aa..ea6e6d45d1a 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -147,8 +147,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 36aadaff522..44353ba8bda 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index ff90918b442..d6de12f069c 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 2f5dec7f085..1ca1f2c8b75 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 0929ae3a83d..66343a49188 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index f701eb5392d..7157469ea3c 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md 
b/manual/mapper/daos/query/README.md index 53d14af4b19..c33da41fc3c 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -110,17 +110,17 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 0c1962bcad4..60f4c568e6e 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. 
-[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 54e113fd12f..211b577331e 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -142,19 +142,19 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- 
-[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/PagingIterable.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index a05540140fd..f1aacf9293e 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 54ed6631272..c7c89c587de 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 8c8a3fbc57f..9d5cd32ac8e 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
d3c506fec90..926f640d5e9 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -468,21 +468,21 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 499cd7c879e..40e46450792 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -227,8 +227,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 73f87a35b90..cff6e47e91c 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 96c11589c25..8512a93066f 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child 
pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 1d02bb1bbaa..3e728c42f6e 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. 
-[Condition]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index a6a3fafd09c..4afde12b356 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 740cd23d99b..4c2d1b07439 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 4ea0d68fcf5..3d00366e8a5 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 969633f356c..740ea49b859 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index ce81cb1718f..5849047154d 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 36c4d771bf1..758b3823c24 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 754a8fc538b..d4bff71b8de 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index ea2fd4c05b7..39c0151631a 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index ce8cc62ce76..08f724f3860 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 98f98bd7795..a712b46e11e 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 36f0f99d5c5..8cb4920063d 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 3eb4ff9632c..0dd676ce397 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index c2ade0c7926..a3e87aa7a51 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 80093a29b8b..f1df985a3d2 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 7fb78aac1f4..29f0ff72b28 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 8c8f422eb67..3faa1b8e1e5 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -92,7 +92,7 @@ you can obtain in most web environments by calling `Thread.getContextClassLoader See the javadocs of [SessionBuilder.withClassLoader] for more information. -[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.7/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- ### 4.1.0 From 94aae4a6dbe4bda6f951e4629fe229aeafe80b86 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Jul 2020 10:51:01 -0700 Subject: [PATCH 547/979] [maven-release-plugin] prepare release 4.8.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 52200847707..51ed923fa32 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss 
java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-core-shaded - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-mapper-processor - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-mapper-runtime - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-query-builder - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-test-infra - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-metrics-micrometer - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss java-driver-metrics-microprofile - 4.8.0-SNAPSHOT + 4.8.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4aa22203da7..91e08122231 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 99f3f266b34..1403c93dcf9 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e43a8ba247b..62f2e850160 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1c2d2ba7302..2500243af21 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.8.0-SNAPSHOT + 4.8.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 3bc2ab59dee..dd8422d9e6c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 6352765c993..a073f863763 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 2942cca93f7..15a78e4187f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8b4c4ff3596..0a408ef64d4 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 022adbd178f..37824640b01 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 140217e319f..b40444e1bfd 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index fb0b54eee5f..9cb4e330bfb 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 
2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -913,7 +913,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.8.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 040d4b8de30..6916475130a 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 98422301622..1f19a1d6b53 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0-SNAPSHOT + 4.8.0 java-driver-test-infra bundle From 9bc2ac8e9db470e7c25744b175ecf0014c13a6ff Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Jul 2020 10:54:04 -0700 Subject: [PATCH 548/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 51ed923fa32..b49d13978f9 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss 
java-driver-mapper-runtime - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.8.0 + 4.8.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 91e08122231..19fc187a1b9 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 1403c93dcf9..52ff4935323 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 62f2e850160..3b833c02244 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 2500243af21..ea91faac783 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.8.0 + 4.8.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index dd8422d9e6c..b1b9efb26c4 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a073f863763..efd9ab17046 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 15a78e4187f..ffd05653c98 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 0a408ef64d4..674293cfd59 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 37824640b01..bd3bee46a8c 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index b40444e1bfd..b5f107349b3 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 9cb4e330bfb..2718341c3f8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 
2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -913,7 +913,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.8.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 6916475130a..4cd35f62d17 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 1f19a1d6b53..3d1c7536065 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.0 + 4.8.1-SNAPSHOT java-driver-test-infra bundle From 842ca7695b9f707b24ee132ba7220b14e108487a Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 28 Jul 2020 13:34:16 -0700 Subject: [PATCH 549/979] Fix next snapshot version and prepare changelog --- bom/pom.xml | 18 +++++++++--------- changelog/README.md | 3 +++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 15 files changed, 25 insertions(+), 22 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index b49d13978f9..56e50782dea 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 
4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index aaa937b1386..23e4548dfcf 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.9.0 (in progress) + + ### 4.8.0 - [improvement] JAVA-2811: Add aliases for driver 3 method names diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 19fc187a1b9..f8c760a8b0a 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 52ff4935323..109e0dc7055 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 3b833c02244..3f9fd7bed6b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index ea91faac783..22197e49f68 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index b1b9efb26c4..49a1746e9f0 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index efd9ab17046..50996d52633 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ffd05653c98..92395f0d88e 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 674293cfd59..e21c3d56265 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index bd3bee46a8c..1126a42e726 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index b5f107349b3..6ea23c17796 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 2718341c3f8..eb35aa5e5cd 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT pom DataStax Java 
driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 4cd35f62d17..2dce6720a2b 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3d1c7536065..79a8ed60fef 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.8.1-SNAPSHOT + 4.9.0-SNAPSHOT java-driver-test-infra bundle From 7b57a1170cd271bcf5d79ce1ed1e89b567dbb8c2 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 29 Jul 2020 15:49:47 -0500 Subject: [PATCH 550/979] JAVA-2699: Update protocol version compatibility matrix --- manual/core/native_protocol/README.md | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index aca8a2f833a..200c15f25b7 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -26,9 +26,9 @@ first node the driver connects to: | Cassandra version | Negotiated protocol version with driver 4 ¹ | |---------------------|-------------------------------------------------| -| 2.1.x (DSE 4.7/4.8) | v3 | +| 2.1.x | v3 | | 2.2.x | v4 | -| 3.x (DSE 5.0/5.1) | v4 | +| 3.x | v4 | | 4.x ² | v5 | *(1) for previous driver versions, see the [3.x documentation][driver3]* @@ -36,6 +36,18 @@ first node the driver connects to: *(2) at the time of writing, Cassandra 4 is not released yet. Protocol v5 support is still in beta, and must be enabled explicitly (negotiation will yield v4).* +Since version 4.5.0, the driver can also use DSE protocols when all nodes are running a version of +DSE. 
The table below shows the protocol matrix for these cases: + +| DSE version | Negotiated protocol version with driver 4 | +|---------------------|-------------------------------------------------| +| 4.7/4.8 | v3 | +| 5.0 | v4 | +| 5.1 | DSE_V1 ³ | +| 6.0/6.7/6.8 | DSE_V2 ³ | + +*(3) DSE Protocols are chosen before other Cassandra native protocols. + ### Controlling the protocol version To find out which version you're currently using, use the following: @@ -121,7 +133,7 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve in the face of schema changes [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources -[driver3]: https://docs.datastax.com/en/developer/java-driver/3.5/manual/native_protocol/ +[driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ [ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- [Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- From 68a7aad0754688dbe7e9a336cc02676080ef62a1 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 29 Jul 2020 15:55:41 -0500 Subject: [PATCH 551/979] Fix typo in README --- manual/core/native_protocol/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 200c15f25b7..f2eab80d2f0 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -46,7 +46,7 @@ DSE. The table below shows the protocol matrix for these cases: | 5.1 | DSE_V1 ³ | | 6.0/6.7/6.8 | DSE_V2 ³ | -*(3) DSE Protocols are chosen before other Cassandra native protocols. 
+*(3) DSE Protocols are chosen before other Cassandra native protocols.* ### Controlling the protocol version @@ -137,4 +137,4 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- [Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- \ No newline at end of file +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- From 2cf8aaa9d53a05fe75cc009398273cd0808b5fd9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 6 Aug 2020 19:02:36 +0200 Subject: [PATCH 552/979] Remove DDAC from builds --- Jenkinsfile | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 64d6152919c..95eb2aa20b0 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -215,7 +215,6 @@ pipeline { '3.0', // Previous Apache CassandraⓇ '3.11', // Current Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'ddac-5.1', // Current DataStax Distribution of Apache CassandraⓇ 'dse-4.8', // Previous EOSL DataStax Enterprise 'dse-5.0', // Long Term Support DataStax Enterprise 'dse-5.1', // Legacy DataStax Enterprise @@ -252,10 +251,6 @@ pipeline { 4.0 Apache Cassandra® v4.x (CURRENTLY UNDER DEVELOPMENT) - - ddac-5.1 - DataStax Distribution of Apache Cassandra® v5.1.x - dse-4.8 DataStax Enterprise v4.8.x (END OF SERVICE LIFE) @@ -366,8 +361,8 @@ pipeline { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron.matcher(env.BRANCH_NAME).matches() ? 
""" # Every weeknight (Monday - Friday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, DDAC-5.1, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 ddac-5.1 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 + ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 ### JDK11 tests against 3.11, 4.0, DSE 6.7 and DSE 6.8.0 H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.7 dse-6.8.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 # Every weekend (Sunday) around 12:00 PM noon @@ -521,7 +516,6 @@ pipeline { '3.0', // Previous Apache CassandraⓇ '3.11', // Current Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'ddac-5.1', // Current DataStax Distribution of Apache CassandraⓇ 'dse-4.8', // Previous EOSL DataStax Enterprise 'dse-5.0', // Last EOSL DataStax Enterprise 'dse-5.1', // Legacy DataStax Enterprise From 9db6e08694fc42ba31e89c0d52eee8978dca6c70 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 8 Aug 2020 11:14:12 +0200 Subject: [PATCH 553/979] Do not enable Graph workload when DSE version is < 5.0 --- .../oss/driver/internal/osgi/support/CcmStagedReactor.java | 6 +++++- .../datastax/oss/driver/api/testinfra/ccm/CcmBridge.java | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java index 5baf7fa6833..56369869890 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java @@ -15,8 +15,10 @@ */ package 
com.datastax.oss.driver.internal.osgi.support; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import java.util.List; +import java.util.Objects; import net.jcip.annotations.GuardedBy; import org.ops4j.pax.exam.TestContainer; import org.ops4j.pax.exam.TestProbeBuilder; @@ -30,9 +32,11 @@ public class CcmStagedReactor extends AllConfinedStagedReactor { public static final CcmBridge CCM_BRIDGE; + public static final Version DSE_5_0 = Objects.requireNonNull(Version.parse("5.0")); + static { CcmBridge.Builder builder = CcmBridge.builder().withNodes(1); - if (CcmBridge.DSE_ENABLEMENT) { + if (CcmBridge.DSE_ENABLEMENT && CcmBridge.VERSION.compareTo(DSE_5_0) >= 0) { builder.withDseWorkloads("graph"); } CCM_BRIDGE = builder.build(); diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index df5f1af05a2..9e3f33c1b5b 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -32,6 +32,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -68,7 +69,8 @@ public class CcmBridge implements AutoCloseable { private final String jvmArgs; - public static final Version VERSION = Version.parse(System.getProperty("ccm.version", "3.11.0")); + public static final Version VERSION = + Objects.requireNonNull(Version.parse(System.getProperty("ccm.version", "3.11.0"))); public static final String INSTALL_DIRECTORY = System.getProperty("ccm.directory"); @@ -480,6 +482,7 @@ public Builder withCreateOption(String option) { /** Enables SSL encryption. 
*/ public Builder withSsl() { cassandraConfiguration.put("client_encryption_options.enabled", "true"); + cassandraConfiguration.put("client_encryption_options.optional", "false"); cassandraConfiguration.put( "client_encryption_options.keystore", DEFAULT_SERVER_KEYSTORE_FILE.getAbsolutePath()); cassandraConfiguration.put( @@ -489,6 +492,7 @@ public Builder withSsl() { public Builder withSslLocalhostCn() { cassandraConfiguration.put("client_encryption_options.enabled", "true"); + cassandraConfiguration.put("client_encryption_options.optional", "false"); cassandraConfiguration.put( "client_encryption_options.keystore", DEFAULT_SERVER_LOCALHOST_KEYSTORE_FILE.getAbsolutePath()); From 6e68709fdf6308e230990fe9047293ef03b3daf5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 8 Aug 2020 11:34:41 +0200 Subject: [PATCH 554/979] Use SET_TO_NULL in mapper tests for compatibility with protocol V3 --- .../com/datastax/oss/driver/mapper/CustomResultTypeIT.java | 3 +++ .../src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java | 3 +++ .../oss/driver/internal/osgi/service/MailboxMessageDao.java | 3 +++ .../driver/internal/osgi/service/geo/GeoMailboxMessageDao.java | 3 +++ .../osgi/service/reactive/ReactiveMailboxMessageDao.java | 3 +++ 5 files changed, 15 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java index 6698eac341a..9d12b26d5ae 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/CustomResultTypeIT.java @@ -25,12 +25,14 @@ import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import 
com.datastax.oss.driver.api.mapper.annotations.Delete; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Query; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -131,6 +133,7 @@ public void should_use_custom_result_for_query_method() assertThat(selectedProduct).isNull(); } + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface ListenableFutureDao { @Select diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java index a826b847520..bcb981e0ee6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UdtKeyIT.java @@ -23,11 +23,13 @@ import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import 
com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -174,6 +176,7 @@ public void setValue(int value) { } @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) interface RecordDao { @Select Record findByKey(Key key); diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java index 76f44e3ae43..db9b2b8a13f 100644 --- a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/MailboxMessageDao.java @@ -17,11 +17,14 @@ import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.osgi.service.MailboxMessage; @Dao +@DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface MailboxMessageDao { @Insert diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java index af3b7750a25..aac1649ef74 100644 --- a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/geo/GeoMailboxMessageDao.java @@ -17,12 +17,15 @@ import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.mapper.annotations.Dao; +import 
com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxMessage; import com.datastax.oss.driver.internal.osgi.service.MailboxMessageDao; @Dao +@DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface GeoMailboxMessageDao extends MailboxMessageDao { @Insert diff --git a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java index e558db59357..87db1dbda4e 100644 --- a/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java +++ b/osgi-tests/src/main/java/com/datastax/oss/driver/internal/osgi/service/reactive/ReactiveMailboxMessageDao.java @@ -17,11 +17,14 @@ import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.osgi.service.MailboxMessage; import com.datastax.oss.driver.internal.osgi.service.MailboxMessageDao; @Dao +@DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface ReactiveMailboxMessageDao extends MailboxMessageDao { @Select From 3982f1e383496a812dd3ea2a7a093262b1f5cd03 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 8 Aug 2020 12:05:40 +0200 Subject: [PATCH 555/979] Fix CcmBridge.add() implementation for DSE clusters --- .../datastax/oss/driver/api/testinfra/ccm/CcmBridge.java | 6 
+++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 9e3f33c1b5b..c41aa0b278f 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -305,7 +305,11 @@ public void stop(int n) { } public void add(int n, String dc) { - execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n); + if (getDseVersion().isPresent()) { + execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n, "--dse"); + } else { + execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n); + } start(n); } From a92d86213728b79d1b472afc65e394fbea216369 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 8 Aug 2020 13:05:39 +0200 Subject: [PATCH 556/979] Remove call to System.out --- .../dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java index 90daac34a7b..9617746e026 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java @@ -119,7 +119,6 @@ public static Collection textIndices() { @BeforeClass public static void setup() { for (String setupQuery : textIndices()) { - System.out.println("Executing: " + setupQuery); SESSION_RULE.session().execute(ScriptGraphStatement.newInstance(setupQuery)); } From b85e20856825a8c22eb168302cecbbf854e27dc9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 18 Aug 2020 17:20:58 -0700 Subject: [PATCH 557/979] JAVA-2863: Reintroduce mapper 
processor dependency to SLF4J Motivation: In JAVA-2800 we removed that dependency to avoid SLF4J's "defaulting to NOP" warning when the processor is configured via `-processorpath`. However, the processor depends on driver core classes that sometimes declare a logger. If SLF4J is missing, this will cause a ClassNotFoundException while processing annotations. Some more background: - in theory the processor should be completely isolated from SLF4J (its only output is compiler messages). But sometimes reusing core classes is convenient, we wouldn't want to rewrite them from scratch. A NOP implementation that ignores the logs is what we want, it's just the warning that is undesirable. - we can't declare slf4j-nop as an explicit dependency of the processor, because then it would collide with the application's binding if the processor is configured via the regular classpath instead of `-processorpath`. Modifications: Revert JAVA-2800 to reintroduce the dependency. Add a snippet in the manual to explain how users can add slf4j-nop to their processorpath to avoid the "defaulting to NOP" warning. Result: No more risk of ClassNotFoundException. The warning is back, but it's not harmful and users have a workaround to remove it. 
--- changelog/README.md | 1 + manual/mapper/config/README.md | 7 +++++++ mapper-processor/pom.xml | 14 -------------- .../mapper/processor/dao/LoggingGenerator.java | 4 +++- 4 files changed, 11 insertions(+), 15 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 23e4548dfcf..7db1344c099 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [bug] JAVA-2863: Reintroduce mapper processor dependency to SLF4J ### 4.8.0 diff --git a/manual/mapper/config/README.md b/manual/mapper/config/README.md index 0ccb224ba97..5d42e104cf6 100644 --- a/manual/mapper/config/README.md +++ b/manual/mapper/config/README.md @@ -41,6 +41,13 @@ configuration (make sure you use version 3.5 or higher): java-driver-mapper-processor ${java-driver.version} + + + org.slf4j + slf4j-nop + 1.7.26 + diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 50996d52633..4aa6a97f79c 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -40,20 +40,6 @@ com.datastax.oss java-driver-mapper-runtime - - - - org.slf4j - slf4j-api - - com.datastax.oss diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java index 4e0e4c603c0..00b3c229deb 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/LoggingGenerator.java @@ -24,7 +24,9 @@ public class LoggingGenerator { - // Reference these types by name to avoid a compile-time dependency to SFL4J + // Reference these types by name. They are in the classpath but that is more of a workaround in + // case they get accidentally referenced via driver core types (see JAVA-2863), the mapper + // processor does not directly "use" SLF4J. 
private static final ClassName LOGGER_FACTORY_CLASS_NAME = ClassName.get("org.slf4j", "LoggerFactory"); private static final ClassName LOGGER_CLASS_NAME = ClassName.get("org.slf4j", "Logger"); From 2ddfae061c8ce115e088f8c2ec2e2607f1e55427 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 30 Jul 2020 11:21:53 -0700 Subject: [PATCH 558/979] JAVA-2721: Add counter support in the mapper --- changelog/README.md | 1 + .../oss/driver/mapper/IncrementIT.java | 218 ++++++++++++ .../driver/mapper/IncrementWithNullsIT.java | 118 +++++++ manual/mapper/daos/.nav | 1 + manual/mapper/daos/README.md | 1 + manual/mapper/daos/increment/README.md | 86 +++++ .../DefaultCodeGeneratorFactory.java | 6 + .../dao/DaoDeleteMethodGenerator.java | 45 +-- .../dao/DaoIncrementMethodGenerator.java | 320 ++++++++++++++++++ .../processor/dao/DaoMethodGenerator.java | 50 +++ .../mapper/processor/dao/EntityUtils.java | 13 +- .../entity/DefaultPropertyDefinition.java | 7 + .../processor/entity/PropertyDefinition.java | 6 + .../api/mapper/annotations/Increment.java | 122 +++++++ .../oss/driver/internal/mapper/DaoBase.java | 10 +- .../mapper/entity/EntityHelperBase.java | 2 +- 16 files changed, 956 insertions(+), 50 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java create mode 100644 manual/mapper/daos/increment/README.md create mode 100644 mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Increment.java diff --git a/changelog/README.md b/changelog/README.md index 7db1344c099..b97241e171d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [new feature] JAVA-2721: Add counter support in the mapper - [bug] JAVA-2863: Reintroduce 
mapper processor dependency to SLF4J ### 4.8.0 diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementIT.java new file mode 100644 index 00000000000..1e1beeef8f7 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementIT.java @@ -0,0 +1,218 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Increment; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import 
com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Objects; +import java.util.UUID; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class IncrementIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static ProductRatingDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = SESSION_RULE.session(); + + session.execute( + SimpleStatement.builder( + "CREATE TABLE product_rating(product_id uuid PRIMARY KEY, " + + "one_star counter, two_star counter, three_star counter, " + + "four_star counter, five_star counter)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + + InventoryMapper inventoryMapper = new IncrementIT_InventoryMapperBuilder(session).build(); + dao = inventoryMapper.productRatingDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_increment_counters() { + UUID productId1 = UUID.randomUUID(); + UUID productId2 = UUID.randomUUID(); + + dao.incrementFiveStar(productId1, 1); + dao.incrementFiveStar(productId1, 1); + dao.incrementFourStar(productId1, 1); + + dao.incrementTwoStar(productId2, 1); + dao.incrementThreeStar(productId2, 1); + dao.incrementOneStar(productId2, 1); + + ProductRating product1Totals = dao.get(productId1); + assertThat(product1Totals.getFiveStar()).isEqualTo(2); + assertThat(product1Totals.getFourStar()).isEqualTo(1); + assertThat(product1Totals.getThreeStar()).isEqualTo(0); + 
assertThat(product1Totals.getTwoStar()).isEqualTo(0); + assertThat(product1Totals.getOneStar()).isEqualTo(0); + + ProductRating product2Totals = dao.get(productId2); + assertThat(product2Totals.getFiveStar()).isEqualTo(0); + assertThat(product2Totals.getFourStar()).isEqualTo(0); + assertThat(product2Totals.getThreeStar()).isEqualTo(1); + assertThat(product2Totals.getTwoStar()).isEqualTo(1); + assertThat(product2Totals.getOneStar()).isEqualTo(1); + } + + @Mapper + public interface InventoryMapper { + @DaoFactory + ProductRatingDao productRatingDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) + public interface ProductRatingDao { + @Select + ProductRating get(UUID productId); + + @Increment(entityClass = ProductRating.class) + void incrementOneStar(UUID productId, long oneStar); + + @Increment(entityClass = ProductRating.class) + void incrementTwoStar(UUID productId, long twoStar); + + @Increment(entityClass = ProductRating.class) + void incrementThreeStar(UUID productId, long threeStar); + + @Increment(entityClass = ProductRating.class) + void incrementFourStar(UUID productId, long fourStar); + + @Increment(entityClass = ProductRating.class) + void incrementFiveStar(UUID productId, long fiveStar); + } + + @Entity + public static class ProductRating { + + @PartitionKey private UUID productId; + private long oneStar; + private long twoStar; + private long threeStar; + private long fourStar; + private long fiveStar; + + public ProductRating() {} + + public UUID getProductId() { + return productId; + } + + public void setProductId(UUID productId) { + this.productId = productId; + } + + public long getOneStar() { + return oneStar; + } + + public void setOneStar(long oneStar) { + this.oneStar = oneStar; + } + + public long getTwoStar() { + return twoStar; + } + + public void setTwoStar(long twoStar) { + this.twoStar = twoStar; + } + + public long getThreeStar() { + return threeStar; + } + + public void 
setThreeStar(long threeStar) { + this.threeStar = threeStar; + } + + public long getFourStar() { + return fourStar; + } + + public void setFourStar(long fourStar) { + this.fourStar = fourStar; + } + + public long getFiveStar() { + return fiveStar; + } + + public void setFiveStar(long fiveStar) { + this.fiveStar = fiveStar; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof ProductRating) { + ProductRating that = (ProductRating) other; + return Objects.equals(this.productId, that.productId) + && this.oneStar == that.oneStar + && this.twoStar == that.twoStar + && this.threeStar == that.threeStar + && this.fourStar == that.fourStar + && this.fiveStar == that.fiveStar; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(productId, oneStar, twoStar, threeStar, fourStar, fiveStar); + } + + @Override + public String toString() { + return String.format( + "ProductRating(id=%s, 1*=%d, 2*=%d, 3*=%d, 4*=%d, 5*=%d)", + productId, oneStar, twoStar, threeStar, fourStar, fiveStar); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java new file mode 100644 index 00000000000..642bb9c17b9 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java @@ -0,0 +1,118 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Increment; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.mapper.IncrementIT.ProductRating; +import java.util.UUID; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +@CassandraRequirement(min = "2.2") +public class IncrementWithNullsIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static ProductRatingDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = SESSION_RULE.session(); 
+ + session.execute( + SimpleStatement.builder( + "CREATE TABLE product_rating(product_id uuid PRIMARY KEY, " + + "one_star counter, two_star counter, three_star counter, " + + "four_star counter, five_star counter)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + + InventoryMapper inventoryMapper = + new IncrementWithNullsIT_InventoryMapperBuilder(session).build(); + dao = inventoryMapper.productRatingDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_increment_counters() { + UUID productId1 = UUID.randomUUID(); + UUID productId2 = UUID.randomUUID(); + + dao.increment(productId1, null, null, null, null, 1L); + dao.increment(productId1, null, null, null, null, 1L); + dao.increment(productId1, null, null, null, 1L, null); + + dao.increment(productId2, null, 1L, null, null, null); + dao.increment(productId2, null, null, 1L, null, null); + dao.increment(productId2, 1L, null, null, null, null); + + ProductRating product1Totals = dao.get(productId1); + assertThat(product1Totals.getFiveStar()).isEqualTo(2); + assertThat(product1Totals.getFourStar()).isEqualTo(1); + assertThat(product1Totals.getThreeStar()).isEqualTo(0); + assertThat(product1Totals.getTwoStar()).isEqualTo(0); + assertThat(product1Totals.getOneStar()).isEqualTo(0); + + ProductRating product2Totals = dao.get(productId2); + assertThat(product2Totals.getFiveStar()).isEqualTo(0); + assertThat(product2Totals.getFourStar()).isEqualTo(0); + assertThat(product2Totals.getThreeStar()).isEqualTo(1); + assertThat(product2Totals.getTwoStar()).isEqualTo(1); + assertThat(product2Totals.getOneStar()).isEqualTo(1); + } + + @Mapper + public interface InventoryMapper { + @DaoFactory + ProductRatingDao productRatingDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.DO_NOT_SET) + public interface ProductRatingDao { + @Select + ProductRating get(UUID productId); + + @Increment(entityClass = ProductRating.class) + void increment( + UUID 
productId, Long oneStar, Long twoStar, Long threeStar, Long fourStar, Long fiveStar); + } +} diff --git a/manual/mapper/daos/.nav b/manual/mapper/daos/.nav index 1337eb64101..be60381834f 100644 --- a/manual/mapper/daos/.nav +++ b/manual/mapper/daos/.nav @@ -6,5 +6,6 @@ queryprovider select setentity update +increment null_saving statement_attributes \ No newline at end of file diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index ea6e6d45d1a..7fabf887f0d 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -44,6 +44,7 @@ annotations: * [@Select](select/) * [@SetEntity](setentity/) * [@Update](update/) +* [@Increment](increment/) The methods can have any name. The allowed parameters and return type are specific to each annotation. diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md new file mode 100644 index 00000000000..74acecb11bf --- /dev/null +++ b/manual/mapper/daos/increment/README.md @@ -0,0 +1,86 @@ +## Increment methods + +Annotate a DAO method with [@Increment] to generate a query that updates a counter table that is +mapped to an entity: + +```java +// CREATE TABLE votes(article_id int PRIMARY KEY, up_votes counter, down_votes counter); + +@Entity +public class Votes { + @PartitionKey private int articleId; + private long upVotes; + private long downVotes; + ... // constructor(s), getters and setters, etc. +} + +@Dao +public interface VotesDao { + @Increment(entityClass = Votes.class) + void incrementUpVotes(int articleId, long upVotes); + + @Increment(entityClass = Votes.class) + void incrementDownVotes(int articleId, long downVotes); + + @Select + Votes findById(int articleId); +} +``` + +### Parameters + +The entity class must be specified with `entityClass` in the annotation. 
+ +The method's parameters must start with the [full primary key](../../entities/#primary-key-columns), +in the exact order (as defined by the [@PartitionKey] and [@ClusteringColumn] annotations in the +entity class). The parameter names don't necessarily need to match the names of the columns, but the +types must match. Unlike other methods like [@Select](../select/) or [@Delete](../delete/), counter +updates cannot operate on a whole partition, they need to target exactly one row; so all the +partition key and clustering columns must be specified. + +Then must follow one or more parameters representing counter increments. Their type must be +`long` or `java.lang.Long`. The name of the parameter must match the name of the entity +property that maps to the counter (that is, the name of the getter without "get" and +decapitalized). Alternatively, you may annotate a parameter with [@CqlName] to specify the +raw column name directly; in that case, the name of the parameter does not matter: + +```java +@Increment(entityClass = Votes.class) +void incrementUpVotes(int articleId, @CqlName("up_votes") long foobar); +``` + +When you invoke the method, each parameter value is interpreted as a **delta** that will be applied +to the counter. In other words, if you pass 1, the counter will be incremented by 1. Negative values +are allowed. If you are using Cassandra 2.2 or above, you can use `Long` and pass `null` for some of +the parameters, they will be ignored (following [NullSavingStrategy#DO_NOT_SET](../null_saving/) +semantics). If you are using Cassandra 2.1, `null` values will trigger a runtime error. + +A `Function` or `UnaryOperator` +can be added as the **last** parameter. It will be applied to the statement before execution. This +allows you to customize certain aspects of the request (page size, timeout, etc) at runtime. See +[statement attributes](../statement_attributes/). 
+ +### Return type + +The method can return `void`, a void [CompletionStage] or [CompletableFuture], or a +[ReactiveResultSet]. + +### Target keyspace and table + +If a keyspace was specified [when creating the DAO](../../mapper/#dao-factory-methods), then the +generated query targets that keyspace. Otherwise, it doesn't specify a keyspace, and will only work +if the mapper was built from a session that has a [default keyspace] set. + +If a table was specified when creating the DAO, then the generated query targets that table. +Otherwise, it uses the default table name for the entity (which is determined by the name of the +entity class and the naming convention). + +[@Increment]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/CqlName.html + +[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html +[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java index 78624381458..bcaececbcb6 
100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.Delete; import com.datastax.oss.driver.api.mapper.annotations.GetEntity; +import com.datastax.oss.driver.api.mapper.annotations.Increment; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Query; import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; @@ -28,6 +29,7 @@ import com.datastax.oss.driver.internal.mapper.processor.dao.DaoGetEntityMethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.dao.DaoImplementationGenerator; import com.datastax.oss.driver.internal.mapper.processor.dao.DaoImplementationSharedCode; +import com.datastax.oss.driver.internal.mapper.processor.dao.DaoIncrementMethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.dao.DaoInsertMethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.dao.DaoQueryMethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.dao.DaoQueryProviderMethodGenerator; @@ -135,6 +137,10 @@ public Optional newDaoImplementationMethod( return Optional.of( new DaoQueryProviderMethodGenerator( methodElement, typeParameters, processedType, enclosingClass, context)); + } else if (methodElement.getAnnotation(Increment.class) != null) { + return Optional.of( + new DaoIncrementMethodGenerator( + methodElement, typeParameters, processedType, enclosingClass, context)); } else { return Optional.empty(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 204e5a31f8e..8d710e9d491 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -44,13 +44,10 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import javax.lang.model.element.AnnotationMirror; -import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; -import javax.lang.model.type.TypeMirror; public class DaoDeleteMethodGenerator extends DaoMethodGenerator { @@ -151,7 +148,7 @@ public Optional generate() { entityDefinition = context.getEntityFactory().getDefinition(entityElement); primaryKeyParameterCount = entityDefinition.getPrimaryKey().size(); } else { - entityElement = getEntityFromAnnotation(); + entityElement = getEntityClassFromAnnotation(Delete.class); if (entityElement == null) { context .getMessager() @@ -295,46 +292,6 @@ public Optional generate() { return crudMethod(createStatementBlock, returnType, helperFieldName); } - private TypeElement getEntityFromAnnotation() { - // Note: because Delete.entityClass references a class, we can't read it directly through - // methodElement.getAnnotation(Delete.class). 
- - AnnotationMirror annotationMirror = null; - for (AnnotationMirror candidate : methodElement.getAnnotationMirrors()) { - if (context.getClassUtils().isSame(candidate.getAnnotationType(), Delete.class)) { - annotationMirror = candidate; - break; - } - } - assert annotationMirror != null; - - for (Map.Entry entry : - annotationMirror.getElementValues().entrySet()) { - if (entry.getKey().getSimpleName().contentEquals("entityClass")) { - @SuppressWarnings("unchecked") - List values = (List) entry.getValue().getValue(); - if (values.isEmpty()) { - return null; - } - TypeMirror mirror = (TypeMirror) values.get(0).getValue(); - TypeElement element = EntityUtils.asEntityElement(mirror, typeParameters); - if (values.size() > 1) { - context - .getMessager() - .warn( - methodElement, - processedType, - "Too many entity classes: %s must have at most one 'entityClass' argument " - + "(will use the first one: %s)", - Delete.class.getSimpleName(), - element.getSimpleName()); - } - return element; - } - } - return null; - } - private void generatePrepareRequest( MethodSpec.Builder methodBuilder, String requestName, diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java new file mode 100644 index 00000000000..94ba031c856 --- /dev/null +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java @@ -0,0 +1,320 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.mapper.processor.dao; + +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_VOID; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.REACTIVE_RESULT_SET; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.VOID; + +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Increment; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import com.datastax.oss.driver.api.querybuilder.QueryBuilder; +import com.datastax.oss.driver.api.querybuilder.relation.Relation; +import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; +import com.datastax.oss.driver.internal.mapper.processor.entity.EntityDefinition; +import com.datastax.oss.driver.internal.mapper.processor.entity.PropertyDefinition; +import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.CodeBlock; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; +import java.util.List; +import java.util.Map; +import 
java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.Name; +import javax.lang.model.element.TypeElement; +import javax.lang.model.element.VariableElement; +import javax.lang.model.type.TypeKind; +import javax.lang.model.type.TypeMirror; + +public class DaoIncrementMethodGenerator extends DaoMethodGenerator { + + public DaoIncrementMethodGenerator( + ExecutableElement methodElement, + Map typeParameters, + TypeElement processedType, + DaoImplementationSharedCode enclosingClass, + ProcessorContext context) { + super(methodElement, typeParameters, processedType, enclosingClass, context); + } + + protected Set getSupportedReturnTypes() { + return ImmutableSet.of(VOID, FUTURE_OF_VOID, REACTIVE_RESULT_SET); + } + + @Override + public boolean requiresReactive() { + // Validate the return type: + DaoReturnType returnType = + parseAndValidateReturnType(getSupportedReturnTypes(), Increment.class.getSimpleName()); + if (returnType == null) { + return false; + } + return returnType.requiresReactive(); + } + + @Override + public Optional generate() { + + TypeElement entityElement = getEntityClassFromAnnotation(Increment.class); + EntityDefinition entityDefinition; + if (entityElement == null) { + context + .getMessager() + .error( + methodElement, + processedType, + "Missing entity class: %s methods must always have an 'entityClass' argument", + Increment.class.getSimpleName()); + return Optional.empty(); + } else { + entityDefinition = context.getEntityFactory().getDefinition(entityElement); + } + + // Validate the parameters: + // - all the PK components of the entity, in order. + // - one or more increment parameters that must match non-PK columns. + // - a Function can be added in last position. 
+ List parameters = methodElement.getParameters(); + VariableElement boundStatementFunction = findBoundStatementFunction(methodElement); + if (boundStatementFunction != null) { + parameters = parameters.subList(0, parameters.size() - 1); + } + + List primaryKeyParameters = parameters; + // Must have at least enough parameters for the full PK + if (primaryKeyParameters.size() < entityDefinition.getPrimaryKey().size()) { + List primaryKeyTypes = + entityDefinition.getPrimaryKey().stream() + .map(d -> d.getType().asTypeName()) + .collect(Collectors.toList()); + context + .getMessager() + .error( + methodElement, + processedType, + "Invalid parameter list: %s methods must specify the entire primary key " + + "(expected primary keys of %s: %s)", + Increment.class.getSimpleName(), + entityElement.getSimpleName(), + primaryKeyTypes); + return Optional.empty(); + } else { + primaryKeyParameters = + primaryKeyParameters.subList(0, entityDefinition.getPrimaryKey().size()); + warnIfCqlNamePresent(primaryKeyParameters); + } + // PK parameter types must match + if (!EntityUtils.areParametersValid( + entityElement, + entityDefinition, + primaryKeyParameters, + Increment.class, + context, + methodElement, + processedType, + "" /* no condition, @Increment must always have the full PK */)) { + return Optional.empty(); + } + + // The remaining parameters are the increments to the counter columns + List incrementParameters = + parameters.subList(primaryKeyParameters.size(), parameters.size()); + if (!validateCqlNamesPresent(incrementParameters)) { + return Optional.empty(); + } + for (VariableElement parameter : incrementParameters) { + TypeMirror type = parameter.asType(); + if (type.getKind() != TypeKind.LONG && !context.getClassUtils().isSame(type, Long.class)) { + context + .getMessager() + .error( + methodElement, + processedType, + "Invalid argument type: increment parameters of %s methods can only be " + + "primitive longs or java.lang.Long. 
Offending parameter: '%s' (%s)", + Increment.class.getSimpleName(), + parameter.getSimpleName(), + type); + return Optional.empty(); + } + } + + // Validate the return type: + DaoReturnType returnType = + parseAndValidateReturnType(getSupportedReturnTypes(), Increment.class.getSimpleName()); + if (returnType == null) { + return Optional.empty(); + } + + // Generate the method: + String helperFieldName = enclosingClass.addEntityHelperField(ClassName.get(entityElement)); + String statementName = + enclosingClass.addPreparedStatement( + methodElement, + (methodBuilder, requestName) -> + generatePrepareRequest( + methodBuilder, + requestName, + entityDefinition, + helperFieldName, + incrementParameters)); + + CodeBlock.Builder updateStatementBlock = CodeBlock.builder(); + + updateStatementBlock.addStatement( + "$T boundStatementBuilder = $L.boundStatementBuilder()", + BoundStatementBuilder.class, + statementName); + + populateBuilderWithStatementAttributes(updateStatementBlock, methodElement); + populateBuilderWithFunction(updateStatementBlock, boundStatementFunction); + + // Bind the counter increments. The bind parameter names are always the raw parameter names, see + // generatePrepareRequest. + List bindMarkerNames = + incrementParameters.stream() + .map(p -> CodeBlock.of("$S", p.getSimpleName())) + .collect(Collectors.toList()); + // Force the null saving strategy. This will fail if the user targets Cassandra 2.2, but + // SET_TO_NULL would not work with counters anyway. 
+ updateStatementBlock.addStatement( + "final $1T nullSavingStrategy = $1T.$2L", + NullSavingStrategy.class, + NullSavingStrategy.DO_NOT_SET); + GeneratedCodePatterns.bindParameters( + incrementParameters, bindMarkerNames, updateStatementBlock, enclosingClass, context, true); + + // Bind the PK columns + List primaryKeyNames = + entityDefinition.getPrimaryKey().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()); + GeneratedCodePatterns.bindParameters( + primaryKeyParameters, + primaryKeyNames, + updateStatementBlock, + enclosingClass, + context, + false); + + updateStatementBlock + .add("\n") + .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + + return crudMethod(updateStatementBlock, returnType, helperFieldName); + } + + private void generatePrepareRequest( + MethodSpec.Builder methodBuilder, + String requestName, + EntityDefinition entityDefinition, + String helperFieldName, + List incrementParameters) { + + if (incrementParameters.isEmpty()) { + context + .getMessager() + .error( + methodElement, + processedType, + "%s method must take at least one parameter representing an increment to a " + + "counter column", + Increment.class.getSimpleName()); + return; + } + + methodBuilder + .addStatement("$L.throwIfKeyspaceMissing()", helperFieldName) + .addCode( + "$[$1T $2L = (($3L.getKeyspaceId() == null)\n" + + "? $4T.update($3L.getTableId())\n" + + ": $4T.update($3L.getKeyspaceId(), $3L.getTableId()))", + SimpleStatement.class, + requestName, + helperFieldName, + QueryBuilder.class); + + // Add an increment clause for every non-PK parameter. 
Example: for a parameter `long oneStar` + // => `.append("one_star", QueryBuilder.bindMarker("oneStar"))` + for (VariableElement parameter : incrementParameters) { + CodeBlock cqlName = null; + CqlName annotation = parameter.getAnnotation(CqlName.class); + if (annotation != null) { + // If a CQL name is provided, use that + cqlName = CodeBlock.of("$S", annotation.value()); + } else { + // Otherwise, try to match the parameter to an entity property based on the names, for + // example parameter `oneStar` matches `ProductRating.getOneStar()`. + for (PropertyDefinition property : entityDefinition.getRegularColumns()) { + if (property.getJavaName().equals(parameter.getSimpleName().toString())) { + cqlName = property.getCqlName(); + break; + } + } + if (cqlName == null) { + List javaNames = + StreamSupport.stream(entityDefinition.getRegularColumns().spliterator(), false) + .map(PropertyDefinition::getJavaName) + .collect(Collectors.toList()); + context + .getMessager() + .error( + parameter, + processedType, + "Could not match '%s' with any counter column in %s (expected one of: %s). " + + "You can also specify a CQL name directly with @%s.", + parameter.getSimpleName(), + entityDefinition.getClassName().simpleName(), + javaNames, + CqlName.class.getSimpleName()); + // Don't return abruptly, execute the rest of the method to finish the Java statement + // cleanly (otherwise JavaPoet throws an error). The generated statement will be + // incorrect, but that doesn't matter since we've already thrown a compile error. + break; + } + } + + // Always use the parameter name. This is what the binding code will expect (see + // GeneratedCodePatterns.bindParameters call in generate()) + String bindMarkerName = parameter.getSimpleName().toString(); + + // We use `append` to generate "c=c+?". QueryBuilder also has `increment` that produces + // "c+=?", but that doesn't work with Cassandra 2.1. 
+ methodBuilder.addCode( + "\n.append($1L, $2T.bindMarker($3S))", cqlName, QueryBuilder.class, bindMarkerName); + } + + for (PropertyDefinition property : entityDefinition.getPrimaryKey()) { + methodBuilder.addCode( + "\n.where($1T.column($2L).isEqualTo($3T.bindMarker($2L)))", + Relation.class, + property.getCqlName(), + QueryBuilder.class); + } + + methodBuilder.addCode("\n.build()$];\n"); + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index 8e8a8509dba..a09e70eb632 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -20,6 +20,8 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Delete; +import com.datastax.oss.driver.api.mapper.annotations.Increment; import com.datastax.oss.driver.api.mapper.annotations.StatementAttributes; import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import com.datastax.oss.driver.api.querybuilder.QueryBuilder; @@ -38,6 +40,8 @@ import java.util.function.Function; import java.util.function.UnaryOperator; import java.util.stream.Collectors; +import javax.lang.model.element.AnnotationMirror; +import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; @@ -271,4 +275,50 @@ protected Optional crudMethod( .wrapWithErrorHandling(createStatementBlock.build(), methodElement, typeParameters)); return Optional.of(method.build()); } + + /** + * 
Reads the "entityClass" parameter from method annotations that define it (such as {@link + * Delete} or {@link Increment}), and finds the corresponding entity class element if it exists. + */ + protected TypeElement getEntityClassFromAnnotation(Class annotation) { + + // Note: because entityClass references a class, we can't read it directly through + // methodElement.getAnnotation(annotation). + + AnnotationMirror annotationMirror = null; + for (AnnotationMirror candidate : methodElement.getAnnotationMirrors()) { + if (context.getClassUtils().isSame(candidate.getAnnotationType(), annotation)) { + annotationMirror = candidate; + break; + } + } + assert annotationMirror != null; + + for (Map.Entry entry : + annotationMirror.getElementValues().entrySet()) { + if (entry.getKey().getSimpleName().contentEquals("entityClass")) { + @SuppressWarnings("unchecked") + List values = + (List) entry.getValue().getValue(); + if (values.isEmpty()) { + return null; + } + TypeMirror mirror = (TypeMirror) values.get(0).getValue(); + TypeElement element = EntityUtils.asEntityElement(mirror, typeParameters); + if (values.size() > 1) { + context + .getMessager() + .warn( + methodElement, + processedType, + "Too many entity classes: %s must have at most one 'entityClass' argument " + + "(will use the first one: %s)", + annotation.getSimpleName(), + mirror); + } + return element; + } + } + return null; + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java index c0fddd3c098..ab298a31ba2 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java @@ -106,6 +106,13 @@ public static boolean areParametersValid( ExecutableElement methodElement, TypeElement 
processedType, String exceptionCondition) { + + if (exceptionCondition == null || exceptionCondition.isEmpty()) { + exceptionCondition = ""; + } else { + exceptionCondition = " that " + exceptionCondition; + } + List primaryKeyTypes = entityDefinition.getPrimaryKey().stream() .map(d -> d.getType().asTypeName()) @@ -123,7 +130,7 @@ public static boolean areParametersValid( .error( methodElement, processedType, - "Invalid parameter list: %s methods that %s " + "Invalid parameter list: %s methods%s " + "must at least specify partition key components " + "(expected partition key of %s: %s)", annotationClass.getSimpleName(), @@ -139,7 +146,7 @@ public static boolean areParametersValid( .error( methodElement, processedType, - "Invalid parameter list: %s methods that %s " + "Invalid parameter list: %s methods%s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). Too many parameters provided", annotationClass.getSimpleName(), @@ -159,7 +166,7 @@ public static boolean areParametersValid( .error( methodElement, processedType, - "Invalid parameter list: %s methods that %s " + "Invalid parameter list: %s methods%s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). 
Mismatch at index %d: %s should be %s", annotationClass.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultPropertyDefinition.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultPropertyDefinition.java index 9916d00e0dc..8b0376c615d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultPropertyDefinition.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultPropertyDefinition.java @@ -21,6 +21,7 @@ public class DefaultPropertyDefinition implements PropertyDefinition { + private final String javaName; private final CodeBlock selector; private final CodeBlock cqlName; private final String getterName; @@ -35,6 +36,7 @@ public DefaultPropertyDefinition( String setterName, PropertyType type, CqlNameGenerator cqlNameGenerator) { + this.javaName = javaName; this.cqlName = customCqlName @@ -55,6 +57,11 @@ public DefaultPropertyDefinition( this.type = type; } + @Override + public String getJavaName() { + return javaName; + } + @Override public CodeBlock getSelector() { return selector; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java index f73c9a18f97..ed809528d53 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java @@ -27,6 +27,12 @@ */ public interface PropertyDefinition { + /** + * @return the name of the property, in the JavaBeans sense. In other words this is {@link + * #getGetterName()} minus the "get" prefix and decapitalized. 
+ */ + String getJavaName(); + /** * @return A Java snippet that produces the corresponding expression in a SELECT * statement, for example: diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Increment.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Increment.java new file mode 100644 index 00000000000..c87ccb330b3 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Increment.java @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.annotations; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveResultSet; +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.function.Function; +import java.util.function.UnaryOperator; + +/** + * Annotates a {@link Dao} method that increments a counter table that is mapped to an {@link + * Entity}-annotated class. + * + *

          Example: + * + *

          + * @Entity
          + * public class Votes {
          + *   @PartitionKey private int articleId;
          + *   private long upVotes;
          + *   private long downVotes;
          + *   ... // constructor(s), getters and setters, etc.
          + * }
          + * @Dao
          + * public interface VotesDao {
          + *   @Increment(entityClass = Votes.class)
          + *   void incrementUpVotes(int articleId, long upVotes);
          + *
          + *   @Increment(entityClass = Votes.class)
          + *   void incrementDownVotes(int articleId, long downVotes);
          + *
          + *   @Select
          + *   Votes findById(int articleId);
          + * }
          + * 
          + * + *

          Parameters

          + * + * The entity class must be specified with {@link #entityClass()}. + * + *

          The method's parameters must start with the full primary key, in the exact order (as defined + * by the {@link PartitionKey} and {@link ClusteringColumn} annotations in the entity class). The + * parameter names don't necessarily need to match the names of the columns, but the types must + * match. Unlike other methods like {@link Select} or {@link Delete}, counter updates cannot operate + * on a whole partition, they need to target exactly one row; so all the partition key and + * clustering columns must be specified. + * + *

          Then must follow one or more parameters representing counter increments. Their type must be + * {@code long} or {@link Long}. The name of the parameter must match the name of the entity + * property that maps to the counter (that is, the name of the getter without "get" and + * decapitalized). Alternatively, you may annotate a parameter with {@link CqlName} to specify the + * raw column name directly; in that case, the name of the parameter does not matter: + * + *

          + * @Increment(entityClass = Votes.class)
          + * void incrementUpVotes(int articleId, @CqlName("up_votes") long foobar);
          + * 
          + * + * When you invoke the method, each parameter value is interpreted as a delta that will be + * applied to the counter. In other words, if you pass 1, the counter will be incremented by 1. + * Negative values are allowed. If you are using Cassandra 2.2 or above, you can use {@link Long} + * and pass {@code null} for some of the parameters, they will be ignored (following {@link + * NullSavingStrategy#DO_NOT_SET} semantics). If you are using Cassandra 2.1, {@code null} values + * will trigger a runtime error. + * + *

          A {@link Function Function<BoundStatementBuilder, BoundStatementBuilder>} or {@link + * UnaryOperator UnaryOperator<BoundStatementBuilder>} can be added as the last + * parameter. It will be applied to the statement before execution. This allows you to customize + * certain aspects of the request (page size, timeout, etc) at runtime. + * + *

          Return type

          + * + *

          The method can return {@code void}, a void {@link CompletionStage} or {@link + * CompletableFuture}, or a {@link ReactiveResultSet}. + * + *

          Target keyspace and table

          + * + *

          If a keyspace was specified when creating the DAO (see {@link DaoFactory}), then the generated + * query targets that keyspace. Otherwise, it doesn't specify a keyspace, and will only work if the + * mapper was built from a {@link Session} that has a {@linkplain + * SessionBuilder#withKeyspace(CqlIdentifier) default keyspace} set. + * + *

          If a table was specified when creating the DAO, then the generated query targets that table. + * Otherwise, it uses the default table name for the entity (which is determined by the name of the + * entity class and the naming convention). + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +public @interface Increment { + + /** + * A hint to indicate the entity class that is being targeted. This is mandatory, the mapper will + * issue a compile error if you leave it unset. + * + *

          Note that, for technical reasons, this is an array, but only one element is expected. If you + * specify more than one class, the mapper processor will generate a compile-time warning, and + * proceed with the first one. + */ + Class[] entityClass() default {}; +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index 51ed93acaf7..27539ce0482 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -174,9 +174,11 @@ private ConsistencyLevel getConsistencyLevelFromName(String name) { } protected final MapperContext context; + protected final boolean isProtocolVersionV3; protected DaoBase(MapperContext context) { this.context = context; + this.isProtocolVersionV3 = isProtocolVersionV3(context); } protected ResultSet execute(Statement statement) { @@ -282,12 +284,16 @@ CompletableFuture> executeAsyncAndMapToEntity } protected static void throwIfProtocolVersionV3(MapperContext context) { - if (context.getSession().getContext().getProtocolVersion().getCode() - <= ProtocolConstants.Version.V3) { + if (isProtocolVersionV3(context)) { throw new MapperException( String.format( "You cannot use %s.%s for protocol version V3.", NullSavingStrategy.class.getSimpleName(), NullSavingStrategy.DO_NOT_SET.name())); } } + + protected static boolean isProtocolVersionV3(MapperContext context) { + return context.getSession().getContext().getProtocolVersion().getCode() + <= ProtocolConstants.Version.V3; + } } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java index 9937a977704..d1198cff5d4 100644 --- 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java @@ -76,7 +76,7 @@ public CqlIdentifier getTableId() { return tableId; } - protected void throwIfKeyspaceMissing() { + public void throwIfKeyspaceMissing() { if (this.getKeyspaceId() == null && !context.getSession().getKeyspace().isPresent()) { throw new MapperException( String.format( From 45b12254d47320d8a325f013c4faf6b7014807ec Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 31 Jul 2020 12:17:41 -0700 Subject: [PATCH 559/979] Introduce utility class for capitalization --- .../dao/DaoImplementationGenerator.java | 4 +-- .../entity/DefaultEntityFactory.java | 8 +++--- .../entity/EntityHelperGenerator.java | 4 +-- .../mapper/processor/util/Capitalizer.java | 26 +++++++++++++++++++ .../generation/GeneratedCodePatterns.java | 4 +-- 5 files changed, 36 insertions(+), 10 deletions(-) create mode 100644 mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java index 02803c2ca54..008e2c054b6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.SingleFileCodeGenerator; +import com.datastax.oss.driver.internal.mapper.processor.util.Capitalizer; import 
com.datastax.oss.driver.internal.mapper.processor.util.HierarchyScanner; import com.datastax.oss.driver.internal.mapper.processor.util.NameIndex; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GenericTypeConstantGenerator; @@ -45,7 +46,6 @@ import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import edu.umd.cs.findbugs.annotations.NonNull; -import java.beans.Introspector; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Collections; @@ -138,7 +138,7 @@ public String addEntityHelperField(ClassName entityClassName) { return entityHelperFields.computeIfAbsent( helperClass, k -> { - String baseName = Introspector.decapitalize(entityClassName.simpleName()) + "Helper"; + String baseName = Capitalizer.decapitalize(entityClassName.simpleName()) + "Helper"; return nameIndex.uniqueField(baseName); }); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java index b9dd5deda3e..bef2242bc47 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.api.mapper.entity.naming.NamingConvention; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.util.AnnotationScanner; +import com.datastax.oss.driver.internal.mapper.processor.util.Capitalizer; import com.datastax.oss.driver.internal.mapper.processor.util.HierarchyScanner; import com.datastax.oss.driver.internal.mapper.processor.util.ResolvedAnnotation; import com.datastax.oss.driver.internal.mapper.processor.util.generation.PropertyType; @@ -36,7 
+37,6 @@ import com.datastax.oss.driver.shaded.guava.common.collect.Sets; import com.squareup.javapoet.ClassName; import edu.umd.cs.findbugs.annotations.Nullable; -import java.beans.Introspector; import java.lang.annotation.Annotation; import java.util.Collections; import java.util.List; @@ -123,10 +123,10 @@ public EntityDefinition getDefinition(TypeElement processedClass) { String propertyName; String setMethodName; if (regularGetterName) { - propertyName = Introspector.decapitalize(getMethodName.substring(3)); + propertyName = Capitalizer.decapitalize(getMethodName.substring(3)); setMethodName = getMethodName.replaceFirst("get", "set"); } else { - propertyName = Introspector.decapitalize(getMethodName.substring(2)); + propertyName = Capitalizer.decapitalize(getMethodName.substring(2)); setMethodName = getMethodName.replaceFirst("is", "set"); } // skip properties we've already encountered. @@ -220,7 +220,7 @@ public EntityDefinition getDefinition(TypeElement processedClass) { Entity.class.getSimpleName()); } - String entityName = Introspector.decapitalize(processedClass.getSimpleName().toString()); + String entityName = Capitalizer.decapitalize(processedClass.getSimpleName().toString()); String defaultKeyspace = processedClass.getAnnotation(Entity.class).defaultKeyspace(); return new DefaultEntityDefinition( diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java index bf7d552d272..2ac1e9adade 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.internal.mapper.processor.MethodGenerator; import 
com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.SingleFileCodeGenerator; +import com.datastax.oss.driver.internal.mapper.processor.util.Capitalizer; import com.datastax.oss.driver.internal.mapper.processor.util.NameIndex; import com.datastax.oss.driver.internal.mapper.processor.util.generation.BindableHandlingSharedCode; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GenericTypeConstantGenerator; @@ -33,7 +34,6 @@ import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; -import java.beans.Introspector; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -76,7 +76,7 @@ public String addEntityHelperField(ClassName childEntityName) { return childHelpers.computeIfAbsent( childEntityName, k -> { - String baseName = Introspector.decapitalize(childEntityName.simpleName()) + "Helper"; + String baseName = Capitalizer.decapitalize(childEntityName.simpleName()) + "Helper"; return nameIndex.uniqueField(baseName); }); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java new file mode 100644 index 00000000000..74b75ca95b2 --- /dev/null +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java @@ -0,0 +1,26 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.mapper.processor.util; + +import java.beans.Introspector; +import java.util.Objects; + +public class Capitalizer { + + public static String decapitalize(String name) { + return Introspector.decapitalize(Objects.requireNonNull(name)); + } +} diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java index 8be676edb7b..48574a48721 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; +import com.datastax.oss.driver.internal.mapper.processor.util.Capitalizer; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; @@ -37,7 +38,6 @@ import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import edu.umd.cs.findbugs.annotations.NonNull; -import java.beans.Introspector; import java.util.ArrayList; import 
java.util.Collections; import java.util.List; @@ -470,7 +470,7 @@ private static void convertEntitiesIntoUdts( String udtTypeName = enclosingClass .getNameIndex() - .uniqueField(Introspector.decapitalize(entityClass.simpleName()) + "UdtType"); + .uniqueField(Capitalizer.decapitalize(entityClass.simpleName()) + "UdtType"); udtTypesBuilder.addStatement( "$1T $2L = ($1T) $3L", UserDefinedType.class, udtTypeName, currentCqlType); From 5d0a22737e3ce81b723dda3ab184b531f93929d6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 31 Jul 2020 12:30:20 -0700 Subject: [PATCH 560/979] JAVA-2816: Support immutability and fluent accessors in the mapper --- changelog/README.md | 1 + .../oss/driver/mapper/FluentEntityIT.java | 158 +++++++++++ .../oss/driver/mapper/ImmutableEntityIT.java | 145 ++++++++++ manual/mapper/config/README.md | 184 +------------ manual/mapper/config/kotlin/README.md | 109 ++++++++ manual/mapper/config/lombok/README.md | 90 ++++++ manual/mapper/config/record/README.md | 35 +++ manual/mapper/config/scala/README.md | 57 ++++ manual/mapper/entities/README.md | 108 +++++++- .../mapper/processor/MapperProcessor.java | 21 +- .../mapper/processor/dao/EntityUtils.java | 4 +- .../entity/DefaultEntityDefinition.java | 10 +- .../entity/DefaultEntityFactory.java | 259 ++++++++++++++++-- .../processor/entity/EntityDefinition.java | 4 + .../EntityHelperGetMethodGenerator.java | 129 +++++---- .../processor/entity/PropertyDefinition.java | 4 +- .../mapper/processor/util/Capitalizer.java | 11 + .../util/generation/PropertyType.java | 27 +- .../entity/EntityAnnotationTest.java | 2 +- .../mapper/annotations/PropertyStrategy.java | 88 ++++++ .../api/mapper/entity/naming/GetterStyle.java | 48 ++++ .../api/mapper/entity/naming/SetterStyle.java | 44 +++ 22 files changed, 1258 insertions(+), 280 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java create mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java create mode 100644 manual/mapper/config/kotlin/README.md create mode 100644 manual/mapper/config/lombok/README.md create mode 100644 manual/mapper/config/record/README.md create mode 100644 manual/mapper/config/scala/README.md create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/GetterStyle.java create mode 100644 mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/SetterStyle.java diff --git a/changelog/README.md b/changelog/README.md index b97241e171d..f65450c221b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [new feature] JAVA-2816: Support immutability and fluent accessors in the mapper - [new feature] JAVA-2721: Add counter support in the mapper - [bug] JAVA-2863: Reintroduce mapper processor dependency to SLF4J diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java new file mode 100644 index 00000000000..2da087ecfd7 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java @@ -0,0 +1,158 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.mapper; + +import static com.datastax.oss.driver.api.mapper.entity.naming.GetterStyle.FLUENT; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.naming.SetterStyle; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Objects; +import java.util.UUID; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class FluentEntityIT extends InventoryITBase { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule 
CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static FluentProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = SESSION_RULE.session(); + + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); + } + + InventoryMapper mapper = InventoryMapper.builder(session).build(); + dao = mapper.immutableProductDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_insert_and_retrieve_immutable_entities() { + FluentProduct originalProduct = + new FluentProduct() + .id(UUID.randomUUID()) + .description("mock description") + .dimensions(new Dimensions(1, 2, 3)); + dao.save(originalProduct); + + FluentProduct retrievedProduct = dao.findById(originalProduct.id()); + assertThat(retrievedProduct).isEqualTo(originalProduct); + } + + @Entity + @CqlName("product") + @PropertyStrategy(getterStyle = FLUENT, setterStyle = SetterStyle.FLUENT) + public static class FluentProduct { + @PartitionKey private UUID id; + private String description; + private Dimensions dimensions; + + public UUID id() { + return id; + } + + public FluentProduct id(UUID id) { + this.id = id; + return this; + } + + public String description() { + return description; + } + + public FluentProduct description(String description) { + this.description = description; + return this; + } + + public Dimensions dimensions() { + return dimensions; + } + + public FluentProduct dimensions(Dimensions dimensions) { + this.dimensions = dimensions; + return this; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof FluentProduct) { + FluentProduct that = (FluentProduct) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description) + && Objects.equals(this.dimensions, that.dimensions); + } else { + return false; + } + } + + @Override + public int 
hashCode() { + return Objects.hash(id, description, dimensions); + } + } + + @Mapper + public interface InventoryMapper { + static MapperBuilder builder(CqlSession session) { + return new FluentEntityIT_InventoryMapperBuilder(session); + } + + @DaoFactory + FluentProductDao immutableProductDao(@DaoKeyspace CqlIdentifier keyspace); + } + + @Dao + public interface FluentProductDao { + @Select + FluentProduct findById(UUID productId); + + @Insert + void save(FluentProduct product); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java new file mode 100644 index 00000000000..ef77e9b5f77 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java @@ -0,0 +1,145 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.mapper; + +import static com.datastax.oss.driver.api.mapper.entity.naming.GetterStyle.FLUENT; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.CqlName; +import com.datastax.oss.driver.api.mapper.annotations.Dao; +import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; +import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.Insert; +import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; +import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Objects; +import java.util.UUID; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class ImmutableEntityIT extends InventoryITBase { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static ImmutableProductDao dao; + + @BeforeClass + public static void setup() { + CqlSession session = 
SESSION_RULE.session(); + + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); + } + + InventoryMapper mapper = InventoryMapper.builder(session).build(); + dao = mapper.immutableProductDao(SESSION_RULE.keyspace()); + } + + @Test + public void should_insert_and_retrieve_immutable_entities() { + ImmutableProduct originalProduct = + new ImmutableProduct(UUID.randomUUID(), "mock description", new Dimensions(1, 2, 3)); + dao.save(originalProduct); + + ImmutableProduct retrievedProduct = dao.findById(originalProduct.id()); + assertThat(retrievedProduct).isEqualTo(originalProduct); + } + + @Entity + @CqlName("product") + @PropertyStrategy(getterStyle = FLUENT, mutable = false) + public static class ImmutableProduct { + @PartitionKey private final UUID id; + private final String description; + private final Dimensions dimensions; + + public ImmutableProduct(UUID id, String description, Dimensions dimensions) { + this.id = id; + this.description = description; + this.dimensions = dimensions; + } + + public UUID id() { + return id; + } + + public String description() { + return description; + } + + public Dimensions dimensions() { + return dimensions; + } + + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof ImmutableProduct) { + ImmutableProduct that = (ImmutableProduct) other; + return Objects.equals(this.id, that.id) + && Objects.equals(this.description, that.description) + && Objects.equals(this.dimensions, that.dimensions); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(id, description, dimensions); + } + } + + @Mapper + public interface InventoryMapper { + static MapperBuilder builder(CqlSession session) { + return new ImmutableEntityIT_InventoryMapperBuilder(session); + } + + @DaoFactory + ImmutableProductDao immutableProductDao(@DaoKeyspace 
CqlIdentifier keyspace); + } + + @Dao + public interface ImmutableProductDao { + @Select + ImmutableProduct findById(UUID productId); + + @Insert + void save(ImmutableProduct product); + } +} diff --git a/manual/mapper/config/README.md b/manual/mapper/config/README.md index 5d42e104cf6..5a6df9d2ba7 100644 --- a/manual/mapper/config/README.md +++ b/manual/mapper/config/README.md @@ -109,183 +109,7 @@ You will find the generated files in `build/generated/sources/annotationProcesso ### Integration with other languages and libraries -#### Lombok - -[Lombok](https://projectlombok.org/) is a popular library that automates boilerplate code, such as -getters and setters. This can be convenient for mapped entities: - -```java -import com.datastax.oss.driver.api.mapper.annotations.Entity; -import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.Setter; -import lombok.ToString; - -@Entity -@EqualsAndHashCode -@ToString -public class Product { - @PartitionKey @Getter @Setter private int id; - @Getter @Setter private String description; -} -``` - -The mapper can process Lombok-annotated classes just like regular code. The only requirement is that -Lombok's annotation processor must run *before* the mapper's. - -With Maven, declaring Lombok as a provided dependency is not enough; you must also redeclare it in -the `` section, before the mapper: - -```xml - - ... - ... - - - - - com.datastax.oss - java-driver-mapper-runtime - ${java-driver.version} - - - org.projectlombok - lombok - ${lombok.version} - provided - - - - - - - maven-compiler-plugin - 3.8.1 - - 1.8 - 1.8 - - - org.projectlombok - lombok - ${lombok.version} - - - com.datastax.oss - java-driver-mapper-processor - ${java-driver.version} - - - - - - -``` - -With Gradle, a similar result can be achieved with: - -```groovy -apply plugin: 'java' - -def javaDriverVersion = '...' -def lombokVersion = '...' 
- -dependencies { - annotationProcessor group: 'org.projectlombok', name: 'lombok', version: lombokVersion - annotationProcessor group: 'com.datastax.oss', name: 'java-driver-mapper-processor', version: javaDriverVersion - compile group: 'com.datastax.oss', name: 'java-driver-mapper-runtime', version: javaDriverVersion - compileOnly group: 'org.projectlombok', name: 'lombok', version: lombokVersion -} -``` - -You'll also need to install a Lombok plugin in your IDE (for IntelliJ IDEA, [this -one](https://plugins.jetbrains.com/plugin/6317-lombok) is available in the marketplace). - -#### Kotlin - -[Kotlin](https://kotlinlang.org/) is an alternative language for the JVM. Its compact syntax and -native support for annotation processing make it a good fit for the mapper. - -To set up your project, refer to the Kotlin website: - -* Maven: configure [dual compilation][maven_kotlin_java] of Kotlin and Java sources. In addition, - you'll need an additional execution of the [kotlin-maven-plugin:kapt][maven_kapt] goal with the - mapper processor before compilation: - - ```xml - - org.jetbrains.kotlin - kotlin-maven-plugin - ${kotlin.version} - - - kapt - kapt - - - src/main/kotlin - src/main/java - - - - com.datastax.oss - java-driver-mapper-processor - ${java-driver.version} - - - - - - compile - compile - ... - - - - ``` - -* Gradle: configure the [kotlin][gradle_kotlin] and [kotlin_kapt][gradle_kapt] plugins in your build - script. In addition, declare the dependency to the mapper processor with `kapt` instead of - `annotationProcessor`: - - ```groovy - apply plugin: 'kotlin' - apply plugin: 'kotlin-kapt' - - dependencies { - kapt group: 'com.datastax.oss', name: 'java-driver-mapper-processor', version: javaDriverVersion - ... - } - ``` - -You can use Kotlin [data classes] for your entities. 
Just keep in mind that the mapper expects a -no-arg constructor, which means that you must define default values; and setters, which means that -properties must be declared with `var`, not `val`. - -```kotlin -@Entity -data class Product(@PartitionKey var id: Int? = null, var description: String? = null) -``` - -All of the [property annotations](../entities/#property-annotations) can be declared directly on the -constructor properties. - -If you want to take advantage of [null saving strategies](../daos/null_saving/), your properties -should be nullable. - -The other mapper interfaces are pretty similar to the Java versions: - -```kotlin -@Dao -interface ProductDao { - @Insert - fun insert(product: Product) -} -``` - -[maven_kotlin_java]: https://kotlinlang.org/docs/reference/using-maven.html#compiling-kotlin-and-java-sources -[maven_kapt]: https://kotlinlang.org/docs/reference/kapt.html#using-in-maven -[gradle_kotlin]: https://kotlinlang.org/docs/reference/using-gradle.html -[gradle_kapt]: https://kotlinlang.org/docs/reference/kapt.html#using-in-gradle -[data classes]: https://kotlinlang.org/docs/reference/data-classes.html +* [Kotlin](kotlin/) +* [Lombok](lombok/) +* [Java 14 records](record/) +* [Scala](scala/) diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md new file mode 100644 index 00000000000..f94241850ae --- /dev/null +++ b/manual/mapper/config/kotlin/README.md @@ -0,0 +1,109 @@ +## Kotlin + +[Kotlin](https://kotlinlang.org/) is an alternative language for the JVM. Its compact syntax and +native support for annotation processing make it a good fit for the mapper. + +We have a full example at [DataStax-Examples/object-mapper-jvm/kotlin]. + +### Writing the model + +You can use Kotlin [data classes] for your entities. 
Data classes are usually +[immutable](../../entities/#mutability), but you don't need to declare that explicitly with +[@PropertyStrategy]: the mapper detects that it's processing Kotlin code, and will assume `mutable = +false` by default: + +```kotlin +@Entity +data class Product(@PartitionKey val id: Int?, val description: String?) +``` + +Data classes may also be made mutable (by declaring the components with `var` instead of `val`). If +you choose that approach, you'll have to annotate your entities with [@PropertyStrategy], and also +declare a default value for every component in order to generate a no-arg constructor: + +```kotlin +@Entity +@PropertyStrategy(mutable = true) +data class Product(@PartitionKey var id: Int? = null, var description: String? = null) +``` + +All of the [property annotations](../../entities/#property-annotations) can be declared directly on +the components. + +If you want to take advantage of [null saving strategies](../../daos/null_saving/), your components +should be nullable. + +The other mapper interfaces are direct translations of the Java versions: + +```kotlin +@Dao +interface ProductDao { + @Insert + fun insert(product: Product) +} +``` + +Known limitation: because of a Kotlin bug ([KT-4779]), you can't use default interface methods. They +will appear as abstract methods to the mapper processor, which will generate an error since they are +not properly annotated. As a workaround, you can use a companion object method that takes the DAO as +an argument (as shown in [UserDao.kt]), or query provider methods. + +### Building + +#### Gradle + +See the example's [build.gradle]. + +You enable Kotlin support with [kotlin][gradle_kotlin] and [kotlin_kapt][gradle_kapt], and declare +the mapper processor with the `kapt` directive. + +#### Maven + +Configure [dual compilation][maven_kotlin_java] of Kotlin and Java sources. 
In addition, you'll need +an additional execution of the [kotlin-maven-plugin:kapt][maven_kapt] goal with the mapper processor +before compilation: + +```xml + + org.jetbrains.kotlin + kotlin-maven-plugin + ${kotlin.version} + + + kapt + kapt + + + src/main/kotlin + src/main/java + + + + com.datastax.oss + java-driver-mapper-processor + ${java-driver.version} + + + + + + compile + compile + ... + + + +``` + +[maven_kotlin_java]: https://kotlinlang.org/docs/reference/using-maven.html#compiling-kotlin-and-java-sources +[maven_kapt]: https://kotlinlang.org/docs/reference/kapt.html#using-in-maven +[gradle_kotlin]: https://kotlinlang.org/docs/reference/using-gradle.html +[gradle_kapt]: https://kotlinlang.org/docs/reference/kapt.html#using-in-gradle +[data classes]: https://kotlinlang.org/docs/reference/data-classes.html +[KT-4779]: https://youtrack.jetbrains.com/issue/KT-4779 + +[DataStax-Examples/object-mapper-jvm/kotlin]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/kotlin +[build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle +[UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt + +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/lombok/README.md b/manual/mapper/config/lombok/README.md new file mode 100644 index 00000000000..e2a4f0263c8 --- /dev/null +++ b/manual/mapper/config/lombok/README.md @@ -0,0 +1,90 @@ +## Lombok + +[Lombok](https://projectlombok.org/) is a popular library that automates repetitive code, such as +getters and setters. You can use it in conjunction with the mapper to eliminate even more +boilerplate. + +We have a full example at [DataStax-Examples/object-mapper-jvm/lombok]. 
+
+### Writing the model
+
+You can either map mutable "data" classes:
+
+```java
+import lombok.Data;
+import com.datastax.oss.driver.api.mapper.annotations.*;
+
+@Data
+@Entity
+public class Product {
+  @PartitionKey private int id;
+  private String description;
+}
+```
+
+Or immutable "value" classes:
+
+```java
+import lombok.Value;
+import com.datastax.oss.driver.api.mapper.annotations.*;
+
+@Value
+@Entity
+@PropertyStrategy(mutable = false)
+public class Product {
+  @PartitionKey private int id;
+  private String description;
+}
+```
+
+You can also use Lombok's fluent accessors if you configure the mapper accordingly:
+
+```java
+import lombok.Data;
+import lombok.experimental.Accessors;
+import com.datastax.oss.driver.api.mapper.annotations.*;
+import com.datastax.oss.driver.api.mapper.entity.naming.*;
+
+@Data
+@Accessors(fluent = true)
+@Entity
+@PropertyStrategy(getterStyle = GetterStyle.FLUENT, setterStyle = SetterStyle.FLUENT)
+public class Product {
+  @PartitionKey private int id;
+  private String description;
+}
+```
+
+### Building
+
+You'll need to configure the Lombok annotation processor in your build. The only requirement is that
+it must run *before* the mapper's.
+
+#### Maven
+
+See the compiler plugin's configuration in the example's [pom.xml].
+
+#### Gradle
+
+A similar result can be achieved with:
+
+```groovy
+apply plugin: 'java'
+
+def javaDriverVersion = '...'
+def lombokVersion = '...'
+ +dependencies { + annotationProcessor group: 'org.projectlombok', name: 'lombok', version: lombokVersion + annotationProcessor group: 'com.datastax.oss', name: 'java-driver-mapper-processor', version: javaDriverVersion + compile group: 'com.datastax.oss', name: 'java-driver-mapper-runtime', version: javaDriverVersion + compileOnly group: 'org.projectlombok', name: 'lombok', version: lombokVersion +} +``` + +You'll also need to install a Lombok plugin in your IDE (for IntelliJ IDEA, [this +one](https://plugins.jetbrains.com/plugin/6317-lombok) is available in the marketplace). + + +[DataStax-Examples/object-mapper-jvm/lombok]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/lombok +[pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/lombok/pom.xml diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md new file mode 100644 index 00000000000..c53de01e30d --- /dev/null +++ b/manual/mapper/config/record/README.md @@ -0,0 +1,35 @@ +## Java 14 Records + +Java 14 introduced [Record] as a lightweight, immutable alternative to POJOs. You can map annotated +records as entities. + +We have a full example at [DataStax-Examples/object-mapper-jvm/record]. + +Note: records are a **preview feature** of Java 14. As such the mapper's support for them is also +provided as a preview. + +### Writing the model + +Annotate your records like regular classes: + +```java +@Entity +record Product(@PartitionKey int id, String description) {} +``` + +Records are immutable and use the [fluent getter style](../../entities#getter-style), but you don't +need to declare that explicitly with [@PropertyStrategy]: the mapper detects when it's processing a +record, and will assume `mutable = false, getterStyle = FLUENT` by default. + +### Building + +You need to build with Java 14, and pass the `--enable-preview` flag to both the compiler and the +runtime JVM. See [pom.xml] in the example. 
+ + +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html + +[DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record +[pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml + +[Record]: https://docs.oracle.com/en/java/javase/14/docs/api/java.base/java/lang/Record.html diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md new file mode 100644 index 00000000000..4c00af152b1 --- /dev/null +++ b/manual/mapper/config/scala/README.md @@ -0,0 +1,57 @@ +## Scala + +[Scala](https://www.scala-lang.org/) is an alternative language for the JVM. It doesn't support +annotation processing natively, so using it with the mapper is a bit more complicated, but it can be +done. + +We have a full example at [DataStax-Examples/object-mapper-jvm/scala]. + +### Writing the model + +You can use Scala case classes for your entities. Notice the peculiar syntax for field annotations: + +```scala +@Entity +case class UserVideo(@(PartitionKey@field) userid: UUID, + @(ClusteringColumn@field)(0) addedDate: Instant, + @(ClusteringColumn@field)(1) videoid: UUID, + name: String, + previewImageLocation: String) +``` + +Case classes are immutable and use the [fluent getter style](../../entities#getter-style), but you +don't need to declare that explicitly with [@PropertyStrategy]: the mapper detects when it's +processing a case class, and will assume `mutable = false, getterStyle = FLUENT` by default. + +The DAOs and main mapper can be defined as Scala traits, that are direct translations of their Java +equivalents: + +```scala +@Dao +trait UserDao { + @Select + def get(userid: UUID): User +} +``` + +### Building + +Since Scala does not support annotation processing, the mapper processor cannot operate on Scala +sources directly. 
But it can process the compiled class files output by the Scala compiler. So the +compilation happens in 3 phases: + +1. Compile the Scala sources with the regular sbt task. +2. Execute a custom task that runs the annotation processor (`javac -proc:only ...`) on the compiled + class files. +3. Execute another custom task that compiles the Java sources generated by the mapper. + +See the example's [build.sbt] for the full details. + +Because of that process, the sources fed to the processor cannot reference any generated code. So +the application code needs to be placed in a separate subproject, in order to have access to the +mapper builder. + +[DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala +[build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt + +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 926f640d5e9..eaa95a8f4cd 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -8,6 +8,7 @@ POJO annotated with [@Entity], must expose a no-arg constructor. * [@NamingStrategy] * [@CqlName] * [@HierarchyScanStrategy] + * [@PropertyStrategy] * field/method-level annotations: * [@PartitionKey], [@ClusteringColumn] * [@Computed] @@ -37,19 +38,105 @@ public class Product { } ``` -Each entity property will be mapped to a CQL column. In order to detect a property: +Each entity property will be mapped to a CQL column. The way properties are detected is +configurable, as explained below: -* there **must** be a getter method that follows the usual naming convention (e.g. `getDescription`) - and has no parameters. 
The name of the property is obtained by removing the "get" prefix and - decapitalizing (`description`), and the type of the property is the return type of the getter. -* there **must** be a matching setter method (`setDescription`), with a single parameter that has - the same type as the property (the return type does not matter). +### Property detection -There *may* also be a matching field (`description`) that has the same type as the property, but -this is not mandatory: a property can have only a getter and a setter (for example if the value is -computed, or the field has a different name, or is nested into another field, etc.) +#### Mutability + +By default, the mapper expects mutable entity classes: + +```java +@Entity +public class Product { + @PartitionKey private UUID productId; + + public Product() {} + + public UUID getProductId() { return productId; } + public void setProductId(UUID productId) { this.productId = productId; } +} +``` + +With mutable entities: + +* each entity property: + * **must** have a non-void, no-argument getter method. + * **must** have a corresponding setter method: matching name, and exactly one argument matching + the getter's return type. Note that the return type of the setter does not matter. + * *may* have a corresponding field: matching name and type. +* the type **must** expose a non-private, no-argument constructor. + +When the mapper reads a mutable entity from the database, it will invoke the no-argument +constructor to materialize the instance, and then read and set the properties one by one. 
+ +You can switch to an immutable style with the [@PropertyStrategy] annotation: + +```java +@Entity +@PropertyStrategy(mutable = false) +public class ImmutableProduct { + @PartitionKey private final UUID productId; + + public ImmutableProduct(UUID productId) { this.productId = productId; } + + public UUID getProductId() { return productId; } +} +``` + +With immutable entities: + +* each entity property: + * **must** have a non-void, no-argument getter method. The mapper will not look for a setter. + * *may* have a corresponding field: matching name and type. You'll probably want to make that + field final (although that has no impact on the mapper-generated code). +* the type **must** expose a non-private constructor that takes every + non-[transient](#transient-properties) property, in the declaration order. -The class must expose a no-arg constructor that is at least package-private. +When the mapper reads an immutable entity from the database, it will first read all properties, then +invoke the "all columns" constructor to materialize the instance. + +Note: the "all columns" constructor must take the properties in the order that they are declared in +the entity. If the entity inherits properties from parent types, those must come last in the +constructor signature, ordered from the closest parent to the farthest. If things get too +complicated, a good trick is to deliberately omit the constructor to let the mapper processor fail: +the error message describes the expected signature. 
+
+#### Accessor styles
+
+By default, the mapper looks for JavaBeans-style accessors: getter prefixed with "get" (or "is" for
+boolean properties) and, if the entity is mutable, setter prefixed with "set":
+
+```java
+@Entity
+public class Product {
+  @PartitionKey private UUID productId;
+
+  public UUID getProductId() { return productId; }
+  public void setProductId(UUID productId) { this.productId = productId; }
+}
+```
+
+You can switch to a "fluent" style (no prefixes) with the [@PropertyStrategy] annotation:
+
+```java
+import com.datastax.oss.driver.api.mapper.entity.naming.GetterStyle;
+import com.datastax.oss.driver.api.mapper.entity.naming.SetterStyle;
+
+@Entity
+@PropertyStrategy(getterStyle = GetterStyle.FLUENT, setterStyle = SetterStyle.FLUENT)
+public class Product {
+  @PartitionKey private UUID productId;
+
+  public UUID productId() { return productId; }
+  public void productId(UUID productId) { this.productId = productId; }
+}
+```
+
+Note that if you use the fluent style with immutable entities, Java's built-in `hashCode()` and
+`toString()` methods would qualify as properties. The mapper skips them automatically. If you have
+other false positives that you'd like to ignore, mark them as [transient](#transient-properties).
### Naming strategy @@ -486,3 +573,4 @@ To control how the class hierarchy is scanned, annotate classes with [@Hierarchy [@Transient]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Transient.html [@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html [@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index ffad7dd7163..ec3b870def6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -112,13 +112,13 @@ protected void processAnnotatedTypes( ElementKind expectedKind, Function generatorFactory) { for (Element element : roundEnvironment.getElementsAnnotatedWith(annotationClass)) { - if (element.getKind() != expectedKind) { - messager.error( - (TypeElement) element, - "Only %s elements can be annotated with %s", - expectedKind, - annotationClass.getSimpleName()); - } else { + ElementKind actualKind = element.getKind(); + boolean isExpectedElement = + actualKind == expectedKind + // Hack to support Java 14 records without having to compile against JDK 14 (also + // possible because we only expect CLASS for entities). 
+ || (expectedKind == ElementKind.CLASS && actualKind.name().equals("RECORD")); + if (isExpectedElement) { // Safe cast given that we checked the kind above TypeElement typeElement = (TypeElement) element; try { @@ -129,6 +129,13 @@ protected void processAnnotatedTypes( "Unexpected error while writing generated code: %s", Throwables.getStackTraceAsString(e)); } + } else { + messager.error( + (TypeElement) element, + "Only %s elements can be annotated with %s (got %s)", + expectedKind, + annotationClass.getSimpleName(), + actualKind); } } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java index ab298a31ba2..f38ad1525f4 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java @@ -79,7 +79,9 @@ public static TypeElement asEntityElement( } else { return null; } - if (element.getKind() != ElementKind.CLASS) { + if (element.getKind() != ElementKind.CLASS + // Hack to support Java 14 records without having to compile against JDK 14 + && !element.getKind().name().equals("RECORD")) { return null; } TypeElement typeElement = (TypeElement) element; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityDefinition.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityDefinition.java index 6913375dfd3..0ed4f42c90d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityDefinition.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityDefinition.java @@ -31,6 +31,7 @@ public class DefaultEntityDefinition implements 
EntityDefinition { private final ImmutableList regularColumns; private final ImmutableList computedValues; private final String defaultKeyspace; + private final boolean mutable; public DefaultEntityDefinition( ClassName className, @@ -41,7 +42,8 @@ public DefaultEntityDefinition( List clusteringColumns, List regularColumns, List computedValues, - CqlNameGenerator cqlNameGenerator) { + CqlNameGenerator cqlNameGenerator, + boolean mutable) { this.className = className; this.cqlName = customCqlName @@ -52,6 +54,7 @@ public DefaultEntityDefinition( this.clusteringColumns = clusteringColumns; this.regularColumns = ImmutableList.copyOf(regularColumns); this.computedValues = ImmutableList.copyOf(computedValues); + this.mutable = mutable; } @Override @@ -89,4 +92,9 @@ public Iterable getRegularColumns() { public Iterable getComputedValues() { return computedValues; } + + @Override + public boolean isMutable() { + return mutable; + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java index bef2242bc47..f9949a1a56f 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java @@ -21,9 +21,12 @@ import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.NamingStrategy; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.api.mapper.annotations.Transient; import com.datastax.oss.driver.api.mapper.annotations.TransientProperties; +import com.datastax.oss.driver.api.mapper.entity.naming.GetterStyle; import 
com.datastax.oss.driver.api.mapper.entity.naming.NamingConvention; +import com.datastax.oss.driver.api.mapper.entity.naming.SetterStyle; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.util.AnnotationScanner; import com.datastax.oss.driver.internal.mapper.processor.util.Capitalizer; @@ -45,14 +48,17 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; +import java.util.stream.Collectors; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; +import javax.lang.model.element.Name; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; +import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; @@ -83,6 +89,15 @@ public EntityDefinition getDefinition(TypeElement processedClass) { typeHierarchy.add((TypeElement) context.getTypeUtils().asElement(type)); } + Language language = Language.detect(typeHierarchy); + + Optional propertyStrategy = getPropertyStrategy(typeHierarchy); + GetterStyle getterStyle = + propertyStrategy.map(PropertyStrategy::getterStyle).orElse(language.defaultGetterStyle); + SetterStyle setterStyle = + propertyStrategy.map(PropertyStrategy::setterStyle).orElse(language.defaultSetterStyle); + boolean mutable = + propertyStrategy.map(PropertyStrategy::mutable).orElse(language.defaultMutable); CqlNameGenerator cqlNameGenerator = buildCqlNameGenerator(typeHierarchy); Set transientProperties = getTransientPropertyNames(typeHierarchy); @@ -111,32 +126,42 @@ public EntityDefinition getDefinition(TypeElement processedClass) { } String getMethodName = getMethod.getSimpleName().toString(); - boolean regularGetterName = 
getMethodName.startsWith("get"); - boolean booleanGetterName = - getMethodName.startsWith("is") - && (typeMirror.getKind() == TypeKind.BOOLEAN - || context.getClassUtils().isSame(typeMirror, Boolean.class)); - if (!regularGetterName && !booleanGetterName) { + + // Skip methods that test as false positives with the fluent getter style: toString(), + // hashCode() and a few Scala or Kotlin methods. + if (getMethodName.equals("toString") + || getMethodName.equals("hashCode") + || (language == Language.SCALA_CASE_CLASS + && (getMethodName.equals("productPrefix") + || getMethodName.equals("productArity") + || getMethodName.equals("productIterator") + || getMethodName.equals("productElementNames") + || getMethodName.startsWith("copy$default$"))) + || (language == Language.KOTLIN_DATA_CLASS + && getMethodName.matches("component[0-9]+"))) { continue; } - String propertyName; - String setMethodName; - if (regularGetterName) { - propertyName = Capitalizer.decapitalize(getMethodName.substring(3)); - setMethodName = getMethodName.replaceFirst("get", "set"); - } else { - propertyName = Capitalizer.decapitalize(getMethodName.substring(2)); - setMethodName = getMethodName.replaceFirst("is", "set"); + String propertyName = inferPropertyName(getMethodName, getterStyle, typeMirror); + if (propertyName == null) { + // getMethodName does not follow a known pattern => this is not a getter, skip + continue; } + // skip properties we've already encountered. 
if (encounteredPropertyNames.contains(propertyName)) { continue; } - ExecutableElement setMethod = findSetMethod(typeHierarchy, setMethodName, typeMirror); - if (setMethod == null) { - continue; // must have both + String setMethodName; + if (mutable) { + setMethodName = inferSetMethodName(propertyName, setterStyle); + ExecutableElement setMethod = findSetMethod(typeHierarchy, setMethodName, typeMirror); + if (setMethod == null) { + continue; // must have both + } + } else { + setMethodName = null; } VariableElement field = findField(typeHierarchy, propertyName, typeMirror); @@ -223,16 +248,55 @@ public EntityDefinition getDefinition(TypeElement processedClass) { String entityName = Capitalizer.decapitalize(processedClass.getSimpleName().toString()); String defaultKeyspace = processedClass.getAnnotation(Entity.class).defaultKeyspace(); - return new DefaultEntityDefinition( - ClassName.get(processedClass), - entityName, - defaultKeyspace.isEmpty() ? null : defaultKeyspace, - Optional.ofNullable(processedClass.getAnnotation(CqlName.class)).map(CqlName::value), - ImmutableList.copyOf(partitionKey.values()), - ImmutableList.copyOf(clusteringColumns.values()), - regularColumns.build(), - computedValues.build(), - cqlNameGenerator); + EntityDefinition entityDefinition = + new DefaultEntityDefinition( + ClassName.get(processedClass), + entityName, + defaultKeyspace.isEmpty() ? 
null : defaultKeyspace, + Optional.ofNullable(processedClass.getAnnotation(CqlName.class)).map(CqlName::value), + ImmutableList.copyOf(partitionKey.values()), + ImmutableList.copyOf(clusteringColumns.values()), + regularColumns.build(), + computedValues.build(), + cqlNameGenerator, + mutable); + validateConstructor(entityDefinition, processedClass); + return entityDefinition; + } + + private String inferPropertyName(String getMethodName, GetterStyle getterStyle, TypeMirror type) { + switch (getterStyle) { + case FLUENT: + return getMethodName; + case JAVABEANS: + if (getMethodName.startsWith("get") && getMethodName.length() > 3) { + return Capitalizer.decapitalize(getMethodName.substring(3)); + } else if (getMethodName.startsWith("is") + && getMethodName.length() > 2 + && (type.getKind() == TypeKind.BOOLEAN + || context.getClassUtils().isSame(type, Boolean.class))) { + return Capitalizer.decapitalize(getMethodName.substring(2)); + } else { + return null; + } + default: + throw new AssertionError("Unsupported getter style " + getterStyle); + } + } + + private String inferSetMethodName(String propertyName, SetterStyle setterStyle) { + String setMethodName; + switch (setterStyle) { + case JAVABEANS: + setMethodName = "set" + Capitalizer.capitalize(propertyName); + break; + case FLUENT: + setMethodName = propertyName; + break; + default: + throw new AssertionError("Unsupported setter style " + setterStyle); + } + return setMethodName; } @Nullable @@ -452,6 +516,11 @@ private Set getTransientPropertyNames(Set typeHierarchy) { : Collections.emptySet(); } + private Optional getPropertyStrategy(Set typeHierarchy) { + return AnnotationScanner.getClassAnnotation(PropertyStrategy.class, typeHierarchy) + .map(ResolvedAnnotation::getAnnotation); + } + private void reportMultipleAnnotationError( Element element, Class a0, @@ -555,4 +624,140 @@ private void scanMethodAnnotations( } } } + + private void validateConstructor(EntityDefinition entity, TypeElement processedClass) { + 
if (entity.isMutable()) { + validateNoArgConstructor(processedClass); + } else { + validateAllColumnsConstructor(processedClass, entity.getAllColumns()); + } + } + + private void validateNoArgConstructor(TypeElement processedClass) { + for (Element child : processedClass.getEnclosedElements()) { + if (child.getKind() == ElementKind.CONSTRUCTOR) { + ExecutableElement constructor = (ExecutableElement) child; + Set modifiers = constructor.getModifiers(); + if (!modifiers.contains(Modifier.PRIVATE) && constructor.getParameters().isEmpty()) { + return; + } + } + } + context + .getMessager() + .error( + processedClass, + "Mutable @%s-annotated class must have a no-arg constructor.", + Entity.class.getSimpleName()); + } + + private void validateAllColumnsConstructor( + TypeElement processedClass, List columns) { + for (Element child : processedClass.getEnclosedElements()) { + if (child.getKind() == ElementKind.CONSTRUCTOR) { + ExecutableElement constructor = (ExecutableElement) child; + Set modifiers = constructor.getModifiers(); + if (!modifiers.contains(Modifier.PRIVATE) + && areAssignable(columns, constructor.getParameters())) { + return; + } + } + } + String signature = + columns.stream() + .map( + column -> + String.format("%s %s", column.getType().asTypeMirror(), column.getGetterName())) + .collect(Collectors.joining(", ")); + context + .getMessager() + .error( + processedClass, + "Immutable @%s-annotated class must have an \"all columns\" constructor. 
" + + "Expected signature: (%s).", + Entity.class.getSimpleName(), + signature); + } + + private boolean areAssignable( + List columns, List parameters) { + if (columns.size() != parameters.size()) { + return false; + } else { + for (int i = 0; i < columns.size(); i++) { + // What the generated code will pass to the constructor: + TypeMirror argumentType = columns.get(i).getType().asTypeMirror(); + // What the constructor declares: + TypeMirror parameterType = parameters.get(i).asType(); + if (!context.getTypeUtils().isAssignable(argumentType, parameterType)) { + return false; + } + } + return true; + } + } + + /** + * The source language (and construct) of an entity type. It impacts the defaults for entities + * that do not explicitly declare the {@link PropertyStrategy} annotation. + */ + private enum Language { + SCALA_CASE_CLASS(false, GetterStyle.FLUENT, null), + KOTLIN_DATA_CLASS(false, GetterStyle.JAVABEANS, null), + JAVA14_RECORD(false, GetterStyle.FLUENT, null), + UNKNOWN(true, GetterStyle.JAVABEANS, SetterStyle.JAVABEANS), + ; + + final boolean defaultMutable; + final GetterStyle defaultGetterStyle; + final SetterStyle defaultSetterStyle; + + Language( + boolean defaultMutable, GetterStyle defaultGetterStyle, SetterStyle defaultSetterStyle) { + this.defaultMutable = defaultMutable; + this.defaultGetterStyle = defaultGetterStyle; + this.defaultSetterStyle = defaultSetterStyle; + } + + static Language detect(Set typeHierarchy) { + for (TypeElement type : typeHierarchy) { + if (isNamed(type, "scala.Product")) { + return SCALA_CASE_CLASS; + } + if (isNamed(type, "java.lang.Record")) { + return JAVA14_RECORD; + } + } + + TypeElement entityClass = typeHierarchy.iterator().next(); + // Kotlin adds `@kotlin.Metadata` on every generated class, we also check `component1` which + // is a generated method specific to data classes (to eliminate regular Kotlin classes). 
+ if (entityClass.getAnnotationMirrors().stream().anyMatch(Language::isKotlinMetadata) + && entityClass.getEnclosedElements().stream() + .anyMatch(e -> isMethodNamed(e, "component1"))) { + return KOTLIN_DATA_CLASS; + } + + return UNKNOWN; + } + + private static boolean isNamed(TypeElement type, String expectedName) { + Name name = type.getQualifiedName(); + return name != null && name.toString().equals(expectedName); + } + + private static boolean isKotlinMetadata(AnnotationMirror a) { + DeclaredType declaredType = a.getAnnotationType(); + if (declaredType.getKind() == TypeKind.DECLARED) { + TypeElement element = (TypeElement) declaredType.asElement(); + return element.getQualifiedName().toString().equals("kotlin.Metadata"); + } + return false; + } + + private static boolean isMethodNamed(Element element, String methodName) { + return element.getKind() == ElementKind.METHOD + && element.getSimpleName().toString().equals(methodName); + } + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityDefinition.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityDefinition.java index 284ae36d649..39c08c1b9e9 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityDefinition.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityDefinition.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.mapper.processor.entity; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; @@ -73,4 +74,7 @@ default List getAllValues() { .addAll(getComputedValues()) .build(); } + + /** @see PropertyStrategy#mutable() */ + boolean isMutable(); } diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java index b6e4f67182c..f0a84517c63 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java @@ -30,6 +30,8 @@ import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; +import java.util.ArrayList; +import java.util.List; import java.util.Map; import java.util.Optional; import javax.lang.model.element.Modifier; @@ -56,84 +58,123 @@ public Optional generate() { .returns(entityDefinition.getClassName()); TypeName returnType = entityDefinition.getClassName(); - String returnName = "returnValue"; - getBuilder.addStatement("$1T $2L = new $1T()", returnType, returnName); + String resultName = "returnValue"; + boolean mutable = entityDefinition.isMutable(); + if (mutable) { + // Create an instance now, we'll call the setters as we go through the properties + getBuilder.addStatement("$1T $2L = new $1T()", returnType, resultName); + } + + // We store each read property into a local variable, store the names here (this is only used if + // the entity is immutable, we'll call the all-arg constructor at the end). 
+ List propertyValueNames = new ArrayList<>(); for (PropertyDefinition property : entityDefinition.getAllValues()) { PropertyType type = property.getType(); CodeBlock cqlName = property.getCqlName(); String setterName = property.getSetterName(); + String propertyValueName = enclosingClass.getNameIndex().uniqueField("propertyValue"); + propertyValueNames.add(propertyValueName); getBuilder.addCode("\n"); if (type instanceof PropertyType.Simple) { TypeName typeName = ((PropertyType.Simple) type).typeName; String primitiveAccessor = GeneratedCodePatterns.PRIMITIVE_ACCESSORS.get(typeName); if (primitiveAccessor != null) { // Primitive type: use dedicated getter, since it is optimized to avoid boxing - // returnValue.setLength(source.getInt("length")); + // int propertyValue1 = source.getInt("length"); getBuilder.addStatement( - "returnValue.$L(source.get$L($L))", setterName, primitiveAccessor, cqlName); + "$T $L = source.get$L($L)", typeName, propertyValueName, primitiveAccessor, cqlName); } else if (typeName instanceof ClassName) { // Unparameterized class: use the generic, class-based getter: - // returnValue.setId(source.get("id", UUID.class)); + // UUID propertyValue1 = source.get("id", UUID.class); getBuilder.addStatement( - "returnValue.$L(source.get($L, $T.class))", setterName, cqlName, typeName); + "$T $L = source.get($L, $T.class)", typeName, propertyValueName, cqlName, typeName); } else { // Parameterized type: create a constant and use the GenericType-based getter: // private static final GenericType> GENERIC_TYPE = // new GenericType>(){}; - // returnValue.setNames(source.get("names", GENERIC_TYPE)); + // List propertyValue1 = source.get("names", GENERIC_TYPE); // Note that lists, sets and maps of unparameterized classes also fall under that // category. Their getter creates a GenericType under the hood, so there's no performance // advantage in calling them instead of the generic get(). 
getBuilder.addStatement( - "returnValue.$L(source.get($L, $L))", - setterName, + "$T $L = source.get($L, $L)", + typeName, + propertyValueName, cqlName, enclosingClass.addGenericTypeConstant(typeName)); } } else if (type instanceof PropertyType.SingleEntity) { ClassName entityClass = ((PropertyType.SingleEntity) type).entityName; - // Other entity class: the CQL column is a mapped UDT. Example of generated code: + // Other entity class: the CQL column is a mapped UDT: + // Dimensions propertyValue1; // UdtValue udtValue1 = source.getUdtValue("dimensions"); - // if (udtValue1 != null) { - // Dimensions value1 = dimensionsHelper.get(udtValue1); - // returnValue.setDimensions(value1); + // if (udtValue1 == null) { + // propertyValue1 = null; + // } else { + // propertyValue1 = dimensionsHelper.get(udtValue1); // } + getBuilder.addStatement("$T $L", entityClass, propertyValueName); - // Populate udtInformation String udtValueName = enclosingClass.getNameIndex().uniqueField("udtValue"); - String valueName = enclosingClass.getNameIndex().uniqueField("value"); - // Extract UdtValue to pass it on to underlying helper method getBuilder.addStatement( "$T $L = source.getUdtValue($L)", UdtValue.class, udtValueName, cqlName); - getBuilder.beginControlFlow("if ($L != null)", udtValueName); + + getBuilder + .beginControlFlow("if ($L == null)", udtValueName) + .addStatement("$L = null", propertyValueName) + .nextControlFlow("else"); + // Get underlying udt object and set it on return type String childHelper = enclosingClass.addEntityHelperField(entityClass); - getBuilder.addStatement( - "$T $L = $L.get($L)", entityClass, valueName, childHelper, udtValueName); - getBuilder.addStatement("returnValue.$L($L)", setterName, valueName); + getBuilder.addStatement("$L = $L.get($L)", propertyValueName, childHelper, udtValueName); getBuilder.endControlFlow(); } else { // Collection of other entity class(es): the CQL column is a collection of mapped UDTs // Build a copy of the value, decoding 
all UdtValue instances into entities on the fly. - String mappedCollectionName = enclosingClass.getNameIndex().uniqueField("mappedCollection"); + // CollectionTypeT propertyValue1; + // RawCollectionTypeT rawCollection1 = source.get("column", GENERIC_TYPE); + // if (rawCollection1 == null) { + // propertyValue1 = null; + // } else { + // traverse rawCollection1 and convert all UdtValue into entity classes, recursing + // into nested collections if necessary + // } + getBuilder.addStatement("$T $L", type.asTypeName(), propertyValueName); + String rawCollectionName = enclosingClass.getNameIndex().uniqueField("rawCollection"); TypeName rawCollectionType = type.asRawTypeName(); + getBuilder.addStatement( + "$T $L = source.get($L, $L)", + rawCollectionType, + rawCollectionName, + cqlName, + enclosingClass.addGenericTypeConstant(rawCollectionType)); + getBuilder - .addStatement( - "$T $L = source.get($L, $L)", - rawCollectionType, - rawCollectionName, - cqlName, - enclosingClass.addGenericTypeConstant(rawCollectionType)) - .beginControlFlow("if ($L != null)", rawCollectionName); - convertUdtsIntoEntities(rawCollectionName, mappedCollectionName, type, getBuilder); - getBuilder - .addStatement("returnValue.$L($L)", setterName, mappedCollectionName) - .endControlFlow(); + .beginControlFlow("if ($L == null)", rawCollectionName) + .addStatement("$L = null", propertyValueName) + .nextControlFlow("else"); + convertUdtsIntoEntities(rawCollectionName, propertyValueName, type, getBuilder); + getBuilder.endControlFlow(); + } + + if (mutable) { + getBuilder.addStatement("$L.$L($L)", resultName, setterName, propertyValueName); + } + } + + if (mutable) { + // We've already created an instance and filled the properties as we went + getBuilder.addStatement("return returnValue"); + } else { + // Assume an all-arg constructor exists, and call it with all the temporary variables + getBuilder.addCode("$[return new $T(", returnType); + for (int i = 0; i < propertyValueNames.size(); i++) { + 
getBuilder.addCode((i == 0 ? "\n$L" : ",\n$L"), propertyValueNames.get(i)); } + getBuilder.addCode(")$];"); } - getBuilder.addStatement("return returnValue"); return Optional.of(getBuilder.build()); } @@ -143,7 +184,7 @@ public Optional generate() { * * @param rawObjectName the name of the local variable containing the value to convert. * @param mappedObjectName the name of the local variable that will hold the converted value (it - * does not exist yet, this method must generate the declaration). + * already exists). * @param type the type of the value. * @param getBuilder the method where the generated code will be appended. */ @@ -156,16 +197,10 @@ private void convertUdtsIntoEntities( if (type instanceof PropertyType.SingleEntity) { ClassName entityClass = ((PropertyType.SingleEntity) type).entityName; String entityHelperName = enclosingClass.addEntityHelperField(entityClass); - getBuilder.addStatement( - "$T $L = $L.get($L)", - type.asTypeName(), - mappedObjectName, - entityHelperName, - rawObjectName); + getBuilder.addStatement("$L = $L.get($L)", mappedObjectName, entityHelperName, rawObjectName); } else if (type instanceof PropertyType.EntityList) { getBuilder.addStatement( - "$T $L = $T.newArrayListWithExpectedSize($L.size())", - type.asTypeName(), + "$L = $T.newArrayListWithExpectedSize($L.size())", mappedObjectName, Lists.class, rawObjectName); @@ -174,12 +209,12 @@ private void convertUdtsIntoEntities( String rawElementName = enclosingClass.getNameIndex().uniqueField("rawElement"); getBuilder.beginControlFlow("for ($T $L: $L)", rawElementType, rawElementName, rawObjectName); String mappedElementName = enclosingClass.getNameIndex().uniqueField("mappedElement"); + getBuilder.addStatement("$T $L", mappedElementType.asTypeName(), mappedElementName); convertUdtsIntoEntities(rawElementName, mappedElementName, mappedElementType, getBuilder); getBuilder.addStatement("$L.add($L)", mappedObjectName, mappedElementName).endControlFlow(); } else if (type instanceof 
PropertyType.EntitySet) { getBuilder.addStatement( - "$T $L = $T.newLinkedHashSetWithExpectedSize($L.size())", - type.asTypeName(), + "$L = $T.newLinkedHashSetWithExpectedSize($L.size())", mappedObjectName, Sets.class, rawObjectName); @@ -188,12 +223,12 @@ private void convertUdtsIntoEntities( String rawElementName = enclosingClass.getNameIndex().uniqueField("rawElement"); getBuilder.beginControlFlow("for ($T $L: $L)", rawElementType, rawElementName, rawObjectName); String mappedElementName = enclosingClass.getNameIndex().uniqueField("mappedElement"); + getBuilder.addStatement("$T $L", mappedElementType.asTypeName(), mappedElementName); convertUdtsIntoEntities(rawElementName, mappedElementName, mappedElementType, getBuilder); getBuilder.addStatement("$L.add($L)", mappedObjectName, mappedElementName).endControlFlow(); } else if (type instanceof PropertyType.EntityMap) { getBuilder.addStatement( - "$T $L = $T.newLinkedHashMapWithExpectedSize($L.size())", - type.asTypeName(), + "$L = $T.newLinkedHashMapWithExpectedSize($L.size())", mappedObjectName, Maps.class, rawObjectName); @@ -214,6 +249,7 @@ private void convertUdtsIntoEntities( mappedKeyName = rawKeyName; // no conversion, use the instance as-is } else { mappedKeyName = enclosingClass.getNameIndex().uniqueField("mappedKey"); + getBuilder.addStatement("$T $L", mappedKeyType.asTypeName(), mappedKeyName); convertUdtsIntoEntities(rawKeyName, mappedKeyName, mappedKeyType, getBuilder); } String rawValueName = CodeBlock.of("$L.getValue()", rawEntryName).toString(); @@ -222,6 +258,7 @@ private void convertUdtsIntoEntities( mappedValueName = rawValueName; } else { mappedValueName = enclosingClass.getNameIndex().uniqueField("mappedValue"); + getBuilder.addStatement("$T $L", mappedValueType.asTypeName(), mappedValueName); convertUdtsIntoEntities(rawValueName, mappedValueName, mappedValueType, getBuilder); } getBuilder diff --git 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java index ed809528d53..e2daf674e26 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/PropertyDefinition.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.api.core.data.SettableByName; import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.internal.mapper.processor.util.generation.PropertyType; import com.squareup.javapoet.CodeBlock; @@ -58,7 +59,8 @@ public interface PropertyDefinition { /** * @return The name of the "set" method associated with this property used to update the value of - * the property on the entity. + * the property on the entity, or {@code null} if the entity was marked as not {@link + * PropertyStrategy#mutable()}. 
*/ String getSetterName(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java index 74b75ca95b2..930f4ed279b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java @@ -23,4 +23,15 @@ public class Capitalizer { public static String decapitalize(String name) { return Introspector.decapitalize(Objects.requireNonNull(name)); } + + public static String capitalize(String name) { + Objects.requireNonNull(name); + if (name.isEmpty() || Character.isUpperCase(name.charAt(0))) { + return name; + } else { + char[] chars = name.toCharArray(); + chars[0] = Character.toUpperCase(chars[0]); + return new String(chars); + } + } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/PropertyType.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/PropertyType.java index 800b0539a6c..11d255c2344 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/PropertyType.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/PropertyType.java @@ -53,23 +53,33 @@ public static PropertyType parse(TypeMirror typeMirror, ProcessorContext context PropertyType elementType = parse(declaredType.getTypeArguments().get(0), context); return (elementType instanceof Simple) ? new Simple(typeMirror) - : new EntityList(elementType); + : new EntityList(typeMirror, elementType); } else if (context.getClassUtils().isSet(declaredType)) { PropertyType elementType = parse(declaredType.getTypeArguments().get(0), context); return (elementType instanceof Simple) ? 
new Simple(typeMirror) - : new EntitySet(elementType); + : new EntitySet(typeMirror, elementType); } else if (context.getClassUtils().isMap(declaredType)) { PropertyType keyType = parse(declaredType.getTypeArguments().get(0), context); PropertyType valueType = parse(declaredType.getTypeArguments().get(1), context); return (keyType instanceof Simple && valueType instanceof Simple) ? new Simple(typeMirror) - : new EntityMap(keyType, valueType); + : new EntityMap(typeMirror, keyType, valueType); } } return new Simple(typeMirror); } + private final TypeMirror typeMirror; + + protected PropertyType(TypeMirror typeMirror) { + this.typeMirror = typeMirror; + } + + public TypeMirror asTypeMirror() { + return typeMirror; + } + public abstract TypeName asTypeName(); /** @@ -87,6 +97,7 @@ public static class Simple extends PropertyType { public final TypeName typeName; public Simple(TypeMirror typeMirror) { + super(typeMirror); this.typeName = ClassName.get(typeMirror); } @@ -106,6 +117,7 @@ public static class SingleEntity extends PropertyType { public final ClassName entityName; public SingleEntity(DeclaredType declaredType) { + super(declaredType); this.entityName = (ClassName) TypeName.get(declaredType); } @@ -124,7 +136,8 @@ public TypeName asRawTypeName() { public static class EntityList extends PropertyType { public final PropertyType elementType; - public EntityList(PropertyType elementType) { + public EntityList(TypeMirror typeMirror, PropertyType elementType) { + super(typeMirror); this.elementType = elementType; } @@ -143,7 +156,8 @@ public TypeName asRawTypeName() { public static class EntitySet extends PropertyType { public final PropertyType elementType; - public EntitySet(PropertyType elementType) { + public EntitySet(TypeMirror typeMirror, PropertyType elementType) { + super(typeMirror); this.elementType = elementType; } @@ -163,7 +177,8 @@ public static class EntityMap extends PropertyType { public final PropertyType keyType; public final PropertyType 
valueType; - public EntityMap(PropertyType keyType, PropertyType valueType) { + public EntityMap(TypeMirror typeMirror, PropertyType keyType, PropertyType valueType) { + super(typeMirror); this.keyType = keyType; this.valueType = valueType; } diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityAnnotationTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityAnnotationTest.java index 545feb79669..6a72b1df678 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityAnnotationTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityAnnotationTest.java @@ -97,7 +97,7 @@ public void should_detect_boolean_getter() { StandardLocation.SOURCE_OUTPUT, "test", "Foo_BarHelper__MapperGenerated.java") .contentsAsUtf8String(); contents.contains("target = target.setBoolean(\"bool\", entity.isBool())"); - contents.contains("returnValue.setBool(source.getBoolean(\"bool\"))"); + contents.contains("boolean propertyValue = source.getBoolean(\"bool\");"); } @Test diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.java new file mode 100644 index 00000000000..ecd699ba93f --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.java @@ -0,0 +1,88 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.annotations; + +import com.datastax.oss.driver.api.mapper.entity.naming.GetterStyle; +import com.datastax.oss.driver.api.mapper.entity.naming.SetterStyle; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotates an {@link Entity} to customize certain aspects of the introspection process that + * determines which methods are considered as properties, and how new instances will be created. + * + *

          Example: + * + *

          + * @Entity
          + * @PropertyStrategy(getterStyle = FLUENT)
          + * public class Account {
          + *   ...
          + * }
          + * 
          + * + * This annotation can be inherited from an interface or parent class. + * + *

          When neither the entity class nor any of its parent is explicitly annotated, the mapper will + * assume context-dependent defaults: + * + *

            + *
          • for a Scala case class: {@code mutable = false} and {@code getterStyle = FLUENT}. The + * mapper detects this case by checking if the entity implements {@code scala.Product}. + *
          • for a Kotlin data class: {@code mutable = false} and {@code getterStyle = JAVABEANS}. The + * mapper detects this case by checking if the entity is annotated with {@code + * kotlin.Metadata}, and if it has any method named {@code component1} (both of these are + * added automatically by the Kotlin compiler). + *
          • Java records (JDK 14 and above): {@code mutable = false} and {@code getterStyle = FLUENT}. + * The mapper detects this case by checking if the entity extends {@code java.lang.Record}. + *
          • any other case: {@code mutable = true}, {@code getterStyle = JAVABEANS} and {@code + * setterStyle = JAVABEANS}. + *
          + * + * Not that this only applies if the annotation is completely absent. If it is present with only + * some of its attributes, the remaining attributes will get the default declared by the annotation, + * not the context-dependent default above (for example, if a Kotlin data class is annotated with + * {@code @PropertyStrategy(getterStyle = FLUENT)}, it will be mutable). + */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.RUNTIME) +public @interface PropertyStrategy { + + /** The style of getter. See {@link GetterStyle} and its constants for more explanations. */ + GetterStyle getterStyle() default GetterStyle.JAVABEANS; + + /** + * The style of setter. See {@link SetterStyle} and its constants for more explanations. + * + *

          This has no effect if {@link #mutable()} is false. + */ + SetterStyle setterStyle() default SetterStyle.JAVABEANS; + + /** + * Whether the entity is mutable. + * + *

          If this is set to false: + * + *

            + *
          • the mapper won't try to discover setters for the properties; + *
          • it will assume that the entity class has a visible constructor that takes all the + * non-transient properties as arguments. + *
          + */ + boolean mutable() default true; +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/GetterStyle.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/GetterStyle.java new file mode 100644 index 00000000000..849265f62b4 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/GetterStyle.java @@ -0,0 +1,48 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.entity.naming; + +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; + +/** + * The style of getter that the mapper will look for when introspecting an entity class. + * + *

          Note that introspection always starts by looking for getters first: no-arg, non-void methods + * that follow the configured style. Then the mapper will try to find a matching field (which is not + * required), and, if the entity is mutable, a setter. + * + * @see PropertyStrategy + */ +public enum GetterStyle { + + /** + * "JavaBeans" style: the method name must start with "get", or "is" for boolean properties. The + * name of the property is the getter name without a prefix, and decapitalized, for example {@code + * int getFoo() => foo}. + */ + JAVABEANS, + + /** + * "Fluent" style: any name will match (as long as the no-arg, not-void rule also holds), and is + * considered to be the property name without any prefix. For example {@code int foo() => foo}. + * + *

          Note that this is the convention used in compiled Scala case classes. Whenever the mapper + * processes a type that implements {@code scala.Product}, it will switch to this style by + * default. + */ + FLUENT, + ; +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/SetterStyle.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/SetterStyle.java new file mode 100644 index 00000000000..4bd9b96b297 --- /dev/null +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/naming/SetterStyle.java @@ -0,0 +1,44 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.mapper.entity.naming; + +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; + +/** + * The style of setter that the mapper will look for when introspecting a mutable entity class. + * + *

          Note that introspection always starts by looking for getters first (see {@link GetterStyle}). + * Once a getter has been found, and if the entity is declared as {@link PropertyStrategy#mutable() + * mutable}, the mapper will try to find a matching setter: name inferred as described below, + * exactly one argument matching the property type, and the return type does not matter. + * + * @see PropertyStrategy + */ +public enum SetterStyle { + + /** + * "JavaBeans" style: the method name must start with "set", for example {@code int foo => + * setFoo(int)}. + */ + JAVABEANS, + + /** + * "Fluent" style: the method name must be the name of the property, without any prefix, for + * example {@code int foo => foo(int)}. + */ + FLUENT, + ; +} From abd82873501eea51c2710f3bd3a2ba8b254213bc Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 19 Aug 2020 17:45:57 -0700 Subject: [PATCH 561/979] JAVA-2864: Revisit mapper processor's messaging --- changelog/README.md | 1 + .../mapper/processor/DecoratedMessager.java | 183 ++++++------------ .../DefaultCodeGeneratorFactory.java | 3 +- .../mapper/processor/MapperProcessor.java | 4 +- .../dao/DaoDeleteMethodGenerator.java | 6 - .../dao/DaoGetEntityMethodGenerator.java | 5 - .../dao/DaoImplementationGenerator.java | 2 - .../dao/DaoIncrementMethodGenerator.java | 5 - .../dao/DaoInsertMethodGenerator.java | 2 - .../processor/dao/DaoMethodGenerator.java | 6 - .../dao/DaoQueryProviderMethodGenerator.java | 1 - .../dao/DaoSelectMethodGenerator.java | 1 - .../dao/DaoSetEntityMethodGenerator.java | 4 - .../dao/DaoUpdateMethodGenerator.java | 2 - .../mapper/processor/dao/EntityUtils.java | 3 - .../entity/DefaultEntityFactory.java | 54 ++---- .../MapperDaoFactoryMethodGenerator.java | 8 - .../mapper/MapperImplementationGenerator.java | 1 - .../DaoCompiledMethodGeneratorTest.java | 2 +- 19 files changed, 83 insertions(+), 210 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index f65450c221b..990fe276bf4 100644 --- 
a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [improvement] JAVA-2864: Revisit mapper processor's messaging - [new feature] JAVA-2816: Support immutability and fluent accessors in the mapper - [new feature] JAVA-2721: Add counter support in the mapper - [bug] JAVA-2863: Reintroduce mapper processor dependency to SLF4J diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java index fb3c6d7ddb7..5a8ca4b2561 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DecoratedMessager.java @@ -15,18 +15,19 @@ */ package com.datastax.oss.driver.internal.mapper.processor; -import com.datastax.oss.driver.internal.core.util.Reflection; -import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; import javax.annotation.processing.Messager; import javax.lang.model.element.Element; -import javax.lang.model.element.ExecutableElement; -import javax.lang.model.element.TypeElement; import javax.tools.Diagnostic; /** Wraps {@link Messager} to provide convenience methods. */ public class DecoratedMessager { private final Messager messager; + private final Set emittedMessages = new HashSet<>(); public DecoratedMessager(Messager messager) { this.messager = messager; @@ -37,135 +38,79 @@ public void warn(String template, Object... arguments) { messager.printMessage(Diagnostic.Kind.WARNING, String.format(template, arguments)); } - /** Emits a warning for a type. */ - public void warn(TypeElement typeElement, String template, Object... 
arguments) { - messager.printMessage(Diagnostic.Kind.WARNING, String.format(template, arguments), typeElement); + /** Emits a warning for a specific element. */ + public void warn(Element element, String template, Object... arguments) { + message(Diagnostic.Kind.WARNING, element, template, arguments); } - /** Emits an error for a type. */ - public void error(TypeElement typeElement, String template, Object... arguments) { - messager.printMessage(Diagnostic.Kind.ERROR, String.format(template, arguments), typeElement); + /** Emits an error for a specific element. */ + public void error(Element element, String template, Object... arguments) { + message(Diagnostic.Kind.ERROR, element, template, arguments); } - /** - * Emits a warning for a program element that might be inherited from another type. - * - * @param targetElement the element to target. - * @param processedType the type that we were processing when we detected the issue. - */ - public void warn( - Element targetElement, TypeElement processedType, String template, Object... arguments) { - new ElementMessager(targetElement, processedType) - .print(Diagnostic.Kind.WARNING, template, arguments); + private void message( + Diagnostic.Kind level, Element element, String template, Object[] arguments) { + if (emittedMessages.add(new MessageId(level, element, template, arguments))) { + messager.printMessage( + level, formatLocation(element) + String.format(template, arguments), element); + } } - /** - * Emits an error for a program element that might be inherited from another type. - * - * @param targetElement the element to target. - * @param processedType the type that we were processing when we detected the issue. - */ - public void error( - Element targetElement, TypeElement processedType, String template, Object... 
arguments) { - new ElementMessager(targetElement, processedType) - .print(Diagnostic.Kind.ERROR, template, arguments); + private static String formatLocation(Element element) { + switch (element.getKind()) { + case CLASS: + case INTERFACE: + return String.format("[%s] ", element.getSimpleName()); + case FIELD: + case METHOD: + case CONSTRUCTOR: + return String.format( + "[%s.%s] ", element.getEnclosingElement().getSimpleName(), element.getSimpleName()); + case PARAMETER: + Element method = element.getEnclosingElement(); + Element type = method.getEnclosingElement(); + return String.format( + "[%s.%s, parameter %s] ", + type.getSimpleName(), method.getSimpleName(), element.getSimpleName()); + default: + // We don't emit messages for other types of elements in the mapper processor. Handle + // gracefully nevertheless: + return String.format("[%s] ", element); + } } - /** - * Abstracts logic to produce better messages if the target element is inherited from a compiled - * type. - * - *

          Consider the following situation: - * - *

          -   *   interface BaseDao {
          -   *     @Select
          -   *     void select();
          -   *   }
          -   *   @Dao
          -   *   interface ConcreteDao extends BaseDao {}
          -   * 
          - * - * If {@code BaseDao} belongs to a JAR dependency, it is already compiled and the warning or error - * message can't reference a file or line number, it doesn't even mention {@code ConcreteDao}. - * - *

          The goal of this class is to detect those cases, and issue the message on {@code - * ConcreteDao} instead. - */ - private class ElementMessager { - - private final Element actualTargetElement; - // Additional location information that will get prepended to the message - private final String locationInfo; + private static class MessageId { - /** - * @param processedType the type that we are currently processing ({@code ConcreteDao} in the - * example above). - */ - ElementMessager(@NonNull Element intendedTargetElement, @NonNull TypeElement processedType) { + private final Diagnostic.Kind level; + private final Element element; + private final String template; + private final Object[] arguments; - TypeElement declaringType; - switch (intendedTargetElement.getKind()) { - case CLASS: - case INTERFACE: - if (processedType.equals(intendedTargetElement) - || isSourceFile((TypeElement) intendedTargetElement)) { - this.actualTargetElement = intendedTargetElement; - this.locationInfo = ""; - } else { - this.actualTargetElement = processedType; - this.locationInfo = - String.format("[Ancestor %s]", intendedTargetElement.getSimpleName()); - } - break; - case FIELD: - case METHOD: - case CONSTRUCTOR: - declaringType = (TypeElement) intendedTargetElement.getEnclosingElement(); - if (processedType.equals(declaringType) || isSourceFile(declaringType)) { - this.actualTargetElement = intendedTargetElement; - this.locationInfo = ""; - } else { - this.actualTargetElement = processedType; - this.locationInfo = - String.format( - "[%s inherited from %s] ", - intendedTargetElement, declaringType.getSimpleName()); - } - break; - case PARAMETER: - ExecutableElement method = - (ExecutableElement) intendedTargetElement.getEnclosingElement(); - declaringType = (TypeElement) method.getEnclosingElement(); - if (processedType.equals(declaringType) || isSourceFile(declaringType)) { - this.actualTargetElement = intendedTargetElement; - this.locationInfo = ""; - } else { - 
this.actualTargetElement = processedType; - this.locationInfo = - String.format( - "[Parameter %s of %s inherited from %s] ", - intendedTargetElement.getSimpleName(), - method.getSimpleName(), - declaringType.getSimpleName()); - } - break; - default: - // We don't emit messages for other types of elements in the mapper processor. Handle - // gracefully nevertheless: - this.actualTargetElement = intendedTargetElement; - this.locationInfo = ""; - break; - } + private MessageId(Diagnostic.Kind level, Element element, String template, Object[] arguments) { + this.level = level; + this.element = element; + this.template = template; + this.arguments = arguments; } - void print(Diagnostic.Kind level, String template, Object... arguments) { - messager.printMessage( - level, String.format(locationInfo + template, arguments), actualTargetElement); + @Override + public boolean equals(Object other) { + if (other == this) { + return true; + } else if (other instanceof MessageId) { + MessageId that = (MessageId) other; + return this.level == that.level + && Objects.equals(this.element, that.element) + && Objects.equals(this.template, that.template) + && Arrays.deepEquals(this.arguments, that.arguments); + } else { + return false; + } } - private boolean isSourceFile(TypeElement element) { - return Reflection.loadClass(null, element.getQualifiedName().toString()) == null; + @Override + public int hashCode() { + return Objects.hash(level, element, template, Arrays.hashCode(arguments)); } } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java index bcaececbcb6..a687b740a5c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/DefaultCodeGeneratorFactory.java @@ -82,8 +82,7 @@ public Optional newMapperImplementationMethod( MapperImplementationSharedCode enclosingClass) { if (methodElement.getAnnotation(DaoFactory.class) != null) { return Optional.of( - new MapperDaoFactoryMethodGenerator( - methodElement, processedType, enclosingClass, context)); + new MapperDaoFactoryMethodGenerator(methodElement, enclosingClass, context)); } else { return Optional.empty(); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java index ec3b870def6..8064efd2376 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/MapperProcessor.java @@ -125,13 +125,13 @@ protected void processAnnotatedTypes( generatorFactory.apply(typeElement).generate(); } catch (Exception e) { messager.error( - (TypeElement) element, + element, "Unexpected error while writing generated code: %s", Throwables.getStackTraceAsString(e)); } } else { messager.error( - (TypeElement) element, + element, "Only %s elements can be annotated with %s (got %s)", expectedKind, annotationClass.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index 8d710e9d491..e80ff89eadf 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -94,7 +94,6 @@ public Optional 
generate() { .getMessager() .error( methodElement, - processedType, "Invalid annotation parameters: %s cannot have both ifExists and customIfClause", Delete.class.getSimpleName()); return Optional.empty(); @@ -118,7 +117,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Wrong number of parameters: %s methods with no custom clause " + "must take either an entity instance, or the primary key components", Delete.class.getSimpleName()); @@ -139,7 +137,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods that have a custom where clause " + "must not take an Entity (%s) as a parameter", Delete.class.getSimpleName(), @@ -154,7 +151,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Missing entity class: %s methods that do not operate on an entity " + "instance must have an 'entityClass' argument", Delete.class.getSimpleName()); @@ -178,7 +174,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods that have a custom if clause" + "must specify the entire primary key (expected primary keys of %s: %s)", Delete.class.getSimpleName(), @@ -270,7 +265,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Wrong number of parameters: %s methods can only have additional " + "parameters if they specify a custom WHERE or IF clause", Delete.class.getSimpleName()); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java index 116713f5779..03fd61008f5 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java @@ -68,7 +68,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Wrong number of parameters: %s methods must have exactly one", GetEntity.class.getSimpleName()); return Optional.empty(); @@ -85,7 +84,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid parameter type: %s methods must take a %s, %s or %s", GetEntity.class.getSimpleName(), GettableByName.class.getSimpleName(), @@ -108,7 +106,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must return %s if the argument is %s", GetEntity.class.getSimpleName(), PagingIterable.class.getSimpleName(), @@ -123,7 +120,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must return %s if the argument is %s", GetEntity.class.getSimpleName(), MappedAsyncPagingIterable.class.getSimpleName(), @@ -139,7 +135,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: " + "%s methods must return a %s-annotated class, or a %s or %s thereof", GetEntity.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java index 008e2c054b6..479da635e20 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java @@ -256,7 +256,6 @@ private Map parseTypeParameters(TypeMirror mirror) { .getMessager() .error( element, - interfaceElement, "Could not resolve 
type parameter %s " + "on %s from child interfaces. This error usually means an interface " + "was inappropriately annotated with @%s. Interfaces should only be annotated " @@ -325,7 +324,6 @@ protected JavaFile.Builder getContents() { .getMessager() .error( methodElement, - interfaceElement, "Unrecognized method signature: no implementation will be generated"); } else { maybeGenerator.flatMap(MethodGenerator::generate).ifPresent(classBuilder::addMethod); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java index 94ba031c856..2b064490f0a 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java @@ -85,7 +85,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Missing entity class: %s methods must always have an 'entityClass' argument", Increment.class.getSimpleName()); return Optional.empty(); @@ -114,7 +113,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods must specify the entire primary key " + "(expected primary keys of %s: %s)", Increment.class.getSimpleName(), @@ -152,7 +150,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid argument type: increment parameters of %s methods can only be " + "primitive longs or java.lang.Long. 
Offending parameter: '%s' (%s)", Increment.class.getSimpleName(), @@ -239,7 +236,6 @@ private void generatePrepareRequest( .getMessager() .error( methodElement, - processedType, "%s method must take at least one parameter representing an increment to a " + "counter column", Increment.class.getSimpleName()); @@ -283,7 +279,6 @@ private void generatePrepareRequest( .getMessager() .error( parameter, - processedType, "Could not match '%s' with any counter column in %s (expected one of: %s). " + "You can also specify a CQL name directly with @%s.", parameter.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 93a0c84526b..945cfeda370 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -111,7 +111,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "%s methods must take the entity to insert as the first parameter", Insert.class.getSimpleName()); return Optional.empty(); @@ -129,7 +128,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must return the same entity as their argument ", Insert.class.getSimpleName()); return Optional.empty(); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java index a09e70eb632..c79f7929836 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoMethodGenerator.java @@ -84,7 +84,6 @@ protected DaoReturnType parseAndValidateReturnType( .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must return one of %s", annotationName, validKinds.stream() @@ -120,7 +119,6 @@ protected void maybeAddSimpleClause( .getMessager() .warn( methodElement, - processedType, "Invalid " + valueDescription + " value: " @@ -137,7 +135,6 @@ protected void maybeAddSimpleClause( .getMessager() .warn( methodElement, - processedType, "Invalid " + valueDescription + " value: " @@ -214,7 +211,6 @@ protected boolean validateCqlNamesPresent(List parame .getMessager() .error( methodElement, - processedType, "Parameter %s is declared in a compiled method " + "and refers to a bind marker " + "and thus must be annotated with @%s", @@ -235,7 +231,6 @@ protected void warnIfCqlNamePresent(List parameters) .getMessager() .warn( methodElement, - processedType, "Parameter %s does not refer to a bind marker, " + "@%s annotation will be ignored", parameter.getSimpleName(), CqlName.class.getSimpleName()); @@ -310,7 +305,6 @@ protected TypeElement getEntityClassFromAnnotation(Class annotation) { .getMessager() .warn( methodElement, - processedType, "Too many entity classes: %s must have at most one 'entityClass' argument " + "(will use the first one: %s)", annotation.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java index 2cf5ecd05be..3fdb99cad09 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryProviderMethodGenerator.java @@ 
-142,7 +142,6 @@ private List getEntityHelperTypes() { .getMessager() .error( methodElement, - processedType, "Invalid annotation configuration: the elements in %s.entityHelpers " + "must be %s-annotated classes (offending element: %s)", QueryProvider.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 6e27da5d6ba..d29f8e68099 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -256,7 +256,6 @@ private void addOrdering(String orderingSpec, MethodSpec.Builder methodBuilder) .getMessager() .error( methodElement, - processedType, "Can't parse ordering '%s', expected a column name followed by ASC or DESC", orderingSpec); return; diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java index 0729612cef2..71b40976a90 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java @@ -60,7 +60,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Wrong number of parameters: %s methods must have two", SetEntity.class.getSimpleName()); return Optional.empty(); @@ -86,7 +85,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Wrong parameter types: %s methods must take a %s " + "and an annotated entity (in any 
order)", SetEntity.class.getSimpleName(), @@ -103,7 +101,6 @@ public Optional generate() { .getMessager() .warn( methodElement, - processedType, "BoundStatement is immutable, " + "this method will not modify '%s' in place. " + "It should probably return BoundStatement rather than void", @@ -114,7 +111,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must either be void, or return the same " + "type as their settable parameter (in this case, %s to match '%s')", SetEntity.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 7055c134533..288778ee9c8 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -107,7 +107,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "%s methods must take the entity to update as the first parameter", Update.class.getSimpleName()); return Optional.empty(); @@ -240,7 +239,6 @@ private void maybeAddIfClause(MethodSpec.Builder methodBuilder, Update annotatio .getMessager() .error( methodElement, - processedType, "Invalid annotation parameters: %s cannot have both ifExists and customIfClause", Update.class.getSimpleName()); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java index f38ad1525f4..55de618bd93 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/EntityUtils.java @@ -131,7 +131,6 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods%s " + "must at least specify partition key components " + "(expected partition key of %s: %s)", @@ -147,7 +146,6 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods%s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). Too many parameters provided", @@ -167,7 +165,6 @@ public static boolean areParametersValid( .getMessager() .error( methodElement, - processedType, "Invalid parameter list: %s methods%s " + "must match the primary key components in the exact order " + "(expected primary key of %s: %s). Mismatch at index %d: %s should be %s", diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java index f9949a1a56f..e82832bf16f 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java @@ -166,14 +166,8 @@ public EntityDefinition getDefinition(TypeElement processedClass) { VariableElement field = findField(typeHierarchy, propertyName, typeMirror); Map, Annotation> propertyAnnotations = - scanPropertyAnnotations(typeHierarchy, getMethod, field, processedClass); - if (isTransient( - propertyAnnotations, - propertyName, - transientProperties, - getMethod, - field, - processedClass)) { + scanPropertyAnnotations(typeHierarchy, getMethod, field); + if (isTransient(propertyAnnotations, propertyName, transientProperties, 
getMethod, field)) { continue; } @@ -181,7 +175,7 @@ public EntityDefinition getDefinition(TypeElement processedClass) { int clusteringColumnIndex = getClusteringColumnIndex(propertyAnnotations); Optional customCqlName = getCustomCqlName(propertyAnnotations); Optional computedFormula = - getComputedFormula(propertyAnnotations, getMethod, field, processedClass); + getComputedFormula(propertyAnnotations, getMethod, field); PropertyType propertyType = PropertyType.parse(typeMirror, context); PropertyDefinition property = @@ -202,7 +196,6 @@ public EntityDefinition getDefinition(TypeElement processedClass) { .getMessager() .error( getMethod, - processedClass, "Duplicate partition key index: if multiple properties are annotated " + "with @%s, the annotation must be parameterized with an integer " + "indicating the position. Found duplicate index %d for %s and %s.", @@ -219,7 +212,6 @@ public EntityDefinition getDefinition(TypeElement processedClass) { .getMessager() .error( getMethod, - processedClass, "Duplicate clustering column index: if multiple properties are annotated " + "with @%s, the annotation must be parameterized with an integer " + "indicating the position. Found duplicate index %d for %s and %s.", @@ -364,8 +356,7 @@ private int getClusteringColumnIndex(Map, Annotation private Optional getComputedFormula( Map, Annotation> annotations, ExecutableElement getMethod, - @Nullable VariableElement field, - TypeElement processedClass) { + @Nullable VariableElement field) { Computed annotation = (Computed) annotations.get(Computed.class); if (annotation != null) { @@ -374,9 +365,7 @@ private Optional getComputedFormula( if (value.isEmpty()) { Element element = field != null && field.getAnnotation(Computed.class) != null ? 
field : getMethod; - context - .getMessager() - .error(element, processedClass, "@Computed value should be non-empty."); + context.getMessager().error(element, "@Computed value should be non-empty."); } return Optional.of(value); } @@ -478,8 +467,7 @@ private boolean isTransient( String propertyName, Set transientProperties, ExecutableElement getMethod, - @Nullable VariableElement field, - TypeElement processedClass) { + @Nullable VariableElement field) { Transient transientAnnotation = (Transient) annotations.get(Transient.class); // check if property name is included in @TransientProperties @@ -499,7 +487,6 @@ private boolean isTransient( .getMessager() .error( element, - processedClass, "Property that is considered transient cannot be annotated with @%s.", exclusiveAnnotation.getSimpleName()); } @@ -522,16 +509,12 @@ private Optional getPropertyStrategy(Set typeHier } private void reportMultipleAnnotationError( - Element element, - Class a0, - Class a1, - TypeElement processedClass) { + Element element, Class a0, Class a1) { if (a0 == a1) { context .getMessager() .warn( element, - processedClass, "@%s should be used either on the field or the getter, but not both. " + "The annotation on this field will be ignored.", a0.getSimpleName()); @@ -540,7 +523,6 @@ private void reportMultipleAnnotationError( .getMessager() .error( element, - processedClass, "Properties can't be annotated with both @%s and @%s.", a0.getSimpleName(), a1.getSimpleName()); @@ -550,14 +532,13 @@ private void reportMultipleAnnotationError( private Map, Annotation> scanPropertyAnnotations( Set typeHierarchy, ExecutableElement getMethod, - @Nullable VariableElement field, - TypeElement processedClass) { + @Nullable VariableElement field) { Map, Annotation> annotations = Maps.newHashMap(); // scan methods first as they should take precedence. 
- scanMethodAnnotations(typeHierarchy, getMethod, annotations, processedClass); + scanMethodAnnotations(typeHierarchy, getMethod, annotations); if (field != null) { - scanFieldAnnotations(field, annotations, processedClass); + scanFieldAnnotations(field, annotations); } return ImmutableMap.copyOf(annotations); @@ -575,9 +556,7 @@ private Class getExclusiveAnnotation( } private void scanFieldAnnotations( - VariableElement field, - Map, Annotation> annotations, - TypeElement processedClass) { + VariableElement field, Map, Annotation> annotations) { Class exclusiveAnnotation = getExclusiveAnnotation(annotations); for (Class annotationClass : PROPERTY_ANNOTATIONS) { Annotation annotation = field.getAnnotation(annotationClass); @@ -586,8 +565,7 @@ private void scanFieldAnnotations( if (exclusiveAnnotation == null) { exclusiveAnnotation = annotationClass; } else { - reportMultipleAnnotationError( - field, exclusiveAnnotation, annotationClass, processedClass); + reportMultipleAnnotationError(field, exclusiveAnnotation, annotationClass); } } if (!annotations.containsKey(annotationClass)) { @@ -600,8 +578,7 @@ private void scanFieldAnnotations( private void scanMethodAnnotations( Set typeHierarchy, ExecutableElement getMethod, - Map, Annotation> annotations, - TypeElement processedClass) { + Map, Annotation> annotations) { Class exclusiveAnnotation = getExclusiveAnnotation(annotations); for (Class annotationClass : PROPERTY_ANNOTATIONS) { Optional> annotation = @@ -612,10 +589,7 @@ private void scanMethodAnnotations( exclusiveAnnotation = annotationClass; } else { reportMultipleAnnotationError( - annotation.get().getElement(), - exclusiveAnnotation, - annotationClass, - processedClass); + annotation.get().getElement(), exclusiveAnnotation, annotationClass); } } if (!annotations.containsKey(annotationClass)) { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java index 23f37cc51da..d18be18e866 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperDaoFactoryMethodGenerator.java @@ -46,17 +46,14 @@ public class MapperDaoFactoryMethodGenerator implements MethodGenerator { private final ExecutableElement methodElement; - private final TypeElement processedType; private final MapperImplementationSharedCode enclosingClass; private final ProcessorContext context; public MapperDaoFactoryMethodGenerator( ExecutableElement methodElement, - TypeElement processedType, MapperImplementationSharedCode enclosingClass, ProcessorContext context) { this.methodElement = methodElement; - this.processedType = processedType; this.enclosingClass = enclosingClass; this.context = context; } @@ -94,7 +91,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid return type: %s methods must return a %s-annotated interface, " + "or future thereof", DaoFactory.class.getSimpleName(), @@ -137,7 +133,6 @@ public Optional generate() { .getMessager() .error( methodElement, - processedType, "Invalid parameter annotations: " + "%s method parameters must be annotated with @%s, @%s or @%s", DaoFactory.class.getSimpleName(), @@ -211,7 +206,6 @@ private String validateKeyspaceOrTableParameter( .getMessager() .error( candidate, - processedType, "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s", annotation.getSimpleName(), DaoFactory.class.getSimpleName(), @@ -234,7 +228,6 @@ private String validateExecutionProfile( .getMessager() .error( candidate, - processedType, "Invalid parameter type: @%s-annotated parameter of %s methods must be of type %s or %s ", 
DaoProfile.class.getSimpleName(), DaoFactory.class.getSimpleName(), @@ -252,7 +245,6 @@ private boolean isSingleAnnotation( .getMessager() .error( candidate, - processedType, "Invalid parameter annotations: " + "only one %s method parameter can be annotated with @%s", DaoFactory.class.getSimpleName(), diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java index 0dc40f4b11f..bd2bf69428f 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java @@ -108,7 +108,6 @@ protected JavaFile.Builder getContents() { .getMessager() .error( methodElement, - interfaceElement, "Unrecognized method signature: no implementation will be generated"); } else { maybeGenerator.flatMap(MethodGenerator::generate).ifPresent(classContents::addMethod); diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java index 96a5e88e449..dcf2e9e196c 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/compiled/DaoCompiledMethodGeneratorTest.java @@ -30,7 +30,7 @@ public class DaoCompiledMethodGeneratorTest extends DaoMethodGeneratorTest { @Test public void should_fail_with_expected_error() { should_fail_with_expected_error( - "[findByDescriptionCompiledWrong(java.lang.String) inherited from 
CompiledProductDao] " + "[CompiledProductDao.findByDescriptionCompiledWrong] " + "Parameter arg0 is declared in a compiled method " + "and refers to a bind marker " + "and thus must be annotated with @CqlName", From 880ff7b999a9e55fcd55a27171a779a210f7c7e5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 6 Aug 2020 10:35:19 +0200 Subject: [PATCH 562/979] Use wildcards in exclusions --- manual/core/integration/README.md | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 001128fe55b..e693cb9c2b8 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -462,11 +462,7 @@ If you don't use either of those features, you can safely exclude the dependency com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind + * @@ -507,11 +503,7 @@ If you don't use DSE graph at all, you can exclude the dependencies: org.apache.tinkerpop - gremlin-core - - - org.apache.tinkerpop - tinkergraph-gremlin + * From 959355429e458363f2ea8bd15baea460c9fa332b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 6 Aug 2020 10:35:46 +0200 Subject: [PATCH 563/979] Mention other metrics frameworks --- manual/core/integration/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index e693cb9c2b8..e2f43b5926d 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -408,7 +408,8 @@ The driver exposes [metrics](../metrics/) through the [Dropwizard](http://metrics.dropwizard.io/4.0.0/manual/index.html) library. The dependency is declared as required, but metrics are optional. 
If you've disabled all metrics, -and never call [Session.getMetrics] anywhere in your application, you can remove the dependency: +or if you are using a [different metrics framework](../../developer/common/metrics), and you never +call [Session.getMetrics] anywhere in your application, then you can remove the dependency: ```xml From c629826b0a32095fe2d9062d46c2a945ab192304 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 6 Aug 2020 10:36:39 +0200 Subject: [PATCH 564/979] Mention json codecs --- manual/core/integration/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index e2f43b5926d..8bdd5b769c9 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -451,7 +451,8 @@ If all of these metrics are disabled, you can remove the dependency: [Jackson](https://github.com/FasterXML/jackson) is used: * when connecting to [Datastax Astra](../../cloud/); -* when Insights monitoring is enabled. +* when Insights monitoring is enabled; +* when [Json codecs](../custom_codecs) are being used. 
If you don't use either of those features, you can safely exclude the dependency: From a0e913ccda152d3a9e24d81fa43685e3cf195012 Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Thu, 20 Aug 2020 23:16:52 -0500 Subject: [PATCH 565/979] JAVA-2855: Allow selection of the metrics framework via the config --- changelog/README.md | 1 + .../api/core/config/DefaultDriverOption.java | 7 ++ .../driver/api/core/config/OptionsMap.java | 1 + .../api/core/config/TypedDriverOption.java | 4 + .../core/session/ProgrammaticArguments.java | 20 ++- .../oss/driver/api/core/session/Session.java | 11 +- .../api/core/session/SessionBuilder.java | 23 ++++ .../core/context/DefaultDriverContext.java | 24 +++- .../core/context/InternalDriverContext.java | 16 +++ .../metrics/DropwizardMetricsFactory.java | 32 ++++- core/src/main/resources/reference.conf | 22 ++++ .../context/MockedDriverContextFactory.java | 4 + .../metrics/DropwizardMetricsFactoryTest.java | 42 +++++++ .../common/AbstractMetricsTestBase.java | 17 ++- .../micrometer/MicrometerMetricsIT.java | 39 ++---- .../microprofile/MicroProfileMetricsIT.java | 39 ++---- manual/core/integration/README.md | 7 +- manual/core/metrics/README.md | 85 ++++++++++++- manual/developer/common/.nav | 1 - manual/developer/common/README.md | 1 - manual/developer/common/metrics/README.md | 117 ------------------ .../micrometer/MicrometerDriverContext.java | 45 ------- .../micrometer/MicrometerMetricsFactory.java | 34 +++-- .../MicrometerMetricsFactoryTest.java | 52 ++++++++ .../MicroProfileDriverContext.java | 47 ------- .../MicroProfileMetricsFactory.java | 36 ++++-- .../MicroProfileMetricsFactoryTest.java | 58 +++++++++ 27 files changed, 471 insertions(+), 314 deletions(-) delete mode 100644 manual/developer/common/metrics/README.md delete mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java delete mode 100644 
metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java diff --git a/changelog/README.md b/changelog/README.md index 990fe276bf4..a74825f7b00 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [improvement] JAVA-2855: Allow selection of the metrics framework via the config - [improvement] JAVA-2864: Revisit mapper processor's messaging - [new feature] JAVA-2816: Support immutability and fluent accessors in the mapper - [new feature] JAVA-2721: Add counter support in the mapper diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 929886aad8a..55e50ed9069 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -816,6 +816,13 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link java.time.Duration Duration} */ METRICS_NODE_EXPIRE_AFTER("advanced.metrics.node.expire-after"), + + /** + * The classname of the desired MetricsFactory implementation. + * + *

          Value-type: {@link String} + */ + METRICS_FACTORY_CLASS("advanced.metrics.factory.class"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 941712541fc..6de7fc76355 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -321,6 +321,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); + map.put(TypedDriverOption.METRICS_FACTORY_CLASS, "DropwizardMetricsFactory"); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(12)); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 7ef5e022c1c..c7c80fb9bf4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -705,6 +705,10 @@ public String toString() { public static final TypedDriverOption METRICS_NODE_EXPIRE_AFTER = new TypedDriverOption<>(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, GenericType.DURATION); + /** The classname of the desired MetricsFactory implementation. 
*/ + public static final TypedDriverOption METRICS_FACTORY_CLASS = + new TypedDriverOption<>(DefaultDriverOption.METRICS_FACTORY_CLASS, GenericType.STRING); + private static Iterable> introspectBuiltInValues() { try { ImmutableList.Builder> result = ImmutableList.builder(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index a28c76d5d0c..6d693f69b72 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -60,6 +60,7 @@ public static Builder builder() { private final String startupApplicationName; private final String startupApplicationVersion; private final MutableCodecRegistry codecRegistry; + private final Object metricRegistry; private ProgrammaticArguments( @NonNull List> typeCodecs, @@ -75,7 +76,8 @@ private ProgrammaticArguments( @Nullable UUID startupClientId, @Nullable String startupApplicationName, @Nullable String startupApplicationVersion, - @Nullable MutableCodecRegistry codecRegistry) { + @Nullable MutableCodecRegistry codecRegistry, + @Nullable Object metricRegistry) { this.typeCodecs = typeCodecs; this.nodeStateListener = nodeStateListener; @@ -91,6 +93,7 @@ private ProgrammaticArguments( this.startupApplicationName = startupApplicationName; this.startupApplicationVersion = startupApplicationVersion; this.codecRegistry = codecRegistry; + this.metricRegistry = metricRegistry; } @NonNull @@ -163,6 +166,11 @@ public MutableCodecRegistry getCodecRegistry() { return codecRegistry; } + @Nullable + public Object getMetricRegistry() { + return metricRegistry; + } + public static class Builder { private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); @@ -180,6 +188,7 @@ public static class Builder { private String startupApplicationName; private String 
startupApplicationVersion; private MutableCodecRegistry codecRegistry; + private Object metricRegistry; @NonNull public Builder addTypeCodecs(@NonNull TypeCodec... typeCodecs) { @@ -289,6 +298,12 @@ public Builder withCodecRegistry(@Nullable MutableCodecRegistry codecRegistry) { return this; } + @NonNull + public Builder withMetricRegistry(@Nullable Object metricRegistry) { + this.metricRegistry = metricRegistry; + return this; + } + @NonNull public ProgrammaticArguments build() { return new ProgrammaticArguments( @@ -305,7 +320,8 @@ public ProgrammaticArguments build() { startupClientId, startupApplicationName, startupApplicationVersion, - codecRegistry); + codecRegistry, + metricRegistry); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java index b57545cbc3d..a4157c48b4a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/Session.java @@ -197,8 +197,15 @@ default boolean checkSchemaAgreement() { Optional getKeyspace(); /** - * Returns a gateway to the driver's metrics, or {@link Optional#empty()} if all metrics are - * disabled. + * Returns a gateway to the driver's DropWizard metrics, or {@link Optional#empty()} if all + * metrics are disabled, or if the driver has been configured to use MicroProfile or Micrometer + * instead of DropWizard (see {@code advanced.metrics.factory.class} in the configuration). + * + *

          {@link Metrics} was originally intended to allow programmatic access to the metrics, but it + * has a hard dependency to the DropWizard API, which makes it unsuitable for alternative metric + * frameworks. A workaround is to inject your own metric registry with {@link + * SessionBuilder#withMetricRegistry(Object)} when you build the session. You can then use the + * framework's proprietary APIs to retrieve the metrics from the registry. */ @NonNull Optional getMetrics(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 10fd7bca988..f4bab46022e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -650,6 +650,29 @@ public SelfT withApplicationVersion(@Nullable String applicationVersion) { return self; } + /** + * The metric registry object for storing driver metrics. + * + *

          The argument should be an instance of the base registry type for the metrics framework you + * are using (see {@code advanced.metrics.factory.class} in the configuration): + * + *

            + *
          • Dropwizard (the default): {@code com.codahale.metrics.MetricRegistry} + *
          • Micrometer: {@code io.micrometer.core.instrument.MeterRegistry} + *
          • MicroProfile: {@code org.eclipse.microprofile.metrics.MetricRegistry} + *
          + * + * Only MicroProfile requires an external instance of its registry to be provided. For + * Micrometer, if no Registry object is provided, Micrometer's {@code globalRegistry} will be + * used. For Dropwizard, if no Registry object is provided, an instance of {@code MetricRegistry} + * will be created and used. + */ + @NonNull + public SelfT withMetricRegistry(@Nullable Object metricRegistry) { + this.programmaticArgumentsBuilder.withMetricRegistry(metricRegistry); + return self; + } + /** * Creates the session with the options set by this builder. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 04d0ab21dd7..9ae82192e61 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -77,7 +77,6 @@ import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenFactoryRegistry; import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategyFactory; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; -import com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.pool.ChannelPoolFactory; import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; @@ -97,7 +96,6 @@ import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; import com.datastax.oss.protocol.internal.Compressor; import com.datastax.oss.protocol.internal.FrameCodec; import 
com.datastax.oss.protocol.internal.PrimitiveCodec; @@ -243,6 +241,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final UUID startupClientId; private final String startupApplicationName; private final String startupApplicationVersion; + private final Object metricRegistry; // A stack trace captured in the constructor. Used to extract information about the client // application. private final StackTraceElement[] initStackTrace; @@ -300,6 +299,7 @@ public DefaultDriverContext( stackTrace = new StackTraceElement[] {}; } this.initStackTrace = stackTrace; + this.metricRegistry = programmaticArguments.getMetricRegistry(); } /** @@ -615,7 +615,19 @@ protected PoolManager buildPoolManager() { } protected MetricsFactory buildMetricsFactory() { - return new DropwizardMetricsFactory(this, Ticker.systemTicker()); + return Reflection.buildFromConfig( + this, + DefaultDriverOption.METRICS_FACTORY_CLASS, + MetricsFactory.class, + "com.datastax.oss.driver.internal.core.metrics", + "com.datastax.oss.driver.internal.metrics.microprofile", + "com.datastax.oss.driver.internal.metrics.micrometer") + .orElseThrow( + () -> + new IllegalArgumentException( + String.format( + "Missing metrics factory, check your config (%s)", + DefaultDriverOption.METRICS_FACTORY_CLASS))); } protected RequestThrottler buildRequestThrottler() { @@ -1003,4 +1015,10 @@ public RequestLogFormatter getRequestLogFormatter() { public List getLifecycleListeners() { return lifecycleListenersRef.get(); } + + @Nullable + @Override + public Object getMetricRegistry() { + return metricRegistry; + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index 3d68ec9a5f3..3b17b98deef 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -180,4 +180,20 @@ default List getLifecycleListeners() { */ @NonNull RequestLogFormatter getRequestLogFormatter(); + + /** + * A metric registry for storing metrics. + * + *

          This will return the object from {@link + * SessionBuilder#withMetricRegistry(java.lang.Object)}. Access to this registry object is only + * intended for {@link MetricsFactory} implementations that need to expose a way to specify the + * registry external to the Factory implementation itself. + * + *

          The default metrics framework used by the Driver is DropWizard and does not need an external + * metrics registry object. + */ + @Nullable + default Object getMetricRegistry() { + return null; + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 6598596b423..5b81166668d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -18,6 +18,7 @@ import com.codahale.metrics.MetricRegistry; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; @@ -50,6 +51,10 @@ public class DropwizardMetricsFactory implements MetricsFactory { private final SessionMetricUpdater sessionUpdater; private final Cache metricsCache; + public DropwizardMetricsFactory(DriverContext context) { + this((InternalDriverContext) context, Ticker.systemTicker()); + } + public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { this.context = context; String logPrefix = context.getSessionName(); @@ -81,11 +86,28 @@ public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; this.metrics = null; } else { - this.registry = new MetricRegistry(); - DropwizardSessionMetricUpdater dropwizardSessionUpdater = - new DropwizardSessionMetricUpdater(enabledSessionMetrics, registry, context); - this.sessionUpdater = dropwizardSessionUpdater; - this.metrics = new 
DefaultMetrics(registry, dropwizardSessionUpdater); + // try to get the metric registry from the context + Object possibleMetricRegistry = context.getMetricRegistry(); + if (possibleMetricRegistry == null) { + // metrics are enabled, but a metric registry was not supplied to the context + // create a registry object + possibleMetricRegistry = new MetricRegistry(); + } + if (possibleMetricRegistry instanceof MetricRegistry) { + this.registry = (MetricRegistry) possibleMetricRegistry; + DropwizardSessionMetricUpdater dropwizardSessionUpdater = + new DropwizardSessionMetricUpdater(enabledSessionMetrics, registry, context); + this.sessionUpdater = dropwizardSessionUpdater; + this.metrics = new DefaultMetrics(registry, dropwizardSessionUpdater); + } else { + // Metrics are enabled, but the registry object is not an expected type + throw new IllegalArgumentException( + "Unexpected Metrics registry object. Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "', but was '" + + possibleMetricRegistry.getClass().getName() + + "'"); + } } } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 674cad7cba0..a025e816d30 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1228,6 +1228,28 @@ datastax-java-driver { } advanced.metrics { + # Metrics Factory configuration. + factory { + # The class for the metrics factory. + # + # The driver provides Dropwizard, Micrometer and MicroProfile metrics out of the box. + # To use Dropwizard, this value should be set to "DropwizardMetricsFactory". To use + # Micrometer, set the value to "MicrometerMetricsFactory". To use MicroProfile, set the value + # to "MicroProfileMetricsFactory". + # + # For Micrometer and MicroProfile, you will also need to add an additional dependency: + # com.datastax.oss:java-driver-metrics-micrometer and + # com.datastax.oss:java-driver-metrics-microprofile respectively. 
+ # + # If you would like to use a different metrics framework, change the factory class to the + # fully-qualified name of a class that implements + # com.datastax.oss.driver.internal.core.metrics.MetricsFactory. + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: no + class = DropwizardMetricsFactory + } # The session-level metrics (all disabled by default). # # Required: yes diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index f36e8b33804..15d6d296fc4 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -48,6 +48,10 @@ public static DefaultDriverContext defaultDriverContext( .thenReturn("none"); when(blankProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) .thenReturn(Duration.ofMinutes(5)); + when(blankProfile.isDefined(DefaultDriverOption.METRICS_FACTORY_CLASS)) + .thenReturn(true); + when(blankProfile.getString(DefaultDriverOption.METRICS_FACTORY_CLASS)) + .thenReturn("DropwizardMetricsFactory"); return blankProfile; }); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java index 7599b4e15cc..44d0131283f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java @@ -17,19 +17,26 @@ import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; +import static 
org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import ch.qos.logback.classic.Level; +import com.codahale.metrics.MetricRegistry; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.util.LoggerTest; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; +import java.util.Arrays; +import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; @@ -88,4 +95,39 @@ public static Object[][] acceptableEvictionTimes() { {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} }; } + + @Test + public void should_throw_if_registry_of_wrong_type() { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + // when + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getConfig()).thenReturn(config); + when(context.getSessionName()).thenReturn("MockSession"); + // registry object is not a registry type + when(context.getMetricRegistry()).thenReturn(Integer.MAX_VALUE); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + 
when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) + .thenReturn(enabledMetrics); + // then + try { + new DropwizardMetricsFactory(context); + fail( + "MetricsFactory should require correct registy object type: " + + MetricRegistry.class.getName()); + } catch (IllegalArgumentException iae) { + assertThat(iae.getMessage()) + .isEqualTo( + "Unexpected Metrics registry object. Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "', but was '" + + Integer.class.getName() + + "'"); + } + } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java index c17ff8ad80a..1748e91028d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java @@ -18,13 +18,13 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import java.util.Collection; @@ -47,7 +47,11 @@ public abstract class AbstractMetricsTestBase { .map(DefaultNodeMetric::getPath) .collect(Collectors.toList()); - protected abstract SessionBuilder getSessionBuilder(); 
+ protected Object getMetricRegistry() { + return null; + } + + protected abstract String getMetricFactoryClass(); protected abstract void assertMetrics(CqlSession session); @@ -59,10 +63,13 @@ public void should_expose_metrics() { SessionUtils.configLoaderBuilder() .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, ENABLED_SESSION_METRICS) .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, ENABLED_NODE_METRICS) + .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricFactoryClass()) .build(); - SessionBuilder builder = - getSessionBuilder().addContactEndPoints(CCM_RULE.getContactPoints()); - try (CqlSession session = (CqlSession) builder.withConfigLoader(loader).build()) { + CqlSessionBuilder builder = + CqlSession.builder().addContactEndPoints(CCM_RULE.getContactPoints()); + try (CqlSession session = + (CqlSession) + builder.withConfigLoader(loader).withMetricRegistry(getMetricRegistry()).build()) { for (int i = 0; i < 10; i++) { session.execute("SELECT release_version FROM system.local"); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java index a1e63d4b1f2..c9717475cc2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -19,16 +19,10 @@ import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import 
com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.categories.ParallelizableTests; -import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerDriverContext; import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; -import edu.umd.cs.findbugs.annotations.NonNull; import io.micrometer.core.instrument.Counter; import io.micrometer.core.instrument.Gauge; import io.micrometer.core.instrument.Meter; @@ -113,8 +107,13 @@ protected void assertMetrics(CqlSession session) { } @Override - protected SessionBuilder getSessionBuilder() { - return new MicrometerSessionBuilder(METER_REGISTRY); + protected Object getMetricRegistry() { + return METER_REGISTRY; + } + + @Override + protected String getMetricFactoryClass() { + return "MicrometerMetricsFactory"; } @Override @@ -166,28 +165,4 @@ public boolean matches(Meter obj) { } }; } - - private static class MicrometerSessionBuilder - extends SessionBuilder { - - private final MeterRegistry registry; - - MicrometerSessionBuilder(@NonNull MeterRegistry registry) { - this.registry = registry; - } - - @Override - @NonNull - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - return defaultSession; - } - - @Override - @NonNull - protected DriverContext buildContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments) { - return new MicrometerDriverContext(configLoader, programmaticArguments, registry); - } - } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java index 97dd5d2ba47..446bf9c309b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -19,16 +19,10 @@ import 
static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.categories.ParallelizableTests; -import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileDriverContext; import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; -import edu.umd.cs.findbugs.annotations.NonNull; import io.smallrye.metrics.MetricsRegistryImpl; import java.util.Collection; import java.util.Map.Entry; @@ -107,8 +101,13 @@ protected void assertMetrics(CqlSession session) { } @Override - protected SessionBuilder getSessionBuilder() { - return new MicroProfileSessionBuilder(METRIC_REGISTRY); + protected Object getMetricRegistry() { + return METRIC_REGISTRY; + } + + @Override + protected String getMetricFactoryClass() { + return "MicroProfileMetricsFactory"; } @Override @@ -184,28 +183,4 @@ public boolean matches(Entry metric) { } }; } - - private static class MicroProfileSessionBuilder - extends SessionBuilder { - - private final MetricRegistry registry; - - MicroProfileSessionBuilder(@NonNull MetricRegistry registry) { - this.registry = registry; - } - - @NonNull - @Override - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - return defaultSession; - } - - @NonNull - @Override - protected DriverContext buildContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments) { - return new MicroProfileDriverContext(configLoader, programmaticArguments, registry); - } - } } diff --git a/manual/core/integration/README.md 
b/manual/core/integration/README.md index 8bdd5b769c9..2eb71eeeb24 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -407,9 +407,10 @@ enable compression. See the [Compression](../compression/) page for more details The driver exposes [metrics](../metrics/) through the [Dropwizard](http://metrics.dropwizard.io/4.0.0/manual/index.html) library. -The dependency is declared as required, but metrics are optional. If you've disabled all metrics, -or if you are using a [different metrics framework](../../developer/common/metrics), and you never -call [Session.getMetrics] anywhere in your application, then you can remove the dependency: +The dependency is declared as required, but metrics are optional. If you've disabled all metrics, or +if you are using a [different metrics framework](../metrics/#changing-the-metrics-frameworks), and +you never call [Session.getMetrics] anywhere in your application, then you can remove the +dependency: ```xml diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 73ebbb73263..4a15a95f2eb 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -8,9 +8,10 @@ ----- -The driver exposes measurements of its internal behavior through the popular [Dropwizard Metrics] -library. Application developers can select which metrics are enabled, and export them to a -monitoring tool. +The driver exposes measurements of its internal behavior through a choice of three popular metrics +frameworks: [Dropwizard Metrics], [Micrometer Metrics] or [MicroProfile Metrics]. Application +developers can select a metrics framework, which metrics are enabled, and export them to a +monitoring tool. ### Structure @@ -52,10 +53,82 @@ If you specify a metric that doesn't exist, it will be ignored and a warning wil The `metrics` section may also contain additional configuration for some specific metrics; again, see the [reference configuration] for more details. 
+#### Changing the Metrics Frameworks + +The default metrics framework is Dropwizard. You can change this to either Micrometer or +MicroProfile in the configuration: + +``` +datastax-java-driver.advanced.metrics { + factory.class = MicrometerMetricsFactory +} +``` + +or + +``` +datastax-java-driver.advanced.metrics { + factory.class = MicroProfileMetricsFactory +} +``` + +In addition to the configuration change above, you will also need to include the appropriate module +in your project. For Micrometer: + +```xml + + com.datastax.oss + java-driver-metrics-micrometer + ${driver.version} + +``` + +For MicroProfile: + +```xml + + com.datastax.oss + java-driver-metrics-microprofile + ${driver.version} + +``` + +#### Metric Registry + +For any of the three metrics frameworks, you can provide an external Metric Registry object when +building a Session. This will easily allow your application to export the driver's operational +metrics to whatever reporting system you want to use. + +```java +CqlSessionBuilder builder = CqlSession.builder(); +builder.withMetricRegistry(myRegistryObject); +CqlSession session = builder.build(); +``` + +In the above example, `myRegistryObject` should be an instance of the base registry type for the +metrics framework you are using: + +``` +Dropwizard: com.codahale.metrics.MetricRegistry +Micrometer: io.micrometer.core.instrument.MeterRegistry +MicroProfile: org.eclipse.microprofile.metrics.MetricRegistry +``` + +**NOTE:** Only MicroProfile **requires** an external instance of its Registry to be provided. For +Micrometer, if no Registry object is provided, Micrometer's `globalRegistry` will be used. For +Dropwizard, if no Registry object is provided, an instance of `MetricRegistry` will be created and +used. + ### Export -The Dropwizard `MetricRegistry` is exposed via `session.getMetrics()`. You can retrieve it and -configure a `Reporter` to send the metrics to a monitoring tool. 
+The Dropwizard `MetricRegistry` is exposed via `session.getMetrics().getRegistry()`. You can +retrieve it and configure a `Reporter` to send the metrics to a monitoring tool. + +**NOTE:** At this time, `session.getMetrics()` is not available when using Micrometer or +MicroProfile metrics. If you wish to use either of those metrics frameworks, it is recommended to +provide a Registry implementation to the driver as described in the [Metric Registry +section](#metric-registry), and follow best practices for exporting that registry to your desired +reporting framework. #### JMX @@ -136,4 +209,6 @@ CSV files, SLF4J logs and Graphite. Refer to their [manual][Dropwizard manual] f [Dropwizard Metrics]: http://metrics.dropwizard.io/4.0.0/manual/index.html [Dropwizard Manual]: http://metrics.dropwizard.io/4.0.0/getting-started.html#reporting-via-http +[Micrometer Metrics]: https://micrometer.io/docs +[MicroProfile Metrics]: https://github.com/eclipse/microprofile-metrics [reference configuration]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/developer/common/.nav b/manual/developer/common/.nav index f52fddf980c..a841aca40ca 100644 --- a/manual/developer/common/.nav +++ b/manual/developer/common/.nav @@ -1,4 +1,3 @@ context concurrency event_bus -metrics \ No newline at end of file diff --git a/manual/developer/common/README.md b/manual/developer/common/README.md index fdc3b7144c1..c227f0826a5 100644 --- a/manual/developer/common/README.md +++ b/manual/developer/common/README.md @@ -7,4 +7,3 @@ This covers utilities or concept that are shared throughout the codebase: * we explain the two major approaches to deal with [concurrency](concurrency/) in the driver. * the [event bus](event_bus/) is used to decouple some of the internal components through asynchronous messaging. -* [metrics](metrics/) can be switched to a different implementation framework. 
\ No newline at end of file diff --git a/manual/developer/common/metrics/README.md b/manual/developer/common/metrics/README.md deleted file mode 100644 index 05c133ded54..00000000000 --- a/manual/developer/common/metrics/README.md +++ /dev/null @@ -1,117 +0,0 @@ -## Metrics - -[Driver Metrics](../../../core/metrics/) are reported via [Dropwizard Metrics] by default. - -With a bit of custom code, it is possible to switch to a different framework: we provide -alternative implementations for [Micrometer] and [Eclipse MicroProfile Metrics]. - -### Adding Metrics Framework Dependency - -Each implementation lives in a dedicated driver module. Add the following dependency to use -Micrometer: - -```xml - - com.datastax.oss - java-driver-metrics-micrometer - ${driver.version} - -``` - -or the following for MicroProfile: - -```xml - - com.datastax.oss - java-driver-metrics-microprofile - ${driver.version} - -``` - -### Enabling Metrics Framework On A Session - -Once the dependency has been added, you need to -[override the context component](../context/#overriding-a-context-component) `MetricsFactory`. If -this is the only customization you have, we provide context classes out of the box, so you just need -to write a custom session builder. 
- -For Micrometer: - -```java -import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerDriverContext; -import io.micrometer.core.instrument.MeterRegistry; - -public class CustomSessionBuilder extends SessionBuilder { - - private final MeterRegistry registry; - - public CustomSessionBuilder(MeterRegistry registry){ - this.registry = registry; - } - - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { - return new MicrometerDriverContext(configLoader, programmaticArguments, registry); - } - - @Override - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - // Nothing to do here, nothing changes on the session type - return defaultSession; - } -} -``` - -Or for MicroProfile: - -```java -import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileDriverContext; -import org.eclipse.microprofile.metrics.MetricRegistry; - -public class CustomSessionBuilder extends SessionBuilder { - - private final MetricRegistry registry; - - public CustomSessionBuilder(MetricRegistry registry){ - this.registry = registry; - } - - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { - return new MicroProfileDriverContext(configLoader, programmaticArguments, registry); - } - - @Override - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - // Nothing to do here, nothing changes on the session type - return defaultSession; - } -} -``` - -Use the new builder class to create your driver session: - -```java -CqlSession session = new CustomSessionBuilder() - .addContactPoint(new InetSocketAddress("1.2.3.4", 9042)) - .withLocalDatacenter("datacenter1") - .build(); -``` - -Notes: - -* For both Micrometer and MicroProfile metrics, your application will need to provide a Registry - implementation to which driver metrics will be registered. 
Some environments may provide access to - available instances of the registry - ([Spring](https://micrometer.io/docs/ref/spring/1.5#_configuring), for example, provides many - implementations of Micrometer MeterRegistry instances) that can be used. -* `Session.getMetrics()` will only work with the built-in implementation. Our `Metrics` interface - references DropWizard types directly, we didn't want to make it generic because it would - over-complicate the driver API. If you use another framework and need programmatic access to the - metrics, you'll need to find your own way to expose the registry. - -[Dropwizard Metrics]: http://metrics.dropwizard.io/4.0.0/manual/index.html -[Micrometer]: https://micrometer.io/ -[Eclipse MicroProfile Metrics]: https://projects.eclipse.org/projects/technology.microprofile/releases/metrics-2.3 \ No newline at end of file diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java deleted file mode 100644 index 41deba431eb..00000000000 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerDriverContext.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.metrics.micrometer; - -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import edu.umd.cs.findbugs.annotations.NonNull; -import io.micrometer.core.instrument.MeterRegistry; - -/** Implementation of {@link DriverContext} that provides for a Micrometer {@link MeterRegistry}. */ -public class MicrometerDriverContext extends DefaultDriverContext { - - private final MeterRegistry registry; - - public MicrometerDriverContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments, - @NonNull MeterRegistry registry) { - super(configLoader, programmaticArguments); - this.registry = registry; - } - - @Override - @NonNull - protected MetricsFactory buildMetricsFactory() { - return new MicrometerMetricsFactory(this, registry, Ticker.systemTicker()); - } -} diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java index 9e997045139..90d8badb226 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; import 
com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; @@ -53,8 +54,11 @@ public class MicrometerMetricsFactory implements MetricsFactory { private final SessionMetricUpdater sessionUpdater; private final Cache metricsCache; - public MicrometerMetricsFactory( - InternalDriverContext context, MeterRegistry registry, Ticker ticker) { + public MicrometerMetricsFactory(DriverContext context) { + this((InternalDriverContext) context, Ticker.systemTicker()); + } + + public MicrometerMetricsFactory(InternalDriverContext context, Ticker ticker) { this.context = context; String logPrefix = context.getSessionName(); DriverExecutionProfile config = context.getConfig().getDefaultProfile(); @@ -87,9 +91,26 @@ public MicrometerMetricsFactory( this.registry = null; this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; } else { - this.registry = registry; - this.sessionUpdater = - new MicrometerSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + // try to get the metric registry from the context + Object possibleMetricRegistry = context.getMetricRegistry(); + if (possibleMetricRegistry == null) { + // metrics are enabled, but a metric registry was not supplied to the context + // use the global registry + possibleMetricRegistry = io.micrometer.core.instrument.Metrics.globalRegistry; + } + if (possibleMetricRegistry instanceof MeterRegistry) { + this.registry = (MeterRegistry) possibleMetricRegistry; + this.sessionUpdater = + new MicrometerSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + } else { + // Metrics are enabled, but the registry object is not an expected type + throw new IllegalArgumentException( + "Unexpected Metrics registry object. 
Expected registry object to be of type '" + + MeterRegistry.class.getName() + + "', but was '" + + possibleMetricRegistry.getClass().getName() + + "'"); + } } } @@ -111,8 +132,7 @@ static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String @Override public Optional getMetrics() { - throw new UnsupportedOperationException( - "getMetrics() is not supported with Micrometer. The driver publishes its metrics directly to the global MeterRegistry."); + return Optional.empty(); } @Override diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java index 697083ee1e5..abaf780f5dd 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; @@ -24,12 +25,18 @@ import ch.qos.logback.classic.Level; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.util.LoggerTest; import com.tngtech.java.junit.dataprovider.DataProvider; import 
com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.micrometer.core.instrument.MeterRegistry; import java.time.Duration; +import java.util.Arrays; +import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; @@ -88,4 +95,49 @@ public static Object[][] acceptableEvictionTimes() { {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} }; } + + @Test + @UseDataProvider(value = "invalidRegistryTypes") + public void should_throw_if_wrong_or_missing_registry_type( + Object registryObj, String expectedMsg) { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + // when + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getConfig()).thenReturn(config); + when(context.getSessionName()).thenReturn("MockSession"); + // registry object is not a registry type + when(context.getMetricRegistry()).thenReturn(registryObj); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) + .thenReturn(enabledMetrics); + // then + try { + new MicrometerMetricsFactory(context); + fail( + "MetricsFactory should require correct registy object type: " + + MeterRegistry.class.getName()); + } catch (IllegalArgumentException iae) { + assertThat(iae.getMessage()).isEqualTo(expectedMsg); + } + } + + @DataProvider + public static Object[][] invalidRegistryTypes() { + return new Object[][] { + { + Integer.MAX_VALUE, + "Unexpected Metrics registry object. 
Expected registry object to be of type '" + + MeterRegistry.class.getName() + + "', but was '" + + Integer.class.getName() + + "'" + }, + }; + } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java deleted file mode 100644 index 97afff035c2..00000000000 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileDriverContext.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.metrics.microprofile; - -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import edu.umd.cs.findbugs.annotations.NonNull; -import org.eclipse.microprofile.metrics.MetricRegistry; - -/** - * Implementation of {@link DriverContext} that provides for a Micrometer {@link MetricRegistry}. 
- */ -public class MicroProfileDriverContext extends DefaultDriverContext { - - private final MetricRegistry registry; - - public MicroProfileDriverContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments, - @NonNull MetricRegistry registry) { - super(configLoader, programmaticArguments); - this.registry = registry; - } - - @Override - @NonNull - protected MetricsFactory buildMetricsFactory() { - return new MicroProfileMetricsFactory(this, registry, Ticker.systemTicker()); - } -} diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java index a0f4136e977..6cb9f5de47b 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; @@ -52,8 +53,11 @@ public class MicroProfileMetricsFactory implements MetricsFactory { private final SessionMetricUpdater sessionUpdater; private final Cache metricsCache; - public MicroProfileMetricsFactory( - InternalDriverContext context, MetricRegistry registry, Ticker ticker) { + public MicroProfileMetricsFactory(DriverContext context) { + this((InternalDriverContext) context, Ticker.systemTicker()); + } + + public MicroProfileMetricsFactory(InternalDriverContext context, Ticker ticker) { this.context = 
context; String logPrefix = context.getSessionName(); DriverExecutionProfile config = context.getConfig().getDefaultProfile(); @@ -86,9 +90,28 @@ public MicroProfileMetricsFactory( this.registry = null; this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; } else { - this.registry = registry; - this.sessionUpdater = - new MicroProfileSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + Object possibleMetricRegistry = context.getMetricRegistry(); + if (possibleMetricRegistry == null) { + // metrics are enabled, but a metric registry was not supplied to the context + throw new IllegalArgumentException( + "No metric registry object found. Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "'"); + } + if (possibleMetricRegistry instanceof MetricRegistry) { + this.registry = (MetricRegistry) possibleMetricRegistry; + this.sessionUpdater = + new MicroProfileSessionMetricUpdater( + enabledSessionMetrics, this.registry, this.context); + } else { + // Metrics are enabled, but the registry object is not an expected type + throw new IllegalArgumentException( + "Unexpected Metrics registry object. Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "', but was '" + + possibleMetricRegistry.getClass().getName() + + "'"); + } } } @@ -110,8 +133,7 @@ static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String @Override public Optional getMetrics() { - throw new UnsupportedOperationException( - "getMetrics() is not supported with MicroProfile. 
The driver publishes its metrics directly to the MetricRegistry."); + return Optional.empty(); } @Override diff --git a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java index 5c450b32248..453bf281284 100644 --- a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java +++ b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java @@ -17,6 +17,7 @@ import static com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; @@ -24,12 +25,18 @@ import ch.qos.logback.classic.Level; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.util.LoggerTest; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; +import java.util.Arrays; +import java.util.List; +import org.eclipse.microprofile.metrics.MetricRegistry; import org.junit.Test; import org.junit.runner.RunWith; @@ -88,4 +95,55 @@ public static Object[][] acceptableEvictionTimes() { {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, 
{LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} }; } + + @Test + @UseDataProvider(value = "invalidRegistryTypes") + public void should_throw_if_wrong_or_missing_registry_type( + Object registryObj, String expectedMsg) { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); + List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + // when + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getConfig()).thenReturn(config); + when(context.getSessionName()).thenReturn("MockSession"); + // registry object is not a registry type + when(context.getMetricRegistry()).thenReturn(registryObj); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) + .thenReturn(enabledMetrics); + // then + try { + new MicroProfileMetricsFactory(context); + fail( + "MetricsFactory should require correct registy object type: " + + MetricRegistry.class.getName()); + } catch (IllegalArgumentException iae) { + assertThat(iae.getMessage()).isEqualTo(expectedMsg); + } + } + + @DataProvider + public static Object[][] invalidRegistryTypes() { + return new Object[][] { + { + Integer.MAX_VALUE, + "Unexpected Metrics registry object. Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "', but was '" + + Integer.class.getName() + + "'" + }, + { + null, + "No metric registry object found. 
Expected registry object to be of type '" + + MetricRegistry.class.getName() + + "'" + } + }; + } } From a8b7223d39f78c02fcb9d9f884bfc6cccca15590 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 25 Aug 2020 11:30:52 -0700 Subject: [PATCH 566/979] JAVA-2870: Optimize memory usage of token map --- changelog/README.md | 1 + .../token/CanonicalNodeSetBuilder.java | 55 ++++++ .../token/EverywhereReplicationStrategy.java | 14 +- .../core/metadata/token/KeyspaceTokenMap.java | 11 +- .../token/LocalReplicationStrategy.java | 12 +- .../NetworkTopologyReplicationStrategy.java | 24 +-- .../metadata/token/ReplicationStrategy.java | 5 +- .../token/SimpleReplicationStrategy.java | 19 +- ...etworkTopologyReplicationStrategyTest.java | 165 +++++++----------- .../token/SimpleReplicationStrategyTest.java | 35 ++-- 10 files changed, 181 insertions(+), 160 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/CanonicalNodeSetBuilder.java diff --git a/changelog/README.md b/changelog/README.md index a74825f7b00..ed11049389a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [improvement] JAVA-2870: Optimize memory usage of token map - [improvement] JAVA-2855: Allow selection of the metrics framework via the config - [improvement] JAVA-2864: Revisit mapper processor's messaging - [new feature] JAVA-2816: Support immutability and fluent accessors in the mapper diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/CanonicalNodeSetBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/CanonicalNodeSetBuilder.java new file mode 100644 index 00000000000..6ab047896f5 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/CanonicalNodeSetBuilder.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata.token; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import net.jcip.annotations.NotThreadSafe; + +/** + * A reusable set builder that guarantees that identical sets (same elements in the same order) will + * be represented by the same instance. + */ +@NotThreadSafe +class CanonicalNodeSetBuilder { + + private final Map, Set> canonicalSets = new HashMap<>(); + private final List elements = new ArrayList<>(); + + void add(Node node) { + // This is O(n), but the cardinality is low (max possible size is the replication factor). 
+ if (!elements.contains(node)) { + elements.add(node); + } + } + + int size() { + return elements.size(); + } + + Set build() { + return canonicalSets.computeIfAbsent(elements, ImmutableSet::copyOf); + } + + void clear() { + elements.clear(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java index 5cce9dacc70..d7bb9b4cbde 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/EverywhereReplicationStrategy.java @@ -17,23 +17,23 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; -import java.util.Collection; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.util.List; import java.util.Map; +import java.util.Set; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class EverywhereReplicationStrategy implements ReplicationStrategy { @Override - public SetMultimap computeReplicasByToken( + public Map> computeReplicasByToken( Map tokenToPrimary, List ring) { - ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); - Collection nodes = tokenToPrimary.values(); + ImmutableMap.Builder> result = ImmutableMap.builder(); + Set allNodes = ImmutableSet.copyOf(tokenToPrimary.values()); for (Token token : tokenToPrimary.keySet()) { - result = result.putAll(token, nodes); + result = result.put(token, allNodes); } return result.build(); } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/KeyspaceTokenMap.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/KeyspaceTokenMap.java index d5f6937dd93..36bf7e15924 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/KeyspaceTokenMap.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/KeyspaceTokenMap.java @@ -52,8 +52,7 @@ static KeyspaceTokenMap build( try { ReplicationStrategy strategy = replicationStrategyFactory.newInstance(replicationConfig); - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); SetMultimap tokenRangesByNode; if (ring.size() == 1) { // We forced the single range to ]minToken,minToken], make sure to use that instead of @@ -79,13 +78,13 @@ static KeyspaceTokenMap build( private final List ring; private final SetMultimap tokenRangesByNode; - private final SetMultimap replicasByToken; + private final Map> replicasByToken; private final TokenFactory tokenFactory; private KeyspaceTokenMap( List ring, SetMultimap tokenRangesByNode, - SetMultimap replicasByToken, + Map> replicasByToken, TokenFactory tokenFactory) { this.ring = ring; this.tokenRangesByNode = tokenRangesByNode; @@ -104,7 +103,7 @@ Set getReplicas(ByteBuffer partitionKey) { Set getReplicas(Token token) { // If the token happens to be one of the "primary" tokens, get result directly Set nodes = replicasByToken.get(token); - if (!nodes.isEmpty()) { + if (nodes != null) { return nodes; } // Otherwise, find the closest "primary" token on the ring @@ -119,7 +118,7 @@ Set getReplicas(Token token) { } private static SetMultimap buildTokenRangesByNode( - Set tokenRanges, SetMultimap replicasByToken) { + Set tokenRanges, Map> replicasByToken) { ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); for (TokenRange range : tokenRanges) { for (Node 
node : replicasByToken.get(range.getEnd())) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/LocalReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/LocalReplicationStrategy.java index 6e1395fbf2d..6b6007d14cd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/LocalReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/LocalReplicationStrategy.java @@ -17,21 +17,23 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.util.List; import java.util.Map; +import java.util.Set; import net.jcip.annotations.ThreadSafe; @ThreadSafe class LocalReplicationStrategy implements ReplicationStrategy { @Override - public SetMultimap computeReplicasByToken( + public Map> computeReplicasByToken( Map tokenToPrimary, List ring) { - ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); + ImmutableMap.Builder> result = ImmutableMap.builder(); + // Each token maps to exactly one node for (Map.Entry entry : tokenToPrimary.entrySet()) { - result.put(entry.getKey(), entry.getValue()); + result.put(entry.getKey(), ImmutableSet.of(entry.getValue())); } return result.build(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java index 3c017eca8b6..b90b7ee5955 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java @@ -18,9 +18,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; import java.util.HashMap; import java.util.HashSet; @@ -56,14 +54,18 @@ class NetworkTopologyReplicationStrategy implements ReplicationStrategy { } @Override - public SetMultimap computeReplicasByToken( + public Map> computeReplicasByToken( Map tokenToPrimary, List ring) { - // This is essentially a copy of org.apache.cassandra.locator.NetworkTopologyStrategy - ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); + // The implementation of this method was adapted from + // org.apache.cassandra.locator.NetworkTopologyStrategy + + ImmutableMap.Builder> result = ImmutableMap.builder(); Map> racks = getRacksInDcs(tokenToPrimary.values()); Map dcNodeCount = Maps.newHashMapWithExpectedSize(replicationFactors.size()); Set warnedDcs = Sets.newHashSetWithExpectedSize(replicationFactors.size()); + CanonicalNodeSetBuilder replicasBuilder = new CanonicalNodeSetBuilder(); + // find maximum number of nodes in each DC for (Node node : Sets.newHashSet(tokenToPrimary.values())) { String dc = node.getDatacenter(); @@ -71,6 +73,8 @@ public SetMultimap computeReplicasByToken( dcNodeCount.put(dc, dcNodeCount.get(dc) + 1); } for (int i = 0; i < ring.size(); i++) { + replicasBuilder.clear(); + Map> allDcReplicas = new HashMap<>(); Map> seenRacks = new 
HashMap<>(); Map> skippedDcEndpoints = new HashMap<>(); @@ -80,8 +84,6 @@ public SetMultimap computeReplicasByToken( skippedDcEndpoints.put(dc, new LinkedHashSet<>()); // preserve order } - // Preserve order - primary replica will be first - Set replicas = new LinkedHashSet<>(); for (int j = 0; j < ring.size() && !allDone(allDcReplicas, dcNodeCount); j++) { Node h = tokenToPrimary.get(getTokenWrapping(i + j, ring)); String dc = h.getDatacenter(); @@ -96,14 +98,14 @@ public SetMultimap computeReplicasByToken( String rack = h.getRack(); // Check if we already visited all racks in dc if (rack == null || seenRacks.get(dc).size() == racks.get(dc).size()) { - replicas.add(h); + replicasBuilder.add(h); dcReplicas.add(h); } else { // Is this a new rack? if (seenRacks.get(dc).contains(rack)) { skippedDcEndpoints.get(dc).add(h); } else { - replicas.add(h); + replicasBuilder.add(h); dcReplicas.add(h); seenRacks.get(dc).add(rack); // If we've run out of distinct racks, add the nodes skipped so far @@ -111,7 +113,7 @@ public SetMultimap computeReplicasByToken( Iterator skippedIt = skippedDcEndpoints.get(dc).iterator(); while (skippedIt.hasNext() && dcReplicas.size() < rf) { Node nextSkipped = skippedIt.next(); - replicas.add(nextSkipped); + replicasBuilder.add(nextSkipped); dcReplicas.add(nextSkipped); } } @@ -139,7 +141,7 @@ public SetMultimap computeReplicasByToken( } } - result.putAll(ring.get(i), replicas); + result.put(ring.get(i), replicasBuilder.build()); } return result.build(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationStrategy.java index 1049c66c81b..2f76f720ccd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/ReplicationStrategy.java @@ -17,11 +17,10 @@ import 
com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.token.Token; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; import java.util.List; import java.util.Map; +import java.util.Set; public interface ReplicationStrategy { - SetMultimap computeReplicasByToken( - Map tokenToPrimary, List ring); + Map> computeReplicasByToken(Map tokenToPrimary, List ring); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java index 3cb1e0458b0..8ec2394deb4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategy.java @@ -19,9 +19,7 @@ import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSetMultimap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; -import java.util.LinkedHashSet; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -42,19 +40,20 @@ class SimpleReplicationStrategy implements ReplicationStrategy { } @Override - public SetMultimap computeReplicasByToken( + public Map> computeReplicasByToken( Map tokenToPrimary, List ring) { int rf = Math.min(replicationFactor.fullReplicas(), ring.size()); - ImmutableSetMultimap.Builder result = ImmutableSetMultimap.builder(); + ImmutableMap.Builder> result = ImmutableMap.builder(); + CanonicalNodeSetBuilder replicasBuilder = new CanonicalNodeSetBuilder(); + for (int i = 0; i < ring.size(); i++) { - // 
Consecutive sections of the ring can be assigned to the same node - Set replicas = new LinkedHashSet<>(); - for (int j = 0; j < ring.size() && replicas.size() < rf; j++) { - replicas.add(tokenToPrimary.get(getTokenWrapping(i + j, ring))); + replicasBuilder.clear(); + for (int j = 0; j < ring.size() && replicasBuilder.size() < rf; j++) { + replicasBuilder.add(tokenToPrimary.get(getTokenWrapping(i + j, ring))); } - result.putAll(ring.get(i), replicas); + result.put(ring.get(i), replicasBuilder.build()); } return result.build(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategyTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategyTest.java index 01627628609..f6a6cfc3c07 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategyTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategyTest.java @@ -31,9 +31,9 @@ import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import org.junit.runner.RunWith; @@ -93,16 +93,15 @@ public void should_compute_for_simple_layout() { new NetworkTopologyReplicationStrategy(ImmutableMap.of(DC1, "1", DC2, "1"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); // Note: this also asserts the iteration order of the 
sets (unlike containsEntry(token, set)) assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2); assertThat(replicasByToken.get(TOKEN04)).containsExactly(node2, node1); - assertThat(replicasByToken.get(TOKEN14)).containsExactly(node1, node2); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node2, node1); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN04)); } /** 8 tokens, 4 nodes in 2 DCs in the same racks, RF = 1 in each DC. */ @@ -130,8 +129,7 @@ public void should_compute_for_simple_layout_with_multiple_nodes_per_rack() { new NetworkTopologyReplicationStrategy(ImmutableMap.of(DC1, "1", DC2, "1"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); @@ -139,10 +137,10 @@ public void should_compute_for_simple_layout_with_multiple_nodes_per_rack() { assertThat(replicasByToken.get(TOKEN03)).containsExactly(node2, node3); assertThat(replicasByToken.get(TOKEN05)).containsExactly(node3, node4); assertThat(replicasByToken.get(TOKEN07)).containsExactly(node4, node1); - assertThat(replicasByToken.get(TOKEN13)).containsExactly(node1, node2); - assertThat(replicasByToken.get(TOKEN15)).containsExactly(node2, node3); - assertThat(replicasByToken.get(TOKEN17)).containsExactly(node3, node4); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node4, node1); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN03)); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN05)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN07)); } /** 6 tokens, 3 nodes in 3 DCs, RF = 1 in 
each DC. */ @@ -167,17 +165,16 @@ public void should_compute_for_simple_layout_with_3_dcs() { ImmutableMap.of(DC1, "1", DC2, "1", DC3, "1"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2, node3); assertThat(replicasByToken.get(TOKEN05)).containsExactly(node2, node3, node1); assertThat(replicasByToken.get(TOKEN09)).containsExactly(node3, node1, node2); - assertThat(replicasByToken.get(TOKEN11)).containsExactly(node1, node2, node3); - assertThat(replicasByToken.get(TOKEN15)).containsExactly(node2, node3, node1); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node3, node1, node2); + assertThat(replicasByToken.get(TOKEN11)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN05)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN09)); } /** 10 tokens, 4 nodes in 2 DCs, RF = 2 in each DC, 1 node owns 4 tokens, the others only 2. 
*/ @@ -209,21 +206,21 @@ public void should_compute_for_unbalanced_ring() { new NetworkTopologyReplicationStrategy(ImmutableMap.of(DC1, "2", DC2, "2"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2, node3, node4); - assertThat(replicasByToken.get(TOKEN03)).containsExactly(node1, node2, node3, node4); + assertThat(replicasByToken.get(TOKEN03)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN05)).containsExactly(node2, node3, node4, node1); assertThat(replicasByToken.get(TOKEN07)).containsExactly(node3, node4, node1, node2); assertThat(replicasByToken.get(TOKEN09)).containsExactly(node4, node1, node2, node3); - assertThat(replicasByToken.get(TOKEN11)).containsExactly(node1, node2, node3, node4); - assertThat(replicasByToken.get(TOKEN13)).containsExactly(node1, node2, node3, node4); - assertThat(replicasByToken.get(TOKEN15)).containsExactly(node2, node3, node4, node1); - assertThat(replicasByToken.get(TOKEN17)).containsExactly(node3, node4, node1, node2); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node4, node1, node2, node3); + assertThat(replicasByToken.get(TOKEN11)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN05)); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN07)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN09)); + ; } /** 16 tokens, 8 nodes in 2 DCs with 2 per rack, RF = 2 in each DC. 
*/ @@ -265,8 +262,7 @@ public void should_compute_with_multiple_racks_per_dc() { new NetworkTopologyReplicationStrategy(ImmutableMap.of(DC1, "2", DC2, "2"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); @@ -278,14 +274,14 @@ public void should_compute_with_multiple_racks_per_dc() { assertThat(replicasByToken.get(TOKEN06)).containsExactly(node6, node7, node8, node1); assertThat(replicasByToken.get(TOKEN07)).containsExactly(node7, node8, node1, node2); assertThat(replicasByToken.get(TOKEN08)).containsExactly(node8, node1, node2, node3); - assertThat(replicasByToken.get(TOKEN12)).containsExactly(node1, node2, node3, node4); - assertThat(replicasByToken.get(TOKEN13)).containsExactly(node2, node3, node4, node5); - assertThat(replicasByToken.get(TOKEN14)).containsExactly(node3, node4, node5, node6); - assertThat(replicasByToken.get(TOKEN15)).containsExactly(node4, node5, node6, node7); - assertThat(replicasByToken.get(TOKEN16)).containsExactly(node5, node6, node7, node8); - assertThat(replicasByToken.get(TOKEN17)).containsExactly(node6, node7, node8, node1); - assertThat(replicasByToken.get(TOKEN18)).containsExactly(node7, node8, node1, node2); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node8, node1, node2, node3); + assertThat(replicasByToken.get(TOKEN12)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN02)); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN03)); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN04)); + assertThat(replicasByToken.get(TOKEN16)).isSameAs(replicasByToken.get(TOKEN05)); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN06)); + 
assertThat(replicasByToken.get(TOKEN18)).isSameAs(replicasByToken.get(TOKEN07)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN08)); } /** @@ -334,8 +330,7 @@ public void should_pick_dc_replicas_in_different_racks_first() { new NetworkTopologyReplicationStrategy(ImmutableMap.of(DC1, "3", DC2, "3"), "test"); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); @@ -355,22 +350,14 @@ public void should_pick_dc_replicas_in_different_racks_first() { .containsExactly(node7, node8, node1, node2, node3, node4); assertThat(replicasByToken.get(TOKEN08)) .containsExactly(node8, node1, node2, node4, node5, node3); - assertThat(replicasByToken.get(TOKEN12)) - .containsExactly(node1, node2, node5, node3, node6, node4); - assertThat(replicasByToken.get(TOKEN13)) - .containsExactly(node2, node3, node5, node6, node4, node7); - assertThat(replicasByToken.get(TOKEN14)) - .containsExactly(node3, node4, node5, node6, node7, node8); - assertThat(replicasByToken.get(TOKEN15)) - .containsExactly(node4, node5, node6, node8, node1, node7); - assertThat(replicasByToken.get(TOKEN16)) - .containsExactly(node5, node6, node1, node7, node2, node8); - assertThat(replicasByToken.get(TOKEN17)) - .containsExactly(node6, node7, node1, node2, node8, node3); - assertThat(replicasByToken.get(TOKEN18)) - .containsExactly(node7, node8, node1, node2, node3, node4); - assertThat(replicasByToken.get(TOKEN19)) - .containsExactly(node8, node1, node2, node4, node5, node3); + assertThat(replicasByToken.get(TOKEN12)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN02)); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN03)); + 
assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN04)); + assertThat(replicasByToken.get(TOKEN16)).isSameAs(replicasByToken.get(TOKEN05)); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN06)); + assertThat(replicasByToken.get(TOKEN18)).isSameAs(replicasByToken.get(TOKEN07)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN08)); } /** @@ -382,42 +369,34 @@ public void should_pick_dc_replicas_in_different_racks_first() { @Test public void should_pick_dc_replicas_in_different_racks_first_when_nodes_own_consecutive_tokens() { // When - SetMultimap replicasByToken = computeWithDifferentRacksAndConsecutiveTokens(3); + Map> replicasByToken = computeWithDifferentRacksAndConsecutiveTokens(3); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(16); assertThat(replicasByToken.get(TOKEN01)) .containsExactly(node1, node5, node3, node2, node6, node4); - assertThat(replicasByToken.get(TOKEN02)) - .containsExactly(node1, node5, node3, node2, node6, node4); + assertThat(replicasByToken.get(TOKEN02)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN03)) .containsExactly(node3, node5, node7, node2, node6, node4); - assertThat(replicasByToken.get(TOKEN04)) - .containsExactly(node3, node5, node7, node2, node6, node4); + assertThat(replicasByToken.get(TOKEN04)).isSameAs(replicasByToken.get(TOKEN03)); assertThat(replicasByToken.get(TOKEN05)) .containsExactly(node5, node2, node6, node4, node1, node7); - assertThat(replicasByToken.get(TOKEN06)) - .containsExactly(node5, node2, node6, node4, node1, node7); + assertThat(replicasByToken.get(TOKEN06)).isSameAs(replicasByToken.get(TOKEN05)); assertThat(replicasByToken.get(TOKEN07)) .containsExactly(node7, node2, node6, node4, node1, node3); - assertThat(replicasByToken.get(TOKEN08)) - .containsExactly(node7, node2, node6, node4, node1, node3); + assertThat(replicasByToken.get(TOKEN08)).isSameAs(replicasByToken.get(TOKEN07)); 
assertThat(replicasByToken.get(TOKEN12)) .containsExactly(node2, node6, node4, node1, node5, node3); - assertThat(replicasByToken.get(TOKEN13)) - .containsExactly(node2, node6, node4, node1, node5, node3); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN12)); assertThat(replicasByToken.get(TOKEN14)) .containsExactly(node4, node6, node8, node1, node5, node3); - assertThat(replicasByToken.get(TOKEN15)) - .containsExactly(node4, node6, node8, node1, node5, node3); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN14)); assertThat(replicasByToken.get(TOKEN16)) .containsExactly(node6, node1, node5, node3, node2, node8); - assertThat(replicasByToken.get(TOKEN17)) - .containsExactly(node6, node1, node5, node3, node2, node8); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN16)); assertThat(replicasByToken.get(TOKEN18)) .containsExactly(node8, node1, node5, node3, node2, node4); - assertThat(replicasByToken.get(TOKEN19)) - .containsExactly(node8, node1, node5, node3, node2, node4); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN18)); } /** @@ -430,45 +409,37 @@ public void should_pick_dc_replicas_in_different_racks_first_when_nodes_own_cons @Test public void should_pick_dc_replicas_in_different_racks_first_when_all_nodes_contain_all_data() { // When - SetMultimap replicasByToken = computeWithDifferentRacksAndConsecutiveTokens(4); + Map> replicasByToken = computeWithDifferentRacksAndConsecutiveTokens(4); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(16); assertThat(replicasByToken.get(TOKEN01)) .containsExactly(node1, node5, node3, node7, node2, node6, node4, node8); - assertThat(replicasByToken.get(TOKEN02)) - .containsExactly(node1, node5, node3, node7, node2, node6, node4, node8); + assertThat(replicasByToken.get(TOKEN02)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN03)) .containsExactly(node3, node5, node7, 
node2, node6, node4, node8, node1); - assertThat(replicasByToken.get(TOKEN04)) - .containsExactly(node3, node5, node7, node2, node6, node4, node8, node1); + assertThat(replicasByToken.get(TOKEN04)).isSameAs(replicasByToken.get(TOKEN03)); assertThat(replicasByToken.get(TOKEN05)) .containsExactly(node5, node2, node6, node4, node8, node1, node7, node3); - assertThat(replicasByToken.get(TOKEN06)) - .containsExactly(node5, node2, node6, node4, node8, node1, node7, node3); + assertThat(replicasByToken.get(TOKEN06)).isSameAs(replicasByToken.get(TOKEN05)); assertThat(replicasByToken.get(TOKEN07)) .containsExactly(node7, node2, node6, node4, node8, node1, node3, node5); - assertThat(replicasByToken.get(TOKEN08)) - .containsExactly(node7, node2, node6, node4, node8, node1, node3, node5); + assertThat(replicasByToken.get(TOKEN08)).isSameAs(replicasByToken.get(TOKEN07)); assertThat(replicasByToken.get(TOKEN12)) .containsExactly(node2, node6, node4, node8, node1, node5, node3, node7); - assertThat(replicasByToken.get(TOKEN13)) - .containsExactly(node2, node6, node4, node8, node1, node5, node3, node7); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN12)); assertThat(replicasByToken.get(TOKEN14)) .containsExactly(node4, node6, node8, node1, node5, node3, node7, node2); - assertThat(replicasByToken.get(TOKEN15)) - .containsExactly(node4, node6, node8, node1, node5, node3, node7, node2); + assertThat(replicasByToken.get(TOKEN15)).isSameAs(replicasByToken.get(TOKEN14)); assertThat(replicasByToken.get(TOKEN16)) .containsExactly(node6, node1, node5, node3, node7, node2, node8, node4); - assertThat(replicasByToken.get(TOKEN17)) - .containsExactly(node6, node1, node5, node3, node7, node2, node8, node4); + assertThat(replicasByToken.get(TOKEN17)).isSameAs(replicasByToken.get(TOKEN16)); assertThat(replicasByToken.get(TOKEN18)) .containsExactly(node8, node1, node5, node3, node7, node2, node4, node6); - assertThat(replicasByToken.get(TOKEN19)) - 
.containsExactly(node8, node1, node5, node3, node7, node2, node4, node6); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN18)); } - private SetMultimap computeWithDifferentRacksAndConsecutiveTokens( + private Map> computeWithDifferentRacksAndConsecutiveTokens( int replicationFactor) { List ring = ImmutableList.of( @@ -518,15 +489,15 @@ private SetMultimap computeWithDifferentRacksAndConsecutiveTokens( @Test public void should_compute_complex_layout() { // When - SetMultimap replicasByToken = computeComplexLayout(2); + Map> replicasByToken = computeComplexLayout(2); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(18); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node5, node2, node6); - assertThat(replicasByToken.get(TOKEN02)).containsExactly(node1, node5, node2, node6); + assertThat(replicasByToken.get(TOKEN02)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN03)).containsExactly(node5, node3, node2, node6); assertThat(replicasByToken.get(TOKEN04)).containsExactly(node3, node5, node2, node6); - assertThat(replicasByToken.get(TOKEN05)).containsExactly(node1, node5, node2, node6); + assertThat(replicasByToken.get(TOKEN05)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN06)).containsExactly(node5, node2, node6, node3); assertThat(replicasByToken.get(TOKEN07)).containsExactly(node2, node6, node3, node5); assertThat(replicasByToken.get(TOKEN08)).containsExactly(node6, node3, node4, node5); @@ -534,8 +505,8 @@ public void should_compute_complex_layout() { assertThat(replicasByToken.get(TOKEN10)).containsExactly(node4, node5, node6, node3); assertThat(replicasByToken.get(TOKEN11)).containsExactly(node5, node4, node6, node3); assertThat(replicasByToken.get(TOKEN12)).containsExactly(node4, node6, node3, node5); - assertThat(replicasByToken.get(TOKEN13)).containsExactly(node4, node6, node3, node5); - 
assertThat(replicasByToken.get(TOKEN14)).containsExactly(node2, node6, node3, node5); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN12)); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN07)); assertThat(replicasByToken.get(TOKEN15)).containsExactly(node6, node3, node2, node5); assertThat(replicasByToken.get(TOKEN16)).containsExactly(node3, node2, node6, node5); assertThat(replicasByToken.get(TOKEN17)).containsExactly(node2, node6, node1, node5); @@ -551,14 +522,13 @@ public void should_compute_complex_layout() { @Test public void should_compute_complex_layout_with_rf_too_high() { // When - SetMultimap replicasByToken = computeComplexLayout(4); + Map> replicasByToken = computeComplexLayout(4); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(18); assertThat(replicasByToken.get(TOKEN01)) .containsExactly(node1, node5, node3, node2, node6, node4); - assertThat(replicasByToken.get(TOKEN02)) - .containsExactly(node1, node5, node3, node2, node6, node4); + assertThat(replicasByToken.get(TOKEN02)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN03)) .containsExactly(node5, node3, node1, node2, node6, node4); assertThat(replicasByToken.get(TOKEN04)) @@ -579,8 +549,7 @@ public void should_compute_complex_layout_with_rf_too_high() { .containsExactly(node5, node4, node6, node2, node3, node1); assertThat(replicasByToken.get(TOKEN12)) .containsExactly(node4, node6, node2, node3, node5, node1); - assertThat(replicasByToken.get(TOKEN13)) - .containsExactly(node4, node6, node2, node3, node5, node1); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN12)); assertThat(replicasByToken.get(TOKEN14)) .containsExactly(node2, node6, node3, node5, node1, node4); assertThat(replicasByToken.get(TOKEN15)) @@ -593,7 +562,7 @@ public void should_compute_complex_layout_with_rf_too_high() { .containsExactly(node6, node1, node5, node3, node2, node4); } - private 
SetMultimap computeComplexLayout(int replicationFactor) { + private Map> computeComplexLayout(int replicationFactor) { List ring = ImmutableList.of( TOKEN01, TOKEN02, TOKEN03, TOKEN04, TOKEN05, TOKEN06, TOKEN07, TOKEN08, TOKEN09, diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java index 121ea685a75..2a849df3306 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/token/SimpleReplicationStrategyTest.java @@ -21,9 +21,9 @@ import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; -import com.datastax.oss.driver.shaded.guava.common.collect.SetMultimap; import java.util.List; import java.util.Map; +import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; @@ -64,16 +64,15 @@ public void should_compute_for_simple_layout() { SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); // Note: this also asserts the iteration order of the sets (unlike containsEntry(token, set)) assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2); assertThat(replicasByToken.get(TOKEN06)).containsExactly(node2, node1); - assertThat(replicasByToken.get(TOKEN14)).containsExactly(node1, node2); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node2, node1); + 
assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN06)); } /** 4 tokens, 2 nodes owning 2 consecutive tokens each, RF = 2. */ @@ -86,15 +85,14 @@ public void should_compute_when_nodes_own_consecutive_tokens() { SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2); - assertThat(replicasByToken.get(TOKEN06)).containsExactly(node1, node2); + assertThat(replicasByToken.get(TOKEN06)).isSameAs(replicasByToken.get(TOKEN01)); assertThat(replicasByToken.get(TOKEN14)).containsExactly(node2, node1); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node2, node1); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN14)); } /** 4 tokens, 1 node owns 3 of them, RF = 2. 
*/ @@ -107,8 +105,7 @@ public void should_compute_when_ring_unbalanced() { SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(2)); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); @@ -128,15 +125,14 @@ public void should_compute_when_replication_factor_is_larger_than_cluster_size() SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(6)); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node2); assertThat(replicasByToken.get(TOKEN06)).containsExactly(node2, node1); - assertThat(replicasByToken.get(TOKEN14)).containsExactly(node1, node2); - assertThat(replicasByToken.get(TOKEN19)).containsExactly(node2, node1); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN01)); + assertThat(replicasByToken.get(TOKEN19)).isSameAs(replicasByToken.get(TOKEN06)); } @Test @@ -188,13 +184,12 @@ public void should_compute_for_complex_layout() { SimpleReplicationStrategy strategy = new SimpleReplicationStrategy(new ReplicationFactor(3)); // When - SetMultimap replicasByToken = - strategy.computeReplicasByToken(tokenToPrimary, ring); + Map> replicasByToken = strategy.computeReplicasByToken(tokenToPrimary, ring); // Then assertThat(replicasByToken.keySet().size()).isEqualTo(ring.size()); assertThat(replicasByToken.get(TOKEN01)).containsExactly(node1, node5, node3); - assertThat(replicasByToken.get(TOKEN02)).containsExactly(node1, node5, node3); + assertThat(replicasByToken.get(TOKEN02)).isSameAs(replicasByToken.get(TOKEN01)); 
assertThat(replicasByToken.get(TOKEN03)).containsExactly(node5, node3, node1); assertThat(replicasByToken.get(TOKEN04)).containsExactly(node3, node1, node5); assertThat(replicasByToken.get(TOKEN05)).containsExactly(node1, node5, node2); @@ -205,8 +200,8 @@ public void should_compute_for_complex_layout() { assertThat(replicasByToken.get(TOKEN10)).containsExactly(node4, node5, node2); assertThat(replicasByToken.get(TOKEN11)).containsExactly(node5, node4, node2); assertThat(replicasByToken.get(TOKEN12)).containsExactly(node4, node2, node6); - assertThat(replicasByToken.get(TOKEN13)).containsExactly(node4, node2, node6); - assertThat(replicasByToken.get(TOKEN14)).containsExactly(node2, node6, node3); + assertThat(replicasByToken.get(TOKEN13)).isSameAs(replicasByToken.get(TOKEN12)); + assertThat(replicasByToken.get(TOKEN14)).isSameAs(replicasByToken.get(TOKEN07)); assertThat(replicasByToken.get(TOKEN15)).containsExactly(node6, node3, node2); assertThat(replicasByToken.get(TOKEN16)).containsExactly(node3, node2, node6); assertThat(replicasByToken.get(TOKEN17)).containsExactly(node2, node6, node1); From a1e8dfd7df91d28ed81724509cf9bdbb64d2ab22 Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 27 Aug 2020 15:36:31 -0700 Subject: [PATCH 567/979] Simplify code in NetworkTopologyReplicationStrategy --- .../token/NetworkTopologyReplicationStrategy.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java index b90b7ee5955..b44e383c2b8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/token/NetworkTopologyReplicationStrategy.java @@ -69,8 +69,7 @@ public Map> computeReplicasByToken( // find 
maximum number of nodes in each DC for (Node node : Sets.newHashSet(tokenToPrimary.values())) { String dc = node.getDatacenter(); - dcNodeCount.putIfAbsent(dc, 0); - dcNodeCount.put(dc, dcNodeCount.get(dc) + 1); + dcNodeCount.merge(dc, 1, Integer::sum); } for (int i = 0; i < ring.size(); i++) { replicasBuilder.clear(); @@ -90,9 +89,11 @@ public Map> computeReplicasByToken( if (dc == null || !allDcReplicas.containsKey(dc)) { continue; } - Integer rf = replicationFactors.get(dc).fullReplicas(); + ReplicationFactor dcConfig = replicationFactors.get(dc); + assert dcConfig != null; // since allDcReplicas.containsKey(dc) + int rf = dcConfig.fullReplicas(); Set dcReplicas = allDcReplicas.get(dc); - if (rf == null || dcReplicas.size() >= rf) { + if (dcReplicas.size() >= rf) { continue; } String rack = h.getRack(); From 9032edc98e36a30f60d0f933047df9f538479d34 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 28 Aug 2020 12:28:50 -0700 Subject: [PATCH 568/979] Fix capitalization rules to infer setter name in mapper --- .../mapper/processor/util/Capitalizer.java | 23 +++++++- .../processor/util/CapitalizerTest.java | 58 +++++++++++++++++++ 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/CapitalizerTest.java diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java index 930f4ed279b..b48a15ff430 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/Capitalizer.java @@ -20,13 +20,34 @@ public class Capitalizer { + /** + * Lower cases the first character of a name, for example when inferring the name of a field from + * the name of a getter stripped of its {@code 
get} prefix. + * + *

          This method respects a weird corner case of the JavaBeans conventions: "in the (unusual) + * special case when there is more than one character and both the first and second characters are + * upper case, we leave it alone. Thus {@code FooBah} becomes {@code fooBah} and {@code X} becomes + * {@code x}, but {@code URL} stays as {@code URL}.". + */ public static String decapitalize(String name) { return Introspector.decapitalize(Objects.requireNonNull(name)); } + /** + * Upper cases the first character of a name, for example when inferring the name of a setter from + * the name of a field. + * + *

          Mirroring the behavior of {@link #decapitalize(String)}, this method returns the string + * unchanged not only if the first character is uppercase, but also if the second is. For + * example, if a field is named {@code cId}, we want to produce the setter name {@code setcId()}, + * not {@code setCId()}. Otherwise applying the process in reverse would produce the field name + * {@code CId}. + */ public static String capitalize(String name) { Objects.requireNonNull(name); - if (name.isEmpty() || Character.isUpperCase(name.charAt(0))) { + if (name.isEmpty() + || Character.isUpperCase(name.charAt(0)) + || (name.length() > 1 && Character.isUpperCase(name.charAt(1)))) { return name; } else { char[] chars = name.toCharArray(); diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/CapitalizerTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/CapitalizerTest.java new file mode 100644 index 00000000000..c59eca67a04 --- /dev/null +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/util/CapitalizerTest.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.mapper.processor.util; + +import static com.datastax.oss.driver.Assertions.assertThat; + +import org.junit.Test; + +public class CapitalizerTest { + + @Test + public void should_decapitalize_regular_strings() { + assertThat(Capitalizer.decapitalize("foo")).isEqualTo("foo"); + assertThat(Capitalizer.decapitalize("Foo")).isEqualTo("foo"); + assertThat(Capitalizer.decapitalize("FooBar")).isEqualTo("fooBar"); + } + + @Test + public void should_not_decapitalize_when_second_char_is_uppercase() { + assertThat(Capitalizer.decapitalize("ID")).isEqualTo("ID"); + assertThat(Capitalizer.decapitalize("XML")).isEqualTo("XML"); + assertThat(Capitalizer.decapitalize("XMLRequest")).isEqualTo("XMLRequest"); + } + + @Test + public void should_capitalize_regular_strings() { + assertThat(Capitalizer.capitalize("foo")).isEqualTo("Foo"); + assertThat(Capitalizer.capitalize("fooBar")).isEqualTo("FooBar"); + } + + @Test + public void should_not_capitalize_when_second_char_is_uppercase() { + assertThat(Capitalizer.capitalize("cId")).isEqualTo("cId"); + } + + @Test + public void should_infer_field_name_and_setter_from_getter() { + // This is the sequence in which the processor uses those methods + String getterName = "getcId"; + String fieldName = Capitalizer.decapitalize(getterName.substring(3)); + String setterName = "set" + Capitalizer.capitalize(fieldName); + assertThat(fieldName).isEqualTo("cId"); + assertThat(setterName).isEqualTo("setcId"); + } +} From addeb9a55dd2489f6537afe46e6e626947ebdb89 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 28 Aug 2020 12:40:44 -0700 Subject: [PATCH 569/979] Use SET_TO_NULL strategy for recent mapper tests --- .../java/com/datastax/oss/driver/mapper/FluentEntityIT.java | 3 +++ .../java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java | 3 +++ 2 files changed, 6 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java index 2da087ecfd7..d06c1401e0c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/FluentEntityIT.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; @@ -33,6 +34,7 @@ import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.naming.SetterStyle; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -148,6 +150,7 @@ static MapperBuilder builder(CqlSession session) { } @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface FluentProductDao { @Select FluentProduct findById(UUID productId); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java index ef77e9b5f77..ed1afdbfaf8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java @@ -26,12 +26,14 @@ import com.datastax.oss.driver.api.mapper.annotations.Dao; import 
com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; +import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -135,6 +137,7 @@ static MapperBuilder builder(CqlSession session) { } @Dao + @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface ImmutableProductDao { @Select ImmutableProduct findById(UUID productId); From 27f6f449b9d90a54d9f69812b7b1723b54a716c8 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 28 Aug 2020 10:21:45 -0700 Subject: [PATCH 570/979] JAVA-2867: Revisit compressor substitutions --- changelog/README.md | 1 + .../core/context/DefaultDriverContext.java | 17 +-- .../core/protocol/BuiltInCompressors.java | 46 ++++++++ .../protocol/CompressorSubstitutions.java | 109 ++++++++++++++++++ .../internal/core/protocol/Lz4Missing.java | 33 ------ .../core/protocol/Lz4Substitution.java | 55 --------- .../core/protocol/SnappySubstitution.java | 52 --------- 7 files changed, 158 insertions(+), 155 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java delete mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java diff --git a/changelog/README.md b/changelog/README.md index ed11049389a..d13f28aa261 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [bug] JAVA-2867: Revisit compressor substitutions - [improvement] JAVA-2870: Optimize memory usage of token map - [improvement] JAVA-2855: Allow selection of the metrics framework via the config - [improvement] JAVA-2864: Revisit mapper processor's messaging diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 9ae82192e61..5857b0b9be7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -79,9 +79,8 @@ import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.pool.ChannelPoolFactory; +import com.datastax.oss.driver.internal.core.protocol.BuiltInCompressors; import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; -import com.datastax.oss.driver.internal.core.protocol.Lz4Compressor; -import com.datastax.oss.driver.internal.core.protocol.SnappyCompressor; import com.datastax.oss.driver.internal.core.servererrors.DefaultWriteTypeRegistry; import com.datastax.oss.driver.internal.core.servererrors.WriteTypeRegistry; import com.datastax.oss.driver.internal.core.session.PoolManager; @@ -430,19 +429,7 @@ protected Compressor buildCompressor() { 
DriverExecutionProfile defaultProfile = getConfig().getDefaultProfile(); String name = defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none"); assert name != null : "should use default value"; - switch (name.toLowerCase()) { - case "lz4": - return new Lz4Compressor(this); - case "snappy": - return new SnappyCompressor(this); - case "none": - return Compressor.none(); - default: - throw new IllegalArgumentException( - String.format( - "Unsupported compression algorithm '%s' (from configuration option %s)", - name, DefaultDriverOption.PROTOCOL_COMPRESSION.getPath())); - } + return BuiltInCompressors.newInstance(name, this); } protected PrimitiveCodec buildPrimitiveCodec() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java new file mode 100644 index 00000000000..5f6ad6ec270 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.protocol.internal.Compressor; +import io.netty.buffer.ByteBuf; + +/** + * Provides a single entry point to create compressor instances in the driver. + * + *

          Note that this class also serves as a convenient target for GraalVM substitutions, see {@link + * CompressorSubstitutions}. + */ +public class BuiltInCompressors { + + public static Compressor newInstance(String name, DriverContext context) { + switch (name.toLowerCase()) { + case "lz4": + return new Lz4Compressor(context); + case "snappy": + return new SnappyCompressor(context); + case "none": + return Compressor.none(); + default: + throw new IllegalArgumentException( + String.format( + "Unsupported compression algorithm '%s' (from configuration option %s)", + name, DefaultDriverOption.PROTOCOL_COMPRESSION.getPath())); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java new file mode 100644 index 00000000000..e632a07a0ab --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java @@ -0,0 +1,109 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.protocol; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.protocol.internal.Compressor; +import com.oracle.svm.core.annotate.Delete; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import io.netty.buffer.ByteBuf; +import java.util.function.BooleanSupplier; + +/** + * Handles GraalVM substitutions for compressors: LZ4 is only supported if we can find the native + * library in the classpath, and Snappy is never supported. + * + *

          When a compressor is not supported, we delete its class, and modify {@link + * BuiltInCompressors#newInstance(String, DriverContext)} to throw an error if the user attempts to + * configure it. + */ +public class CompressorSubstitutions { + + @TargetClass(value = BuiltInCompressors.class, onlyWith = Lz4Present.class) + public static final class BuiltInCompressorsLz4Only { + @Substitute + public static Compressor newInstance(String name, DriverContext context) { + switch (name.toLowerCase()) { + case "lz4": + return new Lz4Compressor(context); + case "snappy": + throw new UnsupportedOperationException( + "Snappy compression is not supported for native images"); + case "none": + return Compressor.none(); + default: + throw new IllegalArgumentException( + String.format( + "Unsupported compression algorithm '%s' (from configuration option %s)", + name, DefaultDriverOption.PROTOCOL_COMPRESSION.getPath())); + } + } + } + + @TargetClass(value = BuiltInCompressors.class, onlyWith = Lz4Missing.class) + public static final class NoBuiltInCompressors { + @Substitute + public static Compressor newInstance(String name, DriverContext context) { + switch (name.toLowerCase()) { + case "lz4": + throw new UnsupportedOperationException( + "This native image was not built with support for LZ4 compression"); + case "snappy": + throw new UnsupportedOperationException( + "Snappy compression is not supported for native images"); + case "none": + return Compressor.none(); + default: + throw new IllegalArgumentException( + String.format( + "Unsupported compression algorithm '%s' (from configuration option %s)", + name, DefaultDriverOption.PROTOCOL_COMPRESSION.getPath())); + } + } + } + + @TargetClass(value = Lz4Compressor.class, onlyWith = Lz4Missing.class) + @Delete + public static final class DeleteLz4Compressor {} + + @TargetClass(value = SnappyCompressor.class) + @Delete + public static final class DeleteSnappyCompressor {} + + public static class Lz4Present implements 
BooleanSupplier { + + private static final String LZ4_CLZ_NAME = "net.jpountz.lz4.LZ4Compressor"; + + @Override + public boolean getAsBoolean() { + try { + Class.forName(LZ4_CLZ_NAME); + return true; + } catch (ClassNotFoundException e) { + return false; + } + } + } + + public static class Lz4Missing extends Lz4Present { + @Override + public boolean getAsBoolean() { + return !super.getAsBoolean(); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java deleted file mode 100644 index 9ecccf6df5d..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Missing.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.core.protocol; - -import java.util.function.BooleanSupplier; - -public class Lz4Missing implements BooleanSupplier { - - private static final String LZ4_CLZ_NAME = "net.jpountz.lz4.LZ4Compressor"; - - @Override - public boolean getAsBoolean() { - try { - Class.forName(LZ4_CLZ_NAME); - return false; - } catch (ClassNotFoundException e) { - return true; - } - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java deleted file mode 100644 index 12a55be53bf..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Substitution.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.core.protocol; - -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.oracle.svm.core.annotate.Inject; -import com.oracle.svm.core.annotate.Substitute; -import com.oracle.svm.core.annotate.TargetClass; -import io.netty.buffer.ByteBuf; - -@TargetClass( - className = "com.datastax.oss.driver.internal.core.protocol.Lz4Compressor", - onlyWith = Lz4Missing.class) -final class Lz4Substitution { - - @Inject - private final String EXCEPTION_MSG = - "This native image was not built with support for LZ4 compression"; - - @Substitute - public Lz4Substitution(DriverContext context) {} - - @Substitute - protected ByteBuf compressHeap(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf decompressDirect(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf decompressHeap(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf compressDirect(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java deleted file mode 100644 index ccc1dc74408..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappySubstitution.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.protocol; - -import com.oracle.svm.core.annotate.Inject; -import com.oracle.svm.core.annotate.Substitute; -import com.oracle.svm.core.annotate.TargetClass; -import io.netty.buffer.ByteBuf; - -/** - * Snappy compression relies on the underlying native library and thus is not supported for native - * images - */ -@TargetClass(className = "com.datastax.oss.driver.internal.core.protocol.SnappyCompressor") -final class SnappySubstitution { - - @Inject - private final String EXCEPTION_MSG = "Snappy compression is not supported for native images"; - - @Substitute - protected ByteBuf compressHeap(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf decompressDirect(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf decompressHeap(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } - - @Substitute - protected ByteBuf compressDirect(ByteBuf input) { - throw new UnsupportedOperationException(EXCEPTION_MSG); - } -} From 8160e70378a7a38cec9adad4a71928d3a3c12a61 Mon Sep 17 00:00:00 2001 From: olim7t Date: Fri, 28 Aug 2020 10:22:09 -0700 Subject: [PATCH 571/979] Add "fast" profile to speed up mvn install --- CONTRIBUTING.md | 17 +++++++++++++++++ pom.xml | 14 ++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3546b3b86ea..927c7a7aa8c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -423,6 +423,23 @@ the 
script to only test what's actually being committed, but I couldn't get it t (it's still in there but commented). Keep this in mind when you commit, and don't forget to re-add the changes if the first attempt failed and you fixed the tests. +## Speeding up the build for local tests + +If you need to install something in your local repository quickly, you can use the `fast` profile to +skip all "non-essential" checks (licenses, formatting, tests, etc): + +``` +mvn clean install -Pfast +``` + +You can speed things up even more by targeting specific modules with the `-pl` option: + +``` +mvn clean install -Pfast -pl core,query-builder,mapper-runtime,mapper-processor,bom +``` + +Please run the normal build at least once before you push your changes. + ## Commits Keep your changes **focused**. Each commit should have a single, clear purpose expressed in its diff --git a/pom.xml b/pom.xml index eb35aa5e5cd..06481b9032b 100644 --- a/pom.xml +++ b/pom.xml @@ -894,6 +894,20 @@ height="0" width="0" style="display:none;visibility:hidden"> + + + fast + + true + true + true + true + true + true + true + true + + From 811412ae9809863007d9ddd17e54f170b1c9dfe3 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 29 Aug 2020 14:46:15 +0200 Subject: [PATCH 572/979] JAVA-2859: Upgrade Tinkerpop to 3.4.8 This commit also solves: - JAVA-2726: Fix Tinkerpop incompatibility with JPMS - JAVA-2842: Remove security vulnerabilities introduced by Tinkerpop --- changelog/README.md | 3 +++ manual/core/integration/README.md | 15 --------------- pom.xml | 2 +- 3 files changed, 4 insertions(+), 16 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index d13f28aa261..bc4a5fc9911 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,9 @@ ### 4.9.0 (in progress) +- [task] JAVA-2859: Upgrade Tinkerpop to 3.4.8 +- [bug] JAVA-2726: Fix Tinkerpop incompatibility with JPMS +- [bug] JAVA-2842: Remove security vulnerabilities introduced by Tinkerpop - [bug] JAVA-2867: 
Revisit compressor substitutions - [improvement] JAVA-2870: Optimize memory usage of token map - [improvement] JAVA-2855: Allow selection of the metrics framework via the config diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 2eb71eeeb24..219a0ab1af1 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -311,21 +311,6 @@ as `application.conf` and `logback.xml` in our previous examples, must be in the All the driver's artifacts are JPMS automatic modules. -Note that TinkerPop cannot currently be used in a JPMS application. You will get the following -error: - -``` -Error occurred during initialization of boot layer -java.lang.module.FindException: Unable to derive module descriptor for /path/to/gremlin-shaded-3.4.5.jar -Caused by: java.lang.module.InvalidModuleDescriptorException: Provider class com.fasterxml.jackson.core.JsonFactory not in module -``` - -This is a known issue that will be resolved in TinkerPop 3.4.7. The driver will upgrade as soon as -possible, see [JAVA-2726](https://datastax-oss.atlassian.net/browse/JAVA-2726). - -Unfortunately, the only workaround in the meantime is to exclude TinkerPop dependencies, as -explained [here](#tinker-pop). Graph functionality won't be available. 
- ### Driver dependencies The driver depends on a number of third-party libraries; some of those dependencies are opt-in, diff --git a/pom.xml b/pom.xml index 06481b9032b..073336a4567 100644 --- a/pom.xml +++ b/pom.xml @@ -50,7 +50,7 @@ 4.0.5 4.1.51.Final 1.2.1 - 3.4.5 + 3.4.8 1.7.26 1.0.2 20190722 From a87a44111b5ff7fa9c94afb01f8bb761a273c190 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 29 Aug 2020 15:04:08 +0200 Subject: [PATCH 573/979] JAVA-2827: Remove dependency to Tinkerpop gremlin-driver --- changelog/README.md | 1 + core-shaded/pom.xml | 4 ---- core/pom.xml | 4 ---- .../AbstractSimpleGraphBinaryCustomSerializer.java | 10 ++++------ .../graph/binary/buffer/DseNettyBufferFactory.java | 14 +++++++++----- .../internal/osgi/support/BundleOptions.java | 5 ----- pom.xml | 11 ----------- 7 files changed, 14 insertions(+), 35 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index bc4a5fc9911..84b6a5b39c0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [improvement] JAVA-2827: Remove dependency to Tinkerpop gremlin-driver - [task] JAVA-2859: Upgrade Tinkerpop to 3.4.8 - [bug] JAVA-2726: Fix Tinkerpop incompatibility with JPMS - [bug] JAVA-2842: Remove security vulnerabilities introduced by Tinkerpop diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index f8c760a8b0a..e0d3c951c96 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -92,10 +92,6 @@ org.apache.tinkerpop gremlin-core - - org.apache.tinkerpop - gremlin-driver - org.apache.tinkerpop tinkergraph-gremlin diff --git a/core/pom.xml b/core/pom.xml index 109e0dc7055..c7b49c57665 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -99,10 +99,6 @@ org.apache.tinkerpop tinkergraph-gremlin - - org.apache.tinkerpop - gremlin-driver - com.fasterxml.jackson.core jackson-core diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java index 976db31cc5d..428ee5a72eb 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/AbstractSimpleGraphBinaryCustomSerializer.java @@ -17,7 +17,6 @@ import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import java.io.IOException; -import org.apache.tinkerpop.gremlin.driver.ser.SerializationException; import org.apache.tinkerpop.gremlin.structure.io.Buffer; import org.apache.tinkerpop.gremlin.structure.io.binary.DataType; import org.apache.tinkerpop.gremlin.structure.io.binary.GraphBinaryReader; @@ -78,8 +77,7 @@ public T read(Buffer buffer, GraphBinaryReader context) throws IOException { // read {custom_type_info_length} and verify it is 0. // See #write(T, ByteBuf, GraphBinaryWriter) for why it is set to 0 if (context.readValue(buffer, Integer.class, false) != 0) { - throw new SerializationException( - "{custom_type_info} should not be provided for this custom type"); + throw new IOException("{custom_type_info} should not be provided for this custom type"); } return readValue(buffer, context, true); @@ -105,11 +103,11 @@ public T readValue(Buffer buffer, GraphBinaryReader context, boolean nullable) final int valueLength = buffer.readInt(); if (valueLength <= 0) { - throw new SerializationException(String.format("Unexpected value length: %d", valueLength)); + throw new IOException(String.format("Unexpected value length: %d", valueLength)); } if (valueLength > buffer.readableBytes()) { - throw new SerializationException( + throw new IOException( String.format( "Not enough readable bytes: %d bytes required for value (%d bytes available)", valueLength, buffer.readableBytes())); @@ -134,7 +132,7 @@ public void writeValue( throws IOException { if (value == null) { if (!nullable) { - 
throw new SerializationException("Unexpected null value when nullable is false"); + throw new IOException("Unexpected null value when nullable is false"); } // writes {value_flag} to "1" which means "the value is null" diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java index 192b0ecec53..47cf713c05d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/binary/buffer/DseNettyBufferFactory.java @@ -15,22 +15,26 @@ */ package com.datastax.dse.driver.internal.core.graph.binary.buffer; -import io.netty.buffer.*; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.buffer.UnpooledByteBufAllocator; import java.nio.ByteBuffer; import java.util.function.Supplier; -import org.apache.tinkerpop.gremlin.driver.ser.NettyBufferFactory; import org.apache.tinkerpop.gremlin.structure.io.Buffer; import org.apache.tinkerpop.gremlin.structure.io.BufferFactory; /** * Internal BufferFactory impl for creation of Tinkerpop buffers. We implement an internal type here * to allow for this class to use shaded Netty types (without bringing all of Tinkerpop into the - * shaded JAR). The impl is based on the initial impl of {@link NettyBufferFactory} but we don't - * guarantee that this class will mirror changes to that class over time. + * shaded JAR). The impl is based on the initial impl of {@code + * org.apache.tinkerpop.gremlin.driver.ser.NettyBufferFactory} but we don't guarantee that this + * class will mirror changes to that class over time. 
*/ public class DseNettyBufferFactory implements BufferFactory { - private static ByteBufAllocator DEFAULT_ALLOCATOR = new UnpooledByteBufAllocator(false); + private static final ByteBufAllocator DEFAULT_ALLOCATOR = new UnpooledByteBufAllocator(false); private final ByteBufAllocator allocator; diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index f12e8fa337a..c4416768b4a 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -139,11 +139,6 @@ public static CompositeOption tinkerpopBundles() { "org.apache.tinkerpop.gremlin.util.*") .bundleSymbolicName("org.apache.tinkerpop.gremlin-core") .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), - CoreOptions.wrappedBundle( - mavenBundle("org.apache.tinkerpop", "gremlin-driver").versionAsInProject()) - .exports("org.apache.tinkerpop.gremlin.driver.*") - .bundleSymbolicName("org.apache.tinkerpop.gremlin-driver") - .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), CoreOptions.wrappedBundle( mavenBundle("org.apache.tinkerpop", "tinkergraph-gremlin").versionAsInProject()) .exports("org.apache.tinkerpop.gremlin.tinkergraph.*") diff --git a/pom.xml b/pom.xml index 073336a4567..078d3f3f03f 100644 --- a/pom.xml +++ b/pom.xml @@ -161,17 +161,6 @@ tinkergraph-gremlin ${tinkerpop.version} - - org.apache.tinkerpop - gremlin-driver - ${tinkerpop.version} - - - io.netty - netty-all - - - org.reactivestreams reactive-streams From 11bcd4916687ff584a1be42692e7e8a3150bd200 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 29 Aug 2020 15:33:05 +0200 Subject: [PATCH 574/979] JAVA-2827: Exclude unused Tinkerpop transitive dependencies --- changelog/README.md | 1 + pom.xml | 18 ++++++++++++++++++ 2 files changed, 19 
insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 84b6a5b39c0..13e40ef7f40 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [improvement] JAVA-2827: Exclude unused Tinkerpop transitive dependencies - [improvement] JAVA-2827: Remove dependency to Tinkerpop gremlin-driver - [task] JAVA-2859: Upgrade Tinkerpop to 3.4.8 - [bug] JAVA-2726: Fix Tinkerpop incompatibility with JPMS diff --git a/pom.xml b/pom.xml index 078d3f3f03f..90553385abb 100644 --- a/pom.xml +++ b/pom.xml @@ -155,6 +155,24 @@ org.apache.tinkerpop gremlin-core ${tinkerpop.version} + + + org.yaml + snakeyaml + + + com.carrotsearch + hppc + + + com.jcabi + * + + + net.objecthunter + exp4j + + org.apache.tinkerpop From 60dde9b223de36ebbaaf0d72061282895a48ed8c Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 31 Aug 2020 16:40:11 -0700 Subject: [PATCH 575/979] JAVA-2868: Cover reconnect-on-init in the manual --- changelog/README.md | 1 + manual/core/README.md | 4 ++++ manual/core/reconnection/README.md | 24 +++++++++++++++++++++--- 3 files changed, 26 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 13e40ef7f40..cf7d4c8b78a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [documentation] JAVA-2868: Cover reconnect-on-init in the manual - [improvement] JAVA-2827: Exclude unused Tinkerpop transitive dependencies - [improvement] JAVA-2827: Remove dependency to Tinkerpop gremlin-driver - [task] JAVA-2859: Upgrade Tinkerpop to 3.4.8 diff --git a/manual/core/README.md b/manual/core/README.md index 8c2ca80d419..385bf367625 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -56,6 +56,10 @@ We recommend that you take a look at the [reference configuration](configuration list of available options, and cross-reference with the sub-sections in this manual for more explanations. 
+By default, `CqlSession.builder().build()` fails immediately if the cluster is not available. If you +want to retry instead, you can set the [reconnect-on-init](reconnection/#at-init-time) option in the +configuration. + ##### Contact points If you don't specify any contact point, the driver defaults to `127.0.0.1:9042`: diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 86b0f08d280..8a9531fe9a6 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -7,11 +7,15 @@ When a connection is lost, try to reestablish it at configured intervals. * `advanced.reconnection-policy` in the configuration; defaults to exponential backoff, also available: constant delay, write your own. * applies to connection pools and the control connection. +* `advanced.reconnect-on-init` (false by default) controls whether the session tries to reconnect + when it is first created ----- -If the driver loses a connection to a node, it tries to re-establish it according to a configurable -policy. This is used in two places: +### At runtime + +If a running session loses a connection to a node, it tries to re-establish it according to a +configurable policy. This is used in two places: * [connection pools](../pooling/): for each node, a session has a fixed-size pool of connections to execute user requests. If one or more connections drop, a reconnection gets started for the pool; @@ -64,7 +68,21 @@ is the exponential one with the default values, and the control connection is in [load balancing policy](../load_balancing/) to get a query plan, which happens to start with node4. The connection succeeds, node4 is now the control node and the reconnection stops; * [t = 3] node2's pool tries to open the last missing connection, which succeeds. The pool is back - to its expected size, node2's reconnection stops. + to its expected size, node2's reconnection stops. 
+ +### At init time + +If a session fails to connect when it is first created, the default behavior is to abort and throw +an error immediately. + +If you prefer to retry, you can set the configuration option `advanced.reconnect-on-init` to true. +Instead of failing, the driver will keep attempting to initialize the session at regular intervals, +according to the reconnection policy, until at least one contact point replies. This can be useful +when dealing with containers and microservices. + +Note that the session is not accessible until it is fully ready: the `CqlSessionBuilder.build()` +call — or the future returned by `buildAsync()` — will not complete until the connection +was established. [ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html From a8c3638837e05aba3ae0307b438a552295fab9f6 Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 31 Aug 2020 17:09:11 -0700 Subject: [PATCH 576/979] JAVA-2869: Advise against using 4.5.x-4.6.0 in the upgrade guide --- changelog/README.md | 1 + upgrade_guide/README.md | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index cf7d4c8b78a..9e715b7d9c0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [documentation] JAVA-2869: Advise against using 4.5.x-4.6.0 in the upgrade guide - [documentation] JAVA-2868: Cover reconnect-on-init in the manual - [improvement] JAVA-2827: Exclude unused Tinkerpop transitive dependencies - [improvement] JAVA-2827: Remove dependency to Tinkerpop gremlin-driver diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 3faa1b8e1e5..167b20fc087 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,11 @@ ## Upgrade guide +### 4.5.x - 4.6.0 + 
+These versions are subject to [JAVA-2676](https://datastax-oss.atlassian.net/browse/JAVA-2676), a +bug that causes performance degradations in certain scenarios. We strongly recommend upgrading to at +least 4.6.1. + ### 4.4.0 Datastax Enterprise support is now available directly in the main driver. There is no longer a From 0ee273993fb320242413c77517023549313183ac Mon Sep 17 00:00:00 2001 From: olim7t Date: Mon, 31 Aug 2020 16:58:38 -0700 Subject: [PATCH 577/979] JAVA-2823: Make Astra more visible in the docs --- README.md | 5 +++++ changelog/README.md | 1 + 2 files changed, 6 insertions(+) diff --git a/README.md b/README.md index 1800e5ed0d4..4b376e70b94 100644 --- a/README.md +++ b/README.md @@ -59,6 +59,11 @@ It requires Java 8 or higher. Disclaimer: Some DataStax/DataStax Enterprise products might partially work on big-endian systems, but DataStax does not officially support these systems. +## Connecting to DataStax Astra + +The driver comes with built-in support for Astra, DataStax's cloud-native Cassandra-as-a-service +offering. See the dedicated [manual page](manual/cloud/) for more details. + ## Migrating from previous versions Java driver 4 is **not binary compatible** with previous versions. 
However, most of the concepts diff --git a/changelog/README.md b/changelog/README.md index 9e715b7d9c0..2d244d952ba 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.9.0 (in progress) +- [documentation] JAVA-2823: Make Astra more visible in the docs - [documentation] JAVA-2869: Advise against using 4.5.x-4.6.0 in the upgrade guide - [documentation] JAVA-2868: Cover reconnect-on-init in the manual - [improvement] JAVA-2827: Exclude unused Tinkerpop transitive dependencies From 89f92e1ba1a49aa5603b894812eece4961366526 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 1 Sep 2020 08:51:19 -0700 Subject: [PATCH 578/979] Update version in docs --- README.md | 4 +- changelog/README.md | 22 +++++- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++-- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 6 +- manual/core/load_balancing/README.md | 10 +-- manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 
24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 34 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 16 ++-- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 22 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 26 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- 
.../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- upgrade_guide/README.md | 2 +- 83 files changed, 401 insertions(+), 381 deletions(-) diff --git a/README.md b/README.md index 4b376e70b94..fc8427f7f73 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.8.0](https://github.com/datastax/java-driver/tree/4.8.0).* +[4.9.0](https://github.com/datastax/java-driver/tree/4.9.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.8 +[API docs]: https://docs.datastax.com/en/drivers/java/4.9 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index 2d244d952ba..bc433a2afe0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.9.0 (in progress) +### 4.9.0 - [documentation] JAVA-2823: Make Astra more visible in the docs - [documentation] JAVA-2869: Advise against using 4.5.x-4.6.0 in the upgrade guide @@ -497,6 +497,26 @@ changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements +## 3.10.2 + +- [bug] JAVA-2860: Avoid NPE if channel initialization crashes. + +## 3.10.1 + +- [bug] JAVA-2857: Fix NPE when built statements without parameters are logged at TRACE level. +- [bug] JAVA-2843: Successfully parse DSE table schema in OSS driver. + +## 3.10.0 + +- [improvement] JAVA-2676: Don't reschedule flusher after empty runs +- [new feature] JAVA-2772: Support new protocol v5 message format + +## 3.9.0 + +- [bug] JAVA-2627: Avoid logging error message including stack trace in request handler. +- [new feature] JAVA-2706: Add now_in_seconds to protocol v5 query messages. 
+- [improvement] JAVA-2730: Add support for Cassandra® 4.0 table options +- [improvement] JAVA-2702: Transient Replication Support for Cassandra® 4.0 ## 3.8.0 diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 421e8381dc0..a2a16ebfdbd 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 385bf367625..aeb21167fea 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : 
row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#builder-- 
+[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index c973d9d7c77..7d83c8ff748 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. 
Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). -[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 44bb113f559..66687509cf2 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -203,4 +203,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 6e63371538f..4e77c3d3a61 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 5ab0faf2722..922bcffba24 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.8.0 + 4.9.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.8.0 + 4.9.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 8378a395fe0..c99f7c29963 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -501,16 +501,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index ca40d1ec5ca..f688aa88172 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index ea9315d797f..5bda597058b 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html 
-[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC +[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- +[ExtraTypeCodecs.json(Class, 
ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 2c3bb302927..ec033d6e522 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
8dcfbc02c0c..a37704eede2 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 46a71335ef2..145f8a84f38 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 7e4e53ef6f3..072c2193c71 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 2e866b0ca41..04b447c7919 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 9492152b28c..7ac4a6c4e8f 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 0240461484f..929f80531ca 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index bb064e93744..fdddaff26f8 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 219a0ab1af1..382983a106d 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -589,6 +589,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 3cbb0daee41..dbb22712e32 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -323,10 +323,10 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 94d0980bdde..79cfc96524f 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index b0099f09f71..49da884802b 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index dedc0333cb3..a140a421ae2 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -260,15 +260,15 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 3be4ae48780..38765047f86 100644 --- 
a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index f2eab80d2f0..6704b8394c4 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 3c34da404de..9dec6c80f78 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index a81443acc5c..9e200a3ffb5 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index 36ffa75a54d..ef6f1068f2a 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index f7ef481767e..ac50c53da90 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. 
-[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 89c1b5dc56b..25f74a26579 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -365,18 +365,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. 
-[CqlSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 8a9531fe9a6..24290baf9ab 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the connection was established. 
-[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 1b4bb9cb717..dfcf23d4b4b 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 199f6415d83..8c6190f36b1 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -174,20 +174,20 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 1f785d93675..aa324c53c4f 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 48b4e051ade..de3a05f59ad 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 5fefa47229f..8587aa7ffc6 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index c844e7a1806..b691e1ea292 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index a6630a363b4..cc61e732e2b 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index d3a1a338932..17ffefd39bf 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 5c1afdb64aa..a9599d9ed12 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 77c0aa3e09d..17467e8d6c5 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index fe5ae0a1ce1..7eee629ae5a 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 5fdee51fea1..adb267bf07a 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 1c6119801e6..763f6d3ebba 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 1d22e2317d4..12584c140cb 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index f94241850ae..533ccb62e57 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index c53de01e30d..3a3b1d6e39e 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example. -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index 4c00af152b1..46749d10286 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 7fabf887f0d..a29af184550 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. 
To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. -[@Dao]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 44353ba8bda..18b3900eafc 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index d6de12f069c..5fc0550fb02 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 1ca1f2c8b75..b4228a0fce1 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index 74acecb11bf..5c1f2713e2d 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 66343a49188..1cf6800ade6 
100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index 7157469ea3c..b88b61d4e9e 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index c33da41fc3c..d053928b379 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -110,17 +110,17 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html 
-[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 60f4c568e6e..8a64b94a10f 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ 
b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 211b577331e..5e1cdcdc79d 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -142,19 +142,19 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/PagingIterable.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index f1aacf9293e..5232afcb277 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html 
+[SettableByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index c7c89c587de..4f26a84004f 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 9d5cd32ac8e..2b39db5c4a0 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
eaa95a8f4cd..29a83725750 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 40e46450792..2dd0e500f4e 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -227,8 +227,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index cff6e47e91c..0983292d859 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff 
--git a/manual/query_builder/README.md b/manual/query_builder/README.md index 8512a93066f..c3f7d5f5e11 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 3e728c42f6e..9b25e53235b 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS 
clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 4afde12b356..413366779b0 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 4c2d1b07439..d038fd9d44a 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 3d00366e8a5..20d0b184cf5 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 740ea49b859..1210491a1c6 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 5849047154d..9a6fba24819 100644 --- 
a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 758b3823c24..e1a17cf7d92 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index d4bff71b8de..2aa435e5b7c 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 39c0151631a..f283ba5b5ee 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 08f724f3860..d240c6bd282 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index a712b46e11e..ab0664d0280 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 8cb4920063d..b5d6646fcc7 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 0dd676ce397..e1819f0c128 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index a3e87aa7a51..66f8e4dfaac 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 
+105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index f1df985a3d2..3b8e1518e80 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 29f0ff72b28..19f9c2f8bb1 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 167b20fc087..8451778c92e 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -98,7 +98,7 @@ you can obtain in most web environments by calling `Thread.getContextClassLoader See the javadocs of [SessionBuilder.withClassLoader] for more information. -[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.8/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- ### 4.1.0 From 023278b183e48b2d515b6b85c54e5f446a7addb9 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 1 Sep 2020 09:00:30 -0700 Subject: [PATCH 579/979] [maven-release-plugin] prepare release 4.9.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 56e50782dea..8b2b2616cf8 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss 
java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-core-shaded - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-mapper-processor - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-mapper-runtime - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-query-builder - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-test-infra - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-metrics-micrometer - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss java-driver-metrics-microprofile - 4.9.0-SNAPSHOT + 4.9.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index e0d3c951c96..1ce4ef1acc9 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c7b49c57665..b26105b4000 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 3f9fd7bed6b..40539330eb7 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 22197e49f68..4ec271cd979 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.9.0-SNAPSHOT + 4.9.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 49a1746e9f0..395c19e7ac4 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 4aa6a97f79c..ad4c7f3a4ab 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 92395f0d88e..7a09b53d715 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index e21c3d56265..d8f955df15d 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 1126a42e726..aa57db90a71 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 6ea23c17796..1c8a7f98eac 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 90553385abb..44ae7e82b9e 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 
2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -934,7 +934,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.9.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 2dce6720a2b..8d96d705770 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 79a8ed60fef..67d3ff227c0 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0-SNAPSHOT + 4.9.0 java-driver-test-infra bundle From f877d551934a637d3c10a9622cff8f60ac1838a0 Mon Sep 17 00:00:00 2001 From: olim7t Date: Tue, 1 Sep 2020 09:03:20 -0700 Subject: [PATCH 580/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 8b2b2616cf8..7ee0bf21874 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss 
java-driver-mapper-runtime - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.9.0 + 4.10.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 1ce4ef1acc9..4d92a37736f 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index b26105b4000..761a7dc1399 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 40539330eb7..dd1e4e8c7b4 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 4ec271cd979..f2124c8c3f3 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.9.0 + 4.10.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 395c19e7ac4..d9daac364a4 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index ad4c7f3a4ab..9cbd8fe224c 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 7a09b53d715..0c881a55311 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index d8f955df15d..5dde458d947 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index aa57db90a71..3559b2af797 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1c8a7f98eac..9a3c62ae770 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 44ae7e82b9e..823d979fbab 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -934,7 +934,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.9.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8d96d705770..ddf668c530a 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 67d3ff227c0..bc259d4ac83 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.9.0 + 4.10.0-SNAPSHOT java-driver-test-infra bundle From 14277cb60c4a461feb81a1f839c127e480a3aaae Mon Sep 17 00:00:00 2001 From: olim7t Date: Wed, 2 Sep 2020 10:16:49 -0700 Subject: [PATCH 581/979] Prepare changelog for next iteration --- changelog/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index bc433a2afe0..ae4f025419d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,9 @@ +### 4.10.0 (in progress) + + ### 4.9.0 - [documentation] JAVA-2823: Make Astra more visible in the docs From 7f17bbcc379474c452be22018881bb303d32e9bf Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 3 Sep 2020 15:27:49 -0700 Subject: [PATCH 582/979] Clean up mapper schema validation docs --- manual/mapper/mapper/README.md | 2 +- .../oss/driver/api/mapper/MapperBuilder.java | 26 +++++++++++--- .../api/mapper/annotations/SchemaHint.java | 34 +++++-------------- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 2dd0e500f4e..53e5b0cf1fe 100644 --- a/manual/mapper/mapper/README.md +++ 
b/manual/mapper/mapper/README.md @@ -216,7 +216,7 @@ The following checks are then performed: * additionally, if the target element is a table, the primary key must be [properly annotated](../entities/#primary-key-columns) in the entity. -If any of those steps fails, an `IllegalArgumentException` is thrown. +If any of those steps fails, a warning is logged. Schema validation adds a small startup overhead, so once your application is stable you may want to disable it: diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java index 120e399f352..ec809009329 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java @@ -18,9 +18,12 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.Mapper; +import com.datastax.oss.driver.api.mapper.annotations.NamingStrategy; import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; +import com.datastax.oss.driver.api.mapper.annotations.SchemaHint; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.HashMap; @@ -143,10 +146,25 @@ public MapperBuilder withDefaultExecutionProfile( } /** - * When the new instance of a class annotated with {@code @Dao} is created an automatic check for - * schema validation is performed. It verifies if all {@code @Dao} entity fields are present in - * CQL table. If not the exception is thrown. 
This check has startup overhead so once your app is - * stable you may want to disable it. The schema Validation check is enabled by default. + * Whether to validate mapped entities against the database schema. + * + *

          If this is enabled, then every time a new DAO gets created, for each entity referenced in + * the DAO, the mapper will check that there is a corresponding table or UDT. + * + *

            + *
          • for each entity field, the database table or UDT must contain a column with the + * corresponding name (according to the {@link NamingStrategy}). + *
          • the types must be compatible, according to the {@link CodecRegistry} used by the session. + *
          • additionally, if the target element is a table, the primary key must be properly + * annotated in the entity. + *
          + * + * If any of those steps fails, a warning is logged. + * + *

          Schema validation is enabled by default; it adds a small startup overhead, so once your + * application is stable you may want to disable it. + * + * @see SchemaHint */ public MapperBuilder withSchemaValidationEnabled(boolean enableSchemaValidation) { customState.put(SCHEMA_VALIDATION_ENABLED_SETTING, enableSchemaValidation); diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java index 42356f55fe3..b33ec132f08 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java @@ -15,48 +15,30 @@ */ package com.datastax.oss.driver.api.mapper.annotations; +import com.datastax.oss.driver.api.mapper.MapperBuilder; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** - * This annotation could be used only on a class that is annotated with @Entity annotation. The - * logic will be applied only, if you are running mapper {@code withSchemaValidationEnabled(true)}. + * Annotates an entity to indicate which type of schema element it is supposed to map to. This is + * only used to optimize {@linkplain MapperBuilder#withSchemaValidationEnabled(boolean) schema + * validation}, it has no impact on query execution. * *

          Example: * *

            * @Entity
          - * @SchemaHint(targetElement = @SchemaHint.TargetElement.TABLE)
          + * @SchemaHint(targetElement = SchemaHint.TargetElement.TABLE)
            * public class Product {
            *   // fields of the entity
            * }
            * 
          * - *

          By default, if you will create an @Entity without the @SchemaHint annotation, the - * following logic will be applied when doing validation: - * - *

            - *
          1. Check if the given entity is a Table, if it is - validates if all fields of the Entity are - * present in the CQL table. - *
          2. If it is not a table, check if the given entity is a UDT. If this is a case check if all - * Entity fields are present in the CQL UDT type. - *
          3. If there is not information about Table or UDT it means that the given @Entity has no - * corresponding CQL definition and error is generated. - *
          - * - *

          If you want the mapper to generate code only to check the path for UDT or Table you can - * provide the @SchemaHint on the Entity: - * - *

            - *
          1. If you will set the {@code targetElement = TABLE}, then only the code path for checking CQL - * TABLE will be generated. If there is no corresponding CQL Table, then there is no check of - * UDT. The code throws an Exception denoting that CQL Table is missing for this Entity. - *
          2. If you will set the {@code targetElement = UDT}, then only the code path for checking CQL - * UDT will be generated. If there is no corresponding CQL UDT type, the code throws an - * Exception denoting that CQL UDT is missing for this Entity. - *
          + *

          By default, the mapper first tries to match the entity with a table, and if that doesn't work, + * with a UDT. This annotation allows you to provide a hint as to which check should be done, so + * that the mapper can skip the other one. */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) From 37b48e11762edc8366d38405d9e8d738c9b8e39e Mon Sep 17 00:00:00 2001 From: olim7t Date: Thu, 3 Sep 2020 16:11:45 -0700 Subject: [PATCH 583/979] Revert incorrect doc changes Validation errors are in fact surfaced as IllegalArgumentException. We only warn if no table or UDT can be found. --- manual/mapper/mapper/README.md | 2 +- .../java/com/datastax/oss/driver/api/mapper/MapperBuilder.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 53e5b0cf1fe..2dd0e500f4e 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -216,7 +216,7 @@ The following checks are then performed: * additionally, if the target element is a table, the primary key must be [properly annotated](../entities/#primary-key-columns) in the entity. -If any of those steps fails, a warning is logged. +If any of those steps fails, an `IllegalArgumentException` is thrown. Schema validation adds a small startup overhead, so once your application is stable you may want to disable it: diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java index ec809009329..7e93807afbd 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/MapperBuilder.java @@ -159,7 +159,7 @@ public MapperBuilder withDefaultExecutionProfile( * annotated in the entity. * * - * If any of those steps fails, a warning is logged. 
+ * If any of those steps fails, an {@link IllegalArgumentException} is thrown. * *

          Schema validation is enabled by default; it adds a small startup overhead, so once your * application is stable you may want to disable it. From 4507473d073cf60bc6c81a96546312940868b394 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 3 Oct 2020 12:43:57 +0200 Subject: [PATCH 584/979] Make static field DefaultConsistencyLevel.BY_CODE final --- .../datastax/oss/driver/api/core/DefaultConsistencyLevel.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java index acda65e6743..691428651ba 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultConsistencyLevel.java @@ -69,7 +69,7 @@ public boolean isSerial() { return this == SERIAL || this == LOCAL_SERIAL; } - private static Map BY_CODE = mapByCode(values()); + private static final Map BY_CODE = mapByCode(values()); private static Map mapByCode(DefaultConsistencyLevel[] levels) { ImmutableMap.Builder builder = ImmutableMap.builder(); From 9975dd9ebfefa0191d8a82df9b524cf6187e6872 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 3 Oct 2020 13:33:16 +0200 Subject: [PATCH 585/979] JAVA-2887: Handle composite profiles with more than one key and/or backed by only one profile Composite profiles with more than one key currently throw ClassCastException from CompositeDriverExecutionProfile.entrySet. Composite profiles backed by only one profile currently throw NullPointerException from CompositeDriverExecutionProfile.entrySet. 
--- changelog/README.md | 1 + .../CompositeDriverExecutionProfile.java | 41 ++++++++++++++----- .../composite/CompositeDriverConfigTest.java | 29 +++++++++++++ 3 files changed, 60 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index ae4f025419d..f949a0e6201 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2887: Handle composite profiles with more than one key and/or backed by only one profile ### 4.9.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java index 5d7df7b417f..c9ce0205ccb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverExecutionProfile.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedSet; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; import java.util.List; import java.util.Map; @@ -35,8 +36,8 @@ public class CompositeDriverExecutionProfile implements DriverExecutionProfile { private final DriverConfig fallbackConfig; private final String profileName; - private volatile DriverExecutionProfile primaryProfile; - private volatile DriverExecutionProfile fallbackProfile; + @Nullable private volatile DriverExecutionProfile primaryProfile; + @Nullable private volatile DriverExecutionProfile fallbackProfile; public CompositeDriverExecutionProfile( @NonNull DriverConfig primaryConfig, @@ -89,8 +90,13 @@ public String getName() { @Override public boolean isDefined(@NonNull DriverOption option) { - return 
(primaryProfile != null && primaryProfile.isDefined(option)) - || (fallbackProfile != null && fallbackProfile.isDefined(option)); + DriverExecutionProfile primaryProfile = this.primaryProfile; + if (primaryProfile != null && primaryProfile.isDefined(option)) { + return true; + } else { + DriverExecutionProfile fallbackProfile = this.fallbackProfile; + return fallbackProfile != null && fallbackProfile.isDefined(option); + } } @Override @@ -181,21 +187,34 @@ public List getDurationList(@NonNull DriverOption option) { private ValueT get( @NonNull DriverOption option, BiFunction getter) { + DriverExecutionProfile primaryProfile = this.primaryProfile; if (primaryProfile != null && primaryProfile.isDefined(option)) { return getter.apply(primaryProfile, option); - } else if (fallbackProfile != null && fallbackProfile.isDefined(option)) { - return getter.apply(fallbackProfile, option); } else { - throw new IllegalArgumentException("Unknown option: " + option); + DriverExecutionProfile fallbackProfile = this.fallbackProfile; + if (fallbackProfile != null && fallbackProfile.isDefined(option)) { + return getter.apply(fallbackProfile, option); + } else { + throw new IllegalArgumentException("Unknown option: " + option); + } } } @NonNull @Override public SortedSet> entrySet() { - SortedSet> result = new TreeSet<>(Map.Entry.comparingByKey()); - result.addAll(fallbackProfile.entrySet()); - result.addAll(primaryProfile.entrySet()); - return ImmutableSortedSet.copyOf(result); + DriverExecutionProfile primaryProfile = this.primaryProfile; + DriverExecutionProfile fallbackProfile = this.fallbackProfile; + if (primaryProfile != null && fallbackProfile != null) { + SortedSet> result = new TreeSet<>(Map.Entry.comparingByKey()); + result.addAll(fallbackProfile.entrySet()); + result.addAll(primaryProfile.entrySet()); + return ImmutableSortedSet.copyOf(Map.Entry.comparingByKey(), result); + } else if (primaryProfile != null) { + return primaryProfile.entrySet(); + } else { + assert 
fallbackProfile != null; + return fallbackProfile.entrySet(); + } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java index 1c6121b1121..0c456e5e1bb 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigTest.java @@ -16,7 +16,9 @@ package com.datastax.oss.driver.internal.core.config.composite; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.entry; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; @@ -59,6 +61,10 @@ public void should_use_value_from_primary_config() { .isTrue(); assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) .isEqualTo(1); + assertThat(compositeDefaultProfile.entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 1), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); } @Test @@ -70,6 +76,10 @@ public void should_ignore_value_from_fallback_config_if_defined_in_both() { .isTrue(); assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) .isEqualTo(1); + assertThat(compositeDefaultProfile.entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 1), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); } @Test @@ -80,6 +90,10 @@ public void should_use_value_from_fallback_config_if_not_defined_in_primary() { .isTrue(); 
assertThat(compositeDefaultProfile.getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) .isEqualTo(1); + assertThat(compositeDefaultProfile.entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 1), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); } @Test @@ -112,5 +126,20 @@ public void should_merge_profiles() { .getProfile("onlyInFallback") .getInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE)) .isEqualTo(4); + + assertThat(compositeConfig.getProfile("onlyInPrimary").entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 1), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); + + assertThat(compositeConfig.getProfile("inBoth").entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 2), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); + + assertThat(compositeConfig.getProfile("onlyInFallback").entrySet()) + .containsExactly( + entry(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE.getPath(), 4), + entry(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES.getPath(), 1)); } } From c8e7fc8e402a75c6d437b3b030555121b2092918 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Tue, 13 Oct 2020 03:32:10 -0700 Subject: [PATCH 586/979] JAVA-2647: Handle token types in QueryBuilder.literal() (#1501) --- changelog/README.md | 1 + .../driver/api/querybuilder/QueryBuilder.java | 15 +++++ .../api/querybuilder/TokenLiteralTest.java | 58 +++++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/TokenLiteralTest.java diff --git a/changelog/README.md b/changelog/README.md index f949a0e6201..197588a2056 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2647: Handle token types in QueryBuilder.literal() - [bug] JAVA-2887: Handle composite profiles with more than 
one key and/or backed by only one profile ### 4.9.0 diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/QueryBuilder.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/QueryBuilder.java index 6a42f1d0369..73f466e0637 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/QueryBuilder.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/QueryBuilder.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.UserDefinedType; @@ -32,6 +33,9 @@ import com.datastax.oss.driver.api.querybuilder.truncate.Truncate; import com.datastax.oss.driver.api.querybuilder.update.UpdateStart; import com.datastax.oss.driver.internal.core.metadata.schema.ShallowUserDefinedType; +import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedToken; +import com.datastax.oss.driver.internal.core.metadata.token.Murmur3Token; +import com.datastax.oss.driver.internal.core.metadata.token.RandomToken; import com.datastax.oss.driver.internal.querybuilder.ArithmeticOperator; import com.datastax.oss.driver.internal.querybuilder.DefaultLiteral; import com.datastax.oss.driver.internal.querybuilder.DefaultRaw; @@ -402,6 +406,15 @@ public static Literal literal(@Nullable Object value) { */ @NonNull public static Literal literal(@Nullable Object value, @NonNull CodecRegistry codecRegistry) { + if (value instanceof Murmur3Token) { + value = ((Murmur3Token) value).getValue(); + } else if (value instanceof ByteOrderedToken) { + value = ((ByteOrderedToken) value).getValue(); + } else if (value instanceof RandomToken) { + value = ((RandomToken) value).getValue(); + } else if (value instanceof 
Token) { + throw new IllegalArgumentException("Unsupported token type: " + value.getClass().getName()); + } try { return literal(value, (value == null) ? null : codecRegistry.codecFor(value)); } catch (CodecNotFoundException e) { @@ -424,6 +437,8 @@ public static Literal literal(@Nullable Object value, @NonNull CodecRegistry cod */ @NonNull public static Literal literal(@Nullable T value, @Nullable TypeCodec codec) { + // Don't handle Token here, if the user calls this directly we assume they passed a codec that + // can handle the value return new DefaultLiteral<>(value, codec); } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/TokenLiteralTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/TokenLiteralTest.java new file mode 100644 index 00000000000..ff14dcffe7b --- /dev/null +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/TokenLiteralTest.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.querybuilder; + +import static com.datastax.oss.driver.api.querybuilder.Assertions.assertThat; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; + +import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedTokenFactory; +import com.datastax.oss.driver.internal.core.metadata.token.Murmur3TokenFactory; +import com.datastax.oss.driver.internal.core.metadata.token.RandomTokenFactory; +import org.junit.Test; + +public class TokenLiteralTest { + + @Test + public void should_inline_murmur3_token_literal() { + assertThat( + selectFrom("test") + .all() + .whereToken("pk") + .isEqualTo(literal(Murmur3TokenFactory.MIN_TOKEN))) + .hasCql("SELECT * FROM test WHERE token(pk)=-9223372036854775808"); + } + + @Test + public void should_inline_byte_ordered_token_literal() { + assertThat( + selectFrom("test") + .all() + .whereToken("pk") + .isEqualTo(literal(ByteOrderedTokenFactory.MIN_TOKEN))) + .hasCql("SELECT * FROM test WHERE token(pk)=0x"); + } + + @Test + public void should_inline_random_token_literal() { + assertThat( + selectFrom("test") + .all() + .whereToken("pk") + .isEqualTo(literal(RandomTokenFactory.MIN_TOKEN))) + .hasCql("SELECT * FROM test WHERE token(pk)=-1"); + } +} From cefb914b6f78712da6bc88e0d0bcd2ead5281830 Mon Sep 17 00:00:00 2001 From: Sarvesh Kaushal Date: Tue, 13 Oct 2020 03:33:05 -0700 Subject: [PATCH 587/979] Removing unnecessary cast to String (#1503) --- .../oss/driver/internal/core/type/codec/BigIntCodecTest.java | 2 +- .../oss/driver/internal/core/type/codec/CounterCodecTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodecTest.java index a2d7fd91ee0..1dfefbdebdd 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodecTest.java @@ -67,7 +67,7 @@ public void should_fail_to_parse_invalid_input() { @Test(expected = IllegalArgumentException.class) public void should_fail_to_parse_if_out_of_range() { - parse(Long.toString(Long.MAX_VALUE) + "0"); + parse(Long.MAX_VALUE + "0"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CounterCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CounterCodecTest.java index 70dbd91c305..bd74fec9c72 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CounterCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CounterCodecTest.java @@ -67,7 +67,7 @@ public void should_fail_to_parse_invalid_input() { @Test(expected = IllegalArgumentException.class) public void should_fail_to_parse_if_out_of_range() { - parse(Long.toString(Long.MAX_VALUE) + "0"); + parse(Long.MAX_VALUE + "0"); } @Test From bf89221cbbcbf828378a43e21187f0ac1476a9fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 19 Nov 2020 16:31:02 +0100 Subject: [PATCH 588/979] Bump junit from 4.12 to 4.13.1 (#1506) Bumps [junit](https://github.com/junit-team/junit4) from 4.12 to 4.13.1. 
- [Release notes](https://github.com/junit-team/junit4/releases) - [Changelog](https://github.com/junit-team/junit4/blob/main/doc/ReleaseNotes4.12.md) - [Commits](https://github.com/junit-team/junit4/compare/r4.12...r4.13.1) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 823d979fbab..437ef30d7d2 100644 --- a/pom.xml +++ b/pom.xml @@ -63,7 +63,7 @@ 3.13.1 1.3 - 4.12 + 4.13.1 1.2.3 6.0.0 6.0.3 From d68e0f85d2b7e102d52dae42da2e17e72c024f33 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 19 Nov 2020 23:26:27 +0100 Subject: [PATCH 589/979] Make LazyReference.lock final --- .../oss/driver/internal/core/util/concurrent/LazyReference.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/LazyReference.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/LazyReference.java index b1d34dda6ea..ffa0a16fcb6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/LazyReference.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/LazyReference.java @@ -27,7 +27,7 @@ public class LazyReference { private final Supplier supplier; private final CycleDetector checker; private volatile T value; - private ReentrantLock lock = new ReentrantLock(); + private final ReentrantLock lock = new ReentrantLock(); public LazyReference(String name, Supplier supplier, CycleDetector cycleDetector) { this.name = name; From d585dad986ec9d6c849a665d47e2e0fffd1f6cd1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 12:10:48 +0100 Subject: [PATCH 590/979] JAVA-2905: Prevent new connections from using a protocol version higher than the negotiated one --- changelog/README.md | 1 + .../oss/driver/internal/core/session/DefaultSession.java 
| 9 ++++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 197588a2056..9ddce319d39 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2905: Prevent new connections from using a protocol version higher than the negotiated one - [bug] JAVA-2647: Handle token types in QueryBuilder.literal() - [bug] JAVA-2887: Handle composite profiles with more than one key and/or backed by only one profile diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 67b203b3724..b12c287ca60 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -403,7 +403,7 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { context .getProtocolVersionRegistry() .highestCommon(metadataManager.getMetadata().getNodes().values()); - if (!currentVersion.equals(bestVersion)) { + if (bestVersion.getCode() < currentVersion.getCode()) { LOG.info( "[{}] Negotiated protocol version {} for the initial contact point, " + "but other nodes only support {}, downgrading", @@ -417,6 +417,13 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { // of the control queries use any protocol-dependent feature. // Keep going as-is, the control connection might switch to the "correct" version later // if it reconnects to another node. 
+ } else if (bestVersion.getCode() > currentVersion.getCode()) { + LOG.info( + "[{}] Negotiated protocol version {} for the initial contact point, " + + "but cluster seems to support {}, keeping the negotiated version", + logPrefix, + currentVersion, + bestVersion); } } metadataManager From a0e4cced158538a4a444792a2e1e5cd5c3df68b9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 15:07:11 +0100 Subject: [PATCH 591/979] Remove occurrences of org.junit.rules.ExpectedException --- .../TinkerpopBufferPrimitiveCodecTest.java | 33 ++++------ .../typesafe/TypesafeDriverConfigTest.java | 4 -- ...cInferringLoadBalancingPolicyInitTest.java | 37 ++++++++---- .../DefaultLoadBalancingPolicyInitTest.java | 11 ++-- .../DefaultLoadBalancingPolicyTestBase.java | 4 -- .../core/metadata/DefaultEndPointTest.java | 12 ++-- .../protocol/ByteBufPrimitiveCodecTest.java | 33 ++++------ .../datastax/oss/driver/core/ConnectIT.java | 24 ++++---- .../core/ProtocolVersionMixedClusterIT.java | 34 ++++++----- .../core/config/DriverConfigValidationIT.java | 4 -- .../DriverExecutionProfileReloadIT.java | 22 +++---- .../DriverExecutionProfileSimulacronIT.java | 22 ++++--- .../driver/core/cql/BoundStatementCcmIT.java | 3 - .../core/cql/BoundStatementSimulacronIT.java | 21 +++---- .../oss/driver/core/cql/PagingStateIT.java | 22 +++---- .../driver/core/cql/PerRequestKeyspaceIT.java | 10 ++-- .../driver/core/cql/PreparedStatementIT.java | 12 ++-- .../oss/driver/core/cql/QueryTraceIT.java | 13 ++-- .../core/cql/SimpleStatementSimulacronIT.java | 12 ++-- .../core/session/RequestProcessorIT.java | 18 +++--- .../driver/core/throttling/ThrottlingIT.java | 16 ++--- .../type/codec/registry/CodecRegistryIT.java | 21 ++++--- .../oss/driver/mapper/ComputedIT.java | 13 ++-- .../oss/driver/mapper/PrimitivesIT.java | 4 -- .../mapper/QueryKeyspaceAndTableIT.java | 46 +++++++------- .../oss/driver/mapper/QueryReturnTypesIT.java | 16 +++-- .../driver/mapper/StatementAttributesIT.java | 10 +--- 
.../insert/RegularInsertTest.java | 60 ++++++++++--------- .../querybuilder/update/UpdateUsingTest.java | 58 +++++++++--------- 29 files changed, 293 insertions(+), 302 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java index 7dd240baf99..78e9aceeea3 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/protocol/TinkerpopBufferPrimitiveCodecTest.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.internal.core.protocol; import static com.datastax.dse.driver.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.dse.driver.Assertions; import com.datastax.dse.driver.internal.core.graph.binary.buffer.DseNettyBufferFactory; @@ -29,9 +30,7 @@ import java.nio.ByteBuffer; import java.util.function.Supplier; import org.apache.tinkerpop.gremlin.structure.io.Buffer; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; /** @@ -45,8 +44,6 @@ public class TinkerpopBufferPrimitiveCodecTest { private static final DseNettyBufferFactory factory = new DseNettyBufferFactory(); private final TinkerpopBufferPrimitiveCodec codec = new TinkerpopBufferPrimitiveCodec(factory); - @Rule public ExpectedException expectedException = ExpectedException.none(); - @Test public void should_concatenate() { Buffer left = factory.withBytes(0xca, 0xfe); @@ -93,8 +90,6 @@ public void should_read_inet_v6() { @Test public void should_fail_to_read_inet_if_length_invalid() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Invalid address length: 3 ([127, 0, 1])"); Buffer source = factory.withBytes( // length (as a byte) @@ 
-108,7 +103,9 @@ public void should_fail_to_read_inet_if_length_invalid() { 0x00, 0x23, 0x52); - codec.readInet(source); + assertThatThrownBy(() -> codec.readInet(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Invalid address length: 3 ([127, 0, 1])"); } @Test @@ -138,9 +135,6 @@ public void should_read_inetaddr_v6() { @Test public void should_fail_to_read_inetaddr_if_length_invalid() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Invalid address length: 3 ([127, 0, 1])"); - Buffer source = factory.withBytes( // length (as a byte) @@ -149,7 +143,9 @@ public void should_fail_to_read_inetaddr_if_length_invalid() { 0x7f, 0x00, 0x01); - codec.readInetAddr(source); + assertThatThrownBy(() -> codec.readInetAddr(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Invalid address length: 3 ([127, 0, 1])"); } @Test @@ -220,14 +216,12 @@ public void should_read_string(Supplier supplier) { @Test public void should_fail_to_read_string_if_not_enough_characters() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Not enough bytes to read an UTF-8 serialized string of size 4"); - Buffer source = factory.heap(); source.writeShort(4); - codec.readString(source); + assertThatThrownBy(() -> codec.readString(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Not enough bytes to read an UTF-8 serialized string of size 4"); } @Test @@ -250,13 +244,12 @@ public void should_read_long_string() { @Test public void should_fail_to_read_long_string_if_not_enough_characters() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Not enough bytes to read an UTF-8 serialized string of size 4"); Buffer source = factory.heap(4, 4); source.writeInt(4); - codec.readLongString(source); + assertThatThrownBy(() -> codec.readLongString(source)) + .isInstanceOf(IllegalArgumentException.class) + 
.hasMessage("Not enough bytes to read an UTF-8 serialized string of size 4"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java index b268df322de..019d50ab2a0 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java @@ -24,14 +24,10 @@ import com.typesafe.config.ConfigFactory; import java.util.HashMap; import java.util.Map; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class TypesafeDriverConfigTest { - @Rule public ExpectedException expectedException = ExpectedException.none(); - @Test public void should_load_minimal_config_with_no_profiles() { TypesafeDriverConfig config = parse("int1 = 42"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java index 71d1ef154c8..c0355ea5198 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.loadbalancing; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.api.Assertions.filter; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; @@ -102,13 +103,19 @@ public void should_require_local_dc_if_contact_points_from_different_dcs() { 
when(node2.getDatacenter()).thenReturn("dc2"); BasicLoadBalancingPolicy policy = createPolicy(); - thrown.expect(IllegalStateException.class); - thrown.expectMessage( - "No local DC was provided, but the contact points are from different DCs: node1=dc1, node2=dc2"); - // When - policy.init( - ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + Throwable t = + catchThrowable( + () -> + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), + distanceReporter)); + + // Then + assertThat(t) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "No local DC was provided, but the contact points are from different DCs: node1=dc1, node2=dc2"); } @Test @@ -121,13 +128,19 @@ public void should_require_local_dc_if_contact_points_have_null_dcs() { when(node2.getDatacenter()).thenReturn(null); BasicLoadBalancingPolicy policy = createPolicy(); - thrown.expect(IllegalStateException.class); - thrown.expectMessage( - "The local DC could not be inferred from contact points, please set it explicitly"); - // When - policy.init( - ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + Throwable t = + catchThrowable( + () -> + policy.init( + ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), + distanceReporter)); + + // Then + assertThat(t) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "The local DC could not be inferred from contact points, please set it explicitly"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index 2372e3de92d..2c5cd8eb0fe 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.loadbalancing; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.filter; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; @@ -102,12 +103,12 @@ public void should_require_local_dc_if_explicit_contact_points() { when(metadataManager.wasImplicitContactPoint()).thenReturn(false); DefaultLoadBalancingPolicy policy = createPolicy(); - thrown.expect(IllegalStateException.class); - thrown.expectMessage( - "Since you provided explicit contact points, the local DC must be explicitly set"); - // When - policy.init(ImmutableMap.of(UUID.randomUUID(), node2), distanceReporter); + assertThatThrownBy( + () -> policy.init(ImmutableMap.of(UUID.randomUUID(), node2), distanceReporter)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining( + "Since you provided explicit contact points, the local DC must be explicitly set"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java index f5ac866e4ef..c98ee523d02 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java @@ -32,8 +32,6 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import org.junit.After; import org.junit.Before; -import org.junit.Rule; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; @@ -44,8 
+42,6 @@ @RunWith(MockitoJUnitRunner.class) public abstract class DefaultLoadBalancingPolicyTestBase { - @Rule public ExpectedException thrown = ExpectedException.none(); - @Mock protected DefaultNode node1; @Mock protected DefaultNode node2; @Mock protected DefaultNode node3; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPointTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPointTest.java index 5c3689920c1..28e91b26606 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPointTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultEndPointTest.java @@ -16,16 +16,13 @@ package com.datastax.oss.driver.internal.core.metadata; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.net.InetSocketAddress; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class DefaultEndPointTest { - @Rule public ExpectedException thrown = ExpectedException.none(); - @Test public void should_create_from_host_name() { DefaultEndPoint endPoint = new DefaultEndPoint(new InetSocketAddress("localhost", 9042)); @@ -54,9 +51,8 @@ public void should_create_from_unresolved_address() { @Test public void should_reject_null_address() { - thrown.expect(NullPointerException.class); - thrown.expectMessage("address can't be null"); - - new DefaultEndPoint(null); + assertThatThrownBy(() -> new DefaultEndPoint(null)) + .isInstanceOf(NullPointerException.class) + .hasMessage("address can't be null"); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java index fadb80f871b..18ebb79ea59 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.protocol; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.internal.core.util.ByteBufs; import com.datastax.oss.protocol.internal.util.Bytes; @@ -24,9 +25,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.ByteBuffer; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; /** * Note: we don't test trivial methods that simply delegate to ByteBuf, nor default implementations @@ -35,8 +34,6 @@ public class ByteBufPrimitiveCodecTest { private ByteBufPrimitiveCodec codec = new ByteBufPrimitiveCodec(ByteBufAllocator.DEFAULT); - @Rule public ExpectedException expectedException = ExpectedException.none(); - @Test public void should_concatenate() { ByteBuf left = ByteBufs.wrap(0xca, 0xfe); @@ -91,8 +88,6 @@ public void should_read_inet_v6() { @Test public void should_fail_to_read_inet_if_length_invalid() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Invalid address length: 3 ([127, 0, 1])"); ByteBuf source = ByteBufs.wrap( // length (as a byte) @@ -106,7 +101,9 @@ public void should_fail_to_read_inet_if_length_invalid() { 0x00, 0x23, 0x52); - codec.readInet(source); + assertThatThrownBy(() -> codec.readInet(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Invalid address length: 3 ([127, 0, 1])"); } @Test @@ -136,9 +133,6 @@ public void should_read_inetaddr_v6() { @Test public void should_fail_to_read_inetaddr_if_length_invalid() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("Invalid address length: 3 ([127, 0, 
1])"); - ByteBuf source = ByteBufs.wrap( // length (as a byte) @@ -147,7 +141,9 @@ public void should_fail_to_read_inetaddr_if_length_invalid() { 0x7f, 0x00, 0x01); - codec.readInetAddr(source); + assertThatThrownBy(() -> codec.readInetAddr(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Invalid address length: 3 ([127, 0, 1])"); } @Test @@ -207,14 +203,12 @@ public void should_read_string() { @Test public void should_fail_to_read_string_if_not_enough_characters() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Not enough bytes to read an UTF-8 serialized string of size 4"); - ByteBuf source = codec.allocate(2); source.writeShort(4); - codec.readString(source); + assertThatThrownBy(() -> codec.readString(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Not enough bytes to read an UTF-8 serialized string of size 4"); } @Test @@ -237,13 +231,12 @@ public void should_read_long_string() { @Test public void should_fail_to_read_long_string_if_not_enough_characters() { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Not enough bytes to read an UTF-8 serialized string of size 4"); ByteBuf source = codec.allocate(4); source.writeInt(4); - codec.readLongString(source); + assertThatThrownBy(() -> codec.readLongString(source)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Not enough bytes to read an UTF-8 serialized string of size 4"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index 4f899151d29..d933643e647 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -19,6 +19,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static 
org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.AllNodesFailedException; @@ -47,10 +48,8 @@ import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; @Category(ParallelizableTests.class) public class ConnectIT { @@ -59,8 +58,6 @@ public class ConnectIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(2)); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void setup() { SIMULACRON_RULE.cluster().acceptConnections(); @@ -78,14 +75,14 @@ public void should_fail_fast_if_contact_points_unreachable_and_reconnection_disa // Given SIMULACRON_RULE.cluster().rejectConnections(0, RejectScope.STOP); - thrown.expect(AllNodesFailedException.class); - thrown.expectMessage( - "Could not reach any contact point, make sure you've provided valid addresses"); - // When - SessionUtils.newSession(SIMULACRON_RULE); + Throwable t = catchThrowable(() -> SessionUtils.newSession(SIMULACRON_RULE)); - // Then the exception is thrown + // Then + assertThat(t) + .isInstanceOf(AllNodesFailedException.class) + .hasMessageContaining( + "Could not reach any contact point, make sure you've provided valid addresses"); } @Test @@ -103,7 +100,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce .withDuration(DefaultDriverOption.RECONNECTION_BASE_DELAY, Duration.ofMillis(500)) .build(); CompletableFuture sessionFuture = - newSessionAsync(SIMULACRON_RULE, loader).toCompletableFuture(); + newSessionAsync(loader).toCompletableFuture(); // wait a bit to ensure we have a couple of reconnections, otherwise we 
might race and allow // reconnections before the initial attempt TimeUnit.SECONDS.sleep(2); @@ -182,10 +179,9 @@ public void should_mark_unreachable_contact_points_as_local_and_schedule_reconne } @SuppressWarnings("unchecked") - private CompletionStage newSessionAsync( - SimulacronRule serverRule, DriverConfigLoader loader) { + private CompletionStage newSessionAsync(DriverConfigLoader loader) { return SessionUtils.baseBuilder() - .addContactEndPoints(serverRule.getContactPoints()) + .addContactEndPoints(ConnectIT.SIMULACRON_RULE.getContactPoints()) .withConfigLoader(loader) .buildAsync(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java index 2dba7ae4ba9..3bd25b8c61d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionMixedClusterIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core; import static com.datastax.oss.driver.assertions.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.api.core.CqlSession; @@ -35,10 +36,8 @@ import com.datastax.oss.simulacron.server.BoundNode; import com.datastax.oss.simulacron.server.BoundTopic; import java.util.stream.Stream; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; /** * Covers protocol re-negotiation with a mixed cluster: if, after the initial connection and the @@ -47,7 +46,6 @@ */ @Category(ParallelizableTests.class) public class ProtocolVersionMixedClusterIT { - @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void should_downgrade_if_peer_does_not_support_negotiated_version() { @@ 
-113,19 +111,25 @@ public void should_keep_current_if_supported_by_all_peers() { @Test public void should_fail_if_peer_does_not_support_v3() { - thrown.expect(UnsupportedProtocolVersionException.class); - thrown.expectMessage( - "reports Cassandra version 2.0.9, but the driver only supports 2.1.0 and above"); - try (BoundCluster simulacron = mixedVersions("3.0.0", "2.0.9", "3.11"); - BoundNode contactPoint = simulacron.node(0); - CqlSession ignored = - (CqlSession) - SessionUtils.baseBuilder() - .addContactPoint(contactPoint.inetSocketAddress()) - .build()) { - fail("Cluster init should have failed"); - } + Throwable t = + catchThrowable( + () -> { + try (BoundCluster simulacron = mixedVersions("3.0.0", "2.0.9", "3.11"); + BoundNode contactPoint = simulacron.node(0); + CqlSession ignored = + (CqlSession) + SessionUtils.baseBuilder() + .addContactPoint(contactPoint.inetSocketAddress()) + .build()) { + fail("Cluster init should have failed"); + } + }); + + assertThat(t) + .isInstanceOf(UnsupportedProtocolVersionException.class) + .hasMessageContaining( + "reports Cassandra version 2.0.9, but the driver only supports 2.1.0 and above"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java index 5e208eed657..db3c56bceac 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java @@ -25,10 +25,8 @@ import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; @Category(ParallelizableTests.class) public class 
DriverConfigValidationIT { @@ -37,8 +35,6 @@ public class DriverConfigValidationIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Test public void should_fail_to_init_with_invalid_policy() { should_fail_to_init_with_invalid_policy(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java index c2f0971cedd..43105a0d6e2 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileReloadIT.java @@ -19,6 +19,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.junit.Assert.fail; import com.datastax.oss.driver.api.core.CqlSession; @@ -36,9 +37,7 @@ import java.util.concurrent.atomic.AtomicReference; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class DriverExecutionProfileReloadIT { @@ -46,8 +45,6 @@ public class DriverExecutionProfileReloadIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(3)); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void clearPrimes() { SIMULACRON_RULE.cluster().clearLogs(); @@ -55,7 +52,7 @@ public void clearPrimes() { } @Test - public void should_periodically_reload_configuration() throws Exception { + public void 
should_periodically_reload_configuration() { String query = "mockquery"; // Define a loader which configures a reload interval of 2s and current value of configSource. AtomicReference configSource = new AtomicReference<>(""); @@ -93,7 +90,7 @@ public void should_periodically_reload_configuration() throws Exception { } @Test - public void should_reload_configuration_when_event_fired() throws Exception { + public void should_reload_configuration_when_event_fired() { String query = "mockquery"; // Define a loader which configures no automatic reloads and current value of configSource. AtomicReference configSource = new AtomicReference<>(""); @@ -132,7 +129,7 @@ public void should_reload_configuration_when_event_fired() throws Exception { } @Test - public void should_not_allow_dynamically_adding_profile() throws Exception { + public void should_not_allow_dynamically_adding_profile() { String query = "mockquery"; // Define a loader which configures a reload interval of 2s and current value of configSource. AtomicReference configSource = new AtomicReference<>(""); @@ -164,13 +161,18 @@ public void should_not_allow_dynamically_adding_profile() throws Exception { // Execute again, should expect to fail again because doesn't allow to dynamically define // profile. - thrown.expect(IllegalArgumentException.class); - session.execute(SimpleStatement.builder(query).setExecutionProfileName("slow").build()); + Throwable t = + catchThrowable( + () -> + session.execute( + SimpleStatement.builder(query).setExecutionProfileName("slow").build())); + + assertThat(t).isInstanceOf(IllegalArgumentException.class); } } @Test - public void should_reload_profile_config_when_reloading_config() throws Exception { + public void should_reload_profile_config_when_reloading_config() { String query = "mockquery"; // Define a loader which configures a reload interval of 2s and current value of configSource. // Define initial profile settings so it initially exists. 
diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java index 763e816b746..8fe6d9fd573 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverExecutionProfileSimulacronIT.java @@ -19,6 +19,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.serverError; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.junit.Assert.fail; import com.datastax.oss.driver.api.core.AllNodesFailedException; @@ -38,10 +39,8 @@ import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; @Category(ParallelizableTests.class) public class DriverExecutionProfileSimulacronIT { @@ -50,8 +49,6 @@ public class DriverExecutionProfileSimulacronIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(3)); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void clearPrimes() { SIMULACRON_RULE.cluster().clearLogs(); @@ -66,9 +63,11 @@ public void should_fail_if_config_profile_specified_doesnt_exist() { .setExecutionProfileName("IDONTEXIST") .build(); - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Unknown profile 'IDONTEXIST'. 
Check your configuration"); - session.execute(statement); + Throwable t = catchThrowable(() -> session.execute(statement)); + + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Unknown profile 'IDONTEXIST'. Check your configuration."); } } @@ -119,8 +118,13 @@ public void should_use_profile_default_idempotence() { } // Execute query with profile, should retry on all hosts since query is idempotent. - thrown.expect(AllNodesFailedException.class); - session.execute(SimpleStatement.builder(query).setExecutionProfileName("idem").build()); + Throwable t = + catchThrowable( + () -> + session.execute( + SimpleStatement.builder(query).setExecutionProfileName("idem").build())); + + assertThat(t).isInstanceOf(AllNodesFailedException.class); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 09e27f657ec..75748d37a6c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -60,7 +60,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestName; import org.junit.rules.TestRule; @@ -84,8 +83,6 @@ public class BoundStatementCcmIT { @Rule public TestName name = new TestName(); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static final String KEY = "test"; private static final int VALUE = 7; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java index 54b3333dbdf..4f31dff1717 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java @@ -19,6 +19,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.query; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; @@ -40,10 +41,8 @@ import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; @Category(ParallelizableTests.class) public class BoundStatementSimulacronIT { @@ -52,8 +51,6 @@ public class BoundStatementSimulacronIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void clearPrimes() { SIMULACRON_RULE.cluster().clearLogs(); @@ -148,10 +145,11 @@ public void should_use_timeout_from_simple_statement() { .build(); PreparedStatement prepared = session.prepare(st); - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT1S"); + Throwable t = catchThrowable(() -> session.execute(prepared.bind(0))); - session.execute(prepared.bind(0)); + assertThat(t) + .isInstanceOf(DriverTimeoutException.class) + .hasMessage("Query timed out after PT1S"); } } @@ -179,10 +177,13 @@ public void should_use_timeout() { .build(); PreparedStatement prepared = session.prepare(st); - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT0.15S"); + Throwable t = + catchThrowable( + () -> 
session.execute(prepared.bind(0).setTimeout(Duration.ofMillis(150)))); - session.execute(prepared.bind(0).setTimeout(Duration.ofMillis(150))); + assertThat(t) + .isInstanceOf(DriverTimeoutException.class) + .hasMessage("Query timed out after PT0.15S"); } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java index ca0b8c34bc0..689c2390db7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.api.core.CqlSession; @@ -35,10 +36,8 @@ import java.util.function.UnaryOperator; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -51,8 +50,6 @@ public class PagingStateIT { @ClassRule public static TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void setupSchema() { CqlSession session = SESSION_RULE.session(); @@ -158,13 +155,16 @@ private void should_fail(String query1, int value1, String query2, int value2) { ResultSet resultSet = session.execute(boundStatement1); PagingState pagingState = resultSet.getExecutionInfo().getSafePagingState(); - thrown.expect(IllegalArgumentException.class); - @SuppressWarnings("unused") - BoundStatement ignored = - session - .prepare(SimpleStatement.newInstance(query2).setPageSize(15)) - .bind(value2) - 
.setPagingState(pagingState); + @SuppressWarnings("ResultOfMethodCallIgnored") + Throwable t = + catchThrowable( + () -> + session + .prepare(SimpleStatement.newInstance(query2).setPageSize(15)) + .bind(value2) + .setPagingState(pagingState)); + + assertThat(t).isInstanceOf(IllegalArgumentException.class); } static class IntWrapper { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java index 32e9e331c52..de6be0afe61 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -38,7 +39,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestName; import org.junit.rules.TestRule; @@ -60,7 +60,6 @@ public class PerRequestKeyspaceIT { @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - @Rule public ExpectedException thrown = ExpectedException.none(); @Rule public TestName nameRule = new TestName(); @Before @@ -113,9 +112,10 @@ private void should_reject_statement_with_keyspace_in_protocol_v4(Statement stat .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") .build(); try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("Can't use per-request keyspace with protocol V4"); - session.execute(statement); + Throwable t = 
catchThrowable(() -> session.execute(statement)); + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Can't use per-request keyspace with protocol V4"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 2e25f600f03..1e7e91084ba 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.catchThrowable; import com.codahale.metrics.Gauge; import com.datastax.oss.driver.api.core.CqlSession; @@ -51,7 +52,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -79,8 +79,6 @@ public class PreparedStatementIT { @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void setupSchema() { for (String query : @@ -277,11 +275,11 @@ public void should_fail_to_reprepare_if_query_becomes_invalid() { session.prepare("SELECT a, b, c, d FROM prepared_statement_test WHERE a = ?"); session.execute("ALTER TABLE prepared_statement_test DROP d"); - thrown.expect(InvalidQueryException.class); - thrown.expectMessage("Undefined column name d"); - // When - session.execute(ps.bind()); + Throwable t = catchThrowable(() -> session.execute(ps.bind())); + + // Then + assertThat(t).isInstanceOf(InvalidQueryException.class).hasMessage("Undefined column name d"); } @Test diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index 33eddb2afa2..567a1263310 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; @@ -29,10 +30,8 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -46,8 +45,6 @@ public class QueryTraceIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Test public void should_not_have_tracing_id_when_tracing_disabled() { ExecutionInfo executionInfo = @@ -58,9 +55,11 @@ public void should_not_have_tracing_id_when_tracing_disabled() { assertThat(executionInfo.getTracingId()).isNull(); - thrown.expect(IllegalStateException.class); - thrown.expectMessage("Tracing was disabled for this request"); - executionInfo.getQueryTrace(); + Throwable t = catchThrowable(executionInfo::getQueryTrace); + + assertThat(t) + .isInstanceOf(IllegalStateException.class) + .hasMessage("Tracing was disabled for this request"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java index 
1c6fa8f2737..e3daeaf0a03 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/SimpleStatementSimulacronIT.java @@ -18,6 +18,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.noRows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; @@ -35,10 +36,8 @@ import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -54,8 +53,6 @@ public class SimpleStatementSimulacronIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - @Before public void clearPrimes() { SIMULACRON_RULE.cluster().clearLogs(); @@ -95,9 +92,10 @@ public void should_use_timeout() { .setConsistencyLevel(DefaultConsistencyLevel.ONE) .build(); - thrown.expect(DriverTimeoutException.class); - thrown.expectMessage("Query timed out after PT1S"); + Throwable t = catchThrowable(() -> SESSION_RULE.session().execute(st)); - SESSION_RULE.session().execute(st); + assertThat(t) + .isInstanceOf(DriverTimeoutException.class) + .hasMessage("Query timed out after PT1S"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java index 7fe18e3044f..ef6b83f57dc 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/RequestProcessorIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.session; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; @@ -37,10 +38,8 @@ import com.google.common.util.concurrent.Uninterruptibles; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -72,8 +71,6 @@ public class RequestProcessorIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - public static final String KEY = "test"; @BeforeClass @@ -150,9 +147,14 @@ public void should_throw_illegal_argument_exception_if_no_matching_processor_fou // Since cluster does not have a processor registered for returning ListenableFuture, an // IllegalArgumentException // should be thrown. 
- thrown.expect(IllegalArgumentException.class); - SESSION_RULE - .session() - .execute(SimpleStatement.newInstance("select * from test"), GuavaSession.ASYNC); + Throwable t = + catchThrowable( + () -> + SESSION_RULE + .session() + .execute( + SimpleStatement.newInstance("select * from test"), GuavaSession.ASYNC)); + + assertThat(t).isInstanceOf(IllegalArgumentException.class); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java index 12d21dce299..ef90cac4e2e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java @@ -15,6 +15,9 @@ */ package com.datastax.oss.driver.core.throttling; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; + import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -27,14 +30,12 @@ import java.util.concurrent.TimeUnit; import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class ThrottlingIT { private static final String QUERY = "select * from foo"; @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void should_reject_request_when_throttling_by_concurrency() { @@ -66,12 +67,13 @@ public void should_reject_request_when_throttling_by_concurrency() { } // The next query should be rejected - thrown.expect(RequestThrottlingException.class); - thrown.expectMessage( - "The session has reached its maximum capacity " - + "(concurrent requests: 10, queue size: 10)"); + Throwable t = 
catchThrowable(() -> session.execute(QUERY)); - session.execute(QUERY); + assertThat(t) + .isInstanceOf(RequestThrottlingException.class) + .hasMessage( + "The session has reached its maximum capacity " + + "(concurrent requests: 10, queue size: 10)"); } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index d8482681e84..9988e0c18e0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.type.codec.registry; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.ProtocolVersion; @@ -57,7 +58,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestName; import org.junit.rules.TestRule; @@ -74,8 +74,6 @@ public class CodecRegistryIT { @Rule public TestName name = new TestName(); - @Rule public ExpectedException thrown = ExpectedException.none(); - @BeforeClass public static void createSchema() { // table with simple primary key, single cell. @@ -153,10 +151,17 @@ public void should_throw_exception_if_no_codec_registered_for_type_set() { PreparedStatement prepared = SESSION_RULE.session().prepare("INSERT INTO test (k, v) values (?, ?)"); - thrown.expect(CodecNotFoundException.class); - // float value for int column should not work since no applicable codec. 
- prepared.boundStatementBuilder().setString(0, name.getMethodName()).setFloat(1, 3.14f).build(); + Throwable t = + catchThrowable( + () -> + prepared + .boundStatementBuilder() + .setString(0, name.getMethodName()) + .setFloat(1, 3.14f) + .build()); + + assertThat(t).isInstanceOf(CodecNotFoundException.class); } @Test @@ -182,9 +187,9 @@ public void should_throw_exception_if_no_codec_registered_for_type_get() { // should not be able to access int column as float as no codec is registered to handle that. Row row = rows.iterator().next(); - thrown.expect(CodecNotFoundException.class); + Throwable t = catchThrowable(() -> assertThat(row.getFloat("v")).isEqualTo(3.0f)); - assertThat(row.getFloat("v")).isEqualTo(3.0f); + assertThat(t).isInstanceOf(CodecNotFoundException.class); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java index 48952b0c823..2ca7ecef743 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ComputedIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static com.datastax.oss.driver.assertions.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.data.Offset.offset; import com.datastax.oss.driver.api.core.CqlIdentifier; @@ -50,10 +51,8 @@ import java.util.concurrent.atomic.AtomicInteger; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -67,8 +66,6 @@ public class ComputedIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = 
ExpectedException.none(); - private static TestMapper mapper; private static AtomicInteger keyProvider = new AtomicInteger(); @@ -225,9 +222,11 @@ public void should_fail_if_alias_does_not_match_cqlName() { 1)); // should raise an exception as 'writetime' is not found in result set. - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("writetime is not a column in this row"); - computedDao.get(result.one()); + Throwable t = catchThrowable(() -> computedDao.get(result.one())); + + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("writetime is not a column in this row"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java index 73896740ca1..9cc4004690d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java @@ -36,10 +36,8 @@ import java.util.Objects; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -54,8 +52,6 @@ public class PrimitivesIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static TestMapper mapper; @BeforeClass diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java index 571852f09d5..f3ad91c1c30 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryKeyspaceAndTableIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; @@ -35,10 +36,8 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -56,8 +55,6 @@ public class QueryKeyspaceAndTableIT { private static final CqlIdentifier OTHER_KEYSPACE = CqlIdentifier.fromCql(QueryKeyspaceAndTableIT.class.getSimpleName() + "_alt"); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static TestMapper mapper; @BeforeClass @@ -93,22 +90,25 @@ public void should_substitute_keyspaceId_and_tableId() { @Test public void should_fail_to_substitute_keyspaceId_if_dao_has_no_keyspace() { - thrown.expect(MapperException.class); - thrown.expectMessage( - "Cannot substitute ${keyspaceId} in query " - + "'SELECT count(*) FROM ${keyspaceId}.${tableId}': " - + "the DAO wasn't built with a keyspace"); - mapper.daoWithKeyspaceAndTableId(null, FOO_TABLE_ID); + Throwable t = catchThrowable(() -> mapper.daoWithKeyspaceAndTableId(null, FOO_TABLE_ID)); + assertThat(t) + .isInstanceOf(MapperException.class) + .hasMessage( + "Cannot substitute ${keyspaceId} in query " + + "'SELECT count(*) FROM ${keyspaceId}.${tableId}': " + + "the DAO wasn't built with a keyspace"); } @Test public void should_fail_to_substitute_tableId_if_dao_has_no_table() { - thrown.expect(MapperException.class); - thrown.expectMessage( - "Cannot substitute ${tableId} in query " - + "'SELECT count(*) FROM 
${keyspaceId}.${tableId}': " - + "the DAO wasn't built with a table"); - mapper.daoWithKeyspaceAndTableId(SESSION_RULE.keyspace(), null); + Throwable t = + catchThrowable(() -> mapper.daoWithKeyspaceAndTableId(SESSION_RULE.keyspace(), null)); + assertThat(t) + .isInstanceOf(MapperException.class) + .hasMessage( + "Cannot substitute ${tableId} in query " + + "'SELECT count(*) FROM ${keyspaceId}.${tableId}': " + + "the DAO wasn't built with a table"); } @Test @@ -125,12 +125,14 @@ public void should_not_use_keyspace_in_qualifiedTableId_when_dao_has_no_keyspace @Test public void should_fail_to_substitute_qualifiedTableId_if_dao_has_no_table() { - thrown.expect(MapperException.class); - thrown.expectMessage( - "Cannot substitute ${qualifiedTableId} in query " - + "'SELECT count(*) FROM ${qualifiedTableId}': " - + "the DAO wasn't built with a table"); - mapper.daoWithQualifiedTableId(SESSION_RULE.keyspace(), null); + Throwable t = + catchThrowable(() -> mapper.daoWithQualifiedTableId(SESSION_RULE.keyspace(), null)); + assertThat(t) + .isInstanceOf(MapperException.class) + .hasMessage( + "Cannot substitute ${qualifiedTableId} in query " + + "'SELECT count(*) FROM ${qualifiedTableId}': " + + "the DAO wasn't built with a table"); } @Dao diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java index bf2091ef56e..716b35faebc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; @@ -49,10 +50,8 @@ import org.junit.Before; import 
org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -66,8 +65,6 @@ public class QueryReturnTypesIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static TestDao dao; @BeforeClass @@ -122,11 +119,12 @@ public void should_execute_count_query_and_map_to_long() { @Test public void should_fail_to_map_to_long_if_query_returns_other_type() { - thrown.expect(MapperException.class); - thrown.expectMessage( - "Expected the query to return a column with CQL type BIGINT in first position " - + "(return type long is intended for COUNT queries)"); - dao.wrongCount(); + Throwable t = catchThrowable(() -> dao.wrongCount()); + assertThat(t) + .isInstanceOf(MapperException.class) + .hasMessage( + "Expected the query to return a column with CQL type BIGINT in first position " + + "(return type long is intended for COUNT queries)"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java index 61f0fbf6c7a..d32fd1f9517 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java @@ -20,6 +20,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; import static java.nio.charset.StandardCharsets.UTF_8; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; @@ -54,9 +55,7 
@@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; @@ -70,8 +69,6 @@ public class StatementAttributesIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(SIMULACRON_RULE).around(SESSION_RULE); - @Rule public ExpectedException thrown = ExpectedException.none(); - private static String PAGING_STATE = "paging_state"; private static int PAGE_SIZE = 13; @@ -192,9 +189,8 @@ public void should_honor_annotation_attributes_on_update() { @Test public void should_fail_runtime_attributes_bad() { - thrown.expect(IllegalStateException.class); - thrown.expectMessage("mock error"); - dao.save(simple, badStatementFunction); + Throwable t = catchThrowable(() -> dao.save(simple, badStatementFunction)); + assertThat(t).isInstanceOf(IllegalStateException.class).hasMessage("mock error"); } private static void primeInsertQuery() { diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java index 30e39c4836a..0873cec6da1 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java @@ -19,19 +19,16 @@ import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.insertInto; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.querybuilder.term.Term; import com.datastax.oss.driver.internal.querybuilder.insert.DefaultInsert; import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.Map; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class RegularInsertTest { - @Rule public ExpectedException thrown = ExpectedException.none(); - @Test public void should_generate_column_assignments() { assertThat(insertInto("foo").value("a", literal(1)).value("b", literal(2))) @@ -123,18 +120,22 @@ public void should_throw_exception_with_invalid_ttl() { DefaultInsert defaultInsert = (DefaultInsert) insertInto("foo").value("a", bindMarker()).usingTtl(10); - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("TTL value must be a BindMarker or an Integer"); - - new DefaultInsert( - defaultInsert.getKeyspace(), - defaultInsert.getTable(), - (Term) defaultInsert.getJson(), - defaultInsert.getMissingJsonBehavior(), - defaultInsert.getAssignments(), - defaultInsert.getTimestamp(), - new Object(), // invalid TTL object - defaultInsert.isIfNotExists()); + Throwable t = + catchThrowable( + () -> + new DefaultInsert( + defaultInsert.getKeyspace(), + defaultInsert.getTable(), + (Term) defaultInsert.getJson(), + defaultInsert.getMissingJsonBehavior(), + defaultInsert.getAssignments(), + defaultInsert.getTimestamp(), + new Object(), // invalid TTL object + defaultInsert.isIfNotExists())); + + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("TTL value must be a BindMarker or an Integer"); } @Test @@ -142,17 +143,20 @@ public void should_throw_exception_with_invalid_timestamp() { DefaultInsert defaultInsert = (DefaultInsert) insertInto("foo").value("a", bindMarker()).usingTimestamp(1); - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("TIMESTAMP value must be a BindMarker or a Long"); - - new DefaultInsert( - defaultInsert.getKeyspace(), - defaultInsert.getTable(), - (Term) defaultInsert.getJson(), - defaultInsert.getMissingJsonBehavior(), - defaultInsert.getAssignments(), - new 
Object(), // invalid timestamp object) - defaultInsert.getTtlInSeconds(), - defaultInsert.isIfNotExists()); + Throwable t = + catchThrowable( + () -> + new DefaultInsert( + defaultInsert.getKeyspace(), + defaultInsert.getTable(), + (Term) defaultInsert.getJson(), + defaultInsert.getMissingJsonBehavior(), + defaultInsert.getAssignments(), + new Object(), // invalid timestamp object) + defaultInsert.getTtlInSeconds(), + defaultInsert.isIfNotExists())); + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("TIMESTAMP value must be a BindMarker or a Long"); } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateUsingTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateUsingTest.java index 591a53f2200..1a96b05e4f8 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateUsingTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateUsingTest.java @@ -18,16 +18,13 @@ import static com.datastax.oss.driver.api.querybuilder.Assertions.assertThat; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.update; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.internal.querybuilder.update.DefaultUpdate; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; public class UpdateUsingTest { - @Rule public ExpectedException thrown = ExpectedException.none(); - @Test public void should_generate_using_timestamp_clause() { assertThat( @@ -129,18 +126,22 @@ public void should_throw_exception_with_invalid_ttl() { .whereColumn("k") .isEqualTo(bindMarker()); - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("TTL value must be a BindMarker or an Integer"); + Throwable t = + catchThrowable( + () -> + new 
DefaultUpdate( + defaultUpdate.getKeyspace(), + defaultUpdate.getTable(), + defaultUpdate.getTimestamp(), + new Object(), // invalid TTL object + defaultUpdate.getAssignments(), + defaultUpdate.getRelations(), + defaultUpdate.isIfExists(), + defaultUpdate.getConditions())); - new DefaultUpdate( - defaultUpdate.getKeyspace(), - defaultUpdate.getTable(), - defaultUpdate.getTimestamp(), - new Object(), // invalid TTL object - defaultUpdate.getAssignments(), - defaultUpdate.getRelations(), - defaultUpdate.isIfExists(), - defaultUpdate.getConditions()); + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("TTL value must be a BindMarker or an Integer"); } @Test @@ -153,17 +154,20 @@ public void should_throw_exception_with_invalid_timestamp() { .whereColumn("k") .isEqualTo(bindMarker()); - thrown.expect(IllegalArgumentException.class); - thrown.expectMessage("TIMESTAMP value must be a BindMarker or a Long"); - - new DefaultUpdate( - defaultUpdate.getKeyspace(), - defaultUpdate.getTable(), - new Object(), // invalid timestamp object - defaultUpdate.getTtl(), - defaultUpdate.getAssignments(), - defaultUpdate.getRelations(), - defaultUpdate.isIfExists(), - defaultUpdate.getConditions()); + Throwable t = + catchThrowable( + () -> + new DefaultUpdate( + defaultUpdate.getKeyspace(), + defaultUpdate.getTable(), + new Object(), // invalid timestamp object + defaultUpdate.getTtl(), + defaultUpdate.getAssignments(), + defaultUpdate.getRelations(), + defaultUpdate.isIfExists(), + defaultUpdate.getConditions())); + assertThat(t) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("TIMESTAMP value must be a BindMarker or a Long"); } } From d86ba858b8b0bd58f1f9609acc7582b36dc85dc1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 18:23:06 +0100 Subject: [PATCH 592/979] Convert field to local variable --- .../driver/internal/core/session/DefaultSessionPoolsTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff 
--git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java index b42f281a02a..88818f6929b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/DefaultSessionPoolsTest.java @@ -105,14 +105,13 @@ public class DefaultSessionPoolsTest { private DefaultNode node1; private DefaultNode node2; private DefaultNode node3; - private DefaultEventLoopGroup adminEventLoopGroup; private EventBus eventBus; @Before public void setup() { MockitoAnnotations.initMocks(this); - adminEventLoopGroup = new DefaultEventLoopGroup(1); + DefaultEventLoopGroup adminEventLoopGroup = new DefaultEventLoopGroup(1); when(nettyOptions.adminEventExecutorGroup()).thenReturn(adminEventLoopGroup); when(context.getNettyOptions()).thenReturn(nettyOptions); @@ -153,6 +152,7 @@ public void setup() { node1 = mockLocalNode(1); node2 = mockLocalNode(2); node3 = mockLocalNode(3); + @SuppressWarnings("ConstantConditions") ImmutableMap nodes = ImmutableMap.of( node1.getHostId(), node1, From bc0f569574a631387e37935cfc1b013fd9cb2510 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 18:23:49 +0100 Subject: [PATCH 593/979] Replace occurrences of Constellation by Astra --- .../driver/api/core/session/SessionBuilder.java | 14 +++++++------- .../examples/mapper/KillrVideoMapperExample.java | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index f4bab46022e..2ee551a2bf6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -497,10 
+497,10 @@ public SelfT withClassLoader(@Nullable ClassLoader classLoader) { * the provided {@link Path}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Constellation console that contains the connection information, then instruct the + * DataStax Astra console that contains the connection information, then instruct the * driver to read its contents using either this method or one if its variants. * - *

          For more information, please refer to the DataStax Constellation documentation. + *

          For more information, please refer to the DataStax Astra documentation. * * @param cloudConfigPath Path to the secure connect bundle zip file. * @see #withCloudSecureConnectBundle(URL) @@ -534,10 +534,10 @@ public SelfT withCodecRegistry(@Nullable MutableCodecRegistry codecRegistry) { * the provided {@link URL}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Constellation console that contains the connection information, then instruct the + * DataStax Astra console that contains the connection information, then instruct the * driver to read its contents using either this method or one if its variants. * - *

          For more information, please refer to the DataStax Constellation documentation. + *

          For more information, please refer to the DataStax Astra documentation. * * @param cloudConfigUrl URL to the secure connect bundle zip file. * @see #withCloudSecureConnectBundle(Path) @@ -554,10 +554,10 @@ public SelfT withCloudSecureConnectBundle(@NonNull URL cloudConfigUrl) { * the provided {@link InputStream}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Constellation console that contains the connection information, then instruct the + * DataStax Astra console that contains the connection information, then instruct the * driver to read its contents using either this method or one if its variants. * - *

          For more information, please refer to the DataStax Constellation documentation. + *

          For more information, please refer to the DataStax Astra documentation. * *

          Note that the provided stream will be consumed and closed when either {@link * #build()} or {@link #buildAsync()} are called; attempting to reuse it afterwards will result in @@ -584,7 +584,7 @@ public SelfT withCloudSecureConnectBundle(@NonNull InputStream cloudConfigInputS * monitor tailored for Cloud deployments. This topology monitor assumes that the target cluster * should be contacted through the proxy specified here, using SNI routing. * - *

          For more information, please refer to the DataStax Constellation documentation. + *

          For more information, please refer to the DataStax Astra documentation. * * @param cloudProxyAddress The address of the Cloud proxy to use. * @see Server Name Indication diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java index 4c845ba89e9..4ad547767c8 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java @@ -112,7 +112,7 @@ public static void main(String[] args) { Video video = new Video(); video.setUserid(user.getUserid()); video.setName( - "Getting Started with DataStax Apache Cassandra as a Service on DataStax Constellation"); + "Getting Started with DataStax Apache Cassandra as a Service on DataStax Astra"); video.setLocation("https://www.youtube.com/watch?v=68xzKpcZURA"); Set tags = new HashSet<>(); tags.add("apachecassandra"); @@ -146,7 +146,7 @@ public static void main(String[] args) { Video template = new Video(); template.setVideoid(video.getVideoid()); template.setName( - "Getting Started with DataStax Apache Cassandra® as a Service on DataStax Constellation"); + "Getting Started with DataStax Apache Cassandra® as a Service on DataStax Astra"); videoDao.update(template); // Reload the whole entity and check the fields video = videoDao.get(video.getVideoid()); From 0e134a493b1adf8f35a23fe68c766add180cbb54 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 18:24:41 +0100 Subject: [PATCH 594/979] Make fields final --- .../internal/core/metadata/LoadBalancingPolicyWrapper.java | 2 +- .../session/throttling/ConcurrencyLimitingRequestThrottler.java | 2 +- .../internal/core/util/concurrent/ReplayingEventFilter.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java index bdf0c392e0c..d4446418f4a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java @@ -84,7 +84,7 @@ private enum State { private final String logPrefix; private final ReplayingEventFilter eventFilter = new ReplayingEventFilter<>(this::processNodeStateEvent); - private AtomicReference stateRef = new AtomicReference<>(State.BEFORE_INIT); + private final AtomicReference stateRef = new AtomicReference<>(State.BEFORE_INIT); public LoadBalancingPolicyWrapper( @NonNull InternalDriverContext context, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java index ebfc838f4ac..ec71bde30b2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java @@ -65,7 +65,7 @@ public class ConcurrencyLimitingRequestThrottler implements RequestThrottler { private int concurrentRequests; @GuardedBy("lock") - private Deque queue = new ArrayDeque<>(); + private final Deque queue = new ArrayDeque<>(); @GuardedBy("lock") private boolean closed; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java index 5d6fd62918a..f0e42b6b240 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java @@ -55,7 +55,7 @@ private enum State { private State state; @GuardedBy("stateLock") - private List recordedEvents; + private final List recordedEvents; public ReplayingEventFilter(Consumer consumer) { this.consumer = consumer; From 334915ba5cccd2e46ecbe74d736f3eeee92a2e9c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 26 Nov 2020 20:43:42 +0100 Subject: [PATCH 595/979] Fix formatting issuess --- .../oss/driver/api/core/session/SessionBuilder.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 2ee551a2bf6..af4eb467a95 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -497,8 +497,8 @@ public SelfT withClassLoader(@Nullable ClassLoader classLoader) { * the provided {@link Path}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Astra console that contains the connection information, then instruct the - * driver to read its contents using either this method or one if its variants. + * DataStax Astra console that contains the connection information, then instruct the driver to + * read its contents using either this method or one if its variants. * *

          For more information, please refer to the DataStax Astra documentation. * @@ -534,8 +534,8 @@ public SelfT withCodecRegistry(@Nullable MutableCodecRegistry codecRegistry) { * the provided {@link URL}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Astra console that contains the connection information, then instruct the - * driver to read its contents using either this method or one if its variants. + * DataStax Astra console that contains the connection information, then instruct the driver to + * read its contents using either this method or one if its variants. * *

          For more information, please refer to the DataStax Astra documentation. * @@ -554,8 +554,8 @@ public SelfT withCloudSecureConnectBundle(@NonNull URL cloudConfigUrl) { * the provided {@link InputStream}. * *

          To connect to a Cloud database, you must first download the secure database bundle from the - * DataStax Astra console that contains the connection information, then instruct the - * driver to read its contents using either this method or one if its variants. + * DataStax Astra console that contains the connection information, then instruct the driver to + * read its contents using either this method or one if its variants. * *

          For more information, please refer to the DataStax Astra documentation. * From d44a61a163f892216da13e54bea8eb2ed5943358 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 27 Nov 2020 16:16:59 +0100 Subject: [PATCH 596/979] Make field final --- .../oss/driver/internal/core/channel/ProtocolInitHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index 9d4969040d6..1f9c0e2c689 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -79,7 +79,7 @@ class ProtocolInitHandler extends ConnectInitHandler { private final HeartbeatHandler heartbeatHandler; private String logPrefix; private ChannelHandlerContext ctx; - private boolean querySupportedOptions; + private final boolean querySupportedOptions; /** * @param querySupportedOptions whether to send OPTIONS as the first message, to request which From 55c03928ea074df20cae1ed4ed032e0adfed49dc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 30 Nov 2020 15:22:06 +0100 Subject: [PATCH 597/979] Fix typo in javadocs of ConnectionInitException --- .../oss/driver/api/core/connection/ConnectionInitException.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/ConnectionInitException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/ConnectionInitException.java index 4112bdcd6f8..77717bbae63 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/connection/ConnectionInitException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/ConnectionInitException.java @@ -26,7 +26,7 @@ * *

          The only time when this is returned directly to the client (wrapped in a {@link * AllNodesFailedException}) is at initialization. If it happens later when the driver is already - * connected, it is just logged an the connection is reattempted. + * connected, it is just logged and the connection is reattempted. */ public class ConnectionInitException extends DriverException { public ConnectionInitException(@NonNull String message, @Nullable Throwable cause) { From e8d2fe65cebf118b0d837e90380fc49d13a19fe2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 30 Nov 2020 15:22:41 +0100 Subject: [PATCH 598/979] Mention that DriverException#getExecutionInfo returns null for low-level exceptions --- .../com/datastax/oss/driver/api/core/DriverException.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java index 9497ef15cf5..73e23712444 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DriverException.java @@ -77,6 +77,10 @@ protected DriverException( * AllNodesFailedException#getAllErrors()} or {@link ExecutionInfo#getErrors()} do not contain * their own execution info, and therefore return null from this method. * + *

          This method will also return null for low-level exceptions thrown directly from a driver + * channel, such as {@link com.datastax.oss.driver.api.core.connection.ConnectionInitException} or + * {@link com.datastax.oss.driver.api.core.connection.ClosedConnectionException}. + * *

          It will also be null if you serialize and deserialize an exception. */ public ExecutionInfo getExecutionInfo() { From edeeaa0324251fb667dd4998304342ed36c12661 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 30 Nov 2020 15:23:09 +0100 Subject: [PATCH 599/979] Mention effect of idempotence on ClosedConnectionException --- .../api/core/connection/ClosedConnectionException.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.java index 9daee547a46..9d0f7709004 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.java @@ -26,8 +26,9 @@ *

          For example, this can happen if the node is unresponsive and a heartbeat query failed, or if * the node was forced down. * - *

          The driver will always retry these requests on the next node transparently. Therefore, the - * only way to observe this exception is as part of an {@link AllNodesFailedException}. + *

          The driver will retry these requests on the next node transparently, unless the request is not + * idempotent. Therefore, this exception is usually observed as part of an {@link + * AllNodesFailedException}. */ public class ClosedConnectionException extends DriverException { From 587a9149a7e5756fb40d40ab73a03b2bb2b91445 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 16:31:07 +0100 Subject: [PATCH 600/979] JAVA-2890: Fix off-by-one error in UdtCodec (#1508) --- changelog/README.md | 1 + .../driver/internal/core/type/codec/UdtCodec.java | 2 +- .../internal/core/type/codec/UdtCodecTest.java | 15 +++++++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 9ddce319d39..6129e4f0155 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2890: Fix off-by-one error in UdtCodec - [improvement] JAVA-2905: Prevent new connections from using a protocol version higher than the negotiated one - [bug] JAVA-2647: Handle token types in QueryBuilder.literal() - [bug] JAVA-2887: Handle composite profiles with more than one key and/or backed by only one profile diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java index 15c993d76c3..d9b02d7ce3d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java @@ -104,7 +104,7 @@ public UdtValue decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion prot UdtValue value = cqlType.newValue(); int i = 0; while (input.hasRemaining()) { - if (i > cqlType.getFieldTypes().size()) { + if (i == cqlType.getFieldTypes().size()) { throw new IllegalArgumentException( String.format( "Too many fields in encoded UDT value, expected %d", diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java index 1bbaaf0a2d5..383fd5a144f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java @@ -133,6 +133,21 @@ public void should_decode_udt() { verify(textCodec).decode(Bytes.fromHexString("0x61"), ProtocolVersion.DEFAULT); } + @Test + public void should_fail_to_decode_udt_when_too_many_fields() { + assertThatThrownBy( + () -> + decode( + "0x" + + ("00000004" + "00000001") + + "ffffffff" + + ("00000001" + "61") + // extra contents + + "ffffffff")) + .isInstanceOf(IllegalArgumentException.class) + .hasMessage("Too many fields in encoded UDT value, expected 3"); + } + /** Test for JAVA-2557. Ensures that the codec can decode null fields with any negative length. */ @Test public void should_decode_negative_element_length_as_null_field() { From a7dc7b70d0b2f29302dab440aaa4d4117a560688 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 16:38:31 +0100 Subject: [PATCH 601/979] JAVA-2883: Use root locale explicitly when changing string case (#1509) This commit changes all occurrences of String.toLowerCase() in production code to use String.toLowerCase(Locale.ROOT) instead. No occurrences of String.toUpperCase() were found in production code. 
--- changelog/README.md | 1 + .../oss/driver/api/core/CqlIdentifier.java | 3 +- .../oss/driver/api/core/data/CqlDuration.java | 3 +- .../internal/core/data/IdentifierIndex.java | 5 +- .../schema/parsing/DataTypeCqlNameParser.java | 3 +- .../core/protocol/BuiltInCompressors.java | 3 +- .../protocol/CompressorSubstitutions.java | 5 +- .../internal/core/type/PrimitiveType.java | 3 +- .../driver/internal/core/util/Strings.java | 7 +- .../oss/driver/TestDataProviders.java | 21 ++ .../driver/api/core/CqlIdentifierTest.java | 30 +- .../driver/api/core/data/CqlDurationTest.java | 171 +++++++----- .../core/data/IdentifierIndexTest.java | 58 +++- .../parsing/DataTypeClassNameParserTest.java | 260 ++++++++++-------- .../core/protocol/BuiltInCompressorsTest.java | 70 +++++ .../internal/core/type/PrimitiveTypeTest.java | 118 ++++++++ .../internal/core/util/StringsTest.java | 168 +++++++++++ 17 files changed, 721 insertions(+), 208 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressorsTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/PrimitiveTypeTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/StringsTest.java diff --git a/changelog/README.md b/changelog/README.md index 6129e4f0155..886f96a7252 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2883: Use root locale explicitly when changing string case - [bug] JAVA-2890: Fix off-by-one error in UdtCodec - [improvement] JAVA-2905: Prevent new connections from using a protocol version higher than the negotiated one - [bug] JAVA-2647: Handle token types in QueryBuilder.literal() diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlIdentifier.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlIdentifier.java index 89211d75382..922f4f2e5e5 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/CqlIdentifier.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlIdentifier.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; +import java.util.Locale; import net.jcip.annotations.Immutable; /** @@ -75,7 +76,7 @@ public static CqlIdentifier fromCql(@NonNull String cql) { if (Strings.isDoubleQuoted(cql)) { internal = Strings.unDoubleQuote(cql); } else { - internal = cql.toLowerCase(); + internal = cql.toLowerCase(Locale.ROOT); Preconditions.checkArgument( !Strings.needsDoubleQuotes(internal), "Invalid CQL form [%s]: needs double quotes", cql); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java index 40b6b41ee60..8ec509ea7f6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java @@ -28,6 +28,7 @@ import java.time.temporal.TemporalUnit; import java.time.temporal.UnsupportedTemporalTypeException; import java.util.List; +import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; import net.jcip.annotations.Immutable; @@ -232,7 +233,7 @@ private static long groupAsLong(@NonNull Matcher matcher, int group) { } private static Builder add(@NonNull Builder builder, long number, @NonNull String symbol) { - String s = symbol.toLowerCase(); + String s = symbol.toLowerCase(Locale.ROOT); if (s.equals("y")) { return builder.addYears(number); } else if (s.equals("mo")) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 17411c4e6d8..29396f08440 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.LinkedListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; import java.util.List; +import java.util.Locale; import net.jcip.annotations.Immutable; /** @@ -47,7 +48,7 @@ public IdentifierIndex(List ids) { for (CqlIdentifier id : ids) { byId.put(id, i); byCaseSensitiveName.put(id.asInternal(), i); - byCaseInsensitiveName.put(id.asInternal().toLowerCase(), i); + byCaseInsensitiveName.put(id.asInternal().toLowerCase(Locale.ROOT), i); i += 1; } } @@ -59,7 +60,7 @@ public IdentifierIndex(List ids) { public List allIndicesOf(String name) { return Strings.isDoubleQuoted(name) ? byCaseSensitiveName.get(Strings.unDoubleQuote(name)) - : byCaseInsensitiveName.get(name.toLowerCase()); + : byCaseInsensitiveName.get(name.toLowerCase(Locale.ROOT)); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java index b511c8cadc1..3523aa5c459 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.ArrayList; import java.util.List; +import java.util.Locale; import java.util.Map; import net.jcip.annotations.ThreadSafe; @@ -68,7 +69,7 @@ private DataType parse( return DataTypes.custom(type); } - DataType nativeType = NATIVE_TYPES_BY_NAME.get(type.toLowerCase()); + DataType nativeType = NATIVE_TYPES_BY_NAME.get(type.toLowerCase(Locale.ROOT)); if (nativeType != null) { return nativeType; } diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java index 5f6ad6ec270..56fe96d0b83 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressors.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.protocol.internal.Compressor; import io.netty.buffer.ByteBuf; +import java.util.Locale; /** * Provides a single entry point to create compressor instances in the driver. @@ -29,7 +30,7 @@ public class BuiltInCompressors { public static Compressor newInstance(String name, DriverContext context) { - switch (name.toLowerCase()) { + switch (name.toLowerCase(Locale.ROOT)) { case "lz4": return new Lz4Compressor(context); case "snappy": diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java index e632a07a0ab..fe43bea0863 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java @@ -22,6 +22,7 @@ import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; import io.netty.buffer.ByteBuf; +import java.util.Locale; import java.util.function.BooleanSupplier; /** @@ -38,7 +39,7 @@ public class CompressorSubstitutions { public static final class BuiltInCompressorsLz4Only { @Substitute public static Compressor newInstance(String name, DriverContext context) { - switch (name.toLowerCase()) { + switch (name.toLowerCase(Locale.ROOT)) { case "lz4": return new Lz4Compressor(context); case "snappy": @@ -59,7 +60,7 @@ public static Compressor 
newInstance(String name, DriverContext context public static final class NoBuiltInCompressors { @Substitute public static Compressor newInstance(String name, DriverContext context) { - switch (name.toLowerCase()) { + switch (name.toLowerCase(Locale.ROOT)) { case "lz4": throw new UnsupportedOperationException( "This native image was not built with support for LZ4 compression"); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/PrimitiveType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/PrimitiveType.java index 909a58d053a..2eca544d07c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/PrimitiveType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/PrimitiveType.java @@ -20,6 +20,7 @@ import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.Serializable; +import java.util.Locale; import net.jcip.annotations.Immutable; @Immutable @@ -67,7 +68,7 @@ public int hashCode() { @NonNull @Override public String asCql(boolean includeFrozen, boolean pretty) { - return codeName(protocolCode).toLowerCase(); + return codeName(protocolCode).toLowerCase(Locale.ROOT); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java index eb95d2cbc18..b0aa9128c76 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.internal.core.util; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Locale; public class Strings { @@ -230,8 +232,9 @@ private static String unquote(String text, char quoteChar) { return new String(result); } - 
private static boolean isReservedCqlKeyword(String id) { - return id != null && RESERVED_KEYWORDS.contains(id.toLowerCase()); + @VisibleForTesting + static boolean isReservedCqlKeyword(String id) { + return id != null && RESERVED_KEYWORDS.contains(id.toLowerCase(Locale.ROOT)); } /** diff --git a/core/src/test/java/com/datastax/oss/driver/TestDataProviders.java b/core/src/test/java/com/datastax/oss/driver/TestDataProviders.java index 866a4bc9f75..734370996f7 100644 --- a/core/src/test/java/com/datastax/oss/driver/TestDataProviders.java +++ b/core/src/test/java/com/datastax/oss/driver/TestDataProviders.java @@ -17,6 +17,7 @@ import com.tngtech.java.junit.dataprovider.DataProvider; import java.util.Arrays; +import java.util.Locale; public class TestDataProviders { @@ -83,4 +84,24 @@ public static Object[][] combine(Object[][]... providers) { public static Object[][] booleans() { return fromList(true, false); } + + /** An arbitrary set of locales to use when testing locale-sensitive operations. */ + @DataProvider + public static Object[][] locales() { + return new Object[][] { + new Object[] {Locale.US}, + // non-latin alphabets + new Object[] {Locale.CHINA}, + new Object[] {Locale.JAPAN}, + new Object[] {Locale.KOREA}, + new Object[] {new Locale("gr") /* greek */}, + new Object[] {new Locale("ar") /* arabic */}, + // latin-based alphabets with extended character sets + new Object[] {new Locale("vi") /* vietnamese */}, + // JAVA-2883: Turkish is the most problematic locale as String.toLowerCase("TITLE") + // wouldn't return "title" but rather "tıtle", where 'ı' is the 'LATIN SMALL LETTER + // DOTLESS I' character specific to the Turkish language. 
+ new Object[] {new Locale("tr") /* turkish*/}, + }; + } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/CqlIdentifierTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/CqlIdentifierTest.java index db440007e92..4a0a692c7d7 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/CqlIdentifierTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/CqlIdentifierTest.java @@ -17,9 +17,16 @@ import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.TestDataProviders; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class CqlIdentifierTest { + @Test public void should_build_from_internal() { assertThat(CqlIdentifier.fromInternal("foo").asInternal()).isEqualTo("foo"); @@ -30,13 +37,22 @@ public void should_build_from_internal() { } @Test - public void should_build_from_valid_cql() { - assertThat(CqlIdentifier.fromCql("foo").asInternal()).isEqualTo("foo"); - assertThat(CqlIdentifier.fromCql("Foo").asInternal()).isEqualTo("foo"); - assertThat(CqlIdentifier.fromCql("\"Foo\"").asInternal()).isEqualTo("Foo"); - assertThat(CqlIdentifier.fromCql("\"foo bar\"").asInternal()).isEqualTo("foo bar"); - assertThat(CqlIdentifier.fromCql("\"foo\"\"bar\"").asInternal()).isEqualTo("foo\"bar"); - assertThat(CqlIdentifier.fromCql("\"create\"").asInternal()).isEqualTo("create"); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_build_from_valid_cql(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(CqlIdentifier.fromCql("foo").asInternal()).isEqualTo("foo"); + assertThat(CqlIdentifier.fromCql("Foo").asInternal()).isEqualTo("foo"); + assertThat(CqlIdentifier.fromCql("\"Foo\"").asInternal()).isEqualTo("Foo"); + 
assertThat(CqlIdentifier.fromCql("\"foo bar\"").asInternal()).isEqualTo("foo bar"); + assertThat(CqlIdentifier.fromCql("\"foo\"\"bar\"").asInternal()).isEqualTo("foo\"bar"); + assertThat(CqlIdentifier.fromCql("\"create\"").asInternal()).isEqualTo("create"); + // JAVA-2883: this would fail under turkish locale if it was used internally + assertThat(CqlIdentifier.fromCql("TITLE").asInternal()).isEqualTo("title"); + } finally { + Locale.setDefault(def); + } } @Test(expected = IllegalArgumentException.class) diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java index a880f4a8579..56c0b00b5e3 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java @@ -19,92 +19,119 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.fail; +import com.datastax.oss.driver.TestDataProviders; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.ZonedDateTime; import java.time.temporal.ChronoUnit; import java.time.temporal.UnsupportedTemporalTypeException; +import java.util.Locale; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class CqlDurationTest { @Test - public void should_parse_from_string_with_standard_pattern() { - assertThat(CqlDuration.from("1y2mo")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); - assertThat(CqlDuration.from("-1y2mo")).isEqualTo(CqlDuration.newInstance(-14, 0, 0)); - assertThat(CqlDuration.from("1Y2MO")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); - assertThat(CqlDuration.from("2w")).isEqualTo(CqlDuration.newInstance(0, 14, 0)); - assertThat(CqlDuration.from("2d10h")) - .isEqualTo(CqlDuration.newInstance(0, 2, 10 * 
CqlDuration.NANOS_PER_HOUR)); - assertThat(CqlDuration.from("2d")).isEqualTo(CqlDuration.newInstance(0, 2, 0)); - assertThat(CqlDuration.from("30h")) - .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); - assertThat(CqlDuration.from("30h20m")) - .isEqualTo( - CqlDuration.newInstance( - 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("20m")) - .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("56s")) - .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); - assertThat(CqlDuration.from("567ms")) - .isEqualTo(CqlDuration.newInstance(0, 0, 567 * CqlDuration.NANOS_PER_MILLI)); - assertThat(CqlDuration.from("1950us")) - .isEqualTo(CqlDuration.newInstance(0, 0, 1950 * CqlDuration.NANOS_PER_MICRO)); - assertThat(CqlDuration.from("1950µs")) - .isEqualTo(CqlDuration.newInstance(0, 0, 1950 * CqlDuration.NANOS_PER_MICRO)); - assertThat(CqlDuration.from("1950000ns")).isEqualTo(CqlDuration.newInstance(0, 0, 1950000)); - assertThat(CqlDuration.from("1950000NS")).isEqualTo(CqlDuration.newInstance(0, 0, 1950000)); - assertThat(CqlDuration.from("-1950000ns")).isEqualTo(CqlDuration.newInstance(0, 0, -1950000)); - assertThat(CqlDuration.from("1y3mo2h10m")) - .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_from_string_with_standard_pattern(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(CqlDuration.from("1y2mo")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); + assertThat(CqlDuration.from("-1y2mo")).isEqualTo(CqlDuration.newInstance(-14, 0, 0)); + assertThat(CqlDuration.from("1Y2MO")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); + assertThat(CqlDuration.from("2w")).isEqualTo(CqlDuration.newInstance(0, 14, 0)); + 
assertThat(CqlDuration.from("2d10h")) + .isEqualTo(CqlDuration.newInstance(0, 2, 10 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("2d")).isEqualTo(CqlDuration.newInstance(0, 2, 0)); + assertThat(CqlDuration.from("30h")) + .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("30h20m")) + .isEqualTo( + CqlDuration.newInstance( + 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); + assertThat(CqlDuration.from("20m")) + .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); + assertThat(CqlDuration.from("56s")) + .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); + assertThat(CqlDuration.from("567ms")) + .isEqualTo(CqlDuration.newInstance(0, 0, 567 * CqlDuration.NANOS_PER_MILLI)); + assertThat(CqlDuration.from("1950us")) + .isEqualTo(CqlDuration.newInstance(0, 0, 1950 * CqlDuration.NANOS_PER_MICRO)); + assertThat(CqlDuration.from("1950µs")) + .isEqualTo(CqlDuration.newInstance(0, 0, 1950 * CqlDuration.NANOS_PER_MICRO)); + assertThat(CqlDuration.from("1950000ns")).isEqualTo(CqlDuration.newInstance(0, 0, 1950000)); + assertThat(CqlDuration.from("1950000NS")).isEqualTo(CqlDuration.newInstance(0, 0, 1950000)); + assertThat(CqlDuration.from("-1950000ns")).isEqualTo(CqlDuration.newInstance(0, 0, -1950000)); + assertThat(CqlDuration.from("1y3mo2h10m")) + .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + } finally { + Locale.setDefault(def); + } } @Test - public void should_parse_from_string_with_iso8601_pattern() { - assertThat(CqlDuration.from("P1Y2D")).isEqualTo(CqlDuration.newInstance(12, 2, 0)); - assertThat(CqlDuration.from("P1Y2M")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); - assertThat(CqlDuration.from("P2W")).isEqualTo(CqlDuration.newInstance(0, 14, 0)); - assertThat(CqlDuration.from("P1YT2H")) - .isEqualTo(CqlDuration.newInstance(12, 0, 2 * CqlDuration.NANOS_PER_HOUR)); - 
assertThat(CqlDuration.from("-P1Y2M")).isEqualTo(CqlDuration.newInstance(-14, 0, 0)); - assertThat(CqlDuration.from("P2D")).isEqualTo(CqlDuration.newInstance(0, 2, 0)); - assertThat(CqlDuration.from("PT30H")) - .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); - assertThat(CqlDuration.from("PT30H20M")) - .isEqualTo( - CqlDuration.newInstance( - 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("PT20M")) - .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("PT56S")) - .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); - assertThat(CqlDuration.from("P1Y3MT2H10M")) - .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_from_string_with_iso8601_pattern(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(CqlDuration.from("P1Y2D")).isEqualTo(CqlDuration.newInstance(12, 2, 0)); + assertThat(CqlDuration.from("P1Y2M")).isEqualTo(CqlDuration.newInstance(14, 0, 0)); + assertThat(CqlDuration.from("P2W")).isEqualTo(CqlDuration.newInstance(0, 14, 0)); + assertThat(CqlDuration.from("P1YT2H")) + .isEqualTo(CqlDuration.newInstance(12, 0, 2 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("-P1Y2M")).isEqualTo(CqlDuration.newInstance(-14, 0, 0)); + assertThat(CqlDuration.from("P2D")).isEqualTo(CqlDuration.newInstance(0, 2, 0)); + assertThat(CqlDuration.from("PT30H")) + .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("PT30H20M")) + .isEqualTo( + CqlDuration.newInstance( + 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); + assertThat(CqlDuration.from("PT20M")) + .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); + 
assertThat(CqlDuration.from("PT56S")) + .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); + assertThat(CqlDuration.from("P1Y3MT2H10M")) + .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + } finally { + Locale.setDefault(def); + } } @Test - public void should_parse_from_string_with_iso8601_alternative_pattern() { - assertThat(CqlDuration.from("P0001-00-02T00:00:00")) - .isEqualTo(CqlDuration.newInstance(12, 2, 0)); - assertThat(CqlDuration.from("P0001-02-00T00:00:00")) - .isEqualTo(CqlDuration.newInstance(14, 0, 0)); - assertThat(CqlDuration.from("P0001-00-00T02:00:00")) - .isEqualTo(CqlDuration.newInstance(12, 0, 2 * CqlDuration.NANOS_PER_HOUR)); - assertThat(CqlDuration.from("-P0001-02-00T00:00:00")) - .isEqualTo(CqlDuration.newInstance(-14, 0, 0)); - assertThat(CqlDuration.from("P0000-00-02T00:00:00")) - .isEqualTo(CqlDuration.newInstance(0, 2, 0)); - assertThat(CqlDuration.from("P0000-00-00T30:00:00")) - .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); - assertThat(CqlDuration.from("P0000-00-00T30:20:00")) - .isEqualTo( - CqlDuration.newInstance( - 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("P0000-00-00T00:20:00")) - .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); - assertThat(CqlDuration.from("P0000-00-00T00:00:56")) - .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); - assertThat(CqlDuration.from("P0001-03-00T02:10:00")) - .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_from_string_with_iso8601_alternative_pattern(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(CqlDuration.from("P0001-00-02T00:00:00")) + .isEqualTo(CqlDuration.newInstance(12, 2, 0)); + 
assertThat(CqlDuration.from("P0001-02-00T00:00:00")) + .isEqualTo(CqlDuration.newInstance(14, 0, 0)); + assertThat(CqlDuration.from("P0001-00-00T02:00:00")) + .isEqualTo(CqlDuration.newInstance(12, 0, 2 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("-P0001-02-00T00:00:00")) + .isEqualTo(CqlDuration.newInstance(-14, 0, 0)); + assertThat(CqlDuration.from("P0000-00-02T00:00:00")) + .isEqualTo(CqlDuration.newInstance(0, 2, 0)); + assertThat(CqlDuration.from("P0000-00-00T30:00:00")) + .isEqualTo(CqlDuration.newInstance(0, 0, 30 * CqlDuration.NANOS_PER_HOUR)); + assertThat(CqlDuration.from("P0000-00-00T30:20:00")) + .isEqualTo( + CqlDuration.newInstance( + 0, 0, 30 * CqlDuration.NANOS_PER_HOUR + 20 * CqlDuration.NANOS_PER_MINUTE)); + assertThat(CqlDuration.from("P0000-00-00T00:20:00")) + .isEqualTo(CqlDuration.newInstance(0, 0, 20 * CqlDuration.NANOS_PER_MINUTE)); + assertThat(CqlDuration.from("P0000-00-00T00:00:56")) + .isEqualTo(CqlDuration.newInstance(0, 0, 56 * CqlDuration.NANOS_PER_SECOND)); + assertThat(CqlDuration.from("P0001-03-00T02:10:00")) + .isEqualTo(CqlDuration.newInstance(15, 0, 130 * CqlDuration.NANOS_PER_MINUTE)); + } finally { + Locale.setDefault(def); + } } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java index 183e4c17fab..ee41fe0bdf0 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/data/IdentifierIndexTest.java @@ -17,10 +17,16 @@ import static org.assertj.core.api.Assertions.assertThat; +import com.datastax.oss.driver.TestDataProviders; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import 
com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class IdentifierIndexTest { private static final CqlIdentifier Foo = CqlIdentifier.fromInternal("Foo"); private static final CqlIdentifier foo = CqlIdentifier.fromInternal("foo"); @@ -41,13 +47,31 @@ public void should_not_find_index_of_nonexistent_identifier() { } @Test - public void should_find_first_index_of_case_insensitive_name() { - assertThat(index.firstIndexOf("foo")).isEqualTo(0); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_find_first_index_of_case_insensitive_name(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(index.firstIndexOf("foo")).isEqualTo(0); + assertThat(index.firstIndexOf("FOO")).isEqualTo(0); + assertThat(index.firstIndexOf("fOO")).isEqualTo(0); + } finally { + Locale.setDefault(def); + } } @Test - public void should_not_find_first_index_of_nonexistent_case_insensitive_name() { - assertThat(index.firstIndexOf("bar")).isEqualTo(-1); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_not_find_first_index_of_nonexistent_case_insensitive_name(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(index.firstIndexOf("bar")).isEqualTo(-1); + assertThat(index.firstIndexOf("BAR")).isEqualTo(-1); + assertThat(index.firstIndexOf("bAR")).isEqualTo(-1); + } finally { + Locale.setDefault(def); + } } @Test @@ -75,13 +99,31 @@ public void should_not_find_indices_of_nonexistent_identifier() { } @Test - public void should_all_indices_of_case_insensitive_name() { - assertThat(index.allIndicesOf("foo")).containsExactly(0, 1, 2, 3, 4, 5); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_find_all_indices_of_case_insensitive_name(Locale locale) { + 
Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(index.allIndicesOf("foo")).containsExactly(0, 1, 2, 3, 4, 5); + assertThat(index.allIndicesOf("FOO")).containsExactly(0, 1, 2, 3, 4, 5); + assertThat(index.allIndicesOf("fOO")).containsExactly(0, 1, 2, 3, 4, 5); + } finally { + Locale.setDefault(def); + } } @Test - public void should_not_find_indices_of_nonexistent_case_insensitive_name() { - assertThat(index.allIndicesOf("bar")).isEmpty(); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_not_find_indices_of_nonexistent_case_insensitive_name(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(index.allIndicesOf("bar")).isEmpty(); + assertThat(index.allIndicesOf("BAR")).isEmpty(); + assertThat(index.allIndicesOf("bAR")).isEmpty(); + } finally { + Locale.setDefault(def); + } } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParserTest.java index 21ff579464d..9a0d3034821 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParserTest.java @@ -19,6 +19,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import com.datastax.oss.driver.TestDataProviders; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -27,14 +28,17 @@ import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import 
com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; import java.util.Map; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.MockitoAnnotations; -@RunWith(MockitoJUnitRunner.class) +@RunWith(DataProviderRunner.class) public class DataTypeClassNameParserTest { private static final CqlIdentifier KEYSPACE_ID = CqlIdentifier.fromInternal("ks"); @@ -44,131 +48,167 @@ public class DataTypeClassNameParserTest { @Before public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); parser = new DataTypeClassNameParser(); } @Test - public void should_parse_native_types() { - for (Map.Entry entry : - DataTypeClassNameParser.NATIVE_TYPES_BY_CLASS_NAME.entrySet()) { - - String className = entry.getKey(); - DataType expectedType = entry.getValue(); - - assertThat(parse(className)).isEqualTo(expectedType); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_native_types(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + for (Map.Entry entry : + DataTypeClassNameParser.NATIVE_TYPES_BY_CLASS_NAME.entrySet()) { + + String className = entry.getKey(); + DataType expectedType = entry.getValue(); + + assertThat(parse(className)).isEqualTo(expectedType); + } + } finally { + Locale.setDefault(def); } } @Test - public void should_parse_collection_types() { - assertThat( - parse( - "org.apache.cassandra.db.marshal.ListType(" - + "org.apache.cassandra.db.marshal.UTF8Type)")) - .isEqualTo(DataTypes.listOf(DataTypes.TEXT)); - - assertThat( - parse( - "org.apache.cassandra.db.marshal.FrozenType(" - + ("org.apache.cassandra.db.marshal.ListType(" - + "org.apache.cassandra.db.marshal.UTF8Type))"))) - .isEqualTo(DataTypes.frozenListOf(DataTypes.TEXT)); - - assertThat( - parse( - 
"org.apache.cassandra.db.marshal.SetType(" - + "org.apache.cassandra.db.marshal.UTF8Type)")) - .isEqualTo(DataTypes.setOf(DataTypes.TEXT)); - - assertThat( - parse( - "org.apache.cassandra.db.marshal.MapType(" - + "org.apache.cassandra.db.marshal.UTF8Type," - + "org.apache.cassandra.db.marshal.UTF8Type)")) - .isEqualTo(DataTypes.mapOf(DataTypes.TEXT, DataTypes.TEXT)); - - assertThat( - parse( - "org.apache.cassandra.db.marshal.MapType(" - + "org.apache.cassandra.db.marshal.UTF8Type," - + "org.apache.cassandra.db.marshal.FrozenType(" - + ("org.apache.cassandra.db.marshal.MapType(" - + "org.apache.cassandra.db.marshal.Int32Type," - + "org.apache.cassandra.db.marshal.Int32Type)))"))) - .isEqualTo( - DataTypes.mapOf(DataTypes.TEXT, DataTypes.frozenMapOf(DataTypes.INT, DataTypes.INT))); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_collection_types(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat( + parse( + "org.apache.cassandra.db.marshal.ListType(" + + "org.apache.cassandra.db.marshal.UTF8Type)")) + .isEqualTo(DataTypes.listOf(DataTypes.TEXT)); + + assertThat( + parse( + "org.apache.cassandra.db.marshal.FrozenType(" + + ("org.apache.cassandra.db.marshal.ListType(" + + "org.apache.cassandra.db.marshal.UTF8Type))"))) + .isEqualTo(DataTypes.frozenListOf(DataTypes.TEXT)); + + assertThat( + parse( + "org.apache.cassandra.db.marshal.SetType(" + + "org.apache.cassandra.db.marshal.UTF8Type)")) + .isEqualTo(DataTypes.setOf(DataTypes.TEXT)); + + assertThat( + parse( + "org.apache.cassandra.db.marshal.MapType(" + + "org.apache.cassandra.db.marshal.UTF8Type," + + "org.apache.cassandra.db.marshal.UTF8Type)")) + .isEqualTo(DataTypes.mapOf(DataTypes.TEXT, DataTypes.TEXT)); + + assertThat( + parse( + "org.apache.cassandra.db.marshal.MapType(" + + "org.apache.cassandra.db.marshal.UTF8Type," + + "org.apache.cassandra.db.marshal.FrozenType(" + + 
("org.apache.cassandra.db.marshal.MapType(" + + "org.apache.cassandra.db.marshal.Int32Type," + + "org.apache.cassandra.db.marshal.Int32Type)))"))) + .isEqualTo( + DataTypes.mapOf(DataTypes.TEXT, DataTypes.frozenMapOf(DataTypes.INT, DataTypes.INT))); + } finally { + Locale.setDefault(def); + } } @Test - public void should_parse_user_type_when_definition_not_already_available() { - UserDefinedType addressType = - (UserDefinedType) - parse( - "org.apache.cassandra.db.marshal.UserType(" - + "foo,61646472657373," - + ("737472656574:org.apache.cassandra.db.marshal.UTF8Type," - + "7a6970636f6465:org.apache.cassandra.db.marshal.Int32Type," - + ("70686f6e6573:org.apache.cassandra.db.marshal.SetType(" - + "org.apache.cassandra.db.marshal.UserType(foo,70686f6e65," - + "6e616d65:org.apache.cassandra.db.marshal.UTF8Type," - + "6e756d626572:org.apache.cassandra.db.marshal.UTF8Type)") - + "))")); - - assertThat(addressType.getKeyspace().asInternal()).isEqualTo("foo"); - assertThat(addressType.getName().asInternal()).isEqualTo("address"); - assertThat(addressType.isFrozen()).isTrue(); - assertThat(addressType.getFieldNames().size()).isEqualTo(3); - - assertThat(addressType.getFieldNames().get(0).asInternal()).isEqualTo("street"); - assertThat(addressType.getFieldTypes().get(0)).isEqualTo(DataTypes.TEXT); - - assertThat(addressType.getFieldNames().get(1).asInternal()).isEqualTo("zipcode"); - assertThat(addressType.getFieldTypes().get(1)).isEqualTo(DataTypes.INT); - - assertThat(addressType.getFieldNames().get(2).asInternal()).isEqualTo("phones"); - DataType phonesType = addressType.getFieldTypes().get(2); - assertThat(phonesType).isInstanceOf(SetType.class); - UserDefinedType phoneType = ((UserDefinedType) ((SetType) phonesType).getElementType()); - - assertThat(phoneType.getKeyspace().asInternal()).isEqualTo("foo"); - assertThat(phoneType.getName().asInternal()).isEqualTo("phone"); - assertThat(phoneType.isFrozen()).isTrue(); - 
assertThat(phoneType.getFieldNames().size()).isEqualTo(2); - - assertThat(phoneType.getFieldNames().get(0).asInternal()).isEqualTo("name"); - assertThat(phoneType.getFieldTypes().get(0)).isEqualTo(DataTypes.TEXT); - - assertThat(phoneType.getFieldNames().get(1).asInternal()).isEqualTo("number"); - assertThat(phoneType.getFieldTypes().get(1)).isEqualTo(DataTypes.TEXT); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_user_type_when_definition_not_already_available(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + UserDefinedType addressType = + (UserDefinedType) + parse( + "org.apache.cassandra.db.marshal.UserType(" + + "foo,61646472657373," + + ("737472656574:org.apache.cassandra.db.marshal.UTF8Type," + + "7a6970636f6465:org.apache.cassandra.db.marshal.Int32Type," + + ("70686f6e6573:org.apache.cassandra.db.marshal.SetType(" + + "org.apache.cassandra.db.marshal.UserType(foo,70686f6e65," + + "6e616d65:org.apache.cassandra.db.marshal.UTF8Type," + + "6e756d626572:org.apache.cassandra.db.marshal.UTF8Type)") + + "))")); + + assertThat(addressType.getKeyspace().asInternal()).isEqualTo("foo"); + assertThat(addressType.getName().asInternal()).isEqualTo("address"); + assertThat(addressType.isFrozen()).isTrue(); + assertThat(addressType.getFieldNames().size()).isEqualTo(3); + + assertThat(addressType.getFieldNames().get(0).asInternal()).isEqualTo("street"); + assertThat(addressType.getFieldTypes().get(0)).isEqualTo(DataTypes.TEXT); + + assertThat(addressType.getFieldNames().get(1).asInternal()).isEqualTo("zipcode"); + assertThat(addressType.getFieldTypes().get(1)).isEqualTo(DataTypes.INT); + + assertThat(addressType.getFieldNames().get(2).asInternal()).isEqualTo("phones"); + DataType phonesType = addressType.getFieldTypes().get(2); + assertThat(phonesType).isInstanceOf(SetType.class); + UserDefinedType phoneType = ((UserDefinedType) ((SetType) phonesType).getElementType()); + + 
assertThat(phoneType.getKeyspace().asInternal()).isEqualTo("foo"); + assertThat(phoneType.getName().asInternal()).isEqualTo("phone"); + assertThat(phoneType.isFrozen()).isTrue(); + assertThat(phoneType.getFieldNames().size()).isEqualTo(2); + + assertThat(phoneType.getFieldNames().get(0).asInternal()).isEqualTo("name"); + assertThat(phoneType.getFieldTypes().get(0)).isEqualTo(DataTypes.TEXT); + + assertThat(phoneType.getFieldNames().get(1).asInternal()).isEqualTo("number"); + assertThat(phoneType.getFieldTypes().get(1)).isEqualTo(DataTypes.TEXT); + } finally { + Locale.setDefault(def); + } } @Test - public void should_make_a_frozen_copy_user_type_when_definition_already_available() { - UserDefinedType existing = mock(UserDefinedType.class); - - parse( - "org.apache.cassandra.db.marshal.UserType(foo,70686f6e65," - + "6e616d65:org.apache.cassandra.db.marshal.UTF8Type," - + "6e756d626572:org.apache.cassandra.db.marshal.UTF8Type)", - ImmutableMap.of(CqlIdentifier.fromInternal("phone"), existing)); - - verify(existing).copy(true); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_make_a_frozen_copy_user_type_when_definition_already_available(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + UserDefinedType existing = mock(UserDefinedType.class); + + parse( + "org.apache.cassandra.db.marshal.UserType(foo,70686f6e65," + + "6e616d65:org.apache.cassandra.db.marshal.UTF8Type," + + "6e756d626572:org.apache.cassandra.db.marshal.UTF8Type)", + ImmutableMap.of(CqlIdentifier.fromInternal("phone"), existing)); + + verify(existing).copy(true); + } finally { + Locale.setDefault(def); + } } @Test - public void should_parse_tuple() { - TupleType tupleType = - (TupleType) - parse( - "org.apache.cassandra.db.marshal.TupleType(" - + "org.apache.cassandra.db.marshal.Int32Type," - + "org.apache.cassandra.db.marshal.UTF8Type," - + "org.apache.cassandra.db.marshal.FloatType)"); - - 
assertThat(tupleType.getComponentTypes().size()).isEqualTo(3); - assertThat(tupleType.getComponentTypes().get(0)).isEqualTo(DataTypes.INT); - assertThat(tupleType.getComponentTypes().get(1)).isEqualTo(DataTypes.TEXT); - assertThat(tupleType.getComponentTypes().get(2)).isEqualTo(DataTypes.FLOAT); + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_parse_tuple(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + TupleType tupleType = + (TupleType) + parse( + "org.apache.cassandra.db.marshal.TupleType(" + + "org.apache.cassandra.db.marshal.Int32Type," + + "org.apache.cassandra.db.marshal.UTF8Type," + + "org.apache.cassandra.db.marshal.FloatType)"); + + assertThat(tupleType.getComponentTypes().size()).isEqualTo(3); + assertThat(tupleType.getComponentTypes().get(0)).isEqualTo(DataTypes.INT); + assertThat(tupleType.getComponentTypes().get(1)).isEqualTo(DataTypes.TEXT); + assertThat(tupleType.getComponentTypes().get(2)).isEqualTo(DataTypes.FLOAT); + } finally { + Locale.setDefault(def); + } } private DataType parse(String toParse) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressorsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressorsTest.java new file mode 100644 index 00000000000..7680292ce32 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/BuiltInCompressorsTest.java @@ -0,0 +1,70 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.protocol; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.TestDataProviders; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.protocol.internal.NoopCompressor; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class BuiltInCompressorsTest { + + @Mock private DriverContext context; + + @Before + public void setUp() { + MockitoAnnotations.initMocks(this); + } + + @Test + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_create_instance_for_supported_algorithms(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(BuiltInCompressors.newInstance("lz4", context)).isInstanceOf(Lz4Compressor.class); + assertThat(BuiltInCompressors.newInstance("snappy", context)) + .isInstanceOf(SnappyCompressor.class); + assertThat(BuiltInCompressors.newInstance("none", context)) + .isInstanceOf(NoopCompressor.class); + assertThat(BuiltInCompressors.newInstance("LZ4", context)).isInstanceOf(Lz4Compressor.class); + assertThat(BuiltInCompressors.newInstance("SNAPPY", context)) + 
.isInstanceOf(SnappyCompressor.class); + assertThat(BuiltInCompressors.newInstance("NONE", context)) + .isInstanceOf(NoopCompressor.class); + } finally { + Locale.setDefault(def); + } + } + + @Test + public void should_throw_when_unsupported_algorithm() { + assertThatThrownBy(() -> BuiltInCompressors.newInstance("GZIP", context)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("Unsupported compression algorithm 'GZIP'"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/PrimitiveTypeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/PrimitiveTypeTest.java new file mode 100644 index 00000000000..a382a896571 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/PrimitiveTypeTest.java @@ -0,0 +1,118 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.TestDataProviders; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class PrimitiveTypeTest { + + @Test + public void should_report_protocol_code() { + assertThat(DataTypes.ASCII.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.ASCII); + assertThat(DataTypes.BIGINT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.BIGINT); + assertThat(DataTypes.BLOB.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.BLOB); + assertThat(DataTypes.BOOLEAN.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.BOOLEAN); + assertThat(DataTypes.COUNTER.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.COUNTER); + assertThat(DataTypes.DECIMAL.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.DECIMAL); + assertThat(DataTypes.DOUBLE.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.DOUBLE); + assertThat(DataTypes.FLOAT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.FLOAT); + assertThat(DataTypes.INT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.INT); + assertThat(DataTypes.TIMESTAMP.getProtocolCode()) + .isEqualTo(ProtocolConstants.DataType.TIMESTAMP); + assertThat(DataTypes.UUID.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.UUID); + assertThat(DataTypes.VARINT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.VARINT); + assertThat(DataTypes.TIMEUUID.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.TIMEUUID); + assertThat(DataTypes.INET.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.INET); + 
assertThat(DataTypes.DATE.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.DATE); + assertThat(DataTypes.TEXT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.VARCHAR); + assertThat(DataTypes.TIME.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.TIME); + assertThat(DataTypes.SMALLINT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.SMALLINT); + assertThat(DataTypes.TINYINT.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.TINYINT); + assertThat(DataTypes.DURATION.getProtocolCode()).isEqualTo(ProtocolConstants.DataType.DURATION); + } + + @Test + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_format_as_cql(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(DataTypes.ASCII.asCql(true, true)).isEqualTo("ascii"); + assertThat(DataTypes.BIGINT.asCql(true, true)).isEqualTo("bigint"); + assertThat(DataTypes.BLOB.asCql(true, true)).isEqualTo("blob"); + assertThat(DataTypes.BOOLEAN.asCql(true, true)).isEqualTo("boolean"); + assertThat(DataTypes.COUNTER.asCql(true, true)).isEqualTo("counter"); + assertThat(DataTypes.DECIMAL.asCql(true, true)).isEqualTo("decimal"); + assertThat(DataTypes.DOUBLE.asCql(true, true)).isEqualTo("double"); + assertThat(DataTypes.FLOAT.asCql(true, true)).isEqualTo("float"); + assertThat(DataTypes.INT.asCql(true, true)).isEqualTo("int"); + assertThat(DataTypes.TIMESTAMP.asCql(true, true)).isEqualTo("timestamp"); + assertThat(DataTypes.UUID.asCql(true, true)).isEqualTo("uuid"); + assertThat(DataTypes.VARINT.asCql(true, true)).isEqualTo("varint"); + assertThat(DataTypes.TIMEUUID.asCql(true, true)).isEqualTo("timeuuid"); + assertThat(DataTypes.INET.asCql(true, true)).isEqualTo("inet"); + assertThat(DataTypes.DATE.asCql(true, true)).isEqualTo("date"); + assertThat(DataTypes.TEXT.asCql(true, true)).isEqualTo("text"); + assertThat(DataTypes.TIME.asCql(true, true)).isEqualTo("time"); + assertThat(DataTypes.SMALLINT.asCql(true, 
true)).isEqualTo("smallint"); + assertThat(DataTypes.TINYINT.asCql(true, true)).isEqualTo("tinyint"); + assertThat(DataTypes.DURATION.asCql(true, true)).isEqualTo("duration"); + } finally { + Locale.setDefault(def); + } + } + + @Test + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_format_as_string(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + assertThat(DataTypes.ASCII.toString()).isEqualTo("ASCII"); + assertThat(DataTypes.BIGINT.toString()).isEqualTo("BIGINT"); + assertThat(DataTypes.BLOB.toString()).isEqualTo("BLOB"); + assertThat(DataTypes.BOOLEAN.toString()).isEqualTo("BOOLEAN"); + assertThat(DataTypes.COUNTER.toString()).isEqualTo("COUNTER"); + assertThat(DataTypes.DECIMAL.toString()).isEqualTo("DECIMAL"); + assertThat(DataTypes.DOUBLE.toString()).isEqualTo("DOUBLE"); + assertThat(DataTypes.FLOAT.toString()).isEqualTo("FLOAT"); + assertThat(DataTypes.INT.toString()).isEqualTo("INT"); + assertThat(DataTypes.TIMESTAMP.toString()).isEqualTo("TIMESTAMP"); + assertThat(DataTypes.UUID.toString()).isEqualTo("UUID"); + assertThat(DataTypes.VARINT.toString()).isEqualTo("VARINT"); + assertThat(DataTypes.TIMEUUID.toString()).isEqualTo("TIMEUUID"); + assertThat(DataTypes.INET.toString()).isEqualTo("INET"); + assertThat(DataTypes.DATE.toString()).isEqualTo("DATE"); + assertThat(DataTypes.TEXT.toString()).isEqualTo("TEXT"); + assertThat(DataTypes.TIME.toString()).isEqualTo("TIME"); + assertThat(DataTypes.SMALLINT.toString()).isEqualTo("SMALLINT"); + assertThat(DataTypes.TINYINT.toString()).isEqualTo("TINYINT"); + assertThat(DataTypes.DURATION.toString()).isEqualTo("DURATION"); + } finally { + Locale.setDefault(def); + } + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/StringsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/StringsTest.java new file mode 100644 index 00000000000..2066a08ab84 --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/util/StringsTest.java @@ -0,0 +1,168 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.TestDataProviders; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Locale; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class StringsTest { + + @Test + @UseDataProvider(location = TestDataProviders.class, value = "locales") + public void should_report_cql_keyword(Locale locale) { + Locale def = Locale.getDefault(); + try { + Locale.setDefault(locale); + + assertThat(Strings.isReservedCqlKeyword(null)).isFalse(); + assertThat(Strings.isReservedCqlKeyword("NOT A RESERVED KEYWORD")).isFalse(); + + assertThat(Strings.isReservedCqlKeyword("add")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("allow")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("alter")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("and")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("apply")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("asc")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("authorize")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("batch")).isTrue(); + 
assertThat(Strings.isReservedCqlKeyword("begin")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("by")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("columnfamily")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("create")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("default")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("delete")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("desc")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("describe")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("drop")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("entries")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("execute")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("from")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("full")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("grant")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("if")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("in")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("index")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("infinity")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("insert")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("into")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("is")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("keyspace")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("limit")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("materialized")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("mbean")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("mbeans")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("modify")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("nan")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("norecursive")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("not")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("null")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("of")).isTrue(); + 
assertThat(Strings.isReservedCqlKeyword("on")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("or")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("order")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("primary")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("rename")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("replace")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("revoke")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("schema")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("select")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("set")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("table")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("to")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("token")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("truncate")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("unlogged")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("unset")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("update")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("use")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("using")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("view")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("where")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("with")).isTrue(); + + assertThat(Strings.isReservedCqlKeyword("ALLOW")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("ALTER")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("AND")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("APPLY")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("ASC")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("AUTHORIZE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("BATCH")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("BEGIN")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("BY")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("COLUMNFAMILY")).isTrue(); + 
assertThat(Strings.isReservedCqlKeyword("CREATE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("DEFAULT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("DELETE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("DESC")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("DESCRIBE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("DROP")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("ENTRIES")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("EXECUTE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("FROM")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("FULL")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("GRANT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("IF")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("IN")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("INDEX")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("INFINITY")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("INSERT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("INTO")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("IS")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("KEYSPACE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("LIMIT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("MATERIALIZED")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("MBEAN")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("MBEANS")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("MODIFY")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("NAN")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("NORECURSIVE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("NOT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("NULL")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("OF")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("ON")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("OR")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("ORDER")).isTrue(); + 
assertThat(Strings.isReservedCqlKeyword("PRIMARY")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("RENAME")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("REPLACE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("REVOKE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("SCHEMA")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("SELECT")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("SET")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("TABLE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("TO")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("TOKEN")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("TRUNCATE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("UNLOGGED")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("UNSET")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("UPDATE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("USE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("USING")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("VIEW")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("WHERE")).isTrue(); + assertThat(Strings.isReservedCqlKeyword("WITH")).isTrue(); + } finally { + Locale.setDefault(def); + } + } +} From 1a828416a8eb566522b84950c65a931101c3ff76 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 16:39:46 +0100 Subject: [PATCH 602/979] JAVA-2889: Remove TypeSafe imports from DriverConfigLoader (#1510) --- changelog/README.md | 1 + .../api/core/config/DriverConfigLoader.java | 52 ++------------- .../typesafe/DefaultDriverConfigLoader.java | 64 +++++++++++++++++++ manual/core/configuration/README.md | 25 +++++++- 4 files changed, 91 insertions(+), 51 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 886f96a7252..1743e46cb41 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2889: Remove TypeSafe imports from DriverConfigLoader - [bug] JAVA-2883: Use 
root locale explicitly when changing string case - [bug] JAVA-2890: Fix off-by-one error in UdtCodec - [improvement] JAVA-2905: Prevent new connections from using a protocol version higher than the negotiated one diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index f4242592e06..fcc7ea41689 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -15,16 +15,12 @@ */ package com.datastax.oss.driver.api.core.config; -import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.internal.core.config.composite.CompositeDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.map.MapBasedDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultProgrammaticDriverConfigLoaderBuilder; -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; -import com.typesafe.config.ConfigParseOptions; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.File; import java.net.URL; @@ -92,19 +88,7 @@ static DriverConfigLoader fromClasspath(@NonNull String resourceBaseName) { @NonNull static DriverConfigLoader fromClasspath( @NonNull String resourceBaseName, @NonNull ClassLoader appClassLoader) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback( - ConfigFactory.parseResourcesAnySyntax( - resourceBaseName, - ConfigParseOptions.defaults().setClassLoader(appClassLoader))) - 
.withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) - .resolve(); - return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); - }); + return DefaultDriverConfigLoader.fromClasspath(resourceBaseName, appClassLoader); } /** @@ -154,16 +138,7 @@ static DriverConfigLoader fromPath(@NonNull Path file) { */ @NonNull static DriverConfigLoader fromFile(@NonNull File file) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseFileAnySyntax(file)) - .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) - .resolve(); - return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); - }); + return DefaultDriverConfigLoader.fromFile(file); } /** @@ -188,16 +163,7 @@ static DriverConfigLoader fromFile(@NonNull File file) { */ @NonNull static DriverConfigLoader fromUrl(@NonNull URL url) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseURL(url)) - .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) - .resolve(); - return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); - }); + return DefaultDriverConfigLoader.fromUrl(url); } /** @@ -227,17 +193,7 @@ static DriverConfigLoader fromUrl(@NonNull URL url) { */ @NonNull static DriverConfigLoader fromString(@NonNull String contents) { - return new DefaultDriverConfigLoader( - () -> { - ConfigFactory.invalidateCaches(); - Config config = - ConfigFactory.defaultOverrides() - .withFallback(ConfigFactory.parseString(contents)) - .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) - .resolve(); - return config.getConfig(DefaultDriverConfigLoader.DEFAULT_ROOT_PATH); - }, - false); + return DefaultDriverConfigLoader.fromString(contents); } /** diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java index 9b068443874..9f87960adc6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java @@ -30,9 +30,12 @@ import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; +import com.typesafe.config.ConfigParseOptions; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.util.concurrent.EventExecutor; import io.netty.util.concurrent.ScheduledFuture; +import java.io.File; +import java.net.URL; import java.time.Duration; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -65,6 +68,67 @@ public class DefaultDriverConfigLoader implements DriverConfigLoader { .getConfig(DEFAULT_ROOT_PATH); }; + @NonNull + public static DefaultDriverConfigLoader fromClasspath( + @NonNull String resourceBaseName, @NonNull ClassLoader appClassLoader) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback( + ConfigFactory.parseResourcesAnySyntax( + resourceBaseName, + ConfigParseOptions.defaults().setClassLoader(appClassLoader))) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .resolve(); + return config.getConfig(DEFAULT_ROOT_PATH); + }); + } + + @NonNull + public static DriverConfigLoader fromFile(@NonNull File file) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseFileAnySyntax(file)) + 
.withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .resolve(); + return config.getConfig(DEFAULT_ROOT_PATH); + }); + } + + @NonNull + public static DriverConfigLoader fromUrl(@NonNull URL url) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseURL(url)) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .resolve(); + return config.getConfig(DEFAULT_ROOT_PATH); + }); + } + + @NonNull + public static DefaultDriverConfigLoader fromString(@NonNull String contents) { + return new DefaultDriverConfigLoader( + () -> { + ConfigFactory.invalidateCaches(); + Config config = + ConfigFactory.defaultOverrides() + .withFallback(ConfigFactory.parseString(contents)) + .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())) + .resolve(); + return config.getConfig(DEFAULT_ROOT_PATH); + }, + false); + } + private final Supplier configSupplier; private final TypesafeDriverConfig driverConfig; private final boolean supportsReloading; diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index c99f7c29963..50432c499cf 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -351,9 +351,28 @@ CqlSession session = CqlSession.builder().withConfigLoader(loader).build(); If Typesafe Config doesn't work for you, it is possible to get rid of it entirely. -You will need to provide your own implementations of [DriverConfig] and [DriverExecutionProfile]. -Then write a [DriverConfigLoader] and pass it to the session at initialization, as shown in the -previous sections. Study the built-in implementation (package +Start by excluding Typesafe Config from the list of dependencies required by the driver; if you are +using Maven, this can be achieved as follows: + +```xml + + + com.datastax.oss + java-driver-core + ... 
+ + + com.typesafe + config + + + + + +``` +Next, you will need to provide your own implementations of [DriverConfig] and +[DriverExecutionProfile]. Then write a [DriverConfigLoader] and pass it to the session at +initialization, as shown in the previous sections. Study the built-in implementation (package `com.datastax.oss.driver.internal.core.config.typesafe`) for reference. Reloading is not mandatory: you can choose not to implement it, and the driver will simply keep From a3fff9a6bce4ecb42ba7df90f6f454234facb782 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 16:41:42 +0100 Subject: [PATCH 603/979] JAVA-2894: Clarify usage of Statement.setQueryTimestamp (#1511) --- changelog/README.md | 1 + .../oss/driver/api/core/cql/Statement.java | 16 +++++++++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 1743e46cb41..f122a8c89c5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [documentation] JAVA-2894: Clarify usage of Statement.setQueryTimestamp - [bug] JAVA-2889: Remove TypeSafe imports from DriverConfigLoader - [bug] JAVA-2883: Use root locale explicitly when changing string case - [bug] JAVA-2890: Fix off-by-one error in UdtCodec diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index 52023d3a6b7..dcfd1420b53 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -255,7 +255,8 @@ default SelfT disableTracing() { } /** - * Returns the query timestamp, in microseconds, to send with the statement. + * Returns the query timestamp, in microseconds, to send with the statement. See {@link + * #setQueryTimestamp(long)} for details. * *

          If this is equal to {@link #NO_DEFAULT_TIMESTAMP}, the {@link TimestampGenerator} configured * for this driver instance will be used to generate a timestamp. @@ -277,6 +278,19 @@ default long getDefaultTimestamp() { /** * Sets the query timestamp, in microseconds, to send with the statement. * + *

          This is an alternative to appending a {@code USING TIMESTAMP} clause in the statement's + * query string, and has the advantage of sending the timestamp separately from the query string + * itself, which doesn't have to be modified when executing the same statement with different + * timestamps. Note that, if both a {@code USING TIMESTAMP} clause and a query timestamp are set + * for a given statement, the timestamp from the {@code USING TIMESTAMP} clause wins. + * + *

          This method can be used on any instance of {@link SimpleStatement}, {@link BoundStatement} + * or {@link BatchStatement}. For a {@link BatchStatement}, the timestamp will apply to all its + * child statements; it is not possible to define per-child timestamps using this method, and + * consequently, if this method is called on a batch child statement, the provided timestamp will + * be silently ignored. If different timestamps are required for individual child statements, this + * can only be achieved with a custom {@code USING TIMESTAMP} clause in each child query. + * *

          If this is equal to {@link #NO_DEFAULT_TIMESTAMP}, the {@link TimestampGenerator} configured * for this driver instance will be used to generate a timestamp. * From 1c580e2efdbbf05151a30d82c014cb5c3dfe415a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 16:54:49 +0100 Subject: [PATCH 604/979] JAVA-2893: Allow duplicate keys in DefaultProgrammaticDriverConfigLoaderBuilder (#1512) --- changelog/README.md | 1 + ...ultProgrammaticDriverConfigLoaderBuilder.java | 7 +++---- ...rogrammaticDriverConfigLoaderBuilderTest.java | 16 ++++++++++++++++ 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index f122a8c89c5..c05d8d6f5b6 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2893: Allow duplicate keys in DefaultProgrammaticDriverConfigLoaderBuilder - [documentation] JAVA-2894: Clarify usage of Statement.setQueryTimestamp - [bug] JAVA-2889: Remove TypeSafe imports from DriverConfigLoader - [bug] JAVA-2883: Use root locale explicitly when changing string case diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java index 69f71b61643..3ec2f0d7500 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilder.java @@ -20,13 +20,13 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; -import 
com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigValueFactory; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -43,8 +43,7 @@ public class DefaultProgrammaticDriverConfigLoaderBuilder // Do not remove root path here, it must be done after merging configs .withFallback(ConfigFactory.defaultReference(CqlSession.class.getClassLoader())); - private final NullAllowingImmutableMap.Builder values = - NullAllowingImmutableMap.builder(); + private final Map values = new HashMap<>(); private final Supplier fallbackSupplier; private final String rootPath; @@ -260,7 +259,7 @@ public DriverConfigLoader build() { private Config buildConfig() { Config config = ConfigFactory.empty(); - for (Map.Entry entry : values.build().entrySet()) { + for (Map.Entry entry : values.entrySet()) { config = config.withValue(entry.getKey(), ConfigValueFactory.fromAnyRef(entry.getValue())); } return config; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java index 7e2b6041ff8..6159db508e7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultProgrammaticDriverConfigLoaderBuilderTest.java @@ -29,6 +29,22 @@ public class DefaultProgrammaticDriverConfigLoaderBuilderTest { private static final String FALLBACK_CONFIG = "int1 = 1\nint2 = 2\nprofiles.profile1 { int1 = 11 }"; + @Test + public void 
should_override_option() { + DriverConfigLoader loader = + new DefaultProgrammaticDriverConfigLoaderBuilder( + () -> ConfigFactory.parseString(FALLBACK_CONFIG), "") + .withInt(MockOptions.INT1, 2) + .withInt(MockOptions.INT1, 3) + .withInt(MockOptions.INT1, 4) + .withInt(MockOptions.INT2, 3) + .withInt(MockOptions.INT2, 4) + .build(); + DriverConfig config = loader.getInitialConfig(); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT1)).isEqualTo(4); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT2)).isEqualTo(4); + } + @Test public void should_override_option_in_default_profile() { DriverConfigLoader loader = From 3059c89437473a652ffb216984557eb9ca64b1b8 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 19 Nov 2020 23:59:59 +0100 Subject: [PATCH 605/979] JAVA-2449: Use non-cryptographic random number generation in Uuids.random() --- changelog/README.md | 1 + .../core/insights/InsightsClient.java | 4 +- .../oss/driver/api/core/uuid/Uuids.java | 69 ++++++- .../oss/driver/api/core/uuid/UuidsTest.java | 168 +++++++++++++++--- .../concurrent/LimitConcurrencyCustom.java | 4 +- .../LimitConcurrencyCustomAsync.java | 4 +- .../LimitConcurrencyRequestThrottler.java | 4 +- .../mapper/KillrVideoMapperExample.java | 7 +- .../video/CreateVideoQueryProvider.java | 4 +- upgrade_guide/README.md | 24 +++ 10 files changed, 240 insertions(+), 49 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index c05d8d6f5b6..00845e97b3d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2449: Use non-cryptographic random number generation in Uuids.random() - [improvement] JAVA-2893: Allow duplicate keys in DefaultProgrammaticDriverConfigLoaderBuilder - [documentation] JAVA-2894: Clarify usage of Statement.setQueryTimestamp - [bug] JAVA-2889: Remove TypeSafe imports from DriverConfigLoader diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java index 3f02e2368a3..b3bbaed8b34 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/InsightsClient.java @@ -41,6 +41,7 @@ import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.adminrequest.AdminRequestHandler; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; @@ -65,7 +66,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ScheduledExecutorService; @@ -90,7 +90,7 @@ public class InsightsClient { static final String DEFAULT_JAVA_APPLICATION = "Default Java Application"; private final ControlConnection controlConnection; - private final String id = UUID.randomUUID().toString(); + private final String id = Uuids.random().toString(); private final InsightsConfiguration insightsConfiguration; private final AtomicInteger numberOfStatusEventErrors = new AtomicInteger(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 35cc37e8e7a..39ca0c48d5d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -36,6 +36,7 @@ import java.util.Properties; import java.util.Random; import java.util.Set; +import 
java.util.SplittableRandom; import java.util.TimeZone; import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; @@ -263,11 +264,61 @@ private static long makeClockSeqAndNode() { /** * Creates a new random (version 4) UUID. * - *

          This method is just a convenience for {@link UUID#randomUUID()}. + *

          This method has received a new implementation as of driver 4.10. Unlike the JDK's + * {@link UUID#randomUUID()} method, it does not use anymore the cryptographic {@link + * java.security.SecureRandom} number generator. Instead, it uses the non-cryptographic {@link + * Random} class, with a different seed at every invocation. + * + *

          Using a non-cryptographic generator has two advantages: + * + *

            + *
          1. UUID generation is much faster than with {@link UUID#randomUUID()}; + *
          2. Contrary to {@link UUID#randomUUID()}, UUID generation with this method does not require + * I/O and is not a blocking call, which makes this method better suited for non-blocking + * applications. + *
          + * + * Of course, this method is intended for usage where cryptographic strength is not required, such + * as when generating row identifiers for insertion in the database. If you still need + * cryptographic strength, consider using {@link Uuids#random(Random)} instead, and pass an + * instance of {@link java.security.SecureRandom}. */ @NonNull public static UUID random() { - return UUID.randomUUID(); + return random(new Random()); + } + + /** + * Creates a new random (version 4) UUID using the provided {@link Random} instance. + * + *

          This method offers more flexibility than {@link #random()} as it allows to customize the + * {@link Random} instance to use, and also offers the possibility to reuse instances across + * successive calls. Reusing Random instances is the norm when using {@link + * java.util.concurrent.ThreadLocalRandom}, for instance; however other Random implementations may + * perform poorly under heavy thread contention. + * + *

          Note: some Random implementations, such as {@link java.security.SecureRandom}, may trigger + * I/O activity during random number generation; these instances should not be used in + * non-blocking contexts. + */ + @NonNull + public static UUID random(@NonNull Random random) { + byte[] data = new byte[16]; + random.nextBytes(data); + return buildUuid(data, 4); + } + + /** + * Creates a new random (version 4) UUID using the provided {@link SplittableRandom} instance. + * + *

          This method should be preferred to {@link #random()} when UUID generation happens in massive + * parallel computations, such as when using the ForkJoin framework. Note that {@link + * SplittableRandom} instances are not thread-safe. + */ + @NonNull + public static UUID random(@NonNull SplittableRandom random) { + byte[] data = toBytes(random.nextLong(), random.nextLong()); + return buildUuid(data, 4); } /** @@ -344,7 +395,7 @@ public static UUID nameBased(@NonNull UUID namespace, @NonNull byte[] name, int MessageDigest md = newMessageDigest(version); md.update(toBytes(namespace)); md.update(name); - return buildNamedUuid(md.digest(), version); + return buildUuid(md.digest(), version); } /** @@ -390,7 +441,7 @@ public static UUID nameBased(@NonNull byte[] namespaceAndName, int version) { } MessageDigest md = newMessageDigest(version); md.update(namespaceAndName); - return buildNamedUuid(md.digest(), version); + return buildUuid(md.digest(), version); } @NonNull @@ -408,7 +459,7 @@ private static MessageDigest newMessageDigest(int version) { } @NonNull - private static UUID buildNamedUuid(@NonNull byte[] data, int version) { + private static UUID buildUuid(@NonNull byte[] data, int version) { // clear and set version data[6] &= (byte) 0x0f; data[6] |= (byte) (version << 4); @@ -433,12 +484,16 @@ private static UUID fromBytes(byte[] data) { } private static byte[] toBytes(UUID uuid) { - byte[] out = new byte[16]; long msb = uuid.getMostSignificantBits(); + long lsb = uuid.getLeastSignificantBits(); + return toBytes(msb, lsb); + } + + private static byte[] toBytes(long msb, long lsb) { + byte[] out = new byte[16]; for (int i = 0; i < 8; i++) { out[i] = (byte) (msb >> ((7 - i) * 8)); } - long lsb = uuid.getLeastSignificantBits(); for (int i = 8; i < 16; i++) { out[i] = (byte) (lsb >> ((15 - i) * 8)); } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java 
index 848aebc7f7a..7396c633f72 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/uuid/UuidsTest.java @@ -27,18 +27,122 @@ import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.util.Arrays; import java.util.HashSet; import java.util.Random; import java.util.Set; +import java.util.SplittableRandom; import java.util.UUID; import java.util.concurrent.ConcurrentSkipListSet; +import java.util.concurrent.ThreadLocalRandom; +import java.util.function.Supplier; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) public class UuidsTest { + @Test + public void should_generate_unique_random_uuids_Random() { + Set generated = serialGeneration(1_000_000, Uuids::random); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_shared_Random2() { + Random random = new Random(); + Set generated = serialGeneration(1_000_000, () -> Uuids.random(random)); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_across_threads_shared_Random() throws Exception { + Random random = new Random(); + Set generated = parallelGeneration(10, 10_000, () -> () -> Uuids.random(random)); + assertThat(generated).hasSize(10 * 10_000); + } + + @Test + public void should_generate_unique_random_uuids_shared_SecureRandom() { + SecureRandom random = new SecureRandom(); + Set generated = serialGeneration(1_000_000, () -> Uuids.random(random)); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_across_threads_shared_SecureRandom() + throws Exception { + SecureRandom random = new SecureRandom(); + Set generated = parallelGeneration(10, 10_000, () -> () -> Uuids.random(random)); + 
assertThat(generated).hasSize(10 * 10_000); + } + + @Test + public void should_generate_unique_random_uuids_ThreadLocalRandom() { + ThreadLocalRandom random = ThreadLocalRandom.current(); + Set generated = serialGeneration(1_000_000, () -> Uuids.random(random)); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_across_threads_ThreadLocalRandom() + throws Exception { + Set generated = + parallelGeneration( + 10, + 10_000, + () -> { + ThreadLocalRandom random = ThreadLocalRandom.current(); + return () -> Uuids.random(random); + }); + assertThat(generated).hasSize(10 * 10_000); + } + + @Test + public void should_generate_unique_random_uuids_Netty_ThreadLocalRandom() { + io.netty.util.internal.ThreadLocalRandom random = + io.netty.util.internal.ThreadLocalRandom.current(); + Set generated = serialGeneration(1_000_000, () -> Uuids.random(random)); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_across_threads_Netty_ThreadLocalRandom() + throws Exception { + Set generated = + parallelGeneration( + 10, + 10_000, + () -> { + io.netty.util.internal.ThreadLocalRandom random = + io.netty.util.internal.ThreadLocalRandom.current(); + return () -> Uuids.random(random); + }); + assertThat(generated).hasSize(10 * 10_000); + } + + @Test + public void should_generate_unique_random_uuids_SplittableRandom() { + SplittableRandom random = new SplittableRandom(); + Set generated = serialGeneration(1_000_000, () -> Uuids.random(random)); + assertThat(generated).hasSize(1_000_000); + } + + @Test + public void should_generate_unique_random_uuids_across_threads_SplittableRandom() + throws Exception { + Set generated = + parallelGeneration( + 10, + 10_000, + () -> { + SplittableRandom random = new SplittableRandom(); + return () -> Uuids.random(random); + }); + assertThat(generated).hasSize(10 * 10_000); + } + @Test @UseDataProvider("byteArrayNames") public void 
should_generate_name_based_uuid_from_namespace_and_byte_array( @@ -186,7 +290,7 @@ public void should_generate_timestamp_within_10_ms() { // The Uuids class does some computation at class initialization, which may screw up our // assumption below that Uuids.timeBased() takes less than 10ms, so force class loading now. - Uuids.random(); + Uuids.timeBased(); long start = System.currentTimeMillis(); UUID uuid = Uuids.timeBased(); @@ -203,34 +307,14 @@ public void should_generate_timestamp_within_10_ms() { @Test public void should_generate_unique_time_based_uuids() { - int count = 1_000_000; - Set generated = new HashSet<>(count); - - for (int i = 0; i < count; ++i) { - generated.add(Uuids.timeBased()); - } - - assertThat(generated).hasSize(count); + Set generated = serialGeneration(1_000_000, Uuids::timeBased); + assertThat(generated).hasSize(1_000_000); } @Test public void should_generate_unique_time_based_uuids_across_threads() throws Exception { - int threadCount = 10; - int uuidsPerThread = 10_000; - Set generated = new ConcurrentSkipListSet<>(); - - UUIDGenerator[] generators = new UUIDGenerator[threadCount]; - for (int i = 0; i < threadCount; i++) { - generators[i] = new UUIDGenerator(uuidsPerThread, generated); - } - for (int i = 0; i < threadCount; i++) { - generators[i].start(); - } - for (int i = 0; i < threadCount; i++) { - generators[i].join(); - } - - assertThat(generated).hasSize(threadCount * uuidsPerThread); + Set generated = parallelGeneration(10, 10_000, () -> Uuids::timeBased); + assertThat(generated).hasSize(10 * 10_000); } @Test @@ -362,20 +446,48 @@ private static byte[] longToBytes(long x) { return ByteBuffer.allocate(Long.BYTES).putLong(x).array(); } - private static class UUIDGenerator extends Thread { + private Set serialGeneration(int count, Supplier uuidSupplier) { + Set generated = new HashSet<>(count); + for (int i = 0; i < count; ++i) { + generated.add(uuidSupplier.get()); + } + return generated; + } + + public Set parallelGeneration( + 
int threadCount, int uuidsPerThread, Supplier> uuidSupplier) + throws InterruptedException { + Set generated = new ConcurrentSkipListSet<>(); + UuidGenerator[] generators = new UuidGenerator[threadCount]; + for (int i = 0; i < threadCount; i++) { + generators[i] = new UuidGenerator(uuidsPerThread, uuidSupplier, generated); + } + for (int i = 0; i < threadCount; i++) { + generators[i].start(); + } + for (int i = 0; i < threadCount; i++) { + generators[i].join(); + } + return generated; + } + + private static class UuidGenerator extends Thread { private final int toGenerate; private final Set generated; + private final Supplier> uuidSupplier; - UUIDGenerator(int toGenerate, Set generated) { + UuidGenerator(int toGenerate, Supplier> uuidSupplier, Set generated) { this.toGenerate = toGenerate; this.generated = generated; + this.uuidSupplier = uuidSupplier; } @Override public void run() { + Supplier uuidSupplier = this.uuidSupplier.get(); for (int i = 0; i < toGenerate; ++i) { - generated.add(Uuids.timeBased()); + generated.add(uuidSupplier.get()); } } } diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java index 9962d414044..64dca3cfcd0 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustom.java @@ -21,7 +21,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.datastax.oss.driver.api.core.cql.PreparedStatement; -import java.util.UUID; +import com.datastax.oss.driver.api.core.uuid.Uuids; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; @@ -103,7 +103,7 @@ private static void insertConcurrent(CqlSession session) throws 
InterruptedExcep executor.submit( () -> { try { - session.execute(pst.bind().setUuid("id", UUID.randomUUID()).setInt("value", counter)); + session.execute(pst.bind().setUuid("id", Uuids.random()).setInt("value", counter)); insertsCounter.incrementAndGet(); } catch (Throwable t) { // On production you should leverage logger and use logger.error() method. diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java index 45287098e5d..73fd223c386 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyCustomAsync.java @@ -22,9 +22,9 @@ import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; import java.util.ArrayList; import java.util.List; -import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; @@ -124,7 +124,7 @@ private static CompletableFuture executeInsert( CqlSession session, PreparedStatement pst, int counter) { return session - .executeAsync(pst.bind().setUuid("id", UUID.randomUUID()).setInt("value", counter)) + .executeAsync(pst.bind().setUuid("id", Uuids.random()).setInt("value", counter)) .toCompletableFuture() .whenComplete( (BiConsumer) diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyRequestThrottler.java b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyRequestThrottler.java index 995054f2c52..71ec8b24ccb 100644 --- 
a/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyRequestThrottler.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/concurrent/LimitConcurrencyRequestThrottler.java @@ -21,10 +21,10 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; import java.util.ArrayList; import java.util.List; -import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; @@ -86,7 +86,7 @@ private static void insertConcurrent(CqlSession session) for (int i = 0; i < TOTAL_NUMBER_OF_INSERTS; i++) { pending.add( session - .executeAsync(pst.bind().setUuid("id", UUID.randomUUID()).setInt("value", i)) + .executeAsync(pst.bind().setUuid("id", Uuids.random()).setInt("value", i)) // Transform CompletionState toCompletableFuture to be able to wait for execution of // all using CompletableFuture.allOf .toCompletableFuture()); diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java index 4ad547767c8..790ed09ef07 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/KillrVideoMapperExample.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.examples.mapper.killrvideo.KillrVideoMapper; import 
com.datastax.oss.driver.examples.mapper.killrvideo.user.User; import com.datastax.oss.driver.examples.mapper.killrvideo.user.UserDao; @@ -39,7 +40,6 @@ import java.util.List; import java.util.Optional; import java.util.Set; -import java.util.UUID; import java.util.stream.Collectors; /** @@ -87,8 +87,7 @@ public static void main(String[] args) { // Create a new user UserDao userDao = mapper.userDao(); - User user = - new User(UUID.randomUUID(), "test", "user", "testuser@example.com", Instant.now()); + User user = new User(Uuids.random(), "test", "user", "testuser@example.com", Instant.now()); if (userDao.create(user, "fakePasswordForTests".toCharArray())) { System.out.println("Created " + user); @@ -99,7 +98,7 @@ public static void main(String[] args) { // Creating another user with the same email should fail assert !userDao.create( - new User(UUID.randomUUID(), "test2", "user", "testuser@example.com", Instant.now()), + new User(Uuids.random(), "test2", "user", "testuser@example.com", Instant.now()), "fakePasswordForTests2".toCharArray()); // Simulate login attempts diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java index 85704290903..6ec1c7b1aaf 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java @@ -22,13 +22,13 @@ import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; import com.datastax.oss.driver.api.core.cql.DefaultBatchType; import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.entity.EntityHelper; import 
com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import java.time.Instant; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.UUID; /** * Provides the implementation of {@link VideoDao#create}. @@ -68,7 +68,7 @@ class CreateVideoQueryProvider { void create(Video video) { if (video.getVideoid() == null) { - video.setVideoid(UUID.randomUUID()); + video.setVideoid(Uuids.random()); } if (video.getAddedDate() == null) { video.setAddedDate(Instant.now()); diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 8451778c92e..a873731705b 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,29 @@ ## Upgrade guide +### 4.10.0 + +[JAVA-2449](https://datastax-oss.atlassian.net/browse/JAVA-2449) modified the implementation of +[Uuids.random()]: this method does not delegate anymore to the JDK's `java.util.UUID.randomUUID()` +implementation, but instead re-implements random UUID generation using the non-cryptographic +random number generator `java.util.Random`. + +For most users, non cryptographic strength is enough and this change should translate into better +performance when generating UUIDs for database insertion. However, in the unlikely case where your +application requires cryptographic strength for UUID generation, you should update your code to +use `java.util.UUID.randomUUID()` instead of `com.datastax.oss.driver.api.core.uuid.Uuids.random()` +from now on. + +This release also introduces two new methods for random UUID generation: + +1. [Uuids.random(Random)]: similar to `Uuids.random()` but allows to pass a custom instance of + `java.util.Random` and/or re-use the same instance across calls. +2. [Uuids.random(SplittableRandom)]: similar to `Uuids.random()` but uses a + `java.util.SplittableRandom` instead. 
+ +[Uuids.random()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[Uuids.random(Random)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.Random- +[Uuids.random(SplittableRandom)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.SplittableRandom- + ### 4.5.x - 4.6.0 These versions are subject to [JAVA-2676](https://datastax-oss.atlassian.net/browse/JAVA-2676), a From 2765972530d914bc32e749cc053fab22eab526b4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 20 Nov 2020 00:02:02 +0100 Subject: [PATCH 606/979] Replace Uuids.makeEpoch() with compile-time constant --- .../oss/driver/api/core/uuid/Uuids.java | 30 ++++++++----------- 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 39ca0c48d5d..4b63dee3055 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -28,7 +28,6 @@ import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; -import java.util.Calendar; import java.util.Date; import java.util.Enumeration; import java.util.HashSet; @@ -37,7 +36,6 @@ import java.util.Random; import java.util.Set; import java.util.SplittableRandom; -import java.util.TimeZone; import java.util.UUID; import java.util.concurrent.atomic.AtomicLong; import org.slf4j.Logger; @@ -113,7 +111,16 @@ public final class Uuids { private Uuids() {} - private static final long START_EPOCH = makeEpoch(); + /** + * UUID v1 timestamps must be expressed relatively to October 15th, 1582 – the day when Gregorian + * calendar was introduced. 
This constant captures that moment in time expressed in milliseconds + * before the Unix epoch. It can be obtained by calling: + * + *

          +   *   Instant.parse("1582-10-15T00:00:00Z").toEpochMilli();
          +   * 
          + */ + private static final long START_EPOCH_MILLIS = -12219292800000L; // Lazily initialize clock seq + node value at time of first access. Quarkus will attempt to // initialize this class at deployment time which prevents us from just setting this value @@ -157,19 +164,6 @@ private long get() { private static final AtomicLong lastTimestamp = new AtomicLong(0L); - private static long makeEpoch() { - // UUID v1 timestamps must be in 100-nanoseconds interval since 00:00:00.000 15 Oct 1582. - Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT-0")); - c.set(Calendar.YEAR, 1582); - c.set(Calendar.MONTH, Calendar.OCTOBER); - c.set(Calendar.DAY_OF_MONTH, 15); - c.set(Calendar.HOUR_OF_DAY, 0); - c.set(Calendar.MINUTE, 0); - c.set(Calendar.SECOND, 0); - c.set(Calendar.MILLISECOND, 0); - return c.getTimeInMillis(); - } - private static long makeNode() { // We don't have access to the MAC address (in pure JAVA at least) but need to generate a node @@ -590,7 +584,7 @@ public static long unixTimestamp(@NonNull UUID uuid) { uuid.version())); } long timestamp = uuid.timestamp(); - return (timestamp / 10000) + START_EPOCH; + return (timestamp / 10000) + START_EPOCH_MILLIS; } // Use {@link System#currentTimeMillis} for a base time in milliseconds, and if we are in the same @@ -627,7 +621,7 @@ private static long getCurrentTimestamp() { @VisibleForTesting static long fromUnixTimestamp(long tstamp) { - return (tstamp - START_EPOCH) * 10000; + return (tstamp - START_EPOCH_MILLIS) * 10000; } private static long millisOf(long timestamp) { From 8fb2007632b5981d855da29439042c186d909644 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 17:04:17 +0100 Subject: [PATCH 607/979] Bulk upgrade of driver dependencies + test fixes (#1517) --- core/revapi.json | 2 +- .../statement/GraphTraversalBatchITBase.java | 4 +- .../osgi/checks/GeoServiceChecks.java | 2 +- pom.xml | 72 +++++++++---------- 4 files changed, 39 insertions(+), 41 deletions(-) diff --git 
a/core/revapi.json b/core/revapi.json index b42dd7ca74f..8bf661b8544 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5028,7 +5028,7 @@ }, { "code": "java.class.nonPublicPartOfAPI", - "new": "class com.fasterxml.jackson.databind.util.PrimitiveArrayBuilder.Node", + "new": "class com.fasterxml.jackson.databind.util.PrimitiveArrayBuilder.Node", "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" } ] diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java index 264677b3174..413df0c0436 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalBatchITBase.java @@ -151,9 +151,7 @@ public void should_fail_if_no_bytecode_in_batch() { "Should have thrown InvalidQueryException because batch does not contain any traversals."); } catch (InvalidQueryException e) { assertThat(e.getMessage()) - .contains( - "Could not read the traversal from the request sent.", - "The batch statement sent does not contain any traversal."); + .contains("The batch statement sent does not contain any traversal"); } } } diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java index 2d93ed93026..4aae717502c 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/checks/GeoServiceChecks.java @@ -38,7 +38,7 @@ public static void checkServiceGeo(GeoMailboxService service) throws Exception { service.sendGeoMessage(message); } Iterable retrievedMessages 
= service.getGeoMessages(recipient); - assertThat(retrievedMessages).containsExactlyElementsOf(insertedMessages); + assertThat(retrievedMessages).containsExactlyInAnyOrderElementsOf(insertedMessages); } finally { service.clearGeoMailbox(recipient); } diff --git a/pom.xml b/pom.xml index 437ef30d7d2..83a04f9b198 100644 --- a/pom.xml +++ b/pom.xml @@ -45,40 +45,40 @@ true UTF-8 UTF-8 - 1.3.4 - 2.1.11 - 4.0.5 + 1.4.1 + 2.1.12 + 4.1.16 4.1.51.Final 1.2.1 3.4.8 1.7.26 - 1.0.2 - 20190722 + 1.0.3 + 20201115 2.11.0 2.11.0 1.9.12 - 1.1.7.3 - 1.6.0 + 1.1.8.1 + 1.7.1 - 3.13.1 + 3.18.1 1.3 4.13.1 1.2.3 6.0.0 6.0.3 - 4.13.3 - 0.8.9 - 1.0 + 4.13.4 + 0.10.0 + 1.1.4 2.28 2.5.0 - 2.0.1 + 2.1.1 1.1.4 - 2.2.2 - 4.0.2 + 2.2.20 + 4.0.3 2.0.0-M19 2.22.2 - 20.0.0 + 20.3.0 false ${skipTests} @@ -129,7 +129,7 @@ com.github.jnr jnr-posix - 3.0.50 + 3.1.2 io.dropwizard.metrics @@ -202,7 +202,7 @@ com.squareup javapoet - 1.11.1 + 1.13.0 junit @@ -267,7 +267,7 @@ org.ops4j.pax.url pax-url-wrap - 2.5.4 + 2.6.3 org.ops4j.pax.url @@ -322,7 +322,7 @@ javax.annotation javax.annotation-api - 1.2 + 1.3.2 com.fasterxml.jackson.core @@ -337,7 +337,7 @@ com.google.testing.compile compile-testing - 0.18 + 0.19 org.awaitility @@ -347,7 +347,7 @@ org.testng testng - 6.14.3 + 7.3.0 org.apache.directory.server @@ -408,7 +408,7 @@ io.micrometer micrometer-core - 1.5.0 + 1.6.1 org.eclipse.microprofile.metrics @@ -418,7 +418,7 @@ io.smallrye smallrye-metrics - 2.4.2 + 2.4.4 @@ -427,7 +427,7 @@ maven-compiler-plugin - 3.6.1 + 3.8.1 com.coveo @@ -454,11 +454,11 @@ maven-shade-plugin - 3.1.1 + 3.2.3 maven-assembly-plugin - 3.1.0 + 3.3.0 @@ -475,15 +475,15 @@ maven-source-plugin - 3.0.1 + 3.1.0 maven-javadoc-plugin - 3.1.1 + 3.2.0 maven-jar-plugin - 3.0.2 + 3.2.0 org.sonatype.plugins @@ -492,7 +492,7 @@ maven-gpg-plugin - 1.5 + 1.6 maven-release-plugin @@ -500,25 +500,25 @@ maven-install-plugin - 2.4 + 2.5.2 maven-deploy-plugin - 2.7 + 2.8.2 maven-dependency-plugin - 3.1.1 + 3.1.2 org.jacoco jacoco-maven-plugin - 
0.8.3 + 0.8.5 org.apache.felix maven-bundle-plugin - 3.5.1 + 4.2.1 org.revapi @@ -540,7 +540,7 @@ org.revapi revapi-java - 0.19.1 + 0.22.1 From 3506c24a4fdbb954915fdd44d8ca96c3ca2317bb Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Fri, 18 Dec 2020 08:44:43 -0800 Subject: [PATCH 608/979] JAVA-2871: Allow keyspace exclusions in the metadata, and exclude system keyspaces by default (#1500) Co-authored-by: Alexandre Dutra --- changelog/README.md | 2 + .../driver/api/core/config/OptionsMap.java | 4 + .../queries/CassandraSchemaQueries.java | 26 +-- .../schema/queries/CassandraSchemaRows.java | 25 ++- .../schema/queries/KeyspaceFilter.java | 58 +++++ .../queries/RuleBasedKeyspaceFilter.java | 201 ++++++++++++++++++ core/src/main/resources/reference.conf | 35 ++- .../schema/parsing/AggregateParserTest.java | 2 + .../schema/parsing/SchemaParserTest.java | 3 +- .../schema/parsing/SchemaParserTestBase.java | 9 + .../schema/parsing/TableParserTest.java | 2 +- .../schema/parsing/ViewParserTest.java | 2 +- .../queries/Cassandra3SchemaQueriesTest.java | 2 +- .../schema/queries/KeyspaceFilterTest.java | 141 ++++++++++++ manual/core/metadata/schema/README.md | 52 ++++- 15 files changed, 528 insertions(+), 36 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilter.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/RuleBasedKeyspaceFilter.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilterTest.java diff --git a/changelog/README.md b/changelog/README.md index 00845e97b3d..ccd1f87e1a1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2871: Allow keyspace exclusions in the metadata, and exclude system keyspaces + by default - [improvement] JAVA-2449: Use non-cryptographic random number generation in Uuids.random() - 
[improvement] JAVA-2893: Allow duplicate keys in DefaultProgrammaticDriverConfigLoaderBuilder - [documentation] JAVA-2894: Clarify usage of Statement.setQueryTimestamp diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 6de7fc76355..5123e341036 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.core.config; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.io.InvalidObjectException; @@ -339,6 +340,9 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METADATA_TOPOLOGY_WINDOW, Duration.ofSeconds(1)); map.put(TypedDriverOption.METADATA_TOPOLOGY_MAX_EVENTS, 20); map.put(TypedDriverOption.METADATA_SCHEMA_ENABLED, true); + map.put( + TypedDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, + ImmutableList.of("!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin")); map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, requestTimeout); map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, requestPageSize); map.put(TypedDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(1)); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java index e0bece6929b..69a0788bb8a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaQueries.java @@ -48,7 +48,7 @@ public 
abstract class CassandraSchemaQueries implements SchemaQueries { private final String logPrefix; private final Duration timeout; private final int pageSize; - private final String whereClause; + private final KeyspaceFilter keyspaceFilter; // The future we return from execute, completes when all the queries are done. private final CompletableFuture schemaRowsFuture = new CompletableFuture<>(); private final long startTimeNs = System.nanoTime(); @@ -69,25 +69,8 @@ protected CassandraSchemaQueries( List refreshedKeyspaces = config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList()); - this.whereClause = buildWhereClause(refreshedKeyspaces); - } - - private static String buildWhereClause(List refreshedKeyspaces) { - if (refreshedKeyspaces.isEmpty()) { - return ""; - } else { - StringBuilder builder = new StringBuilder(" WHERE keyspace_name in ("); - boolean first = true; - for (String keyspace : refreshedKeyspaces) { - if (first) { - first = false; - } else { - builder.append(","); - } - builder.append('\'').append(keyspace).append('\''); - } - return builder.append(")").toString(); - } + assert refreshedKeyspaces != null; // per the default value + this.keyspaceFilter = KeyspaceFilter.newInstance(logPrefix, refreshedKeyspaces); } protected abstract String selectKeyspacesQuery(); @@ -125,7 +108,8 @@ public CompletionStage execute() { private void executeOnAdminExecutor() { assert adminExecutor.inEventLoop(); - schemaRowsBuilder = new CassandraSchemaRows.Builder(node, logPrefix); + schemaRowsBuilder = new CassandraSchemaRows.Builder(node, keyspaceFilter, logPrefix); + String whereClause = keyspaceFilter.getWhereClause(); query(selectKeyspacesQuery() + whereClause, schemaRowsBuilder::withKeyspaces); query(selectTypesQuery() + whereClause, schemaRowsBuilder::withTypes); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java index 01a380308c4..7b43cd2664f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/CassandraSchemaRows.java @@ -170,6 +170,7 @@ public static class Builder { private final Node node; private final DataTypeParser dataTypeParser; private final String tableNameColumn; + private final KeyspaceFilter keyspaceFilter; private final String logPrefix; private final ImmutableList.Builder keyspacesBuilder = ImmutableList.builder(); private final ImmutableList.Builder virtualKeyspacesBuilder = ImmutableList.builder(); @@ -196,8 +197,9 @@ public static class Builder { private final Map> edgesBuilders = new LinkedHashMap<>(); - public Builder(Node node, String logPrefix) { + public Builder(Node node, KeyspaceFilter keyspaceFilter, String logPrefix) { this.node = node; + this.keyspaceFilter = keyspaceFilter; this.logPrefix = logPrefix; if (isCassandraV3OrAbove(node)) { this.tableNameColumn = "table_name"; @@ -229,12 +231,16 @@ private static boolean isCassandraV3OrAbove(Node node) { } public Builder withKeyspaces(Iterable rows) { - keyspacesBuilder.addAll(rows); + for (AdminRow row : rows) { + put(keyspacesBuilder, row); + } return this; } public Builder withVirtualKeyspaces(Iterable rows) { - virtualKeyspacesBuilder.addAll(rows); + for (AdminRow row : rows) { + put(virtualKeyspacesBuilder, row); + } return this; } @@ -315,12 +321,21 @@ public Builder withEdges(Iterable rows) { return this; } + private void put(ImmutableList.Builder builder, AdminRow row) { + String keyspace = row.getString("keyspace_name"); + if (keyspace == null) { + LOG.warn("[{}] Skipping system row with missing keyspace name", logPrefix); + } else if (keyspaceFilter.includes(keyspace)) { + builder.add(row); + } + } + private void putByKeyspace( 
AdminRow row, ImmutableMultimap.Builder builder) { String keyspace = row.getString("keyspace_name"); if (keyspace == null) { LOG.warn("[{}] Skipping system row with missing keyspace name", logPrefix); - } else { + } else if (keyspaceFilter.includes(keyspace)) { builder.put(CqlIdentifier.fromInternal(keyspace), row); } } @@ -334,7 +349,7 @@ private void putByKeyspaceAndTable( LOG.warn("[{}] Skipping system row with missing keyspace name", logPrefix); } else if (table == null) { LOG.warn("[{}] Skipping system row with missing table name", logPrefix); - } else { + } else if (keyspaceFilter.includes(keyspace)) { ImmutableMultimap.Builder builder = builders.computeIfAbsent( CqlIdentifier.fromInternal(keyspace), s -> ImmutableListMultimap.builder()); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilter.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilter.java new file mode 100644 index 00000000000..b82ea69b172 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilter.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata.schema.queries; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.List; + +/** + * Filters keyspaces during schema metadata queries. + * + *

          Depending on the circumstances, we do it either on the server side with a WHERE IN clause that + * will be appended to every query, or on the client side with a predicate that will be applied to + * every fetched row. + */ +public interface KeyspaceFilter { + + static KeyspaceFilter newInstance(@NonNull String logPrefix, @NonNull List specs) { + if (specs.isEmpty()) { + return INCLUDE_ALL; + } else { + return new RuleBasedKeyspaceFilter(logPrefix, specs); + } + } + + /** The WHERE IN clause, or an empty string if there is no server-side filtering. */ + @NonNull + String getWhereClause(); + + /** The predicate that will be invoked for client-side filtering. */ + boolean includes(@NonNull String keyspace); + + KeyspaceFilter INCLUDE_ALL = + new KeyspaceFilter() { + @NonNull + @Override + public String getWhereClause() { + return ""; + } + + @Override + public boolean includes(@NonNull String keyspace) { + return true; + } + }; +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/RuleBasedKeyspaceFilter.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/RuleBasedKeyspaceFilter.java new file mode 100644 index 00000000000..20069aa796f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/RuleBasedKeyspaceFilter.java @@ -0,0 +1,201 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.schema.queries; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.function.Predicate; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Filters keyspaces during schema metadata queries. + * + *

          Depending on the circumstances, we do it either on the server side with a WHERE IN clause that + * will be appended to every query, or on the client side with a predicate that will be applied to + * every fetched row. + */ +class RuleBasedKeyspaceFilter implements KeyspaceFilter { + + private static final Logger LOG = LoggerFactory.getLogger(RuleBasedKeyspaceFilter.class); + + private static final Pattern EXACT_INCLUDE = Pattern.compile("\\w+"); + private static final Pattern EXACT_EXCLUDE = Pattern.compile("!\\s*(\\w+)"); + private static final Pattern REGEX_INCLUDE = Pattern.compile("/(.+)/"); + private static final Pattern REGEX_EXCLUDE = Pattern.compile("!\\s*/(.+)/"); + + private final String logPrefix; + private final String whereClause; + private final Set exactIncludes = new HashSet<>(); + private final Set exactExcludes = new HashSet<>(); + private final List> regexIncludes = new ArrayList<>(); + private final List> regexExcludes = new ArrayList<>(); + + private final boolean isDebugEnabled; + private final Set loggedKeyspaces; + + RuleBasedKeyspaceFilter(@NonNull String logPrefix, @NonNull List specs) { + assert !specs.isEmpty(); // see KeyspaceFilter#newInstance + + this.logPrefix = logPrefix; + for (String spec : specs) { + spec = spec.trim(); + Matcher matcher; + if (EXACT_INCLUDE.matcher(spec).matches()) { + exactIncludes.add(spec); + if (exactExcludes.remove(spec)) { + LOG.warn( + "[{}] '{}' is both included and excluded, ignoring the exclusion", logPrefix, spec); + } + } else if ((matcher = EXACT_EXCLUDE.matcher(spec)).matches()) { + String name = matcher.group(1); + if (exactIncludes.contains(name)) { + LOG.warn( + "[{}] '{}' is both included and excluded, ignoring the exclusion", logPrefix, name); + } else { + exactExcludes.add(name); + } + } else if ((matcher = REGEX_INCLUDE.matcher(spec)).matches()) { + compile(matcher.group(1)).map(regexIncludes::add); + } else if ((matcher = REGEX_EXCLUDE.matcher(spec)).matches()) { + 
compile(matcher.group(1)).map(regexExcludes::add); + } else { + LOG.warn( + "[{}] Error while parsing {}: invalid element '{}', skipping", + logPrefix, + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES.getPath(), + spec); + } + } + + if (!exactIncludes.isEmpty() && regexIncludes.isEmpty() && regexExcludes.isEmpty()) { + // We can filter on the server + whereClause = buildWhereClause(exactIncludes); + if (!exactExcludes.isEmpty()) { + // Proceed, but this is probably a mistake + LOG.warn( + "[{}] {} only has exact includes and excludes, the excludes are redundant", + logPrefix, + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES.getPath()); + } + LOG.debug("[{}] Filtering server-side with '{}'", logPrefix, whereClause); + } else { + whereClause = ""; + LOG.debug("[{}] No server-side filtering", logPrefix); + } + + isDebugEnabled = LOG.isDebugEnabled(); + loggedKeyspaces = isDebugEnabled ? new HashSet<>() : null; + } + + @NonNull + @Override + public String getWhereClause() { + return whereClause; + } + + @Override + public boolean includes(@NonNull String keyspace) { + if (exactIncludes.contains(keyspace)) { + log(keyspace, true, "it is included by name"); + return true; + } else if (exactExcludes.contains(keyspace)) { + log(keyspace, false, "it is excluded by name"); + return false; + } else if (regexIncludes.isEmpty()) { + if (regexExcludes.isEmpty()) { + log(keyspace, false, "it is not included by name"); + return false; + } else if (matchesAny(keyspace, regexExcludes)) { + log(keyspace, false, "it matches at least one regex exclude"); + return false; + } else { + log(keyspace, true, "it does not match any regex exclude"); + return true; + } + } else { // !regexIncludes.isEmpty() + if (regexExcludes.isEmpty()) { + if (matchesAny(keyspace, regexIncludes)) { + log(keyspace, true, "it matches at least one regex include"); + return true; + } else { + log(keyspace, false, "it does not match any regex include"); + return false; + } + } else { + if 
(matchesAny(keyspace, regexIncludes) && !matchesAny(keyspace, regexExcludes)) { + log(keyspace, true, "it matches at least one regex include, and no regex exclude"); + return true; + } else { + log(keyspace, false, "it matches either no regex include, or at least one regex exclude"); + return false; + } + } + } + } + + private void log(@NonNull String keyspace, boolean include, @NonNull String reason) { + if (isDebugEnabled && loggedKeyspaces.add(keyspace)) { + LOG.debug( + "[{}] Filtering {} '{}' because {}", logPrefix, include ? "in" : "out", keyspace, reason); + } + } + + private boolean matchesAny(String keyspace, List<Predicate<String>> rules) { + for (Predicate<String> rule : rules) { + if (rule.test(keyspace)) { + return true; + } + } + return false; + } + + private Optional<Predicate<String>> compile(String regex) { + try { + return Optional.of(Pattern.compile(regex).asPredicate()); + } catch (PatternSyntaxException e) { + LOG.warn( + "[{}] Error while parsing {}: syntax error in regex /{}/ ({}), skipping", + this.logPrefix, + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES.getPath(), + regex, + e.getMessage()); + return Optional.empty(); + } + } + + private static String buildWhereClause(Set<String> keyspaces) { + StringBuilder builder = new StringBuilder(" WHERE keyspace_name IN ("); + boolean first = true; + for (String keyspace : keyspaces) { + if (first) { + first = false; + } else { + builder.append(","); + } + builder.append('\'').append(keyspace).append('\''); + } + return builder.append(')').toString(); + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index a025e816d30..d303db0b036 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1785,14 +1785,41 @@ datastax-java-driver { # Overridable in a profile: no enabled = true - # The list of keyspaces for which schema and token metadata should be maintained. If this - # property is absent or empty, all existing keyspaces are processed. 
+ # The keyspaces for which schema and token metadata should be maintained. # - # Required: no + # Each element can be one of the following: + # 1. An exact name inclusion, for example "Ks1". If the name is case-sensitive, it must appear + # in its exact case. + # 2. An exact name exclusion, for example "!Ks1". + # 3. A regex inclusion, enclosed in slashes, for example "/^Ks.*/". The part between the + # slashes must follow the syntax rules of java.util.regex.Pattern. + # 4. A regex exclusion, for example "!/^Ks.*/". + # + # If the list is empty, or the option is unset, all keyspaces will match. Otherwise: + # + # If a keyspace matches an exact name inclusion, it is always included, regardless of what any + # other rule says. + # Otherwise, if it matches an exact name exclusion, it is always excluded, regardless of what + # any regex rule says. + # Otherwise, if there are regex rules: + # - if they're only inclusions, the keyspace must match at least one of them. + # - if they're only exclusions, the keyspace must match none of them. + # - if they're both, the keyspace must match at least one inclusion and none of the + # exclusions. + # + # If an element is malformed, or if its regex has a syntax error, a warning is logged and that + # single element is ignored. + # + # Try to use only exact name inclusions if possible. This allows the driver to filter on the + # server side with a WHERE IN clause. If you use any other rule, it has to fetch all system + # rows and filter on the client side. + # + # Required: no. The default value excludes all Cassandra and DSE system keyspaces. If the + # option is unset, this is interpreted as "include all keyspaces". # Modifiable at runtime: yes, the new value will be used for refreshes issued after the # change. # Overridable in a profile: no - // refreshed-keyspaces = [ "ks1", "ks2" ] + refreshed-keyspaces = [ "!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin" ] # The timeout for the requests to the schema tables. 
# diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java index ee26b25c95d..03202c4ddad 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/AggregateParserTest.java @@ -56,7 +56,9 @@ public class AggregateParserTest extends SchemaParserTestBase { "0"); @Before + @Override public void setup() { + super.setup(); when(context.getCodecRegistry()).thenReturn(CodecRegistry.DEFAULT); when(context.getProtocolVersion()).thenReturn(ProtocolVersion.DEFAULT); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java index 037234b0632..3ff2f497d8b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTest.java @@ -137,7 +137,8 @@ public void should_parse_multiple_keyspaces() { } private MetadataRefresh parse(Consumer builderConfig) { - CassandraSchemaRows.Builder builder = new CassandraSchemaRows.Builder(NODE_3_0, "test"); + CassandraSchemaRows.Builder builder = + new CassandraSchemaRows.Builder(NODE_3_0, keyspaceFilter, "test"); builderConfig.accept(builder); SchemaRows rows = builder.build(); return new CassandraSchemaParser(rows, context).parse(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java index 009a2db614f..2a6f74285d2 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/SchemaParserTestBase.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; import static org.assertj.core.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -25,11 +26,13 @@ import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultMetadata; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.KeyspaceFilter; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.nio.ByteBuffer; import java.util.Collections; import java.util.List; +import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @@ -42,6 +45,12 @@ public abstract class SchemaParserTestBase { protected static final CqlIdentifier KEYSPACE_ID = CqlIdentifier.fromInternal("ks"); @Mock protected DefaultMetadata currentMetadata; @Mock protected InternalDriverContext context; + @Mock protected KeyspaceFilter keyspaceFilter; + + @Before + public void setup() { + when(keyspaceFilter.includes(anyString())).thenReturn(true); + } protected static AdminRow mockFunctionRow( String keyspace, diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java index fe106165f67..e8d1228d573 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java 
+++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/TableParserTest.java @@ -204,7 +204,7 @@ private SchemaRows modernRows( private SchemaRows rows( AdminRow tableRow, Iterable columnRows, Iterable indexesRows, Node node) { CassandraSchemaRows.Builder builder = - new CassandraSchemaRows.Builder(node, "test") + new CassandraSchemaRows.Builder(node, keyspaceFilter, "test") .withTables(ImmutableList.of(tableRow)) .withColumns(columnRows); if (indexesRows != null) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java index 4fe83cf34b6..122bb12f863 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/ViewParserTest.java @@ -86,7 +86,7 @@ public void should_parse_view() { } private SchemaRows rows(AdminRow viewRow, Iterable columnRows) { - return new CassandraSchemaRows.Builder(NODE_3_0, "test") + return new CassandraSchemaRows.Builder(NODE_3_0, keyspaceFilter, "test") .withViews(ImmutableList.of(viewRow)) .withColumns(columnRows) .build(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java index e41fd78d3ed..59c20d032c5 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/Cassandra3SchemaQueriesTest.java @@ -58,7 +58,7 @@ public void should_query_with_keyspace_filter() { DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList())) 
.thenReturn(ImmutableList.of("ks1", "ks2")); - should_query_with_where_clause(" WHERE keyspace_name in ('ks1','ks2')"); + should_query_with_where_clause(" WHERE keyspace_name IN ('ks1','ks2')"); } private void should_query_with_where_clause(String whereClause) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilterTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilterTest.java new file mode 100644 index 00000000000..286d4d6329e --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/queries/KeyspaceFilterTest.java @@ -0,0 +1,141 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.schema.queries; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; +import org.junit.Test; + +public class KeyspaceFilterTest { + + private static final ImmutableSet<String> KEYSPACES = + ImmutableSet.of( + "system", "inventory_test", "inventory_prod", "customers_test", "customers_prod"); + + @Test + public void should_not_filter_when_no_rules() { + KeyspaceFilter filter = KeyspaceFilter.newInstance("test", Arrays.asList()); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).isEqualTo(KEYSPACES); + } + + @Test + public void should_filter_on_server_when_only_exact_rules() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance( + "test", Arrays.asList("inventory_test", "customers_test", "!system")); + // Note that exact excludes are redundant in this case: either they match an include and will be + // ignored, or they don't and the keyspace is already ignored. + // We let it slide, but a warning is logged. 
+ assertThat(filter.getWhereClause()) + .isEqualTo(" WHERE keyspace_name IN ('inventory_test','customers_test')"); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test", "customers_test"); + } + + @Test + public void should_ignore_exact_exclude_that_collides_with_exact_include() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance("test", Arrays.asList("inventory_test", "!inventory_test")); + assertThat(filter.getWhereClause()).isEqualTo(" WHERE keyspace_name IN ('inventory_test')"); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test"); + + // Order does not matter + filter = KeyspaceFilter.newInstance("test", Arrays.asList("!inventory_test", "inventory_test")); + assertThat(filter.getWhereClause()).isEqualTo(" WHERE keyspace_name IN ('inventory_test')"); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test"); + } + + @Test + public void should_apply_disjoint_exact_and_regex_rules() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance("test", Arrays.asList("inventory_test", "/^customers.*/")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)) + .containsOnly("inventory_test", "customers_test", "customers_prod"); + + filter = KeyspaceFilter.newInstance("test", Arrays.asList("!system", "!/^inventory.*/")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).containsOnly("customers_test", "customers_prod"); + + // The remaining cases could be simplified, but they are supported nevertheless: + /*redundant:*/ + filter = KeyspaceFilter.newInstance("test", Arrays.asList("!/^customers.*/", "inventory_test")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test", "inventory_prod", "system"); + + /*redundant:*/ + filter = KeyspaceFilter.newInstance("test", Arrays.asList("/^customers.*/", "!system")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, 
KEYSPACES)).containsOnly("customers_test", "customers_prod"); + } + + @Test + public void should_apply_intersecting_exact_and_regex_rules() { + // Include all customer keyspaces except one: + KeyspaceFilter filter = + KeyspaceFilter.newInstance("test", Arrays.asList("/^customers.*/", "!customers_test")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).containsOnly("customers_prod"); + + // Exclude all customer keyspaces except one (also implies include every other keyspace): + filter = KeyspaceFilter.newInstance("test", Arrays.asList("!/^customers.*/", "customers_test")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)) + .containsOnly("customers_test", "inventory_test", "inventory_prod", "system"); + } + + @Test + public void should_apply_intersecting_regex_rules() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance("test", Arrays.asList("/^customers.*/", "!/.*test$/")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).containsOnly("customers_prod"); + + // Throwing an exact name in the mix doesn't change the other rules + filter = + KeyspaceFilter.newInstance( + "test", Arrays.asList("inventory_prod", "/^customers.*/", "!/.*test$/")); + assertThat(filter.getWhereClause()).isEmpty(); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_prod", "customers_prod"); + } + + @Test + public void should_skip_malformed_rule() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance("test", Arrays.asList("inventory_test", "customers_test", "//")); + assertThat(filter.getWhereClause()) + .isEqualTo(" WHERE keyspace_name IN ('inventory_test','customers_test')"); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test", "customers_test"); + } + + @Test + public void should_skip_invalid_regex() { + KeyspaceFilter filter = + KeyspaceFilter.newInstance( + "test", Arrays.asList("inventory_test", "customers_test", "/*/")); + 
assertThat(filter.getWhereClause()) + .isEqualTo(" WHERE keyspace_name IN ('inventory_test','customers_test')"); + assertThat(apply(filter, KEYSPACES)).containsOnly("inventory_test", "customers_test"); + } + + private static Set<String> apply(KeyspaceFilter filter, Set<String> keyspaces) { + return keyspaces.stream().filter(filter::includes).collect(Collectors.toSet()); + } +} diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index a140a421ae2..4259e56c107 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -132,7 +132,54 @@ You can also limit the metadata to a subset of keyspaces: datastax-java-driver.advanced.metadata.schema.refreshed-keyspaces = [ "users", "products" ] ``` -If the property is absent or the list is empty, it is interpreted as "all keyspaces". +Each element in the list can be one of the following: + +1. An exact name inclusion, for example `"Ks1"`. If the name is case-sensitive, it must appear in + its exact case. +2. An exact name exclusion, for example `"!Ks1"`. +3. A regex inclusion, enclosed in slashes, for example `"/^Ks.*/"`. The part between the slashes + must follow the syntax rules of [java.util.regex.Pattern]. The regex must match the entire + keyspace name (no partial matching). +4. A regex exclusion, for example `"!/^Ks.*/"`. + +If the list is empty, or the option is unset, all keyspaces will match. Otherwise: + +* If a keyspace matches an exact name inclusion, it is always included, regardless of what any other + rule says. +* Otherwise, if it matches an exact name exclusion, it is always excluded, regardless of what any + regex rule says. +* Otherwise, if there are regex rules: + + * if they're only inclusions, the keyspace must match at least one of them. + * if they're only exclusions, the keyspace must match none of them. + * if they're both, the keyspace must match at least one inclusion and none of the + exclusions. 
+ +For example, given the keyspaces `system`, `ks1`, `ks2`, `data1` and `data2`, here's the outcome of +a few filters: + +|Filter|Outcome|Translation| +|---|---|---| +| `[]` | `system`, `ks1`, `ks2`, `data1`, `data2` | Include all. | +| `["ks1", "ks2"]` | `ks1`, `ks2` | Include ks1 and ks2 (recommended, see explanation below). | +| `["!system"]` | `ks1`, `ks2`, `data1`, `data2` | Include all except system. | +| `["/^ks.*/"]` | `ks1`, `ks2` | Include all that start with ks. | +| `["!/^ks.*/"]` | `system`, `data1`, `data2` | Exclude all that start with ks (and include everything else). | +| `["system", "/^ks.*/"]` | `system`, `ks1`, `ks2` | Include system, and all that start with ks. | +| `["/^ks.*/", "!ks2"]` | `ks1` | Include all that start with ks, except ks2. | +| `["!/^ks.*/", "ks1"]` | `system`, `ks1`, `data1`, `data2` | Exclude all that start with ks, except ks1 (and also include everything else). | +| `["/^s.*/", "/^ks.*/", "!/.*2$/"]` | `system`, `ks1` | Include all that start with s or ks, except if they end with 2. | + + +If an element is malformed, or if its regex has a syntax error, a warning is logged and that single +element is ignored. + +The default configuration (see [reference.conf](../../configuration/reference/)) excludes all +Cassandra and DSE system keyspaces. + +Try to use only exact name inclusions if possible. This allows the driver to filter on the server +side with a `WHERE IN` clause. If you use any other rule, it has to fetch all system rows and filter +on the client side. Note that, if you change the list at runtime, `onKeyspaceAdded`/`onKeyspaceDropped` will be invoked on your schema listeners for the newly included/excluded keyspaces. 
@@ -271,4 +318,5 @@ take a look at the [Performance](../../performance/#schema-updates) page for a f [DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html [DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html -[JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 \ No newline at end of file +[JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 +[java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html From e63de8a55dcc46fc67b86f436b54cbb522669592 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 18:25:08 +0100 Subject: [PATCH 609/979] Revert Snappy version upgrade Version 1.1.8.1 causes the related OSGi tests to fail. --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 83a04f9b198..08309f95361 100644 --- a/pom.xml +++ b/pom.xml @@ -58,7 +58,7 @@ 2.11.0 1.9.12 - 1.1.8.1 + 1.1.7.3 1.7.1 3.18.1 From 2a705683df13f28eba2fb76ec9cdd8f1e8774df7 Mon Sep 17 00:00:00 2001 From: Olivier Michallat Date: Fri, 18 Dec 2020 09:38:25 -0800 Subject: [PATCH 610/979] JAVA-2877: Allow skipping validation for individual mapped entities (#1502) Co-authored-by: Alexandre Dutra --- changelog/README.md | 1 + .../oss/driver/mapper/SchemaValidationIT.java | 61 ++++++++++++- manual/mapper/mapper/README.md | 3 + ...HelperSchemaValidationMethodGenerator.java | 87 ++++++++++--------- .../api/mapper/annotations/SchemaHint.java | 7 +- 5 files changed, 116 insertions(+), 43 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index ccd1f87e1a1..d720271d9c5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2877: Allow skipping validation for individual mapped entities - [improvement] JAVA-2871: Allow keyspace 
exclusions in the metadata, and exclude system keyspaces by default - [improvement] JAVA-2449: Use non-cryptographic random number generation in Uuids.random() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java index ec0c579c3c3..9abaa714996 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java @@ -15,11 +15,13 @@ */ package com.datastax.oss.driver.mapper; -import static com.datastax.oss.driver.api.mapper.annotations.SchemaHint.*; +import static com.datastax.oss.driver.api.mapper.annotations.SchemaHint.TargetElement; import static com.datastax.oss.driver.internal.core.util.LoggerTest.setupTestLogger; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; @@ -28,6 +30,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.mapper.MapperContext; import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; @@ -35,9 +38,11 @@ import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.QueryProvider; import 
com.datastax.oss.driver.api.mapper.annotations.SchemaHint; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; +import com.datastax.oss.driver.api.mapper.entity.EntityHelper; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -312,6 +317,20 @@ public void should_log_warning_when_passing_not_existing_keyspace() { } } + @Test + public void should_not_warn_or_throw_when_target_element_is_NONE() { + LoggerTest.LoggerSetup logger = + setupTestLogger( + SchemaValidationIT_DoesNotExistNoValidationHelper__MapperGenerated.class, Level.WARN); + + // when + mapper.noValidationDao(sessionRule.keyspace()); + + // then + // no exceptions, no logs + verify(logger.appender, never()).doAppend(any()); + } + @Mapper public interface InventoryMapper { @DaoFactory @@ -353,6 +372,9 @@ ProductSimpleMissingClusteringColumnDao productSimpleMissingClusteringColumn( @DaoFactory ProductPkAndClusteringDao productPkAndClusteringDao(@DaoKeyspace CqlIdentifier keyspace); + + @DaoFactory + NoValidationDao noValidationDao(@DaoKeyspace CqlIdentifier keyspace); } @Dao @@ -437,6 +459,25 @@ public interface ProductPkAndClusteringDao { ProductPkAndClustering findById(UUID productId); } + @Dao + public interface NoValidationDao { + // Not a real query, we just need to reference the entities + @QueryProvider( + providerClass = DummyProvider.class, + entityHelpers = {DoesNotExistNoValidation.class, ProductCqlTableMissingNoValidation.class}) + void doNothing(); + } + + @SuppressWarnings("unused") + static class DummyProvider { + DummyProvider( + MapperContext context, + EntityHelper helper1, + EntityHelper helper2) {} + + void doNothing() {} + } + @Entity public static class ProductCqlTableMissing { @PartitionKey private UUID id; @@ -1188,4 +1229,22 @@ public String toString() { + '}'; } } + + 
@Entity + @SchemaHint(targetElement = TargetElement.NONE) + public static class DoesNotExistNoValidation { + private int k; + + public int getK() { + return k; + } + + public void setK(int k) { + this.k = k; + } + } + + @Entity + @SchemaHint(targetElement = TargetElement.NONE) + public static class ProductCqlTableMissingNoValidation extends ProductCqlTableMissing {} } diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 2dd0e500f4e..e2834b0d99f 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -227,6 +227,9 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) .build(); ``` +You can also permanently disable validation of an individual entity by annotating it with +`@SchemaHint(targetElement = NONE)`. + [CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html [@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html [@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java index 3aa9957ac82..17b4246d3a3 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSchemaValidationMethodGenerator.java @@ -64,51 +64,59 @@ public Optional generate() { .addModifiers(Modifier.PUBLIC) .returns(TypeName.VOID); - // get keyspaceId from context, and if not present fallback to keyspace set on session - methodBuilder.addStatement( - 
"$1T keyspaceId = this.keyspaceId != null ? this.keyspaceId : context.getSession().getKeyspace().orElse(null)", - CqlIdentifier.class); + Optional targetElement = + Optional.ofNullable(entityTypeElement.getAnnotation(SchemaHint.class)) + .map(SchemaHint::targetElement); - methodBuilder.addStatement("String entityClassName = $S", entityDefinition.getClassName()); - generateKeyspaceNull(methodBuilder); + if (targetElement.isPresent() && targetElement.get() == TargetElement.NONE) { + methodBuilder.addComment( + "Nothing to do, validation was disabled with @SchemaHint(targetElement = NONE)"); + } else { + // get keyspaceId from context, and if not present fallback to keyspace set on session + methodBuilder.addStatement( + "$1T keyspaceId = this.keyspaceId != null ? this.keyspaceId : context.getSession().getKeyspace().orElse(null)", + CqlIdentifier.class); - generateKeyspaceNameWrong(methodBuilder); + methodBuilder.addStatement("String entityClassName = $S", entityDefinition.getClassName()); + generateKeyspaceNull(methodBuilder); - methodBuilder.addStatement( - "$1T<$2T> keyspace = context.getSession().getMetadata().getKeyspace(keyspaceId)", - Optional.class, - KeyspaceMetadata.class); + generateKeyspaceNameWrong(methodBuilder); - // Generates expected names to be present in cql (table or udt) - List expectedCqlNames = - entityDefinition.getAllColumns().stream() - .map(PropertyDefinition::getCqlName) - .collect(Collectors.toList()); - methodBuilder.addStatement( - "$1T<$2T> expectedCqlNames = new $3T<>()", - List.class, - CqlIdentifier.class, - ArrayList.class); - for (CodeBlock expectedCqlName : expectedCqlNames) { methodBuilder.addStatement( - "expectedCqlNames.add($1T.fromCql($2L))", CqlIdentifier.class, expectedCqlName); - } - - methodBuilder.addStatement( - "$1T<$2T> tableMetadata = keyspace.flatMap(v -> v.getTable(tableId))", - Optional.class, - TableMetadata.class); + "$1T<$2T> keyspace = context.getSession().getMetadata().getKeyspace(keyspaceId)", + 
Optional.class, + KeyspaceMetadata.class); + + // Generates expected names to be present in cql (table or udt) + List expectedCqlNames = + entityDefinition.getAllColumns().stream() + .map(PropertyDefinition::getCqlName) + .collect(Collectors.toList()); + methodBuilder.addStatement( + "$1T<$2T> expectedCqlNames = new $3T<>()", + List.class, + CqlIdentifier.class, + ArrayList.class); + for (CodeBlock expectedCqlName : expectedCqlNames) { + methodBuilder.addStatement( + "expectedCqlNames.add($1T.fromCql($2L))", CqlIdentifier.class, expectedCqlName); + } - // Generated UserDefineTypes metadata - methodBuilder.addStatement( - "$1T<$2T> userDefinedType = keyspace.flatMap(v -> v.getUserDefinedType(tableId))", - Optional.class, - UserDefinedType.class); + methodBuilder.addStatement( + "$1T<$2T> tableMetadata = keyspace.flatMap(v -> v.getTable(tableId))", + Optional.class, + TableMetadata.class); - generateValidationChecks(methodBuilder); + // Generated UserDefineTypes metadata + methodBuilder.addStatement( + "$1T<$2T> userDefinedType = keyspace.flatMap(v -> v.getUserDefinedType(tableId))", + Optional.class, + UserDefinedType.class); - logMissingMetadata(methodBuilder); + generateValidationChecks(methodBuilder, targetElement); + logMissingMetadata(methodBuilder); + } return Optional.of(methodBuilder.build()); } @@ -159,11 +167,8 @@ private void generateKeyspaceNull(MethodSpec.Builder methodBuilder) { methodBuilder.endControlFlow(); } - private void generateValidationChecks(MethodSpec.Builder methodBuilder) { - Optional targetElement = - Optional.ofNullable(entityTypeElement.getAnnotation(SchemaHint.class)) - .map(SchemaHint::targetElement); - + private void generateValidationChecks( + MethodSpec.Builder methodBuilder, Optional targetElement) { // if SchemaHint was not provided explicitly try to match TABLE, then fallback to UDT if (!targetElement.isPresent()) { validateColumnsInTable(methodBuilder); diff --git 
a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java index b33ec132f08..c972cdbf936 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SchemaHint.java @@ -39,6 +39,9 @@ *

          By default, the mapper first tries to match the entity with a table, and if that doesn't work, * with a UDT. This annotation allows you to provide a hint as to which check should be done, so * that the mapper can skip the other one. + * + *

          In addition, you can ask to completely skip the validation for this entity by using {@link + * TargetElement#NONE}. */ @Target(ElementType.TYPE) @Retention(RetentionPolicy.RUNTIME) @@ -47,6 +50,8 @@ enum TargetElement { TABLE, - UDT + UDT, + NONE, + ; } } From d32c7fe52de7a8ed25768134c70ae13c0b3f9fc0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Dec 2020 19:03:46 +0100 Subject: [PATCH 611/979] JAVA-2903: BlockHound integration (#1514) --- changelog/README.md | 1 + core-shaded/pom.xml | 4 +- core/pom.xml | 7 +- .../ContinuousRequestHandlerBase.java | 44 +-- .../metadata/SafeInitNodeStateListener.java | 3 + .../api/core/session/SessionBuilder.java | 6 + .../session/throttling/RequestThrottler.java | 9 +- .../oss/driver/api/core/uuid/Uuids.java | 6 + .../core/context/DefaultNettyOptions.java | 10 +- .../oss/driver/internal/core/time/Clock.java | 9 +- .../util/concurrent/BlockingOperation.java | 2 +- .../DriverBlockHoundIntegration.java | 109 +++++++ ...ockhound.integration.BlockHoundIntegration | 1 + integration-tests/pom.xml | 15 + .../DriverBlockHoundIntegrationCcmIT.java | 120 ++++++++ .../DriverBlockHoundIntegrationIT.java | 132 ++++++++ .../src/test/resources/application.conf | 2 +- manual/core/async/README.md | 8 +- manual/core/non_blocking/README.md | 289 ++++++++++++++++++ manual/core/reactive/README.md | 6 +- manual/developer/common/concurrency/README.md | 19 +- pom.xml | 17 ++ 22 files changed, 780 insertions(+), 39 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegration.java create mode 100644 core/src/main/resources/META-INF/services/reactor.blockhound.integration.BlockHoundIntegration create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java create mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java create mode 100644 manual/core/non_blocking/README.md diff --git a/changelog/README.md b/changelog/README.md index d720271d9c5..86b2d3d04d3 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [new feature] JAVA-2903: BlockHound integration - [improvement] JAVA-2877: Allow skipping validation for individual mapped entities - [improvement] JAVA-2871: Allow keyspace exclusions in the metadata, and exclude system keyspaces by default diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4d92a37736f..addaf4070d1 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -317,11 +317,11 @@ Note: dependencies marked as optional are by default included with optional resolution in the manifest; we only need to manually set the resolution to optional for dependencies declared as non-optional in the pom files. - -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional,org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional,org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, reactor.blockhound.*;resolution:=optional, !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, !reactor.blockhound.*, * + -->!com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, 
!org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, * jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, * + -->jnr.*;resolution:=optional, com.esri.core.geometry.*;resolution:=optional, org.reactivestreams.*;resolution:=optional, org.apache.tinkerpop.*;resolution:=optional, org.javatuples.*;resolution:=optional, reactor.blockhound.*;resolution:=optional, * com.datastax.oss.driver.*.core.*, com.datastax.dse.driver.*.core.* diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index c7754702bcf..de6dee6f8be 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -696,28 +696,28 @@ private Timeout schedulePageTimeout(int expectedPage) { } LOG.trace("[{}] Scheduling timeout for page {} in {}", logPrefix, expectedPage, timeout); return timer.newTimeout( - timeout1 -> { - lock.lock(); - try { - if (state == expectedPage) { - abort( - new DriverTimeoutException( - String.format("Timed out waiting for page %d", expectedPage)), - false); - } else { - // Ignore timeout if the request has moved on in the interim. 
- LOG.trace( - "[{}] Timeout fired for page {} but query already at state {}, skipping", - logPrefix, - expectedPage, - state); - } - } finally { - lock.unlock(); - } - }, - timeout.toNanos(), - TimeUnit.NANOSECONDS); + t -> onPageTimeout(expectedPage), timeout.toNanos(), TimeUnit.NANOSECONDS); + } + + private void onPageTimeout(int expectedPage) { + lock.lock(); + try { + if (state == expectedPage) { + abort( + new DriverTimeoutException( + String.format("Timed out waiting for page %d", expectedPage)), + false); + } else { + // Ignore timeout if the request has moved on in the interim. + LOG.trace( + "[{}] Timeout fired for page {} but query already at state {}, skipping", + logPrefix, + expectedPage, + state); + } + } finally { + lock.unlock(); + } } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.java index f75f2179a8a..4eabd59829c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.java @@ -58,6 +58,9 @@ *

        3. if {@code false}, they are discarded. * * + *

          Usage in non-blocking applications: beware that this class is not lock-free. It is implemented + * with locks for internal coordination. + * * @since 4.6.0 */ public class SafeInitNodeStateListener implements NodeStateListener { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index af4eb467a95..abe28786b62 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -676,6 +676,8 @@ public SelfT withMetricRegistry(@Nullable Object metricRegistry) { /** * Creates the session with the options set by this builder. * + *

          The session initialization will happen asynchronously in a driver internal thread pool. + * * @return a completion stage that completes with the session when it is fully initialized. */ @NonNull @@ -689,6 +691,10 @@ public CompletionStage buildAsync() { /** * Convenience method to call {@link #buildAsync()} and block on the result. * + *

          Usage in non-blocking applications: beware that session initialization is a costly + * operation. It should only be triggered from a thread that is allowed to block. If that is not + * the case, consider using {@link #buildAsync()} instead. + * *

          This must not be called on a driver thread. */ @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java index 21ae3b5e396..db7dd432266 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java @@ -18,7 +18,14 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.io.Closeable; -/** Limits the number of concurrent requests executed by the driver. */ +/** + * Limits the number of concurrent requests executed by the driver. + * + *

          Usage in non-blocking applications: beware that all built-in implementations of this interface + * use locks for internal coordination, and do not qualify as lock-free, with the obvious exception + * of {@code PassThroughRequestThrottler}. If your application enforces strict lock-freedom, then + * request throttling should not be enabled. + */ public interface RequestThrottler extends Closeable { /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java index 4b63dee3055..337f950aff6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/uuid/Uuids.java @@ -511,6 +511,12 @@ private static byte[] toBytes(long msb, long lsb) { * * If you simply need to perform a range query on a {@code timeuuid} column, use the "fake" UUID * generated by {@link #startOf(long)} and {@link #endOf(long)}. + * + *

          Usage with non-blocking threads: beware that this method may block the calling thread on its + * very first invocation, because the node part of time-based UUIDs needs to be computed at that + * moment, and the computation may require the loading of native libraries. If that is a problem, + * consider invoking this method once from a thread that is allowed to block. Subsequent + * invocations are guaranteed not to block. */ @NonNull public static UUID timeBased() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java index aefd6d55bde..af613d92366 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java @@ -108,12 +108,20 @@ public DefaultNettyOptions(InternalDriverContext context) { + "Please set advanced.netty.timer.tick-duration to 100 ms or higher.", tickDuration.toMillis()); } - timer = + this.timer = createTimer(timerThreadFactory, tickDuration); + } + + private HashedWheelTimer createTimer(ThreadFactory timerThreadFactory, Duration tickDuration) { + HashedWheelTimer timer = new HashedWheelTimer( timerThreadFactory, tickDuration.toNanos(), TimeUnit.NANOSECONDS, config.getInt(DefaultDriverOption.NETTY_TIMER_TICKS_PER_WHEEL)); + // Start the background thread eagerly during session initialization because + // it is a blocking operation. 
+ timer.start(); + return timer; } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/time/Clock.java b/core/src/main/java/com/datastax/oss/driver/internal/core/time/Clock.java index 4a12a788068..dd00171ab63 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/time/Clock.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/time/Clock.java @@ -26,7 +26,14 @@ public interface Clock { Logger LOG = LoggerFactory.getLogger(Clock.class); - /** Returns the best implementation for the current platform. */ + /** + * Returns the best implementation for the current platform. + * + *

          Usage with non-blocking threads: beware that this method may block the calling thread on its + * very first invocation, because native libraries used by the driver will be loaded at that + * moment. If that is a problem, consider invoking this method once from a thread that is allowed + * to block. Subsequent invocations are guaranteed not to block. + */ static Clock getInstance(boolean forceJavaClock) { if (forceJavaClock) { LOG.info("Using Java system clock because this was explicitly required in the configuration"); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/BlockingOperation.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/BlockingOperation.java index 7797594b7b9..091c88be2ee 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/BlockingOperation.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/BlockingOperation.java @@ -59,7 +59,7 @@ public Thread newThread(@NonNull Runnable r) { } } - private static class InternalThread extends FastThreadLocalThread { + static class InternalThread extends FastThreadLocalThread { private InternalThread(Runnable runnable) { super(runnable); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegration.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegration.java new file mode 100644 index 00000000000..d18b33c4c69 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegration.java @@ -0,0 +1,109 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation.InternalThread; +import reactor.blockhound.BlockHound; +import reactor.blockhound.integration.BlockHoundIntegration; + +public final class DriverBlockHoundIntegration implements BlockHoundIntegration { + + @Override + public void applyTo(BlockHound.Builder builder) { + + // disallow blocking operations in driver internal threads by default; + // note that session initialization will happen on one of these threads, which is why + // we need to allow a few blocking calls below. 
+ builder.nonBlockingThreadPredicate(current -> current.or(InternalThread.class::isInstance)); + + // blocking calls in initialization methods + + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.context.DefaultNettyOptions", "createTimer"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.os.Native$LibcLoader", "load"); + builder.allowBlockingCallsInside( + // requires native libraries + "com.datastax.oss.driver.internal.core.time.Clock", "getInstance"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.util.concurrent.LazyReference", "get"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.util.concurrent.ReplayingEventFilter", "accept"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.util.concurrent.ReplayingEventFilter", "markReady"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.util.concurrent.ReplayingEventFilter", "start"); + + // called upon initialization but also on topology/status events + + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper$SinglePolicyDistanceReporter", + "setDistance"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.pool.ChannelSet", "add"); + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.pool.ChannelSet", "remove"); + + // never called directly by the driver; locks that usually operate with low thread contention + + builder.allowBlockingCallsInside( + "com.datastax.oss.driver.internal.core.type.codec.registry.CachingCodecRegistry", + "register"); + builder.allowBlockingCallsInside( + // requires native libraries, for now because of Uuids.getProcessPiece; if JAVA-1116 gets + // implemented, Uuids.getCurrentTimestamp will also require an exception. Pre-emptively + // protect the whole Uuids.timeBased method. 
+ "com.datastax.oss.driver.api.core.uuid.Uuids", "timeBased"); + + // continuous paging + + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "cancel"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "dequeueOrCreatePending"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "isLastResponse"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "onFailure"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "onPageTimeout"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "onResponse"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "onStreamIdAssigned"); + builder.allowBlockingCallsInside( + "com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase$NodeResponseCallback", + "operationComplete"); + + // Netty extra exceptions + + // see https://github.com/netty/netty/pull/10810 + builder.allowBlockingCallsInside("io.netty.util.HashedWheelTimer", "start"); + builder.allowBlockingCallsInside("io.netty.util.HashedWheelTimer", "stop"); + + // see https://github.com/netty/netty/pull/10811 + builder.allowBlockingCallsInside("io.netty.util.concurrent.GlobalEventExecutor", "addTask"); + builder.allowBlockingCallsInside( + "io.netty.util.concurrent.SingleThreadEventExecutor", "addTask"); + } +} diff --git 
a/core/src/main/resources/META-INF/services/reactor.blockhound.integration.BlockHoundIntegration b/core/src/main/resources/META-INF/services/reactor.blockhound.integration.BlockHoundIntegration new file mode 100644 index 00000000000..b848ce24855 --- /dev/null +++ b/core/src/main/resources/META-INF/services/reactor.blockhound.integration.BlockHoundIntegration @@ -0,0 +1 @@ +com.datastax.oss.driver.internal.core.util.concurrent.DriverBlockHoundIntegration \ No newline at end of file diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index d9daac364a4..3cf5c8076cd 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -186,6 +186,21 @@ smallrye-metrics test + + io.projectreactor + reactor-core + test + + + io.projectreactor + reactor-test + test + + + io.projectreactor.tools + blockhound-junit-platform + test + diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java new file mode 100644 index 00000000000..c275eaae12b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; +import com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingITBase; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import java.time.Duration; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import reactor.blockhound.BlockHound; +import reactor.core.publisher.Flux; +import reactor.core.scheduler.Schedulers; +import reactor.test.StepVerifier; + +/** + * This test exercises the driver with BlockHound installed and tests that the rules defined in + * {@link DriverBlockHoundIntegration} are being applied, and especially when continuous paging is + * used. 
+ */ +@DseRequirement( + min = "5.1.0", + description = "Continuous paging is only available from 5.1.0 onwards") +@Category(IsolatedTests.class) +public class DriverBlockHoundIntegrationCcmIT extends ContinuousPagingITBase { + + static { + BlockHound.install(); + } + + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); + + // Note: Insights monitoring will be detected by BlockHound, but the error is swallowed and + // logged by DefaultSession.SingleThreaded.notifyListeners, so it's not necessary to explicitly + // disable Insights here. + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + @BeforeClass + public static void setUp() { + initialize(SESSION_RULE.session(), SESSION_RULE.slowProfile()); + } + + @Test + public void should_not_detect_blocking_call_with_continuous_paging() { + CqlSession session = SESSION_RULE.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + Flux rows = + Flux.range(0, 10) + .flatMap( + i -> + Flux.fromIterable(session.executeContinuously(statement)) + .subscribeOn(Schedulers.parallel())); + StepVerifier.create(rows).expectNextCount(1000).expectComplete().verify(); + } + + /** Copied from com.datastax.dse.driver.api.core.cql.continuous.ContinuousPagingIT. */ + @Test + public void should_not_detect_blocking_call_with_continuous_paging_when_timeout() + throws Exception { + CqlSession session = SESSION_RULE.session(); + SimpleStatement statement = SimpleStatement.newInstance("SELECT v from test where k=?", KEY); + // Throttle server at a page per second and set client timeout much lower so that the client + // will experience a timeout. + // Note that this might not be perfect if there are pauses in the JVM and the timeout + // doesn't fire soon enough. 
+ DriverExecutionProfile profile = + session + .getContext() + .getConfig() + .getDefaultProfile() + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 10) + .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES_PER_SECOND, 1) + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofMillis(100)); + CompletionStage future = + session.executeContinuouslyAsync(statement.setExecutionProfile(profile)); + ContinuousAsyncResultSet pagingResult = CompletableFutures.getUninterruptibly(future); + try { + pagingResult.fetchNextPage().toCompletableFuture().get(); + fail("Expected a timeout"); + } catch (ExecutionException e) { + assertThat(e.getCause()) + .isInstanceOf(DriverTimeoutException.class) + .hasMessageContaining("Timed out waiting for page 2"); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java new file mode 100644 index 00000000000..afe08817fae --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java @@ -0,0 +1,132 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import java.util.UUID; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import reactor.blockhound.BlockHound; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; +import reactor.test.StepVerifier; + +/** + * This test exercises the driver with BlockHound installed and tests that the rules defined in + * {@link DriverBlockHoundIntegration} are being applied. 
+ */ +@Category(IsolatedTests.class) +public class DriverBlockHoundIntegrationIT { + + static { + BlockHound.install(); + } + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(1)); + + @Before + public void setup() { + SIMULACRON_RULE.cluster().prime(when("SELECT c1, c2 FROM ks.t1").then(rows().row("foo", 42))); + } + + @Test + @SuppressWarnings("BlockingMethodInNonBlockingContext") + public void should_detect_blocking_call() { + // this is just to make sure the detection mechanism is properly installed + Mono blockingPublisher = + Mono.fromCallable( + () -> { + Thread.sleep(1); + return 0; + }) + .subscribeOn(Schedulers.parallel()); + StepVerifier.create(blockingPublisher) + .expectErrorMatches(e -> e instanceof Error && e.getMessage().contains("Blocking call!")) + .verify(); + } + + @Test + public void should_not_detect_blocking_call_on_asynchronous_execution() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + Flux rows = + Flux.range(0, 1000) + .flatMap( + i -> + Flux.from(session.executeReactive("SELECT c1, c2 FROM ks.t1")) + .subscribeOn(Schedulers.parallel())); + StepVerifier.create(rows).expectNextCount(1000).expectComplete().verify(); + } + } + + @Test + public void should_not_detect_blocking_call_on_asynchronous_execution_prepared() { + try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { + Flux rows = + Mono.fromCompletionStage(() -> session.prepareAsync("SELECT c1, c2 FROM ks.t1")) + .flatMapMany( + ps -> + Flux.range(0, 1000) + .map(i -> ps.bind()) + .flatMap( + bs -> + Flux.from(session.executeReactive(bs)) + .subscribeOn(Schedulers.parallel()))); + StepVerifier.create(rows).expectNextCount(1000).expectComplete().verify(); + } + } + + @Test + public void should_not_detect_blocking_call_on_random_uuid_generation() { + Flux uuids = + Flux.create( + sink -> { + for (int i = 0; i < 1_000_000; ++i) { + sink.next(Uuids.random()); + } + 
sink.complete(); + }) + .subscribeOn(Schedulers.parallel()); + StepVerifier.create(uuids).expectNextCount(1_000_000).expectComplete().verify(); + } + + @Test + public void should_not_detect_blocking_call_on_time_based_uuid_generation() { + Flux uuids = + Flux.create( + sink -> { + for (int i = 0; i < 1_000_000; ++i) { + sink.next(Uuids.timeBased()); + } + sink.complete(); + }) + .subscribeOn(Schedulers.parallel()); + StepVerifier.create(uuids).expectNextCount(1_000_000).expectComplete().verify(); + } +} diff --git a/integration-tests/src/test/resources/application.conf b/integration-tests/src/test/resources/application.conf index 45c1366bd45..668a71059cf 100644 --- a/integration-tests/src/test/resources/application.conf +++ b/integration-tests/src/test/resources/application.conf @@ -8,7 +8,7 @@ datastax-java-driver { # (see CcmBridge). local-datacenter = dc1 } - + config-reload-interval = 0 request.timeout = 10 seconds graph.timeout = 10 seconds } diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 66687509cf2..2b70c64d1a4 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -61,8 +61,12 @@ resultStage.thenAccept(resultSet -> System.out.println(Thread.currentThread().ge // prints s0-io-n (I/O pool thread) ``` -As long as you use the asynchronous API, the driver never blocks. You can safely call a driver -method from inside a callback: +As long as you use the asynchronous API, the driver will behave in a non-blocking manner: its +internal threads will almost never block. There are a few exceptions to the rule though: see the +manual page on [non-blocking programming](../non_blocking) for details. 
+ +Because the asynchronous API is non-blocking, you can safely call a driver method from inside a +callback, even when the callback's execution is triggered by a future returned by the driver: ```java // Get the department id for a given user: diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md new file mode 100644 index 00000000000..8f0aace18f3 --- /dev/null +++ b/manual/core/non_blocking/README.md @@ -0,0 +1,289 @@ +## Non-blocking programming + +### Quick overview + +With the advent of reactive programming, the demand for fully non-blocking libraries has become +popular among application developers. The recent availability of frameworks enforcing lock-freedom, +such as [Vert.x] or [Reactor], along with tools for automatic detection of blocking calls like +[BlockHound], has exacerbated this trend even more so. + +[Vert.x]: https://vertx.io +[Reactor]: https://projectreactor.io +[BlockHound]: https://github.com/reactor/BlockHound + +**In summary, when used properly, the DataStax Java driver offers non-blocking guarantees for most +of its operations, and during most of the session lifecycle.** + +These guarantees and their exceptions are detailed below. A final chapter explains how to use the +driver with BlockHound. + +The developer guide also has more information on driver internals and its +[concurrency model](../../developer/common/concurrency). + +### Definition of "non-blocking" + +Since the term "non-blocking" is subject to interpretation, in this page the term should be +understood as "[lock-free]": a program is non-blocking if at least one thread is guaranteed to make +progress; such programs are implemented without locks, mutexes nor semaphores, using only low-level +primitives such as atomic variables and CAS (compare-and-swap) instructions. 
+ +A further distinction is generally established between "lock-free" and "wait-free" algorithms: the +former ones allow progress of the overall system, while the latter ones allow each thread to make +progress at any time. This distinction is however rather theoretical and is outside of the scope of +this document. + +[lock-free]: https://www.baeldung.com/lock-free-programming + +### Driver lock-free guarantees + +#### Driver lock-free guarantees per execution model + +The driver offers many execution models. For the built-in ones, the lock-free guarantees are as +follows: + +* The synchronous API is blocking and does not offer any lock-free guarantee. +* The [asynchronous](../async) API is implemented in lock-free algorithms. +* The [reactive](../reactive) API is implemented in lock-free algorithms (it's actually wait-free). + +For example, calling any synchronous method declared in [`SyncCqlSession`], such as [`execute`], +will block until the result is available. These methods should never be used in non-blocking +applications. + +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- + +However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all +safe for use in non-blocking applications; the statement execution and asynchronous result delivery +is guaranteed to never block. 
+ +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- + +The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the +returned publisher will never block when subscribed to, until the final results are delivered to +the subscriber. + +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- + +There is one exception though: continuous paging queries (a feature specific to DSE) have a special +execution model which uses internal locks for coordination. Although such locks are only held for +extremely brief periods of time, and never under high contention, this execution model doesn't +qualify as lock-free. + +As a consequence, all methods declared in [`ContinuousSession`] and [`ContinuousReactiveSession`] +cannot be considered as implemented 100% lock-free, even those built on top of the asynchronous or +reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactive`]. In practice +though, continuous paging is extremely efficient and can safely be used in most non-blocking +contexts, unless they require strict lock-freedom. 
+ +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- + +#### Driver lock-free guarantees per session lifecycle phases + +The guarantees vary according to three possible session states: initializing, running, and closing. + +Session initialization is a costly operation that performs many I/O operations, hitting both the +local filesystem (configuration files) and the network (connection initialization). This procedure +is triggered by a call to [`SessionBuilder.buildAsync()`] and happens partially on the calling +thread, and partially asynchronously on an internal driver thread. + +* The creation of the [driver context] happens synchronously on the calling thread. The context + creation usually requires file I/O, mainly to read configuration files. A call to + `SessionBuilder.buildAsync()`, in spite of its name, is thus a blocking call and must be + dispatched to a thread that is allowed to block. +* The rest of the initialization process will happen asynchronously, on an internal driver admin + thread. This process is mostly non-blocking, with a few exceptions listed below. Therefore, + the driver admin thread performing the initialization tasks must be allowed to block, at least + temporarily. 
+ +[driver context]: ../../developer/common/context + +For the reasons above, the initialization phase obviously doesn't qualify as lock-free. For +non-blocking applications, it is generally advised to trigger session initialization during +application startup, before strong non-blocking guarantees are enforced on application threads. + +Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as it will block the +calling thread until the session is fully initialized. For this reason, calls to `SessionBuilder.build()` +should be avoided in non-blocking applications. + +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- + +Once the session is initialized, however, the driver is guaranteed to be non-blocking during the +session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. + +Finally, closing the session is generally non-blocking, but the driver offers no strong guarantees +during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseable`], including the +asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is +shut down and lock-freedom enforcement is disabled. + +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- + +#### Driver lock-free guarantees for specific components + +Certain driver components are not implemented in lock-free algorithms. + +For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. 
It +should not be used if strict lock-freedom is enforced. + +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html + +The same is valid for both built-in [request throttlers]: + +* `ConcurrencyLimitingRequestThrottler` +* `RateLimitingRequestThrottler` + +See the section about [throttling](../throttling) for details about these components. Again, they +use locks internally, and depending on how many requests are being executed in parallel, the thread +contention on these locks can be high: in short, if your application enforces strict lock-freedom, +then these components should not be used. + +[request throttlers]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html + +Other components may be lock-free, *except* for their first invocation. This is the case of the +following items: + +* All built-in implementations of [`TimestampGenerator`], upon instantiation; +* The utility method [`Uuids.timeBased()`]. + +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- + +Both components need to access native libraries when they get initialized and this may involve +hitting the local filesystem, thus causing the initialization to become a blocking call. + +Timestamp generators are automatically created when the session is initialized, and are thus +generally safe to use afterwards. + +`Uuids.timeBased()`, however, is a convenience method that the driver doesn't use internally. For +this reason, it is advised that this method be called once during application startup, so that it is +safe to use it afterwards in a non-blocking context. 
+ +Alternatively, it's possible to disable the usage of client-side timestamp generation, and/or the +usage of native libraries. See the manual sections on [query timestamps](../query_timestamps) and +[integration](../integration) for more information. + +One component, the codec registry, can block when its [`register`] method is called; it is +therefore advised that codecs should be registered during application startup exclusively. See the +[custom codecs](../custom_codecs) section for more details about registering codecs. + +[`register`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- + +Finally, a few internal components also use locks, but only during session initialization; once the +session is ready, they are either discarded, or don't use locks anymore for the rest of the +session's lifecycle. + +These components are safe to use once the session is ready, although they could be reported by +lock-freedom monitoring tools. They are listed below in case their exclusion is necessary: + +* `com.datastax.oss.driver.internal.core.context.DefaultNettyOptions` +* `com.datastax.oss.driver.internal.core.util.concurrent.LazyReference` +* `com.datastax.oss.driver.internal.core.util.concurrent.ReplayingEventFilter` + +#### Driver lock-free guarantees on topology and status events + +Topology and status events can cause the driver to use locks temporarily. + +When a node gets added to the cluster, or when a node state changes (DOWN to UP or vice versa), the +driver needs to notify a few components: the load balancing policies need to coordinate in order to +assign a new distance to the node (LOCAL, REMOTE or IGNORED); and the node connection pool will have +to be resized either to accommodate new connections, or to close existing ones. + +These operations use internal locks for coordination. 
Again, they are only held for extremely brief +periods of time, and never under high contention. Note that this behavior cannot be disabled or +changed; if you need to enforce strict lock-freedom, and topology or status changes are being +reported as infringements, consider adding exceptions for the following method calls: + + * `com.datastax.oss.driver.internal.core.pool.ChannelSet#add(DriverChannel)` + * `com.datastax.oss.driver.internal.core.pool.ChannelSet#remove(DriverChannel)` + * `com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper$SinglePolicyDistanceReporter#setDistance(Node,NodeDistance)` + +#### Driver lock-free guarantees on random uuid generation + +Until driver 4.9, the [`Uuids.random()`] method was a blocking call. Because of that, this method +could not be used in non-blocking contexts, making UUID generation a difficult issue to solve. + +Moreover, this method is used in a few places internally. This situation was unfortunate because +lock-freedom enforcement tools could report calls to that method, but it was impossible to suppress +these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a +non-blocking call and random UUIDs can now be safely generated in non-blocking applications. + +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 + +#### Driver lock-free guarantees when reloading the configuration + +The driver has a pluggable configuration mechanism built around the [`DriverConfigLoader`] +interface. Implementors may choose to support [hot-reloading] of configuration files, and the +default built-in implementation has this feature enabled by default. + +Beware that hot-reloading with the default configuration mechanism is performed on a driver internal +admin thread. 
If hot-reloading is enabled, then this might be reported by lock-freedom infringement +detectors. If that is the case, it is advised to disable hot-reloading by setting the +`datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on +[configuration](../configuration) for more information. + +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- + +#### Driver lock-free guarantees when connecting to DSE + +When connecting to clusters running recent DSE versions, the driver automatically enables periodic +status reporting. When preparing the status report, the driver has to hit the local filesystem, and +because of that, the status reporting process does not qualify as lock-free. + +If lock-freedom is being enforced, then automatic status reporting must be disabled by setting the +`datastax-java-driver.advanced.monitor-reporting.enabled` property to false in the driver +configuration. + +### Driver mechanism for detection of blocking calls + +The driver has its own mechanism for detecting blocking calls happening on an internal driver +thread. This mechanism is capable of detecting and reporting blatant cases of misuse of the +asynchronous and reactive APIs, e.g. when the synchronous API is invoked inside a future or callback +produced by the asynchronous execution of a statement. See the core manual page on the +[asynchronous](../async) API or the developer manual page on +[driver concurrency](../../developer/common/concurrency) for details. + +The driver is not capable, however, of detecting low-level lock-freedom infringements, such as the +usage of locks. You must use an external tool to achieve that. See below how to use BlockHound for +that. 
+ +### Using the driver with Reactor BlockHound + +[Reactor]'s tool for automatic detection of blocking calls, [BlockHound], is capable of detecting +and reporting any sort of blocking calls, including I/O, locks, `Thread.sleep`, etc. + +When used with the driver, BlockHound can report some calls that, for the reasons explained above, +could be safely considered as false positives. + +For this reason, the driver, since version 4.10, ships with a custom `DriverBlockHoundIntegration` +class which is automatically discovered by BlockHound through the Service Loader mechanism. It +contains BlockHound customizations that target most of the cases detailed above, and prevent them +from being reported as blocking calls. + +More specifically, the following items are currently declared to be allowed: + +* Loading of native libraries during startup (`TimestampGenerator`); +* Locks held during startup only (`DefaultNettyOptions`, `LazyReference`, `ReplayingEventFilter`); +* Locks held during startup and topology and status events processing (`ChannelSet`, + `DistanceReporter`); +* Locks held when executing continuous paging queries; +* Locks held during calls to `MutableCodecRegistry.register()` and `Uuids.timeBased()`. + +The following items are NOT declared to be allowed and are likely to be reported by BlockHound if +used: + +* Request throttlers; +* Automatic status reporting; +* `SafeInitNodeStateListener`. + +Note that other blocking startup steps, e.g. loading of configuration files, are also not declared +to be allowed, because these are genuine blocking I/O calls. For this reason, if BlockHound is being +used, the loading of the driver context, performed by the thread calling `SessionBuilder.build()` +or `SessionBuilder.buildAsync()`, must be allowed to perform blocking calls. 
diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 25f74a26579..1c88908f4cd 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -6,13 +6,15 @@ streams]. Notes: -* reactive capabilities require the [Reactive Streams API] to be present on the classpath. The +* Reactive capabilities require the [Reactive Streams API] to be present on the classpath. The driver has a dependency on that library, but if your application does not use reactive queries at all, it is possible to exclude it to minimize the number of runtime dependencies. If the library cannot be found at runtime, reactive queries won't be available, and a warning will be logged, but the driver will otherwise operate normally (this is also valid for OSGi deployments). -* for historical reasons, reactive-related driver types reside in a package prefixed with `dse`; +* For historical reasons, reactive-related driver types reside in a package prefixed with `dse`; however, reactive queries also work with regular Cassandra. +* The reactive execution model is implemented in a non-blocking fashion: see the manual page on + [non-blocking programming](../non_blocking) for details. ### Overview diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index 3c6078f2eb4..be51e184ac5 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -77,12 +77,16 @@ fields, and methods are guaranteed to always run in isolation, eliminating subtl ### Non-blocking -Whether on the hot or cold path, internal code **never blocks**. If an internal component needs to -execute a query, it does so asynchronously, and registers callbacks to process the results. -Examples of this can be found in `ReprepareOnUp` and `DefaultTopologyMonitor` (among others). +Whether on the hot or cold path, internal code is almost 100% lock-free. 
The driver's guarantees on +lock-freedom are [detailed](../../../core/non_blocking) in the core manual. -The only place where the driver blocks are synchronous wrapper methods in the public API, for -example: +If an internal component needs to execute a query, it does so asynchronously, and registers +callbacks to process the results. Examples of this can be found in `ReprepareOnUp` and +`DefaultTopologyMonitor` (among others). + +The only place where the driver blocks is when using the synchronous API (methods declared in +[`SyncCqlSession`]), and when calling other synchronous wrapper methods in the public API, for +example, [`ExecutionInfo.getQueryTrace()`]: ```java public interface ExecutionInfo { @@ -95,6 +99,11 @@ public interface ExecutionInfo { } ``` +When a public API method is blocking, this is generally clearly stated in its javadocs. + +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html + +`BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. 
diff --git a/pom.xml b/pom.xml index 08309f95361..5e9fe4473df 100644 --- a/pom.xml +++ b/pom.xml @@ -420,6 +420,23 @@ smallrye-metrics 2.4.4 + + io.projectreactor + reactor-bom + 2020.0.1 + pom + import + + + io.projectreactor.tools + blockhound + 1.0.4.RELEASE + + + io.projectreactor.tools + blockhound-junit-platform + 1.0.4.RELEASE + From e6ae0529f8441ba149a4c35c16b4fd78a235ff95 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 19 Dec 2020 18:46:22 +0100 Subject: [PATCH 612/979] Fix wrong keyspace inclusion/exclusion logic in MetadataManager (JAVA-2871 follow-up) --- .../oss/driver/internal/core/metadata/MetadataManager.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 6db1db038bd..594c37430d4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; +import com.datastax.oss.driver.internal.core.metadata.schema.queries.KeyspaceFilter; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaRows; import com.datastax.oss.driver.internal.core.metadata.schema.refresh.SchemaRefresh; @@ -65,6 +66,7 @@ public class MetadataManager implements AsyncAutoCloseable { private volatile DefaultMetadata metadata; // only updated from adminExecutor private volatile boolean schemaEnabledInConfig; private volatile List refreshedKeyspaces; + private volatile KeyspaceFilter 
keyspaceFilter; private volatile Boolean schemaEnabledProgrammatically; private volatile boolean tokenMapEnabled; private volatile Set contactPoints; @@ -86,6 +88,7 @@ protected MetadataManager(InternalDriverContext context, DefaultMetadata initial this.refreshedKeyspaces = config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList()); + this.keyspaceFilter = KeyspaceFilter.newInstance(logPrefix, refreshedKeyspaces); this.tokenMapEnabled = config.getBoolean(DefaultDriverOption.METADATA_TOKEN_MAP_ENABLED); context.getEventBus().register(ConfigChangeEvent.class, this::onConfigChanged); @@ -100,6 +103,7 @@ private void onConfigChanged(@SuppressWarnings("unused") ConfigChangeEvent event this.refreshedKeyspaces = config.getStringList( DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, Collections.emptyList()); + this.keyspaceFilter = KeyspaceFilter.newInstance(logPrefix, refreshedKeyspaces); this.tokenMapEnabled = config.getBoolean(DefaultDriverOption.METADATA_TOKEN_MAP_ENABLED); if ((!schemaEnabledBefore @@ -372,8 +376,7 @@ private void refreshSchema( } // If this is an event, make sure it's not targeting a keyspace that we're ignoring. 
- boolean isRefreshedKeyspace = - keyspace == null || refreshedKeyspaces.isEmpty() || refreshedKeyspaces.contains(keyspace); + boolean isRefreshedKeyspace = keyspace == null || keyspaceFilter.includes(keyspace); if (isRefreshedKeyspace && (evenIfDisabled || isSchemaEnabled())) { acceptSchemaRequest(future, flushNow); From 5f24d21c2b7e0464b15a15a397c07276f64c4ead Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 19 Dec 2020 18:46:44 +0100 Subject: [PATCH 613/979] Exclude OpsCenter keyspace by default (JAVA-2871 follow-up) --- .../com/datastax/oss/driver/api/core/config/OptionsMap.java | 2 +- core/src/main/resources/reference.conf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 5123e341036..2b4a767e29d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -342,7 +342,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METADATA_SCHEMA_ENABLED, true); map.put( TypedDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, - ImmutableList.of("!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin")); + ImmutableList.of("!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin", "!OpsCenter")); map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_TIMEOUT, requestTimeout); map.put(TypedDriverOption.METADATA_SCHEMA_REQUEST_PAGE_SIZE, requestPageSize); map.put(TypedDriverOption.METADATA_SCHEMA_WINDOW, Duration.ofSeconds(1)); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index d303db0b036..3ff88c024b0 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1819,7 +1819,7 @@ datastax-java-driver { # Modifiable at runtime: yes, the new value will be used for 
refreshes issued after the # change. # Overridable in a profile: no - refreshed-keyspaces = [ "!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin" ] + refreshed-keyspaces = [ "!system", "!/^system_.*/", "!/^dse_.*/", "!solr_admin", "!OpsCenter" ] # The timeout for the requests to the schema tables. # From 0be46e020cd8e1e2a32f691d02813333923c5f1a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 19 Dec 2020 18:47:22 +0100 Subject: [PATCH 614/979] FIx failing integration tests (JAVA-2871 follow-up) --- .../datastax/oss/driver/core/ConnectIT.java | 4 +-- .../oss/driver/core/metadata/SchemaIT.java | 29 ++++++++----------- 2 files changed, 14 insertions(+), 19 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java index d933643e647..f7619d41791 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ConnectIT.java @@ -67,7 +67,7 @@ public void setup() { // Absolute minimum for a working schema metadata (we just want to check that it gets // loaded at startup). 
when("SELECT * FROM system_schema.keyspaces") - .then(rows().row("keyspace_name", "system"))); + .then(rows().row("keyspace_name", "system").row("keyspace_name", "test"))); } @Test @@ -113,7 +113,7 @@ public void should_wait_for_contact_points_if_reconnection_enabled() throws Exce // Then this doesn't throw try (Session session = sessionFuture.get(30, TimeUnit.SECONDS)) { - assertThat(session.getMetadata().getKeyspaces()).containsKey(CqlIdentifier.fromCql("system")); + assertThat(session.getMetadata().getKeyspaces()).containsKey(CqlIdentifier.fromCql("test")); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 249e0a5377d..aa5a862cb42 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -60,18 +60,21 @@ public class SchemaIT { @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @Test - public void should_expose_system_and_test_keyspace() { + public void should_not_expose_system_and_test_keyspace() { Map keyspaces = sessionRule.session().getMetadata().getKeyspaces(); assertThat(keyspaces) - .containsKeys( + .doesNotContainKeys( // Don't test exhaustively because system keyspaces depend on the Cassandra version, and // keyspaces from other tests might also be present - CqlIdentifier.fromInternal("system"), - CqlIdentifier.fromInternal("system_traces"), - sessionRule.keyspace()); - assertThat(keyspaces.get(CqlIdentifier.fromInternal("system")).getTables()) - .containsKeys(CqlIdentifier.fromInternal("local"), CqlIdentifier.fromInternal("peers")); + CqlIdentifier.fromInternal("system"), CqlIdentifier.fromInternal("system_traces")); + } + + @Test + public void should_expose_test_keyspace() { + Map keyspaces = + sessionRule.session().getMetadata().getKeyspaces(); + 
assertThat(keyspaces).containsKey(sessionRule.keyspace()); } @Test @@ -124,11 +127,7 @@ public void should_enable_schema_programmatically_when_disabled_in_config() { .pollInterval(500, TimeUnit.MILLISECONDS) .atMost(60, TimeUnit.SECONDS) .untilAsserted(() -> assertThat(session.getMetadata().getKeyspaces()).isNotEmpty()); - assertThat(session.getMetadata().getKeyspaces()) - .containsKeys( - CqlIdentifier.fromInternal("system"), - CqlIdentifier.fromInternal("system_traces"), - sessionRule.keyspace()); + assertThat(session.getMetadata().getKeyspaces()).containsKey(sessionRule.keyspace()); session.setSchemaMetadataEnabled(null); assertThat(session.isSchemaMetadataEnabled()).isFalse(); @@ -177,11 +176,7 @@ public void should_refresh_schema_manually() { assertThat(session.getMetadata().getKeyspaces()).isEmpty(); Metadata newMetadata = session.refreshSchema(); - assertThat(newMetadata.getKeyspaces()) - .containsKeys( - CqlIdentifier.fromInternal("system"), - CqlIdentifier.fromInternal("system_traces"), - sessionRule.keyspace()); + assertThat(newMetadata.getKeyspaces()).containsKey(sessionRule.keyspace()); assertThat(session.getMetadata()).isSameAs(newMetadata); } From aedcc71acafd383e89e665c97823710c6f480090 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 19 Dec 2020 19:04:54 +0100 Subject: [PATCH 615/979] Mention automatically excluded keyspaces in upgrade guide (JAVA-2871 follow-up) --- upgrade_guide/README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index a873731705b..6b1536ad12c 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -2,6 +2,8 @@ ### 4.10.0 +#### Enhancements to the `Uuids` utility class + [JAVA-2449](https://datastax-oss.atlassian.net/browse/JAVA-2449) modified the implementation of [Uuids.random()]: this method does not delegate anymore to the JDK's `java.util.UUID.randomUUID()` implementation, but instead re-implements random UUID 
generation using the non-cryptographic @@ -24,6 +26,29 @@ This release also introduces two new methods for random UUID generation: [Uuids.random(Random)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.Random- [Uuids.random(SplittableRandom)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.SplittableRandom- +#### System and DSE keyspaces automatically excluded from metadata and token map computation + +[JAVA-2871](https://datastax-oss.atlassian.net/browse/JAVA-2871) now allows for a more fine-grained +control over which keyspaces should qualify for metadata and token map computation, including the +ability to *exclude* keyspaces based on their names. + +From now on, the following keyspaces are automatically excluded: + +1. The `system` keyspace; +2. All keyspaces starting with `system_`; +3. DSE-specific keyspaces: + 1. All keyspaces starting with `dse_`; + 2. The `solr_admin` keyspace; + 3. The `OpsCenter` keyspace. + +This means that they won't show up anymore in [Metadata.getKeyspaces()], and [TokenMap] will return +empty replicas and token ranges for them. If you need the driver to keep computing metadata and +token map for these keyspaces, you now must modify the following configuration option: +`datastax-java-driver.advanced.metadata.schema.refreshed-keyspaces`. 
+ +[Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/TokenMap.html + ### 4.5.x - 4.6.0 These versions are subject to [JAVA-2676](https://datastax-oss.atlassian.net/browse/JAVA-2676), a From 25d5c7382adffaf09296c5311cfb8fc0375d2e72 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 11:08:19 +0100 Subject: [PATCH 616/979] Simplify lambda --- .../oss/driver/internal/core/control/ControlConnection.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index 294bfd7e060..bcba1e76583 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -303,9 +303,7 @@ private void init( connect( nodes, null, - () -> { - initFuture.complete(null); - }, + () -> initFuture.complete(null), error -> { if (isAuthFailure(error)) { LOG.warn( From ae2d208b24678710a4235f79dc7c8d9d1e84972a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 11:08:35 +0100 Subject: [PATCH 617/979] Remove unused field --- .../driver/internal/core/control/ControlConnectionTestBase.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java index ca349d135a1..8e994c5fdc9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/control/ControlConnectionTestBase.java @@ -58,7 +58,6 @@ abstract class ControlConnectionTestBase { protected static final InetSocketAddress ADDRESS1 = new InetSocketAddress("127.0.0.1", 9042); - protected static final InetSocketAddress ADDRESS2 = new InetSocketAddress("127.0.0.2", 9042); /** How long we wait when verifying mocks for async invocations */ protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(500); From ad3db9cda4bfd6af74e35346057764fa1745ee21 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 11:17:26 +0100 Subject: [PATCH 618/979] JAVA-2904: Upgrade Jackson to 2.12.0 and Tinkerpop to 3.4.9 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 5e9fe4473df..f4097e4d1a4 100644 --- a/pom.xml +++ b/pom.xml @@ -50,12 +50,12 @@ 4.1.16 4.1.51.Final 1.2.1 - 3.4.8 + 3.4.9 1.7.26 1.0.3 20201115 - 2.11.0 - 2.11.0 + 2.12.0 + 2.12.0 1.9.12 1.1.7.3 From 6ec72c452e00c22a7488d6cccec3ef87420f7c6b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 27 Nov 2020 21:28:39 +0100 Subject: [PATCH 619/979] JAVA-2900: Allow the request to retry to be customized by the retry policy --- .../ContinuousCqlRequestHandler.java | 57 ++--- .../ContinuousRequestHandlerBase.java | 194 ++++++++-------- .../graph/ContinuousGraphRequestHandler.java | 55 +++-- .../internal/core/graph/GraphConversions.java | 17 ++ .../core/graph/GraphRequestHandler.java | 219 ++++++++++-------- .../driver/api/core/retry/RetryDecision.java | 6 +- .../driver/api/core/retry/RetryPolicy.java | 154 ++++++++++++ .../driver/api/core/retry/RetryVerdict.java | 60 +++++ .../driver/internal/core/cql/Conversions.java | 29 +++ .../internal/core/cql/CqlPrepareHandler.java | 99 ++++---- .../internal/core/cql/CqlRequestHandler.java | 219 ++++++++++-------- .../core/retry/DefaultRetryPolicy.java | 5 + .../ContinuousCqlRequestHandlerRetryTest.java | 98 ++++---- 
.../ContinuousGraphRequestHandlerTest.java | 11 +- .../api/core/retry/RetryPolicyTestBase.java | 17 +- .../core/cql/CqlPrepareHandlerTest.java | 14 +- .../core/cql/CqlRequestHandlerRetryTest.java | 54 ++--- .../oss/driver/core/AllNodesFailedIT.java | 1 + .../core/config/MapBasedConfigLoaderIT.java | 5 + .../driver/core/connection/FrameLengthIT.java | 1 + .../core/retry/PerProfileRetryPolicyIT.java | 6 + upgrade_guide/README.md | 24 ++ 22 files changed, 876 insertions(+), 469 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java index ca2631fae75..310872cfe47 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandler.java @@ -20,6 +20,7 @@ import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.dse.driver.internal.core.cql.DseConversions; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.Row; @@ -27,6 +28,7 @@ import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.internal.core.cql.DefaultRow; import com.datastax.oss.driver.internal.core.session.DefaultSession; import 
com.datastax.oss.driver.internal.core.util.CountingIterator; @@ -45,13 +47,7 @@ */ @ThreadSafe public class ContinuousCqlRequestHandler - extends ContinuousRequestHandlerBase { - - private final Message message; - private final Duration firstPageTimeout; - private final Duration otherPagesTimeout; - private final int maxEnqueuedPages; - private final int maxPages; + extends ContinuousRequestHandlerBase, ContinuousAsyncResultSet> { ContinuousCqlRequestHandler( @NonNull Statement statement, @@ -68,14 +64,6 @@ public class ContinuousCqlRequestHandler DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, DseSessionMetric.CONTINUOUS_CQL_REQUESTS, DefaultNodeMetric.CQL_MESSAGES); - message = DseConversions.toContinuousPagingMessage(statement, executionProfile, context); - firstPageTimeout = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); - otherPagesTimeout = - executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); - maxEnqueuedPages = - executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); - maxPages = executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); // NOTE that ordering of the following statement matters. // We should register this request after all fields have been initialized. throttler.register(this); @@ -89,40 +77,54 @@ protected Duration getGlobalTimeout() { @NonNull @Override - protected Duration getPageTimeout(int pageNumber) { - return pageNumber == 1 ? 
firstPageTimeout : otherPagesTimeout; + protected Duration getPageTimeout(@NonNull Statement statement, int pageNumber) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + if (pageNumber == 1) { + return executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE); + } else { + return executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); + } } @NonNull @Override - protected Duration getReviseRequestTimeout() { - return otherPagesTimeout; + protected Duration getReviseRequestTimeout(@NonNull Statement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return executionProfile.getDuration(DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES); } @Override - protected int getMaxEnqueuedPages() { - return maxEnqueuedPages; + protected int getMaxEnqueuedPages(@NonNull Statement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); } @Override - protected int getMaxPages() { - return maxPages; + protected int getMaxPages(@NonNull Statement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return executionProfile.getInt(DseDriverOption.CONTINUOUS_PAGING_MAX_PAGES); } @NonNull @Override - protected Message getMessage() { - return message; + protected Message getMessage(@NonNull Statement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return DseConversions.toContinuousPagingMessage(statement, executionProfile, context); } @Override - protected boolean isTracingEnabled() { + protected boolean isTracingEnabled(@NonNull Statement statement) { return false; } @NonNull @Override - protected Map createPayload() { + protected 
Map createPayload(@NonNull Statement statement) { return statement.getCustomPayload(); } @@ -135,6 +137,7 @@ protected ContinuousAsyncResultSet createEmptyResultSet(@NonNull ExecutionInfo e @NonNull @Override protected DefaultContinuousAsyncResultSet createResultSet( + @NonNull Statement statement, @NonNull Rows rows, @NonNull ExecutionInfo executionInfo, @NonNull ColumnDefinitions columnDefinitions) { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index de6dee6f8be..c7784c2b2d3 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -36,8 +36,8 @@ import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; @@ -49,7 +49,6 @@ import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; import 
com.datastax.oss.driver.internal.core.channel.DriverChannel; @@ -114,23 +113,19 @@ public abstract class ContinuousRequestHandlerBase queryPlan; - private final RetryPolicy retryPolicy; protected final RequestThrottler throttler; private final boolean protocolBackpressureAvailable; - private final boolean isIdempotent; private final Timer timer; private final SessionMetricUpdater sessionMetricUpdater; private final boolean specExecEnabled; private final SessionMetric clientTimeoutsMetric; private final SessionMetric continuousRequestsMetric; private final NodeMetric messagesMetric; - private final SpeculativeExecutionPolicy speculativeExecutionPolicy; private final List scheduledExecutions; // The errors on the nodes that were already tried. @@ -162,10 +157,10 @@ public abstract class ContinuousRequestHandlerBase resultSetClass; + private final Class resultSetClass; public ContinuousRequestHandlerBase( @NonNull StatementT statement, @@ -191,23 +186,18 @@ public ContinuousRequestHandlerBase( this.messagesMetric = messagesMetric; this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); LOG.trace("[{}] Creating new continuous handler for request {}", logPrefix, statement); - this.statement = statement; + this.initialStatement = statement; this.session = session; this.keyspace = session.getKeyspace().orElse(null); this.context = context; - this.executionProfile = Conversions.resolveExecutionProfile(this.statement, this.context); + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); this.queryPlan = statement.getNode() != null ? new QueryPlan(statement.getNode()) : context .getLoadBalancingPolicyWrapper() .newQueryPlan(statement, executionProfile.getName(), session); - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); - Boolean idempotent = statement.isIdempotent(); - this.isIdempotent = - idempotent == null - ? 
executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) - : idempotent; this.timer = context.getNettyOptions().getTimer(); this.protocolBackpressureAvailable = @@ -215,11 +205,7 @@ public ContinuousRequestHandlerBase( this.throttler = context.getRequestThrottler(); this.sessionMetricUpdater = session.getMetricUpdater(); this.startTimeNanos = System.nanoTime(); - this.specExecEnabled = specExecEnabled && isIdempotent; - this.speculativeExecutionPolicy = - this.specExecEnabled - ? context.getSpeculativeExecutionPolicy(executionProfile.getName()) - : null; + this.specExecEnabled = specExecEnabled; this.scheduledExecutions = this.specExecEnabled ? new CopyOnWriteArrayList<>() : null; } @@ -227,22 +213,22 @@ public ContinuousRequestHandlerBase( protected abstract Duration getGlobalTimeout(); @NonNull - protected abstract Duration getPageTimeout(int pageNumber); + protected abstract Duration getPageTimeout(@NonNull StatementT statement, int pageNumber); @NonNull - protected abstract Duration getReviseRequestTimeout(); + protected abstract Duration getReviseRequestTimeout(@NonNull StatementT statement); - protected abstract int getMaxEnqueuedPages(); + protected abstract int getMaxEnqueuedPages(@NonNull StatementT statement); - protected abstract int getMaxPages(); + protected abstract int getMaxPages(@NonNull StatementT statement); @NonNull - protected abstract Message getMessage(); + protected abstract Message getMessage(@NonNull StatementT statement); - protected abstract boolean isTracingEnabled(); + protected abstract boolean isTracingEnabled(@NonNull StatementT statement); @NonNull - protected abstract Map createPayload(); + protected abstract Map createPayload(@NonNull StatementT statement); @NonNull protected abstract ResultSetT createEmptyResultSet(@NonNull ExecutionInfo executionInfo); @@ -251,6 +237,7 @@ public ContinuousRequestHandlerBase( @NonNull protected abstract ResultSetT createResultSet( + @NonNull StatementT statement, @NonNull 
Rows rows, @NonNull ExecutionInfo executionInfo, @NonNull ColumnDefinitions columnDefinitions) @@ -260,6 +247,8 @@ protected abstract ResultSetT createResultSet( @Override public void onThrottleReady(boolean wasDelayed) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); if (wasDelayed // avoid call to nanoTime() if metric is disabled: && sessionMetricUpdater.isEnabled( @@ -273,11 +262,13 @@ public void onThrottleReady(boolean wasDelayed) { TimeUnit.NANOSECONDS); } activeExecutionsCount.incrementAndGet(); - sendRequest(null, 0, 0, specExecEnabled); + sendRequest(initialStatement, null, 0, 0, specExecEnabled); } @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); session .getMetricUpdater() .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); @@ -346,6 +337,7 @@ public CompletionStage fetchNextPage() { * @param scheduleSpeculativeExecution whether to schedule the next speculative execution */ private void sendRequest( + StatementT statement, @Nullable Node node, int currentExecutionIndex, int retryCount, @@ -368,6 +360,7 @@ private void sendRequest( } else if (!chosenCallback.isDone()) { NodeResponseCallback nodeResponseCallback = new NodeResponseCallback( + statement, node, channel, currentExecutionIndex, @@ -376,7 +369,11 @@ private void sendRequest( logPrefix); inFlightCallbacks.add(nodeResponseCallback); channel - .write(getMessage(), isTracingEnabled(), createPayload(), nodeResponseCallback) + .write( + getMessage(statement), + isTracingEnabled(statement), + createPayload(statement), + nodeResponseCallback) .addListener(nodeResponseCallback); } } @@ -469,6 +466,7 @@ private class NodeResponseCallback implements ResponseCallback, GenericFutureListener> { private final long messageStartTimeNanos = System.nanoTime(); + private 
final StatementT statement; private final Node node; private final DriverChannel channel; // The identifier of the current execution (0 for the initial execution, 1 for the first @@ -479,6 +477,8 @@ private class NodeResponseCallback private final String logPrefix; private final boolean scheduleSpeculativeExecution; + private final DriverExecutionProfile executionProfile; + // Coordinates concurrent accesses between the client and I/O threads private final ReentrantLock lock = new ReentrantLock(); @@ -534,18 +534,21 @@ private class NodeResponseCallback private final AtomicBoolean nodeSuccessReported = new AtomicBoolean(false); public NodeResponseCallback( + StatementT statement, Node node, DriverChannel channel, int executionIndex, int retryCount, boolean scheduleSpeculativeExecution, String logPrefix) { + this.statement = statement; this.node = node; this.channel = channel; this.executionIndex = executionIndex; this.retryCount = retryCount; this.scheduleSpeculativeExecution = scheduleSpeculativeExecution; this.logPrefix = logPrefix + "|" + executionIndex; + this.executionProfile = Conversions.resolveExecutionProfile(statement, context); } @Override @@ -632,16 +635,17 @@ public void operationComplete(@NonNull Future future) { .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); recordError(node, error); trackNodeError(node, error.getCause()); - sendRequest(null, executionIndex, retryCount, scheduleSpeculativeExecution); + sendRequest(statement, null, executionIndex, retryCount, scheduleSpeculativeExecution); } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); - if (scheduleSpeculativeExecution) { + if (scheduleSpeculativeExecution && Conversions.resolveIdempotence(statement, context)) { int nextExecution = executionIndex + 1; // Note that `node` is the first node of the execution, it might not be the "slow" one // if there were retries, but in practice retries are rare. 
long nextDelay = - speculativeExecutionPolicy.nextExecution(node, keyspace, statement, nextExecution); + Conversions.resolveSpeculativeExecutionPolicy(statement, context) + .nextExecution(node, keyspace, statement, nextExecution); if (nextDelay >= 0) { scheduleSpeculativeExecution(nextExecution, nextDelay); } else { @@ -676,7 +680,7 @@ private void scheduleSpeculativeExecution(int nextExecutionIndex, long delay) { nodeMetricUpdater.incrementCounter( DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName()); } - sendRequest(null, nextExecutionIndex, 0, true); + sendRequest(statement, null, nextExecutionIndex, 0, true); } }, delay, @@ -690,7 +694,7 @@ private Timeout schedulePageTimeout(int expectedPage) { if (expectedPage < 0) { return null; } - Duration timeout = getPageTimeout(expectedPage); + Duration timeout = getPageTimeout(statement, expectedPage); if (timeout.toNanos() <= 0) { return null; } @@ -765,7 +769,7 @@ public void onResponse(@NonNull Frame response) { * Invoked when a continuous paging request hits an unexpected error. * *

          Delegates further processing to to the retry policy ({@link - * #processRetryDecision(RetryDecision, Throwable)}. + * #processRetryVerdict(RetryVerdict, Throwable)}. * * @param error the error encountered, usually a network problem. */ @@ -773,12 +777,14 @@ public void onResponse(@NonNull Frame response) { public void onFailure(@NonNull Throwable error) { cancelTimeout(pageTimeout); LOG.trace(String.format("[%s] Request failure", logPrefix), error); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; + RetryVerdict verdict; + if (!Conversions.resolveIdempotence(statement, context) + || error instanceof FrameTooLongException) { + verdict = RetryVerdict.RETHROW; } else { try { - decision = retryPolicy.onRequestAborted(statement, error, retryCount); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { abort( new IllegalStateException("Unexpected error while invoking the retry policy", cause), @@ -788,13 +794,13 @@ public void onFailure(@NonNull Throwable error) { } updateErrorMetrics( ((DefaultNode) node).getMetricUpdater(), - decision, + verdict, DefaultNodeMetric.ABORTED_REQUESTS, DefaultNodeMetric.RETRIES_ON_ABORTED, DefaultNodeMetric.IGNORES_ON_ABORTED); lock.lock(); try { - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); } finally { lock.unlock(); } @@ -814,7 +820,7 @@ public void onFailure(@NonNull Throwable error) { private void processResultResponse(@NonNull Result result, @Nullable Frame frame) { assert lock.isHeldByCurrentThread(); try { - ExecutionInfo executionInfo = createExecutionInfo(node, result, frame, executionIndex); + ExecutionInfo executionInfo = createExecutionInfo(result, frame); if (result instanceof Rows) { DseRowsMetadata rowsMetadata = (DseRowsMetadata) ((Rows) result).getMetadata(); if 
(columnDefinitions == null) { @@ -833,7 +839,8 @@ private void processResultResponse(@NonNull Result result, @Nullable Frame frame false); } else { int pageSize = ((Rows) result).getData().size(); - ResultSetT resultSet = createResultSet((Rows) result, executionInfo, columnDefinitions); + ResultSetT resultSet = + createResultSet(statement, (Rows) result, executionInfo, columnDefinitions); if (rowsMetadata.isLastContinuousPage) { LOG.trace("[{}] Received last page ({} - {} rows)", logPrefix, pageNumber, pageSize); state = STATE_FINISHED; @@ -894,7 +901,7 @@ private void processErrorResponse(@NonNull Error errorMessage) { LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); recordError(node, error); trackNodeError(node, error); - sendRequest(null, executionIndex, retryCount, false); + sendRequest(statement, null, executionIndex, retryCount, false); } else if (error instanceof QueryValidationException || error instanceof FunctionFailureException || error instanceof ProtocolError @@ -921,18 +928,19 @@ private void processErrorResponse(@NonNull Error errorMessage) { * Processes a recoverable error. * *

          In most cases, delegates to the retry policy and its decision, see {@link - * #processRetryDecision(RetryDecision, Throwable)}. + * #processRetryVerdict(RetryVerdict, Throwable)}. * * @param error the recoverable error. */ private void processRecoverableError(@NonNull CoordinatorException error) { assert lock.isHeldByCurrentThread(); NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); - RetryDecision decision; + RetryVerdict verdict; + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; - decision = - retryPolicy.onReadTimeout( + verdict = + retryPolicy.onReadTimeoutVerdict( statement, readTimeout.getConsistencyLevel(), readTimeout.getBlockFor(), @@ -941,15 +949,15 @@ private void processRecoverableError(@NonNull CoordinatorException error) { retryCount); updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.READ_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - if (isIdempotent) { - decision = - retryPolicy.onWriteTimeout( + if (Conversions.resolveIdempotence(statement, context)) { + verdict = + retryPolicy.onWriteTimeoutVerdict( statement, writeTimeout.getConsistencyLevel(), writeTimeout.getWriteType(), @@ -957,18 +965,18 @@ private void processRecoverableError(@NonNull CoordinatorException error) { writeTimeout.getReceived(), retryCount); } else { - decision = RetryDecision.RETHROW; + verdict = RetryVerdict.RETHROW; } updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.WRITE_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); } else if (error instanceof UnavailableException) { UnavailableException unavailable = (UnavailableException) 
error; - decision = - retryPolicy.onUnavailable( + verdict = + retryPolicy.onUnavailableVerdict( statement, unavailable.getConsistencyLevel(), unavailable.getRequired(), @@ -976,23 +984,23 @@ private void processRecoverableError(@NonNull CoordinatorException error) { retryCount); updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.UNAVAILABLES, DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { - decision = - isIdempotent - ? retryPolicy.onErrorResponse(statement, error, retryCount) - : RetryDecision.RETHROW; + verdict = + Conversions.resolveIdempotence(statement, context) + ? retryPolicy.onErrorResponseVerdict(statement, error, retryCount) + : RetryVerdict.RETHROW; updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.OTHER_ERRORS, DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); } /** @@ -1050,7 +1058,7 @@ private void processUnprepared(@NonNull Unprepared errorMessage) { "[{}] Re-prepare successful, retrying on the same node ({})", logPrefix, node); - sendRequest(node, executionIndex, retryCount, false); + sendRequest(statement, node, executionIndex, retryCount, false); } } else { if (exception instanceof UnexpectedResponseException) { @@ -1074,7 +1082,7 @@ private void processUnprepared(@NonNull Unprepared errorMessage) { LOG.trace("[{}] Re-prepare failed, trying next node", logPrefix); recordError(node, exception); trackNodeError(node, exception); - sendRequest(null, executionIndex, retryCount, false); + sendRequest(statement, null, executionIndex, retryCount, false); } } if (fatalError != null) { @@ -1092,22 +1100,24 @@ private void processUnprepared(@NonNull Unprepared errorMessage) { * Processes the retry decision by triggering a retry, aborting or ignoring; also records the * failures for further access. * - * @param decision the decision to process. 
+ * @param verdict the verdict to process. * @param error the original error. */ - private void processRetryDecision(@NonNull RetryDecision decision, @NonNull Throwable error) { + private void processRetryVerdict(@NonNull RetryVerdict verdict, @NonNull Throwable error) { assert lock.isHeldByCurrentThread(); - LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); - switch (decision) { + LOG.trace("[{}] Processing retry decision {}", logPrefix, verdict); + switch (verdict.getRetryDecision()) { case RETRY_SAME: recordError(node, error); trackNodeError(node, error); - sendRequest(node, executionIndex, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), node, executionIndex, retryCount + 1, false); break; case RETRY_NEXT: recordError(node, error); trackNodeError(node, error); - sendRequest(null, executionIndex, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), null, executionIndex, retryCount + 1, false); break; case RETHROW: trackNodeError(node, error); @@ -1148,8 +1158,8 @@ private void enqueueOrCompletePending(@NonNull Object pageOrError) { return; } - queue = new ArrayDeque<>(getMaxEnqueuedPages()); - numPagesRequested = protocolBackpressureAvailable ? getMaxEnqueuedPages() : 0; + queue = new ArrayDeque<>(getMaxEnqueuedPages(statement)); + numPagesRequested = protocolBackpressureAvailable ? 
getMaxEnqueuedPages(statement) : 0; cancelScheduledTasks(this); } @@ -1174,7 +1184,9 @@ private void enqueueOrCompletePending(@NonNull Object pageOrError) { // Backpressure without protocol support: if the queue grows too large, // disable auto-read so that the channel eventually becomes // non-writable on the server side (causing it to back off for a while) - if (!protocolBackpressureAvailable && queue.size() == getMaxEnqueuedPages() && state > 0) { + if (!protocolBackpressureAvailable + && queue.size() == getMaxEnqueuedPages(statement) + && state > 0) { LOG.trace( "[{}] Exceeded {} queued response pages, disabling auto-read", logPrefix, @@ -1207,7 +1219,7 @@ public CompletableFuture dequeueOrCreatePending() { head = queue.poll(); if (!protocolBackpressureAvailable && head != null - && queue.size() == getMaxEnqueuedPages() - 1) { + && queue.size() == getMaxEnqueuedPages(statement) - 1) { LOG.trace( "[{}] Back to {} queued response pages, re-enabling auto-read", logPrefix, @@ -1270,17 +1282,19 @@ private void maybeRequestMore() { return; } // if we have already requested more than the client needs, then no need to request some more - if (getMaxPages() > 0 && numPagesRequested >= getMaxPages()) { + int maxPages = getMaxPages(statement); + if (maxPages > 0 && numPagesRequested >= maxPages) { return; } // the pages received so far, which is the state minus one int received = state - 1; int requested = numPagesRequested; // the pages that fit in the queue, which is the queue free space minus the requests in flight - int freeSpace = getMaxEnqueuedPages() - queue.size(); + int freeSpace = getMaxEnqueuedPages(statement) - queue.size(); int inFlight = requested - received; int numPagesFittingInQueue = freeSpace - inFlight; - if (numPagesFittingInQueue > 0 && numPagesFittingInQueue >= getMaxEnqueuedPages() / 2) { + if (numPagesFittingInQueue > 0 + && numPagesFittingInQueue >= getMaxEnqueuedPages(statement) / 2) { LOG.trace("[{}] Requesting more {} pages", logPrefix, 
numPagesFittingInQueue); numPagesRequested = requested + numPagesFittingInQueue; sendMorePagesRequest(numPagesFittingInQueue); @@ -1305,7 +1319,7 @@ private void sendMorePagesRequest(int nextPages) { true, Revise.requestMoreContinuousPages(streamId, nextPages), statement.getCustomPayload(), - getReviseRequestTimeout(), + getReviseRequestTimeout(statement), throttler, session.getMetricUpdater(), logPrefix, @@ -1384,7 +1398,7 @@ private void sendCancelRequest() { true, Revise.cancelContinuousPaging(streamId), statement.getCustomPayload(), - getReviseRequestTimeout(), + getReviseRequestTimeout(statement), throttler, session.getMetricUpdater(), logPrefix, @@ -1428,7 +1442,7 @@ private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; context .getRequestTracker() - .onNodeError(statement, error, latencyNanos, executionProfile, node, logPrefix); + .onNodeError(this.statement, error, latencyNanos, executionProfile, node, logPrefix); } } @@ -1490,19 +1504,19 @@ private void stopGlobalRequestTimer() { .getMetricUpdater() .updateTimer( continuousRequestsMetric, - executionProfile.getName(), + null, System.nanoTime() - startTimeNanos, TimeUnit.NANOSECONDS); } private void updateErrorMetrics( @NonNull NodeMetricUpdater metricUpdater, - @NonNull RetryDecision decision, + @NonNull RetryVerdict verdict, @NonNull DefaultNodeMetric error, @NonNull DefaultNodeMetric retriesOnError, @NonNull DefaultNodeMetric ignoresOnError) { metricUpdater.incrementCounter(error, executionProfile.getName()); - switch (decision) { + switch (verdict.getRetryDecision()) { case RETRY_SAME: case RETRY_NEXT: metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); @@ -1570,18 +1584,14 @@ private void completeResultSetFuture( } @NonNull - private ExecutionInfo createExecutionInfo( - @NonNull Node node, - @NonNull Result result, - @Nullable Frame response, - int successfulExecutionIndex) { + 
private ExecutionInfo createExecutionInfo(@NonNull Result result, @Nullable Frame response) { ByteBuffer pagingState = result instanceof Rows ? ((Rows) result).getMetadata().pagingState : null; return new DefaultExecutionInfo( statement, node, startedSpeculativeExecutionsCount.get(), - successfulExecutionIndex, + executionIndex, errors, pagingState, response, diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java index 0e4d79f90d3..d792ed873c6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandler.java @@ -24,9 +24,11 @@ import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousRequestHandlerBase; import com.datastax.dse.driver.internal.core.graph.binary.GraphBinaryModule; import com.datastax.dse.protocol.internal.response.result.DseRowsMetadata; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.Conversions; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.shaded.guava.common.base.MoreObjects; import com.datastax.oss.protocol.internal.Message; @@ -49,12 +51,9 @@ public class ContinuousGraphRequestHandler extends ContinuousRequestHandlerBase, AsyncGraphResultSet> { - private final Message message; - private final GraphProtocol subProtocol; private final GraphBinaryModule graphBinaryModule; + private final GraphSupportChecker graphSupportChecker; private final Duration globalTimeout; - private final int maxEnqueuedPages; - private 
final int maxPages; ContinuousGraphRequestHandler( @NonNull GraphStatement statement, @@ -74,17 +73,13 @@ public class ContinuousGraphRequestHandler DseSessionMetric.GRAPH_REQUESTS, DseNodeMetric.GRAPH_MESSAGES); this.graphBinaryModule = graphBinaryModule; - subProtocol = graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); - message = - GraphConversions.createContinuousMessageFromGraphStatement( - statement, subProtocol, executionProfile, context, graphBinaryModule); + this.graphSupportChecker = graphSupportChecker; + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); globalTimeout = MoreObjects.firstNonNull( statement.getTimeout(), executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, Duration.ZERO)); - maxEnqueuedPages = - executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); - maxPages = executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES); // NOTE that ordering of the following statement matters. // We should register this request after all fields have been initialized. 
throttler.register(this); @@ -98,40 +93,53 @@ protected Duration getGlobalTimeout() { @NonNull @Override - protected Duration getPageTimeout(int pageNumber) { + protected Duration getPageTimeout(@NonNull GraphStatement statement, int pageNumber) { return Duration.ZERO; } @NonNull @Override - protected Duration getReviseRequestTimeout() { + protected Duration getReviseRequestTimeout(@NonNull GraphStatement statement) { return Duration.ZERO; } @Override - protected int getMaxEnqueuedPages() { - return maxEnqueuedPages; + protected int getMaxEnqueuedPages(@NonNull GraphStatement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES); } @Override - protected int getMaxPages() { - return maxPages; + protected int getMaxPages(@NonNull GraphStatement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return executionProfile.getInt(DseDriverOption.GRAPH_CONTINUOUS_PAGING_MAX_PAGES); } @NonNull @Override - protected Message getMessage() { - return message; + protected Message getMessage(@NonNull GraphStatement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + GraphProtocol subProtocol = + graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); + return GraphConversions.createContinuousMessageFromGraphStatement( + statement, subProtocol, executionProfile, context, graphBinaryModule); } @Override - protected boolean isTracingEnabled() { + protected boolean isTracingEnabled(@NonNull GraphStatement statement) { return statement.isTracing(); } @NonNull @Override - protected Map createPayload() { + protected Map createPayload(@NonNull GraphStatement statement) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + 
GraphProtocol subProtocol = + graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); return GraphConversions.createCustomPayload( statement, subProtocol, executionProfile, context, graphBinaryModule); } @@ -145,10 +153,15 @@ protected AsyncGraphResultSet createEmptyResultSet(@NonNull ExecutionInfo execut @NonNull @Override protected ContinuousAsyncGraphResultSet createResultSet( + @NonNull GraphStatement statement, @NonNull Rows rows, @NonNull ExecutionInfo executionInfo, @NonNull ColumnDefinitions columnDefinitions) throws IOException { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + GraphProtocol subProtocol = + graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); Queue graphNodes = new ArrayDeque<>(); for (List row : rows.getData()) { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 7af7160baa1..53f8e98b0ee 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -384,4 +384,21 @@ public static GraphNode createGraphBinaryGraphNode( : "Graph protocol error. Received object should be a Traverser but it is not."; return new ObjectGraphNode(deserializedObject); } + + public static Duration resolveGraphRequestTimeout( + GraphStatement statement, InternalDriverContext context) { + DriverExecutionProfile executionProfile = resolveExecutionProfile(statement, context); + return statement.getTimeout() != null + ? 
statement.getTimeout() + : executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT); + } + + public static GraphProtocol resolveGraphSubProtocol( + GraphStatement statement, + GraphSupportChecker graphSupportChecker, + InternalDriverContext context) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + return graphSupportChecker.inferGraphProtocol(statement, executionProfile, context); + } } diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 46dbaa88768..b193805a2c6 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -15,7 +15,6 @@ */ package com.datastax.dse.driver.internal.core.graph; -import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.graph.AsyncGraphResultSet; import com.datastax.dse.driver.api.core.graph.GraphNode; import com.datastax.dse.driver.api.core.graph.GraphStatement; @@ -33,8 +32,8 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; @@ -45,7 +44,6 @@ import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import 
com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; @@ -99,15 +97,11 @@ public class GraphRequestHandler implements Throttled { private final long startTimeNanos; private final String logPrefix; - private final GraphStatement statement; + private final GraphStatement initialStatement; private final DefaultSession session; private final InternalDriverContext context; - private final DriverExecutionProfile executionProfile; - private final boolean isIdempotent; protected final CompletableFuture result; - private final Message message; private final Timer timer; - private final GraphProtocol subProtocol; /** * How many speculative executions are currently running (including the initial execution). We @@ -126,13 +120,11 @@ public class GraphRequestHandler implements Throttled { private final Timeout scheduledTimeout; private final List scheduledExecutions; private final List inFlightCallbacks; - private final RetryPolicy retryPolicy; - private final SpeculativeExecutionPolicy speculativeExecutionPolicy; private final RequestThrottler throttler; private final RequestTracker requestTracker; private final SessionMetricUpdater sessionMetricUpdater; - private final Map queryCustomPayload; private final GraphBinaryModule graphBinaryModule; + private final GraphSupportChecker graphSupportChecker; // The errors on the nodes that were already tried (lazily initialized on the first error). // We don't use a map because nodes can appear multiple times. 
@@ -148,18 +140,10 @@ public class GraphRequestHandler implements Throttled { this.startTimeNanos = System.nanoTime(); this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); LOG.trace("[{}] Creating new Graph request handler for request {}", logPrefix, statement); - this.statement = statement; + this.initialStatement = statement; this.session = dseSession; this.context = context; - this.executionProfile = GraphConversions.resolveExecutionProfile(this.statement, this.context); - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); - this.speculativeExecutionPolicy = - context.getSpeculativeExecutionPolicy(executionProfile.getName()); - Boolean statementIsIdempotent = statement.isIdempotent(); - this.isIdempotent = - (statementIsIdempotent == null) - ? executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) - : statementIsIdempotent; + this.graphSupportChecker = graphSupportChecker; this.result = new CompletableFuture<>(); this.result.exceptionally( t -> { @@ -173,37 +157,27 @@ public class GraphRequestHandler implements Throttled { return null; }); this.graphBinaryModule = graphBinaryModule; - this.subProtocol = - graphSupportChecker.inferGraphProtocol(this.statement, executionProfile, this.context); - LOG.debug("[{}], Graph protocol used for query: {}", logPrefix, subProtocol); - this.message = - GraphConversions.createMessageFromGraphStatement( - this.statement, subProtocol, executionProfile, this.context, this.graphBinaryModule); this.timer = context.getNettyOptions().getTimer(); - Duration timeout = - statement.getTimeout() != null - ? statement.getTimeout() - : executionProfile.getDuration(DseDriverOption.GRAPH_TIMEOUT, null); - this.scheduledTimeout = scheduleTimeout(timeout); this.activeExecutionsCount = new AtomicInteger(1); this.startedSpeculativeExecutionsCount = new AtomicInteger(0); - this.scheduledExecutions = isIdempotent ? 
new CopyOnWriteArrayList<>() : null; + this.scheduledExecutions = new CopyOnWriteArrayList<>(); this.inFlightCallbacks = new CopyOnWriteArrayList<>(); - this.queryCustomPayload = - GraphConversions.createCustomPayload( - this.statement, subProtocol, executionProfile, this.context, this.graphBinaryModule); - this.requestTracker = context.getRequestTracker(); this.sessionMetricUpdater = session.getMetricUpdater(); + Duration timeout = GraphConversions.resolveGraphRequestTimeout(statement, context); + this.scheduledTimeout = scheduleTimeout(timeout); + this.throttler = context.getRequestThrottler(); this.throttler.register(this); } @Override public void onThrottleReady(boolean wasDelayed) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); if (wasDelayed // avoid call to nanoTime() if metric is disabled: && sessionMetricUpdater.isEnabled( @@ -215,12 +189,12 @@ public void onThrottleReady(boolean wasDelayed) { TimeUnit.NANOSECONDS); } Queue queryPlan = - statement.getNode() != null - ? new QueryPlan(statement.getNode()) + initialStatement.getNode() != null + ? 
new QueryPlan(initialStatement.getNode()) : context .getLoadBalancingPolicyWrapper() - .newQueryPlan(statement, executionProfile.getName(), session); - sendRequest(null, queryPlan, 0, 0, true); + .newQueryPlan(initialStatement, executionProfile.getName(), session); + sendRequest(initialStatement, null, queryPlan, 0, 0, true); } public CompletionStage handle() { @@ -233,6 +207,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { return this.timer.newTimeout( (Timeout timeout1) -> setFinalError( + initialStatement, new DriverTimeoutException("Query timed out after " + timeoutDuration), null, NO_SUCCESSFUL_EXECUTION), @@ -262,6 +237,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { * @param scheduleNextExecution whether to schedule the next speculative execution */ private void sendRequest( + GraphStatement statement, Node retriedNode, Queue queryPlan, int currentExecutionIndex, @@ -285,11 +261,15 @@ private void sendRequest( if (!result.isDone() && activeExecutionsCount.decrementAndGet() == 0) { // We're the last execution so fail the result setFinalError( - AllNodesFailedException.fromErrors(this.errors), null, NO_SUCCESSFUL_EXECUTION); + statement, + AllNodesFailedException.fromErrors(this.errors), + null, + NO_SUCCESSFUL_EXECUTION); } } else { NodeResponseCallback nodeResponseCallback = new NodeResponseCallback( + statement, node, queryPlan, channel, @@ -297,8 +277,18 @@ private void sendRequest( retryCount, scheduleNextExecution, logPrefix); + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + GraphProtocol graphSubProtocol = + GraphConversions.resolveGraphSubProtocol(statement, graphSupportChecker, context); + Message message = + GraphConversions.createMessageFromGraphStatement( + statement, graphSubProtocol, executionProfile, context, graphBinaryModule); + Map customPayload = + GraphConversions.createCustomPayload( + statement, graphSubProtocol, executionProfile, context, 
graphBinaryModule); channel - .write(message, statement.isTracing(), queryCustomPayload, nodeResponseCallback) + .write(message, statement.isTracing(), customPayload, nodeResponseCallback) .addListener(nodeResponseCallback); } } @@ -335,7 +325,11 @@ private void setFinalResult( Result resultMessage, Frame responseFrame, NodeResponseCallback callback) { try { ExecutionInfo executionInfo = buildExecutionInfo(callback, responseFrame); - + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(callback.statement, context); + GraphProtocol subProtocol = + GraphConversions.resolveGraphSubProtocol( + callback.statement, graphSupportChecker, context); Queue graphNodes = new ArrayDeque<>(); for (List row : ((Rows) resultMessage).getData()) { if (subProtocol.isGraphBinary()) { @@ -361,9 +355,9 @@ private void setFinalResult( totalLatencyNanos = completionTimeNanos - startTimeNanos; long nodeLatencyNanos = completionTimeNanos - callback.nodeStartTimeNanos; requestTracker.onNodeSuccess( - statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); + callback.statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); requestTracker.onSuccess( - statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); + callback.statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); } if (sessionMetricUpdater.isEnabled( DseSessionMetric.GRAPH_REQUESTS, executionProfile.getName())) { @@ -382,14 +376,16 @@ private void setFinalResult( if (!executionInfo.getWarnings().isEmpty() && executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) && LOG.isWarnEnabled()) { - logServerWarnings(executionInfo.getWarnings()); + logServerWarnings(callback.statement, executionInfo.getWarnings()); } } catch (Throwable error) { - setFinalError(error, callback.node, NO_SUCCESSFUL_EXECUTION); + setFinalError(callback.statement, error, callback.node, NO_SUCCESSFUL_EXECUTION); } } - private void logServerWarnings(List 
warnings) { + private void logServerWarnings(GraphStatement statement, List warnings) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); // use the RequestLogFormatter to format the query StringBuilder statementString = new StringBuilder(); context @@ -416,8 +412,10 @@ private void logServerWarnings(List warnings) { } private ExecutionInfo buildExecutionInfo(NodeResponseCallback callback, Frame responseFrame) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(callback.statement, context); return new DefaultExecutionInfo( - statement, + callback.statement, callback.node, startedSpeculativeExecutionsCount.get(), callback.execution, @@ -432,12 +430,17 @@ private ExecutionInfo buildExecutionInfo(NodeResponseCallback callback, Frame re @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); sessionMetricUpdater.incrementCounter( DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); - setFinalError(error, null, NO_SUCCESSFUL_EXECUTION); + setFinalError(initialStatement, error, null, NO_SUCCESSFUL_EXECUTION); } - private void setFinalError(Throwable error, Node node, int execution) { + private void setFinalError( + GraphStatement statement, Throwable error, Node node, int execution) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); if (error instanceof DriverException) { ((DriverException) error) .setExecutionInfo( @@ -479,6 +482,7 @@ private class NodeResponseCallback implements ResponseCallback, GenericFutureListener> { private final long nodeStartTimeNanos = System.nanoTime(); + private final GraphStatement statement; private final Node node; private final Queue queryPlan; private final DriverChannel channel; @@ -490,8 +494,10 @@ private class NodeResponseCallback private final 
int retryCount; private final boolean scheduleNextExecution; private final String logPrefix; + private final DriverExecutionProfile executionProfile; private NodeResponseCallback( + GraphStatement statement, Node node, Queue queryPlan, DriverChannel channel, @@ -499,6 +505,7 @@ private NodeResponseCallback( int retryCount, boolean scheduleNextExecution, String logPrefix) { + this.statement = statement; this.node = node; this.queryPlan = queryPlan; this.channel = channel; @@ -506,6 +513,7 @@ private NodeResponseCallback( this.retryCount = retryCount; this.scheduleNextExecution = scheduleNextExecution; this.logPrefix = logPrefix + "|" + execution; + this.executionProfile = Conversions.resolveExecutionProfile(statement, context); } // this gets invoked once the write completes. @@ -516,7 +524,7 @@ public void operationComplete(Future future) { if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { trackNodeError(node, error.getCause(), NANOTIME_NOT_MEASURED_YET); - setFinalError(error.getCause(), node, execution); + setFinalError(statement, error.getCause(), node, execution); } else { LOG.trace( "[{}] Failed to send request on {}, trying next node (cause: {})", @@ -529,7 +537,12 @@ public void operationComplete(Future future) { .getMetricUpdater() .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); sendRequest( - null, queryPlan, execution, retryCount, scheduleNextExecution); // try next node + statement, + null, + queryPlan, + execution, + retryCount, + scheduleNextExecution); // try next node } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); @@ -539,12 +552,13 @@ public void operationComplete(Future future) { cancel(); } else { inFlightCallbacks.add(this); - if (scheduleNextExecution && isIdempotent) { + if (scheduleNextExecution && Conversions.resolveIdempotence(statement, context)) { int nextExecution = execution + 1; long nextDelay; try { nextDelay = - 
speculativeExecutionPolicy.nextExecution(node, null, statement, nextExecution); + Conversions.resolveSpeculativeExecutionPolicy(statement, context) + .nextExecution(node, null, statement, nextExecution); } catch (Throwable cause) { // This is a bug in the policy, but not fatal since we have at least one other // execution already running. Don't fail the whole request. @@ -586,7 +600,7 @@ private void scheduleSpeculativeExecution(int index, long delay) { .getMetricUpdater() .incrementCounter( DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName()); - sendRequest(null, queryPlan, index, 0, true); + sendRequest(statement, null, queryPlan, index, 0, true); } }, delay, @@ -632,11 +646,14 @@ public void onResponse(Frame responseFrame) { new IllegalStateException("Unexpected response " + responseMessage), nodeResponseTimeNanos); setFinalError( - new IllegalStateException("Unexpected response " + responseMessage), node, execution); + statement, + new IllegalStateException("Unexpected response " + responseMessage), + node, + execution); } } catch (Throwable t) { trackNodeError(node, t, nodeResponseTimeNanos); - setFinalError(t, node, execution); + setFinalError(statement, t, node, execution); } } @@ -647,20 +664,21 @@ private void processErrorResponse(Error errorMessage) { LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(null, queryPlan, execution, retryCount, false); + sendRequest(statement, null, queryPlan, execution, retryCount, false); } else if (error instanceof QueryValidationException || error instanceof FunctionFailureException || error instanceof ProtocolError) { LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - setFinalError(error, node, execution); + 
setFinalError(statement, error, node, execution); } else { - RetryDecision decision; + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryVerdict verdict; if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; - decision = - retryPolicy.onReadTimeout( + verdict = + retryPolicy.onReadTimeoutVerdict( statement, readTimeout.getConsistencyLevel(), readTimeout.getBlockFor(), @@ -669,32 +687,32 @@ private void processErrorResponse(Error errorMessage) { retryCount); updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.READ_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - decision = - isIdempotent - ? retryPolicy.onWriteTimeout( + verdict = + Conversions.resolveIdempotence(statement, context) + ? retryPolicy.onWriteTimeoutVerdict( statement, writeTimeout.getConsistencyLevel(), writeTimeout.getWriteType(), writeTimeout.getBlockFor(), writeTimeout.getReceived(), retryCount) - : RetryDecision.RETHROW; + : RetryVerdict.RETHROW; updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.WRITE_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); } else if (error instanceof UnavailableException) { UnavailableException unavailable = (UnavailableException) error; - decision = - retryPolicy.onUnavailable( + verdict = + retryPolicy.onUnavailableVerdict( statement, unavailable.getConsistencyLevel(), unavailable.getRequired(), @@ -702,42 +720,54 @@ private void processErrorResponse(Error errorMessage) { retryCount); updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.UNAVAILABLES, DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { - decision = - isIdempotent - ? 
retryPolicy.onErrorResponse(statement, error, retryCount) - : RetryDecision.RETHROW; + verdict = + Conversions.resolveIdempotence(statement, context) + ? retryPolicy.onErrorResponseVerdict(statement, error, retryCount) + : RetryVerdict.RETHROW; updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.OTHER_ERRORS, DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); } } - private void processRetryDecision(RetryDecision decision, Throwable error) { - LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); - switch (decision) { + private void processRetryVerdict(RetryVerdict verdict, Throwable error) { + LOG.trace("[{}] Processing retry decision {}", logPrefix, verdict); + switch (verdict.getRetryDecision()) { case RETRY_SAME: recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(node, queryPlan, execution, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), + node, + queryPlan, + execution, + retryCount + 1, + false); break; case RETRY_NEXT: recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(null, queryPlan, execution, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), + null, + queryPlan, + execution, + retryCount + 1, + false); break; case RETHROW: trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - setFinalError(error, node, execution); + setFinalError(statement, error, node, execution); break; case IGNORE: setFinalResult(Void.INSTANCE, null, this); @@ -747,12 +777,12 @@ private void processRetryDecision(RetryDecision decision, Throwable error) { private void updateErrorMetrics( NodeMetricUpdater metricUpdater, - RetryDecision decision, + RetryVerdict verdict, DefaultNodeMetric error, DefaultNodeMetric retriesOnError, DefaultNodeMetric ignoresOnError) { 
metricUpdater.incrementCounter(error, executionProfile.getName()); - switch (decision) { + switch (verdict.getRetryDecision()) { case RETRY_SAME: case RETRY_NEXT: metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); @@ -774,24 +804,27 @@ public void onFailure(Throwable error) { return; } LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; + RetryVerdict verdict; + if (!Conversions.resolveIdempotence(statement, context) + || error instanceof FrameTooLongException) { + verdict = RetryVerdict.RETHROW; } else { try { - decision = retryPolicy.onRequestAborted(statement, error, retryCount); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { setFinalError( + statement, new IllegalStateException("Unexpected error while invoking the retry policy", cause), node, NO_SUCCESSFUL_EXECUTION); return; } } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); updateErrorMetrics( ((DefaultNode) node).getMetricUpdater(), - decision, + verdict, DefaultNodeMetric.ABORTED_REQUESTS, DefaultNodeMetric.RETRIES_ON_ABORTED, DefaultNodeMetric.IGNORES_ON_ABORTED); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryDecision.java b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryDecision.java index 8859cdd6e4f..77daed28d93 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryDecision.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryDecision.java @@ -15,7 +15,11 @@ */ package com.datastax.oss.driver.api.core.retry; -/** A decision from the {@link RetryPolicy} on how to handle a retry. */ +/** + * A decision from the {@link RetryPolicy} on how to handle a retry. 
+ * + * @see RetryVerdict#getRetryDecision() + */ public enum RetryDecision { /** Retry the operation on the same node. */ RETRY_SAME, diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryPolicy.java b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryPolicy.java index e36658c9d8e..99df4ca26f9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryPolicy.java @@ -64,7 +64,10 @@ public interface RetryPolicy extends AutoCloseable { * {@link ReadTimeoutException#wasDataPresent()}. * @param retryCount how many times the retry policy has been invoked already for this request * (not counting the current invocation). + * @deprecated As of version 4.10, use {@link #onReadTimeoutVerdict(Request, ConsistencyLevel, + * int, int, boolean, int)} instead. */ + @Deprecated RetryDecision onReadTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -73,6 +76,34 @@ RetryDecision onReadTimeout( boolean dataPresent, int retryCount); + /** + * Whether to retry when the server replied with a {@code READ_TIMEOUT} error; this indicates a + * server-side timeout during a read query, i.e. some replicas did not reply to the + * coordinator in time. + * + * @param request the request that timed out. + * @param cl the requested consistency level. + * @param blockFor the minimum number of replica acknowledgements/responses that were required to + * fulfill the operation. + * @param received the number of replica that had acknowledged/responded to the operation before + * it failed. + * @param dataPresent whether the actual data was amongst the received replica responses. See + * {@link ReadTimeoutException#wasDataPresent()}. + * @param retryCount how many times the retry policy has been invoked already for this request + * (not counting the current invocation). 
+ */ + default RetryVerdict onReadTimeoutVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int blockFor, + int received, + boolean dataPresent, + int retryCount) { + RetryDecision decision = + onReadTimeout(request, cl, blockFor, received, dataPresent, retryCount); + return () -> decision; + } + /** * Whether to retry when the server replied with a {@code WRITE_TIMEOUT} error; this indicates a * server-side timeout during a write query, i.e. some replicas did not reply to the @@ -92,7 +123,10 @@ RetryDecision onReadTimeout( * it failed. * @param retryCount how many times the retry policy has been invoked already for this request * (not counting the current invocation). + * @deprecated As of version 4.10, use {@link #onWriteTimeoutVerdict(Request, ConsistencyLevel, + * WriteType, int, int, int)} instead. */ + @Deprecated RetryDecision onWriteTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -101,6 +135,37 @@ RetryDecision onWriteTimeout( int received, int retryCount); + /** + * Whether to retry when the server replied with a {@code WRITE_TIMEOUT} error; this indicates a + * server-side timeout during a write query, i.e. some replicas did not reply to the + * coordinator in time. + * + *

          Note that this method will only be invoked for {@link Request#isIdempotent()} idempotent} + * requests: when a write times out, it is impossible to determine with 100% certainty whether the + * mutation was applied or not, so the write is never safe to retry; the driver will rethrow the + * error directly, without invoking the retry policy. + * + * @param request the request that timed out. + * @param cl the requested consistency level. + * @param writeType the type of the write for which the timeout was raised. + * @param blockFor the minimum number of replica acknowledgements/responses that were required to + * fulfill the operation. + * @param received the number of replica that had acknowledged/responded to the operation before + * it failed. + * @param retryCount how many times the retry policy has been invoked already for this request + * (not counting the current invocation). + */ + default RetryVerdict onWriteTimeoutVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + @NonNull WriteType writeType, + int blockFor, + int received, + int retryCount) { + RetryDecision decision = onWriteTimeout(request, cl, writeType, blockFor, received, retryCount); + return () -> decision; + } + /** * Whether to retry when the server replied with an {@code UNAVAILABLE} error; this indicates that * the coordinator determined that there were not enough replicas alive to perform a query with @@ -114,7 +179,10 @@ RetryDecision onWriteTimeout( * tried to execute the operation. * @param retryCount how many times the retry policy has been invoked already for this request * (not counting the current invocation). + * @deprecated As of version 4.10, use {@link #onUnavailableVerdict(Request, ConsistencyLevel, + * int, int, int)} instead. 
*/ + @Deprecated RetryDecision onUnavailable( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -122,6 +190,30 @@ RetryDecision onUnavailable( int alive, int retryCount); + /** + * Whether to retry when the server replied with an {@code UNAVAILABLE} error; this indicates that + * the coordinator determined that there were not enough replicas alive to perform a query with + * the requested consistency level. + * + * @param request the request that timed out. + * @param cl the requested consistency level. + * @param required the number of replica acknowledgements/responses required to perform the + * operation (with its required consistency level). + * @param alive the number of replicas that were known to be alive by the coordinator node when it + * tried to execute the operation. + * @param retryCount how many times the retry policy has been invoked already for this request + * (not counting the current invocation). + */ + default RetryVerdict onUnavailableVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int required, + int alive, + int retryCount) { + RetryDecision decision = onUnavailable(request, cl, required, alive, retryCount); + return () -> decision; + } + /** * Whether to retry when a request was aborted before we could get a response from the server. * @@ -139,10 +231,37 @@ RetryDecision onUnavailable( * @param error the error. * @param retryCount how many times the retry policy has been invoked already for this request * (not counting the current invocation). + * @deprecated As of version 4.10, use {@link #onRequestAbortedVerdict(Request, Throwable, int)} + * instead. */ + @Deprecated RetryDecision onRequestAborted( @NonNull Request request, @NonNull Throwable error, int retryCount); + /** + * Whether to retry when a request was aborted before we could get a response from the server. + * + *

          This can happen in two cases: if the connection was closed due to an external event (this + * will manifest as a {@link ClosedConnectionException}, or {@link HeartbeatException} for a + * heartbeat failure); or if there was an unexpected error while decoding the response (this can + * only be a driver bug). + * + *

          Note that this method will only be invoked for {@linkplain Request#isIdempotent() + * idempotent} requests: when execution was aborted before getting a response, it is impossible to + * determine with 100% certainty whether a mutation was applied or not, so a write is never safe + * to retry; the driver will rethrow the error directly, without invoking the retry policy. + * + * @param request the request that was aborted. + * @param error the error. + * @param retryCount how many times the retry policy has been invoked already for this request + * (not counting the current invocation). + */ + default RetryVerdict onRequestAbortedVerdict( + @NonNull Request request, @NonNull Throwable error, int retryCount) { + RetryDecision decision = onRequestAborted(request, error, retryCount); + return () -> decision; + } + /** * Whether to retry when the server replied with a recoverable error (other than {@code * READ_TIMEOUT}, {@code WRITE_TIMEOUT} or {@code UNAVAILABLE}). @@ -168,10 +287,45 @@ RetryDecision onRequestAborted( * @param error the error. * @param retryCount how many times the retry policy has been invoked already for this request * (not counting the current invocation). + * @deprecated As of version 4.10, use {@link #onErrorResponseVerdict(Request, + * CoordinatorException, int)} instead. */ + @Deprecated RetryDecision onErrorResponse( @NonNull Request request, @NonNull CoordinatorException error, int retryCount); + /** + * Whether to retry when the server replied with a recoverable error (other than {@code + * READ_TIMEOUT}, {@code WRITE_TIMEOUT} or {@code UNAVAILABLE}). + * + *

          This can happen for the following errors: {@link OverloadedException}, {@link ServerError}, + * {@link TruncateException}, {@link ReadFailureException}, {@link WriteFailureException}. + * + *

          The following errors are handled internally by the driver, and therefore will never + * be encountered in this method: + * + *

            + *
          • {@link BootstrappingException}: always retried on the next node; + *
          • {@link QueryValidationException} (and its subclasses), {@link FunctionFailureException} + * and {@link ProtocolError}: always rethrown. + *
          + * + *

          Note that this method will only be invoked for {@link Request#isIdempotent()} idempotent} + * requests: when execution was aborted before getting a response, it is impossible to determine + * with 100% certainty whether a mutation was applied or not, so a write is never safe to retry; + * the driver will rethrow the error directly, without invoking the retry policy. + * + * @param request the request that failed. + * @param error the error. + * @param retryCount how many times the retry policy has been invoked already for this request + * (not counting the current invocation). + */ + default RetryVerdict onErrorResponseVerdict( + @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { + RetryDecision decision = onErrorResponse(request, error, retryCount); + return () -> decision; + } + /** Called when the cluster that this policy is associated with closes. */ @Override void close(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java new file mode 100644 index 00000000000..735019aa80f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.retry; + +import com.datastax.oss.driver.api.core.session.Request; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * The verdict returned by a {@link RetryPolicy} determining what to do when a request failed. A + * verdict contains a {@link RetryDecision} indicating if a retry should be attempted at all and + * where, and a method that allows the original request to be modified before the retry. + */ +@FunctionalInterface +public interface RetryVerdict { + + /** A retry verdict that retries the same request on the same node. */ + RetryVerdict RETRY_SAME = () -> RetryDecision.RETRY_SAME; + + /** A retry verdict that retries the same request on the next node in the query plan. */ + RetryVerdict RETRY_NEXT = () -> RetryDecision.RETRY_NEXT; + + /** A retry verdict that ignores the error, returning an empty result set to the caller. */ + RetryVerdict IGNORE = () -> RetryDecision.IGNORE; + + /** A retry verdict that rethrows the execution error to the calling code. */ + RetryVerdict RETHROW = () -> RetryDecision.RETHROW; + + /** @return The retry decision to apply. */ + @NonNull + RetryDecision getRetryDecision(); + + /** + * Returns the request to retry, based on the request that was just executed (and failed). + * + *

          The default retry policy always returns the request as is. Custom retry policies can use + * this method to customize the request to retry, for example, by changing its consistency level, + * query timestamp, custom payload, or even its execution profile. + * + * @param The actual type of the request. + * @param previous The request that was just executed (and failed). + * @return The request to retry. + */ + @NonNull + default RequestT getRetryRequest(@NonNull RequestT previous) { + return previous; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 2dc6e5ddc46..242bf673a7a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -37,6 +37,7 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.RelationMetadata; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; import com.datastax.oss.driver.api.core.servererrors.AlreadyExistsException; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; @@ -55,6 +56,7 @@ import com.datastax.oss.driver.api.core.servererrors.WriteFailureException; import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.DefaultProtocolFeature; @@ -86,6 +88,7 @@ import 
com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableList; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import java.nio.ByteBuffer; +import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -517,4 +520,30 @@ public static CoordinatorException toThrowable( return new ProtocolError(node, "Unknown error code: " + errorMessage.code); } } + + public static boolean resolveIdempotence(Request request, InternalDriverContext context) { + Boolean requestIsIdempotent = request.isIdempotent(); + DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); + return (requestIsIdempotent == null) + ? executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) + : requestIsIdempotent; + } + + public static Duration resolveRequestTimeout(Request request, InternalDriverContext context) { + DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); + return request.getTimeout() != null + ? 
request.getTimeout() + : executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); + } + + public static RetryPolicy resolveRetryPolicy(Request request, InternalDriverContext context) { + DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); + return context.getRetryPolicy(executionProfile.getName()); + } + + public static SpeculativeExecutionPolicy resolveSpeculativeExecutionPolicy( + Request request, InternalDriverContext context) { + DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); + return context.getSpeculativeExecutionPolicy(executionProfile.getName()); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index 33ce8de23ee..d60a6c65260 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; @@ -79,17 +80,13 @@ public class CqlPrepareHandler implements Throttled { private final long startTimeNanos; private final String logPrefix; - private final PrepareRequest request; + private final PrepareRequest initialRequest; private final DefaultSession session; private final InternalDriverContext context; - private final DriverExecutionProfile executionProfile; private final Queue queryPlan; protected final CompletableFuture result; - private 
final Message message; private final Timer timer; - private final Duration timeout; private final Timeout scheduledTimeout; - private final RetryPolicy retryPolicy; private final RequestThrottler throttler; private final Boolean prepareOnAllNodes; private volatile InitialPrepareCallback initialCallback; @@ -108,15 +105,14 @@ protected CqlPrepareHandler( this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); LOG.trace("[{}] Creating new handler for prepare request {}", logPrefix, request); - this.request = request; + this.initialRequest = request; this.session = session; this.context = context; - this.executionProfile = Conversions.resolveExecutionProfile(request, context); + DriverExecutionProfile executionProfile = Conversions.resolveExecutionProfile(request, context); this.queryPlan = context .getLoadBalancingPolicyWrapper() .newQueryPlan(request, executionProfile.getName(), session); - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); this.result = new CompletableFuture<>(); this.result.exceptionally( @@ -130,22 +126,9 @@ protected CqlPrepareHandler( } return null; }); - ProtocolVersion protocolVersion = context.getProtocolVersion(); - ProtocolVersionRegistry registry = context.getProtocolVersionRegistry(); - CqlIdentifier keyspace = request.getKeyspace(); - if (keyspace != null - && !registry.supports(protocolVersion, DefaultProtocolFeature.PER_REQUEST_KEYSPACE)) { - throw new IllegalArgumentException( - "Can't use per-request keyspace with protocol " + protocolVersion); - } - this.message = - new Prepare(request.getQuery(), (keyspace == null) ? null : keyspace.asInternal()); this.timer = context.getNettyOptions().getTimer(); - this.timeout = - request.getTimeout() != null - ? 
request.getTimeout() - : executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); + Duration timeout = Conversions.resolveRequestTimeout(request, context); this.scheduledTimeout = scheduleTimeout(timeout); this.prepareOnAllNodes = executionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES); @@ -155,6 +138,8 @@ protected CqlPrepareHandler( @Override public void onThrottleReady(boolean wasDelayed) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialRequest, context); if (wasDelayed) { session .getMetricUpdater() @@ -164,7 +149,7 @@ public void onThrottleReady(boolean wasDelayed) { System.nanoTime() - startTimeNanos, TimeUnit.NANOSECONDS); } - sendRequest(null, 0); + sendRequest(initialRequest, null, 0); } public CompletableFuture handle() { @@ -193,7 +178,7 @@ private void cancelTimeout() { } } - private void sendRequest(Node node, int retryCount) { + private void sendRequest(PrepareRequest request, Node node, int retryCount) { if (result.isDone()) { return; } @@ -210,13 +195,29 @@ private void sendRequest(Node node, int retryCount) { setFinalError(AllNodesFailedException.fromErrors(this.errors)); } else { InitialPrepareCallback initialPrepareCallback = - new InitialPrepareCallback(node, channel, retryCount); + new InitialPrepareCallback(request, node, channel, retryCount); + + Prepare message = toPrepareMessage(request); + channel .write(message, false, request.getCustomPayload(), initialPrepareCallback) .addListener(initialPrepareCallback); } } + @NonNull + private Prepare toPrepareMessage(PrepareRequest request) { + ProtocolVersion protocolVersion = context.getProtocolVersion(); + ProtocolVersionRegistry registry = context.getProtocolVersionRegistry(); + CqlIdentifier keyspace = request.getKeyspace(); + if (keyspace != null + && !registry.supports(protocolVersion, DefaultProtocolFeature.PER_REQUEST_KEYSPACE)) { + throw new IllegalArgumentException( + "Can't use per-request keyspace with protocol " + 
protocolVersion); + } + return new Prepare(request.getQuery(), (keyspace == null) ? null : keyspace.asInternal()); + } + private void recordError(Node node, Throwable error) { // Use a local variable to do only a single single volatile read in the nominal case List> errorsSnapshot = this.errors; @@ -231,19 +232,19 @@ private void recordError(Node node, Throwable error) { errorsSnapshot.add(new AbstractMap.SimpleEntry<>(node, error)); } - private void setFinalResult(Prepared prepared) { + private void setFinalResult(PrepareRequest request, Prepared response) { // Whatever happens below, we're done with this stream id throttler.signalSuccess(this); DefaultPreparedStatement preparedStatement = - Conversions.toPreparedStatement(prepared, request, context); + Conversions.toPreparedStatement(response, request, context); session .getRepreparePayloads() .put(preparedStatement.getId(), preparedStatement.getRepreparePayload()); if (prepareOnAllNodes) { - prepareOnOtherNodes() + prepareOnOtherNodes(request) .thenRun( () -> { LOG.trace( @@ -261,19 +262,19 @@ private void setFinalResult(Prepared prepared) { } } - private CompletionStage prepareOnOtherNodes() { + private CompletionStage prepareOnOtherNodes(PrepareRequest request) { List> otherNodesFutures = new ArrayList<>(); // Only process the rest of the query plan. Any node before that is either the coordinator, or // a node that failed (we assume that retrying right now has little chance of success). for (Node node : queryPlan) { - otherNodesFutures.add(prepareOnOtherNode(node)); + otherNodesFutures.add(prepareOnOtherNode(request, node)); } return CompletableFutures.allDone(otherNodesFutures); } // Try to reprepare on another node, after the initial query has succeeded. Errors are not // blocking, the preparation will be retried later on that node. Simply warn and move on. 
- private CompletionStage prepareOnOtherNode(Node node) { + private CompletionStage prepareOnOtherNode(PrepareRequest request, Node node) { LOG.trace("[{}] Repreparing on {}", logPrefix, node); DriverChannel channel = session.getChannel(node, logPrefix); if (channel == null) { @@ -284,9 +285,9 @@ private CompletionStage prepareOnOtherNode(Node node) { ThrottledAdminRequestHandler.prepare( channel, false, - message, + toPrepareMessage(request), request.getCustomPayload(), - timeout, + Conversions.resolveRequestTimeout(request, context), throttler, session.getMetricUpdater(), logPrefix); @@ -307,6 +308,8 @@ private CompletionStage prepareOnOtherNode(Node node) { @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialRequest, context); session .getMetricUpdater() .incrementCounter(DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); @@ -326,13 +329,16 @@ private void setFinalError(Throwable error) { private class InitialPrepareCallback implements ResponseCallback, GenericFutureListener> { + private final PrepareRequest request; private final Node node; private final DriverChannel channel; // How many times we've invoked the retry policy and it has returned a "retry" decision (0 for // the first attempt of each execution). 
private final int retryCount; - private InitialPrepareCallback(Node node, DriverChannel channel, int retryCount) { + private InitialPrepareCallback( + PrepareRequest request, Node node, DriverChannel channel, int retryCount) { + this.request = request; this.node = node; this.channel = channel; this.retryCount = retryCount; @@ -348,7 +354,7 @@ public void operationComplete(Future future) { node, future.cause().toString()); recordError(node, future.cause()); - sendRequest(null, retryCount); // try next host + sendRequest(request, null, retryCount); // try next host } else { if (result.isDone()) { // Might happen if the timeout just fired @@ -369,7 +375,7 @@ public void onResponse(Frame responseFrame) { Message responseMessage = responseFrame.message; if (responseMessage instanceof Prepared) { LOG.trace("[{}] Got result, completing", logPrefix); - setFinalResult((Prepared) responseMessage); + setFinalResult(request, (Prepared) responseMessage); } else if (responseMessage instanceof Error) { LOG.trace("[{}] Got error response, processing", logPrefix); processErrorResponse((Error) responseMessage); @@ -399,7 +405,7 @@ private void processErrorResponse(Error errorMessage) { if (error instanceof BootstrappingException) { LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); recordError(node, error); - sendRequest(null, retryCount); + sendRequest(request, null, retryCount); } else if (error instanceof QueryValidationException || error instanceof FunctionFailureException || error instanceof ProtocolError) { @@ -408,21 +414,23 @@ private void processErrorResponse(Error errorMessage) { } else { // Because prepare requests are known to always be idempotent, we call the retry policy // directly, without checking the flag. 
- RetryDecision decision = retryPolicy.onErrorResponse(request, error, retryCount); - processRetryDecision(decision, error); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(request, context); + RetryVerdict verdict = retryPolicy.onErrorResponseVerdict(request, error, retryCount); + processRetryVerdict(verdict, error); } } - private void processRetryDecision(RetryDecision decision, Throwable error) { + private void processRetryVerdict(RetryVerdict verdict, Throwable error) { + RetryDecision decision = verdict.getRetryDecision(); LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); switch (decision) { case RETRY_SAME: recordError(node, error); - sendRequest(node, retryCount + 1); + sendRequest(verdict.getRetryRequest(request), node, retryCount + 1); break; case RETRY_NEXT: recordError(node, error); - sendRequest(null, retryCount + 1); + sendRequest(verdict.getRetryRequest(request), null, retryCount + 1); break; case RETHROW: setFinalError(error); @@ -442,15 +450,16 @@ public void onFailure(Throwable error) { return; } LOG.trace("[{}] Request failure, processing: {}", logPrefix, error.toString()); - RetryDecision decision; + RetryVerdict verdict; try { - decision = retryPolicy.onRequestAborted(request, error, retryCount); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(request, context); + verdict = retryPolicy.onRequestAbortedVerdict(request, error, retryCount); } catch (Throwable cause) { setFinalError( new IllegalStateException("Unexpected error while invoking the retry policy", cause)); return; } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); } public void cancel() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 0f973964694..166563b3160 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -29,8 +29,8 @@ import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; @@ -41,7 +41,6 @@ import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; -import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; @@ -99,14 +98,11 @@ public class CqlRequestHandler implements Throttled { private final long startTimeNanos; private final String logPrefix; - private final Statement statement; + private final Statement initialStatement; private final DefaultSession session; private final CqlIdentifier keyspace; private final InternalDriverContext context; - @NonNull private final DriverExecutionProfile executionProfile; - private final boolean isIdempotent; protected final CompletableFuture result; - private final Message message; private final Timer timer; /** * How many speculative executions are currently running (including the initial execution). 
We @@ -121,12 +117,9 @@ public class CqlRequestHandler implements Throttled { */ private final AtomicInteger startedSpeculativeExecutionsCount; - private final Duration timeout; final Timeout scheduledTimeout; final List scheduledExecutions; private final List inFlightCallbacks; - private final RetryPolicy retryPolicy; - private final SpeculativeExecutionPolicy speculativeExecutionPolicy; private final RequestThrottler throttler; private final RequestTracker requestTracker; private final SessionMetricUpdater sessionMetricUpdater; @@ -145,19 +138,10 @@ protected CqlRequestHandler( this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); LOG.trace("[{}] Creating new handler for request {}", logPrefix, statement); - this.statement = statement; + this.initialStatement = statement; this.session = session; this.keyspace = session.getKeyspace().orElse(null); this.context = context; - this.executionProfile = Conversions.resolveExecutionProfile(statement, context); - this.retryPolicy = context.getRetryPolicy(executionProfile.getName()); - this.speculativeExecutionPolicy = - context.getSpeculativeExecutionPolicy(executionProfile.getName()); - Boolean statementIsIdempotent = statement.isIdempotent(); - this.isIdempotent = - (statementIsIdempotent == null) - ? executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) - : statementIsIdempotent; this.result = new CompletableFuture<>(); this.result.exceptionally( t -> { @@ -170,29 +154,27 @@ protected CqlRequestHandler( } return null; }); - this.message = Conversions.toMessage(statement, executionProfile, context); - this.timer = context.getNettyOptions().getTimer(); - - this.timeout = - statement.getTimeout() != null - ? 
statement.getTimeout() - : executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); - this.scheduledTimeout = scheduleTimeout(timeout); this.activeExecutionsCount = new AtomicInteger(1); this.startedSpeculativeExecutionsCount = new AtomicInteger(0); - this.scheduledExecutions = isIdempotent ? new CopyOnWriteArrayList<>() : null; + this.scheduledExecutions = new CopyOnWriteArrayList<>(); this.inFlightCallbacks = new CopyOnWriteArrayList<>(); this.requestTracker = context.getRequestTracker(); this.sessionMetricUpdater = session.getMetricUpdater(); + this.timer = context.getNettyOptions().getTimer(); + Duration timeout = Conversions.resolveRequestTimeout(statement, context); + this.scheduledTimeout = scheduleTimeout(timeout); + this.throttler = context.getRequestThrottler(); this.throttler.register(this); } @Override public void onThrottleReady(boolean wasDelayed) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); if (wasDelayed // avoid call to nanoTime() if metric is disabled: && sessionMetricUpdater.isEnabled( @@ -204,12 +186,12 @@ public void onThrottleReady(boolean wasDelayed) { TimeUnit.NANOSECONDS); } Queue queryPlan = - this.statement.getNode() != null - ? new QueryPlan(this.statement.getNode()) + this.initialStatement.getNode() != null + ? 
new QueryPlan(this.initialStatement.getNode()) : context .getLoadBalancingPolicyWrapper() - .newQueryPlan(statement, executionProfile.getName(), session); - sendRequest(null, queryPlan, 0, 0, true); + .newQueryPlan(initialStatement, executionProfile.getName(), session); + sendRequest(initialStatement, null, queryPlan, 0, 0, true); } public CompletionStage handle() { @@ -222,6 +204,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { return this.timer.newTimeout( (Timeout timeout1) -> setFinalError( + initialStatement, new DriverTimeoutException("Query timed out after " + timeoutDuration), null, -1), @@ -242,6 +225,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { /** * Sends the request to the next available node. * + * @param statement The statement to execute. * @param retriedNode if not null, it will be attempted first before the rest of the query plan. * @param queryPlan the list of nodes to try (shared with all other executions) * @param currentExecutionIndex 0 for the initial execution, 1 for the first speculative one, etc. 
@@ -251,6 +235,7 @@ private Timeout scheduleTimeout(Duration timeoutDuration) { * @param scheduleNextExecution whether to schedule the next speculative execution */ private void sendRequest( + Statement statement, Node retriedNode, Queue queryPlan, int currentExecutionIndex, @@ -273,11 +258,12 @@ private void sendRequest( // We've reached the end of the query plan without finding any node to write to if (!result.isDone() && activeExecutionsCount.decrementAndGet() == 0) { // We're the last execution so fail the result - setFinalError(AllNodesFailedException.fromErrors(this.errors), null, -1); + setFinalError(statement, AllNodesFailedException.fromErrors(this.errors), null, -1); } } else { NodeResponseCallback nodeResponseCallback = new NodeResponseCallback( + statement, node, queryPlan, channel, @@ -285,6 +271,9 @@ private void sendRequest( retryCount, scheduleNextExecution, logPrefix); + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); + Message message = Conversions.toMessage(statement, executionProfile, context); channel .write(message, statement.isTracing(), statement.getCustomPayload(), nodeResponseCallback) .addListener(nodeResponseCallback); @@ -336,40 +325,51 @@ private void setFinalResult( // Only call nanoTime() if we're actually going to use it long completionTimeNanos = NANOTIME_NOT_MEASURED_YET, totalLatencyNanos = NANOTIME_NOT_MEASURED_YET; + if (!(requestTracker instanceof NoopRequestTracker)) { completionTimeNanos = System.nanoTime(); totalLatencyNanos = completionTimeNanos - startTimeNanos; long nodeLatencyNanos = completionTimeNanos - callback.nodeStartTimeNanos; requestTracker.onNodeSuccess( - statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); + callback.statement, + nodeLatencyNanos, + callback.executionProfile, + callback.node, + logPrefix); requestTracker.onSuccess( - statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); + callback.statement, + 
totalLatencyNanos, + callback.executionProfile, + callback.node, + logPrefix); } if (sessionMetricUpdater.isEnabled( - DefaultSessionMetric.CQL_REQUESTS, executionProfile.getName())) { + DefaultSessionMetric.CQL_REQUESTS, callback.executionProfile.getName())) { if (completionTimeNanos == NANOTIME_NOT_MEASURED_YET) { completionTimeNanos = System.nanoTime(); totalLatencyNanos = completionTimeNanos - startTimeNanos; } sessionMetricUpdater.updateTimer( DefaultSessionMetric.CQL_REQUESTS, - executionProfile.getName(), + callback.executionProfile.getName(), totalLatencyNanos, TimeUnit.NANOSECONDS); } } // log the warnings if they have NOT been disabled if (!executionInfo.getWarnings().isEmpty() - && executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) + && callback.executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) && LOG.isWarnEnabled()) { - logServerWarnings(executionInfo.getWarnings()); + logServerWarnings( + callback.statement, callback.executionProfile, executionInfo.getWarnings()); } } catch (Throwable error) { - setFinalError(error, callback.node, -1); + setFinalError(callback.statement, error, callback.node, -1); } } - private void logServerWarnings(List warnings) { + private void logServerWarnings( + Statement statement, DriverExecutionProfile executionProfile, List warnings) { // use the RequestLogFormatter to format the query StringBuilder statementString = new StringBuilder(); context @@ -403,7 +403,7 @@ private ExecutionInfo buildExecutionInfo( ByteBuffer pagingState = (resultMessage instanceof Rows) ? 
((Rows) resultMessage).getMetadata().pagingState : null; return new DefaultExecutionInfo( - statement, + callback.statement, callback.node, startedSpeculativeExecutionsCount.get(), callback.execution, @@ -413,17 +413,21 @@ private ExecutionInfo buildExecutionInfo( schemaInAgreement, session, context, - executionProfile); + callback.executionProfile); } @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(initialStatement, context); sessionMetricUpdater.incrementCounter( DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); - setFinalError(error, null, -1); + setFinalError(initialStatement, error, null, -1); } - private void setFinalError(Throwable error, Node node, int execution) { + private void setFinalError(Statement statement, Throwable error, Node node, int execution) { + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(statement, context); if (error instanceof DriverException) { ((DriverException) error) .setExecutionInfo( @@ -465,6 +469,8 @@ private class NodeResponseCallback implements ResponseCallback, GenericFutureListener> { private final long nodeStartTimeNanos = System.nanoTime(); + private final Statement statement; + private final DriverExecutionProfile executionProfile; private final Node node; private final Queue queryPlan; private final DriverChannel channel; @@ -478,6 +484,7 @@ private class NodeResponseCallback private final String logPrefix; private NodeResponseCallback( + Statement statement, Node node, Queue queryPlan, DriverChannel channel, @@ -485,6 +492,7 @@ private NodeResponseCallback( int retryCount, boolean scheduleNextExecution, String logPrefix) { + this.statement = statement; this.node = node; this.queryPlan = queryPlan; this.channel = channel; @@ -492,6 +500,7 @@ private NodeResponseCallback( this.retryCount = retryCount; this.scheduleNextExecution = scheduleNextExecution; 
this.logPrefix = logPrefix + "|" + execution; + this.executionProfile = Conversions.resolveExecutionProfile(statement, context); } // this gets invoked once the write completes. @@ -502,7 +511,7 @@ public void operationComplete(Future future) throws Exception { if (error instanceof EncoderException && error.getCause() instanceof FrameTooLongException) { trackNodeError(node, error.getCause(), NANOTIME_NOT_MEASURED_YET); - setFinalError(error.getCause(), node, execution); + setFinalError(statement, error.getCause(), node, execution); } else { LOG.trace( "[{}] Failed to send request on {}, trying next node (cause: {})", @@ -515,7 +524,12 @@ public void operationComplete(Future future) throws Exception { .getMetricUpdater() .incrementCounter(DefaultNodeMetric.UNSENT_REQUESTS, executionProfile.getName()); sendRequest( - null, queryPlan, execution, retryCount, scheduleNextExecution); // try next node + statement, + null, + queryPlan, + execution, + retryCount, + scheduleNextExecution); // try next node } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); @@ -525,13 +539,13 @@ public void operationComplete(Future future) throws Exception { cancel(); } else { inFlightCallbacks.add(this); - if (scheduleNextExecution && isIdempotent) { + if (scheduleNextExecution && Conversions.resolveIdempotence(statement, context)) { int nextExecution = execution + 1; long nextDelay; try { nextDelay = - speculativeExecutionPolicy.nextExecution( - node, keyspace, statement, nextExecution); + Conversions.resolveSpeculativeExecutionPolicy(statement, context) + .nextExecution(node, keyspace, statement, nextExecution); } catch (Throwable cause) { // This is a bug in the policy, but not fatal since we have at least one other // execution already running. Don't fail the whole request. 
@@ -573,7 +587,7 @@ private void scheduleSpeculativeExecution(int index, long delay) { .getMetricUpdater() .incrementCounter( DefaultNodeMetric.SPECULATIVE_EXECUTIONS, executionProfile.getName()); - sendRequest(null, queryPlan, index, 0, true); + sendRequest(statement, null, queryPlan, index, 0, true); } }, delay, @@ -645,11 +659,14 @@ public void onResponse(Frame responseFrame) { new IllegalStateException("Unexpected response " + responseMessage), nodeResponseTimeNanos); setFinalError( - new IllegalStateException("Unexpected response " + responseMessage), node, execution); + statement, + new IllegalStateException("Unexpected response " + responseMessage), + node, + execution); } } catch (Throwable t) { trackNodeError(node, t, nodeResponseTimeNanos); - setFinalError(t, node, execution); + setFinalError(statement, t, node, execution); } } @@ -675,7 +692,7 @@ private void processErrorResponse(Error errorMessage) { true, reprepareMessage, repreparePayload.customPayload, - timeout, + Conversions.resolveRequestTimeout(statement, context), throttler, sessionMetricUpdater, logPrefix); @@ -696,19 +713,19 @@ private void processErrorResponse(Error errorMessage) { || prepareError instanceof ProtocolError) { LOG.trace("[{}] Unrecoverable error on reprepare, rethrowing", logPrefix); trackNodeError(node, prepareError, NANOTIME_NOT_MEASURED_YET); - setFinalError(prepareError, node, execution); + setFinalError(statement, prepareError, node, execution); return null; } } } else if (exception instanceof RequestThrottlingException) { trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); - setFinalError(exception, node, execution); + setFinalError(statement, exception, node, execution); return null; } recordError(node, exception); trackNodeError(node, exception, NANOTIME_NOT_MEASURED_YET); LOG.trace("[{}] Reprepare failed, trying next node", logPrefix); - sendRequest(null, queryPlan, execution, retryCount, false); + sendRequest(statement, null, queryPlan, execution, retryCount, 
false); } else { if (!repreparedId.equals(idToReprepare)) { IllegalStateException illegalStateException = @@ -721,10 +738,10 @@ private void processErrorResponse(Error errorMessage) { Bytes.toHexString(idToReprepare), Bytes.toHexString(repreparedId))); trackNodeError(node, illegalStateException, NANOTIME_NOT_MEASURED_YET); - setFinalError(illegalStateException, node, execution); + setFinalError(statement, illegalStateException, node, execution); } LOG.trace("[{}] Reprepare sucessful, retrying", logPrefix); - sendRequest(node, queryPlan, execution, retryCount, false); + sendRequest(statement, node, queryPlan, execution, retryCount, false); } return null; }); @@ -736,20 +753,21 @@ private void processErrorResponse(Error errorMessage) { LOG.trace("[{}] {} is bootstrapping, trying next node", logPrefix, node); recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(null, queryPlan, execution, retryCount, false); + sendRequest(statement, null, queryPlan, execution, retryCount, false); } else if (error instanceof QueryValidationException || error instanceof FunctionFailureException || error instanceof ProtocolError) { LOG.trace("[{}] Unrecoverable error, rethrowing", logPrefix); metricUpdater.incrementCounter(DefaultNodeMetric.OTHER_ERRORS, executionProfile.getName()); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - setFinalError(error, node, execution); + setFinalError(statement, error, node, execution); } else { - RetryDecision decision; + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryVerdict verdict; if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; - decision = - retryPolicy.onReadTimeout( + verdict = + retryPolicy.onReadTimeoutVerdict( statement, readTimeout.getConsistencyLevel(), readTimeout.getBlockFor(), @@ -758,32 +776,32 @@ private void processErrorResponse(Error errorMessage) { retryCount); updateErrorMetrics( 
metricUpdater, - decision, + verdict, DefaultNodeMetric.READ_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - decision = - isIdempotent - ? retryPolicy.onWriteTimeout( + verdict = + Conversions.resolveIdempotence(statement, context) + ? retryPolicy.onWriteTimeoutVerdict( statement, writeTimeout.getConsistencyLevel(), writeTimeout.getWriteType(), writeTimeout.getBlockFor(), writeTimeout.getReceived(), retryCount) - : RetryDecision.RETHROW; + : RetryVerdict.RETHROW; updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.WRITE_TIMEOUTS, DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT); } else if (error instanceof UnavailableException) { UnavailableException unavailable = (UnavailableException) error; - decision = - retryPolicy.onUnavailable( + verdict = + retryPolicy.onUnavailableVerdict( statement, unavailable.getConsistencyLevel(), unavailable.getRequired(), @@ -791,42 +809,54 @@ private void processErrorResponse(Error errorMessage) { retryCount); updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.UNAVAILABLES, DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { - decision = - isIdempotent - ? retryPolicy.onErrorResponse(statement, error, retryCount) - : RetryDecision.RETHROW; + verdict = + Conversions.resolveIdempotence(statement, context) + ? 
retryPolicy.onErrorResponseVerdict(statement, error, retryCount) + : RetryVerdict.RETHROW; updateErrorMetrics( metricUpdater, - decision, + verdict, DefaultNodeMetric.OTHER_ERRORS, DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, DefaultNodeMetric.IGNORES_ON_OTHER_ERROR); } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); } } - private void processRetryDecision(RetryDecision decision, Throwable error) { - LOG.trace("[{}] Processing retry decision {}", logPrefix, decision); - switch (decision) { + private void processRetryVerdict(RetryVerdict verdict, Throwable error) { + LOG.trace("[{}] Processing retry decision {}", logPrefix, verdict); + switch (verdict.getRetryDecision()) { case RETRY_SAME: recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(node, queryPlan, execution, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), + node, + queryPlan, + execution, + retryCount + 1, + false); break; case RETRY_NEXT: recordError(node, error); trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - sendRequest(null, queryPlan, execution, retryCount + 1, false); + sendRequest( + verdict.getRetryRequest(statement), + null, + queryPlan, + execution, + retryCount + 1, + false); break; case RETHROW: trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); - setFinalError(error, node, execution); + setFinalError(statement, error, node, execution); break; case IGNORE: setFinalResult(Void.INSTANCE, null, true, this); @@ -836,12 +866,12 @@ private void processRetryDecision(RetryDecision decision, Throwable error) { private void updateErrorMetrics( NodeMetricUpdater metricUpdater, - RetryDecision decision, + RetryVerdict verdict, DefaultNodeMetric error, DefaultNodeMetric retriesOnError, DefaultNodeMetric ignoresOnError) { metricUpdater.incrementCounter(error, executionProfile.getName()); - switch (decision) { + switch (verdict.getRetryDecision()) { case RETRY_SAME: case RETRY_NEXT: 
metricUpdater.incrementCounter(DefaultNodeMetric.RETRIES, executionProfile.getName()); @@ -863,24 +893,27 @@ public void onFailure(Throwable error) { return; } LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); - RetryDecision decision; - if (!isIdempotent || error instanceof FrameTooLongException) { - decision = RetryDecision.RETHROW; + RetryVerdict verdict; + if (!Conversions.resolveIdempotence(statement, context) + || error instanceof FrameTooLongException) { + verdict = RetryVerdict.RETHROW; } else { try { - decision = retryPolicy.onRequestAborted(statement, error, retryCount); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { setFinalError( + statement, new IllegalStateException("Unexpected error while invoking the retry policy", cause), null, execution); return; } } - processRetryDecision(decision, error); + processRetryVerdict(verdict, error); updateErrorMetrics( ((DefaultNode) node).getMetricUpdater(), - decision, + verdict, DefaultNodeMetric.ABORTED_REQUESTS, DefaultNodeMetric.RETRIES_ON_ABORTED, DefaultNodeMetric.IGNORES_ON_ABORTED); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java index b988f66fce8..b9653bc158a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java @@ -102,6 +102,7 @@ public DefaultRetryPolicy( *

          Otherwise, the exception is rethrown. */ @Override + @Deprecated public RetryDecision onReadTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -135,6 +136,7 @@ public RetryDecision onReadTimeout( *

          Otherwise, the exception is rethrown. */ @Override + @Deprecated public RetryDecision onWriteTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -167,6 +169,7 @@ public RetryDecision onWriteTimeout( *

          Otherwise, the exception is rethrown. */ @Override + @Deprecated public RetryDecision onUnavailable( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -190,6 +193,7 @@ public RetryDecision onUnavailable( * (assuming a driver bug) in all other cases. */ @Override + @Deprecated public RetryDecision onRequestAborted( @NonNull Request request, @NonNull Throwable error, int retryCount) { @@ -212,6 +216,7 @@ public RetryDecision onRequestAborted( * node. */ @Override + @Deprecated public RetryDecision onErrorResponse( @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java index 4241c091515..3f6af732f11 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerRetryTest.java @@ -24,6 +24,7 @@ import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.when; import com.datastax.dse.driver.DseTestFixtures; import com.datastax.dse.driver.api.core.DseProtocolVersion; @@ -38,8 +39,8 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.DefaultWriteType; import 
com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; @@ -170,8 +171,8 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( harnessBuilder.withResponse(node2, defaultFrameOf(DseTestFixtures.singleDseRow())); try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_NEXT); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETRY_NEXT); ContinuousCqlRequestHandler handler = new ContinuousCqlRequestHandler( @@ -225,8 +226,8 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( harnessBuilder.withResponse(node1, defaultFrameOf(DseTestFixtures.singleDseRow())); try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_SAME); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETRY_SAME); ContinuousCqlRequestHandler handler = new ContinuousCqlRequestHandler( @@ -279,8 +280,8 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( failureScenario.mockRequestError(harnessBuilder, node1); try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.IGNORE); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.IGNORE); ContinuousCqlRequestHandler handler = new ContinuousCqlRequestHandler( @@ -332,8 +333,8 @@ public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + 
failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETHROW); ContinuousCqlRequestHandler handler = new ContinuousCqlRequestHandler( @@ -384,8 +385,8 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re try (RequestHandlerTestHarness harness = harnessBuilder.build()) { if (shouldCallRetryPolicy) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETHROW); } ContinuousCqlRequestHandler handler = @@ -441,7 +442,7 @@ private abstract static class FailureScenario { abstract void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node); - abstract void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision); + abstract void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict); } @DataProvider @@ -462,16 +463,15 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - Mockito.when( - policy.onReadTimeout( - any(SimpleStatement.class), - eq(DefaultConsistencyLevel.LOCAL_ONE), - eq(2), - eq(1), - eq(true), - eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onReadTimeoutVerdict( + any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(2), + eq(1), + eq(true), + eq(0))) + .thenReturn(verdict); } }, new FailureScenario( @@ -493,16 +493,15 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - Mockito.when( - policy.onWriteTimeout( - any(SimpleStatement.class), - eq(DefaultConsistencyLevel.LOCAL_ONE), - eq(DefaultWriteType.SIMPLE), - eq(2), 
- eq(1), - eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onWriteTimeoutVerdict( + any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(DefaultWriteType.SIMPLE), + eq(2), + eq(1), + eq(0))) + .thenReturn(verdict); } }, new FailureScenario( @@ -520,15 +519,14 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - Mockito.when( - policy.onUnavailable( - any(SimpleStatement.class), - eq(DefaultConsistencyLevel.LOCAL_ONE), - eq(2), - eq(1), - eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onUnavailableVerdict( + any(SimpleStatement.class), + eq(DefaultConsistencyLevel.LOCAL_ONE), + eq(2), + eq(1), + eq(0))) + .thenReturn(verdict); } }, new FailureScenario( @@ -545,11 +543,10 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - Mockito.when( - policy.onErrorResponse( - any(SimpleStatement.class), any(ServerError.class), eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onErrorResponseVerdict( + any(SimpleStatement.class), any(ServerError.class), eq(0))) + .thenReturn(verdict); } }, new FailureScenario( @@ -563,11 +560,10 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - Mockito.when( - policy.onRequestAborted( - any(SimpleStatement.class), any(HeartbeatException.class), eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onRequestAbortedVerdict( + 
any(SimpleStatement.class), any(HeartbeatException.class), eq(0))) + .thenReturn(verdict); } }); } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index 63935dc7afb..a5d0c5934d8 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -85,7 +85,7 @@ public void should_return_paged_results(GraphProtocol graphProtocol) throws IOEx try (RequestHandlerTestHarness harness = builder.build()) { - GraphStatement graphStatement = + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName(profileName); ContinuousGraphRequestHandler handler = @@ -158,7 +158,7 @@ public void should_honor_default_timeout() throws Exception { when(profile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) .thenReturn(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); - GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); // when ContinuousGraphRequestHandler handler = @@ -207,7 +207,7 @@ public void should_honor_statement_timeout() throws Exception { when(profile.getString(DseDriverOption.GRAPH_SUB_PROTOCOL)) .thenReturn(GraphProtocol.GRAPH_BINARY_1_0.toInternalCode()); - GraphStatement graphStatement = + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setTimeout(statementTimeout); // when @@ -249,10 +249,7 @@ private void validateMetrics(String profileName, RequestHandlerTestHarness harne verify(harness.getSession().getMetricUpdater()) .updateTimer( - eq(DseSessionMetric.GRAPH_REQUESTS), - eq(profileName), - anyLong(), - eq(TimeUnit.NANOSECONDS)); + eq(DseSessionMetric.GRAPH_REQUESTS), 
eq(null), anyLong(), eq(TimeUnit.NANOSECONDS)); verifyNoMoreInteractions(harness.getSession().getMetricUpdater()); } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java index 78c227816e9..e24c61e6585 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java @@ -40,27 +40,34 @@ protected RetryPolicyTestBase(RetryPolicy policy) { protected Assert assertOnReadTimeout( ConsistencyLevel cl, int blockFor, int received, boolean dataPresent, int retryCount) { return assertThat( - policy.onReadTimeout(request, cl, blockFor, received, dataPresent, retryCount)); + policy + .onReadTimeoutVerdict(request, cl, blockFor, received, dataPresent, retryCount) + .getRetryDecision()); } protected Assert assertOnWriteTimeout( ConsistencyLevel cl, WriteType writeType, int blockFor, int received, int retryCount) { return assertThat( - policy.onWriteTimeout(request, cl, writeType, blockFor, received, retryCount)); + policy + .onWriteTimeoutVerdict(request, cl, writeType, blockFor, received, retryCount) + .getRetryDecision()); } protected Assert assertOnUnavailable( ConsistencyLevel cl, int required, int alive, int retryCount) { - return assertThat(policy.onUnavailable(request, cl, required, alive, retryCount)); + return assertThat( + policy.onUnavailableVerdict(request, cl, required, alive, retryCount).getRetryDecision()); } protected Assert assertOnRequestAborted( Class errorClass, int retryCount) { - return assertThat(policy.onRequestAborted(request, mock(errorClass), retryCount)); + return assertThat( + policy.onRequestAbortedVerdict(request, mock(errorClass), retryCount).getRetryDecision()); } protected Assert assertOnErrorResponse( Class errorClass, int retryCount) { - return assertThat(policy.onErrorResponse(request, 
mock(errorClass), retryCount)); + return assertThat( + policy.onErrorResponseVerdict(request, mock(errorClass), retryCount).getRetryDecision()); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java index 3e0503bb1e0..0bafdb41305 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java @@ -31,8 +31,8 @@ import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.OverloadedException; import com.datastax.oss.driver.internal.core.channel.ResponseCallback; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -177,8 +177,8 @@ public void should_retry_initial_prepare_if_recoverable_error() { when(harness .getContext() .getRetryPolicy(anyString()) - .onErrorResponse(eq(PREPARE_REQUEST), any(OverloadedException.class), eq(0))) - .thenReturn(RetryDecision.RETRY_NEXT); + .onErrorResponseVerdict(eq(PREPARE_REQUEST), any(OverloadedException.class), eq(0))) + .thenReturn(RetryVerdict.RETRY_NEXT); CompletionStage prepareFuture = new CqlPrepareHandler(PREPARE_REQUEST, harness.getSession(), harness.getContext(), "test") @@ -210,8 +210,8 @@ public void should_not_retry_initial_prepare_if_unrecoverable_error() { when(harness .getContext() .getRetryPolicy(anyString()) - .onErrorResponse(eq(PREPARE_REQUEST), any(OverloadedException.class), eq(0))) - .thenReturn(RetryDecision.RETHROW); + .onErrorResponseVerdict(eq(PREPARE_REQUEST), 
any(OverloadedException.class), eq(0))) + .thenReturn(RetryVerdict.RETHROW); CompletionStage prepareFuture = new CqlPrepareHandler(PREPARE_REQUEST, harness.getSession(), harness.getContext(), "test") @@ -243,9 +243,9 @@ public void should_fail_if_retry_policy_ignores_error() { // Make node1's error unrecoverable, will rethrow RetryPolicy mockRetryPolicy = harness.getContext().getRetryPolicy(DriverExecutionProfile.DEFAULT_NAME); - when(mockRetryPolicy.onErrorResponse( + when(mockRetryPolicy.onErrorResponseVerdict( eq(PREPARE_REQUEST), any(OverloadedException.class), eq(0))) - .thenReturn(RetryDecision.IGNORE); + .thenReturn(RetryVerdict.IGNORE); CompletionStage prepareFuture = new CqlPrepareHandler(PREPARE_REQUEST, harness.getSession(), harness.getContext(), "test") diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java index ef04f814e94..06710e4c4c7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java @@ -37,8 +37,8 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.retry.RetryDecision; import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; import com.datastax.oss.driver.api.core.servererrors.DefaultWriteType; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; @@ -152,8 +152,8 @@ public void should_try_next_node_if_idempotent_and_retry_policy_decides_so( harnessBuilder.withResponse(node2, defaultFrameOf(singleRow())); try 
(RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_NEXT); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETRY_NEXT); CompletionStage resultSetFuture = new CqlRequestHandler(statement, harness.getSession(), harness.getContext(), "test") @@ -203,8 +203,8 @@ public void should_try_same_node_if_idempotent_and_retry_policy_decides_so( harnessBuilder.withResponse(node1, defaultFrameOf(singleRow())); try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETRY_SAME); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETRY_SAME); CompletionStage resultSetFuture = new CqlRequestHandler(statement, harness.getSession(), harness.getContext(), "test") @@ -253,8 +253,8 @@ public void should_ignore_error_if_idempotent_and_retry_policy_decides_so( failureScenario.mockRequestError(harnessBuilder, node1); try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.IGNORE); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.IGNORE); CompletionStage resultSetFuture = new CqlRequestHandler(statement, harness.getSession(), harness.getContext(), "test") @@ -302,8 +302,8 @@ public void should_rethrow_error_if_idempotent_and_retry_policy_decides_so( try (RequestHandlerTestHarness harness = harnessBuilder.build()) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETHROW); CompletionStage 
resultSetFuture = new CqlRequestHandler(statement, harness.getSession(), harness.getContext(), "test") @@ -349,8 +349,8 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re try (RequestHandlerTestHarness harness = harnessBuilder.build()) { if (shouldCallRetryPolicy) { - failureScenario.mockRetryPolicyDecision( - harness.getContext().getRetryPolicy(anyString()), RetryDecision.RETHROW); + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETHROW); } CompletionStage resultSetFuture = @@ -405,7 +405,7 @@ protected FailureScenario( abstract void mockRequestError(RequestHandlerTestHarness.Builder builder, Node node); - abstract void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision); + abstract void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict); } @DataProvider @@ -426,15 +426,15 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onReadTimeout( + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onReadTimeoutVerdict( any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(2), eq(1), eq(true), eq(0))) - .thenReturn(decision); + .thenReturn(verdict); } }, new FailureScenario( @@ -456,15 +456,15 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onWriteTimeout( + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onWriteTimeoutVerdict( any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(DefaultWriteType.SIMPLE), eq(2), eq(1), eq(0))) - .thenReturn(decision); + .thenReturn(verdict); } }, new FailureScenario( @@ -482,14 +482,14 @@ public void 
mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onUnavailable( + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onUnavailableVerdict( any(Statement.class), eq(DefaultConsistencyLevel.LOCAL_ONE), eq(2), eq(1), eq(0))) - .thenReturn(decision); + .thenReturn(verdict); } }, new FailureScenario( @@ -506,9 +506,9 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onErrorResponse(any(Statement.class), any(ServerError.class), eq(0))) - .thenReturn(decision); + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onErrorResponseVerdict(any(Statement.class), any(ServerError.class), eq(0))) + .thenReturn(verdict); } }, new FailureScenario( @@ -522,10 +522,10 @@ public void mockRequestError(RequestHandlerTestHarness.Builder builder, Node nod } @Override - public void mockRetryPolicyDecision(RetryPolicy policy, RetryDecision decision) { - when(policy.onRequestAborted( + public void mockRetryPolicyVerdict(RetryPolicy policy, RetryVerdict verdict) { + when(policy.onRequestAbortedVerdict( any(Statement.class), any(HeartbeatException.class), eq(0))) - .thenReturn(decision); + .thenReturn(verdict); } }); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java index dfab751cb1a..a4453401d93 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/AllNodesFailedIT.java @@ -86,6 +86,7 @@ public MultipleRetryPolicy(DriverContext context, String profileName) { } @Override + @Deprecated public RetryDecision 
onReadTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java index 1dbb70602c0..859d6c567ad 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/MapBasedConfigLoaderIT.java @@ -149,6 +149,7 @@ public IgnoreAllPolicy( } @Override + @Deprecated public RetryDecision onReadTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -160,6 +161,7 @@ public RetryDecision onReadTimeout( } @Override + @Deprecated public RetryDecision onWriteTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -171,6 +173,7 @@ public RetryDecision onWriteTimeout( } @Override + @Deprecated public RetryDecision onUnavailable( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -181,12 +184,14 @@ public RetryDecision onUnavailable( } @Override + @Deprecated public RetryDecision onRequestAborted( @NonNull Request request, @NonNull Throwable error, int retryCount) { return RetryDecision.IGNORE; } @Override + @Deprecated public RetryDecision onErrorResponse( @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { return RetryDecision.IGNORE; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java index f8c5fb44d6f..c4f39711687 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java @@ -122,6 +122,7 @@ public AlwaysRetryAbortedPolicy(DriverContext context, String profileName) { } @Override + @Deprecated public 
RetryDecision onRequestAborted( @NonNull Request request, @NonNull Throwable error, int retryCount) { return RetryDecision.RETRY_NEXT; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java index 0b851f4b7f6..2e7665dcc6f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java @@ -149,9 +149,11 @@ public void should_use_policy_from_config_when_not_configured_in_request_profile // A policy that simply rethrows always. public static class NoRetryPolicy implements RetryPolicy { + @SuppressWarnings("unused") public NoRetryPolicy(DriverContext context, String profileName) {} @Override + @Deprecated public RetryDecision onReadTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -163,6 +165,7 @@ public RetryDecision onReadTimeout( } @Override + @Deprecated public RetryDecision onWriteTimeout( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -174,6 +177,7 @@ public RetryDecision onWriteTimeout( } @Override + @Deprecated public RetryDecision onUnavailable( @NonNull Request request, @NonNull ConsistencyLevel cl, @@ -184,12 +188,14 @@ public RetryDecision onUnavailable( } @Override + @Deprecated public RetryDecision onRequestAborted( @NonNull Request request, @NonNull Throwable error, int retryCount) { return RetryDecision.RETHROW; } @Override + @Deprecated public RetryDecision onErrorResponse( @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { return RetryDecision.RETHROW; diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 6b1536ad12c..51c0917f018 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -2,6 +2,30 @@ ### 4.10.0 +#### New `RetryVerdict` API + 
+[JAVA-2900](https://datastax-oss.atlassian.net/browse/JAVA-2900) introduced [`RetryVerdict`], a new +interface that allows custom retry policies to customize the request before it is retried. + +For this reason, the following methods in the `RetryPolicy` interface were added; they all return +a `RetryVerdict` instance: + +1. [`onReadTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) +2. [`onWriteTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) +3. [`onUnavailableVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailableVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) +4. [`onRequestAbortedVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAbortedVerdict-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) +5. [`onErrorResponseVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponseVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) + +The following methods were deprecated: + +1. 
[`onReadTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) +2. [`onWriteTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) +3. [`onUnavailable`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailable-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) +4. [`onRequestAborted`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAborted-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) +5. 
[`onErrorResponse`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponse-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) + +[`RetryVerdict`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html + #### Enhancements to the `Uuids` utility class [JAVA-2449](https://datastax-oss.atlassian.net/browse/JAVA-2449) modified the implementation of From d1b782e4509d818e035737564def296748eedeb2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 30 Nov 2020 14:28:50 +0100 Subject: [PATCH 620/979] JAVA-2900: Re-introduce consistency downgrading retries --- changelog/README.md | 1 + .../driver/api/core/retry/RetryVerdict.java | 9 +- .../ConsistencyDowngradingRetryPolicy.java | 374 +++++ .../ConsistencyDowngradingRetryVerdict.java | 55 + .../core/retry/DefaultRetryPolicy.java | 4 +- .../core/retry/DefaultRetryVerdict.java | 40 + core/src/main/resources/reference.conf | 8 +- ...ConsistencyDowngradingRetryPolicyTest.java | 146 ++ .../core/retry/DefaultRetryPolicyTest.java | 52 +- .../api/core/retry/RetryPolicyTestBase.java | 56 +- .../examples/retry/DowngradingRetry.java | 12 +- faq/README.md | 45 +- .../ConsistencyDowngradingRetryPolicyIT.java | 1326 +++++++++++++++++ manual/core/retries/README.md | 128 +- upgrade_guide/README.md | 9 +- 15 files changed, 2153 insertions(+), 112 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryPolicy.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryVerdict.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryVerdict.java create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/retry/ConsistencyDowngradingRetryPolicyTest.java create mode 100644 
integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java diff --git a/changelog/README.md b/changelog/README.md index 86b2d3d04d3..8f740eb8870 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [new feature] JAVA-2900: Re-introduce consistency downgrading retries - [new feature] JAVA-2903: BlockHound integration - [improvement] JAVA-2877: Allow skipping validation for individual mapped entities - [improvement] JAVA-2871: Allow keyspace exclusions in the metadata, and exclude system keyspaces diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java index 735019aa80f..9a430e75445 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/retry/RetryVerdict.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.core.retry; import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.internal.core.retry.DefaultRetryVerdict; import edu.umd.cs.findbugs.annotations.NonNull; /** @@ -27,16 +28,16 @@ public interface RetryVerdict { /** A retry verdict that retries the same request on the same node. */ - RetryVerdict RETRY_SAME = () -> RetryDecision.RETRY_SAME; + RetryVerdict RETRY_SAME = new DefaultRetryVerdict(RetryDecision.RETRY_SAME); /** A retry verdict that retries the same request on the next node in the query plan. */ - RetryVerdict RETRY_NEXT = () -> RetryDecision.RETRY_NEXT; + RetryVerdict RETRY_NEXT = new DefaultRetryVerdict(RetryDecision.RETRY_NEXT); /** A retry verdict that ignores the error, returning and empty result set to the caller. */ - RetryVerdict IGNORE = () -> RetryDecision.IGNORE; + RetryVerdict IGNORE = new DefaultRetryVerdict(RetryDecision.IGNORE); /** A retry verdict that rethrows the execution error to the calling code. 
*/ - RetryVerdict RETHROW = () -> RetryDecision.RETHROW; + RetryVerdict RETHROW = new DefaultRetryVerdict(RetryDecision.RETHROW); /** @return The retry decision to apply. */ @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryPolicy.java new file mode 100644 index 00000000000..6364bb38592 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryPolicy.java @@ -0,0 +1,374 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.retry; + +import static com.datastax.oss.driver.api.core.servererrors.WriteType.BATCH; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.BATCH_LOG; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.SIMPLE; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.UNLOGGED_BATCH; + +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.connection.ClosedConnectionException; +import com.datastax.oss.driver.api.core.connection.HeartbeatException; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.ReadFailureException; +import com.datastax.oss.driver.api.core.servererrors.WriteFailureException; +import com.datastax.oss.driver.api.core.servererrors.WriteType; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A retry policy that sometimes retries with a lower consistency level than the one initially + * requested. + * + *

          BEWARE: this policy may retry queries using a lower consistency level than the one + * initially requested. By doing so, it may break consistency guarantees. In other words, if you use + * this retry policy, there are cases (documented below) where a read at {@code QUORUM} may + * not see a preceding write at {@code QUORUM}. Furthermore, this policy doesn't always respect + * datacenter locality; for example, it may downgrade {@code LOCAL_QUORUM} to {@code ONE}, and thus + * could accidentally send a write that was intended for the local datacenter to another + * datacenter. Do not use this policy unless you have understood the cases where this can happen and + * are ok with that. + * + *

          This policy implements the same retries as the {@link DefaultRetryPolicy} policy. But on top + * of that, it also retries in the following cases: + * + *

            + *
          • On a read timeout: if the number of replicas that responded is greater than one, but lower + * than is required by the requested consistency level, the operation is retried at a lower + * consistency level. + *
          • On a write timeout: if the operation is a {@code WriteType.UNLOGGED_BATCH} and at least one + * replica acknowledged the write, the operation is retried at a lower consistency level. + * Furthermore, for other operations, if at least one replica acknowledged the write, the + * timeout is ignored. + *
          • On an unavailable exception: if at least one replica is alive, the operation is retried at + * a lower consistency level. + *
          + * + * The lower consistency level to use for retries is determined by the following rules: + * + *
            + *
          • if more than 3 replicas responded, use {@code THREE}. + *
          • if 1, 2 or 3 replicas responded, use the corresponding level {@code ONE}, {@code TWO} or + * {@code THREE}. + *
          + * + * Note that if the initial consistency level was {@code EACH_QUORUM}, Cassandra returns the number + * of live replicas in the datacenter that failed to reach consistency, not the overall + * number in the cluster. Therefore if this number is 0, we still retry at {@code ONE}, on the + * assumption that a host may still be up in another datacenter. + * + *

          The reasoning behind this retry policy is the following one. If, based on the information the + * Cassandra coordinator node returns, retrying the operation with the initially requested + * consistency has a chance to succeed, do it. Otherwise, if based on this information, we know that + * the initially requested consistency level cannot be achieved currently, then: + * + *

            + *
          • For writes, ignore the exception (thus silently failing the consistency requirement) if we + * know the write has been persisted on at least one replica. + *
          • For reads, try reading at a lower consistency level (thus silently failing the consistency + * requirement). + *
          + * + * In other words, this policy implements the idea that if the requested consistency level cannot be + * achieved, the next best thing for writes is to make sure the data is persisted, and that reading + * something is better than reading nothing, even if there is a risk of reading stale data. + */ +public class ConsistencyDowngradingRetryPolicy implements RetryPolicy { + + private static final Logger LOG = + LoggerFactory.getLogger(ConsistencyDowngradingRetryPolicy.class); + + @VisibleForTesting + public static final String VERDICT_ON_READ_TIMEOUT = + "[{}] Verdict on read timeout (consistency: {}, required responses: {}, " + + "received responses: {}, data retrieved: {}, retries: {}): {}"; + + @VisibleForTesting + public static final String VERDICT_ON_WRITE_TIMEOUT = + "[{}] Verdict on write timeout (consistency: {}, write type: {}, " + + "required acknowledgments: {}, received acknowledgments: {}, retries: {}): {}"; + + @VisibleForTesting + public static final String VERDICT_ON_UNAVAILABLE = + "[{}] Verdict on unavailable exception (consistency: {}, " + + "required replica: {}, alive replica: {}, retries: {}): {}"; + + @VisibleForTesting + public static final String VERDICT_ON_ABORTED = + "[{}] Verdict on aborted request (type: {}, message: '{}', retries: {}): {}"; + + @VisibleForTesting + public static final String VERDICT_ON_ERROR = + "[{}] Verdict on node error (type: {}, message: '{}', retries: {}): {}"; + + private final String logPrefix; + + @SuppressWarnings("unused") + public ConsistencyDowngradingRetryPolicy( + @NonNull DriverContext context, @NonNull String profileName) { + this(context.getSessionName() + "|" + profileName); + } + + public ConsistencyDowngradingRetryPolicy(@NonNull String logPrefix) { + this.logPrefix = logPrefix; + } + + /** + * {@inheritDoc} + * + *

          This implementation triggers a maximum of one retry. If less replicas responded than + * required by the consistency level (but at least one replica did respond), the operation is + * retried at a lower consistency level. If enough replicas responded but data was not retrieved, + * the operation is retried with the initial consistency level. Otherwise, an exception is thrown. + */ + @Override + public RetryVerdict onReadTimeoutVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int blockFor, + int received, + boolean dataPresent, + int retryCount) { + RetryVerdict verdict; + if (retryCount != 0) { + verdict = RetryVerdict.RETHROW; + } else if (cl.isSerial()) { + // CAS reads are not all that useful in terms of visibility of the writes since CAS write + // supports the normal consistency levels on the committing phase. So the main use case for + // CAS reads is probably for when you've timed out on a CAS write and want to make sure what + // happened. Downgrading in that case would be always wrong so we just special-case to + // rethrow. + verdict = RetryVerdict.RETHROW; + } else if (received < blockFor) { + verdict = maybeDowngrade(received, cl); + } else if (!dataPresent) { + // Retry with same CL since this usually means that enough replica are alive to satisfy the + // consistency but the coordinator picked a dead one for data retrieval, not having detected + // that replica as dead yet. + verdict = RetryVerdict.RETRY_SAME; + } else { + // This usually means a digest mismatch, in which case it's pointless to retry since + // the inconsistency has to be repaired first. + verdict = RetryVerdict.RETHROW; + } + if (LOG.isTraceEnabled()) { + LOG.trace( + VERDICT_ON_READ_TIMEOUT, + logPrefix, + cl, + blockFor, + received, + dataPresent, + retryCount, + verdict); + } + return verdict; + } + + /** + * {@inheritDoc} + * + *

          This implementation triggers a maximum of one retry. If {@code writeType == + * WriteType.BATCH_LOG}, the write is retried with the initial consistency level. If {@code + * writeType == WriteType.UNLOGGED_BATCH} and at least one replica acknowledged, the write is + * retried with a lower consistency level (with unlogged batch, a write timeout can always + * mean that part of the batch haven't been persisted at all, even if {@code receivedAcks > 0}). + * For other write types ({@code WriteType.SIMPLE} and {@code WriteType.BATCH}), if we know the + * write has been persisted on at least one replica, we ignore the exception. Otherwise, an + * exception is thrown. + */ + @Override + public RetryVerdict onWriteTimeoutVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + @NonNull WriteType writeType, + int blockFor, + int received, + int retryCount) { + RetryVerdict verdict; + if (retryCount != 0) { + verdict = RetryVerdict.RETHROW; + } else if (SIMPLE.equals(writeType) || BATCH.equals(writeType)) { + // Since we provide atomicity, if at least one replica acknowledged the write, + // there is no point in retrying + verdict = received > 0 ? RetryVerdict.IGNORE : RetryVerdict.RETHROW; + } else if (UNLOGGED_BATCH.equals(writeType)) { + // Since only part of the batch could have been persisted, + // retry with whatever consistency should allow to persist all + verdict = maybeDowngrade(received, cl); + } else if (BATCH_LOG.equals(writeType)) { + verdict = RetryVerdict.RETRY_SAME; + } else { + verdict = RetryVerdict.RETHROW; + } + if (LOG.isTraceEnabled()) { + LOG.trace( + VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + cl, + writeType, + blockFor, + received, + retryCount, + verdict); + } + return verdict; + } + + /** + * {@inheritDoc} + * + *

          This implementation triggers a maximum of one retry. If at least one replica is known to be + * alive, the operation is retried at a lower consistency level. + */ + @Override + public RetryVerdict onUnavailableVerdict( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int required, + int alive, + int retryCount) { + RetryVerdict verdict; + if (retryCount != 0) { + verdict = RetryVerdict.RETHROW; + } else if (cl.isSerial()) { + // JAVA-764: if the requested consistency level is serial, it means that the + // operation failed at the paxos phase of a LWT. + // Retry on the next host, on the assumption that the initial coordinator could be + // network-isolated. + verdict = RetryVerdict.RETRY_NEXT; + } else { + verdict = maybeDowngrade(alive, cl); + } + if (LOG.isTraceEnabled()) { + LOG.trace(VERDICT_ON_UNAVAILABLE, logPrefix, cl, required, alive, retryCount, verdict); + } + return verdict; + } + + @Override + public RetryVerdict onRequestAbortedVerdict( + @NonNull Request request, @NonNull Throwable error, int retryCount) { + RetryVerdict verdict = + error instanceof ClosedConnectionException || error instanceof HeartbeatException + ? RetryVerdict.RETRY_NEXT + : RetryVerdict.RETHROW; + if (LOG.isTraceEnabled()) { + LOG.trace( + VERDICT_ON_ABORTED, + logPrefix, + error.getClass().getSimpleName(), + error.getMessage(), + retryCount, + verdict); + } + return verdict; + } + + @Override + public RetryVerdict onErrorResponseVerdict( + @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { + RetryVerdict verdict = + error instanceof WriteFailureException || error instanceof ReadFailureException + ? 
RetryVerdict.RETHROW + : RetryVerdict.RETRY_NEXT; + if (LOG.isTraceEnabled()) { + LOG.trace( + VERDICT_ON_ERROR, + logPrefix, + error.getClass().getSimpleName(), + error.getMessage(), + retryCount, + verdict); + } + return verdict; + } + + @Override + @Deprecated + public RetryDecision onReadTimeout( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int blockFor, + int received, + boolean dataPresent, + int retryCount) { + throw new UnsupportedOperationException("onReadTimeout"); + } + + @Override + @Deprecated + public RetryDecision onWriteTimeout( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + @NonNull WriteType writeType, + int blockFor, + int received, + int retryCount) { + throw new UnsupportedOperationException("onWriteTimeout"); + } + + @Override + @Deprecated + public RetryDecision onUnavailable( + @NonNull Request request, + @NonNull ConsistencyLevel cl, + int required, + int alive, + int retryCount) { + throw new UnsupportedOperationException("onUnavailable"); + } + + @Override + @Deprecated + public RetryDecision onRequestAborted( + @NonNull Request request, @NonNull Throwable error, int retryCount) { + throw new UnsupportedOperationException("onRequestAborted"); + } + + @Override + @Deprecated + public RetryDecision onErrorResponse( + @NonNull Request request, @NonNull CoordinatorException error, int retryCount) { + throw new UnsupportedOperationException("onErrorResponse"); + } + + @Override + public void close() {} + + private RetryVerdict maybeDowngrade(int alive, ConsistencyLevel current) { + if (alive >= 3) { + return new ConsistencyDowngradingRetryVerdict(ConsistencyLevel.THREE); + } + if (alive == 2) { + return new ConsistencyDowngradingRetryVerdict(ConsistencyLevel.TWO); + } + // JAVA-1005: EACH_QUORUM does not report a global number of alive replicas + // so even if we get 0 alive replicas, there might be a node up in some other datacenter + if (alive == 1 || current.getProtocolCode() == 
ConsistencyLevel.EACH_QUORUM.getProtocolCode()) { + return new ConsistencyDowngradingRetryVerdict(ConsistencyLevel.ONE); + } + return RetryVerdict.RETHROW; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryVerdict.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryVerdict.java new file mode 100644 index 00000000000..cdb3cff4a6a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/ConsistencyDowngradingRetryVerdict.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.retry; + +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; +import com.datastax.oss.driver.api.core.session.Request; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class ConsistencyDowngradingRetryVerdict implements RetryVerdict { + + private final ConsistencyLevel consistencyLevel; + + public ConsistencyDowngradingRetryVerdict(@NonNull ConsistencyLevel consistencyLevel) { + this.consistencyLevel = consistencyLevel; + } + + @NonNull + @Override + public RetryDecision getRetryDecision() { + return RetryDecision.RETRY_SAME; + } + + @NonNull + @Override + public RequestT getRetryRequest(@NonNull RequestT previous) { + if (previous instanceof Statement) { + Statement statement = (Statement) previous; + @SuppressWarnings("unchecked") + RequestT toRetry = (RequestT) statement.setConsistencyLevel(consistencyLevel); + return toRetry; + } + return previous; + } + + @Override + public String toString() { + return getRetryDecision() + " at consistency " + consistencyLevel; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java index b9653bc158a..790dbf2b0aa 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryPolicy.java @@ -83,9 +83,7 @@ public class DefaultRetryPolicy implements RetryPolicy { private final String logPrefix; - public DefaultRetryPolicy( - @SuppressWarnings("unused") DriverContext context, - @SuppressWarnings("unused") String profileName) { + public DefaultRetryPolicy(DriverContext context, String profileName) { this.logPrefix = (context != null ? 
context.getSessionName() : null) + "|" + profileName; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryVerdict.java b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryVerdict.java new file mode 100644 index 00000000000..e813b89725a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/retry/DefaultRetryVerdict.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.retry; + +import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class DefaultRetryVerdict implements RetryVerdict { + + private final RetryDecision decision; + + public DefaultRetryVerdict(@NonNull RetryDecision decision) { + this.decision = decision; + } + + @NonNull + @Override + public RetryDecision getRetryDecision() { + return decision; + } + + @Override + public String toString() { + return getRetryDecision().name(); + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 3ff88c024b0..e4090ae7481 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -516,7 +516,13 @@ datastax-java-driver { # The class of the policy. 
If it is not qualified, the driver assumes that it resides in the # package com.datastax.oss.driver.internal.core.retry. # - # The driver provides a single implementation out of the box: DefaultRetryPolicy. + # The driver provides two implementations out of the box: + # + # - DefaultRetryPolicy: the default policy, should almost always be the right choice. + # - ConsistencyDowngradingRetryPolicy: an alternative policy that weakens consistency guarantees + # as a trade-off to maximize the chance of success when retrying. Use with caution. + # + # Refer to the manual to understand how these policies work. # # You can also specify a custom class that implements RetryPolicy and has a public constructor # with two arguments: the DriverContext and a String representing the profile name. diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/retry/ConsistencyDowngradingRetryPolicyTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/retry/ConsistencyDowngradingRetryPolicyTest.java new file mode 100644 index 00000000000..da93084804c --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/retry/ConsistencyDowngradingRetryPolicyTest.java @@ -0,0 +1,146 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.retry; + +import static com.datastax.oss.driver.api.core.ConsistencyLevel.EACH_QUORUM; +import static com.datastax.oss.driver.api.core.ConsistencyLevel.ONE; +import static com.datastax.oss.driver.api.core.ConsistencyLevel.QUORUM; +import static com.datastax.oss.driver.api.core.ConsistencyLevel.SERIAL; +import static com.datastax.oss.driver.api.core.ConsistencyLevel.THREE; +import static com.datastax.oss.driver.api.core.ConsistencyLevel.TWO; +import static com.datastax.oss.driver.api.core.retry.RetryDecision.IGNORE; +import static com.datastax.oss.driver.api.core.retry.RetryDecision.RETHROW; +import static com.datastax.oss.driver.api.core.retry.RetryDecision.RETRY_NEXT; +import static com.datastax.oss.driver.api.core.retry.RetryDecision.RETRY_SAME; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.BATCH; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.BATCH_LOG; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.CAS; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.CDC; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.COUNTER; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.SIMPLE; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.UNLOGGED_BATCH; +import static com.datastax.oss.driver.api.core.servererrors.WriteType.VIEW; + +import com.datastax.oss.driver.api.core.connection.ClosedConnectionException; +import com.datastax.oss.driver.api.core.connection.HeartbeatException; +import com.datastax.oss.driver.api.core.servererrors.OverloadedException; +import com.datastax.oss.driver.api.core.servererrors.ReadFailureException; +import com.datastax.oss.driver.api.core.servererrors.ServerError; +import com.datastax.oss.driver.api.core.servererrors.TruncateException; +import com.datastax.oss.driver.api.core.servererrors.WriteFailureException; +import 
com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryPolicy; +import org.junit.Test; + +public class ConsistencyDowngradingRetryPolicyTest extends RetryPolicyTestBase { + + public ConsistencyDowngradingRetryPolicyTest() { + super(new ConsistencyDowngradingRetryPolicy("test")); + } + + @Test + public void should_process_read_timeouts() { + // retry count != 0 + assertOnReadTimeout(QUORUM, 2, 2, false, 1).hasDecision(RETHROW); + // serial CL + assertOnReadTimeout(SERIAL, 2, 2, false, 0).hasDecision(RETHROW); + // received < blockFor + assertOnReadTimeout(QUORUM, 4, 3, true, 0).hasDecision(RETRY_SAME).hasConsistency(THREE); + assertOnReadTimeout(QUORUM, 4, 3, false, 0).hasDecision(RETRY_SAME).hasConsistency(THREE); + assertOnReadTimeout(QUORUM, 3, 2, true, 0).hasDecision(RETRY_SAME).hasConsistency(TWO); + assertOnReadTimeout(QUORUM, 3, 2, false, 0).hasDecision(RETRY_SAME).hasConsistency(TWO); + assertOnReadTimeout(QUORUM, 2, 1, true, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnReadTimeout(QUORUM, 2, 1, false, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnReadTimeout(EACH_QUORUM, 2, 0, true, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnReadTimeout(EACH_QUORUM, 2, 0, false, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnReadTimeout(QUORUM, 2, 0, true, 0).hasDecision(RETHROW); + assertOnReadTimeout(QUORUM, 2, 0, false, 0).hasDecision(RETHROW); + // data present + assertOnReadTimeout(QUORUM, 2, 2, false, 0).hasDecision(RETRY_SAME); + assertOnReadTimeout(QUORUM, 2, 2, true, 0).hasDecision(RETHROW); + } + + @Test + public void should_process_write_timeouts() { + // retry count != 0 + assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 0, 1).hasDecision(RETHROW); + // SIMPLE + assertOnWriteTimeout(QUORUM, SIMPLE, 2, 1, 0).hasDecision(IGNORE); + assertOnWriteTimeout(QUORUM, SIMPLE, 2, 0, 0).hasDecision(RETHROW); + // BATCH + assertOnWriteTimeout(QUORUM, BATCH, 2, 1, 0).hasDecision(IGNORE); + 
assertOnWriteTimeout(QUORUM, BATCH, 2, 0, 0).hasDecision(RETHROW); + // UNLOGGED_BATCH + assertOnWriteTimeout(QUORUM, UNLOGGED_BATCH, 4, 3, 0) + .hasDecision(RETRY_SAME) + .hasConsistency(THREE); + assertOnWriteTimeout(QUORUM, UNLOGGED_BATCH, 3, 2, 0) + .hasDecision(RETRY_SAME) + .hasConsistency(TWO); + assertOnWriteTimeout(QUORUM, UNLOGGED_BATCH, 2, 1, 0) + .hasDecision(RETRY_SAME) + .hasConsistency(ONE); + assertOnWriteTimeout(EACH_QUORUM, UNLOGGED_BATCH, 2, 0, 0) + .hasDecision(RETRY_SAME) + .hasConsistency(ONE); + assertOnWriteTimeout(QUORUM, UNLOGGED_BATCH, 2, 0, 0).hasDecision(RETHROW); + // BATCH_LOG + assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 1, 0).hasDecision(RETRY_SAME); + // others + assertOnWriteTimeout(QUORUM, COUNTER, 2, 1, 0).hasDecision(RETHROW); + assertOnWriteTimeout(QUORUM, CAS, 2, 1, 0).hasDecision(RETHROW); + assertOnWriteTimeout(QUORUM, VIEW, 2, 1, 0).hasDecision(RETHROW); + assertOnWriteTimeout(QUORUM, CDC, 2, 1, 0).hasDecision(RETHROW); + } + + @Test + public void should_process_unavailable() { + // retry count != 0 + assertOnUnavailable(QUORUM, 2, 1, 1).hasDecision(RETHROW); + // SERIAL + assertOnUnavailable(SERIAL, 2, 1, 0).hasDecision(RETRY_NEXT); + // downgrade + assertOnUnavailable(QUORUM, 4, 3, 0).hasDecision(RETRY_SAME).hasConsistency(THREE); + assertOnUnavailable(QUORUM, 3, 2, 0).hasDecision(RETRY_SAME).hasConsistency(TWO); + assertOnUnavailable(QUORUM, 2, 1, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnUnavailable(EACH_QUORUM, 2, 0, 0).hasDecision(RETRY_SAME).hasConsistency(ONE); + assertOnUnavailable(QUORUM, 2, 0, 0).hasDecision(RETHROW); + } + + @Test + public void should_process_aborted_request() { + assertOnRequestAborted(ClosedConnectionException.class, 0).hasDecision(RETRY_NEXT); + assertOnRequestAborted(ClosedConnectionException.class, 1).hasDecision(RETRY_NEXT); + assertOnRequestAborted(HeartbeatException.class, 0).hasDecision(RETRY_NEXT); + assertOnRequestAborted(HeartbeatException.class, 
1).hasDecision(RETRY_NEXT); + assertOnRequestAborted(Throwable.class, 0).hasDecision(RETHROW); + } + + @Test + public void should_process_error_response() { + assertOnErrorResponse(ReadFailureException.class, 0).hasDecision(RETHROW); + assertOnErrorResponse(ReadFailureException.class, 1).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 0).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 1).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 1).hasDecision(RETHROW); + + assertOnErrorResponse(OverloadedException.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(OverloadedException.class, 1).hasDecision(RETRY_NEXT); + assertOnErrorResponse(ServerError.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(ServerError.class, 1).hasDecision(RETRY_NEXT); + assertOnErrorResponse(TruncateException.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(TruncateException.class, 1).hasDecision(RETRY_NEXT); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/retry/DefaultRetryPolicyTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/retry/DefaultRetryPolicyTest.java index dac4dcafe20..6e68f9fd199 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/retry/DefaultRetryPolicyTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/retry/DefaultRetryPolicyTest.java @@ -40,48 +40,48 @@ public DefaultRetryPolicyTest() { @Test public void should_process_read_timeouts() { - assertOnReadTimeout(QUORUM, 2, 2, false, 0).isEqualTo(RETRY_SAME); - assertOnReadTimeout(QUORUM, 2, 2, false, 1).isEqualTo(RETHROW); - assertOnReadTimeout(QUORUM, 2, 2, true, 0).isEqualTo(RETHROW); - assertOnReadTimeout(QUORUM, 2, 1, true, 0).isEqualTo(RETHROW); - assertOnReadTimeout(QUORUM, 2, 1, false, 0).isEqualTo(RETHROW); + assertOnReadTimeout(QUORUM, 2, 2, false, 0).hasDecision(RETRY_SAME); + assertOnReadTimeout(QUORUM, 2, 2, false, 1).hasDecision(RETHROW); + 
assertOnReadTimeout(QUORUM, 2, 2, true, 0).hasDecision(RETHROW); + assertOnReadTimeout(QUORUM, 2, 1, true, 0).hasDecision(RETHROW); + assertOnReadTimeout(QUORUM, 2, 1, false, 0).hasDecision(RETHROW); } @Test public void should_process_write_timeouts() { - assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 0, 0).isEqualTo(RETRY_SAME); - assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 0, 1).isEqualTo(RETHROW); - assertOnWriteTimeout(QUORUM, SIMPLE, 2, 0, 0).isEqualTo(RETHROW); + assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 0, 0).hasDecision(RETRY_SAME); + assertOnWriteTimeout(QUORUM, BATCH_LOG, 2, 0, 1).hasDecision(RETHROW); + assertOnWriteTimeout(QUORUM, SIMPLE, 2, 0, 0).hasDecision(RETHROW); } @Test public void should_process_unavailable() { - assertOnUnavailable(QUORUM, 2, 1, 0).isEqualTo(RETRY_NEXT); - assertOnUnavailable(QUORUM, 2, 1, 1).isEqualTo(RETHROW); + assertOnUnavailable(QUORUM, 2, 1, 0).hasDecision(RETRY_NEXT); + assertOnUnavailable(QUORUM, 2, 1, 1).hasDecision(RETHROW); } @Test public void should_process_aborted_request() { - assertOnRequestAborted(ClosedConnectionException.class, 0).isEqualTo(RETRY_NEXT); - assertOnRequestAborted(ClosedConnectionException.class, 1).isEqualTo(RETRY_NEXT); - assertOnRequestAborted(HeartbeatException.class, 0).isEqualTo(RETRY_NEXT); - assertOnRequestAborted(HeartbeatException.class, 1).isEqualTo(RETRY_NEXT); - assertOnRequestAborted(Throwable.class, 0).isEqualTo(RETHROW); + assertOnRequestAborted(ClosedConnectionException.class, 0).hasDecision(RETRY_NEXT); + assertOnRequestAborted(ClosedConnectionException.class, 1).hasDecision(RETRY_NEXT); + assertOnRequestAborted(HeartbeatException.class, 0).hasDecision(RETRY_NEXT); + assertOnRequestAborted(HeartbeatException.class, 1).hasDecision(RETRY_NEXT); + assertOnRequestAborted(Throwable.class, 0).hasDecision(RETHROW); } @Test public void should_process_error_response() { - assertOnErrorResponse(ReadFailureException.class, 0).isEqualTo(RETHROW); - 
assertOnErrorResponse(ReadFailureException.class, 1).isEqualTo(RETHROW); - assertOnErrorResponse(WriteFailureException.class, 0).isEqualTo(RETHROW); - assertOnErrorResponse(WriteFailureException.class, 1).isEqualTo(RETHROW); - assertOnErrorResponse(WriteFailureException.class, 1).isEqualTo(RETHROW); + assertOnErrorResponse(ReadFailureException.class, 0).hasDecision(RETHROW); + assertOnErrorResponse(ReadFailureException.class, 1).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 0).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 1).hasDecision(RETHROW); + assertOnErrorResponse(WriteFailureException.class, 1).hasDecision(RETHROW); - assertOnErrorResponse(OverloadedException.class, 0).isEqualTo(RETRY_NEXT); - assertOnErrorResponse(OverloadedException.class, 1).isEqualTo(RETRY_NEXT); - assertOnErrorResponse(ServerError.class, 0).isEqualTo(RETRY_NEXT); - assertOnErrorResponse(ServerError.class, 1).isEqualTo(RETRY_NEXT); - assertOnErrorResponse(TruncateException.class, 0).isEqualTo(RETRY_NEXT); - assertOnErrorResponse(TruncateException.class, 1).isEqualTo(RETRY_NEXT); + assertOnErrorResponse(OverloadedException.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(OverloadedException.class, 1).hasDecision(RETRY_NEXT); + assertOnErrorResponse(ServerError.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(ServerError.class, 1).hasDecision(RETRY_NEXT); + assertOnErrorResponse(TruncateException.class, 0).hasDecision(RETRY_NEXT); + assertOnErrorResponse(TruncateException.class, 1).hasDecision(RETRY_NEXT); } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java index e24c61e6585..3da2a7cc1b0 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/retry/RetryPolicyTestBase.java @@ -22,7 
+22,8 @@ import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; import com.datastax.oss.driver.api.core.servererrors.WriteType; import com.datastax.oss.driver.api.core.session.Request; -import org.assertj.core.api.Assert; +import com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryVerdict; +import org.assertj.core.api.AbstractAssert; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @@ -37,37 +38,52 @@ protected RetryPolicyTestBase(RetryPolicy policy) { this.policy = policy; } - protected Assert assertOnReadTimeout( + protected RetryVerdictAssert assertOnReadTimeout( ConsistencyLevel cl, int blockFor, int received, boolean dataPresent, int retryCount) { - return assertThat( - policy - .onReadTimeoutVerdict(request, cl, blockFor, received, dataPresent, retryCount) - .getRetryDecision()); + return new RetryVerdictAssert( + policy.onReadTimeoutVerdict(request, cl, blockFor, received, dataPresent, retryCount)); } - protected Assert assertOnWriteTimeout( + protected RetryVerdictAssert assertOnWriteTimeout( ConsistencyLevel cl, WriteType writeType, int blockFor, int received, int retryCount) { - return assertThat( - policy - .onWriteTimeoutVerdict(request, cl, writeType, blockFor, received, retryCount) - .getRetryDecision()); + return new RetryVerdictAssert( + policy.onWriteTimeoutVerdict(request, cl, writeType, blockFor, received, retryCount)); } - protected Assert assertOnUnavailable( + protected RetryVerdictAssert assertOnUnavailable( ConsistencyLevel cl, int required, int alive, int retryCount) { - return assertThat( - policy.onUnavailableVerdict(request, cl, required, alive, retryCount).getRetryDecision()); + return new RetryVerdictAssert( + policy.onUnavailableVerdict(request, cl, required, alive, retryCount)); } - protected Assert assertOnRequestAborted( + protected RetryVerdictAssert assertOnRequestAborted( Class errorClass, int retryCount) { - return assertThat( - 
policy.onRequestAbortedVerdict(request, mock(errorClass), retryCount).getRetryDecision()); + return new RetryVerdictAssert( + policy.onRequestAbortedVerdict(request, mock(errorClass), retryCount)); } - protected Assert assertOnErrorResponse( + protected RetryVerdictAssert assertOnErrorResponse( Class errorClass, int retryCount) { - return assertThat( - policy.onErrorResponseVerdict(request, mock(errorClass), retryCount).getRetryDecision()); + return new RetryVerdictAssert( + policy.onErrorResponseVerdict(request, mock(errorClass), retryCount)); + } + + public static class RetryVerdictAssert extends AbstractAssert { + RetryVerdictAssert(RetryVerdict actual) { + super(actual, RetryVerdictAssert.class); + } + + public RetryVerdictAssert hasDecision(RetryDecision decision) { + assertThat(actual.getRetryDecision()).isEqualTo(decision); + return this; + } + + public RetryVerdictAssert hasConsistency(ConsistencyLevel cl) { + assertThat(actual) + .isInstanceOf(ConsistencyDowngradingRetryVerdict.class) + .extracting("consistencyLevel") + .isEqualTo(cl); + return this; + } } } diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java b/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java index c83e2ef2296..2ed917fd449 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java @@ -39,9 +39,15 @@ /** * This example illustrates how to implement a downgrading retry strategy from application code. * - *

          This was provided as a built-in policy in driver 3 ({@code - * DowngradingConsistencyRetryPolicy}), but has been removed from driver 4. See the FAQ. + *

          This strategy is equivalent to the logic implemented by the consistency downgrading retry + * policy, but we think that such a logic should be implemented at application level whenever + * possible. + * + *

          See the FAQ + * and the manual + * section on retries. * *

          Preconditions: * diff --git a/faq/README.md b/faq/README.md index 1b00a971567..842804431a2 100644 --- a/faq/README.md +++ b/faq/README.md @@ -43,24 +43,33 @@ use a fully asynchronous programming model (chaining callbacks instead of blocki At any rate, `CompletionStage` has a `toCompletableFuture()` method. In current JDK versions, every `CompletionStage` is a `CompletableFuture`, so the conversion has no performance overhead. -### Where is `DowngradingConsistencyRetryPolicy`? - -That retry policy was deprecated in driver 3.5.0, and does not exist anymore in 4.0.0. The main -motivation is that this behavior should be the application's concern, not the driver's. - -We recognize that there are use cases where downgrading is good -- for instance, a dashboard -application would present the latest information by reading at QUORUM, but it's acceptable for it to -display stale information by reading at ONE sometimes. - -But APIs provided by the driver should instead encourage idiomatic use of a distributed system like -Apache Cassandra, and a downgrading policy works against this. It suggests that an anti-pattern such -as "try to read at QUORUM, but fall back to ONE if that fails" is a good idea in general use cases, -when in reality it provides no better consistency guarantees than working directly at ONE, but with -higher latencies. - -We therefore urge users to carefully choose upfront the consistency level that works best for their -use cases. If there is a legitimate reason to downgrade and retry, that should be handled by the -application code. +### Where is `DowngradingConsistencyRetryPolicy` from driver 3? + +**As of driver 4.10, this retry policy was made available again as a built-in alternative to the +default retry policy**: see the [manual](../manual/core/retries) to understand how to use it. +For versions between 4.0 and 4.9 inclusive, there is no built-in equivalent of driver 3 +`DowngradingConsistencyRetryPolicy`. 
+ +That retry policy was indeed removed in driver 4.0.0. The main motivation is that this behavior +should be the application's concern, not the driver's. APIs provided by the driver should instead +encourage idiomatic use of a distributed system like Apache Cassandra, and a downgrading policy +works against this. It suggests that an anti-pattern such as "try to read at QUORUM, but fall back +to ONE if that fails" is a good idea in general use cases, when in reality it provides no better +consistency guarantees than working directly at ONE, but with higher latencies. + +However, we recognize that there are use cases where downgrading is good -- for instance, a +dashboard application would present the latest information by reading at QUORUM, but it's acceptable +for it to display stale information by reading at ONE sometimes. + +Thanks to [JAVA-2900], an equivalent retry policy with downgrading behavior was re-introduced in +driver 4.10. Nonetheless, we urge users to avoid using it unless strictly required, and instead, +carefully choose upfront the consistency level that works best for their use cases. Even if there +is a legitimate reason to downgrade and retry, that should be preferably handled by the application +code. An example of downgrading retries implemented at application level can be found in the driver +[examples repository]. + +[JAVA-2900]: https://datastax-oss.atlassian.net/browse/JAVA-2900 +[examples repository]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java ### I want to set a date on a bound statement, where did `setTimestamp()` go? 
diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java new file mode 100644 index 00000000000..2a34a7cd639 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java @@ -0,0 +1,1326 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.retry; + +import static com.datastax.oss.simulacron.common.codec.WriteType.BATCH_LOG; +import static com.datastax.oss.simulacron.common.codec.WriteType.UNLOGGED_BATCH; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.closeConnection; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.readFailure; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.readTimeout; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.serverError; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.unavailable; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.writeFailure; +import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.writeTimeout; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.after; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.connection.ClosedConnectionException; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import 
com.datastax.oss.driver.api.core.retry.RetryVerdict; +import com.datastax.oss.driver.api.core.servererrors.DefaultWriteType; +import com.datastax.oss.driver.api.core.servererrors.ReadFailureException; +import com.datastax.oss.driver.api.core.servererrors.ReadTimeoutException; +import com.datastax.oss.driver.api.core.servererrors.ServerError; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.api.core.servererrors.WriteFailureException; +import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.QueryCounter; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryPolicy; +import com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryVerdict; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.codec.ConsistencyLevel; +import com.datastax.oss.simulacron.common.codec.WriteType; +import com.datastax.oss.simulacron.common.request.Query; +import com.datastax.oss.simulacron.common.request.Request; +import com.datastax.oss.simulacron.common.stubbing.CloseType; +import com.datastax.oss.simulacron.common.stubbing.DisconnectAction; +import com.datastax.oss.simulacron.server.BoundNode; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.net.SocketAddress; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.junit.After; +import org.junit.Before; +import 
org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.slf4j.LoggerFactory; +import org.slf4j.helpers.MessageFormatter; + +@RunWith(DataProviderRunner.class) +public class ConsistencyDowngradingRetryPolicyIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); + + public @Rule SessionRule sessionRule = + SessionRule.builder(SIMULACRON_RULE) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE, true) + .withClass( + DefaultDriverOption.RETRY_POLICY_CLASS, + ConsistencyDowngradingRetryPolicy.class) + .withClass( + DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, + SortingLoadBalancingPolicy.class) + .build()) + .build(); + + private static final String QUERY_STR = "irrelevant"; + + private static final Request QUERY_LOCAL_QUORUM = + new Query(QUERY_STR, ImmutableList.of(ConsistencyLevel.LOCAL_QUORUM), null, null); + + private static final Request QUERY_ONE = + new Query(QUERY_STR, ImmutableList.of(ConsistencyLevel.ONE), null, null); + + private static final Request QUERY_LOCAL_SERIAL = + new Query(QUERY_STR, ImmutableList.of(ConsistencyLevel.LOCAL_SERIAL), null, null); + + private static final SimpleStatement STATEMENT_LOCAL_QUORUM = + SimpleStatement.builder(QUERY_STR) + .setConsistencyLevel(DefaultConsistencyLevel.LOCAL_QUORUM) + .build(); + + private static final SimpleStatement STATEMENT_LOCAL_SERIAL = + SimpleStatement.builder(QUERY_STR) + .setConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) + .build(); + + @SuppressWarnings("deprecation") + private final QueryCounter localQuorumCounter = + QueryCounter.builder(SIMULACRON_RULE.cluster()) + .withFilter( + (l) -> + l.getQuery().equals(QUERY_STR) + && l.getConsistency().equals(ConsistencyLevel.LOCAL_QUORUM)) + .build(); + + @SuppressWarnings("deprecation") + private final 
QueryCounter oneCounter = + QueryCounter.builder(SIMULACRON_RULE.cluster()) + .withFilter( + (l) -> + l.getQuery().equals(QUERY_STR) && l.getConsistency().equals(ConsistencyLevel.ONE)) + .build(); + + @SuppressWarnings("deprecation") + private final QueryCounter localSerialCounter = + QueryCounter.builder(SIMULACRON_RULE.cluster()) + .withFilter( + (l) -> + l.getQuery().equals(QUERY_STR) + && l.getConsistency().equals(ConsistencyLevel.LOCAL_SERIAL)) + .build(); + + private ArgumentCaptor loggingEventCaptor; + private Appender appender; + private Logger logger; + private Level oldLevel; + private String logPrefix; + private BoundNode node0; + private BoundNode node1; + + @Before + public void setup() { + loggingEventCaptor = ArgumentCaptor.forClass(ILoggingEvent.class); + @SuppressWarnings("unchecked") + Appender appender = (Appender) mock(Appender.class); + this.appender = appender; + logger = (Logger) LoggerFactory.getLogger(ConsistencyDowngradingRetryPolicy.class); + oldLevel = logger.getLevel(); + logger.setLevel(Level.TRACE); + logger.addAppender(appender); + // the log prefix we expect in retry logging messages. + logPrefix = sessionRule.session().getName() + "|default"; + // clear activity logs and primes between tests since simulacron instance is shared. + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + node0 = SIMULACRON_RULE.cluster().node(0); + node1 = SIMULACRON_RULE.cluster().node(1); + } + + @After + public void teardown() { + logger.detachAppender(appender); + logger.setLevel(oldLevel); + } + + @Test + public void should_rethrow_on_read_timeout_when_enough_responses_and_data_present() { + // given a node that will respond to query with a read timeout where data is present and enough + // replicas replied. 
+ node0.prime( + when(QUERY_LOCAL_QUORUM).then(readTimeout(ConsistencyLevel.LOCAL_QUORUM, 2, 2, true))); + + try { + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("expected a ReadTimeoutException"); + } catch (ReadTimeoutException rte) { + // then an exception should have been thrown + assertThat(rte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (timeout while waiting for repair of inconsistent replica)"); + assertThat(rte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte.getReceived()).isEqualTo(2); + assertThat(rte.getBlockFor()).isEqualTo(2); + assertThat(rte.wasDataPresent()).isTrue(); + // should not have been retried + List> errors = rte.getExecutionInfo().getErrors(); + assertThat(errors).isEmpty(); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(rte.getExecutionInfo())).isEqualTo(node0.getAddress()); + } + + // there should have been no retry. + localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(0); + + // expect 1 message: RETHROW + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(1); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 2, + true, + 0, + RetryVerdict.RETHROW)); + } + + @Test + public void should_retry_on_same_on_read_timeout_when_enough_responses_but_data_not_present() { + // given a node that will respond to query with a read timeout where data is present. 
+ node0.prime( + when(QUERY_LOCAL_QUORUM).then(readTimeout(ConsistencyLevel.LOCAL_QUORUM, 2, 2, false))); + + try { + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("expected a ReadTimeoutException"); + } catch (ReadTimeoutException rte) { + // then an exception should have been thrown + assertThat(rte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (the replica queried for data didn't respond)"); + assertThat(rte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte.getReceived()).isEqualTo(2); + assertThat(rte.getBlockFor()).isEqualTo(2); + assertThat(rte.wasDataPresent()).isFalse(); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(rte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM as well + List> errors = rte.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + ReadTimeoutException.class, + rte1 -> { + assertThat(rte1) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (the replica queried for data didn't respond)"); + assertThat(rte1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte1.getReceived()).isEqualTo(2); + assertThat(rte1.getBlockFor()).isEqualTo(2); + assertThat(rte1.wasDataPresent()).isFalse(); + }); + } + + // there should have been a retry, and it should have been executed on the same host, + // with same consistency. 
+ localQuorumCounter.assertTotalCount(2); + localQuorumCounter.assertNodeCounts(2, 0, 0); + oneCounter.assertTotalCount(0); + + // expect 2 messages: RETRY_SAME, then RETHROW + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 2, + false, + 0, + RetryVerdict.RETRY_SAME)); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 2, + false, + 1, + RetryVerdict.RETHROW)); + } + + @Test + public void should_downgrade_on_read_timeout_when_not_enough_responses() { + // given a node that will respond to a query with a read timeout where 2 out of 3 responses are + // received. In this case, digest requests succeeded, but not the data request. + node0.prime( + when(QUERY_LOCAL_QUORUM).then(readTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, true))); + + ResultSet rs = sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(rs.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM + List> errors = rs.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + ReadTimeoutException.class, + rte -> { + assertThat(rte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded)"); + assertThat(rte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte.getReceived()).isEqualTo(1); + assertThat(rte.getBlockFor()).isEqualTo(2); + assertThat(rte.wasDataPresent()).isTrue(); + }); + + // should have succeeded in second attempt at ONE + Statement request = (Statement) rs.getExecutionInfo().getRequest(); + assertThat(request.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + + // there should have been a retry, and it should have been executed on the same host, + // but with consistency ONE. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // expect 1 message: RETRY_SAME with ONE + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(1); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 1, + true, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + } + + @Test + public void should_retry_on_read_timeout_when_enough_responses_and_data_not_present() { + // given a node that will respond to a query with a read timeout where 3 out of 3 responses are + // received, but data is not present. + node0.prime( + when(QUERY_LOCAL_QUORUM).then(readTimeout(ConsistencyLevel.LOCAL_QUORUM, 2, 2, false))); + + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected a ReadTimeoutException"); + } catch (ReadTimeoutException rte) { + // then a read timeout exception is thrown. + assertThat(rte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (the replica queried for data didn't respond)"); + assertThat(rte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte.getReceived()).isEqualTo(2); + assertThat(rte.getBlockFor()).isEqualTo(2); + assertThat(rte.wasDataPresent()).isFalse(); + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(rte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM + List> errors = rte.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + ReadTimeoutException.class, + rte1 -> { + assertThat(rte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (the replica queried for data didn't respond)"); + assertThat(rte1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rte1.getReceived()).isEqualTo(2); + assertThat(rte1.getBlockFor()).isEqualTo(2); + assertThat(rte1.wasDataPresent()).isFalse(); + }); + } + + // there should have been a retry, and it should have been executed on the same host. + localQuorumCounter.assertTotalCount(2); + localQuorumCounter.assertNodeCounts(2, 0, 0); + oneCounter.assertTotalCount(0); + + // verify log events were emitted as expected + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 2, + false, + 0, + RetryVerdict.RETRY_SAME)); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 2, + false, + 1, + RetryVerdict.RETHROW)); + } + + @Test + public void should_only_retry_once_on_read_type() { + // given a node that will respond to a query with a read timeout at 2 CLs. 
+ node0.prime( + when(QUERY_LOCAL_QUORUM).then(readTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, true))); + node0.prime(when(QUERY_ONE).then(readTimeout(ConsistencyLevel.ONE, 0, 1, false))); + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected a ReadTimeoutException"); + } catch (ReadTimeoutException wte) { + // then a read timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during read query at consistency ONE (1 responses were required but only 0 replica responded)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + assertThat(wte.getReceived()).isEqualTo(0); + assertThat(wte.getBlockFor()).isEqualTo(1); + assertThat(wte.wasDataPresent()).isFalse(); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM as well + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + ReadTimeoutException.class, + wte1 -> { + assertThat(wte1) + .hasMessageContaining( + "Cassandra timeout during read query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded)"); + assertThat(wte1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte1.getReceived()).isEqualTo(1); + assertThat(wte1.getBlockFor()).isEqualTo(2); + assertThat(wte1.wasDataPresent()).isTrue(); + }); + } + + // should have been retried on same host, but at consistency ONE. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // verify log events were emitted as expected + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 1, + true, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_READ_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.ONE, + 1, + 0, + false, + 1, + RetryVerdict.RETHROW)); + } + + @Test + public void should_retry_on_write_timeout_if_write_type_batch_log() { + // given a node that will respond to query with a write timeout with write type of batch log. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, BATCH_LOG))); + + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("WriteTimeoutException expected"); + } catch (WriteTimeoutException wte) { + // then a write timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during BATCH_LOG write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte.getReceived()).isEqualTo(1); + assertThat(wte.getBlockFor()).isEqualTo(2); + assertThat(wte.getWriteType()).isEqualTo(DefaultWriteType.BATCH_LOG); + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM as well + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + WriteTimeoutException.class, + wte1 -> { + assertThat(wte1) + .hasMessageContaining( + "Cassandra timeout during BATCH_LOG write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte1.getReceived()).isEqualTo(1); + assertThat(wte1.getBlockFor()).isEqualTo(2); + assertThat(wte1.getWriteType()).isEqualTo(DefaultWriteType.BATCH_LOG); + }); + } + + // there should have been a retry, and it should have been executed on the same host. + localQuorumCounter.assertTotalCount(2); + localQuorumCounter.assertNodeCounts(2, 0, 0); + oneCounter.assertTotalCount(0); + + // verify log events were emitted as expected + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + DefaultWriteType.BATCH_LOG, + 2, + 1, + 0, + RetryVerdict.RETRY_SAME)); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + DefaultWriteType.BATCH_LOG, + 2, + 1, + 1, + RetryVerdict.RETHROW)); + } + + @Test + public void 
should_not_retry_on_write_timeout_if_write_type_batch_log_but_non_idempotent() { + // given a node that will respond to query with a write timeout with write type of batch log. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, BATCH_LOG))); + + try { + // when executing a non-idempotent query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM.setIdempotent(false)); + fail("WriteTimeoutException expected"); + } catch (WriteTimeoutException wte) { + // then a write timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during BATCH_LOG write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte.getReceived()).isEqualTo(1); + assertThat(wte.getBlockFor()).isEqualTo(2); + assertThat(wte.getWriteType()).isEqualTo(DefaultWriteType.BATCH_LOG); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should not have been retried + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).isEmpty(); + } + + // should not have been retried. + localQuorumCounter.assertTotalCount(1); + oneCounter.assertTotalCount(0); + + // expect no logging messages since there was no retry + verify(appender, after(500).times(0)).doAppend(any(ILoggingEvent.class)); + } + + @DataProvider({"SIMPLE,SIMPLE", "BATCH,BATCH"}) + @Test + public void should_ignore_on_write_timeout_if_write_type_ignorable_and_at_least_one_ack_received( + WriteType writeType, DefaultWriteType driverWriteType) { + // given a node that will respond to query with a write timeout with write type that is either + // SIMPLE or BATCH. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, writeType))); + + // when executing a query. 
+ ResultSet rs = sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + + // should have ignored the write timeout + assertThat(rs.all()).isEmpty(); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(rs.getExecutionInfo())).isEqualTo(node0.getAddress()); + assertThat(rs.getExecutionInfo().getErrors()).isEmpty(); + + // should not have been retried. + localQuorumCounter.assertTotalCount(1); + oneCounter.assertTotalCount(0); + + // verify log event was emitted for each host as expected + verify(appender, after(500)).doAppend(loggingEventCaptor.capture()); + // final log message should have 2 retries + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + driverWriteType, + 2, + 1, + 0, + RetryVerdict.IGNORE)); + } + + @DataProvider({"SIMPLE,SIMPLE", "BATCH,BATCH"}) + @Test + public void should_throw_on_write_timeout_if_write_type_ignorable_but_no_ack_received( + WriteType writeType, DefaultWriteType driverWriteType) { + // given a node that will respond to query with a write timeout with write type that is either + // SIMPLE or BATCH. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 0, 2, writeType))); + + try { + // when executing a query. 
+ sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("WriteTimeoutException expected"); + } catch (WriteTimeoutException wte) { + // then a write timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during " + + driverWriteType + + " write query at consistency LOCAL_QUORUM (2 replica were required but only 0 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte.getReceived()).isEqualTo(0); + assertThat(wte.getBlockFor()).isEqualTo(2); + assertThat(wte.getWriteType()).isEqualTo(driverWriteType); + // the host that returned the response should be node 0. + assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should not have been retried + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).isEmpty(); + } + + // should not have been retried. + localQuorumCounter.assertTotalCount(1); + oneCounter.assertTotalCount(0); + + // verify log event was emitted for each host as expected + verify(appender, after(500)).doAppend(loggingEventCaptor.capture()); + // final log message should have 2 retries + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + driverWriteType, + 2, + 0, + 0, + RetryVerdict.RETHROW)); + } + + @Test + public void should_downgrade_on_write_timeout_if_write_type_unlogged_batch() { + // given a node that will respond to query with a write timeout with write type of batch log. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, UNLOGGED_BATCH))); + + // when executing a query. + ResultSet rs = sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(rs.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM + List> errors = rs.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + WriteTimeoutException.class, + wte -> { + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during UNLOGGED_BATCH write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte.getReceived()).isEqualTo(1); + assertThat(wte.getBlockFor()).isEqualTo(2); + assertThat(wte.getWriteType()).isEqualTo(DefaultWriteType.UNLOGGED_BATCH); + }); + + // should have succeeded in second attempt at ONE + Statement request = (Statement) rs.getExecutionInfo().getRequest(); + assertThat(request.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + + // there should have been a retry, and it should have been executed on the same host, + // but at consistency ONE. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // verify 1 log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(1); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + DefaultWriteType.UNLOGGED_BATCH, + 2, + 1, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + } + + @Test + public void + should_not_downgrade_on_write_timeout_if_write_type_unlogged_batch_and_non_idempotent() { + // given a node that will respond to query with a write timeout with write type UNLOGGED_BATCH. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, UNLOGGED_BATCH))); + + try { + // when executing a non-idempotent query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM.setIdempotent(false)); + fail("WriteTimeoutException expected"); + } catch (WriteTimeoutException wte) { + // then a write timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during UNLOGGED_BATCH write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte.getReceived()).isEqualTo(1); + assertThat(wte.getBlockFor()).isEqualTo(2); + assertThat(wte.getWriteType()).isEqualTo(DefaultWriteType.UNLOGGED_BATCH); + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should not have been retried + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).isEmpty(); + } + + // should not have been retried. + localQuorumCounter.assertTotalCount(1); + oneCounter.assertTotalCount(0); + + // expect no logging messages since there was no retry + verify(appender, after(500).times(0)).doAppend(any(ILoggingEvent.class)); + } + + @Test + public void should_only_retry_once_on_write_type() { + // given a node that will respond to a query with a write timeout at 2 CLs. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(writeTimeout(ConsistencyLevel.LOCAL_QUORUM, 1, 2, UNLOGGED_BATCH))); + node0.prime(when(QUERY_ONE).then(writeTimeout(ConsistencyLevel.ONE, 0, 1, UNLOGGED_BATCH))); + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected a WriteTimeoutException"); + } catch (WriteTimeoutException wte) { + // then a write timeout exception is thrown + assertThat(wte) + .hasMessageContaining( + "Cassandra timeout during UNLOGGED_BATCH write query at consistency ONE (1 replica were required but only 0 acknowledged the write)"); + assertThat(wte.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + assertThat(wte.getReceived()).isEqualTo(0); + assertThat(wte.getBlockFor()).isEqualTo(1); + assertThat(wte.getWriteType()).isEqualTo(DefaultWriteType.UNLOGGED_BATCH); + // the host that returned the response should be node 0. 
+ assertThat(coordinatorAddress(wte.getExecutionInfo())).isEqualTo(node0.getAddress()); + // should have failed at first attempt at LOCAL_QUORUM as well + List> errors = wte.getExecutionInfo().getErrors(); + assertThat(errors).hasSize(1); + Entry error = errors.get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + WriteTimeoutException.class, + wte1 -> { + assertThat(wte1) + .hasMessageContaining( + "Cassandra timeout during UNLOGGED_BATCH write query at consistency LOCAL_QUORUM (2 replica were required but only 1 acknowledged the write)"); + assertThat(wte1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wte1.getReceived()).isEqualTo(1); + assertThat(wte1.getBlockFor()).isEqualTo(2); + assertThat(wte1.getWriteType()).isEqualTo(DefaultWriteType.UNLOGGED_BATCH); + }); + } + + // should have been retried on same host, but at consistency ONE. + localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // verify log events were emitted as expected + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + DefaultWriteType.UNLOGGED_BATCH, + 2, + 1, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_WRITE_TIMEOUT, + logPrefix, + DefaultConsistencyLevel.ONE, + DefaultWriteType.UNLOGGED_BATCH, + 1, + 0, + 1, + RetryVerdict.RETHROW)); + } + + @Test + 
public void should_retry_on_next_host_on_unavailable_if_LWT() { + // given a node that will respond to a query with an unavailable. + node0.prime(when(QUERY_LOCAL_SERIAL).then(unavailable(ConsistencyLevel.LOCAL_SERIAL, 2, 1))); + + // when executing a query. + ResultSet result = sessionRule.session().execute(STATEMENT_LOCAL_SERIAL); + // then we should get a response, and the host that returned the response should be node 1. + assertThat(coordinatorAddress(result.getExecutionInfo())).isEqualTo(node1.getAddress()); + // the execution info on the result set indicates there was + // an error on the host that received the query. + assertThat(result.getExecutionInfo().getErrors()).hasSize(1); + Map.Entry error = result.getExecutionInfo().getErrors().get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + UnavailableException.class, + ue -> { + assertThat(ue) + .hasMessageContaining( + "Not enough replicas available for query at consistency LOCAL_SERIAL (2 required but only 1 alive)"); + assertThat(ue.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_SERIAL); + assertThat(ue.getAlive()).isEqualTo(1); + assertThat(ue.getRequired()).isEqualTo(2); + }); + + // should have been retried on another host. + localSerialCounter.assertTotalCount(2); + localSerialCounter.assertNodeCounts(1, 1, 0); + localQuorumCounter.assertTotalCount(0); + oneCounter.assertTotalCount(0); + + // verify log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_UNAVAILABLE, + logPrefix, + DefaultConsistencyLevel.LOCAL_SERIAL, + 2, + 1, + 0, + RetryVerdict.RETRY_NEXT)); + } + + @Test + public void should_downgrade_on_unavailable() { + // given a node that will respond to a query with an unavailable. 
+ node0.prime(when(QUERY_LOCAL_QUORUM).then(unavailable(ConsistencyLevel.LOCAL_QUORUM, 2, 1))); + + // when executing a query. + ResultSet rs = sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + // then we should get a response, and the host that returned the response should be node 0. + assertThat(coordinatorAddress(rs.getExecutionInfo())).isEqualTo(node0.getAddress()); + // the execution info on the result set indicates there was + // an error on the host that received the query. + assertThat(rs.getExecutionInfo().getErrors()).hasSize(1); + Map.Entry error = rs.getExecutionInfo().getErrors().get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + UnavailableException.class, + ue -> { + assertThat(ue) + .hasMessageContaining( + "Not enough replicas available for query at consistency LOCAL_QUORUM (2 required but only 1 alive)"); + assertThat(ue.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(ue.getAlive()).isEqualTo(1); + assertThat(ue.getRequired()).isEqualTo(2); + }); + + // should have succeeded in second attempt at ONE + Statement request = (Statement) rs.getExecutionInfo().getRequest(); + assertThat(request.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + + // should have been retried on the same host, but at ONE. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // verify log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_UNAVAILABLE, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 1, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + } + + @Test + public void should_only_retry_once_on_unavailable() { + // given two nodes that will respond to a query with an unavailable. + node0.prime(when(QUERY_LOCAL_QUORUM).then(unavailable(ConsistencyLevel.LOCAL_QUORUM, 2, 1))); + node0.prime(when(QUERY_ONE).then(unavailable(ConsistencyLevel.ONE, 1, 0))); + + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected an UnavailableException"); + } catch (UnavailableException ue) { + // then we should get an unavailable exception with the host being node 1 (since it was second + // tried). 
+ assertThat(ue) + .hasMessageContaining( + "Not enough replicas available for query at consistency ONE (1 required but only 0 alive)"); + assertThat(ue.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.ONE); + assertThat(ue.getRequired()).isEqualTo(1); + assertThat(ue.getAlive()).isEqualTo(0); + assertThat(ue.getExecutionInfo().getErrors()).hasSize(1); + Map.Entry error = ue.getExecutionInfo().getErrors().get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()) + .isInstanceOfSatisfying( + UnavailableException.class, + ue1 -> { + assertThat(ue1) + .hasMessageContaining( + "Not enough replicas available for query at consistency LOCAL_QUORUM (2 required but only 1 alive)"); + assertThat(ue1.getConsistencyLevel()) + .isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(ue1.getRequired()).isEqualTo(2); + assertThat(ue1.getAlive()).isEqualTo(1); + }); + } + + // should have been retried on same host, but at ONE. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(1); + oneCounter.assertNodeCounts(1, 0, 0); + + // verify log events were emitted as expected + verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + List loggedEvents = loggingEventCaptor.getAllValues(); + assertThat(loggedEvents).hasSize(2); + assertThat(loggedEvents.get(0).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_UNAVAILABLE, + logPrefix, + DefaultConsistencyLevel.LOCAL_QUORUM, + 2, + 1, + 0, + new ConsistencyDowngradingRetryVerdict(DefaultConsistencyLevel.ONE))); + assertThat(loggedEvents.get(1).getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_UNAVAILABLE, + logPrefix, + DefaultConsistencyLevel.ONE, + 1, + 0, + 1, + RetryVerdict.RETHROW)); + } + + @Test + public void should_retry_on_next_host_on_connection_error_if_idempotent() { + // given a node that will close its connection as result of receiving a query. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(closeConnection(DisconnectAction.Scope.CONNECTION, CloseType.DISCONNECT))); + + // when executing a query. + ResultSet result = sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + // then we should get a response, and the execution info on the result set indicates there was + // an error on the host that received the query. + assertThat(result.getExecutionInfo().getErrors()).hasSize(1); + Map.Entry error = result.getExecutionInfo().getErrors().get(0); + assertThat(error.getKey().getEndPoint().resolve()).isEqualTo(node0.getAddress()); + assertThat(error.getValue()).isInstanceOf(ClosedConnectionException.class); + // the host that returned the response should be node 1. + assertThat(coordinatorAddress(result.getExecutionInfo())).isEqualTo(node1.getAddress()); + + // should have been retried. 
+ localQuorumCounter.assertTotalCount(2); + // expected query on node 0, and retry on node 2. + localQuorumCounter.assertNodeCounts(1, 1, 0); + oneCounter.assertTotalCount(0); + + // verify log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_ABORTED, + logPrefix, + ClosedConnectionException.class.getSimpleName(), + error.getValue().getMessage(), + 0, + RetryVerdict.RETRY_NEXT)); + } + + @Test + public void should_keep_retrying_on_next_host_on_connection_error() { + // given a request for which every node will close its connection upon receiving it. + SIMULACRON_RULE + .cluster() + .prime( + when(QUERY_LOCAL_QUORUM) + .then(closeConnection(DisconnectAction.Scope.CONNECTION, CloseType.DISCONNECT))); + + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("AllNodesFailedException expected"); + } catch (AllNodesFailedException ex) { + // then an AllNodesFailedException should be raised indicating that all nodes failed the + // request. + assertThat(ex.getAllErrors()).hasSize(3); + } + + // should have been tried on all nodes. + // should have been retried. + localQuorumCounter.assertTotalCount(3); + // expected query on node 0, and retry on node 2 and 3. 
+ localQuorumCounter.assertNodeCounts(1, 1, 1); + oneCounter.assertTotalCount(0); + + // verify log event was emitted for each host as expected + verify(appender, after(500).times(3)).doAppend(loggingEventCaptor.capture()); + // final log message should have 2 retries + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_ABORTED, + logPrefix, + ClosedConnectionException.class.getSimpleName(), + "Lost connection to remote peer", + 2, + RetryVerdict.RETRY_NEXT)); + } + + @Test + public void should_not_retry_on_connection_error_if_non_idempotent() { + // given a node that will close its connection as result of receiving a query. + node0.prime( + when(QUERY_LOCAL_QUORUM) + .then(closeConnection(DisconnectAction.Scope.CONNECTION, CloseType.DISCONNECT))); + + try { + // when executing a non-idempotent query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM.setIdempotent(false)); + fail("ClosedConnectionException expected"); + } catch (ClosedConnectionException ex) { + // then a ClosedConnectionException should be raised, indicating that the connection closed + // while handling the request on that node. + // this clearly indicates that the request wasn't retried. + // Exception should indicate that node 0 was the failing node. + // FIXME JAVA-2908 + // Node coordinator = ex.getExecutionInfo().getCoordinator(); + // assertThat(coordinator).isNotNull(); + // assertThat(coordinator.getEndPoint().resolve()) + // .isEqualTo(SIMULACRON_RULE.cluster().node(0).getAddress()); + } + + // should not have been retried. + localQuorumCounter.assertTotalCount(1); + oneCounter.assertTotalCount(0); + + // expect no logging messages since there was no retry + verify(appender, after(500).times(0)).doAppend(any(ILoggingEvent.class)); + } + + @Test + public void should_keep_retrying_on_next_host_on_error_response() { + // given every node responding with a server error. 
+ SIMULACRON_RULE + .cluster() + .prime(when(QUERY_LOCAL_QUORUM).then(serverError("this is a server error"))); + + try { + // when executing a query. + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException e) { + // then we should get an all nodes failed exception, indicating the query was tried each node. + assertThat(e.getAllErrors()).hasSize(3); + for (List nodeErrors : e.getAllErrors().values()) { + for (Throwable nodeError : nodeErrors) { + assertThat(nodeError).isInstanceOf(ServerError.class); + assertThat(nodeError).hasMessage("this is a server error"); + } + } + } + + // should have been tried on all nodes. + localQuorumCounter.assertTotalCount(3); + localQuorumCounter.assertNodeCounts(1, 1, 1); + + // verify log event was emitted for each host as expected + verify(appender, after(500).times(3)).doAppend(loggingEventCaptor.capture()); + // final log message should have 2 retries + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_ERROR, + logPrefix, + ServerError.class.getSimpleName(), + "this is a server error", + 2, + RetryVerdict.RETRY_NEXT)); + } + + @Test + public void should_not_retry_on_next_host_on_error_response_if_write_failure() { + // given every node responding with a write failure. + SIMULACRON_RULE + .cluster() + .prime( + when(QUERY_LOCAL_QUORUM) + .then( + writeFailure( + ConsistencyLevel.LOCAL_QUORUM, 1, 2, ImmutableMap.of(), WriteType.SIMPLE))); + try { + // when executing a query + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected a WriteFailureException"); + } catch (WriteFailureException wfe) { + // then we should get a write failure exception with the host being node 1 (since it was + // second tried). 
+ assertThat(wfe) + .hasMessageContaining( + "Cassandra failure during write query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded, 0 failed)"); + assertThat(wfe.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(wfe.getBlockFor()).isEqualTo(2); + assertThat(wfe.getReceived()).isEqualTo(1); + assertThat(wfe.getWriteType()).isEqualTo(DefaultWriteType.SIMPLE); + assertThat(wfe.getReasonMap()).isEmpty(); + } + + // should only have been tried on first node. + localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(0); + + // verify log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_ERROR, + logPrefix, + WriteFailureException.class.getSimpleName(), + "Cassandra failure during write query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded, 0 failed)", + 0, + RetryVerdict.RETHROW)); + } + + @Test + public void should_not_retry_on_next_host_on_error_response_if_read_failure() { + // given every node responding with a read failure. + SIMULACRON_RULE + .cluster() + .prime( + when(QUERY_LOCAL_QUORUM) + .then(readFailure(ConsistencyLevel.LOCAL_QUORUM, 1, 2, ImmutableMap.of(), true))); + try { + // when executing a query + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM); + fail("Expected a ReadFailureException"); + } catch (ReadFailureException rfe) { + // then we should get a read failure exception with the host being node 1 (since it was + // second tried). 
+ assertThat(rfe) + .hasMessageContaining( + "Cassandra failure during read query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded, 0 failed)"); + assertThat(rfe.getConsistencyLevel()).isEqualTo(DefaultConsistencyLevel.LOCAL_QUORUM); + assertThat(rfe.getBlockFor()).isEqualTo(2); + assertThat(rfe.getReceived()).isEqualTo(1); + assertThat(rfe.wasDataPresent()).isTrue(); + assertThat(rfe.getReasonMap()).isEmpty(); + } + + // should only have been tried on first node. + localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(0); + + // verify log event was emitted as expected + verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getValue().getFormattedMessage()) + .isEqualTo( + expectedMessage( + ConsistencyDowngradingRetryPolicy.VERDICT_ON_ERROR, + logPrefix, + ReadFailureException.class.getSimpleName(), + "Cassandra failure during read query at consistency LOCAL_QUORUM (2 responses were required but only 1 replica responded, 0 failed)", + 0, + RetryVerdict.RETHROW)); + } + + @Test + public void should_not_retry_on_next_host_on_error_response_if_non_idempotent() { + // given every node responding with a server error. + SIMULACRON_RULE + .cluster() + .prime(when(QUERY_LOCAL_QUORUM).then(serverError("this is a server error"))); + + try { + // when executing a query that is not idempotent + sessionRule.session().execute(STATEMENT_LOCAL_QUORUM.setIdempotent(false)); + fail("Expected a ServerError"); + } catch (ServerError e) { + // then should get a server error from first host. + assertThat(e.getMessage()).isEqualTo("this is a server error"); + } + + // should only have been tried on first node. 
+ localQuorumCounter.assertTotalCount(1); + localQuorumCounter.assertNodeCounts(1, 0, 0); + oneCounter.assertTotalCount(0); + + // expect no logging messages since there was no retry + verify(appender, after(500).times(0)).doAppend(any(ILoggingEvent.class)); + } + + private String expectedMessage(String template, Object... args) { + return MessageFormatter.arrayFormat(template, args).getMessage(); + } + + private SocketAddress coordinatorAddress(ExecutionInfo executionInfo) { + Node coordinator = executionInfo.getCoordinator(); + assertThat(coordinator).isNotNull(); + return coordinator.getEndPoint().resolve(); + } +} diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 8c6190f36b1..fbcd8e92720 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -7,25 +7,56 @@ What to do when a request failed on a node: retry (same or other node), rethrow, * `advanced.retry-policy` in the configuration. Default policy retries at most once, in cases that have a high chance of success; you can also write your own. * can have per-profile policies. -* only kicks in if the query is idempotent. +* only kicks in if the query is [idempotent](../idempotence). ----- When a query fails, it sometimes makes sense to retry it: the error might be temporary, or the query might work on a different node. The driver uses a *retry policy* to determine when and how to retry. -It is defined in the [configuration](../configuration/): - + +### Built-in retry policies + +The driver ships with two retry policies: `DefaultRetryPolicy` –– the default –– and +`ConsistencyDowngradingRetryPolicy`. + +The default retry policy should be preferred in most cases as it only retries when *it is perfectly +safe to do so*, and when *the chances of success are high enough* to warrant a retry. + +`ConsistencyDowngradingRetryPolicy` is provided for cases where the application can tolerate a +temporary degradation of its consistency guarantees. 
Its general behavior is as follows: if, based +on the information the coordinator returns, retrying the operation with the initially requested +consistency has a chance to succeed, do it. Otherwise, if based on this information, we know that +the initially requested consistency level *cannot be achieved currently*, then: + +* For writes, ignore the exception *if we know the write has been persisted on at least one + replica*. +* For reads, try reading again at a weaker consistency level. + +Keep in mind that this may break invariants! For example, if your application relies on immediate +write visibility by writing and reading at QUORUM only, downgrading a write to ONE could cause that +write to go unnoticed by subsequent reads at QUORUM. Furthermore, this policy doesn't always respect +datacenter locality; for example, it may downgrade LOCAL_QUORUM to ONE, and thus could accidentally +send a write that was intended for the local datacenter to another datacenter. In summary: **only +use this retry policy if you understand the consequences.** + +Since `DefaultRetryPolicy` is already the driver's default retry policy, no special configuration +is required to activate it. To use `ConsistencyDowngradingRetryPolicy` instead, the following +option must be declared in the driver [configuration](../configuration/): + ``` -datastax-java-driver.advanced.retry-policy { - class = DefaultRetryPolicy -} +datastax-java-driver.advanced.retry-policy.class = ConsistencyDowngradingRetryPolicy ``` -The behavior of the default policy will be detailed in the sections below. You can also use your -own policy by specifying the fully-qualified name of a class that implements [RetryPolicy]. +You can also use your own policy by specifying for the above option the fully-qualified name of a +class that implements [RetryPolicy]. + +### Behavior -The policy has several methods that cover different error cases. 
Each method returns a decision to -indicate what to do next: +The behavior of both policies will be detailed in the sections below. + +The policy has several methods that cover different error cases. Each method returns a +[RetryVerdict]. A retry verdict essentially provides the driver with a [RetryDecision] to indicate +what to do next. There are four possible retry decisions: * retry on the same node; * retry on the next node in the [query plan](../load_balancing/) for this statement; @@ -33,7 +64,7 @@ indicate what to do next: using the asynchronous API); * ignore the exception. That is, mark the request as successful, and return an empty result set. -### onUnavailable +#### `onUnavailableVerdict` A request reached the coordinator, but there weren't enough live replicas to achieve the requested consistency level. The coordinator replied with an `UNAVAILABLE` error. @@ -48,7 +79,14 @@ rationale is that the first coordinator might have been network-isolated from al (thinking they're down), but still able to communicate with the client; in that case, retrying on the same node has almost no chance of success, but moving to the next node might solve the issue. -### onReadTimeout +`ConsistencyDowngradingRetryPolicy` also triggers a maximum of one retry, but instead of trying the +next node, it will downgrade the initial consistency level, if possible, and retry *the same node*. +Note that if it is not possible to downgrade, this policy will rethrow the exception. For example, +if the original consistency level was QUORUM, and 2 replicas were required to achieve a quorum, but +only one replica is alive, then the query will be retried with consistency ONE. If no replica was +alive however, there is no point in downgrading, and the policy will rethrow. + +#### `onReadTimeoutVerdict` A read request reached the coordinator, which initially believed that there were enough live replicas to process it. 
But one or several replicas were too slow to answer within the predefined @@ -73,7 +111,12 @@ retrieval, not having detected that replica as dead yet. The reasoning is that b the timeout, the dead replica will likely have been detected as dead and the retry has a high chance of success. -### onWriteTimeout +`ConsistencyDowngradingRetryPolicy` behaves like the default policy when enough replicas responded. +If not enough replicas responded however, it will attempt to downgrade the initial consistency +level, and retry *the same node*. If it is not possible to downgrade, this policy will rethrow the +exception. + +#### `onWriteTimeoutVerdict` This is similar to `onReadTimeout`, but for write operations. The reason reads and writes are handled separately is because a read is obviously a non mutating operation, whereas a write is @@ -91,7 +134,20 @@ small subset of nodes in the local datacenter; a timeout usually means that none alive but the coordinator hadn't detected them as dead yet. By the time we get the timeout, the dead nodes will likely have been detected as dead, and the retry has a high chance of success. -### onRequestAborted +`ConsistencyDowngradingRetryPolicy` also triggers a maximum of one retry, but behaves differently: + +* For `SIMPLE` and `BATCH` write types: if at least one replica acknowledged the write, the policy + will assume that the write will be eventually replicated, and decide to ignore the error; in other + words, it will consider that the write already succeeded, albeit with weaker consistency + guarantees: retrying is therefore useless. If no replica acknowledged the write, the policy will + rethrow the error. +* For `UNLOGGED_BATCH` write type: since only part of the batch could have been persisted, the + policy will attempt to downgrade the consistency level and retry *on the same node*. If + downgrading is not possible, the policy will rethrow. 
+* For `BATCH_LOG` write type: the policy will retry the same node, for the reasons explained above. +* For other write types: the policy will always rethrow. + +#### `onRequestAbortedVerdict` The request was aborted before we could get a response from the coordinator. This can happen in two cases: @@ -104,10 +160,10 @@ cases: This method is only invoked for [idempotent](../idempotence/) statements. Otherwise, the driver bypasses the retry policy and always rethrows the error. -The default policy retries on the next node if the connection was closed, and rethrows (assuming a -driver bug) in all other cases. +Both the default policy and `ConsistencyDowngradingRetryPolicy` retry on the next node if the +connection was closed, and rethrow (assuming a driver bug) in all other cases. -### onErrorResponse +#### `onErrorResponseVerdict` The coordinator replied with an error other than `READ_TIMEOUT`, `WRITE_TIMEOUT` or `UNAVAILABLE`. Namely, this covers [OverloadedException], [ServerError], [TruncateException], @@ -116,7 +172,8 @@ Namely, this covers [OverloadedException], [ServerError], [TruncateException], This method is only invoked for [idempotent](../idempotence/) statements. Otherwise, the driver bypasses the retry policy and always rethrows the error. -The default policy rethrows read and write failures, and retries other errors on the next node. +Both the default policy and `ConsistencyDowngradingRetryPolicy` rethrow read and write failures, +and retry other errors on the next node. ### Hard-coded rules @@ -174,20 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 51c0917f018..88ba4cd8332 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -16,7 +16,7 @@ a `RetryVerdict` instance: 4. [`onRequestAbortedVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAbortedVerdict-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) 5. [`onErrorResponseVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponseVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) -The following methods were deprecated: +The following methods were deprecated and will be removed in the next major version: 1. 
[`onReadTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) 2. [`onWriteTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) @@ -24,6 +24,11 @@ The following methods were deprecated: 4. [`onRequestAborted`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAborted-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) 5. [`onErrorResponse`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponse-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) +Driver 4.10.0 also re-introduced a retry policy whose behavior is equivalent to the +`DowngradingConsistencyRetryPolicy` from driver 3.x. See this +[FAQ entry](https://docs.datastax.com/en/developer/java-driver/latest/faq/#where-is-downgrading-consistency-retry-policy) +for more information. + [`RetryVerdict`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html #### Enhancements to the `Uuids` utility class @@ -33,7 +38,7 @@ The following methods were deprecated: implementation, but instead re-implements random UUID generation using the non-cryptographic random number generator `java.util.Random`. 
-For most users, non cryptographic strength is enough and this change should translate into better +For most users, non-cryptographic strength is enough and this change should translate into better performance when generating UUIDs for database insertion. However, in the unlikely case where your application requires cryptographic strength for UUID generation, you should update your code to use `java.util.UUID.randomUUID()` instead of `com.datastax.oss.driver.api.core.uuid.Uuids.random()` From db803e84b98a13c40d4c37b598221fbde924d20e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 12:01:15 +0100 Subject: [PATCH 621/979] JAVA-2899: Re-introduce cross-DC failover in driver 4 (#1518) --- changelog/README.md | 1 + .../ContinuousRequestHandlerBase.java | 4 +- .../core/graph/GraphRequestHandler.java | 4 +- .../api/core/config/DefaultDriverOption.java | 15 + .../driver/api/core/config/OptionsMap.java | 2 + .../api/core/config/TypedDriverOption.java | 13 + .../core/ConsistencyLevelRegistry.java | 2 + .../core/DefaultConsistencyLevelRegistry.java | 5 + .../internal/core/cql/CqlRequestHandler.java | 4 +- .../BasicLoadBalancingPolicy.java | 210 ++++++-- .../DefaultLoadBalancingPolicy.java | 13 +- .../helper/DefaultNodeFilterHelper.java | 9 +- .../helper/OptionalLocalDcHelper.java | 29 +- .../nodeset/DcAgnosticNodeSet.java | 52 ++ .../loadbalancing/nodeset/MultiDcNodeSet.java | 93 ++++ .../core/loadbalancing/nodeset/NodeSet.java | 69 +++ .../nodeset/SingleDcNodeSet.java | 71 +++ .../util/collection/CompositeQueryPlan.java | 86 +++ .../core/util/collection/EmptyQueryPlan.java | 43 ++ .../core/util/collection/LazyQueryPlan.java | 56 ++ .../core/util/collection/QueryPlan.java | 86 ++- .../core/util/collection/QueryPlanBase.java | 73 +++ .../core/util/collection/SimpleQueryPlan.java | 45 ++ core/src/main/resources/reference.conf | 113 +++- ...asicLoadBalancingPolicyDcAgnosticTest.java | 56 ++ ...asicLoadBalancingPolicyDcFailoverTest.java | 153 ++++++ 
.../BasicLoadBalancingPolicyDistanceTest.java | 220 ++++++++ .../BasicLoadBalancingPolicyEventsTest.java | 22 +- .../BasicLoadBalancingPolicyInitTest.java | 39 +- ...BasicLoadBalancingPolicyQueryPlanTest.java | 33 +- ...ringLoadBalancingPolicyDcFailoverTest.java | 87 +++ ...erringLoadBalancingPolicyDistanceTest.java | 62 +++ ...nferringLoadBalancingPolicyEventsTest.java | 2 +- ...cInferringLoadBalancingPolicyInitTest.java | 14 +- ...rringLoadBalancingPolicyQueryPlanTest.java | 2 +- ...aultLoadBalancingPolicyDcFailoverTest.java | 87 +++ ...efaultLoadBalancingPolicyDistanceTest.java | 61 +++ .../DefaultLoadBalancingPolicyEventsTest.java | 2 +- .../DefaultLoadBalancingPolicyInitTest.java | 16 +- ...faultLoadBalancingPolicyQueryPlanTest.java | 7 +- ...LoadBalancingPolicyRequestTrackerTest.java | 3 +- ....java => LoadBalancingPolicyTestBase.java} | 11 +- .../nodeset/DcAgnosticNodeSetTest.java | 59 ++ .../nodeset/MultiDcNodeSetTest.java | 81 +++ .../nodeset/SingleDcNodeSetTest.java | 71 +++ .../collection/CompositeQueryPlanTest.java | 40 ++ .../util/collection/LazyQueryPlanTest.java | 34 ++ ...ryPlanTest.java => QueryPlanTestBase.java} | 55 +- .../util/collection/SimpleQueryPlanTest.java | 29 + examples/pom.xml | 5 + .../failover/CrossDatacenterFailover.java | 456 ++++++++++++++++ faq/README.md | 14 + .../AllLoadBalancingPoliciesSimulacronIT.java | 503 ++++++++++++++++++ manual/core/load_balancing/README.md | 155 +++++- pom.xml | 2 +- upgrade_guide/README.md | 9 + 56 files changed, 3230 insertions(+), 258 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/NodeSet.java create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlan.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/EmptyQueryPlan.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlan.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanBase.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlan.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcAgnosticTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcFailoverTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java rename core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/{DefaultLoadBalancingPolicyTestBase.java => LoadBalancingPolicyTestBase.java} (85%) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSetTest.java create mode 100644 
core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSetTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSetTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlanTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlanTest.java rename core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/{QueryPlanTest.java => QueryPlanTestBase.java} (58%) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlanTest.java create mode 100644 examples/src/main/java/com/datastax/oss/driver/examples/failover/CrossDatacenterFailover.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/AllLoadBalancingPoliciesSimulacronIT.java diff --git a/changelog/README.md b/changelog/README.md index 8f740eb8870..4c7cf9fbf4d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [new feature] JAVA-2899: Re-introduce cross-DC failover in driver 4 - [new feature] JAVA-2900: Re-introduce consistency downgrading retries - [new feature] JAVA-2903: BlockHound integration - [improvement] JAVA-2877: Allow skipping validation for individual mapped entities diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index c7784c2b2d3..f97bc684e37 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -62,7 +62,7 @@ import com.datastax.oss.driver.internal.core.session.DefaultSession; 
import com.datastax.oss.driver.internal.core.session.RepreparePayload; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; @@ -194,7 +194,7 @@ public ContinuousRequestHandlerBase( Conversions.resolveExecutionProfile(statement, context); this.queryPlan = statement.getNode() != null - ? new QueryPlan(statement.getNode()) + ? new SimpleQueryPlan(statement.getNode()) : context .getLoadBalancingPolicyWrapper() .newQueryPlan(statement, executionProfile.getName(), session); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index b193805a2c6..ca84f1c634a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -57,7 +57,7 @@ import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.response.Error; @@ -190,7 +190,7 @@ public void onThrottleReady(boolean wasDelayed) { } Queue queryPlan = initialStatement.getNode() != null - ? new QueryPlan(initialStatement.getNode()) + ? 
new SimpleQueryPlan(initialStatement.getNode()) : context .getLoadBalancingPolicyWrapper() .newQueryPlan(initialStatement, executionProfile.getName(), session); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 55e50ed9069..d9bc504bea4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -823,6 +823,21 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link String} */ METRICS_FACTORY_CLASS("advanced.metrics.factory.class"), + + /** + * The maximum number of nodes from remote DCs to include in query plans. + * + *

          Value-Type: int + */ + LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC( + "advanced.load-balancing-policy.dc-failover.max-nodes-per-remote-dc"), + /** + * Whether to consider nodes from remote DCs if the request's consistency level is local. + * + *

          Value-Type: boolean + */ + LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS( + "advanced.load-balancing-policy.dc-failover.allow-for-local-consistency-levels"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 2b4a767e29d..c5eb7829deb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -370,6 +370,8 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.NETTY_TIMER_TICK_DURATION, Duration.ofMillis(100)); map.put(TypedDriverOption.NETTY_TIMER_TICKS_PER_WHEEL, 2048); map.put(TypedDriverOption.COALESCER_INTERVAL, Duration.of(10, ChronoUnit.MICROS)); + map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC, 0); + map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, false); } @Immutable diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index c7c80fb9bf4..bf4223bf45c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -709,6 +709,19 @@ public String toString() { public static final TypedDriverOption METRICS_FACTORY_CLASS = new TypedDriverOption<>(DefaultDriverOption.METRICS_FACTORY_CLASS, GenericType.STRING); + /** The maximum number of nodes from remote DCs to include in query plans. 
*/ + public static final TypedDriverOption + LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC, + GenericType.INTEGER); + /** Whether to consider nodes from remote DCs if the request's consistency level is local. */ + public static final TypedDriverOption + LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, + GenericType.BOOLEAN); + private static Iterable> introspectBuiltInValues() { try { ImmutableList.Builder> result = ImmutableList.builder(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ConsistencyLevelRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ConsistencyLevelRegistry.java index c9353df9b55..54f06840412 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ConsistencyLevelRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ConsistencyLevelRegistry.java @@ -29,6 +29,8 @@ public interface ConsistencyLevelRegistry { int nameToCode(String name); + ConsistencyLevel nameToLevel(String name); + /** @return all the values known to this driver instance. 
*/ Iterable getValues(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultConsistencyLevelRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultConsistencyLevelRegistry.java index ba833674292..8357ef6d023 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultConsistencyLevelRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultConsistencyLevelRegistry.java @@ -46,6 +46,11 @@ public int nameToCode(String name) { return NAME_TO_CODE.get(name); } + @Override + public ConsistencyLevel nameToLevel(String name) { + return DefaultConsistencyLevel.valueOf(name); + } + @Override public Iterable getValues() { return VALUES; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 166563b3160..743d11c9ad4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -55,7 +55,7 @@ import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; @@ -187,7 +187,7 @@ public void onThrottleReady(boolean wasDelayed) { } Queue queryPlan = this.initialStatement.getNode() != null - ? new QueryPlan(this.initialStatement.getNode()) + ? 
new SimpleQueryPlan(this.initialStatement.getNode()) : context .getLoadBalancingPolicyWrapper() .newQueryPlan(initialStatement, executionProfile.getName(), session); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index 23b921a6eb9..dd9b4145b18 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -15,9 +15,12 @@ */ package com.datastax.oss.driver.internal.core.loadbalancing; +import com.datastax.oss.driver.api.core.ConsistencyLevel; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; @@ -29,19 +32,25 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.loadbalancing.helper.DefaultNodeFilterHelper; import com.datastax.oss.driver.internal.core.loadbalancing.helper.OptionalLocalDcHelper; +import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.DcAgnosticNodeSet; +import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.MultiDcNodeSet; +import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.NodeSet; +import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.SingleDcNodeSet; import com.datastax.oss.driver.internal.core.util.ArrayUtils; +import 
com.datastax.oss.driver.internal.core.util.collection.CompositeQueryPlan; +import com.datastax.oss.driver.internal.core.util.collection.LazyQueryPlan; import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.Set; import java.util.UUID; -import java.util.concurrent.CopyOnWriteArraySet; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntUnaryOperator; import java.util.function.Predicate; @@ -88,36 +97,58 @@ public class BasicLoadBalancingPolicy implements LoadBalancingPolicy { private static final Logger LOG = LoggerFactory.getLogger(BasicLoadBalancingPolicy.class); protected static final IntUnaryOperator INCREMENT = i -> (i == Integer.MAX_VALUE) ? 
0 : i + 1; + private static final Object[] EMPTY_NODES = new Object[0]; @NonNull protected final InternalDriverContext context; @NonNull protected final DriverExecutionProfile profile; @NonNull protected final String logPrefix; protected final AtomicInteger roundRobinAmount = new AtomicInteger(); - protected final CopyOnWriteArraySet liveNodes = new CopyOnWriteArraySet<>(); + + private final int maxNodesPerRemoteDc; + private final boolean allowDcFailoverForLocalCl; + private final ConsistencyLevel defaultConsistencyLevel; // private because they should be set in init() and never be modified after private volatile DistanceReporter distanceReporter; private volatile Predicate filter; private volatile String localDc; + private volatile NodeSet liveNodes; public BasicLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { this.context = (InternalDriverContext) context; profile = context.getConfig().getProfile(profileName); logPrefix = context.getSessionName() + "|" + profileName; - } - - /** @return The local datacenter, if known; empty otherwise. */ - public Optional getLocalDatacenter() { - return Optional.ofNullable(localDc); + maxNodesPerRemoteDc = + profile.getInt(DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC); + allowDcFailoverForLocalCl = + profile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS); + defaultConsistencyLevel = + this.context + .getConsistencyLevelRegistry() + .nameToLevel(profile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)); } /** - * @return An immutable copy of the nodes currently considered as live; if the local datacenter is - * known, this set will contain only nodes belonging to that datacenter. + * Returns the local datacenter name, if known; empty otherwise. + * + *

          When this method returns null, then datacenter awareness is completely disabled. All + * non-ignored nodes will be considered "local" regardless of their actual datacenters, and will + * have equal chances of being selected for query plans. + * + *

          After the policy is {@linkplain #init(Map, DistanceReporter) initialized} this method will + * return the local datacenter that was discovered by calling {@link #discoverLocalDc(Map)}. + * Before initialization, this method always returns null. */ - public Set getLiveNodes() { - return ImmutableSet.copyOf(liveNodes); + @Nullable + protected String getLocalDatacenter() { + return localDc; + } + + /** @return The nodes currently considered as live. */ + protected NodeSet getLiveNodes() { + return liveNodes; } @Override @@ -125,17 +156,18 @@ public void init(@NonNull Map nodes, @NonNull DistanceReporter dista this.distanceReporter = distanceReporter; localDc = discoverLocalDc(nodes).orElse(null); filter = createNodeFilter(localDc, nodes); + liveNodes = + localDc == null + ? new DcAgnosticNodeSet() + : maxNodesPerRemoteDc <= 0 ? new SingleDcNodeSet(localDc) : new MultiDcNodeSet(); for (Node node : nodes.values()) { - if (filter.test(node)) { - distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (node.getState() != NodeState.DOWN) { - // This includes state == UNKNOWN. If the node turns out to be unreachable, this will be - // detected when we try to open a pool to it, it will get marked down and this will be - // signaled back to this policy - liveNodes.add(node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); + NodeDistance distance = computeNodeDistance(node); + distanceReporter.setDistance(node, distance); + if (distance != NodeDistance.IGNORED && node.getState() != NodeState.DOWN) { + // This includes state == UNKNOWN. If the node turns out to be unreachable, this will be + // detected when we try to open a pool to it, it will get marked down and this will be + // signaled back to this policy, which will then remove it from the live set. 
+ liveNodes.add(node); } } } @@ -151,6 +183,10 @@ public void init(@NonNull Map nodes, @NonNull DistanceReporter dista * Optional#empty empty}, if they require a local datacenter to be defined in order to operate * properly. * + *

          If this method returns empty, then datacenter awareness will be completely disabled. All + * non-ignored nodes will be considered "local" regardless of their actual datacenters, and will + * have equal chances of being selected for query plans. + * * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) * when the load balancing policy was initialized. This argument is provided in case * implementors need to inspect the cluster topology to discover the local datacenter. @@ -187,7 +223,7 @@ protected Predicate createNodeFilter( @Override public Queue newQueryPlan(@Nullable Request request, @Nullable Session session) { // Take a snapshot since the set is concurrent: - Object[] currentNodes = liveNodes.toArray(); + Object[] currentNodes = liveNodes.dc(localDc).toArray(); Set allReplicas = getReplicas(request, session); int replicaCount = 0; // in currentNodes @@ -216,7 +252,8 @@ public Queue newQueryPlan(@Nullable Request request, @Nullable Session ses currentNodes.length - replicaCount, roundRobinAmount.getAndUpdate(INCREMENT)); - return new QueryPlan(currentNodes); + QueryPlan plan = currentNodes.length == 0 ? QueryPlan.EMPTY : new SimpleQueryPlan(currentNodes); + return maybeAddDcFailover(request, plan); } @NonNull @@ -233,9 +270,9 @@ protected Set getReplicas(@Nullable Request request, @Nullable Session ses // Note: we're on the hot path and the getXxx methods are potentially more than simple getters, // so we only call each method when strictly necessary (which is why the code below looks a bit // weird). 
- CqlIdentifier keyspace = null; - Token token = null; - ByteBuffer key = null; + CqlIdentifier keyspace; + Token token; + ByteBuffer key; try { keyspace = request.getKeyspace(); if (keyspace == null) { @@ -265,34 +302,79 @@ protected Set getReplicas(@Nullable Request request, @Nullable Session ses : tokenMap.getReplicas(keyspace, key); } + @NonNull + protected Queue maybeAddDcFailover(@Nullable Request request, @NonNull Queue local) { + if (maxNodesPerRemoteDc <= 0 || localDc == null) { + return local; + } + if (!allowDcFailoverForLocalCl && request instanceof Statement) { + Statement statement = (Statement) request; + ConsistencyLevel consistency = statement.getConsistencyLevel(); + if (consistency == null) { + consistency = defaultConsistencyLevel; + } + if (consistency.isDcLocal()) { + return local; + } + } + QueryPlan remote = + new LazyQueryPlan() { + + @Override + protected Object[] computeNodes() { + Object[] dcs = liveNodes.dcs().toArray(); + if (dcs.length <= 1) { + return EMPTY_NODES; + } + Object[] remoteNodes = new Object[(dcs.length - 1) * maxNodesPerRemoteDc]; + int remoteNodesLength = 0; + for (Object dc : dcs) { + if (!dc.equals(localDc)) { + Object[] remoteNodesInDc = liveNodes.dc((String) dc).toArray(); + for (int i = 0; i < maxNodesPerRemoteDc && i < remoteNodesInDc.length; i++) { + remoteNodes[remoteNodesLength++] = remoteNodesInDc[i]; + } + } + } + if (remoteNodesLength == 0) { + return EMPTY_NODES; + } + shuffleHead(remoteNodes, remoteNodesLength); + if (remoteNodes.length == remoteNodesLength) { + return remoteNodes; + } + Object[] trimmedRemoteNodes = new Object[remoteNodesLength]; + System.arraycopy(remoteNodes, 0, trimmedRemoteNodes, 0, remoteNodesLength); + return trimmedRemoteNodes; + } + }; + + return new CompositeQueryPlan(local, remote); + } + /** Exposed as a protected method so that it can be accessed by tests */ - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - ArrayUtils.shuffleHead(currentNodes, 
replicaCount); + protected void shuffleHead(Object[] currentNodes, int headLength) { + ArrayUtils.shuffleHead(currentNodes, headLength); } @Override public void onAdd(@NonNull Node node) { - if (filter.test(node)) { - LOG.debug("[{}] {} was added, setting distance to LOCAL", logPrefix, node); - // Setting to a non-ignored distance triggers the session to open a pool, which will in turn - // set the node UP when the first channel gets opened. - distanceReporter.setDistance(node, NodeDistance.LOCAL); - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); - } + NodeDistance distance = computeNodeDistance(node); + // Setting to a non-ignored distance triggers the session to open a pool, which will in turn + // set the node UP when the first channel gets opened, then #onUp will be called, and the + // node will be eventually added to the live set. + distanceReporter.setDistance(node, distance); + LOG.debug("[{}] {} was added, setting distance to {}", logPrefix, node, distance); } @Override public void onUp(@NonNull Node node) { - if (filter.test(node)) { - // Normally this is already the case, but the filter could be dynamic and have ignored the - // node previously. - distanceReporter.setDistance(node, NodeDistance.LOCAL); - if (liveNodes.add(node)) { - LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); - } - } else { - distanceReporter.setDistance(node, NodeDistance.IGNORED); + NodeDistance distance = computeNodeDistance(node); + if (node.getDistance() != distance) { + distanceReporter.setDistance(node, distance); + } + if (distance != NodeDistance.IGNORED && liveNodes.add(node)) { + LOG.debug("[{}] {} came back UP, added to live set", logPrefix, node); } } @@ -310,6 +392,42 @@ public void onRemove(@NonNull Node node) { } } + /** + * Computes the distance of the given node. + * + *

          This method is called during {@linkplain #init(Map, DistanceReporter) initialization}, when + * a node {@linkplain #onAdd(Node) is added}, and when a node {@linkplain #onUp(Node) is back UP}. + */ + protected NodeDistance computeNodeDistance(@NonNull Node node) { + // We interrogate the filter every time since it could be dynamic + // and change its verdict between two invocations of this method. + if (!filter.test(node)) { + return NodeDistance.IGNORED; + } + // no local DC is defined, all nodes accepted by the filter are LOCAL. + if (localDc == null) { + return NodeDistance.LOCAL; + } + // the node is LOCAL if its datacenter is the local datacenter. + if (Objects.equals(node.getDatacenter(), localDc)) { + return NodeDistance.LOCAL; + } + // otherwise the node will be either REMOTE or IGNORED, depending + // on how many remote nodes we accept per DC. + if (maxNodesPerRemoteDc > 0) { + Object[] remoteNodes = liveNodes.dc(node.getDatacenter()).toArray(); + for (int i = 0; i < maxNodesPerRemoteDc; i++) { + if (i == remoteNodes.length) { + // there is still room for one more REMOTE node in this DC + return NodeDistance.REMOTE; + } else if (remoteNodes[i] == node) { + return NodeDistance.REMOTE; + } + } + } + return NodeDistance.IGNORED; + } + @Override public void close() { // nothing to do diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index bc609c2ece3..175f9556eaf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.util.ArrayUtils; import 
com.datastax.oss.driver.internal.core.util.collection.QueryPlan; +import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.BitSet; @@ -126,7 +127,7 @@ public Queue newQueryPlan(@Nullable Request request, @Nullable Session ses } // Take a snapshot since the set is concurrent: - Object[] currentNodes = liveNodes.toArray(); + Object[] currentNodes = getLiveNodes().dc(getLocalDatacenter()).toArray(); Set allReplicas = getReplicas(request, session); int replicaCount = 0; // in currentNodes @@ -157,6 +158,7 @@ public Queue newQueryPlan(@Nullable Request request, @Nullable Session ses long now = nanoTime(); for (int i = 0; i < replicaCount; i++) { Node node = (Node) currentNodes[i]; + assert node != null; Long upTimeNanos = upTimes.get(node); if (upTimeNanos != null && now - upTimeNanos - NEWLY_UP_INTERVAL_NANOS < 0 @@ -225,7 +227,8 @@ > getInFlight((Node) currentNodes[1], session)) { currentNodes.length - replicaCount, roundRobinAmount.getAndUpdate(INCREMENT)); - return new QueryPlan(currentNodes); + QueryPlan plan = currentNodes.length == 0 ? 
QueryPlan.EMPTY : new SimpleQueryPlan(currentNodes); + return maybeAddDcFailover(request, plan); } @Override @@ -249,12 +252,6 @@ public void onNodeError( updateResponseTimes(node); } - /** Exposed as a protected method so that it can be accessed by tests */ - @Override - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - super.shuffleHead(currentNodes, replicaCount); - } - /** Exposed as a protected method so that it can be accessed by tests */ protected long nanoTime() { return System.nanoTime(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java index 10630e58fad..9eae21589ed 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java @@ -62,14 +62,7 @@ public Predicate createNodeFilter( @Nullable String localDc, @NonNull Map nodes) { Predicate filterFromConfig = nodeFilterFromConfig(); return node -> { - if (localDc != null && !localDc.equals(node.getDatacenter())) { - LOG.debug( - "[{}] Ignoring {} because it doesn't belong to the local DC {}", - logPrefix, - node, - localDc); - return false; - } else if (!filterFromConfig.test(node)) { + if (!filterFromConfig.test(node)) { LOG.debug( "[{}] Ignoring {} because it doesn't match the user-provided predicate", logPrefix, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java index e513b2b8b83..4c67803127e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java @@ -74,6 +74,7 @@ public Optional discoverLocalDc(@NonNull Map nodes) { checkLocalDatacenterCompatibility(localDc, context.getMetadataManager().getContactPoints()); return Optional.of(localDc); } else { + LOG.debug("[{}] Local DC not set, DC awareness will be disabled", logPrefix); return Optional.empty(); } } @@ -83,26 +84,28 @@ public Optional discoverLocalDc(@NonNull Map nodes) { * configuration, or programmatically. * *

          The default implementation logs a warning when a contact point reports a datacenter - * different from the local one. + * different from the local one, and only for the default profile. * * @param localDc The local datacenter, as specified in the config, or programmatically. * @param contactPoints The contact points provided when creating the session. */ protected void checkLocalDatacenterCompatibility( @NonNull String localDc, Set contactPoints) { - Set badContactPoints = new LinkedHashSet<>(); - for (Node node : contactPoints) { - if (!Objects.equals(localDc, node.getDatacenter())) { - badContactPoints.add(node); + if (profile.getName().equals(DriverExecutionProfile.DEFAULT_NAME)) { + Set badContactPoints = new LinkedHashSet<>(); + for (Node node : contactPoints) { + if (!Objects.equals(localDc, node.getDatacenter())) { + badContactPoints.add(node); + } + } + if (!badContactPoints.isEmpty()) { + LOG.warn( + "[{}] You specified {} as the local DC, but some contact points are from a different DC: {}; " + + "please provide the correct local DC, or check your contact points", + logPrefix, + localDc, + formatNodesAndDcs(badContactPoints)); } - } - if (!badContactPoints.isEmpty()) { - LOG.warn( - "[{}] You specified {} as the local DC, but some contact points are from a different DC: {}; " - + "please provide the correct local DC, or check your contact points", - logPrefix, - localDc, - formatNodesAndDcs(badContactPoints)); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java new file mode 100644 index 00000000000..d9cf67fb7c0 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArraySet; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class DcAgnosticNodeSet implements NodeSet { + + @VisibleForTesting final Set nodes = new CopyOnWriteArraySet<>(); + + @Override + public boolean add(@NonNull Node node) { + return nodes.add(node); + } + + @Override + public boolean remove(@NonNull Node node) { + return nodes.remove(node); + } + + @Override + @NonNull + public Set dc(@Nullable String dc) { + return nodes; + } + + @Override + public Set dcs() { + return Collections.emptySet(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java new file mode 100644 index 00000000000..5c3d425ba69 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java @@ -0,0 +1,93 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArraySet; +import java.util.concurrent.atomic.AtomicBoolean; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class MultiDcNodeSet implements NodeSet { + + private static final String UNKNOWN_DC = ""; + + @VisibleForTesting final Map> nodes = new ConcurrentHashMap<>(); + + @Override + public boolean add(@NonNull Node node) { + AtomicBoolean added = new AtomicBoolean(); + nodes.compute( + getMapKey(node), + (key, current) -> { + if (current == null) { + // We use CopyOnWriteArraySet because we need + // 1) to preserve insertion order, and + // 2) a "snapshot"-style toArray() implementation + current = new CopyOnWriteArraySet<>(); + } + if (current.add(node)) { + added.set(true); + } + return current; + }); + return added.get(); + } + + @Override + public boolean remove(@NonNull Node node) { + AtomicBoolean removed = new AtomicBoolean(); + nodes.compute( + getMapKey(node), + (key, current) -> { + if (current != null) { + if 
(current.remove(node)) { + removed.set(true); + } + } + return current; + }); + return removed.get(); + } + + @Override + @NonNull + public Set dc(@Nullable String dc) { + return nodes.getOrDefault(getMapKey(dc), Collections.emptySet()); + } + + @Override + public Set dcs() { + return nodes.keySet(); + } + + @NonNull + private String getMapKey(@NonNull Node node) { + return getMapKey(node.getDatacenter()); + } + + @NonNull + private String getMapKey(@Nullable String dc) { + return dc == null ? UNKNOWN_DC : dc; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/NodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/NodeSet.java new file mode 100644 index 00000000000..2b66642d198 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/NodeSet.java @@ -0,0 +1,69 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Set; +import net.jcip.annotations.ThreadSafe; + +/** + * A thread-safe abstraction around a map of nodes per datacenter, to facilitate node management by + * load balancing policies. 
+ */ +@ThreadSafe +public interface NodeSet { + + /** + * Adds the given node to this set. + * + *

          If this set was initialized with datacenter awareness, the node will be added to its + * datacenter's specific set; otherwise, the node is added to a general set containing all nodes + * in the cluster. + * + * @param node The node to add. + * @return true if the node was added, false otherwise (because it was already present). + */ + boolean add(@NonNull Node node); + + /** + * Removes the node from the set. + * + * @param node The node to remove. + * @return true if the node was removed, false otherwise (because it was not present). + */ + boolean remove(@NonNull Node node); + + /** + * Returns the current nodes in the given datacenter. + * + *

          If this set was initialized with datacenter awareness, the returned set will contain only + * nodes pertaining to the given datacenter; otherwise, the given datacenter name is ignored and + * the returned set will contain all nodes in the cluster. + * + * @param dc The datacenter name, or null if the datacenter name is not known, or irrelevant. + * @return the current nodes in the given datacenter. + */ + @NonNull + Set dc(@Nullable String dc); + + /** + * Returns the current datacenter names known to this set. If datacenter awareness has been + * disabled, this method returns an empty set. + */ + Set dcs(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java new file mode 100644 index 00000000000..e638913edfd --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArraySet; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class SingleDcNodeSet implements NodeSet { + + @VisibleForTesting final Set nodes = new CopyOnWriteArraySet<>(); + + private final String dc; + private final Set dcs; + + public SingleDcNodeSet(@NonNull String dc) { + this.dc = dc; + dcs = ImmutableSet.of(dc); + } + + @Override + public boolean add(@NonNull Node node) { + if (Objects.equals(node.getDatacenter(), dc)) { + return nodes.add(node); + } + return false; + } + + @Override + public boolean remove(@NonNull Node node) { + if (Objects.equals(node.getDatacenter(), dc)) { + return nodes.remove(node); + } + return false; + } + + @Override + @NonNull + public Set dc(@Nullable String dc) { + if (Objects.equals(this.dc, dc)) { + return nodes; + } + return Collections.emptySet(); + } + + @Override + public Set dcs() { + return dcs; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlan.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlan.java new file mode 100644 index 00000000000..3992e14a4d7 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlan.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.AbstractQueue; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.atomic.AtomicInteger; +import net.jcip.annotations.ThreadSafe; + +/** A query plan that encompasses many child plans, and consumes them one by one. */ +@ThreadSafe +public class CompositeQueryPlan extends AbstractQueue implements QueryPlan { + + private final Queue[] plans; + private final AtomicInteger currentPlan = new AtomicInteger(0); + + @SafeVarargs + public CompositeQueryPlan(@NonNull Queue... 
plans) { + if (plans.length == 0) { + throw new IllegalArgumentException("at least one child plan must be provided"); + } + for (Queue plan : plans) { + if (plan == null) { + throw new NullPointerException("child plan cannot be null"); + } + } + this.plans = plans; + } + + @Nullable + @Override + public Node poll() { + while (true) { + int current = currentPlan.get(); + Queue plan = plans[current]; + Node n = plan.poll(); + if (n != null) { + return n; + } + int next = current + 1; + if (next == plans.length) { + return null; + } + currentPlan.compareAndSet(current, next); + } + } + + @NonNull + @Override + public Iterator iterator() { + List> its = new ArrayList<>(plans.length); + for (Queue plan : plans) { + its.add(plan.iterator()); + } + return Iterators.concat(its.iterator()); + } + + @Override + public int size() { + int size = 0; + for (Queue plan : plans) { + size += plan.size(); + } + return size; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/EmptyQueryPlan.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/EmptyQueryPlan.java new file mode 100644 index 00000000000..8149084480e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/EmptyQueryPlan.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.AbstractQueue; +import java.util.Collections; +import java.util.Iterator; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +class EmptyQueryPlan extends AbstractQueue implements QueryPlan { + + @Override + public Node poll() { + return null; + } + + @NonNull + @Override + public Iterator iterator() { + return Collections.emptyIterator(); + } + + @Override + public int size() { + return 0; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlan.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlan.java new file mode 100644 index 00000000000..cbbe6fb9d0c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlan.java @@ -0,0 +1,56 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import net.jcip.annotations.ThreadSafe; + +/** + * A query plan where nodes are computed lazily, when the plan is consumed for the first time. + * + *

          This class can be useful when a query plan computation is heavy but the plan has a low chance + * of ever being consumed, e.g. the last query plan in a {@link CompositeQueryPlan}. + */ +@ThreadSafe +public abstract class LazyQueryPlan extends QueryPlanBase { + + private volatile Object[] nodes; + + /** + * Computes and returns the nodes to use for this query plan. + * + *

          For efficiency, the declared return type is {@code Object[]} but all elements must be + * instances of {@link Node}. See {@link #getNodes()} for details. + * + *

          This method is guaranteed to be invoked only once, at the first call to {@link #poll()}. + * + *

          Implementors must avoid blocking calls in this method as it will be invoked on the driver's + * hot path. + */ + protected abstract Object[] computeNodes(); + + @Override + protected Object[] getNodes() { + if (nodes == null) { + synchronized (this) { + if (nodes == null) { + nodes = computeNodes(); + } + } + } + return nodes; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlan.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlan.java index dfe2a45757f..858b856210d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlan.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlan.java @@ -15,98 +15,76 @@ */ package com.datastax.oss.driver.internal.core.util.collection; -import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; -import com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy; -import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.AbstractCollection; -import java.util.Arrays; -import java.util.Collections; import java.util.Iterator; import java.util.Queue; -import java.util.concurrent.atomic.AtomicInteger; import net.jcip.annotations.ThreadSafe; /** - * A specialized, thread-safe queue implementation for {@link - * LoadBalancingPolicy#newQueryPlan(Request, Session)}. + * A specialized, thread-safe node queue for use when creating {@linkplain + * com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy#newQueryPlan(Request, Session) + * query plans}. * - *

          All nodes must be provided at construction time. After that, the only valid mutation operation - * is {@link #poll()}, other methods throw. + *

          This interface and its built-in implementations are not general-purpose queues; they are + * tailored for the specific use case of creating query plans in the driver. They make a few + * unconventional API choices for the sake of performance. * - *

          This class is not a general-purpose implementation, it is tailored for a specific use case in - * the driver. It makes a few unconventional API choices for the sake of performance (see {@link - * #QueryPlan(Object...)}. It can be reused for custom load balancing policies; if you plan to do - * so, study the source code of {@link DefaultLoadBalancingPolicy}. + *

          Furthermore, the driver only consumes query plans through calls to its {@link #poll()} method; + * therefore, this method is the only valid mutation operation for a query plan, other mutating + * methods throw. + * + *

          Both {@link #size()} and {@link #iterator()} are supported and never throw, even if called + * concurrently. These methods are implemented for reporting purposes only, the driver itself does + * not use them. + * + *

          All built-in {@link QueryPlan} implementations can be safely reused for custom load balancing + * policies; if you plan to do so, study the source code of {@link + * com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy} or {@link + * com.datastax.oss.driver.internal.core.loadbalancing.BasicLoadBalancingPolicy}. + * + * @see QueryPlanBase */ @ThreadSafe -public class QueryPlan extends AbstractCollection implements Queue { - - private final Object[] nodes; - private final AtomicInteger nextIndex = new AtomicInteger(); - - /** - * @param nodes the nodes to initially fill the queue with. For efficiency, there is no defensive - * copy, the provided array is used directly. The declared type is {@code Object[]} because of - * implementation details of {@link DefaultLoadBalancingPolicy}, but all elements must be - * instances of {@link Node}, otherwise instance methods will fail later. - */ - public QueryPlan(@NonNull Object... nodes) { - this.nodes = nodes; - } +public interface QueryPlan extends Queue { - @Nullable - @Override - public Node poll() { - // We don't handle overflow. In practice it won't be an issue, since the driver stops polling - // once the query plan is empty. - int i = nextIndex.getAndIncrement(); - return (i >= nodes.length) ? null : (Node) nodes[i]; - } + QueryPlan EMPTY = new EmptyQueryPlan(); /** * {@inheritDoc} * - *

          The returned iterator reflects the state of the queue at the time of the call, and is not - * affected by further modifications. + *

          Implementation note: query plan iterators are snapshots that reflect the contents of the + * queue at the time of the call, and are not affected by further modifications. Successive calls + * to this method will return different objects. */ @NonNull @Override - public Iterator iterator() { - int i = nextIndex.get(); - if (i >= nodes.length) { - return Collections.emptyList().iterator(); - } else { - return Iterators.forArray(Arrays.copyOfRange(nodes, i, nodes.length, Node[].class)); - } - } + Iterator iterator(); @Override - public int size() { - return Math.max(nodes.length - nextIndex.get(), 0); + default boolean offer(Node node) { + throw new UnsupportedOperationException("Not implemented"); } @Override - public boolean offer(Node node) { + default Node peek() { throw new UnsupportedOperationException("Not implemented"); } @Override - public Node remove() { + default boolean add(Node node) { throw new UnsupportedOperationException("Not implemented"); } @Override - public Node element() { + default Node remove() { throw new UnsupportedOperationException("Not implemented"); } @Override - public Node peek() { + default Node element() { throw new UnsupportedOperationException("Not implemented"); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanBase.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanBase.java new file mode 100644 index 00000000000..a82a450eb9f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanBase.java @@ -0,0 +1,73 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.AbstractQueue; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.concurrent.atomic.AtomicInteger; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public abstract class QueryPlanBase extends AbstractQueue implements QueryPlan { + + private final AtomicInteger nextIndex = new AtomicInteger(); + + /** + * Returns the nodes in this query plan; the returned array should stay the same across + * invocations. + * + *

          The declared return type is {@code Object[]} because of implementation details of {@link + * com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy + * DefaultLoadBalancingPolicy} and {@link + * com.datastax.oss.driver.internal.core.loadbalancing.BasicLoadBalancingPolicy + * BasicLoadBalancingPolicy}, but all elements must be instances of {@link Node}, otherwise + * instance methods will fail later. + */ + protected abstract Object[] getNodes(); + + @Nullable + @Override + public Node poll() { + // We don't handle overflow. In practice it won't be an issue, since the driver stops polling + // once the query plan is empty. + int i = nextIndex.getAndIncrement(); + Object[] nodes = getNodes(); + return (i >= nodes.length) ? null : (Node) nodes[i]; + } + + @NonNull + @Override + public Iterator iterator() { + int i = nextIndex.get(); + Object[] nodes = getNodes(); + if (i >= nodes.length) { + return Collections.emptyIterator(); + } else { + return Iterators.forArray(Arrays.copyOfRange(nodes, i, nodes.length, Node[].class)); + } + } + + @Override + public int size() { + return Math.max(getNodes().length - nextIndex.get(), 0); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlan.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlan.java new file mode 100644 index 00000000000..0130744de51 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlan.java @@ -0,0 +1,45 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.ThreadSafe; + +/** Query plan where nodes must be provided at construction time. */ +@ThreadSafe +public class SimpleQueryPlan extends QueryPlanBase { + + private final Object[] nodes; + + /** + * Creates a new query plan with the given nodes. + * + *

          For efficiency, there is no defensive copy, the provided array is used directly. The + * declared type is {@code Object[]} but all elements must be instances of {@link Node}. See + * {@link #getNodes()} for details. + * + * @param nodes the nodes to initially fill the queue with. + */ + public SimpleQueryPlan(@NonNull Object... nodes) { + this.nodes = nodes; + } + + @Override + protected Object[] getNodes() { + return nodes; + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index e4090ae7481..23e3fc2d14e 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -155,7 +155,25 @@ datastax-java-driver { # - com.datastax.oss.driver.internal.core.loadbalancing. # - com.datastax.dse.driver.internal.core.loadbalancing. # - # The driver provides a single implementation out of the box: DefaultLoadBalancingPolicy. + # The driver provides three implementations out of the box: + # + # - `DefaultLoadBalancingPolicy`: should almost always be used; it requires a local datacenter + # to be specified either programmatically when creating the session, or via the configuration + # option: datastax-java-driver.basic.load-balancing-policy.local-datacenter. It can also + # use a highly efficient slow replica avoidance mechanism, which is by default enabled – see + # the option: datastax-java-driver.basic.load-balancing-policy.slow-replica-avoidance. + # - `DcInferringLoadBalancingPolicy`: similar to `DefaultLoadBalancingPolicy`, but does not + # require a local datacenter to be defined, in which case it will attempt to infer the local + # datacenter from the provided contact points, if possible; if that fails, it will throw an + # error during session initialization. This policy is intended mostly for ETL tools and + # should not be used by normal applications. 
+ # - `BasicLoadBalancingPolicy`: similar to `DefaultLoadBalancingPolicy`, but does not have + # the slow replica avoidance mechanism. More importantly, it is the only policy capable of + # operating without local datacenter defined, in which case it will consider nodes in the + # cluster in a datacenter-agnostic way. Beware that this could cause spikes in + # cross-datacenter traffic! This policy is provided mostly as a starting point for users + # wishing to implement their own load balancing policy; it should not be used as is in normal + # applications. # # You can also specify a custom class that implements LoadBalancingPolicy and has a public # constructor with two arguments: the DriverContext and a String representing the profile name. @@ -390,25 +408,42 @@ datastax-java-driver { set-keyspace-timeout = ${datastax-java-driver.advanced.connection.init-query-timeout} # The driver maintains a connection pool to each node, according to the distance assigned to it - # by the load balancing policy. If the distance is IGNORED, no connections are maintained. + # by the load balancing policy. + # If the distance is LOCAL, then local.size connections are opened; if the distance is REMOTE, + # then remote.size connections are opened. If the distance is IGNORED, no connections at all + # are maintained. pool { - local { - # The number of connections in the pool. - # - # Each connection can handle many concurrent requests, so 1 is generally a good place to - # start. You should only need higher values in very high performance scenarios, where - # connections might start maxing out their I/O thread (see the driver's online manual for - # more tuning instructions). - # - # Required: yes - # Modifiable at runtime: yes; when the change is detected, all active pools will be notified - # and will adjust their size. 
- # Overridable in a profile: no - size = 1 - } - remote { - size = 1 - } + # The number of connections in the pool for a node whose distance is LOCAL, that is, a node + # that belongs to the local datacenter, as inferred by the load balancing or defined by the + # option: datastax-java-driver.basic.load-balancing-policy.local-datacenter. + # + # Each connection can handle many concurrent requests, so 1 is generally a good place to + # start. You should only need higher values in very high performance scenarios, where + # connections might start maxing out their I/O thread (see the driver's online manual for + # more tuning instructions). + # + # Required: yes + # Modifiable at runtime: yes; when the change is detected, all active pools will be notified + # and will adjust their size. + # Overridable in a profile: no + local.size = 1 + + # The number of connections in the pool for a node whose distance is REMOTE, that is, a node + # that does not belong to the local datacenter. + # + # Note: by default, the built-in load-balancing policies will never assign the REMOTE distance + # to any node, to avoid cross-datacenter network traffic. If you want to change this behavior + # and understand the consequences, configure your policy to accept nodes in remote + # datacenters by adjusting the following advanced options: + # + # - datastax-java-driver.advanced.load-balancing-policy.dc-failover.max-nodes-per-remote-dc + # - datastax-java-driver.advanced.load-balancing-policy.dc-failover.allow-for-local-consistency-levels + # + # Required: yes + # Modifiable at runtime: yes; when the change is detected, all active pools will be notified + # and will adjust their size. + # Overridable in a profile: no + remote.size = 1 } # The maximum number of requests that can be executed concurrently on a connection. This must be @@ -464,6 +499,46 @@ datastax-java-driver { warn-on-init-error = true } + # Advanced options for the built-in load-balancing policies. 
+ advanced.load-balancing-policy { + # Cross-datacenter failover configuration: configure the load-balancing policies to use nodes + # in remote datacenters. + dc-failover { + # The maximum number of nodes to contact in each remote datacenter. + # + # By default, this number is zero, to avoid cross-datacenter network traffic. When this + # number is greater than zero: + # + # - The load policies will assign the REMOTE distance to that many nodes in each remote + # datacenter. + # - The driver will then attempt to open connections to those nodes. The actual number of + # connections to open to each one of those nodes is configurable via the option: + # datastax-java-driver.advanced.connection.pool.remote.size. + # - The load-balancing policies will include those remote nodes (and only those) in query + # plans, effectively enabling cross-datacenter failover. + # + # Beware that enabling such failover can result in cross-datacenter network traffic spikes, + # if the local datacenter is down or experiencing high latencies! + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: yes + max-nodes-per-remote-dc = 0 + + # Whether cross-datacenter failover should be allowed for requests executed with local + # consistency levels (LOCAL_ONE, LOCAL_QUORUM and LOCAL_SERIAL). + # + # This is disabled by default. Enabling this feature may have unexpected results, since a + # local consistency level may have different semantics depending on the replication factor in + # use in each datacenter. + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: yes + allow-for-local-consistency-levels = false + } + } + # Whether to schedule reconnection attempts if all contact points are unreachable on the first # initialization attempt. 
# diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcAgnosticTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcAgnosticTest.java new file mode 100644 index 00000000000..3004ea36931 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcAgnosticTest.java @@ -0,0 +1,56 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Optional; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class BasicLoadBalancingPolicyDcAgnosticTest extends BasicLoadBalancingPolicyQueryPlanTest { + + @Before + @Override + public void setup() { + super.setup(); + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); + when(metadataManager.getMetadata()).thenReturn(metadata); + when(metadata.getTokenMap()).thenAnswer(invocation -> Optional.of(this.tokenMap)); + + // since there is no local datacenter defined, the policy should behave with DC awareness + // disabled and pick nodes regardless of their datacenters; we therefore expect all tests of + // BasicLoadBalancingPolicyQueryPlanTest to pass even with the below DC distribution. 
+ when(node1.getDatacenter()).thenReturn("dc1"); + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(node4.getDatacenter()).thenReturn("dc4"); + when(node5.getDatacenter()).thenReturn(null); + + policy = createAndInitPolicy(); + + assertThat(policy.getLiveNodes().dc(null)).containsExactly(node1, node2, node3, node4, node5); + assertThat(policy.getLiveNodes().dcs()).isEmpty(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcFailoverTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcFailoverTest.java new file mode 100644 index 00000000000..9ad2f8503a0 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDcFailoverTest.java @@ -0,0 +1,153 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Map; +import java.util.UUID; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class BasicLoadBalancingPolicyDcFailoverTest extends BasicLoadBalancingPolicyQueryPlanTest { + + @Mock protected DefaultNode node6; + @Mock protected DefaultNode node7; + @Mock protected DefaultNode node8; + @Mock protected DefaultNode node9; + + @Test + @Override + public void should_prioritize_single_replica() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3)); + + // node3 always first, round-robin on the rest, then remote nodes + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node1, node2, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + 
.containsExactly(node3, node2, node1, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node1, node2, node4, node5, node7, node8); + + // Should not shuffle replicas since there is only one + verify(policy, never()).shuffleHead(any(), eq(1)); + // But should shuffle remote nodes + verify(policy, times(3)).shuffleHead(any(), eq(4)); + } + + @Test + @Override + public void should_prioritize_and_shuffle_replicas() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .thenReturn(ImmutableSet.of(node2, node3, node5, node8)); + + // node 5 and 8 being in a remote DC, they don't get a boost for being a replica + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node2, node3, node1, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node2, node3, node1, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node2, node3, node1, node4, node5, node7, node8); + + // should shuffle replicas + verify(policy, times(3)).shuffleHead(any(), eq(2)); + // should shuffle remote nodes + verify(policy, times(3)).shuffleHead(any(), eq(4)); + // No power of two choices with only two replicas + verify(session, never()).getPools(); + } + + @Override + protected void assertRoundRobinQueryPlans() { + // nodes 4 to 9 being in a remote DC, they always appear after nodes 1, 2, 3 + for (int i = 0; i < 3; i++) { + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node1, node2, node3, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node2, node3, node1, node4, node5, node7, node8); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly(node3, node1, node2, node4, node5, node7, node8); + } + // should shuffle remote nodes + 
verify(policy, atLeast(1)).shuffleHead(any(), eq(4)); + } + + @Override + protected BasicLoadBalancingPolicy createAndInitPolicy() { + when(node4.getDatacenter()).thenReturn("dc2"); + when(node5.getDatacenter()).thenReturn("dc2"); + when(node6.getDatacenter()).thenReturn("dc2"); + when(node7.getDatacenter()).thenReturn("dc3"); + when(node8.getDatacenter()).thenReturn("dc3"); + when(node9.getDatacenter()).thenReturn("dc3"); + // Accept 2 nodes per remote DC + when(defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(2); + when(defaultProfile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(false); + // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + BasicLoadBalancingPolicy policy = + spy( + new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] currentNodes, int headLength) { + // nothing (keep in same order) + } + }); + Map nodes = + ImmutableMap.builder() + .put(UUID.randomUUID(), node1) + .put(UUID.randomUUID(), node2) + .put(UUID.randomUUID(), node3) + .put(UUID.randomUUID(), node4) + .put(UUID.randomUUID(), node5) + .put(UUID.randomUUID(), node6) + .put(UUID.randomUUID(), node7) + .put(UUID.randomUUID(), node8) + .put(UUID.randomUUID(), node9) + .build(); + policy.init(nodes, distanceReporter); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node4, node5); // only 2 allowed + assertThat(policy.getLiveNodes().dc("dc3")).containsExactly(node7, node8); // only 2 allowed + return policy; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java new file mode 100644 index 00000000000..762720ac014 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java @@ -0,0 +1,220 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.UUID; +import java.util.function.Predicate; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) 
+@RunWith(MockitoJUnitRunner.Silent.class) +public class BasicLoadBalancingPolicyDistanceTest extends LoadBalancingPolicyTestBase { + + @Mock private Predicate filter; + + private ImmutableMap nodes; + + @Before + @Override + public void setup() { + super.setup(); + when(filter.test(node1)).thenReturn(true); + when(filter.test(node2)).thenReturn(true); + when(filter.test(node3)).thenReturn(true); + when(filter.test(node4)).thenReturn(true); + when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(filter); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2, node3)); + nodes = + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3); + } + + @Test + public void should_report_IGNORED_when_excluded_by_filter() { + // Given + given(filter.test(node1)).willReturn(false); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node2, node3); + } + + @Test + public void should_report_LOCAL_when_dc_agnostic() { + // Given + given(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .willReturn(false); + given(node1.getDatacenter()).willReturn(null); + given(node2.getDatacenter()).willReturn("dc1"); + given(node3.getDatacenter()).willReturn("dc2"); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes().dc(null)).containsExactly(node1, node2, node3); + } + + @Test + public void should_report_LOCAL_when_node_in_local_dc() { + // Given + BasicLoadBalancingPolicy policy = createPolicy(); 
+ // When + policy.init(nodes, distanceReporter); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); + } + + @Test + public void should_report_IGNORED_when_node_not_in_local_dc() { + // Given + given(node1.getDatacenter()).willReturn(null); + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc3"); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + // Then + // Note: driver 3 would have reported LOCAL for node1 since its datacenter is null + verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes().dc(null)).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc1")).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc2")).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc3")).isEmpty(); + } + + @Test + public void should_report_REMOTE_when_node_not_in_local_dc_and_dc_failover_enabled() { + // Given + given(node1.getDatacenter()).willReturn("dc2"); + given(node2.getDatacenter()).willReturn("dc3"); + given(node3.getDatacenter()).willReturn("dc4"); + given( + defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .willReturn(1); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node2, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node3, NodeDistance.REMOTE); + assertThat(policy.getLiveNodes().dc("dc1")).isEmpty(); + 
assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node1); + assertThat(policy.getLiveNodes().dc("dc3")).containsExactly(node2); + assertThat(policy.getLiveNodes().dc("dc4")).containsExactly(node3); + } + + @Test + public void should_report_IGNORED_when_node_not_in_local_dc_and_too_many_nodes_for_dc_failover() { + // Given + given(node1.getDatacenter()).willReturn("dc2"); + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc2"); + given( + defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .willReturn(2); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node2, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes().dc("dc1")).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node1, node2); + } + + @Test + public void should_report_REMOTE_when_remote_node_up_and_dc_failover() { + // Given + given(node1.getDatacenter()).willReturn("dc2"); + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc2"); + given(node4.getDatacenter()).willReturn("dc2"); + given( + defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .willReturn(4); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + policy.onUp(node4); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node2, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node3, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node4, NodeDistance.REMOTE); + assertThat(policy.getLiveNodes().dc("dc1")).isEmpty(); + 
assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node1, node2, node3, node4); + } + + @Test + public void should_report_IGNORED_when_remote_node_up_and_too_many_nodes_for_dc_failover() { + // Given + given(node1.getDatacenter()).willReturn("dc2"); + given(node2.getDatacenter()).willReturn("dc2"); + given(node3.getDatacenter()).willReturn("dc2"); + given(node4.getDatacenter()).willReturn("dc2"); + given( + defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .willReturn(3); + BasicLoadBalancingPolicy policy = createPolicy(); + // When + policy.init(nodes, distanceReporter); + policy.onUp(node4); + // Then + verify(distanceReporter).setDistance(node1, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node2, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node3, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node4, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes().dc("dc1")).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node1, node2, node3); + } + + @NonNull + protected BasicLoadBalancingPolicy createPolicy() { + return new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java index f8dee137d64..6640b2b946c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java @@ -39,7 +39,7 @@ // TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) @RunWith(MockitoJUnitRunner.Silent.class) -public class BasicLoadBalancingPolicyEventsTest extends 
DefaultLoadBalancingPolicyTestBase { +public class BasicLoadBalancingPolicyEventsTest extends LoadBalancingPolicyTestBase { @Mock private Predicate filter; @@ -62,7 +62,7 @@ public void should_remove_down_node_from_live_set() { policy.onDown(node2); // Then - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); // should have been called only once, during initialization, but not during onDown verify(filter).test(node2); @@ -74,7 +74,7 @@ public void should_remove_removed_node_from_live_set() { policy.onRemove(node2); // Then - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); // should have been called only once, during initialization, but not during onRemove verify(filter).test(node2); @@ -89,7 +89,7 @@ public void should_set_added_node_to_local() { verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); verify(filter).test(node3); // Not added to the live set yet, we're waiting for the pool to open - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); } @Test @@ -102,7 +102,7 @@ public void should_ignore_added_node_when_filtered() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); } @Test @@ -115,7 +115,8 @@ public void should_ignore_added_node_when_remote_dc() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + 
assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); + assertThat(policy.getLiveNodes().dc("dc2")).isEmpty(); } @Test @@ -126,7 +127,7 @@ public void should_add_up_node_to_live_set() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); verify(filter).test(node3); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2, node3); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); } @Test @@ -140,7 +141,7 @@ public void should_ignore_up_node_when_filtered() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); verify(filter).test(node3); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); } @Test @@ -153,7 +154,8 @@ public void should_ignore_up_node_when_remote_dc() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); + assertThat(policy.getLiveNodes().dc("dc2")).isEmpty(); } @NonNull @@ -162,7 +164,7 @@ protected BasicLoadBalancingPolicy createAndInitPolicy() { new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.liveNodes).containsExactlyInAnyOrder(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); return policy; } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java index b4bca1638a6..56caff5c0aa 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java @@ -19,7 +19,6 @@ import static org.assertj.core.api.Assertions.filter; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.never; -import static org.mockito.Mockito.reset; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -39,14 +38,7 @@ // TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) @RunWith(MockitoJUnitRunner.Silent.class) -public class BasicLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { - - @Override - public void setup() { - super.setup(); - reset(defaultProfile); - when(defaultProfile.getName()).thenReturn(DriverExecutionProfile.DEFAULT_NAME); - } +public class BasicLoadBalancingPolicyInitTest extends LoadBalancingPolicyTestBase { @Test public void should_use_local_dc_if_provided_via_config() { @@ -63,7 +55,7 @@ public void should_use_local_dc_if_provided_via_config() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); } @Test @@ -79,7 +71,7 @@ public void should_use_local_dc_if_provided_via_context() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); verify(defaultProfile, never()) .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); } @@ -89,15 +81,22 @@ public void should_not_infer_local_dc_if_not_provided() { // Given when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) .thenReturn(false); + when(node1.getDatacenter()).thenReturn("dc1"); + 
when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); BasicLoadBalancingPolicy policy = new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME) {}; // When policy.init( - ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); + ImmutableMap.of( + UUID.randomUUID(), node1, UUID.randomUUID(), node2, UUID.randomUUID(), node3), + distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).isEmpty(); + assertThat(policy.getLocalDatacenter()).isNull(); + // should not warn about contact points not being in the same DC + verify(appender, never()).doAppend(loggingEventCaptor.capture()); } @Test @@ -148,13 +147,12 @@ public void should_include_nodes_from_local_dc_if_local_dc_set() { verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); // But only include UP or UNKNOWN nodes in the live set - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node3); } @Test public void should_ignore_nodes_from_remote_dcs_if_local_dc_set() { // Given - when(context.getLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn("dc1"); when(node2.getDatacenter()).thenReturn("dc2"); when(node3.getDatacenter()).thenReturn("dc3"); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); @@ -170,12 +168,17 @@ public void should_ignore_nodes_from_remote_dcs_if_local_dc_set() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @Test public void should_not_ignore_nodes_from_remote_dcs_if_local_dc_not_set() { // 
Given + when(defaultProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(false); + when(node2.getDatacenter()).thenReturn("dc2"); + when(node3.getDatacenter()).thenReturn("dc3"); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); BasicLoadBalancingPolicy policy = createPolicy(); // When @@ -188,7 +191,7 @@ public void should_not_ignore_nodes_from_remote_dcs_if_local_dc_not_set() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node2, node3); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); } @Test @@ -209,7 +212,7 @@ public void should_ignore_nodes_excluded_by_filter() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java index f5c68e79c2b..518203e57c7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyQueryPlanTest.java @@ -31,7 +31,6 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.context.DriverContext; import 
com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.TokenMap; import com.datastax.oss.driver.api.core.metadata.token.Token; @@ -52,7 +51,7 @@ // TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) @RunWith(MockitoJUnitRunner.Silent.class) -public class BasicLoadBalancingPolicyQueryPlanTest extends DefaultLoadBalancingPolicyTestBase { +public class BasicLoadBalancingPolicyQueryPlanTest extends LoadBalancingPolicyTestBase { protected static final CqlIdentifier KEYSPACE = CqlIdentifier.fromInternal("ks"); protected static final ByteBuffer ROUTING_KEY = Bytes.fromHexString("0xdeadbeef"); @@ -74,10 +73,6 @@ public void setup() { when(metadata.getTokenMap()).thenAnswer(invocation -> Optional.of(this.tokenMap)); policy = createAndInitPolicy(); - - // Note: this test relies on the fact that the policy uses a CopyOnWriteArraySet which preserves - // insertion order. - assertThat(policy.liveNodes).containsExactly(node1, node2, node3, node4, node5); } @Test @@ -186,7 +181,7 @@ public void should_use_round_robin_when_token_map_absent() { } @Test - public void should_round_robin_and_log_error_when_request_throws() { + public void should_use_round_robin_and_log_error_when_request_throws() { // Given given(request.getKeyspace()).willThrow(new NullPointerException()); // When @@ -197,7 +192,7 @@ public void should_round_robin_and_log_error_when_request_throws() { .contains("Unexpected error while trying to compute query plan"); } - private void assertRoundRobinQueryPlans() { + protected void assertRoundRobinQueryPlans() { for (int i = 0; i < 3; i++) { assertThat(policy.newQueryPlan(request, session)) .containsExactly(node1, node2, node3, node4, node5); @@ -253,8 +248,14 @@ public void should_prioritize_and_shuffle_replicas() { protected BasicLoadBalancingPolicy createAndInitPolicy() { // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was // called 
(makes tests easier) - NonShufflingBasicLoadBalancingPolicy policy = - spy(new NonShufflingBasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME)); + BasicLoadBalancingPolicy policy = + spy( + new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] currentNodes, int headLength) { + // nothing (keep in same order) + } + }); policy.init( ImmutableMap.of( UUID.randomUUID(), node1, @@ -263,17 +264,7 @@ protected BasicLoadBalancingPolicy createAndInitPolicy() { UUID.randomUUID(), node4, UUID.randomUUID(), node5), distanceReporter); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3, node4, node5); return policy; } - - static class NonShufflingBasicLoadBalancingPolicy extends BasicLoadBalancingPolicy { - NonShufflingBasicLoadBalancingPolicy(DriverContext context, String profileName) { - super(context, profileName); - } - - @Override - protected void shuffleHead(Object[] currentNodes, int replicaCount) { - // nothing (keep in same order) - } - } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java new file mode 100644 index 00000000000..38faf7d1beb --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java @@ -0,0 +1,87 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.Map; +import java.util.UUID; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class DcInferringLoadBalancingPolicyDcFailoverTest + extends BasicLoadBalancingPolicyDcFailoverTest { + + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + protected DcInferringLoadBalancingPolicy createAndInitPolicy() { + when(node4.getDatacenter()).thenReturn("dc2"); + when(node5.getDatacenter()).thenReturn("dc2"); + when(node6.getDatacenter()).thenReturn("dc2"); + when(node7.getDatacenter()).thenReturn("dc3"); + when(node8.getDatacenter()).thenReturn("dc3"); + when(node9.getDatacenter()).thenReturn("dc3"); + // 
Accept 2 nodes per remote DC + when(defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(2); + when(defaultProfile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(false); + // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + DcInferringLoadBalancingPolicy policy = + spy( + new DcInferringLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] array, int n) {} + }); + Map nodes = + ImmutableMap.builder() + .put(UUID.randomUUID(), node1) + .put(UUID.randomUUID(), node2) + .put(UUID.randomUUID(), node3) + .put(UUID.randomUUID(), node4) + .put(UUID.randomUUID(), node5) + .put(UUID.randomUUID(), node6) + .put(UUID.randomUUID(), node7) + .put(UUID.randomUUID(), node8) + .put(UUID.randomUUID(), node9) + .build(); + policy.init(nodes, distanceReporter); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node4, node5); // only 2 allowed + assertThat(policy.getLiveNodes().dc("dc3")).containsExactly(node7, node8); // only 2 allowed + return policy; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java new file mode 100644 index 00000000000..6d644edcf2a --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.mockito.BDDMockito.given; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class DcInferringLoadBalancingPolicyDistanceTest + extends BasicLoadBalancingPolicyDistanceTest { + + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + public void should_report_LOCAL_when_dc_agnostic() { + // This policy cannot operate when contact points are from different DCs + Throwable error = catchThrowable(super::should_report_LOCAL_when_dc_agnostic); + assertThat(error) + .isInstanceOfSatisfying( + IllegalStateException.class, + ise -> + assertThat(ise) + .hasMessageContaining( + "No local DC was provided, but the contact points are from different DCs") + .hasMessageContaining("node1=null") + .hasMessageContaining("node2=dc1") + .hasMessageContaining("node3=dc2")); + } + + @NonNull + @Override + protected BasicLoadBalancingPolicy 
createPolicy() { + return new DcInferringLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java index 7535e8c8fce..aa01ff08acf 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java @@ -42,7 +42,7 @@ protected BasicLoadBalancingPolicy createAndInitPolicy() { new DcInferringLoadBalancingPolicy(context, DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.getLiveNodes()).containsOnly(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsOnly(node1, node2); reset(distanceReporter); return policy; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java index c0355ea5198..b57f0050985 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -38,7 +38,7 @@ import org.junit.Before; import org.junit.Test; -public class DcInferringLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { +public class DcInferringLoadBalancingPolicyInitTest extends LoadBalancingPolicyTestBase { @Override @Before @@ -58,7 +58,7 @@ public void should_use_local_dc_if_provided_via_config() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), 
distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); } @Test @@ -74,7 +74,7 @@ public void should_use_local_dc_if_provided_via_context() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); verify(defaultProfile, never()) .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); } @@ -91,7 +91,7 @@ public void should_infer_local_dc_from_contact_points() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); } @Test @@ -190,7 +190,7 @@ public void should_include_nodes_from_local_dc() { verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); // But only include UP or UNKNOWN nodes in the live set - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node3); } @Test @@ -211,7 +211,7 @@ public void should_ignore_nodes_from_remote_dcs() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @Test @@ -233,7 +233,7 @@ public void should_ignore_nodes_excluded_by_filter() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - 
assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java index 1c2b8f09e67..f60ed95697e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyQueryPlanTest.java @@ -25,7 +25,7 @@ public class DcInferringLoadBalancingPolicyQueryPlanTest extends DefaultLoadBalancingPolicyQueryPlanTest { @Override - protected DefaultLoadBalancingPolicy createAndInitPolicy() { + protected DcInferringLoadBalancingPolicy createAndInitPolicy() { DcInferringLoadBalancingPolicy policy = spy( new DcInferringLoadBalancingPolicy(context, DEFAULT_NAME) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java new file mode 100644 index 00000000000..34302a196e7 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java @@ -0,0 +1,87 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.Map; +import java.util.UUID; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class DefaultLoadBalancingPolicyDcFailoverTest + extends BasicLoadBalancingPolicyDcFailoverTest { + + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + protected DefaultLoadBalancingPolicy createAndInitPolicy() { + when(node4.getDatacenter()).thenReturn("dc2"); + when(node5.getDatacenter()).thenReturn("dc2"); + when(node6.getDatacenter()).thenReturn("dc2"); + when(node7.getDatacenter()).thenReturn("dc3"); + when(node8.getDatacenter()).thenReturn("dc3"); + when(node9.getDatacenter()).thenReturn("dc3"); + // Accept 2 
nodes per remote DC + when(defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(2); + when(defaultProfile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(false); + // Use a subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + DefaultLoadBalancingPolicy policy = + spy( + new DefaultLoadBalancingPolicy(context, DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] array, int n) {} + }); + Map nodes = + ImmutableMap.builder() + .put(UUID.randomUUID(), node1) + .put(UUID.randomUUID(), node2) + .put(UUID.randomUUID(), node3) + .put(UUID.randomUUID(), node4) + .put(UUID.randomUUID(), node5) + .put(UUID.randomUUID(), node6) + .put(UUID.randomUUID(), node7) + .put(UUID.randomUUID(), node8) + .put(UUID.randomUUID(), node9) + .build(); + policy.init(nodes, distanceReporter); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node4, node5); // only 2 allowed + assertThat(policy.getLiveNodes().dc("dc3")).containsExactly(node7, node8); // only 2 allowed + return policy; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java new file mode 100644 index 00000000000..8db9d0d1019 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.mockito.BDDMockito.given; + +import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import edu.umd.cs.findbugs.annotations.NonNull; +import org.junit.Before; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +// TODO fix unnecessary stubbing of config option in parent class (and stop using "silent" runner) +@RunWith(MockitoJUnitRunner.Silent.class) +public class DefaultLoadBalancingPolicyDistanceTest extends BasicLoadBalancingPolicyDistanceTest { + + @Override + @Before + public void setup() { + given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); + super.setup(); + } + + @Override + public void should_report_LOCAL_when_dc_agnostic() { + // This policy cannot operate in dc-agnostic mode + Throwable error = catchThrowable(super::should_report_LOCAL_when_dc_agnostic); + assertThat(error) + .isInstanceOfSatisfying( + IllegalStateException.class, + ise -> + assertThat(ise) + .hasMessageContaining("the local DC must be explicitly set") + .hasMessageContaining("node1=null") + .hasMessageContaining("node2=dc1") + .hasMessageContaining("node3=dc2") + .hasMessageContaining("Current DCs in this cluster are: dc1, dc2")); + } + + @NonNull + @Override + protected BasicLoadBalancingPolicy 
createPolicy() { + return new DefaultLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java index 5c0f1b8c581..22f80b1f36d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java @@ -41,7 +41,7 @@ protected DefaultLoadBalancingPolicy createAndInitPolicy() { DefaultLoadBalancingPolicy policy = new DefaultLoadBalancingPolicy(context, DEFAULT_NAME); policy.init( ImmutableMap.of(UUID.randomUUID(), node1, UUID.randomUUID(), node2), distanceReporter); - assertThat(policy.getLiveNodes()).containsOnly(node1, node2); + assertThat(policy.getLiveNodes().dc("dc1")).containsOnly(node1, node2); reset(distanceReporter); return policy; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index 2c5cd8eb0fe..c6202c3432b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -38,7 +38,7 @@ import org.junit.Before; import org.junit.Test; -public class DefaultLoadBalancingPolicyInitTest extends DefaultLoadBalancingPolicyTestBase { +public class DefaultLoadBalancingPolicyInitTest extends LoadBalancingPolicyTestBase { @Override @Before @@ -58,7 +58,7 @@ public void should_use_local_dc_if_provided_via_config() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), 
distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); } @Test @@ -74,7 +74,7 @@ public void should_use_local_dc_if_provided_via_context() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); verify(defaultProfile, never()) .getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, null); } @@ -92,7 +92,7 @@ public void should_infer_local_dc_if_no_explicit_contact_points() { policy.init(ImmutableMap.of(UUID.randomUUID(), node1), distanceReporter); // Then - assertThat(policy.getLocalDatacenter()).contains("dc1"); + assertThat(policy.getLocalDatacenter()).isEqualTo("dc1"); } @Test @@ -158,7 +158,7 @@ public void should_include_nodes_from_local_dc() { verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); // But only include UP or UNKNOWN nodes in the live set - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1, node3); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node3); } @Test @@ -179,7 +179,9 @@ public void should_ignore_nodes_from_remote_dcs() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); + assertThat(policy.getLiveNodes().dc("dc2")).isEmpty(); + assertThat(policy.getLiveNodes().dc("dc3")).isEmpty(); } @Test @@ -201,7 +203,7 @@ public void should_ignore_nodes_excluded_by_filter() { verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); 
verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes()).containsExactlyInAnyOrder(node1); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java index b3e65fdd4f2..207a5b409b7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java @@ -78,9 +78,10 @@ public void setup() { given(metadata.getTokenMap()).willAnswer(invocation -> Optional.of(tokenMap)); super.setup(); dsePolicy = (DefaultLoadBalancingPolicy) policy; - // Note: tests in this class rely on the fact that the policy uses a CopyOnWriteArraySet which - // preserves insertion order, which is why we can use containsExactly() throughout this class. - assertThat(dsePolicy.getLiveNodes()).containsExactly(node1, node2, node3, node4, node5); + // Note: this assertion relies on the fact that policy.getLiveNodes() implementation preserves + // insertion order. 
+ assertThat(dsePolicy.getLiveNodes().dc("dc1")) + .containsExactly(node1, node2, node3, node4, node5); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java index 76517fc2c0c..f87f5b38f43 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java @@ -29,8 +29,7 @@ import org.junit.Test; import org.mockito.Mock; -public class DefaultLoadBalancingPolicyRequestTrackerTest - extends DefaultLoadBalancingPolicyTestBase { +public class DefaultLoadBalancingPolicyRequestTrackerTest extends LoadBalancingPolicyTestBase { @Mock Request request; @Mock DriverExecutionProfile profile; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/LoadBalancingPolicyTestBase.java similarity index 85% rename from core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/LoadBalancingPolicyTestBase.java index c98ee523d02..33eb0697321 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/LoadBalancingPolicyTestBase.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.metadata.Node; +import 
com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; @@ -40,7 +41,7 @@ import org.slf4j.LoggerFactory; @RunWith(MockitoJUnitRunner.class) -public abstract class DefaultLoadBalancingPolicyTestBase { +public abstract class LoadBalancingPolicyTestBase { @Mock protected DefaultNode node1; @Mock protected DefaultNode node2; @@ -71,6 +72,13 @@ public void setup() { .thenReturn("dc1"); when(defaultProfile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true)) .thenReturn(true); + when(defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(0); + when(defaultProfile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(false); + when(defaultProfile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)).thenReturn("ONE"); when(context.getMetadataManager()).thenReturn(metadataManager); @@ -83,6 +91,7 @@ public void setup() { } when(context.getLocalDatacenter(anyString())).thenReturn(null); + when(context.getConsistencyLevelRegistry()).thenReturn(new DefaultConsistencyLevelRegistry()); } @After diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSetTest.java new file mode 100644 index 00000000000..80268c037b4 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSetTest.java @@ -0,0 +1,59 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.Test; + +public class DcAgnosticNodeSetTest { + + @Test + public void should_add_node() { + DcAgnosticNodeSet set = new DcAgnosticNodeSet(); + Node node = mock(Node.class); + assertThat(set.add(node)).isTrue(); + assertThat(set.add(node)).isFalse(); + } + + @Test + public void should_remove_node() { + DcAgnosticNodeSet set = new DcAgnosticNodeSet(); + Node node = mock(Node.class); + set.add(node); + assertThat(set.remove(node)).isTrue(); + assertThat(set.remove(node)).isFalse(); + } + + @Test + public void should_return_all_nodes() { + DcAgnosticNodeSet set = new DcAgnosticNodeSet(); + Node node1 = mock(Node.class); + set.add(node1); + Node node2 = mock(Node.class); + set.add(node2); + assertThat(set.dc(null)).contains(node1, node2); + assertThat(set.dc("irrelevant")).contains(node1, node2); + } + + @Test + public void should_return_empty_dcs() { + DcAgnosticNodeSet set = new DcAgnosticNodeSet(); + assertThat(set.dcs()).isEmpty(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSetTest.java new file mode 100644 index 00000000000..4a021a3e838 --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSetTest.java @@ -0,0 +1,81 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.Test; + +public class MultiDcNodeSetTest { + + @Test + public void should_add_node() { + MultiDcNodeSet set = new MultiDcNodeSet(); + Node node1 = mockNode("dc1"); + assertThat(set.add(node1)).isTrue(); + assertThat(set.add(node1)).isFalse(); + Node node2 = mockNode("dc2"); + assertThat(set.add(node2)).isTrue(); + assertThat(set.add(node2)).isFalse(); + } + + @Test + public void should_remove_node() { + MultiDcNodeSet set = new MultiDcNodeSet(); + Node node1 = mockNode("dc1"); + set.add(node1); + assertThat(set.remove(node1)).isTrue(); + assertThat(set.remove(node1)).isFalse(); + Node node2 = mockNode("dc2"); + set.add(node2); + assertThat(set.remove(node2)).isTrue(); + assertThat(set.remove(node2)).isFalse(); + } + + @Test + public void should_return_all_nodes_in_dc() { + MultiDcNodeSet set = new MultiDcNodeSet(); + Node node1 = mockNode("dc1"); + set.add(node1); + Node node2 = mockNode("dc1"); + set.add(node2); + Node node3 = mockNode("dc2"); + set.add(node3); + 
assertThat(set.dc("dc1")).contains(node1, node2); + assertThat(set.dc("dc2")).contains(node3); + assertThat(set.dc("dc3")).isEmpty(); + assertThat(set.dc(null)).isEmpty(); + } + + @Test + public void should_return_all_dcs() { + MultiDcNodeSet set = new MultiDcNodeSet(); + Node node1 = mockNode("dc1"); + set.add(node1); + Node node2 = mockNode("dc2"); + set.add(node2); + assertThat(set.dcs()).contains("dc1", "dc2"); + } + + private Node mockNode(String dc) { + Node node = mock(Node.class); + when(node.getDatacenter()).thenReturn(dc); + return node; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSetTest.java new file mode 100644 index 00000000000..336d18a66ed --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSetTest.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.Test; + +public class SingleDcNodeSetTest { + + @Test + public void should_add_node() { + SingleDcNodeSet set = new SingleDcNodeSet("dc1"); + Node node1 = mockNode("dc1"); + assertThat(set.add(node1)).isTrue(); + assertThat(set.add(node1)).isFalse(); + Node node2 = mockNode("dc2"); + assertThat(set.add(node2)).isFalse(); + } + + @Test + public void should_remove_node() { + SingleDcNodeSet set = new SingleDcNodeSet("dc1"); + Node node = mockNode("dc1"); + set.add(node); + assertThat(set.remove(node)).isTrue(); + assertThat(set.remove(node)).isFalse(); + } + + @Test + public void should_return_all_nodes_if_local_dc() { + SingleDcNodeSet set = new SingleDcNodeSet("dc1"); + Node node1 = mockNode("dc1"); + set.add(node1); + Node node2 = mockNode("dc1"); + set.add(node2); + Node node3 = mockNode("dc2"); + set.add(node3); + assertThat(set.dc("dc1")).contains(node1, node2); + assertThat(set.dc("dc2")).isEmpty(); + assertThat(set.dc(null)).isEmpty(); + } + + @Test + public void should_return_only_local_dc() { + SingleDcNodeSet set = new SingleDcNodeSet("dc1"); + assertThat(set.dcs()).contains("dc1"); + } + + private Node mockNode(String dc) { + Node node = mock(Node.class); + when(node.getDatacenter()).thenReturn(dc); + return node; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlanTest.java new file mode 100644 index 00000000000..c460ecaaeec --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/CompositeQueryPlanTest.java @@ -0,0 +1,40 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class CompositeQueryPlanTest extends QueryPlanTestBase { + + @Override + protected QueryPlan newQueryPlan(Node... nodes) { + Object[] n1 = new Object[nodes.length / 2]; + Object[] n2 = new Object[nodes.length - n1.length]; + System.arraycopy(nodes, 0, n1, 0, n1.length); + System.arraycopy(nodes, n1.length, n2, 0, n2.length); + return new CompositeQueryPlan( + new SimpleQueryPlan(n1), + new LazyQueryPlan() { + @Override + protected Object[] computeNodes() { + return n2; + } + }); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlanTest.java new file mode 100644 index 00000000000..b6a7cbf39e7 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/LazyQueryPlanTest.java @@ -0,0 +1,34 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class LazyQueryPlanTest extends QueryPlanTestBase { + + @Override + protected QueryPlan newQueryPlan(Node... nodes) { + return new LazyQueryPlan() { + @Override + protected Object[] computeNodes() { + return nodes; + } + }; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTestBase.java similarity index 58% rename from core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTestBase.java index 8157a2662ee..8c9f4d7b9ff 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/QueryPlanTestBase.java @@ -16,16 +16,21 @@ package com.datastax.oss.driver.internal.core.util.collection; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import com.datastax.oss.driver.api.core.metadata.Node; +import java.util.Comparator; import java.util.Iterator; +import java.util.Set; +import java.util.concurrent.ConcurrentSkipListSet; 
import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) -public class QueryPlanTest { +public abstract class QueryPlanTestBase { @Mock private Node node1; @Mock private Node node2; @@ -33,7 +38,7 @@ public class QueryPlanTest { @Test public void should_poll_elements() { - QueryPlan queryPlan = new QueryPlan(node1, node2, node3); + QueryPlan queryPlan = newQueryPlan(node1, node2, node3); assertThat(queryPlan.poll()).isSameAs(node1); assertThat(queryPlan.poll()).isSameAs(node2); assertThat(queryPlan.poll()).isSameAs(node3); @@ -41,9 +46,49 @@ public void should_poll_elements() { assertThat(queryPlan.poll()).isNull(); } + @Test + public void should_poll_elements_concurrently() throws InterruptedException { + for (int runs = 0; runs < 5; runs++) { + Node[] nodes = new Node[1000]; + for (int i = 0; i < 1000; i++) { + nodes[i] = mock(Node.class, "node" + i); + when(nodes[i].getOpenConnections()).thenReturn(i); + } + QueryPlan queryPlan = newQueryPlan(nodes); + Set actual = + new ConcurrentSkipListSet<>(Comparator.comparingInt(Node::getOpenConnections)); + Thread[] threads = new Thread[5]; + for (int i = 0; i < 5; i++) { + threads[i] = + new Thread( + () -> { + while (true) { + Node node = queryPlan.poll(); + if (node == null) { + return; + } + actual.add(node); + } + }); + } + for (Thread thread : threads) { + thread.start(); + } + for (Thread thread : threads) { + thread.join(); + } + assertThat(actual).hasSize(1000); + Iterator iterator = actual.iterator(); + for (int i = 0; iterator.hasNext(); i++) { + Node node = iterator.next(); + assertThat(node.getOpenConnections()).isEqualTo(i); + } + } + } + @Test public void should_return_size() { - QueryPlan queryPlan = new QueryPlan(node1, node2, node3); + QueryPlan queryPlan = newQueryPlan(node1, node2, node3); assertThat(queryPlan.size()).isEqualTo(3); queryPlan.poll(); assertThat(queryPlan.size()).isEqualTo(2); @@ -57,7 
+102,7 @@ public void should_return_size() { @Test public void should_return_iterator() { - QueryPlan queryPlan = new QueryPlan(node1, node2, node3); + QueryPlan queryPlan = newQueryPlan(node1, node2, node3); Iterator iterator3 = queryPlan.iterator(); queryPlan.poll(); Iterator iterator2 = queryPlan.iterator(); @@ -74,4 +119,6 @@ public void should_return_iterator() { assertThat(iterator0).toIterable().isEmpty(); assertThat(iterator00).toIterable().isEmpty(); } + + protected abstract QueryPlan newQueryPlan(Node... nodes); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlanTest.java new file mode 100644 index 00000000000..d8ee2cdd506 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/collection/SimpleQueryPlanTest.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util.collection; + +import com.datastax.oss.driver.api.core.metadata.Node; +import org.junit.runner.RunWith; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class SimpleQueryPlanTest extends QueryPlanTestBase { + + @Override + protected QueryPlan newQueryPlan(Node... 
nodes) { + return new SimpleQueryPlan((Object[]) nodes); + } +} diff --git a/examples/pom.xml b/examples/pom.xml index f2124c8c3f3..3255b92e6ae 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -135,6 +135,11 @@ bcrypt 0.8.0 + + + io.projectreactor + reactor-core + diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/failover/CrossDatacenterFailover.java b/examples/src/main/java/com/datastax/oss/driver/examples/failover/CrossDatacenterFailover.java new file mode 100644 index 00000000000..19d6b98d9d6 --- /dev/null +++ b/examples/src/main/java/com/datastax/oss/driver/examples/failover/CrossDatacenterFailover.java @@ -0,0 +1,456 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.examples.failover; + +import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DriverException; +import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.OptionsMap; +import com.datastax.oss.driver.api.core.config.TypedDriverOption; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; +import com.datastax.oss.driver.api.core.servererrors.QueryConsistencyException; +import com.datastax.oss.driver.api.core.servererrors.UnavailableException; +import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import java.util.Collections; +import java.util.List; +import java.util.Map.Entry; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; +import reactor.core.publisher.Flux; + +/** + * This example illustrates how to implement a cross-datacenter failover strategy from application + * code. + * + *

          Starting with driver 4.10, cross-datacenter failover is also provided as a configuration + * option for built-in load balancing policies. See Load + * balancing in the manual. + * + *

          This example demonstrates how to achieve the same effect in application code, which confers + * more fained-grained control over which statements should be retried and where. + * + *

          The logic that decides whether or not a cross-DC failover should be attempted is presented in + * the {@link #shouldFailover(DriverException)} method below; study it carefully and adapt it to + * your needs if necessary. + * + *

          The actual request execution and failover code is presented in 3 different programming styles: + * + *

            + *
          1. Synchronous: see the {@link #writeSync()} method below; + *
          2. Asynchronous: see the {@link #writeAsync()} method below; + *
          3. Reactive (using Reactor): see the {@link + * #writeReactive()} method below. + *
          + * + * The 3 styles are identical in terms of failover effect; they are all included merely to help + * programmers pick the variant that is closest to the style they use. + * + *

          Preconditions: + * + *

            + *
          • An Apache Cassandra(R) cluster with two datacenters, dc1 and dc2, containing at least 3 + * nodes in each datacenter, is running and accessible through the contact point: + * 127.0.0.1:9042. + *
          + * + *

          Side effects: + * + *

            + *
          1. Creates a new keyspace {@code failover} in the cluster, with replication factor 3 in both + * datacenters. If a keyspace with this name already exists, it will be reused; + *
          2. Creates a new table {@code failover.orders}. If a table with that name exists already, it + * will be reused; + *
          3. Tries to write a row in the table using the local datacenter dc1; + *
          4. If the local datacenter dc1 is down, retries the write in the remote datacenter dc2. + *
          + * + * @see Java driver online + * manual + */ +public class CrossDatacenterFailover { + + public static void main(String[] args) throws Exception { + + CrossDatacenterFailover client = new CrossDatacenterFailover(); + + try { + + // Note: when this example is executed, at least the local DC must be available + // since the driver will try to reach contact points in that DC. + + client.connect(); + client.createSchema(); + + // To fully exercise this example, try to stop the entire dc1 here; then observe how + // the writes executed below will first fail in dc1, then be diverted to dc2, where they will + // succeed. + + client.writeSync(); + client.writeAsync(); + client.writeReactive(); + + } finally { + client.close(); + } + } + + private CqlSession session; + + private CrossDatacenterFailover() {} + + /** Initiates a connection to the cluster. */ + private void connect() { + + // For simplicity, this example uses a 100% in-memory configuration loader, but the same + // configuration can be achieved with the more traditional file-based approach. + // Simply put the below snippet in your application.conf file to get the same config: + + /* + datastax-java-driver { + basic.contact-points = [ "127.0.0.1:9042" ] + basic.load-balancing-policy.local-datacenter = "dc1" + basic.request.consistency = LOCAL_QUORUM + profiles { + remote { + basic.load-balancing-policy.local-datacenter = "dc2" + basic.request.consistency = LOCAL_ONE + } + } + } + */ + + OptionsMap options = OptionsMap.driverDefaults(); + // set the datacenter to dc1 in the default profile; this makes dc1 the local datacenter + options.put(TypedDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, "dc1"); + // set the datacenter to dc2 in the "remote" profile + options.put("remote", TypedDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, "dc2"); + // make sure to provide a contact point belonging to dc1, not dc2! 
+ options.put(TypedDriverOption.CONTACT_POINTS, Collections.singletonList("127.0.0.1:9042")); + // in this example, the default consistency level is LOCAL_QUORUM + options.put(TypedDriverOption.REQUEST_CONSISTENCY, "LOCAL_QUORUM"); + // but when failing over, the consistency level will be automatically downgraded to LOCAL_ONE + options.put("remote", TypedDriverOption.REQUEST_CONSISTENCY, "LOCAL_ONE"); + + session = CqlSession.builder().withConfigLoader(DriverConfigLoader.fromMap(options)).build(); + + System.out.println("Connected to cluster with session: " + session.getName()); + } + + /** Creates the schema (keyspace) and table for this example. */ + private void createSchema() { + + session.execute( + "CREATE KEYSPACE IF NOT EXISTS failover WITH replication " + + "= {'class':'NetworkTopologyStrategy', 'dc1':3, 'dc2':3}"); + + session.execute( + "CREATE TABLE IF NOT EXISTS failover.orders (" + + "product_id uuid," + + "timestamp timestamp," + + "price double," + + "PRIMARY KEY (product_id,timestamp)" + + ")"); + } + + /** Inserts data synchronously using the local DC, retrying if necessary in a remote DC. */ + private void writeSync() { + + System.out.println("------- DC failover (sync) ------- "); + + Statement statement = + SimpleStatement.newInstance( + "INSERT INTO failover.orders " + + "(product_id, timestamp, price) " + + "VALUES (" + + "756716f7-2e54-4715-9f00-91dcbea6cf50," + + "'2018-02-26T13:53:46.345+01:00'," + + "2.34)"); + + try { + + // try the statement using the default profile, which targets the local datacenter dc1. + session.execute(statement); + + System.out.println("Write succeeded"); + + } catch (DriverException e) { + + if (shouldFailover(e)) { + + System.out.println("Write failed in local DC, retrying in remote DC"); + + try { + + // try the statement using the remote profile, which targets the remote datacenter dc2. 
+ session.execute(statement.setExecutionProfileName("remote")); + + System.out.println("Write succeeded"); + + } catch (DriverException e2) { + + System.out.println("Write failed in remote DC"); + + e2.printStackTrace(); + } + } + } + // let other errors propagate + } + + /** Inserts data asynchronously using the local DC, retrying if necessary in a remote DC. */ + private void writeAsync() throws ExecutionException, InterruptedException { + + System.out.println("------- DC failover (async) ------- "); + + Statement statement = + SimpleStatement.newInstance( + "INSERT INTO failover.orders " + + "(product_id, timestamp, price) " + + "VALUES (" + + "756716f7-2e54-4715-9f00-91dcbea6cf50," + + "'2018-02-26T13:53:46.345+01:00'," + + "2.34)"); + + CompletionStage result = + // try the statement using the default profile, which targets the local datacenter dc1. + session + .executeAsync(statement) + .handle( + (rs, error) -> { + if (error == null) { + return CompletableFuture.completedFuture(rs); + } else { + if (error instanceof DriverException + && shouldFailover((DriverException) error)) { + System.out.println("Write failed in local DC, retrying in remote DC"); + // try the statement using the remote profile, which targets the remote + // datacenter dc2. + return session.executeAsync(statement.setExecutionProfileName("remote")); + } + // let other errors propagate + return CompletableFutures.failedFuture(error); + } + }) + // unwrap (flatmap) the nested future + .thenCompose(future -> future) + .whenComplete( + (rs, error) -> { + if (error == null) { + System.out.println("Write succeeded"); + } else { + System.out.println("Write failed in remote DC"); + error.printStackTrace(); + } + }); + + // for the sake of this example, wait for the operation to finish + result.toCompletableFuture().get(); + } + + /** Inserts data reactively using the local DC, retrying if necessary in a remote DC. 
*/ + private void writeReactive() { + + System.out.println("------- DC failover (reactive) ------- "); + + Statement statement = + SimpleStatement.newInstance( + "INSERT INTO failover.orders " + + "(product_id, timestamp, price) " + + "VALUES (" + + "756716f7-2e54-4715-9f00-91dcbea6cf50," + + "'2018-02-26T13:53:46.345+01:00'," + + "2.34)"); + + Flux result = + // try the statement using the default profile, which targets the local datacenter dc1. + Flux.from(session.executeReactive(statement)) + .onErrorResume( + DriverException.class, + error -> { + if (shouldFailover(error)) { + System.out.println("Write failed in local DC, retrying in remote DC"); + // try the statement using the remote profile, which targets the remote + // datacenter dc2. + return session.executeReactive(statement.setExecutionProfileName("remote")); + } else { + return Flux.error(error); + } + }) + .doOnComplete(() -> System.out.println("Write succeeded")) + .doOnError( + error -> { + System.out.println("Write failed"); + error.printStackTrace(); + }); + + // for the sake of this example, wait for the operation to finish + result.blockLast(); + } + + /** + * Analyzes the error and decides whether to failover to a remote DC. + * + *

          The logic below categorizes driver exceptions in four main groups: + * + *

            + *
          1. Total DC outage: all nodes in DC were known to be down when the request was executed; + *
          2. Partial DC outage: one or many nodes responded, but reported a replica availability + * problem; + *
          3. DC unreachable: one or many nodes were queried, but none responded (timeout); + *
          4. Other errors. + *
          + * + * A DC failover is authorized for the first three groups above: total DC outage, partial DC + * outage, and DC unreachable. + * + *

          This logic is provided as a good starting point for users to create their own DC failover + * strategy; please adjust it to your exact needs. + */ + private boolean shouldFailover(DriverException mainException) { + + if (mainException instanceof NoNodeAvailableException) { + + // No node could be tried, because all nodes in the query plan were down. This could be a + // total DC outage, so trying another DC makes sense. + System.out.println("All nodes were down in this datacenter, failing over"); + return true; + + } else if (mainException instanceof AllNodesFailedException) { + + // Many nodes were tried (as decided by the retry policy), but all failed. This could be a + // partial DC outage: some nodes were up, but the replicas were down. + + boolean failover = false; + + // Inspect the error to find out how many coordinators were tried, and which errors they + // returned. + for (Entry> entry : + ((AllNodesFailedException) mainException).getAllErrors().entrySet()) { + + Node coordinator = entry.getKey(); + List errors = entry.getValue(); + + System.out.printf( + "Node %s in DC %s was tried %d times but failed with:%n", + coordinator.getEndPoint(), coordinator.getDatacenter(), errors.size()); + + for (Throwable nodeException : errors) { + + System.out.printf("\t- %s%n", nodeException); + + // If the error was a replica availability error, then we know that some replicas were + // down in this DC. Retrying in another DC could solve the problem. Other errors don't + // necessarily mean that the DC is unavailable, so we ignore them. + if (isReplicaAvailabilityError(nodeException)) { + failover = true; + } + } + } + + // Authorize the failover if at least one of the coordinators reported a replica availability + // error that could be solved by trying another DC. 
+ if (failover) { + System.out.println( + "Some nodes tried in this DC reported a replica availability error, failing over"); + } else { + System.out.println("All nodes tried in this DC failed unexpectedly, not failing over"); + } + return failover; + + } else if (mainException instanceof DriverTimeoutException) { + + // One or many nodes were tried, but none replied in a timely manner, and the timeout defined + // by the option `datastax-java-driver.basic.request.timeout` was triggered. + // This could be a DC outage as well, or a network partition issue, so trying another DC may + // make sense. + // Note about SLAs: if your application needs to comply with SLAs, and the maximum acceptable + // latency for a request is equal or very close to the request timeout, beware that failing + // over to a different datacenter here could potentially break your SLA. + + System.out.println( + "No node in this DC replied before the timeout was triggered, failing over"); + return true; + + } else if (mainException instanceof CoordinatorException) { + + // Only one node was tried, and it failed (and the retry policy did not tell the driver to + // retry this request, but rather to surface the error immediately). This is rather unusual + // as the driver's default retry policy retries most of these errors, but some custom retry + // policies could decide otherwise. So we apply the same logic as above: if the error is a + // replica availability error, we authorize the failover. 
+ + Node coordinator = ((CoordinatorException) mainException).getCoordinator(); + System.out.printf( + "Node %s in DC %s was tried once but failed with: %s%n", + coordinator.getEndPoint(), coordinator.getDatacenter(), mainException); + + boolean failover = isReplicaAvailabilityError(mainException); + if (failover) { + System.out.println( + "The only node tried in this DC reported a replica availability error, failing over"); + } else { + System.out.println("The only node tried in this DC failed unexpectedly, not failing over"); + } + return failover; + + } else { + + // The request failed with a rather unusual error. This generally indicates a more serious + // issue, since the retry policy decided to surface the error immediately. Trying another DC + // is probably a bad idea. + System.out.println("The request failed unexpectedly, not failing over: " + mainException); + return false; + } + } + + /** + * Whether the given error is a replica availability error. + * + *

          A replica availability error means that the initial consistency level could not be met + * because not enough replicas were alive. + * + *

          When this error happens, it can be worth failing over to a remote DC, as long as at + * least one of the following conditions apply: + * + *

            + *
          1. if the initial consistency level was DC-local, trying another DC may succeed; + *
          2. if the initial consistency level can be downgraded, then retrying again may succeed (in + * the same DC, or in another one). + *
          + * + * In this example both conditions above apply, so we authorize the failover whenever we detect a + * replica availability error. + */ + private boolean isReplicaAvailabilityError(Throwable t) { + return t instanceof UnavailableException || t instanceof QueryConsistencyException; + } + + private void close() { + if (session != null) { + session.close(); + } + } +} diff --git a/faq/README.md b/faq/README.md index 842804431a2..315bf934cd2 100644 --- a/faq/README.md +++ b/faq/README.md @@ -71,6 +71,20 @@ code. An example of downgrading retries implemented at application level can be [JAVA-2900]: https://datastax-oss.atlassian.net/browse/JAVA-2900 [examples repository]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/retry/DowngradingRetry.java +### Where is the cross-datacenter failover feature that existed in driver 3? + +In driver 3, it was possible to configure the load balancing policy to automatically failover to +a remote datacenter, when the local datacenter is down. + +This ability is considered a misfeature and has been removed from driver 4.0 onwards. + +However, due to popular demand, cross-datacenter failover has been brought back to driver 4 in +version 4.10.0. + +If you are using a driver version >= 4.10.0, read the [manual](../manual/core/loadbalancing/) to +understand how to enable this feature; for driver versions < 4.10.0, this feature is simply not +available. + ### I want to set a date on a bound statement, where did `setTimestamp()` go? The driver now uses Java 8's improved date and time API. 
CQL type `timestamp` is mapped to diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/AllLoadBalancingPoliciesSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/AllLoadBalancingPoliciesSimulacronIT.java new file mode 100644 index 00000000000..bee2aa21fb6 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/AllLoadBalancingPoliciesSimulacronIT.java @@ -0,0 +1,503 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; +import static org.awaitility.Awaitility.await; + +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.DefaultConsistencyLevel; +import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.core.metadata.TokenMap; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.request.Query; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.cluster.QueryLog; +import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; +import com.datastax.oss.simulacron.common.stubbing.PrimeDsl.RowBuilder; +import com.datastax.oss.simulacron.server.BoundCluster; +import com.datastax.oss.simulacron.server.BoundNode; +import com.datastax.oss.simulacron.server.BoundTopic; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.net.SocketAddress; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import 
java.util.stream.Stream; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.runner.RunWith; + +@Category(ParallelizableTests.class) +@RunWith(DataProviderRunner.class) +public class AllLoadBalancingPoliciesSimulacronIT { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(5, 5, 5)); + + @Before + public void reset() { + SIMULACRON_RULE.cluster().start(); + SIMULACRON_RULE.cluster().clearLogs(); + SIMULACRON_RULE.cluster().clearPrimes(true); + SIMULACRON_RULE + .cluster() + .prime( + PrimeDsl.when("SELECT * FROM system_schema.keyspaces") + .then(new RowBuilder().columnTypes(KEYSPACE_COLUMNS).row(KEYSPACE_ROW).build())); + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1", + "DefaultLoadBalancingPolicy,dc1", + "DcInferringLoadBalancingPolicy,dc1", + "DcInferringLoadBalancingPolicy,null", + }) + public void should_round_robin_within_local_dc_when_dc_aware_but_not_token_aware( + String lbp, String dc) { + + // given: DC is provided or inferred, token awareness is disabled and remote DCs are allowed + try (CqlSession session = newSession(lbp, dc, 2, true, false)) { + + // when: a query is executed 50 times. + for (int i = 0; i < 50; i++) { + session.execute(QUERY); + } + + // then: each node in local DC should get an equal number of requests. + for (int i = 0; i < 5; i++) { + assertThat(queries(0, i).count()).isEqualTo(10); + } + + // then: no node in the remote DC should get a request. 
+ assertThat(queries(1).count()).isEqualTo(0); + assertThat(queries(2).count()).isEqualTo(0); + } + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1,ONE", + "BasicLoadBalancingPolicy,dc1,LOCAL_ONE", + "BasicLoadBalancingPolicy,dc1,TWO", + "BasicLoadBalancingPolicy,dc1,QUORUM", + "BasicLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DefaultLoadBalancingPolicy,dc1,ONE", + "DefaultLoadBalancingPolicy,dc1,LOCAL_ONE", + "DefaultLoadBalancingPolicy,dc1,TWO", + "DefaultLoadBalancingPolicy,dc1,QUORUM", + "DefaultLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,dc1,ONE", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,dc1,TWO", + "DcInferringLoadBalancingPolicy,dc1,QUORUM", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,null,ONE", + "DcInferringLoadBalancingPolicy,null,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,null,TWO", + "DcInferringLoadBalancingPolicy,null,QUORUM", + "DcInferringLoadBalancingPolicy,null,LOCAL_QUORUM", + }) + public void should_use_local_replicas_when_dc_aware_and_token_aware_and_enough_local_replicas_up( + String lbp, String dc, DefaultConsistencyLevel cl) { + + // given: DC is provided or inferred, token awareness enabled, remotes allowed, CL <= 2 + try (CqlSession session = newSession(lbp, dc, 2, true)) { + + // given: one replica and 2 non-replicas down in local DC, but CL <= 2 still achievable + List aliveReplicas = degradeLocalDc(session); + + // when: a query is executed 50 times and some nodes are down in the local DC. 
+ for (int i = 0; i < 50; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + .setConsistencyLevel(cl) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY)); + } + + // then: all requests should be distributed to the remaining up replicas in local DC + BoundNode alive1 = findNode(aliveReplicas.get(0)); + BoundNode alive2 = findNode(aliveReplicas.get(1)); + assertThat(queries(alive1).count() + queries(alive2).count()).isEqualTo(50); + + // then: no node in the remote DCs should get a request. + assertThat(queries(1).count()).isEqualTo(0); + assertThat(queries(2).count()).isEqualTo(0); + } + } + + @Test + public void should_round_robin_within_all_dcs_when_dc_agnostic() { + + // given: DC-agnostic LBP, no local DC, remotes not allowed, token awareness enabled + try (CqlSession session = newSession("BasicLoadBalancingPolicy", null, 0, false)) { + + // when: a query is executed 150 times. + for (int i = 0; i < 150; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + // local CL should be ignored since there is no local DC + .setConsistencyLevel(ConsistencyLevel.LOCAL_QUORUM)); + } + + // then: each node should get 10 requests, even remote ones since the LBP is DC-agnostic. 
+ for (int dc = 0; dc < 3; dc++) { + for (int n = 0; n < 5; n++) { + assertThat(queries(dc, n).count()).isEqualTo(10); + } + } + } + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1,ONE", + "BasicLoadBalancingPolicy,dc1,TWO", + "BasicLoadBalancingPolicy,dc1,THREE", + "BasicLoadBalancingPolicy,dc1,QUORUM", + "BasicLoadBalancingPolicy,dc1,ANY", + "DefaultLoadBalancingPolicy,dc1,ONE", + "DefaultLoadBalancingPolicy,dc1,TWO", + "DefaultLoadBalancingPolicy,dc1,THREE", + "DefaultLoadBalancingPolicy,dc1,QUORUM", + "DefaultLoadBalancingPolicy,dc1,ANY", + "DcInferringLoadBalancingPolicy,dc1,ONE", + "DcInferringLoadBalancingPolicy,dc1,TWO", + "DcInferringLoadBalancingPolicy,dc1,THREE", + "DcInferringLoadBalancingPolicy,dc1,QUORUM", + "DcInferringLoadBalancingPolicy,dc1,ANY", + "DcInferringLoadBalancingPolicy,null,ONE", + "DcInferringLoadBalancingPolicy,null,TWO", + "DcInferringLoadBalancingPolicy,null,THREE", + "DcInferringLoadBalancingPolicy,null,QUORUM", + "DcInferringLoadBalancingPolicy,null,ANY", + }) + public void should_use_remote_nodes_when_no_up_nodes_in_local_dc_for_non_local_cl( + String lbp, String dc, DefaultConsistencyLevel cl) { + + // given: 1 remote allowed per DC and a non-local CL, token awareness enabled + try (CqlSession session = newSession(lbp, dc, 1, false)) { + + // given: local DC is down + stopLocalDc(session); + + // when: a query is executed 50 times and all nodes are down in local DC. + for (int i = 0; i < 50; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + .setConsistencyLevel(cl) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY)); + } + + // then: only 1 node in each remote DC should get requests (we can't know which ones exactly). 
+ assertThat(queries(1).count() + queries(2).count()).isEqualTo(50); + } + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1,LOCAL_ONE", + "BasicLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "BasicLoadBalancingPolicy,dc1,LOCAL_SERIAL", + "DefaultLoadBalancingPolicy,dc1,LOCAL_ONE", + "DefaultLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DefaultLoadBalancingPolicy,dc1,LOCAL_SERIAL", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_SERIAL", + "DcInferringLoadBalancingPolicy,null,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,null,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,null,LOCAL_SERIAL", + }) + public void should_not_use_remote_nodes_when_using_local_cl( + String lbp, String dc, DefaultConsistencyLevel cl) { + + // given: remotes allowed but not for local CL, token awareness enabled, local CL + try (CqlSession session = newSession(lbp, dc, 5, false)) { + + // given: local DC is down + stopLocalDc(session); + + // when: a query is executed 50 times and all nodes are down in local DC. + for (int i = 0; i < 50; i++) { + Throwable t = + catchThrowable( + () -> + session.execute( + SimpleStatement.newInstance(QUERY) + .setConsistencyLevel(cl) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY))); + + // then: expect a NNAE for a local CL since no local replicas available. + assertThat(t).isInstanceOf(NoNodeAvailableException.class); + } + + // then: no node in the remote DCs should get a request. 
+ assertThat(queries(1).count()).isEqualTo(0); + assertThat(queries(2).count()).isEqualTo(0); + } + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1,LOCAL_ONE", + "BasicLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DefaultLoadBalancingPolicy,dc1,LOCAL_ONE", + "DefaultLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,dc1,LOCAL_QUORUM", + "DcInferringLoadBalancingPolicy,null,LOCAL_ONE", + "DcInferringLoadBalancingPolicy,null,LOCAL_QUORUM", + }) + public void should_use_remote_nodes_when_using_local_cl_if_allowed( + String lbp, String dc, DefaultConsistencyLevel cl) { + + // given: only one node allowed per remote DC and remotes allowed even for local CLs. + try (CqlSession session = newSession(lbp, dc, 1, true)) { + + // given: local DC is down + stopLocalDc(session); + + // when: a query is executed 50 times and all nodes are down in local DC. + for (int i = 0; i < 50; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + .setConsistencyLevel(cl) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY)); + } + + // then: only 1 node in each remote DC should get requests (we can't know which ones exactly). + assertThat(queries(1).count() + queries(2).count()).isEqualTo(50); + } + } + + @Test + @DataProvider({ + "BasicLoadBalancingPolicy,dc1", + "DefaultLoadBalancingPolicy,dc1", + "DcInferringLoadBalancingPolicy,dc1", + "DcInferringLoadBalancingPolicy,null" + }) + public void should_not_use_excluded_dc_using_node_filter(String lbp, String dc) { + + // given: remotes allowed even for local CLs, but node filter excluding dc2 + try (CqlSession session = newSession(lbp, dc, 5, true, true, excludeDc("dc2"))) { + + // when: A query is made and nodes for the local dc are available. 
+ for (int i = 0; i < 50; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY)); + } + + // then: only nodes in the local DC should have been queried. + assertThat(queries(0).count()).isEqualTo(50); + assertThat(queries(1).count()).isEqualTo(0); + assertThat(queries(2).count()).isEqualTo(0); + + // given: local DC is down + stopLocalDc(session); + + SIMULACRON_RULE.cluster().clearLogs(); + + // when: A query is made and all nodes in the local dc are down. + for (int i = 0; i < 50; i++) { + session.execute( + SimpleStatement.newInstance(QUERY) + .setRoutingKeyspace("test") + .setRoutingKey(ROUTING_KEY)); + } + + // then: Only nodes in DC3 should have been queried, since DC2 is excluded and DC1 is down. + assertThat(queries(0).count()).isEqualTo(0); + assertThat(queries(1).count()).isEqualTo(0); + assertThat(queries(2).count()).isEqualTo(50); + } + } + + private static final ByteBuffer ROUTING_KEY = ByteBuffer.wrap(new byte[] {1, 2, 3, 4}); + + private static final String[] KEYSPACE_COLUMNS = + new String[] { + "keyspace_name", "varchar", + "durable_writes", "boolean", + "replication", "map" + }; + + private static final Object[] KEYSPACE_ROW = + new Object[] { + "keyspace_name", + "test", + "durable_writes", + true, + "replication", + ImmutableMap.of( + "class", + "org.apache.cassandra.locator.NetworkTopologyStrategy", + "dc1", + "3", + "dc2", + "3", + "dc3", + "3") + }; + + private static final String QUERY = "SELECT * FROM test.foo"; + + private CqlSession newSession(String lbp, String dc, int maxRemoteNodes, boolean allowLocalCl) { + return newSession(lbp, dc, maxRemoteNodes, allowLocalCl, true); + } + + private CqlSession newSession( + String lbp, String dc, int maxRemoteNodes, boolean allowLocalCl, boolean tokenAware) { + return newSession(lbp, dc, maxRemoteNodes, allowLocalCl, tokenAware, null); + } + + private CqlSession newSession( + String lbp, + String dc, + int maxRemoteNodes, + boolean 
allowLocalCl, + boolean tokenAware, + Predicate nodeFilter) { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withBoolean(DefaultDriverOption.METADATA_SCHEMA_ENABLED, tokenAware) + .withString(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, lbp) + .withString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, dc) + .withInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC, + maxRemoteNodes) + .withBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, + allowLocalCl) + .build(); + return SessionUtils.newSession(SIMULACRON_RULE, null, null, null, nodeFilter, loader); + } + + private BoundNode findNode(Node node) { + BoundCluster simulacron = SIMULACRON_RULE.cluster(); + SocketAddress toFind = node.getEndPoint().resolve(); + for (BoundNode boundNode : simulacron.getNodes()) { + if (boundNode.getAddress().equals(toFind)) { + return boundNode; + } + } + throw new AssertionError("Could not find node: " + toFind); + } + + private void stopLocalDc(CqlSession session) { + SIMULACRON_RULE.cluster().dc(0).stop(); + awaitDown(nodesInDc(session, "dc1")); + } + + private List degradeLocalDc(CqlSession session) { + // stop 1 replica and 2 non-replicas in dc1 + List localReplicas = replicasInDc(session, "dc1"); + assertThat(localReplicas).hasSize(3); + BoundNode replica1 = findNode(localReplicas.get(0)); + + List localOthers = nonReplicasInDc(session, "dc1"); + assertThat(localOthers).hasSize(2); + BoundNode other1 = findNode(localOthers.get(0)); + BoundNode other2 = findNode(localOthers.get(1)); + + replica1.stop(); + other1.stop(); + other2.stop(); + + awaitDown(localReplicas.get(0), localOthers.get(0), localOthers.get(1)); + return localReplicas.subList(1, 3); + } + + private Stream queries(int dc, int node) { + return queries(SIMULACRON_RULE.cluster().dc(dc).node(node)); + } + + private Stream queries(int dc) { + return queries(SIMULACRON_RULE.cluster().dc(dc)); + } + + private Stream 
queries(BoundTopic topic) { + return topic.getLogs().getQueryLogs().stream() + .filter(q -> q.getFrame().message instanceof Query) + .filter(q -> ((Query) q.getFrame().message).query.equals(QUERY)); + } + + private List nodesInDc(CqlSession session, String dcName) { + return session.getMetadata().getNodes().values().stream() + .filter(n -> Objects.equals(n.getDatacenter(), dcName)) + .collect(Collectors.toList()); + } + + private List replicasInDc(CqlSession session, String dcName) { + assertThat(session.getMetadata().getTokenMap()).isPresent(); + TokenMap tokenMap = session.getMetadata().getTokenMap().get(); + return tokenMap.getReplicas("test", ROUTING_KEY).stream() + .filter(n -> Objects.equals(n.getDatacenter(), dcName)) + .collect(Collectors.toList()); + } + + private List nonReplicasInDc( + CqlSession session, @SuppressWarnings("SameParameterValue") String dcName) { + List nodes = nodesInDc(session, dcName); + nodes.removeAll(replicasInDc(session, dcName)); + return nodes; + } + + private Predicate excludeDc(@SuppressWarnings("SameParameterValue") String dcName) { + return node -> !Objects.equals(node.getDatacenter(), dcName); + } + + private void awaitDown(Node... nodes) { + awaitDown(Arrays.asList(nodes)); + } + + private void awaitDown(Iterable nodes) { + await() + .atMost(Duration.ofSeconds(10)) + .untilAsserted( + () -> { + for (Node node : nodes) { + assertThat(node.getState()).isEqualTo(NodeState.DOWN); + } + }); + } +} diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index dbb22712e32..3210c916a61 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -37,6 +37,9 @@ For each node, the policy computes a *distance* that determines how connections Typically, the distance will reflect network topology (e.g. local vs. remote datacenter), although that is entirely up to each policy implementation. It can also change over time. 
+The driver built-in policies only ever assign the `LOCAL` or `IGNORED` distance, to avoid cross- +datacenter traffic (see below to understand how to change this behavior). + #### Query plan Each time the driver executes a query, it asks the policy to compute a *query plan*, in other words @@ -50,23 +53,40 @@ return plans that: * only contain nodes that are known to be able to process queries, i.e. neither ignored nor down; * favor local nodes over remote ones. -### Default policy +### Built-in policies In previous versions, the driver provided a wide variety of built-in load balancing policies; in addition, they could be nested into each other, yielding an even higher number of choices. In our experience, this has proven to be too complicated: it's not obvious which policy(ies) to choose for -a given use case, and nested policies can sometimes affect each other's effects in subtle and hard -to predict ways. - -In driver 4+, we are taking a more opinionated approach: we provide a default load balancing policy, -that we consider the best choice for most cases. You can still write a -[custom implementation](#custom-implementation) if you have special requirements. - -#### Local only - -The default policy **only connects to a single datacenter**. The rationale is that a typical -multi-region deployment will collocate one or more application instances with each Cassandra -datacenter: +a given use case, and nested policies can sometimes affect each other's effects in subtle and hard- +to-predict ways. + +In driver 4+, we are taking a different approach: we provide only a handful of load balancing +policies, that we consider the best choices for most cases: + +- `DefaultLoadBalancingPolicy` should almost always be used; it requires a local datacenter to be + specified either programmatically when creating the session, or via the configuration (see below). + It can also use a highly efficient slow replica avoidance mechanism, which is by default enabled. 
+- `DcInferringLoadBalancingPolicy` is similar to `DefaultLoadBalancingPolicy`, but does not require + a local datacenter to be defined, in which case it will attempt to infer the local datacenter from + the provided contact points. If that's not possible, it will throw an error during session + initialization. This policy is intended mostly for ETL tools and is not recommended for normal + applications. +- `BasicLoadBalancingPolicy` is similar to `DefaultLoadBalancingPolicy`, but does not have the slow + replica avoidance mechanism. More importantly, it is the only policy capable of operating without + local datacenter defined, in which case it will consider nodes in the cluster in a datacenter- + agnostic way. Beware that this could cause spikes in cross-datacenter traffic! This policy is + provided mostly as a starting point for users wishing to implement their own load balancing + policy; it should not be used as is in normal applications. + +You can still write a [custom implementation](#custom-implementation) if you have special +requirements. + +#### Datacenter locality + +By default, both `DefaultLoadBalancingPolicy` and `DcInferringLoadBalancingPolicy` **only connect to +a single datacenter**. The rationale is that a typical multi-region deployment will collocate one or +more application instances with each Cassandra datacenter: ```ditaa /----+----\ @@ -96,14 +116,7 @@ datacenter: +-------------------+ +-------------------+ ``` -In previous driver versions, you could configure application-level failover, such as: "if all the -Cassandra nodes in DC1 are down, allow app1 to connect to the nodes in DC2". We now believe that -this is not the right place to handle this: if a whole datacenter went down at once, it probably -means a catastrophic failure happened in Region1, and the application node is down as well. -Failover should be cross-region instead (handled by the load balancer in this example). 
- -Therefore the default policy does not allow remote nodes; it only ever assigns the `LOCAL` or -`IGNORED` distance. You **must** provide a local datacenter name, either in the configuration: +When using these policies you **must** provide a local datacenter name, either in the configuration: ``` datastax-java-driver.basic.load-balancing-policy { @@ -126,7 +139,7 @@ that case, the driver will connect to 127.0.0.1:9042, and use that node's datace for a better out-of-the-box experience for users who have just downloaded the driver; beyond that initial development phase, you should provide explicit contact points and a local datacenter. -#### Finding the local datacenter +##### Finding the local datacenter To check which datacenters are defined in a given cluster, you can run [`nodetool status`]. It will print information about each node in the cluster, grouped by datacenters. Here is an example: @@ -165,6 +178,82 @@ data_center DC1 ``` +#### Cross-datacenter failover + +Since the driver by default only contacts nodes in the local datacenter, what happens if the whole +datacenter is down? Resuming the example shown in the diagram above, shouldn't the driver +temporarily allow app1 to connect to the nodes in DC2? + +We believe that, while appealing by its simplicity, such ability is not the right way to handle a +datacenter failure: resuming our example above, if the whole DC1 datacenter went down at once, it +probably means a catastrophic failure happened in Region1, and the application node is down as well. +Failover should be cross-region instead (handled by the load balancer in the above example). + +However, due to popular demand, starting with driver 4.10, we re-introduced cross-datacenter +failover in the driver built-in load balancing policies. 
+ +Cross-datacenter failover is enabled with the following configuration option: + +``` +datastax-java-driver.advanced.load-balancing-policy.dc-failover { + max-nodes-per-remote-dc = 2 +} +``` + +The default for `max-nodes-per-remote-dc` is zero, which means that failover is disabled. Setting +this option to any value greater than zero will have the following effects: + +- The load balancing policies will assign the `REMOTE` distance to that many nodes *in each remote + datacenter*. +- The driver will then attempt to open connections to those nodes. The actual number of connections + to open to each one of those nodes is configurable, see [Connection pools](../pooling/) for + more details. By default, the driver opens only one connection to each node. +- Those remote nodes (and only those) will then become eligible for inclusion in query plans, + effectively enabling cross-datacenter failover. + +Beware that enabling such failover can result in cross-datacenter network traffic spikes, if the +local datacenter is down or experiencing high latencies! + +Cross-datacenter failover can also have unexpected consequences when using local consistency levels +(LOCAL_ONE, LOCAL_QUORUM and LOCAL_SERIAL). Indeed, a local consistency level may have different +semantics depending on the replication factor (RF) in use in each datacenter: if the local DC has +RF=3 for a given keyspace, but the remote DC has RF=1 for it, achieving LOCAL_QUORUM in the local DC +means 2 replicas required, but in the remote DC, only one will be required. + +For this reason, cross-datacenter failover for local consistency levels is disabled by default. 
If +you want to enable this and understand the consequences, then set the following option to true: + +``` +datastax-java-driver.advanced.load-balancing-policy.dc-failover { + allow-for-local-consistency-levels = true +} +``` + +##### Alternatives to driver-level cross-datacenter failover + +Before you jump into the failover technique explained above, please also consider the following +alternatives: + +1. **Application-level failover**: instead of letting the driver do the failover, implement the +failover logic in your application. Granted, this solution wouldn't be much better if the +application servers are co-located with the Cassandra datacenter itself. It's also a bit more work, +but at least, you would have full control over the failover procedure: you could for example decide, +based on the exact error that prevented the local datacenter from fulfilling a given request, +whether a failover would make sense, and which remote datacenter to use for that specific request. +Such a fine-grained logic is not possible with a driver-level failover. Besides, if you opt for this +approach, execution profiles can come in handy. See "Using multiple policies" below and also check +our [application-level failover example] for a good starting point. + +2. **Infrastructure-level failover**: in this scenario, the failover is handled by the +infrastructure. To resume our example above, if Region1 goes down, the load balancers in your +infrastructure would transparently switch all the traffic intended for that region to Region2, +possibly scaling up its bandwidth to cope with the network traffic spike. This is by far the best +solution for the cross-datacenter failover issue in general, but we acknowledge that it also +requires a purpose-built infrastructure. To help you explore this option, read our [white paper]. 
+ +[application-level failover example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/retry/CrossDatacenterFailover.java +[white paper]: https://www.datastax.com/sites/default/files/content/whitepaper/files/2019-09/Designing-Fault-Tolerant-Applications-DataStax.pdf + #### Token-aware The default policy is **token-aware** by default: requests will be routed in priority to the @@ -242,11 +331,20 @@ routing information, you need to provide it manually. ##### Policy behavior When the policy computes a query plan, it first inspects the statement's routing information. If -there isn't any, the query plan is a simple round-robin shuffle of all connected nodes. +there isn't any, the query plan is a simple round-robin shuffle of all connected nodes that are +located in the local datacenter. + +If the statement has routing information, the policy uses it to determine the *local* replicas that +hold the corresponding data. Then it returns a query plan containing these replicas shuffled in +random order, followed by a round-robin shuffle of the rest of the nodes. -If the statement has routing information, the policy uses it to determine the replicas that hold the -corresponding data. Then it returns a query plan containing the replicas shuffled in random order, -followed by a round-robin shuffle of the rest of the nodes. +If cross-datacenter failover has been activated as explained above, some remote nodes may appear in +query plans as well. With the driver built-in policies, remote nodes always come after local nodes +in query plans: this way, if the local datacenter is up, local nodes will be tried first, and remote +nodes are unlikely to ever be queried. If the local datacenter goes down however, all the local +nodes in query plans will likely fail, causing the query plans to eventually try remote nodes +instead. 
If the local datacenter unavailability persists, local nodes will be eventually marked down +and will be removed from query plans completely, until they are back up again. #### Optional node filtering @@ -284,7 +382,9 @@ If a programmatic filter is provided, the configuration option is ignored. You can use your own implementation by specifying its fully-qualified name in the configuration. -Study the [LoadBalancingPolicy] interface and the default implementation for the low-level details. +Study the [LoadBalancingPolicy] interface and the built-in [BasicLoadBalancingPolicy] for the +low-level details. Feel free to extend `BasicLoadBalancingPolicy` and override only the methods +that you wish to modify – but keep in mind that it may be simpler to just start from scratch. ### Using multiple policies @@ -325,6 +425,7 @@ Then it uses the "closest" distance for any given node. For example: [DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html [LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java [getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- [getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- [getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- diff --git a/pom.xml b/pom.xml index f4097e4d1a4..6d5d7f4c3d6 100644 --- a/pom.xml +++ b/pom.xml @@ -423,7 +423,7 @@ io.projectreactor reactor-bom - 2020.0.1 + 2020.0.2 pom import diff --git a/upgrade_guide/README.md
b/upgrade_guide/README.md index 88ba4cd8332..bba68707f8d 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -2,6 +2,15 @@ ### 4.10.0 +#### Cross-datacenter failover + +[JAVA-2899](https://datastax-oss.atlassian.net/browse/JAVA-2899) re-introduced the ability to +perform cross-datacenter failover using the driver's built-in load balancing policies. See [Load +balancing](../manual/core/loadbalancing/) in the manual for details. + +Cross-datacenter failover is disabled by default, therefore existing applications should not +experience any disruption. + #### New `RetryVerdict` API [JAVA-2900](https://datastax-oss.atlassian.net/browse/JAVA-2900) introduced [`RetryVerdict`], a new From e5f174dbae7af4cf3438355335e8e14f3d1f715f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 12:05:54 +0100 Subject: [PATCH 622/979] JAVA-2902: Consider computed values when validating constructors for immutable entities (#1520) --- changelog/README.md | 1 + .../oss/driver/mapper/ImmutableEntityIT.java | 13 +++- .../entity/DefaultEntityFactory.java | 11 +-- .../entity/EntityPropertyAnnotationsTest.java | 75 +++++++++++++++++++ 4 files changed, 93 insertions(+), 7 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 4c7cf9fbf4d..f3d5edda543 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2902: Consider computed values when validating constructors for immutable entities - [new feature] JAVA-2899: Re-introduce cross-DC failover in driver 4 - [new feature] JAVA-2900: Re-introduce consistency downgrading retries - [new feature] JAVA-2903: BlockHound integration diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java index ed1afdbfaf8..9ed1666f848 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java
+++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.mapper.annotations.Computed; import com.datastax.oss.driver.api.mapper.annotations.CqlName; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; @@ -73,7 +74,7 @@ public static void setup() { @Test public void should_insert_and_retrieve_immutable_entities() { ImmutableProduct originalProduct = - new ImmutableProduct(UUID.randomUUID(), "mock description", new Dimensions(1, 2, 3)); + new ImmutableProduct(UUID.randomUUID(), "mock description", new Dimensions(1, 2, 3), -1); dao.save(originalProduct); ImmutableProduct retrievedProduct = dao.findById(originalProduct.id()); @@ -88,10 +89,14 @@ public static class ImmutableProduct { private final String description; private final Dimensions dimensions; - public ImmutableProduct(UUID id, String description, Dimensions dimensions) { + @Computed("writetime(description)") + private final long writetime; + + public ImmutableProduct(UUID id, String description, Dimensions dimensions, long writetime) { this.id = id; this.description = description; this.dimensions = dimensions; + this.writetime = writetime; } public UUID id() { @@ -106,6 +111,10 @@ public Dimensions dimensions() { return dimensions; } + public long writetime() { + return writetime; + } + @Override public boolean equals(Object other) { if (other == this) { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java index e82832bf16f..6e5a9ac6488 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/DefaultEntityFactory.java @@ -603,7 +603,7 @@ private void validateConstructor(EntityDefinition entity, TypeElement processedC if (entity.isMutable()) { validateNoArgConstructor(processedClass); } else { - validateAllColumnsConstructor(processedClass, entity.getAllColumns()); + validateAllValuesConstructor(processedClass, entity.getAllValues()); } } @@ -625,7 +625,7 @@ private void validateNoArgConstructor(TypeElement processedClass) { Entity.class.getSimpleName()); } - private void validateAllColumnsConstructor( + private void validateAllValuesConstructor( TypeElement processedClass, List columns) { for (Element child : processedClass.getEnclosedElements()) { if (child.getKind() == ElementKind.CONSTRUCTOR) { @@ -641,15 +641,16 @@ && areAssignable(columns, constructor.getParameters())) { columns.stream() .map( column -> - String.format("%s %s", column.getType().asTypeMirror(), column.getGetterName())) + String.format("%s %s", column.getType().asTypeMirror(), column.getJavaName())) .collect(Collectors.joining(", ")); context .getMessager() .error( processedClass, - "Immutable @%s-annotated class must have an \"all columns\" constructor. " - + "Expected signature: (%s).", + "Immutable @%s-annotated class must have an \"all values\" constructor. 
" + + "Expected signature: %s(%s).", Entity.class.getSimpleName(), + processedClass.getSimpleName(), signature); } diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityPropertyAnnotationsTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityPropertyAnnotationsTest.java index 59f0fb9e98d..ba047240057 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityPropertyAnnotationsTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityPropertyAnnotationsTest.java @@ -19,12 +19,14 @@ import com.datastax.oss.driver.api.mapper.annotations.Computed; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; +import com.datastax.oss.driver.api.mapper.annotations.PropertyStrategy; import com.datastax.oss.driver.api.mapper.annotations.Transient; import com.datastax.oss.driver.internal.mapper.processor.MapperProcessorTest; import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.TypeSpec; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -320,6 +322,79 @@ public static Object[][] entitiesWithErrors() { .build()) .build(), }, + { + "Mutable @Entity-annotated class must have a no-arg constructor.", + TypeSpec.classBuilder(ClassName.get("test", "Product")) + .addAnnotation(Entity.class) + .addField( + FieldSpec.builder(UUID.class, "id", Modifier.PRIVATE) + .addModifiers(Modifier.FINAL) + .addAnnotation(PartitionKey.class) + .build()) + .addMethod( + MethodSpec.constructorBuilder() + .addParameter(ParameterSpec.builder(UUID.class, "id").build()) + 
.addModifiers(Modifier.PUBLIC) + .addStatement("this.id = id") + .build()) + .addMethod( + MethodSpec.methodBuilder("getId") + .returns(UUID.class) + .addModifiers(Modifier.PUBLIC) + .addStatement("return id") + .build()) + .addMethod( + MethodSpec.methodBuilder("setId") + .addParameter(UUID.class, "id") + .addModifiers(Modifier.PUBLIC) + .addStatement("this.id = id") + .build()) + .build(), + }, + { + "Immutable @Entity-annotated class must have an \"all values\" constructor. " + + "Expected signature: Product(java.util.UUID id, java.lang.String name, long writetime).", + TypeSpec.classBuilder(ClassName.get("test", "Product")) + .addAnnotation(Entity.class) + .addAnnotation( + AnnotationSpec.builder(PropertyStrategy.class) + .addMember("mutable", "false") + .build()) + .addField( + FieldSpec.builder(UUID.class, "id", Modifier.PRIVATE) + .addModifiers(Modifier.FINAL) + .addAnnotation(PartitionKey.class) + .build()) + .addField( + FieldSpec.builder(String.class, "name", Modifier.PRIVATE) + .addModifiers(Modifier.FINAL) + .build()) + .addField( + FieldSpec.builder(String.class, "writetime", Modifier.PRIVATE) + .addModifiers(Modifier.FINAL) + .addAnnotation( + AnnotationSpec.builder(Computed.class).addMember("value", "$S", "").build()) + .build()) + .addMethod( + MethodSpec.methodBuilder("getId") + .returns(UUID.class) + .addModifiers(Modifier.PUBLIC) + .addStatement("return id") + .build()) + .addMethod( + MethodSpec.methodBuilder("getName") + .returns(String.class) + .addModifiers(Modifier.PUBLIC) + .addStatement("return name") + .build()) + .addMethod( + MethodSpec.methodBuilder("getWritetime") + .returns(Long.TYPE) + .addModifiers(Modifier.PUBLIC) + .addStatement("return writetime") + .build()) + .build(), + }, }; } } From 22d8bc5f5d81adad760377b85f6617cf6fb723fd Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 12:07:30 +0100 Subject: [PATCH 623/979] JAVA-2911: Prevent control connection from scheduling too many reconnections (#1521) --- 
changelog/README.md | 1 + .../core/control/ControlConnection.java | 24 +++++++++++++++---- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index f3d5edda543..c6b981226dd 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.10.0 (in progress) +- [bug] JAVA-2911: Prevent control connection from scheduling too many reconnections - [bug] JAVA-2902: Consider computed values when validating constructors for immutable entities - [new feature] JAVA-2899: Re-introduce cross-DC failover in driver 4 - [new feature] JAVA-2900: Re-introduce consistency downgrading retries diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index bcba1e76583..3fcfd120086 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -425,13 +425,15 @@ private void connect( connect(nodes, errors, onSuccess, onFailure); } else { LOG.debug("[{}] New channel opened {}", logPrefix, channel); - // Make sure previous channel gets closed (it may still be open if - // reconnection was forced) DriverChannel previousChannel = ControlConnection.this.channel; + ControlConnection.this.channel = channel; if (previousChannel != null) { + // We were reconnecting: make sure previous channel gets closed (it may + // still be open if reconnection was forced) + LOG.debug( + "[{}] Forcefully closing previous channel {}", logPrefix, channel); previousChannel.forceClose(); } - ControlConnection.this.channel = channel; context.getEventBus().fire(ChannelEvent.channelOpened(node)); channel .closeFuture() @@ -503,9 +505,21 @@ private void onSuccessfulReconnect() { private void onChannelClosed(DriverChannel channel, Node node) { assert adminExecutor.inEventLoop(); if 
(!closeWasCalled) { - LOG.debug("[{}] Lost channel {}", logPrefix, channel); context.getEventBus().fire(ChannelEvent.channelClosed(node)); - reconnection.start(); + // If this channel is the current control channel, we must start a + // reconnection attempt to get a new control channel. + if (channel == ControlConnection.this.channel) { + LOG.debug( + "[{}] The current control channel {} was closed, scheduling reconnection", + logPrefix, + channel); + reconnection.start(); + } else { + LOG.trace( + "[{}] A previous control channel {} was closed, reconnection not required", + logPrefix, + channel); + } } } From 11448aa6532791f331f3f46dec03bf3d0870a63e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 14:06:25 +0100 Subject: [PATCH 624/979] JAVA-2904 follow-up: fix Revapi errors caused by the upgrade of Jackson --- core/revapi.json | 410 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 410 insertions(+) diff --git a/core/revapi.json b/core/revapi.json index 8bf661b8544..592d810e571 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5030,6 +5030,416 @@ "code": "java.class.nonPublicPartOfAPI", "new": "class com.fasterxml.jackson.databind.util.PrimitiveArrayBuilder.Node", "justification": "Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.annotation.JsonTypeInfo.Id.CUSTOM", + "new": "field com.fasterxml.jackson.annotation.JsonTypeInfo.Id.CUSTOM", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.core.Base64Variant.serialVersionUID", + "new": "field com.fasterxml.jackson.core.Base64Variant.serialVersionUID", + "serialVersionUID": "1", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.fasterxml.jackson.core.JsonGenerationException", + "new": "class com.fasterxml.jackson.core.JsonGenerationException", + "superClass": "com.fasterxml.jackson.core.JacksonException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.fasterxml.jackson.core.JsonParseException", + "new": "class com.fasterxml.jackson.core.JsonParseException", + "superClass": "com.fasterxml.jackson.core.JacksonException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.fasterxml.jackson.core.JsonProcessingException", + "new": "class com.fasterxml.jackson.core.JsonProcessingException", + "superClass": "com.fasterxml.jackson.core.JacksonException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class com.fasterxml.jackson.core.sym.ByteQuadsCanonicalizer.TableInfo", + "new": "class com.fasterxml.jackson.core.sym.ByteQuadsCanonicalizer.TableInfo", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class com.fasterxml.jackson.core.sym.CharsToNameCanonicalizer.Bucket", + "new": "class com.fasterxml.jackson.core.sym.CharsToNameCanonicalizer.Bucket", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class com.fasterxml.jackson.core.sym.CharsToNameCanonicalizer.TableInfo", + "new": "class com.fasterxml.jackson.core.sym.CharsToNameCanonicalizer.TableInfo", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method java.lang.String[] com.fasterxml.jackson.databind.AnnotationIntrospector::findPropertiesToIgnore(com.fasterxml.jackson.databind.introspect.Annotated)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.defaultSerializationChanged", + "old": "class com.fasterxml.jackson.databind.AnnotationIntrospector", + "new": "class com.fasterxml.jackson.databind.AnnotationIntrospector", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.DeserializationConfig.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.DeserializationConfig.serialVersionUID", + "serialVersionUID": "2", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method void com.fasterxml.jackson.databind.DeserializationConfig::initialize(com.fasterxml.jackson.core.JsonParser)", + "new": "method com.fasterxml.jackson.core.JsonParser com.fasterxml.jackson.databind.DeserializationConfig::initialize(com.fasterxml.jackson.core.JsonParser)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.generics.formalTypeParameterRemoved", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "typeParameter": "T extends com.fasterxml.jackson.databind.BeanDescription", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspectForBuilder(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspectForBuilder(com.fasterxml.jackson.databind.JavaType)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.generics.formalTypeParameterRemoved", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspectForBuilder(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspectForBuilder(com.fasterxml.jackson.databind.JavaType)", + "typeParameter": "T extends com.fasterxml.jackson.databind.BeanDescription", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspectForCreation(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspectForCreation(com.fasterxml.jackson.databind.JavaType)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.generics.formalTypeParameterRemoved", + "old": "method T com.fasterxml.jackson.databind.DeserializationConfig::introspectForCreation(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.DeserializationConfig::introspectForCreation(com.fasterxml.jackson.databind.JavaType)", + "typeParameter": "T extends com.fasterxml.jackson.databind.BeanDescription", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.DeserializationContext.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.DeserializationContext.serialVersionUID", + "serialVersionUID": "1", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.finalMethodAddedToNonFinalClass", + "new": "method boolean com.fasterxml.jackson.databind.DeserializationContext::isEnabled(com.fasterxml.jackson.core.StreamReadCapability)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.finalMethodAddedToNonFinalClass", + "new": "method boolean com.fasterxml.jackson.databind.JavaType::isRecordType()", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.fasterxml.jackson.databind.JsonMappingException", + "new": "class com.fasterxml.jackson.databind.JsonMappingException", + "superClass": "com.fasterxml.jackson.core.JacksonException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS", + "new": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES", + "new": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES", + "new": "field com.fasterxml.jackson.databind.MapperFeature.ACCEPT_CASE_INSENSITIVE_VALUES", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.ALLOW_COERCION_OF_SCALARS", + "new": "field com.fasterxml.jackson.databind.MapperFeature.ALLOW_COERCION_OF_SCALARS", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.ALLOW_EXPLICIT_PROPERTY_RENAMING", + "new": "field com.fasterxml.jackson.databind.MapperFeature.ALLOW_EXPLICIT_PROPERTY_RENAMING", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES", + "new": "field com.fasterxml.jackson.databind.MapperFeature.BLOCK_UNSAFE_POLYMORPHIC_BASE_TYPES", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS", + "new": "field com.fasterxml.jackson.databind.MapperFeature.CAN_OVERRIDE_ACCESS_MODIFIERS", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.DEFAULT_VIEW_INCLUSION", + "new": "field com.fasterxml.jackson.databind.MapperFeature.DEFAULT_VIEW_INCLUSION", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.IGNORE_DUPLICATE_MODULE_REGISTRATIONS", + "new": "field com.fasterxml.jackson.databind.MapperFeature.IGNORE_DUPLICATE_MODULE_REGISTRATIONS", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.IGNORE_MERGE_FOR_UNMERGEABLE", + "new": "field com.fasterxml.jackson.databind.MapperFeature.IGNORE_MERGE_FOR_UNMERGEABLE", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS", + "new": "field com.fasterxml.jackson.databind.MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.SORT_PROPERTIES_ALPHABETICALLY", + "new": "field com.fasterxml.jackson.databind.MapperFeature.SORT_PROPERTIES_ALPHABETICALLY", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.USE_BASE_TYPE_AS_DEFAULT_IMPL", + "new": "field com.fasterxml.jackson.databind.MapperFeature.USE_BASE_TYPE_AS_DEFAULT_IMPL", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.USE_STATIC_TYPING", + "new": "field com.fasterxml.jackson.databind.MapperFeature.USE_STATIC_TYPING", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.USE_STD_BEAN_NAMING", + "new": "field com.fasterxml.jackson.databind.MapperFeature.USE_STD_BEAN_NAMING", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.enumConstantOrderChanged", + "old": "field com.fasterxml.jackson.databind.MapperFeature.USE_WRAPPER_NAME_AS_PROPERTY_NAME", + "new": "field com.fasterxml.jackson.databind.MapperFeature.USE_WRAPPER_NAME_AS_PROPERTY_NAME", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.ObjectMapper.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.ObjectMapper.serialVersionUID", + "serialVersionUID": "2", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method java.lang.Object com.fasterxml.jackson.databind.ObjectMapper::_unwrapAndDeserialize(com.fasterxml.jackson.core.JsonParser, com.fasterxml.jackson.databind.DeserializationContext, com.fasterxml.jackson.databind.DeserializationConfig, com.fasterxml.jackson.databind.JavaType, com.fasterxml.jackson.databind.JsonDeserializer) throws java.io.IOException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.finalMethodAddedToNonFinalClass", + "new": "method void com.fasterxml.jackson.databind.ObjectMapper::_writeValueAndClose(com.fasterxml.jackson.core.JsonGenerator, java.lang.Object) throws java.io.IOException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.exception.runtimeAdded", + "old": "method T com.fasterxml.jackson.databind.ObjectMapper::treeToValue(com.fasterxml.jackson.core.TreeNode, java.lang.Class) throws com.fasterxml.jackson.core.JsonProcessingException", + "new": "method T com.fasterxml.jackson.databind.ObjectMapper::treeToValue(com.fasterxml.jackson.core.TreeNode, java.lang.Class) throws java.lang.IllegalArgumentException, com.fasterxml.jackson.core.JsonProcessingException", + "exception": "java.lang.IllegalArgumentException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method java.lang.Object com.fasterxml.jackson.databind.ObjectReader::_unwrapAndDeserialize(com.fasterxml.jackson.core.JsonParser, com.fasterxml.jackson.databind.DeserializationContext, com.fasterxml.jackson.databind.JavaType, com.fasterxml.jackson.databind.JsonDeserializer) throws java.io.IOException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method void com.fasterxml.jackson.databind.ObjectWriter::_configAndWriteValue(com.fasterxml.jackson.core.JsonGenerator, java.lang.Object) throws java.io.IOException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method void com.fasterxml.jackson.databind.ObjectWriter::_configureGenerator(com.fasterxml.jackson.core.JsonGenerator)", + "new": "method com.fasterxml.jackson.core.JsonGenerator com.fasterxml.jackson.databind.ObjectWriter::_configureGenerator(com.fasterxml.jackson.core.JsonGenerator)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.finalMethodAddedToNonFinalClass", + "new": "method void com.fasterxml.jackson.databind.ObjectWriter::_writeValueAndClose(com.fasterxml.jackson.core.JsonGenerator, java.lang.Object) throws java.io.IOException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDChanged", + "new": "field com.fasterxml.jackson.databind.PropertyNamingStrategy.serialVersionUID", + "oldSerialVersionUID": "-5237220944964015475", + "newSerialVersionUID": "2", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method T com.fasterxml.jackson.databind.SerializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.SerializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.generics.formalTypeParameterRemoved", + "old": "method T com.fasterxml.jackson.databind.SerializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.SerializationConfig::introspect(com.fasterxml.jackson.databind.JavaType)", + "typeParameter": "T extends com.fasterxml.jackson.databind.BeanDescription", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.cfg.BaseSettings.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.cfg.BaseSettings.serialVersionUID", + "serialVersionUID": "1", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.numberOfParametersChanged", + "old": "method void com.fasterxml.jackson.databind.cfg.BaseSettings::(com.fasterxml.jackson.databind.introspect.ClassIntrospector, com.fasterxml.jackson.databind.AnnotationIntrospector, com.fasterxml.jackson.databind.PropertyNamingStrategy, com.fasterxml.jackson.databind.type.TypeFactory, com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder, java.text.DateFormat, com.fasterxml.jackson.databind.cfg.HandlerInstantiator, java.util.Locale, java.util.TimeZone, com.fasterxml.jackson.core.Base64Variant)", + "new": "method void com.fasterxml.jackson.databind.cfg.BaseSettings::(com.fasterxml.jackson.databind.introspect.ClassIntrospector, com.fasterxml.jackson.databind.AnnotationIntrospector, com.fasterxml.jackson.databind.PropertyNamingStrategy, com.fasterxml.jackson.databind.type.TypeFactory, com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder, java.text.DateFormat, com.fasterxml.jackson.databind.cfg.HandlerInstantiator, java.util.Locale, java.util.TimeZone, com.fasterxml.jackson.core.Base64Variant, com.fasterxml.jackson.databind.jsontype.PolymorphicTypeValidator, com.fasterxml.jackson.databind.introspect.AccessorNamingStrategy.Provider)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.finalMethodAddedToNonFinalClass", + "new": "method com.fasterxml.jackson.databind.introspect.AccessorNamingStrategy.Provider com.fasterxml.jackson.databind.cfg.MapperConfig>::getAccessorNaming()", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.abstractMethodAdded", + "new": "method com.fasterxml.jackson.annotation.JsonIncludeProperties.Value com.fasterxml.jackson.databind.cfg.MapperConfig>::getDefaultPropertyInclusions(java.lang.Class, com.fasterxml.jackson.databind.introspect.AnnotatedClass)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.deser.DefaultDeserializationContext.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.deser.DefaultDeserializationContext.serialVersionUID", + "serialVersionUID": "1", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.abstractMethodAdded", + "new": "method com.fasterxml.jackson.databind.deser.DefaultDeserializationContext com.fasterxml.jackson.databind.deser.DefaultDeserializationContext::createDummyInstance(com.fasterxml.jackson.databind.DeserializationConfig)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.fasterxml.jackson.databind.deser.UnresolvedForwardReference", + "new": "class com.fasterxml.jackson.databind.deser.UnresolvedForwardReference", + "superClass": "com.fasterxml.jackson.core.JacksonException", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method com.fasterxml.jackson.databind.introspect.AnnotatedParameter com.fasterxml.jackson.databind.deser.ValueInstantiator::getIncompleteParameter()", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method void com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap::(boolean, java.util.Collection)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.numberOfParametersChanged", + "old": "method com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap::construct(java.util.Collection, boolean)", + "new": "method com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap::construct(com.fasterxml.jackson.databind.cfg.MapperConfig, java.util.Collection, java.util.Map>, boolean)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method void com.fasterxml.jackson.databind.deser.impl.BeanPropertyMap::replace(com.fasterxml.jackson.databind.deser.SettableBeanProperty)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.abstractMethodAdded", + "new": "method com.fasterxml.jackson.databind.BeanDescription com.fasterxml.jackson.databind.introspect.ClassIntrospector::forDeserializationWithBuilder(com.fasterxml.jackson.databind.DeserializationConfig, com.fasterxml.jackson.databind.JavaType, com.fasterxml.jackson.databind.introspect.ClassIntrospector.MixInResolver, com.fasterxml.jackson.databind.BeanDescription)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.numberOfParametersChanged", + "old": "method void com.fasterxml.jackson.databind.ser.BeanSerializer::(com.fasterxml.jackson.databind.ser.std.BeanSerializerBase, java.util.Set)", + "new": "method void com.fasterxml.jackson.databind.ser.BeanSerializer::(com.fasterxml.jackson.databind.ser.std.BeanSerializerBase, java.util.Set, java.util.Set)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.removed", + "old": "method com.fasterxml.jackson.databind.ser.impl.PropertySerializerMap com.fasterxml.jackson.databind.ser.impl.PropertySerializerMap::emptyMap()", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.visibilityReduced", + "old": "method void com.fasterxml.jackson.databind.ser.std.BeanSerializerBase::(com.fasterxml.jackson.databind.ser.std.BeanSerializerBase, com.fasterxml.jackson.databind.ser.BeanPropertyWriter[], com.fasterxml.jackson.databind.ser.BeanPropertyWriter[])", + "new": "method void com.fasterxml.jackson.databind.ser.std.BeanSerializerBase::(com.fasterxml.jackson.databind.ser.std.BeanSerializerBase, com.fasterxml.jackson.databind.ser.BeanPropertyWriter[], com.fasterxml.jackson.databind.ser.BeanPropertyWriter[])", + "oldVisibility": "public", + "newVisibility": "protected", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.abstractMethodAdded", + "new": "method com.fasterxml.jackson.databind.ser.std.BeanSerializerBase com.fasterxml.jackson.databind.ser.std.BeanSerializerBase::withByNameInclusion(java.util.Set, java.util.Set)", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.method.abstractMethodAdded", + "new": "method com.fasterxml.jackson.databind.ser.std.BeanSerializerBase com.fasterxml.jackson.databind.ser.std.BeanSerializerBase::withProperties(com.fasterxml.jackson.databind.ser.BeanPropertyWriter[], com.fasterxml.jackson.databind.ser.BeanPropertyWriter[])", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.typeChanged", + "old": "field com.fasterxml.jackson.databind.type.TypeFactory._typeCache", + "new": "field com.fasterxml.jackson.databind.type.TypeFactory._typeCache", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.field.serialVersionUIDUnchanged", + "old": "field com.fasterxml.jackson.databind.type.TypeFactory.serialVersionUID", + "new": "field com.fasterxml.jackson.databind.type.TypeFactory.serialVersionUID", + "serialVersionUID": "1", + "justification": "JAVA-2904: Jackson upgraded to 2.12.0. Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" } ] } From d765a6a59162ee596418001ec56b9360679d8d6b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 16:06:49 +0100 Subject: [PATCH 625/979] Remove redundant call to Collections.unmodifiableList in GraalGetpid --- .../com/datastax/oss/driver/internal/core/os/GraalGetpid.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java index e910f3ce80a..913f13557da 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/os/GraalGetpid.java @@ -28,7 +28,7 @@ static class Directives implements CContext.Directives { @Override public List getHeaderFiles() { - return Collections.unmodifiableList(Collections.singletonList("")); + return Collections.singletonList(""); } } From 84c6dfe920b6aa8a8af299f35f5e042803323a51 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 16:08:10 +0100 Subject: [PATCH 626/979] Fix various compiler warnings --- ...tinuousCqlRequestHandlerReprepareTest.java | 14 +++--- .../CompositeDriverConfigReloadTest.java | 7 +-- .../LoadBalancingPolicyWrapperTest.java | 48 ++++++++----------- 3 files changed, 30 insertions(+), 39 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java 
b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java index 634c6eda5d2..2d287210c2a 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerReprepareTest.java @@ -18,6 +18,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; import static com.datastax.oss.protocol.internal.Frame.NO_PAYLOAD; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyMap; @@ -91,7 +92,7 @@ public void should_prepare_and_retry_on_same_node(DseProtocolVersion version) { when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) .then( invocation -> { - AdminRequestHandler admin = invocation.getArgument(3); + AdminRequestHandler admin = invocation.getArgument(3); admin.onResponse(defaultFrameOf(prepared)); return future; }); @@ -121,7 +122,7 @@ public void should_abort_when_prepare_fails_with_unrecoverable_error(DseProtocol when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) .then( invocation -> { - AdminRequestHandler admin = invocation.getArgument(3); + AdminRequestHandler admin = invocation.getArgument(3); admin.onResponse(defaultFrameOf(unrecoverable)); return future; }); @@ -135,10 +136,9 @@ public void should_abort_when_prepare_fails_with_unrecoverable_error(DseProtocol verify(harness.getChannel(node1)).write(any(Prepare.class), anyBoolean(), anyMap(), any()); assertThat(handler.getState()).isEqualTo(-2); - assertThat(page1Future) - .hasFailedWithThrowableThat() - .isInstanceOf(SyntaxError.class) - .hasMessageContaining("bad query"); + 
assertThat(page1Future).isCompletedExceptionally(); + Throwable t = catchThrowable(() -> page1Future.toCompletableFuture().get()); + assertThat(t).hasRootCauseInstanceOf(SyntaxError.class).hasMessageContaining("bad query"); } } @@ -158,7 +158,7 @@ public void should_try_next_node_when_prepare_fails_with_recoverable_error( when(harness.getChannel(node1).write(any(Prepare.class), anyBoolean(), anyMap(), any())) .then( invocation -> { - AdminRequestHandler admin = invocation.getArgument(3); + AdminRequestHandler admin = invocation.getArgument(3); admin.onResponse(defaultFrameOf(recoverable)); return future; }); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java index 761ecf9cc60..b97ae4f5b45 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/composite/CompositeDriverConfigReloadTest.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.config.composite; import static com.datastax.oss.driver.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -83,9 +84,9 @@ public void should_delegate_reloading_to_reloadable_children( if (compositeShouldBeReloadable) { assertThat(reloadFuture).isCompletedWithValue(true); } else { - assertThat(reloadFuture) - .hasFailedWithThrowableThat() - .isInstanceOf(UnsupportedOperationException.class); + assertThat(reloadFuture).isCompletedExceptionally(); + Throwable t = catchThrowable(() -> reloadFuture.toCompletableFuture().get()); + assertThat(t).hasRootCauseInstanceOf(UnsupportedOperationException.class); } verify(primaryLoader, 
primaryIsReloadable ? times(1) : never()).reload(); verify(fallbackLoader, fallbackIsReloadable ? times(1) : never()).reload(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapperTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapperTest.java index d7be8e96b0b..a9bd9951b9d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapperTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapperTest.java @@ -18,7 +18,6 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyMap; -import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; @@ -41,6 +40,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.Map; +import java.util.Objects; import java.util.Queue; import java.util.Set; import java.util.UUID; @@ -54,7 +54,6 @@ import org.mockito.InOrder; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.stubbing.Answer; @RunWith(MockitoJUnitRunner.class) public class LoadBalancingPolicyWrapperTest { @@ -63,9 +62,8 @@ public class LoadBalancingPolicyWrapperTest { private DefaultNode node2; private DefaultNode node3; - private Map allNodes; private Set contactPoints; - private Queue defaultPolicysQueryPlan; + private Queue defaultPolicyQueryPlan; @Mock private InternalDriverContext context; @Mock private LoadBalancingPolicy policy1; @@ -88,16 +86,18 @@ public void setup() { node3 = TestNodeFactory.newNode(3, context); contactPoints = ImmutableSet.of(node1, node2); - allNodes = + Map allNodes = ImmutableMap.of( - node1.getHostId(), 
node1, node2.getHostId(), node2, node3.getHostId(), node3); + Objects.requireNonNull(node1.getHostId()), node1, + Objects.requireNonNull(node2.getHostId()), node2, + Objects.requireNonNull(node3.getHostId()), node3); when(metadataManager.getMetadata()).thenReturn(metadata); when(metadata.getNodes()).thenReturn(allNodes); when(metadataManager.getContactPoints()).thenReturn(contactPoints); when(context.getMetadataManager()).thenReturn(metadataManager); - defaultPolicysQueryPlan = Lists.newLinkedList(ImmutableList.of(node3, node2, node1)); - when(policy1.newQueryPlan(null, null)).thenReturn(defaultPolicysQueryPlan); + defaultPolicyQueryPlan = Lists.newLinkedList(ImmutableList.of(node3, node2, node1)); + when(policy1.newQueryPlan(null, null)).thenReturn(defaultPolicyQueryPlan); eventBus = spy(new EventBus("test")); when(context.getEventBus()).thenReturn(eventBus); @@ -125,7 +125,7 @@ public void should_build_query_plan_from_contact_points_before_init() { for (LoadBalancingPolicy policy : ImmutableList.of(policy1, policy2, policy3)) { verify(policy, never()).newQueryPlan(null, null); } - assertThat(queryPlan).containsOnlyElementsOf(contactPoints); + assertThat(queryPlan).hasSameElementsAs(contactPoints); } @Test @@ -142,7 +142,7 @@ public void should_fetch_query_plan_from_policy_after_init() { // Then // no-arg newQueryPlan() uses the default profile verify(policy1).newQueryPlan(null, null); - assertThat(queryPlan).isEqualTo(defaultPolicysQueryPlan); + assertThat(queryPlan).isEqualTo(defaultPolicyQueryPlan); } @Test @@ -236,25 +236,16 @@ public void should_propagate_node_states_to_policies_after_init() { @Test public void should_accumulate_events_during_init_and_replay() throws InterruptedException { // Given - // Hack to obtain concurrency: the main thread blocks in init, while another thread fires an - // event on the bus - CountDownLatch eventLatch = new CountDownLatch(3); + // Hack to obtain concurrency: the main thread releases another thread and blocks; then the + 
// other thread fires an event on the bus and unblocks the main thread. + CountDownLatch eventLatch = new CountDownLatch(1); CountDownLatch initLatch = new CountDownLatch(1); - Answer mockInit = - i -> { - eventLatch.countDown(); - initLatch.await(500, TimeUnit.MILLISECONDS); - return null; - }; - for (LoadBalancingPolicy policy : ImmutableList.of(policy1, policy2, policy3)) { - doAnswer(mockInit).when(policy).init(anyMap(), any(DistanceReporter.class)); - } // When Runnable runnable = () -> { try { - eventLatch.await(500, TimeUnit.MILLISECONDS); + eventLatch.await(); } catch (InterruptedException e) { throw new RuntimeException(e); } @@ -266,15 +257,14 @@ public void should_accumulate_events_during_init_and_replay() throws Interrupted wrapper.init(); // Then - // wait for init launch to signal that runnable is complete. - initLatch.await(500, TimeUnit.MILLISECONDS); + // unblock the thread that will fire the event, and waits until it finishes + eventLatch.countDown(); + boolean ok = initLatch.await(500, TimeUnit.MILLISECONDS); + assertThat(ok).isTrue(); for (LoadBalancingPolicy policy : ImmutableList.of(policy1, policy2, policy3)) { verify(policy).onDown(node1); } - if (thread.isAlive()) { - // thread still completing - sleep to allow thread to complete. 
- Thread.sleep(500); - } + thread.join(500); assertThat(thread.isAlive()).isFalse(); } } From 97c665730bdfcb4657985cab64935274d0f750ee Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 16:09:00 +0100 Subject: [PATCH 627/979] JAVA-2871 follow-up: Fix failing tests due to wrong keyspace exclusions --- .../oss/driver/core/metadata/SchemaIT.java | 158 ++++++++++-------- 1 file changed, 90 insertions(+), 68 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index aa5a862cb42..dc915e25c77 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -38,8 +38,10 @@ import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.protocol.internal.util.Bytes; import java.nio.ByteBuffer; +import java.util.Arrays; import java.util.Collections; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import org.junit.AssumptionViolatedException; import org.junit.Rule; @@ -51,11 +53,12 @@ @Category(ParallelizableTests.class) public class SchemaIT { - private static final Version DSE_MIN_VIRTUAL_TABLES = Version.parse("6.7.0"); + private static final Version DSE_MIN_VIRTUAL_TABLES = + Objects.requireNonNull(Version.parse("6.7.0")); - private CcmRule ccmRule = CcmRule.getInstance(); + private final CcmRule ccmRule = CcmRule.getInstance(); - private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + private final SessionRule sessionRule = SessionRule.builder(ccmRule).build(); @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); @@ -187,59 +190,69 @@ public void should_refresh_schema_manually() { public void should_get_virtual_metadata() { skipIfDse60(); - Metadata md = 
sessionRule.session().getMetadata(); - KeyspaceMetadata kmd = md.getKeyspace("system_views").get(); - - // Keyspace name should be set, marked as virtual, and have at least sstable_tasks table. - // All other values should be defaulted since they are not defined in the virtual schema tables. - assertThat(kmd.getTables().size()).isGreaterThanOrEqualTo(1); - assertThat(kmd.isVirtual()).isTrue(); - assertThat(kmd.isDurableWrites()).isFalse(); - assertThat(kmd.getName().asCql(true)).isEqualTo("system_views"); - - // Virtual tables lack User Types, Functions, Views and Aggregates - assertThat(kmd.getUserDefinedTypes().size()).isEqualTo(0); - assertThat(kmd.getFunctions().size()).isEqualTo(0); - assertThat(kmd.getViews().size()).isEqualTo(0); - assertThat(kmd.getAggregates().size()).isEqualTo(0); - - assertThat(kmd.describe(true)) - .isEqualTo( - "/* VIRTUAL KEYSPACE system_views WITH replication = { 'class' : 'null' } " - + "AND durable_writes = false; */"); - // Table name should be set, marked as virtual, and it should have columns set. - // indexes, views, clustering column, clustering order and id are not defined in the virtual - // schema tables. 
- TableMetadata tm = kmd.getTable("sstable_tasks").get(); - assertThat(tm).isNotNull(); - assertThat(tm.getName().toString()).isEqualTo("sstable_tasks"); - assertThat(tm.isVirtual()).isTrue(); - assertThat(tm.getColumns().size()).isEqualTo(7); - assertThat(tm.getIndexes().size()).isEqualTo(0); - assertThat(tm.getPartitionKey().size()).isEqualTo(1); - assertThat(tm.getPartitionKey().get(0).getName().toString()).isEqualTo("keyspace_name"); - assertThat(tm.getClusteringColumns().size()).isEqualTo(2); - assertThat(tm.getId().isPresent()).isFalse(); - assertThat(tm.getOptions().size()).isEqualTo(0); - assertThat(tm.getKeyspace()).isEqualTo(kmd.getName()); - assertThat(tm.describe(true)) - .isEqualTo( - "/* VIRTUAL TABLE system_views.sstable_tasks (\n" - + " keyspace_name text,\n" - + " table_name text,\n" - + " task_id uuid,\n" - + " kind text,\n" - + " progress bigint,\n" - + " total bigint,\n" - + " unit text,\n" - + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" - + "); */"); - // ColumnMetadata is as expected - ColumnMetadata cm = tm.getColumn("progress").get(); - assertThat(cm).isNotNull(); - assertThat(cm.getParent()).isEqualTo(tm.getName()); - assertThat(cm.getType()).isEqualTo(DataTypes.BIGINT); - assertThat(cm.getName().toString()).isEqualTo("progress"); + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, + Collections.singletonList("system_views")) + .build(); + try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + + Metadata md = session.getMetadata(); + KeyspaceMetadata kmd = md.getKeyspace("system_views").get(); + + // Keyspace name should be set, marked as virtual, and have at least sstable_tasks table. + // All other values should be defaulted since they are not defined in the virtual schema + // tables. 
+ assertThat(kmd.getTables().size()).isGreaterThanOrEqualTo(1); + assertThat(kmd.isVirtual()).isTrue(); + assertThat(kmd.isDurableWrites()).isFalse(); + assertThat(kmd.getName().asCql(true)).isEqualTo("system_views"); + + // Virtual tables lack User Types, Functions, Views and Aggregates + assertThat(kmd.getUserDefinedTypes().size()).isEqualTo(0); + assertThat(kmd.getFunctions().size()).isEqualTo(0); + assertThat(kmd.getViews().size()).isEqualTo(0); + assertThat(kmd.getAggregates().size()).isEqualTo(0); + + assertThat(kmd.describe(true)) + .isEqualTo( + "/* VIRTUAL KEYSPACE system_views WITH replication = { 'class' : 'null' } " + + "AND durable_writes = false; */"); + // Table name should be set, marked as virtual, and it should have columns set. + // indexes, views, clustering column, clustering order and id are not defined in the virtual + // schema tables. + TableMetadata tm = kmd.getTable("sstable_tasks").get(); + assertThat(tm).isNotNull(); + assertThat(tm.getName().toString()).isEqualTo("sstable_tasks"); + assertThat(tm.isVirtual()).isTrue(); + assertThat(tm.getColumns().size()).isEqualTo(7); + assertThat(tm.getIndexes().size()).isEqualTo(0); + assertThat(tm.getPartitionKey().size()).isEqualTo(1); + assertThat(tm.getPartitionKey().get(0).getName().toString()).isEqualTo("keyspace_name"); + assertThat(tm.getClusteringColumns().size()).isEqualTo(2); + assertThat(tm.getId().isPresent()).isFalse(); + assertThat(tm.getOptions().size()).isEqualTo(0); + assertThat(tm.getKeyspace()).isEqualTo(kmd.getName()); + assertThat(tm.describe(true)) + .isEqualTo( + "/* VIRTUAL TABLE system_views.sstable_tasks (\n" + + " keyspace_name text,\n" + + " table_name text,\n" + + " task_id uuid,\n" + + " kind text,\n" + + " progress bigint,\n" + + " total bigint,\n" + + " unit text,\n" + + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + + "); */"); + // ColumnMetadata is as expected + ColumnMetadata cm = tm.getColumn("progress").get(); + assertThat(cm).isNotNull(); + 
assertThat(cm.getParent()).isEqualTo(tm.getName()); + assertThat(cm.getType()).isEqualTo(DataTypes.BIGINT); + assertThat(cm.getName().toString()).isEqualTo("progress"); + } } @CassandraRequirement(min = "4.0", description = "virtual tables introduced in 4.0") @@ -247,18 +260,27 @@ public void should_get_virtual_metadata() { public void should_exclude_virtual_keyspaces_from_token_map() { skipIfDse60(); - Metadata metadata = sessionRule.session().getMetadata(); - Map keyspaces = metadata.getKeyspaces(); - assertThat(keyspaces) - .containsKey(CqlIdentifier.fromCql("system_views")) - .containsKey(CqlIdentifier.fromCql("system_virtual_schema")); - - TokenMap tokenMap = metadata.getTokenMap().orElseThrow(AssertionError::new); - ByteBuffer partitionKey = Bytes.fromHexString("0x00"); // value does not matter - assertThat(tokenMap.getReplicas("system_views", partitionKey)).isEmpty(); - assertThat(tokenMap.getReplicas("system_virtual_schema", partitionKey)).isEmpty(); - // Check that a non-virtual keyspace is present - assertThat(tokenMap.getReplicas(sessionRule.keyspace(), partitionKey)).isNotEmpty(); + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METADATA_SCHEMA_REFRESHED_KEYSPACES, + Arrays.asList( + "system_views", "system_virtual_schema", sessionRule.keyspace().asInternal())) + .build(); + try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + Metadata metadata = session.getMetadata(); + Map keyspaces = metadata.getKeyspaces(); + assertThat(keyspaces) + .containsKey(CqlIdentifier.fromCql("system_views")) + .containsKey(CqlIdentifier.fromCql("system_virtual_schema")); + + TokenMap tokenMap = metadata.getTokenMap().orElseThrow(AssertionError::new); + ByteBuffer partitionKey = Bytes.fromHexString("0x00"); // value does not matter + assertThat(tokenMap.getReplicas("system_views", partitionKey)).isEmpty(); + assertThat(tokenMap.getReplicas("system_virtual_schema", partitionKey)).isEmpty(); + // 
Check that a non-virtual keyspace is present + assertThat(tokenMap.getReplicas(sessionRule.keyspace(), partitionKey)).isNotEmpty(); + } } private void skipIfDse60() { From 915fbea5e2ce889caacb475bcb95a363cf4b720b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 16:15:18 +0100 Subject: [PATCH 628/979] Fix various compiler warnings in integration tests --- .../graph/ClassicGraphDataTypeITBase.java | 38 +++++++++-------- .../core/graph/CoreGraphDataTypeITBase.java | 41 +++++++++++++------ .../TableGraphMetadataCaseSensitiveIT.java | 2 +- .../metadata/schema/TableGraphMetadataIT.java | 2 +- 4 files changed, 50 insertions(+), 33 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java index 4f30a51fec8..7251a88fe4f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphDataTypeITBase.java @@ -35,6 +35,7 @@ import java.time.Instant; import java.time.LocalDate; import java.time.LocalTime; +import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; @@ -46,11 +47,12 @@ @RunWith(DataProviderRunner.class) public abstract class ClassicGraphDataTypeITBase { - private static final boolean IS_DSE50 = CcmBridge.VERSION.compareTo(Version.parse("5.1")) < 0; + private static final boolean IS_DSE50 = + CcmBridge.VERSION.compareTo(Objects.requireNonNull(Version.parse("5.1"))) < 0; private static final Set TYPES_REQUIRING_DSE51 = ImmutableSet.of("Date()", "Time()", "Point()", "Linestring()", "Polygon()"); - private static AtomicInteger schemaCounter = new AtomicInteger(); + private static final AtomicInteger SCHEMA_COUNTER = new AtomicInteger(); @DataProvider public static Object[][] 
typeSamples() { @@ -97,29 +99,29 @@ public static Object[][] typeSamples() { {"Decimal()", new BigDecimal("8675309.9998")}, {"Varint()", new BigInteger("8675309")}, // Geospatial types - {"Point().withBounds(-2, -2, 2, 2)", Point.fromCoordinates((double) 0, (double) 1)}, - {"Point().withBounds(-40, -40, 40, 40)", Point.fromCoordinates((double) -5, (double) 20)}, + {"Point().withBounds(-2, -2, 2, 2)", Point.fromCoordinates(0, 1)}, + {"Point().withBounds(-40, -40, 40, 40)", Point.fromCoordinates(-5, 20)}, { "Linestring().withGeoBounds()", LineString.fromPoints( - Point.fromCoordinates((double) 30, (double) 10), - Point.fromCoordinates((double) 10, (double) 30), - Point.fromCoordinates((double) 40, (double) 40)) + Point.fromCoordinates(30, 10), + Point.fromCoordinates(10, 30), + Point.fromCoordinates(40, 40)) }, { "Polygon().withGeoBounds()", Polygon.builder() .addRing( - Point.fromCoordinates((double) 35, (double) 10), - Point.fromCoordinates((double) 45, (double) 45), - Point.fromCoordinates((double) 15, (double) 40), - Point.fromCoordinates((double) 10, (double) 20), - Point.fromCoordinates((double) 35, (double) 10)) + Point.fromCoordinates(35, 10), + Point.fromCoordinates(45, 45), + Point.fromCoordinates(15, 40), + Point.fromCoordinates(10, 20), + Point.fromCoordinates(35, 10)) .addRing( - Point.fromCoordinates((double) 20, (double) 30), - Point.fromCoordinates((double) 35, (double) 35), - Point.fromCoordinates((double) 30, (double) 20), - Point.fromCoordinates((double) 20, (double) 30)) + Point.fromCoordinates(20, 30), + Point.fromCoordinates(35, 35), + Point.fromCoordinates(30, 20), + Point.fromCoordinates(20, 30)) .build() } }; @@ -133,11 +135,11 @@ public void should_create_and_retrieve_vertex_property_with_correct_type( throw new AssumptionViolatedException(type + " not supported in DSE " + CcmBridge.VERSION); } - int id = schemaCounter.getAndIncrement(); + int id = SCHEMA_COUNTER.getAndIncrement(); String vertexLabel = "vertex" + id; String propertyName = 
"prop" + id; - GraphStatement addVertexLabelAndProperty = + GraphStatement addVertexLabelAndProperty = ScriptGraphStatement.builder( "schema.propertyKey(property)." + type diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java index ef9568ce80f..333110096a7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java @@ -15,7 +15,11 @@ */ package com.datastax.dse.driver.api.core.graph; -import static com.datastax.oss.driver.api.core.type.DataTypes.*; +import static com.datastax.oss.driver.api.core.type.DataTypes.BIGINT; +import static com.datastax.oss.driver.api.core.type.DataTypes.INT; +import static com.datastax.oss.driver.api.core.type.DataTypes.TEXT; +import static com.datastax.oss.driver.api.core.type.DataTypes.listOf; +import static com.datastax.oss.driver.api.core.type.DataTypes.tupleOf; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.data.geometry.LineString; @@ -97,9 +101,8 @@ public void should_create_and_retrieve_correct_data_with_types() { session .getMetadata() .getKeyspace(graphName()) - .get() - .getUserDefinedType("udt_graphbinary") - .get() + .flatMap(keyspace -> keyspace.getUserDefinedType("udt_graphbinary")) + .orElseThrow(IllegalStateException::new) .newValue( "some text", tupleOf(INT, TEXT).newValue(5, "Bar"), "some missing text")) .put( @@ -107,9 +110,10 @@ public void should_create_and_retrieve_correct_data_with_types() { session .getMetadata() .getKeyspace(graphName()) - .get() - .getUserDefinedType("udt_graphbinarygeo") - .get() + .flatMap( + keyspaceMetadata -> + keyspaceMetadata.getUserDefinedType("udt_graphbinarygeo")) + .orElseThrow(IllegalStateException::new) .newValue( 
Point.fromCoordinates(3.3, 4.4), LineString.fromPoints( @@ -148,8 +152,7 @@ public void should_insert_and_retrieve_nested_UDTS_and_tuples() { CqlSession session = session(); // use CQL to create type for now because DSP-17567 is not in yet, so this is more stable - session.execute( - String.format("CREATE TYPE %s.udt1(" + "a int" + ", b text" + ")", graphName())); + session.execute(String.format("CREATE TYPE %s.udt1(a int, b text)", graphName())); session.execute( String.format( @@ -175,11 +178,19 @@ public void should_insert_and_retrieve_nested_UDTS_and_tuples() { graphName())); UserDefinedType udt1 = - session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt1").get(); + session + .getMetadata() + .getKeyspace(graphName()) + .flatMap(keyspace -> keyspace.getUserDefinedType("udt1")) + .orElseThrow(IllegalStateException::new); UdtValue udtValue1 = udt1.newValue(1, "2"); UserDefinedType udt2 = - session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt2").get(); + session + .getMetadata() + .getKeyspace(graphName()) + .flatMap(keyspace -> keyspace.getUserDefinedType("udt2")) + .orElseThrow(IllegalStateException::new); TupleType secondNested = tupleOf(BIGINT, listOf(BIGINT)); TupleType firstNested = tupleOf(TEXT, secondNested); UdtValue udtValue2 = @@ -191,7 +202,11 @@ public void should_insert_and_retrieve_nested_UDTS_and_tuples() { firstNested.newValue("6", secondNested.newValue(7L, ImmutableList.of(8L)))); UserDefinedType udt3 = - session.getMetadata().getKeyspace(graphName()).get().getUserDefinedType("udt3").get(); + session + .getMetadata() + .getKeyspace(graphName()) + .flatMap(keyspace -> keyspace.getUserDefinedType("udt3")) + .orElseThrow(IllegalStateException::new); UdtValue udtValue3 = udt3.newValue( ImmutableList.of(1), @@ -225,7 +240,7 @@ private void runTest(Map properties, String vertexLabel, int ver properties.forEach((k, v) -> assertThat(results.get(formatPropertyName(k))).isEqualTo(v)); } - private static 
GraphStatement createVertexLabelStatement( + private static GraphStatement createVertexLabelStatement( Map properties, String vertexLabel) { StringBuilder ddl = new StringBuilder("schema.vertexLabel(vertexLabel).ifNotExists().partitionBy('id', Int)"); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java index 420d231c554..77bfeb13896 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java @@ -35,7 +35,7 @@ /** * A regression test for a specific case of schema parsing for graphs built from tables containing - * case-sensitive column names in it's tables. See JAVA-2492 for more information. + * case-sensitive column names in its tables. See JAVA-2492 for more information. 
*/ @Category(ParallelizableTests.class) @DseRequirement(min = "6.8") diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java index f9516fa22a3..933951dd7f8 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java @@ -48,7 +48,7 @@ public class TableGraphMetadataIT { public static void createTables() { CqlSession session = SESSION_RULE.session(); - session.execute("CREATE TABLE person (name text PRIMARY KEY) " + "WITH VERTEX LABEL"); + session.execute("CREATE TABLE person (name text PRIMARY KEY) WITH VERTEX LABEL"); session.execute( "CREATE TABLE software (company text, name text, version int, " + "PRIMARY KEY ((company, name), version)) " From 6dcd345cdf0e4a61cbd9dc771abdd6611e9a58b9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 5 Jan 2021 18:50:34 +0100 Subject: [PATCH 629/979] Improve error messages when assertions fail in TokenITBase --- .../oss/driver/core/metadata/TokenITBase.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java index 08ac446b22f..4ebe1bc4e7b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java @@ -250,17 +250,27 @@ private void checkRanges(Session session, CqlIdentifier keyspace, int replicatio Set hostRanges = tokenMap.getTokenRanges(keyspace, node); // Special case: When using vnodes the tokens are not evenly assigned to each replica. 
if (!useVnodes) { - assertThat(hostRanges).hasSize(replicationFactor * tokensPerNode); + assertThat(hostRanges) + .as( + "Node %s: expected %d ranges, got %d", + node, replicationFactor * tokensPerNode, hostRanges.size()) + .hasSize(replicationFactor * tokensPerNode); } allRangesWithDuplicates.addAll(hostRanges); } // Special case check for vnodes to ensure that total number of replicated ranges is correct. - assertThat(allRangesWithDuplicates).hasSize(3 * tokensPerNode * replicationFactor); + assertThat(allRangesWithDuplicates) + .as( + "Expected %d total replicated ranges with duplicates, got %d", + 3 * replicationFactor * tokensPerNode, allRangesWithDuplicates.size()) + .hasSize(3 * replicationFactor * tokensPerNode); // Once we ignore duplicates, the number of ranges should match the number of nodes. Set allRanges = new TreeSet<>(allRangesWithDuplicates); - assertThat(allRanges).hasSize(3 * tokensPerNode); + assertThat(allRanges) + .as("Expected %d total replicated ranges, got %d", 3 * tokensPerNode, allRanges.size()) + .hasSize(3 * tokensPerNode); // And the ranges should cover the whole ring and no ranges intersect. checkRanges(allRanges); @@ -269,7 +279,7 @@ private void checkRanges(Session session, CqlIdentifier keyspace, int replicatio // Ensures that no ranges intersect and that they cover the entire ring. private void checkRanges(Collection ranges) { // Ensure no ranges intersect. 
- TokenRange[] rangesArray = ranges.toArray(new TokenRange[ranges.size()]); + TokenRange[] rangesArray = ranges.toArray(new TokenRange[0]); for (int i = 0; i < rangesArray.length; i++) { TokenRange rangeI = rangesArray[i]; for (int j = i + 1; j < rangesArray.length; j++) { From 22065cab793ed2d91577749f032e401e8be02cab Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 11 Jan 2021 15:22:37 +0100 Subject: [PATCH 630/979] Remove unnecessary `@VisibleForTesting` annotation --- .../internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java | 3 +-- .../internal/core/loadbalancing/nodeset/MultiDcNodeSet.java | 3 +-- .../internal/core/loadbalancing/nodeset/SingleDcNodeSet.java | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java index d9cf67fb7c0..7d947805d81 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/DcAgnosticNodeSet.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Collections; @@ -27,7 +26,7 @@ @ThreadSafe public class DcAgnosticNodeSet implements NodeSet { - @VisibleForTesting final Set nodes = new CopyOnWriteArraySet<>(); + private final Set nodes = new CopyOnWriteArraySet<>(); @Override public boolean add(@NonNull Node node) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java index 5c3d425ba69..741c8e9d3d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/MultiDcNodeSet.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Collections; @@ -32,7 +31,7 @@ public class MultiDcNodeSet implements NodeSet { private static final String UNKNOWN_DC = ""; - @VisibleForTesting final Map> nodes = new ConcurrentHashMap<>(); + private final Map> nodes = new ConcurrentHashMap<>(); @Override public boolean add(@NonNull Node node) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java index e638913edfd..ea7f42a0492 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/nodeset/SingleDcNodeSet.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.internal.core.loadbalancing.nodeset; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -29,7 +28,7 @@ @ThreadSafe public class SingleDcNodeSet implements NodeSet { - @VisibleForTesting final Set nodes = new CopyOnWriteArraySet<>(); + 
private final Set nodes = new CopyOnWriteArraySet<>(); private final String dc; private final Set dcs; From 5f0d5365bb2ba9d6dd057c2fabef2d2d3d493e98 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 11 Jan 2021 15:22:49 +0100 Subject: [PATCH 631/979] Make fields final --- .../oss/driver/api/core/session/ProgrammaticArguments.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 6d693f69b72..75a49fb3a59 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -173,12 +173,12 @@ public Object getMetricRegistry() { public static class Builder { - private ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); + private final ImmutableList.Builder> typeCodecsBuilder = ImmutableList.builder(); private NodeStateListener nodeStateListener; private SchemaChangeListener schemaChangeListener; private RequestTracker requestTracker; private ImmutableMap.Builder localDatacentersBuilder = ImmutableMap.builder(); - private ImmutableMap.Builder> nodeFiltersBuilder = + private final ImmutableMap.Builder> nodeFiltersBuilder = ImmutableMap.builder(); private ClassLoader classLoader; private AuthProvider authProvider; From 57fdce590f0dbc79858a888be37918976a56c220 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 11 Jan 2021 15:32:17 +0100 Subject: [PATCH 632/979] Log error when BlockHound fails to be installed --- .../DriverBlockHoundIntegrationCcmIT.java | 13 ++++++++++--- .../DriverBlockHoundIntegrationIT.java | 18 +++++++++++++++--- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java index c275eaae12b..e771b28116a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java @@ -39,6 +39,8 @@ import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import reactor.blockhound.BlockHound; import reactor.core.publisher.Flux; import reactor.core.scheduler.Schedulers; @@ -55,9 +57,8 @@ @Category(IsolatedTests.class) public class DriverBlockHoundIntegrationCcmIT extends ContinuousPagingITBase { - static { - BlockHound.install(); - } + private static final Logger LOGGER = + LoggerFactory.getLogger(DriverBlockHoundIntegrationCcmIT.class); private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); @@ -70,6 +71,12 @@ public class DriverBlockHoundIntegrationCcmIT extends ContinuousPagingITBase { @BeforeClass public static void setUp() { + try { + BlockHound.install(); + } catch (Throwable t) { + LOGGER.error("BlockHound could not be installed", t); + fail("BlockHound could not be installed", t); + } initialize(SESSION_RULE.session(), SESSION_RULE.slowProfile()); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java index afe08817fae..8c6b197bbd5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationIT.java @@ -17,6 +17,7 @@ import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.rows; import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.when; +import static org.assertj.core.api.Fail.fail; import com.datastax.dse.driver.api.core.cql.reactive.ReactiveRow; import com.datastax.oss.driver.api.core.CqlSession; @@ -27,9 +28,12 @@ import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import java.util.UUID; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import reactor.blockhound.BlockHound; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -43,14 +47,22 @@ @Category(IsolatedTests.class) public class DriverBlockHoundIntegrationIT { - static { - BlockHound.install(); - } + private static final Logger LOGGER = LoggerFactory.getLogger(DriverBlockHoundIntegrationIT.class); @ClassRule public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); + @BeforeClass + public static void setUp() { + try { + BlockHound.install(); + } catch (Throwable t) { + LOGGER.error("BlockHound could not be installed", t); + fail("BlockHound could not be installed", t); + } + } + @Before public void setup() { SIMULACRON_RULE.cluster().prime(when("SELECT c1, c2 FROM ks.t1").then(rows().row("foo", 42))); From b6f72736e061de14ec6f2f26fdce97a665b37092 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 12 Jan 2021 15:14:29 +0100 Subject: [PATCH 633/979] Upgrade native-protocol to 1.4.12 --- bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bom/pom.xml b/bom/pom.xml index 7ee0bf21874..7e2aa86d4a5 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -71,7 +71,7 @@ com.datastax.oss 
native-protocol - 1.4.11 + 1.4.12 com.datastax.oss From f624ebf5df4d93eff34b4033ac57d9fb4a6b12f5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 12 Jan 2021 15:52:25 +0100 Subject: [PATCH 634/979] JAVA-2907: Switch Tinkerpop to an optional dependency (#1522) --- changelog/README.md | 2 ++ core-shaded/pom.xml | 2 ++ core/pom.xml | 2 ++ core/revapi.json | 45 +++++++++++++++++++++++++++++++++++++++ integration-tests/pom.xml | 10 +++++++++ osgi-tests/pom.xml | 8 +++++++ upgrade_guide/README.md | 28 ++++++++++++++++++++++++ 7 files changed, 97 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index c6b981226dd..d54e6d8713a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.10.0 (in progress) +- [improvement] JAVA-2907: Switch Tinkerpop to an optional dependency +- [improvement] JAVA-2904: Upgrade Jackson to 2.12.0 and Tinkerpop to 3.4.9 - [bug] JAVA-2911: Prevent control connection from scheduling too many reconnections - [bug] JAVA-2902: Consider computed values when validating constructors for immutable entities - [new feature] JAVA-2899: Re-introduce cross-DC failover in driver 4 diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index addaf4070d1..ba8101ea31f 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -91,10 +91,12 @@ org.apache.tinkerpop gremlin-core + true org.apache.tinkerpop tinkergraph-gremlin + true org.reactivestreams diff --git a/core/pom.xml b/core/pom.xml index 6049cb2ef63..40090ea75a9 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -94,10 +94,12 @@ org.apache.tinkerpop gremlin-core + true org.apache.tinkerpop tinkergraph-gremlin + true com.fasterxml.jackson.core diff --git a/core/revapi.json b/core/revapi.json index 592d810e571..b20a307277c 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5440,6 +5440,51 @@ "new": "field com.fasterxml.jackson.databind.type.TypeFactory.serialVersionUID", "serialVersionUID": "1", "justification": "JAVA-2904: Jackson upgraded to 2.12.0. 
Caused by the exposure of ObjectMapper as a parameter in ExtraTypeCodecs.json()" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.process.remote.RemoteConnection", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.P", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.Path", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Edge", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Property", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Vertex", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.newClass", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.VertexProperty", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" } ] } diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 3cf5c8076cd..553199ea830 100644 --- 
a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -131,6 +131,16 @@ rxjava test + + org.apache.tinkerpop + gremlin-core + test + + + org.apache.tinkerpop + tinkergraph-gremlin + test + org.apache.directory.server apacheds-core diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 9a3c62ae770..cfbc99e7368 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -78,6 +78,14 @@ org.reactivestreams reactive-streams + + org.apache.tinkerpop + gremlin-core + + + org.apache.tinkerpop + tinkergraph-gremlin + org.osgi org.osgi.core diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index bba68707f8d..3b3125de12f 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -87,6 +87,34 @@ token map for these keyspaces, you now must modify the following configuration o [Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- [TokenMap]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/TokenMap.html +#### DSE Graph dependencies are now optional + +Until driver 4.9.0, the driver declared a mandatory dependency to Apache Tinkerpop, a library +required only when connecting to DSE Graph. The vast majority of Apache Cassandra users did not need +that library, but were paying the price of having that heavy-weight library in their application's +classpath. + +_Starting with driver 4.10.0, Tinkerpop is now considered an optional dependency_. + +Regular users of Apache Cassandra that do not use DSE Graph will not notice any disruption. + +DSE Graph users, however, will now have to explicitly declare a dependency to Apache Tinkerpop. 
This +can be achieved with Maven by adding the following dependencies to the `` section of +your POM file: + +```xml + + org.apache.tinkerpop + gremlin-core + ${tinkerpop.version} + + + org.apache.tinkerpop + tinkergraph-gremlin + ${tinkerpop.version} + +``` + ### 4.5.x - 4.6.0 These versions are subject to [JAVA-2676](https://datastax-oss.atlassian.net/browse/JAVA-2676), a From 1ad477a2ed725336b67e842f932eb590cbbb5d6a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 12 Jan 2021 15:56:21 +0100 Subject: [PATCH 635/979] Update Fallout duration test scripts and associated README (#1523) --- performance/README.md | 103 ++++++++++++++++++------- performance/ctool-cluster-info.png | Bin 20505 -> 14654 bytes performance/duration-test.yaml | 82 ++++++++++++++++++++ performance/graphite-setup.yaml | 94 +++++----------------- performance/metrics-dashboards.png | Bin 80419 -> 0 bytes performance/oss-performance-setup.yaml | 79 ------------------- 6 files changed, 176 insertions(+), 182 deletions(-) create mode 100644 performance/duration-test.yaml delete mode 100644 performance/metrics-dashboards.png delete mode 100644 performance/oss-performance-setup.yaml diff --git a/performance/README.md b/performance/README.md index c4b6ec2e929..e42c83a57f7 100644 --- a/performance/README.md +++ b/performance/README.md @@ -1,27 +1,76 @@ -# How to run the Fallout performance tests - -## Setup Graphite - -1. Create a new test based on the [graphite-setup.yaml](graphite-setup.yaml) template. -2. Modify the parameters to match the scenario. - * Change the dse_version to match one of the versions you plan on testing against - * Change driver_oss_branch to be whatever branch of the driver you are using - * Driver_examples_branch should stay java-driver-4.x -3. Run the graphite-setup test (wait for its successful setup) and get the graphite server address: - * Navigate to running test, and obtain the ip from the observer node this will be used as a graphite server in the other tests. 
- The ip can be found in the `ctool-cluster-info.txt` file: - ![ctool-cluster-info](ctool-cluster-info.png) - * Login to the graphite server; the address should match the observer’s, the web interface is on port 3000. - The username/password is Graphite's default: `admin/admin` - * Dashboards will be loaded automatically. - - -## Start performance tests - -1. Create a new test based on the [oss-performance-setup.yaml](oss-performance-setup.yaml) template. -2. Change the parameters in the same manner as the first test, with the addition of the graphite_host parameter. -Use the IP from the first cluster’s observer node (the previous setup step). -3. Monitor the performance on the graphite observer host IP (taken from the Setup Graphite step) - The performance tests will report metrics in the two dashboards: - ![metrics-dashboards](metrics-dashboards.png) - +# How to run the Driver duration tests + +Note: the procedure described in this page is currently only accessible to DataStax employees. + +## Overview + +A duration test applies a constant, pre-defined load to the cluster for an extended period of time, +typically 2 or 3 days, while also generating some chaos by randomly restarting nodes. The load is +a mix of reads, writes, and deletes. + +Duration tests are useful to detect performance regressions between 2 different driver versions. + +The Java driver duration tests are stored in a [private +repository](https://github.com/riptano/driver-examples/tree/java-driver-4.x/java/durationTest/) +accessible only to DataStax employees. + +A duration test executes in an infinite loop the following actions: + +1. Confirm row does not exist +2. Write row +3. Confirm read of row +4. Delete row +5. Confirm row does not exist + +The actions are performed randomly via SimpleStatements, BatchStatements (except on reads), and +PreparedStatements. 
+ +## Running the duration tests on DataStax Fallout + +DataStax internal Fallout server has modules that allow to automate running and monitoring duration +tests. + +### Step 0: Set up a Graphite server + +1. If you haven't done this yet, create a new Fallout test based on the [graphite-setup.yaml] + template. +2. Run the test and wait for its successful completion. + * Choose a `keep_alive` parameter that is large enough to run all the planned duration tests. + E.g. if you intend to run duration tests for 10 days, set this parameter to a value greater + than or equal to `10d`. The default is 15 days. +3. Obtain the IP of the Graphite server: + * Navigate to the test artifacts. The IP can be found in the `ctool-cluster-info.txt` file of + the server group: + ![ctool-cluster-info](ctool-cluster-info.png) + * Log in to the Graphite server to check that the server was correctly set up: + `http://:3000` (VPN required). + The username/password is Graphite's default: `admin/admin`. + +Two Grafana dashboards should be loaded automatically: + +* `Java Driver 4 Duration Test Metrics (aggregate)`: provides high-level information such as + the number of completed tests per minute. Useful to compare different test runs. +* `Java Driver 4 Duration Test Metrics (focus)`: provides detailed information for one specific + test run. Can be useful to drill down on issues encountered during the test, or to inspect + latencies, throughput, etc. + +If the above Grafana dashboards are not loaded for some reason, they can be found in this [private +repository](https://github.com/riptano/testeng-devtools/tree/master/duration-tests/java/grafana). + +### Steps 1 to N: Run duration tests and compare results + +1. If you haven't done this yet, create a new Fallout test based on the [duration-test.yaml] + template. +2. 
For each combination of server and driver that you wish to test, launch a distinct test run and + modify its parameters to match the desired scenario: + * Change `server_type` and`server_version` to match the exact server you plan on testing + against; + * Change `driver_rev` and `driver_label` to be whatever driver revision you are using ( + `driver_label` is merely for reporting purposes); + * Don't forget to change the `graphite_host` parameter to match the Graphite server IP obtained + in the previous step; + * Finally, choose the desired duration (default is 2 days). +3. Run the test and monitor the performance on the Graphite server. + +Once a test run is finished, the cluster and the client VMs are destroyed, but their logs are +conserved as test artifacts in Fallout. diff --git a/performance/ctool-cluster-info.png b/performance/ctool-cluster-info.png index 041203cbf42c5ed92bcda0fb4bc34b97a0baa51f..550b077b7ebe785c5bf0aad8a4ff6ac9aac5b478 100644 GIT binary patch literal 14654 zcmZ{L1yo$kvhED-?(Xgc32wn5xLa^{cL?t88X&m41b2700E1g_=aF;%EAPJhW>(Lx z?y9e4ckQ*iWWp8XB@p58-~a#sqLieV5&!@s{h*a$pgw-5{cBAC00b)wQBegcQBe{F zM>|ss>yIYM@MLvp4doH69PNZSArV*-63QE@up2VUAgod)NPJRQiX;^DzzD>#AAwfb zs5SZJWD!+W{tR@6>R1Dt*zj%D%*0;2w_<)OX3y-^3;e$Ht< z1lA~M_n$s}>3_K18c=)CemO1Quh61-R)q-j(^jo5Zo4MfrI!Xg`!N-G>lCpE-d?fSuq0>yQ%?KpTyt`;+mv!?bQP#Nen1b1RV24 z!?%rsLd`-yi=1#0dyNXEt{~|_A%{djR+0GhQz|=#;5cM^FE%TBh{waM6l!1V<%4if z#Ck1LCk?6m$hB}5Hcok3q;mfXtF+g8R4yUiQl{`J-9)VNbAzXdy- z40e0n>%S$&zI(ma+?9vGa-d2_TQ8G~`Yb2l$Rd!1po)eywE)jUpSMvj@c^0Z=iy$6 z2Y(iFE~BR9-DO*Z4@uV3nRF-~nMNh2fsdj|jgrqzSS^@)&VAqx`ol=+4!Y|A?(M2+ zWc2&coQ=Gcq9|=W%t5zxzp?Gi;`|13-gw0;8b`p&{&V^#-i$Sv$d1LWMF~`^RmoqP zlmD(NFvxj#&mIMYipUhc|m{pho)vF)(kJw9`0R>q2 zmxOo~3UE|XHe;qb99rQ;GSIMVA>JeI=ZGvZCX%yZ!ada4=ni3le4Mfn%L4HdOdhOT zv2=58yjb}!?9;gBz%~fo@RWSjLxBb){%}1JyF9`}nFeI=G`gRW>4r#Y)Puc0YB3lH z6Ai9w4J$Cpdo63lAF`^y5o#dcp;GTSX9e(fV-kc=Z|itN2@vZCr0#lM^1OWVMf3~0 
z-_eIr2_)^w*_Javr$S|eVTb(^q}0pQi`t9ZlF9>n5hnJ7X(rDZ`VqBN9OKC5o;h1AUuH){N8-92@XaMUXGMi436|w>BbGq2}`&6O{G#5dP@LD zaE64gROQc{9|DEGc{3Iw1vv^7G|nu^-2>L7#Gry=jP$(WfqXj_0=wBNUSt0 zYCdt}U31WVim_auJ)A4FikXXA9G}t2Z&kkzG)={l8k45ih%9atvkG0Lt)zC2vXwWR zX}uY^$-ZH~k+=EL)X1sLtK2T;)#aM$S{;)TQ!f8wk|7%|+lYs-Ev+ho9W+zrlpv-lCLGj{N`~>pw&uzpl(&5JRbK%r9^5WF2et~}VdpCfqhCR)k zWzHb72lso<%xZy;xRV5StiY$IDpE^xGaM`KA^Xs8<==ASYZXi4m(3GrXbXspbaw%} zvb(o~L(%PWhg7~)B8j(D)UtXk&N#B-oCQ%188cRv=6I%b>>IYLrmJ?QHYo;8!>9f$ z;7$>(#U5>sp4V8{dhqCxl98=g99V9QBPls4U}MoK2Mk-nc#KsboAO++plz+TQ*$_+P}0hx>z|ppPv6_a;SE>xU0F8ye+wD z0hONN?zZ+`cVPF>Pg@13CoW0Yxn1yGh;I5laX%P8p57ke+41KH`W`$`cbaw*Y@Kxy z`XY3xblLb4zWsb7c&B?G0`3Dl{B?m-=}qIZ`vUvrLd8N-+wtA^40elrYECFv7#_StvS0FD(5G6y$|egNK{BpS10pC?0*pjJ{ilc0xD5I8_s*v~2#Yk%4EH#>4mb#|WmFAShqT(uyQ$kaA zExM6I%IWy#c!1GN`$gGSzA=Z#>NwZ}YOb!BFtbV^oBh?Ej+BGe#qJnDouNE%Bx6*p z`t#ONb3J-JwF9u}smHaI+LiLox54FOhrR%!KAPewkDIO}v^yr%on9vRE4bs^Rxn zpYn0>yYHupo%+6C-a^yE*^#GavnVqbc-Cz1u2!btMc{lhzEAz0v`Dw&ar?b4ah-PE zG$pZLJYlD4&}lFdIN3Yh%MVWrUyt?HG;A@m`o)h?UTeLj-(2c<0<~b!n}V_!eFDaY z)^WY_37dDiz2=%@Dkw9>k9HqZpTU8SmJwH@N7v!ckb_{#D|ZW6F$)BcxSsMWl{;b53oi`>Q_wPrDX{W34t9a+3mUBMD70%|5C~xpFSVB=}J~ z6$mwgl1~*ndXK*BcGnAxKyAxrJR8Q9CyPw`wX?JQwUe@nGHEvyx4jGZCU%E|)?7Zn zvQ3kka^t0gjUO9?UWWHsRd*#-n^rAus)$*jU_H@LiSfR<~!$ zX41tR5oo()IeL%$=|P92e##h_;HPGn7qCd*bi9}^R9^b}m2bn9>Sg?~p49AXwowkR z!{)vB9OMPO1hc1JMc1BhY1KmI?XaC~7omldU3vSdL+kH|rR2f8KW9VLPx|~GjEAc$ zC#NT=wqhIRUFr7Mca^lwJhs=iZW|+=*H5-@8*g60_ptYo#O8c7KIRWK$8Kv|$j&Wp z1YQ_7CASXiyIx;gzUm1i3lJXaTtz)q+_a8dA2S_P{XCC5r)Q?>I`pRXUD;B;3Ed}7 zuC~^Dy)x_0t^Aef$*V7;huo?8NA3OWyi4cZ@+k7i>w)P&_x*VX`HA?>`{u>{TzR(m z;FTR(65J%$Mv%)F_TABs#mrt|Jl;OU7m{Up`o3~ImCg$Kv zD6%d1ZNS4pavfL_oc+R;vw+Yoyj%fC%GD8z1RxOr4T>pOJKFSCBpDTDm2mtz{hW1f{^1%K>8c3Q4{vR|z`GW@#Ru+|# z`nW0^IhvT*I+@!!uM?fQe>A|_OKLg+09cfN3s6dl;t~J=bFxs;aMqBM|YW*AN1d5W-^k0i8x#FlWE8)kciqj znvifZu`sca3BZw%knlMgoAM}$iT?xtsPU7TJ3HI+Ff+TkxiPu1Gub(sF|&UD{F#}B zjhT&&@k4^q$-~z9n>(Ye6ZyY8`Hz0YOq`4yE$p2w>}*N?_WRAy&c&IZjO_0~|M~g% 
zJWbp!{%a&#r+=39u|Vd(9A;J~7UusA=4@g5|AGDG{2TVKb^UudzQ3LEs5+TAirU%O znAkcC{MQ}l``1YS7xTa7`EQ_tg}aHhrkKSC(&=MO0<4^z|DgUS=YK_N{1=jgi~GML z{~PDOA^$Fc=c}W|$2`CNy@vvDGt;$!~1 zE&pws{;dQ5YW+|G0XRP9|5PXexLBP=P5^*bLrP3o#T|H-4ehTwurP!SZ=KI1qm9*5 zbkC#&Ax3JcAdHdhPYFdk=*=2mvize&G0@=s2U@#uw0A^O34{s;%F(a;&iSIgx^CYC zx6R4(-CEEu#+n+p8;7y+tn|yF%{n zEg1A_vhq26>8?|}v>gP)m@t@(S}Kc`+SFqyjEP1=F}T<+33Gck{B#{;$t%OPDLx*G zii)!(isYr5^*`kd(ibkzv5}DWaDo7NEA?h$85~xHy6vt?rAm~U8*tET9g6!&;2MJM zu4f79>}I1m{9b!kW6bydG{xf`lE=XaA`e$B1{y+u=?Sha21@CqONb*f)UX#Ks49(0 zt;Ry>WV$O*s=jnE9OkK(+qsv+z>BFbu!xuq1~rHZDhNIV5(0pV3H6 zM7jsrD(o9q@bKV{>|ckT6Ui-TyaxzEAu|+eF#se$3-w5a4N(*4Zi731e<6_=Mt1}i z^^OF@2@mJWRoJ`Jx+85ny3=NZ=e!hN|6s`l;RJb?8)()~wi1tfzoebDs4t`KLiu;T zJz4RFu$>%)=+94-`I{o%!WpPt~GCi0ajx{rC|NuS{jdG6Shet5s1#An3~{ zhpNyyJS@pRfV!`c^v7`9Q%Ol_xUXBQ+vU&KFA*$m4-d%5ge(uKzlrG6_MjKtKzVYV z@4ke`tltz;ujh)tnzbB{_#E!P;hHi^R8?>O*#QYL3DG$HOar33L>3+TOhGXn^~qiT zU7=15k;%63u&|o!+VP(>YMGoil&q|Z=dA}h4e8l+{xjc^1!%H)T-ZP(!(<{3%ei~G zc_UMlNyC&izxSka2fBTZp=Vue!vt;4AN6@XnMzKdwhtW&#ehu+FXwlcD_d2a%&ZAim?Lr5C((M_|(k5ZW21KM(Nd<>wSf= zn_@*Pv>CT#1ISWRdv02(yJGU zN5>iIrr!>=>pyVEF_Y{qhs_#*@^Hp?2v#V0?Kb&%i*;7&OwNdy0D*}1pM$T8MSU=g zMiMB0!l)XSOI~A56d8{s{4{Q8H0S}N2S4PnT9lyad) za7bhM@6w_}p8*IRSpq(7d^gjgvp>GTpM*X`8~ED+a9-a&RC?lvL-kyX~l26k<7FHVG)^`*cKIFw^1TRNn4(p%-a04P`qf-YCFRUe4-r ztkQa1R&KoA9RTG{?0p$9&|EX``|ukYeC%V0G3s1zLjd$yhf;6Nk8f;G-)A_*F~(9ku$VN=jrOtxIBZtu-60i-1-vzU-=DY2*LX#mY&Ynx52l)*Vsh13Zi$5= zTaeNF*r)i6g!*;HC2VT3?7%vtR(jG6&Hy41AUB{mg zTK9nnOt}_zWjIq?f0$shR!17;jW!q1g1TOE37Vqg5TP4RoPlt@H2_6|>-+obgUlF#6J*vDS4RD|7~t;vXp1TGAeweCdF8*g>`TpgEJj~3c>C4YxT#sdH~T0|&z znC~~esx`G}4hy9cjc*QTt|oaeGz4o!2;8-T30`(9aMp=e!AOA6LXb72-ztBb&Zxx_ z@gt;ZH(Hv=ym!6bR#%RfC>CI(RW-QPWlv`_wxKm5CZ|;kff$c-Mi)0opqY%`2!pxap35^ zNSEVjtbKrdulsYV^(q3UnGXd9_4FW*B66)8FafZnOO=X@dxK!b{XnD1b>=f&c|@ zG9cu9FTo5P{0-AfA3G#hH7f(fj5Ixk_F9e! 
zML|NH{wTdMb!5xq;~K={=l$L_rV2Wv+(780w1Dt?=Zl#QQ-@-f>3lwRfr!Bf1hs`` z%SOJ-Zt$yq^si}=-E3?IVwxCWk%zzTH`#pwBK|l>Vpp)4Gq7bpoIf^|)1cqc5P5E< zK%$bs=YYVZ|JJZ5*2H2x-t}xXlWZRnFq5`B;I<_fUu!fZ{h<&h&v$3NqZmS=YfD8k z*vjsP?3eIMQEyE)Yib{b3V$J)K)}0Yz^3#0oR`q7P7a{qqw)3Qupn1&Z0-Mq{CeO4 znD3ArHFlVjv#FCGUm|c4at-wxlOTE`n&BokVqo!!NULvXz1Z4 zweOfaE(3um5kb!wX$qg#^HLbKGupoZlKdUxYi=mLD!?&<-G_YcH_X&vrDsl3^f*Q9 zmmu>DQmAZJ8>AMibgi~SA|>!Ck|2u?E>`k1`rRKADQ=R*^0IEWzklaoLbp255D|QT zsn;ZwwQg zM2`#wwm%)0Pl3U8M*#$kpsY1rz~_NxfEukv%LGRreohHqB$nUQd8Z%ez}A27XLQ`% z(VT?9(;+!H^j#AG0gxmE^Xa@d1QGxAi?~|z&hGBSo*PI9*AOM`eY}%~J_SI_N2aq( z|24J$y_>Q7;6e{TL8H*+`^wP7X3!0&Pcny(gq7K|^YfB_dpQu) ze$SKHUK}v9m%3sDirj?z^9!QwLPy!l)NT_?QLq&z#dmNQ zbs(kMT;7zE1BvWscd+VppPYf$`z<@Hz3F^W(8tR)p+-evNZMqbiDjamY(HcG=F;mt zZMVY>AsPG;Oaz9Dor9g_Sp`5Y^gVq+Fdu~m02kaoNQc~e3K&j?Bv@t$H4zfRf;WE) zeB3|guw7rS08~JlI$*?hA7#63S}Y>M1;Xz#keZ9cibaKqMTNJ(ON%3fJfKz~O3V3a zgiwdc;XZ|YNg;;Si>ndn?e#G@Q|lb=J0qpq0~#V5=19wrQM<6ksqj-TpSP>N)k8%VM-2U2gAfYCp`60bOH%wy1 z`;BrEK++QJkE;qDr9f1CgPZ)DhzZ)6EmN*E_R>l0PmQEKX^m-%1n>9`%}fq|3(J=Y zM z3`l#ClzSTfy%2OEW7mRY{pmmyP+mZdQ$HgRqoj|2b9b*3Jfo$BP8<)?DFPQ&_=RGZ zYqWOxC17^O(Z->Wa7hplkc2)^kgcGkp`qAF8us|JV-^W}PxuvV?Y4>^DX2G~zKH&C)ePRpKoufcpM{p3@~Zma3W$|@ zs%Oc??G_;{uv8^4?Vr&4M8FOPr9Rc@LRGmvxVsg?>lSfVRWEo|wVr-9gUx^eZa}%c z)hy(>@@F&WD-|*2<$;W;GeIuc8sZs`*Jrrs9Q7xWU7;9Zn(yKFSyBAFJyT4AN%H%c zj7d1<-xsJW`o@!Z4m(RXuZoqJLYzDI4h^?A2G~MwipmJ0hBe_nmuS(HbKjRJAPIaI z_PR<0D#QJ*j1vyi%<`uwmt;Ui&%5E;fG0)%PE_|{GnD^hGxR_me>ZJVzYN?F8a~G% z36kkC6>NH`f0+>fj9t)@C+-)+o2h=j@%h{}hYR|!rZ@DZ<{O64Rp7eEqmIl&x;5Q&p$mMwckASa+LGFC(iAIcY7!#P->F~saSsvj- zjeaQZtZptZmOEoMK2nX=krgcV_%p&y>+h%FY3VAVFyKzc?+74SK9$-<-_0w_@l~<- zN-F|NEyEcEr%Hz2W8f4phZQ{yJB{gK>R9K7WiL&^pg4;;)&N9NtDGLBAoBg!Cp`cc zbykP{@wO{(YpcEI+3lw;?l>zH$DYNkekUD5p-`uo^Mk|vrEdPVgH-U%YyPemX35IX z4zb+SY`4I)6;|f8D|3$z16G0Mv%Q{hi9uceN%9ad7W^OqoCqdc#@5T`FV`OHuT2|C zxrz8M3zzb5Ep`2F_3FdI-Tu&DPnT1M2<&b|wH=s{cK-*+X_M+mB4{`Y=xNI0ubr^fTUFB4XF3&jszlTCIO^}uBh>)K<2KOD}q;!Xc3^uL&aZX)LSaqF5kn?cW*}- 
z&Q9zg1-!+u(Q=_q@SxAE^kMBnC-g~LJ|92|Hr?-U-}CUy0YG7%XuDsWJS(`?qCX`V zwjpUI`Gd?ak~>X*LoXm8um_ndQY(fG)5Fl2s`_CVgHs^;!zIvQS9R*h)(vT-ZO_o( z!1KeqEVL?Uh`f=>QwWwrfU_I_cw0dgh<0L~Fx33hGd3oOv|VLP9s0QB`KB;A;SLu} zxwl-EpKp!c!`X@Dwa7ejzhYr115gx}Zaw>IdvZX{EOLe)OEu5)xZ>xxDr*avUefm? zbY2gtg<#JaAAMMII|z_?7uetDE4GYp)Qrm{!W?c({|*fX2Kb}vrZ&BPkk}fVu2~?C z=yA7VW^#YNq10@68;$0G;0oIuik< zJAm&a>EOeNQfhTHX*bo(#ca&9Z5zS)bJ>SHru>zkl2NN+7!?JRzDyb~rF}y8Jbe0K zms)=G{B?+e^Jh?=zwmKcywFm@*PIl_XC#?{Lz-}esSU1~2PJwDA0;3OH-{_v$OB9z zV>^RNw2pCQCwx)Q7k$)1{v|9KyEA)z+R`zRITYm1{DS$36cGWkH(}Y z5YiPnV1$PB>4l6=|~9!vRKzj1n(XE818Qj(L4?z4^&;-qe;E>Gv>uNyDFssP2b z&Awizv%bD4v}Rz92ff?yy%Seq)9;gJeCS&mXht-j$K#>oI$v!=x-IsfHo@R;e~NMA zOpL(&0Z|e}9%8N?-lxQSs%?skrg&GpkLxNO&H{@U`0_?*iepIQaK^+JTl z#-UpKa?b;sCBM!5W+dCt# zO)&6LbfP%yMstmR99>KvE`O)Dnf|7eJfi)QkdROmrUmDPOpurJzPO;tB`Z&n zi-Ezk-g$q13jOv%Td%|u-2H-X32apf@^5V)-X7*wP-0+7#VNqo*Rbr)?g)cvaEd~}&=@!c$jaR&5D$`p&6gFNxaBvg~PiPb&4j)I^E(X(At0e?7 zkDgW_t^Z`HERW7m-5*5Y118iHx9F4&h+AdPp_Tg7kE#t1WzF#*os?fW?nh+L7Arz} z?C0FUp%o{EQd5$>jK3L5Wo0X21VxM_xU`#`|9o}4HQ}ff(&`V8}fs7ONrnKTo z_>Bto*3wT3<2S3ZvulpFv%({c>wdv=Fqy-F2`i%B zrg_bHE4X|{B2veLO(?07mUWi0(4tlwO;()Sh@hXQSq7l6r&=d^rlzFA9p_l|C6wH% z*{;%09IGTKQIZ+&-o4hTJZBV=;ArPJe5zLe0@L?WXCA2%1Ct2Kyq9@Qv*BrqINroe zwP{BlF$6)Th-M#S7Roo`sG_-`C&};pMjuT5JjN;A7*SMc8!Zh*ppms|gD>t5aX}S~ zJ7vbaMVhKcOYh}^P|kTC?^r}$q}o@e)F$ST{>!?&V#PsnR19nG+Ge)Fm68?}lLH(DE#Y?&{{Gsh4sHP>-jDwnxU_K8jCj3>?KiO01jMImjj z&M?hxD^I#gjgFd9%RUNi13_cGjqQcTDh{Q@`U0=0dzVoQOP4=>*LmJr;_`boROq%Z za=r`Oe(dmUt&v21jG>|7tWbX!Z3XeHS*{P2&(lr(NFp_UcMB}ECZm35NB_XU$Gqk6 zx$W~Rv>HdHhE*uqvO&d-^r5uI$%0`TgoB5CZ*>%Ws`xjl(xU z^-qsqie!B}yYXF{51Yepq|(=3m2@_<0!?QBehGcEtk{%U+p-xj)3htcsdFnRK!eQ` zY3qHo#ntJtGl*ny_-M?rU4eCpju+skjALiNd}$Cr~4+6KJ~D7+27&{FK*G#8<0Xe{gEk@Qg{f zSq6{){g*d~Tf3}&t2!Rvdx30L?cyLp+{W{7Z;w0u`l1snCzGK~a!>t@jrq<31b_Q$ zRCeQm!C#BH#hn^-ex3)|9zHvcghRqdPdy?YCga}Gk=ZF1SQl}a?J>g%grO$mQ~1d# z!#Ba1Mf+R6aSqo_V_CQ1vo9drtMi}%)6YO5K6+ld17DpcQQsABgh0tPs!NG7EE}qd$hu_pXgVbnZb?W(S+mkGZFg@1@5I_p>4?>P?BjI!) 
zMpKhF4S(`(gaTx6+GGGx${mKmJYHI$k~CgHSc3&tJ!-*LkbNPx)-Rk5kC6cyi-L&2wV?Sb{ceWU#mD|Gc?Xul&e@`Y^Nt0S<-DN({A<8VoO!R@gH zwv+rjM~_`g>4OI6C);41>o>3!ZDzHmlc=?eYgM+Bc=qe+SNr_aQsQ#kEZ<*AgB&xl z_BxrLTrLAOoWu+$)-+_p~2;7g{^n2w3VHb64@ILygI#vR^|6K zhgg+TaS@gS2q#;@KN_;Nt1B4$x^8H1Bf(c?a0>%`R1~@25<8TXLaGfy_Ln8l>vz^qbKXmJ*VT?K7#%!K#!4;eB3a z#z@kF7f3%bWrPw_X6zZR#wz!et`;OS7Wuk58ts=!7VB(BLztmomKZ-PZKi#UBf@+6 zQls|aX($qp#&bnFM7eI=&#>CNUD9t>T~pqFdZZ%JBH`>mLHqDACoJiCP2fe-VjD>a zLE;g3RPH3J>&4r3J`VcB8uV%PE?8}}-Fnmq!6Co|EIw3H1o}PkT~Xxgx8P0xj6Ov( zSq_PzzuSpq&O!BtqDbdQNqV|Hkuf$djAySaebsI9ZC+n%==fAsS#G5j(*%8I{&O+# znF-%Vp<9lzOJ(d`ZLHZ6#SEl)XSFsTCyl-|%;PfEbiqU8lhF^cx0GZ)v6c4-T4-D? zV2!kIbZ^PwcKVg1mb%xX#j!w_8b8TcKDJERZfqVK0wqKvWBr&=_fo1ar6R`q+5ILc z=?eTK=V}bP?kFw94dtfo_~*oGy9m+qfpcrqm#lvE==fA4E>#=vBp*RjXBwquD08MQ^Nue& z)nK5YzA2k?b=h8)3XhCwc>Y;fHqMZ|6g>+l0$cjMOI>6soqLgp8jJH{^}y*kvzX$>6z(a}*LdYZv=i%?W}2Zf#2E9SY4G@jsJ;AY!@ zada+}W58xO|JE0>vICuo-|l>5coj;}?%NKve#?8&K~BiyjIzVWkkN6wsLf_Pj8YF) z8hZLSlI2^)B$0BYY5fgf+SKr zEcxAd6Ujq1InIZ%WWH%5oXzI~wzSYxke+YLH<#w?Bgk(4<@57b$rZLv0Tq=%8i$Qz z5NM=*7Q7IkoB)O@E#{wwkq-z>Kzw$NX{gjz%vh|fIL|WC)H$r7tB4*`3lT#)M0+DD z3czf!UvQ9)y=d`t+F|hted^96c1OSQx~~2uQ=MYMJ(zm z!mI$QffY|~QOv;kXEvQBD#4-TQCEo^q@*5~sdNOu1p`xdrKm9gK6!ooTbew3%lz#( z8@rDzrf4L**FtsHTV5L9e_v7z+^M{_nC9MGXlyXlCayBM%;u6$QI25ZTLhe(i z`ozgvXpv z=@hd>%jCWL&O$L6N29CRdxgSMs}MYF?x3a&_)~dbrL;=$Y1GevH%pTud(6mY}_^ z`o%H$dbiQ0gKE}D^YLDY$D!rmt7ToA=ppJPGnpi6htWvs3aWiJ;NW5R%i}d8Q8>N0 zhCjdPFiVR2$H{g=DNy36ixQeaHjYB5Lb+HT>;3oKF}+$v+(*0;+7ZYOXZWh+A$7siH_`7@3kZBQeH8GQ17b(8=N-e0KOx#V1s zBP0_K!)`iTdVq1O%y++awmO?Ss%z6p-}NX!WRat+kl9km&9#-_u$HvWbi@`oaFPNG z<)sLZ*7+|F$6wK^ZMP9CF(_Q#5uH6n_ zBh|}!vuv%{@=?)itNy-TTwB96#Fzjq@QsIoEJ=VvaSC;v&2xtJnIucK(~BR*!z|o;X#)vzd+PXGDDR)RABZ-h6|F^5cQvJ1s0M zY%h~};IL}BT5|K2?-~Hz!AAlq%U!{d6Kk^E=B_yDc)PmPK& zwwE}wzy-H;htL2e7ShP982Wn8FBtsSfp+OLQ+ER6V68(Rk$-;Ao0{})TA!xm?+hri z_~UR4+IOcbb9!qA)U@mvbdl58h3*SZzZZ$0pbz1TUpUfC`C37j~@yC%C7 z_hJ0BYp1x1-WVYpV{O_X#uScAeB(~D9Sw?&(4_h{6ExmQW`wbcpRfOye!#no-||Me^&XC#q`PK 
zR}{t#RE))Y(h$jP52&V<4EX?{{{FWKgCg@v0C1%VF+IjWJRnr7*c6pus-4-OEgcYw zNt^_eaTDr&KZ%2(U>vwy1f*pQ-3Al8NycOv1(ydzTSRrYSjgw!VO%Fy%)FFA^jPB3&?*h5}9L93=+W8#z(<$q8JO36m|VWleL}>9;AGzc>HNus&FM! z9p}d?Cc3XYd~}C}Xm+9Dl|5hpqv&7xz9BQ?h-XqJ0%88J-pH|zr9|u($tfgZA`&rWf44G54^AEh6(DwT!gyjFe&HH)2oo|1+mdXwWgDo zvoSIAeuMqVFA-_v3aTHwFHW9m4VOf5Owesrct*8+jC#t+^g3wwS#sNhEDusO*kfQ9 zvIj9Y7|B2Cln8}|Mr@72iJ8*!8jMurxNYqU)eu~|#RgU?PmkrRf0pYs3C>#V6gmbn z0FcWOAd(yk*x2-$<9@qubaa2VXB{Cj68`Jd|8UkX^wMLbpvd;TP)_qX?aO+x#!ZAk z>~-upZ0k6Y*zGVMv@PK_e*}N3k$&w&;semi9rB}U75q^UdUT8nu!${p@ugUzOx(Li zWd3IV1x*5-;oxBaJHd80kzV-#lu9f*#Qv*qn%T|akK$;4gUNf^VZ#Qr{f@u<{+0Zf z2!2#Ov>nKr4=(iimH`g)<86X7kjozy8`~_GV8*u8{x_0Bf`POh^E(hk#iDyC1OvsJ ziwWg9@;6y9%gNupe6Bc4i8hI3&wEj!;Ni1Lo%+;n_rN&tBfdgtl&|q)iq@&a^Y7gD z1V5rPqkcqQjDHgo;~@Xm0zd1e{Eg~7P63gFu5yR;;QvY*_k7gq3}RA24&A##Z%*Cd l`7Z~xOM!nSHSM?Gfp-uSYBL1{)_)UurM}3ERf!k`{vQFf&NKi3 literal 20505 zcmd?Rbx<7L0w>xyK_|g2!6CT23==#^aEIXT?hxE9xVyW1NFW4v2@>4h9o~H3y?6KC z-L2ZUZ(qIIs@MPZRCiC!bf0s6+F_sMB+yX^Q2+oymy#4!1OQkp0DzH2hJ}8@S?OU8 z0P?F+q9V#}8Ar>$PFOpc%+K<*f^{Rb_%S3LYauFM5CZiL>0_l|NiOvcQsD|lb?h~I z${(7DET>oJm~L5DH7rNSEm9ntY!z89*XJB#1}&##)L+}o8OXn0Qd&9iIxBPYJukd3AWfO15my*g9aGa{QS#IiI+Kq!Ha4cF zJ6>x3AWb6{UTSeruU9gQmSlX!$r}(P0+SjEf}A`3*?}|=B#lnp|FsViRdoKQuO{9> z$&i){@4?v8nNH~<;}C@`@J-KVO5Hcs5N=|UyZ40Ge9;Ko^;rtH;LOqv<-$*iyJ%dP zA@2h~X$pH6I^Wi*L<4J4^NuZo9(rPo{0BeRCrODpYT*KNW~?&_!D)z`B!-!|5fd8Y zHF{igDx`r)OoxucY4#puubUMbPeO~ISGxusbj;0LOIdlAm{+|eBDV_Z5~Wo%Z3Nag zf|GtiY%GmSr3>jQ1?KTPR>u6*e;KlWO-?FNrs4}25#-RsJ#U$Z?QD(AeJ@?cwXf3A zuAl1^41#=wrS7-#D9~byBn1|h-a<}zW>+n0SSpq$uwnc@bq*|vs`4)BR`=)9EN=%~ z5d(qRxLP?mZ9N;#zZ>;k*@dU289rAxAvCANw07zytGuqNY(g>sSsMPprpOO=rHUUw z&FdlM#tVr6dC07q&0M}lphEnRC{C8INRUFl$ycci4;Y9u1+;;XzIiTNBW#5{*0ATgaN8-4CazFt@W!wwfM)$jnm_kp%Y?wAv zzi=YR2G4-Y$hRDjUDn_MR#{Q~!XH&z8Jr#;{XQ=J0CI*ht1L_o*;qN>aKOWmLZ9w; zn>l`6zQM)D94$YH?bd8^3MeA_5I9~508WnDUo9u;wetxR9pgf7GApP9L1|@F>BJH- zZ!-frH5BMp?&$xL^&uynp^~s z1)2;_%0#DnERjW~gAIOut}Z%=3O+LkSTbnPm`>ogpJF2vSOfWC1Hg9?3^N3D>`c^W 
z5E3aOu-a}H35Fq`I_`N_T_=5^^ZfXGg zrV=%`Ca1Nk(uYN!4UvXfi|SZem-`Q8leob;a`Lo@Ak@}Oo2-d4^BkOg*s9XeAAO`h zIr)hKKEzDC85rU)=$@Jw?gPks#6KFhHuA!f&Zx4iwJnGzMVyMnEbkT5JDu;nV}c+@ zecMc7V|^{4cp$&9Op1z~$Q2^?UM^S4Tr8@Ra-YLYnJy~#EuBS$F$W+y#Q2lZy2+tY z+VmV-)od@9j6QBj-7+rZD3LHo+S2K@ic5qh8t`>iQwavW$RTQUbrWYcjjkgVj*KIr ztCq4m+YoVNRn410=HoN)8p4k8K#WWls2DKd4}#Egru=h=W2eC<}{=8c90R zm?9z(Btk#8&jJT(&D=V5qc^@v7fKgu5@PC#I-cxKpkA}W9UoAzW%oQWzCB=}ebfJs`F?;_*sNbO9u_M;Zz2%Ql z&E0L_qUeH9_2XTC*}~Xw%BZ?`St?IMBlv)JYsvjx;y1JzK5JJCOJOb@f{f`lZ*H{N zmA89oTj7dWW#!5Qgjg?%AWcyg-XtcnLpB<;n3fga%w)DIIHJ_wi=K0`H}4GU=N`Xc zqqbH)I8o6Bl{HK%kBJxkCf>2Uv}aeuhp?kblqYdO&n5bH%rVrLpY$i<60T81#&~!= zD*&&G52?wl`+ZuUHv>zpUuPRvNgbc15&5VgJ3*!LhbWW%ji8RPJ>=b?ua4d^@U?m1 zrmNKAIYnhv7rtI{!ACU`k!yoflNDmTFSsl+(S?$$tv1e4bJ@F%UH87{zxj_vLgbx& z8aG0SLX%V%_o|w*U`VCznyLe=5Ea z^Q`?C3ooXob7I(Priv5u3$4$bW7*7a!nJ#U&3fr>iJ}qtkJmoEZG1;GRqlNEL6?`X zdu{uIimmv-dc{fI6~`D%5B7i)TxBDimi)eI(Y0#KLRj|%*3>lqSH;(WihhNi+6?za zTAf551tE`tQyXjs7{?+iWZ0r)MeR7ApXhohSQm3fMV0g95o-d;*H;RtAFQrx!z|7B z=UaTOmJ@~d3sy|5u~k|Q2AI>fbZ+@=hy4py+_FZwycmz&oIXpntK&mbLQ>YBo2?Te zzU7Or=iN3Vp3)gN1x^=^B-uVrX`QKdX4rx*b`2I%ZANqN-+oF>d)M(*<&1_oZI*$4 zsq*sKj$+Dr#9_V3>84pg_<{KtCET`c_m)j6LFcIGzWNu*SSus5`yXw2-Ph4{4&-Sd2?jutA|ug|zQj~e*Ie1yC;m9H=XWNLnb{5- zCuID=*($M}0nwFj`Sj55yIx}{RoA<1%~ybHABCF`QBvaU-kEh%*2|Po7zY>XW>KlT zFur>BAlQ$F;A2^7$_Hm;E-%!+)40>u6EkWbAc!ZYH~c(X{^EQvP6La7$aAy!c8|Wg zQ#qD1N-(DoF2S&r6noa>1^;ci!FPV}`5gbsbCS7Z67poPWwzqZtF_VI1gzXyB5U`9 zYdmH=xX^Q|`@CVUCd;yb)1l2pGjl}1h((e0a%n5R=^biKCkMq$g-ifioWoq&8gckV z6oZ)y-e8|OKoFS+US8QcmaJiP_wA64nJpZM^e!|SP{iQWUy%Y2)Zn1n$c5Vn6co~IsKUE z==1#tu6V{^N#;gp$I$>v9OJ^X9ihilhwD+b?85l#qwx(m=v0EzW2->V@ZOed+(pQ3 z^ZSwA~=D?A6zU!;p^Q2Sc(3N>rw$Ol5~9n-y0cHz=B(Y_vVBlY}A7Bo7%A z66O2$O%TRUe#ys|2Sr(T`*gBm%Zd6F03GezKcr{KRw+Eh-Son|ky#eCxsn(dM#@ff zOq#OepN4A{gpEt!bqF3cIZtY(apXxzcGC{YLET&P2@5THk9?{CqIYsgAHiW^J3rnw zk<-m3!2bC$dFMM8FFU)V!G33dNrT02oca?p(7T%?ersDOB7pYU-D z$IC0rOJ?@2T!wsh;C|nC?_$_ZpMLLyt-=Qq0{c;pMZ!djz2Awpoi)O~4vXjlb(<5y 
zk9*H*q>g>#xKRL_@9|Fl>o4Rqhw!0Lu!E9GSaA%F#PIxnj?(u-E6nfHZvoEzkN}ac z7eO_#@s-0fE?)FezRdipv{X$u(eBT!c{_>nE4Q~1X1hvFAxwlb8~C^T-%Zy*Fn+^D zQ$cp7#KBr`Hz{st?bj{)dfiive0^ImQ*v!2#&j_1j3`3~oO0{4k#YdQhl-Jt78&icrMAiFIeaoL#Z-Q$C|CK|#VmA>-ncVk+Z}(EK6Tit z=R04nj4O3fM|M7ghv(Hr?=_pYqxbvDX><5&>~>&TJ|KxBV@I8?h+|A-3k0DVyN0B19qxcx=mw&(96p+ z?|_7kVhX7Tjd0oD#6;&yZv)-*F7~2V$0af&6OCosUM^?07XyH^RT`_UwV@GjfaRBu zL*e!MGOKL0w{(JId(|E;9@1{RlJ7nJe1m2jLJ*N^JPC=jZzx=^#tNscQ`!rj=s8N| zuMInnkXvNDlKOV?vUAb7P4TCzs2*f6!>+|?fHor@x^<;WSQ?3QhGIGX9w#VpoMfB4 zHbpt#kS94`2p=ZYzDx5%Yb_akEm*d-sx52F5Mb?M{3iC^gKrECzr+2w_e|cfPH=_Z z)3`k4<&gX3`PuWt+wSo)5&7z|eC5HGzjR#U<=Gyec|;7D-%;l)n%;?hZ`3w!{G)yD zl|zur7`v)@`I_iZSH{syOjTNURS~&~;8p4NEns9qk3FW@YzhO6SWP$Uax9D~8sXMk zH*)P@AO!&C=XomA)#{briAP73`vXfn@de(c1`)Lvc}J%&^(OIUI*Qh##x~IDWV1bj zu~_rSch2gq-%`h888=sHJ+P|VlIWNazh5~d!P)u!(^;flUgLT7L(t{$cUhvt_H1l8 z@EYO}SJQR?d)1}P9hb&T@S=pG*UejU1GGlm_L-_V2Z&AX;Aie~tVosD=MIdf)pYvq z{svB}7f({w;#v^S7J^DIN}eXO1_bJMxt(T?`JsyIlFHcSUg@i!1)+QB1`Hb7T6cOcqb`Nahyh-U zjrXA3`LBX{jlGa8!gf7P71r`v|8bd~zs@OjamPYq%6&C)P}z>KSh`Yrfb=VaX1YC_ zvm!H_ALmPB8=aa}#HeO|pRJ)-1ROPzhgIP~ldZe0r^Vv%Prt~Dg{}Tmi3pTvRYt-u zelcheRAraz4)n`hcmIJ}9!bhKk6ES-pN&mT|4C5(M&5t(6a8i=C?_Bw=Ryp|QDHB7fqJ35g8wJ3Kt>=;(kV`zRvrNe;OaVewO+=)S;>mZT(H z;wA`ch$#7cCZ?bBZcG2vG@@0>{+2fWL+AK^XQyDy6$Z=9*49+xy7oyNbqoWj5XmQxqscs5<$+@M{ORPfND4P8FIxG z$Oc`OsqZMI;wj(9Noq4Ci<0+bAwkUi)mtuz8HVT*2m86Wjwz5KrOHhW4a2Tv$YdN= zYt$0|l6i0gmzI|LU5b#%8UV(nO#nEwLvu&laRWh6e}9qti;kg!{-N|R{M-x;B9z^t z3fHIFD*E}!DovK_opRy=EW08J1ewt}f|VDxpZJfR(`K>R`~goejW<*WU`M_2iQWth zWpNRi)~U1;zKvt}L|`?`R%&K51SOySy#|MlGDb_fk_?-*1cM)y=~Sf53rm^4t@w%| zw^k+=LTO3ClJXcvl&}3rWAOQ#L`W{(d+up2#}Ag`^Wq0x=aX*|p-1r4eJ!t6COBA{ zbx0iCy#JhC_9%JkGu1<}Rpx!Q^eNz{v`(0`O5UPtO)~YOSmt{lsTN5eU-E zNiuK%f3X>;!9QTpfe(NL;99-jQYTH=u4W&UwPcYF>I5*E6 zf;2hNdnm*${kV9bLeWqWP6oB5BRl|x_o`2)cWue=_-)nA3B5l$5%vxHLU}p8pGzib zji!x|ilBOvT-QoYkr+U(A(mJ)x_d3bp$ULcoIIkG(MVO*_EE|Op2vNxDd|8QVxfSa 
zYC6$00CX&F71Qkx8W{jNU+?L}4=&+vhI#?mp06q_*g)t^8YEy*mK(hKXG?L2H9(ykW=7=(9c^%e0uwR8T`H<0cqn)_%po`Kw?p)Xf1V?Z zL7b$F=)kn`17oyDI^B8|CcsGV_ety42M}`;T3e=U&{UeAogU`$XW%E?=M z@bt$YlXznt1|49F1~M3}P@$it&myZ6M_Pv~z0D>W8d#a_J{s zj8E7>{)MSHCI;rT%dm>2@{^>Yf0~1iv*1uaVBn?Y7oo_3V34Rv<`&HZCjgE#Js95d z@FZ1m8Mm<3AJ;j+VAsUq?6`T8Ca8LV(FYii$wzf9E&Uq#rS$~_aWrF%Rt*cwshrJj zXcxhybwa%GDdd@mi0ms5c6Sz%L7QIV{%O5|7aMcZZ+t`J>A zEWZLA&GJUHZbvw>hFylG6LX=@7horLMHJS zmeCodm^d>IzB6g)pi#rMarkSUI6op=US77GtI9l~13}E@zsq6->w5{kbK!>unqmDYlfx2wJcPu|5V2RZ><<329Iyg z-N*kDwNULaW$s{$N3K-4Ou4N8v?s#M22F5v^5AM6j%nh1$N;PV z^|ed%h7%u&2o>EGYJg!BXDNOW>AZA1Cnm|9idXfrVB0iCs4G2D0nNCYn zbGscB_%i@{_UNcwm?+zdN>0Bd^e-4K$wk0OFf^E713SLjnH-0Hf&W{HzjR!v#*-zX4X%z?z{9h?{n|-ShB> z2BX=2k;>vZy71G$0nXh<#f0zn=AD+7?#r&N3!ZF|NbZz(+s1*tYRjMfHjY+WS*D+y zbA&@5l~MjyvG76#>JV4VKJ8fqVESBZ41FB5?m`COW4_WKDdwSw}W=QODGTr+8qW;HDY>)ifcaKP2Ku1vVxl847)-P4nWB3nveQ zX6|pzBjo5gSg3I_>Nrg6ztc9usq0%eXUMJPdp1O^M)Yn-SLJ*mD?2Hk#@FSxSPf}y z`GaXwx6kz$m$!HBUjK<6&)f{=Z{D$!g{S(LE`3e_{1TKv3a>J{1MGV6Uv&34qdZORi zi&?O7hG}gz>yNow3{Qg7zMZ+gVfQWQa=uklSk8qLj*l+eI1Va%vA@<>^nX`*?7rf3 zJ6tuF-R*R}8I`5wI*_?i?Y1N4_F@{c(#nUdhWQd_s-<&uJQEh<-CKyo|L5*d^|4-U zrNwS_EH%N`Q-4SP&wU&R3F-cQ`Hv7s-R3QhqbE#VKi}v4i^Y8m`6{dn+N==R%-@P+ zcyN9TpQv2t5EsA{^5=3$2a(j6);=e(9@D+JA_~}IFu*4#mBPBw;UGg&q8+^8af1Nw z=v3!{Dy2xmN*2x-_r{A;{{AtXLtdXtU++ud)Pon*mnqa#9AWnl$&7T~534l3O;S$F zzNAL>~cjjqpu@e2|Apb?f>EQOv7i}xD6rIoaZ1l|Wj>_5B_w*)&-s$@uT=Z-; z3^18nJTp^R!e1dwD*MOh=PIxK!T!lX%v{&ez0mJXD?W-%uNRu1**21kmrror)yq0x zH|O8Y*A=fll!twJxnD!FeJa&H8+DyLcsYA?_Nx}=y^0tQe&_4^qv@r}*Hmj^Uv1^_ z>8l|eKBMfD9IOAb=9-N_$0(=%ee|a`wx0xO)r_w7Pr`*_FrwTV zkHfTy;^5H!lE&M2nL@VwC;Rcwx9yWN@Zj9_%x1@`ok;hm1sc36fx_E#9d~))o5l9G zs;TNH(}OF;Z5hYiS6AsP$tJL1pNXmyyoHxDr+&Sz)YO)&u$qqSWM73kz`JF(Fe?$Mn*&t5L_eglx|(dm%b zcFw}_XJ?$wa=TgLm;Rd<)Y4;`9v0VC0r#6<( z0eadRYSiAexPc#|9TE+9vtbo-8hIMr3Q05#Sa^$MPqn5|hA34Z?>)xmc-_lG#_abF znpqC|tLy}q;L073v+33FHoxs@&E@WB;#H{bL@`jS?G(E@iy7Jp^YXov-JcvlDP@I^ zbN0tHGiFyBI-7`f39&2Ty9?Ks+)r&62u15bvIqTH5#3#X96N6!;1ZL?8MT}{kpO_r 
zL3IZJzRCLDWxuS9tplW!Pq)l}-V;wj{v=Rvw-^@)-@g_ZsQnPX36N4E`F!heIb9K6 zyzZxIrg=4C9tt0gkI6MLgHf-Yv1;sp;emSf^G)&(#d0Ur7>y8G2G{AY3Qz+u%bDLQ z@#n7Rb?;rk_s_iQbqZ>2|&4 zyU;k08Vj3yK6)I(=*%UWq49%)Oo5%p5Wz9wX$T(p=P$j91TkO3MHVNrI38c8jr#S2 zbiuetj@|tZDwc#y!))SF$8mKcRrQRSLCQ{qFS8n!Whp5T7JLj%Bf|jyoRYX5oXJZc z)OWPimS`3?Lnd{if!j0jveAFL)cjDLso;TFzGBopuXzX4HP?(6_|_=@xKo!J&WNe1 zt`iH_-RX?}xrEt)aS85@NcF?$w%0AK6Y6Jl01&NqMc>5Rj0`Wkq{$lgWI@WZW1d8! zk6;IfVoH2Gv~k+}p6g$I@*S(ZaA8K#x^(^N#rgK>La=R8p-(t9d1cYP5-DqpfN$SS zEo0c%Et1ypYG2{qi;`3{Wq;Pv@~rEh6R*1??zxvmBw-;9r^m()`&rpnC-sCD?L$YS z;b%0BuK>_MnZ>YG;f%W0YK6%gXE^|X2L%RZQ-Cxpplj#?a!})M;o$x<$=la0H6YvDQT2 zjDE8>H&T4?;R%476E^qgW~}y7I}c%@{yl{3;4GxwzI4V3APdsS2M624v>e~c7K^}Wo-9VZYWwuS~PgEe|$Os0CKIa{`dnUH zvhI|gzu40xbz1DW()+XtVttNsQTy|J^D;Js*X4N5j1#gF{e1i6`{#Mc=hx_>hUOWn zEC#UIeS06r-~X~-{pYD%*Nt3H^WGH4mrmB68FQPL?PFK_!{!U`*?BfsWL^Hk3Wxc^GcG)-}y0-FJWreKqsa24~>TuKDW-@a++Nc zDNTX>7LSbx*UcSSKihDSHjUhmh?x&L{Oyy1v(nv|Njo)1LE{?Dg$^v_BLOt3!8*fa z>r6+qx`#&Az_E(P>p{Gx4uVYi53sU!6ucQJG;?Z;c@Q&e(sz&nlgF(lL-JdvmzO(T zju!5!U~|ttJIMIy<7bbBEMstR{{vP3_)A%cLNfhDQ)9Etm$jqAl&n}fJTgKh<*;rI zlGgdJDu&hO8Wl3*C#by)^@)=tDH$0Vd8~p=5=F@sN|m7)bq(qbW2Y6Jy|6=&(R==% zyTkueN7-xezM!GGxt<{cngPLO3lmk;){fLe^4kWe`+GZdnfvXf3l$3$*IZ%qg3Nd6 z5ju6N|K)9uzxnTwH$*B9s=*Y9lH0dxzX^l)d~2rq@5oz$^uH=FW?|5u2>zD<14^q{ zTGGyl{ZNno59A$zEZ3LI>o?&4N%3FEJM_OGZ>*mT6#rowBaiSwS8sx`Gf3Xnnnyy< z_J(P&H7YE41wG)ZS)$FPyXs7~J`%wJ8(HuV3$KYk$R$+? 
z8MCVLhpVS5DlFi~FnGB=TbJ>BMFFD2z3dx00w%cd#Ln+)*3@34hgb7Q>Cqc%%Osr;>e7YU-xZVsFPr6iEl5}I9=KVLPlO1p|64m5 z0B2k^E{wVaJ@}KVnMrdIG&A8*Eh2$8F|Cs}=HL`^W-GX?TDUtJPJ-8#$CH|M=d&^6 zm=~>S7M_~#^TAg7Tq!pKR73a+L&gW@S#V$8$L}F(S|T-NO;;KT3s^=jKKoyLiT;X3AYeK8HYWFvWg1`UU4NykA^8o#JP`$^ zJiM-@zF|Jz-Kr$P9MOb;ewEF8u3IAK5@Yw6bfl&yjy=}uS_M@7P<<(keeRvdvEkpZ z4o+Q1e3GtvNrKL?wpZ0WO^)t|L*+?D?cw2gQHm{_-ByD902%qID4NqFf#IAlJZSGr@_Jl@Nghn>7~s4&HnNmuks@cqcUeC~KV|bjfAb*F>=U zToCGVSDHwSI##Kvfmfp9_z>Y}gvyUJ424g%*S=YMKAn`OC(R}IVp&xUOS`3n&C1AY ztK75sm89!zyD5>4gg$5sIZmmi4Hg&q2o)tlgFf*6OgK_A#*=Hl5?_uzYy(Tb()^KP zP&egpM;*q9VB71%7FgVUH5I=9<$ZG4I0H02#|6>9n&-@K2VTc0Se^;k>^B2AiFT(>t=b*2z;lNpIjl%Ia#c;iEtg}Alk@VS8 zm!bek1I-hYXuz+}1qM8x$#5x!^5HrwN<_cGUkit z*1#mAo}hN(%MedLrop!w40(=8XM(l!0V_HM*S5ZMcBoJt)v7uHS%K~0Sq8{V78CeN z%rZ~XX2QRWb4UOJK7R(dmf(Oc2~r0(2_e9g-F;~3rkOM>E>cUp0PdS$=yeDZ8z(OX zoDhKqhkJFUDZobS9X5Yk&R*zFpGb86v1wv3Q3^3A?`aJN!aiA*GCBnSMsh9}f&9{{ zH~^SW41<|w5(e>@s)NNw{8xF*jGDc57GKB!tqV_?5oYFn5Lb+z2@CB$G15Uwh#s}d zw&9Ee;HOTBT~beUnX#-9wdaqU1P$Ujx?;|4LSYt*9rMJb`QU7K?lcrAhqAG+t!220 z&LtP=IpcJw9xk+L!+-N}n;lOuYYcnlh+k6ArdGnuW(dF4bZ1_);U!k=er6on9?%Po zv%3hyS`VB3N?C3p7e=coJ)x*U7TDMn{80t>ZhkHd(z&cCB4O0=$E*pL2eVWHQ$5U4 zB1=_rN)c6iGVP!vm=IkYNmDY(%HeRi-Bo-cmB*iYc?Yy(9RKvRCiJ-u03d1$aH-{k zU3Z@mG~-`c*&>6Xk=wOVpPMu2+WEm@vlDJ`c{DP=nx?gISr;z{$K zw24!oqrU2*3NYN}B`0bAVbtaB7A=T-h)s$GfMtqMGoJ(i1Ymr~hDw)O6>Ssbc&d3p)ek~;_TD`CAz6naf_I^HeGA2vZoml%tj z$RI=So}y=`P?Tlhzcq)Kx}(sf+s#vNE*rr{AW~ejwrv_U^Fs4)jeAFo63?Ir7_eSo zCntE@=rsTjE+~#UA@Z_&JEC5?+%UZ8#A(O>w$*#c{?XwB4M>VB<}r8KWqUS-pXg0h zYDF0V9@KFkbDW?>X3g4g&x!%SFiNzyG4q6Q#AA*k@=;Urbjrg#Ba%ZExX#T#v^PBM zwyY@!&jiLU1n>+5T|)3S2@!NPgK$%*7I#wk6OJ8$3KnGniVWoal_NYhslF=Hk#u$- zYaH7+UGAy)c%L0T@xX%4U{)#HA{rBH(gZoHaQ4InK53<$4{MaVKm9JV z@A4u$7}%dv(~|QlJ7dnDtKd3T-+Sj7SF@XbP@Xzw@ly3WF6;eDZQp&`{N_gq;m27*h&=nT-y)OC<0m;&Pm*6muA{oZL= zFn_JzHC;tUg~(vthVXlw^2X{qk``gMgxn=1D*>Oq)wOLK(k7O4D=KnULayI~R~`|q;~H8baNudn33nL4kJQmTb<&L6;!J_dEcZ@aKxpcv9M>;jB4%{e 
zOeMZ3O_r|S_0fDbq2bUh%J_41YAv5-rVTwSMr?y?lizE3nSOMtJWqj_9X6z*%{ z$~3m{BI=ZADq0U#Z_Ukx!Iyj2CVFpH{LRIxnGoc@qQkJ}G_AN%WXwHmm&Dg;YPWiJ zgDDot%^qD5DN>gn2R1z}`>y-)9Qrc_#!Sl94Uu4&lsz}^TF(yz==ca z*MC|7ShfM<8UN-1LJbeQ{piMxJ3 zfftSV{ufWIX@vUSJv}|!+uKmT8y9M2d>%53`wk@@(5v*@^JPYphTc^n%AKOR5|5R+;nX? zmDmHFk6)u6=sU_W~K(G00WUWpfwr9C_U| z(I&5!r%0<@m{>$X(f#;$?jMtpiF=mIG3L?4V1!yFY=*lmmm-m>E#NH%a4R0L4x?h; z+OzOW+~naDl!hOzI)d>(&%+A*H0a?A_$_N47u`z1v0a}|oQvZ5!xIhX6_;kDNyPNl zxMD^A!XTX$dZRQ7Pt43`Rt4w&vVx5UU0bfUQj31Fky8>Z_{w_TOEy^W>JOpf#-B#T zI=EYxzU}53ms48YL$eim4!z{~yHV%dwq9pT>0n?Kf zBod*SyP4>BHl&)G*2V1$9*BOFIQ(2%U+H_pxS;5za>OGX^%4ziQq&=7GP^ke`0IJf zK8f-MzJ7Lf!itGUO#_Se(DDmB*{DRA210Iwa zIW;D}EYGRf3Kx^?vdr_N+@RqIfGA3zs%;4u<~COf6(mF~Tt5y>S-B;*nOIJWQ&WnT zvT0?i)+BpKT)l5?`rH66*QW(QLmZl^ok{2cNyyD-U$u)1L;k{s3W*&V00t zvyNP7m;dNtee^p~iYO{H*D_M^(Peq}3_JbChf*kY?@~o&G&II zwU@?#0(ZO2OHp2~rn!b=|DQ`0O81A{3q0%Gtr?_gUBlYz*{JQ^PxJx)N{mgTG{d$> zmzuf_HoKKir1+t5m&+BEFKpkj+g}Wdbzbwk++Y7fnllC+Iown@<2N6euR?j*9?TID zzaXyjkF3LQXJf?L=QnKL1mNm)ouaw!)%)zh50*BI(R%W29(DPrdJQ@{y58U;-kR3f z&i(z=;}Gb8FHaP7_>)A3?KD4yziJ4?#m-?Bd@X|jwH<+Bw>m;hh@g$Mbl9A6mN5By z)~>spTrNfZ>PokUyEPS_?8SHQr)4hoQP201bv?KCezn&3m@zob_6v8GUm}=ak6Q|= zjcSu+JzSTMDJqSrUaCVqQz%=qzK&U|rK9eEqx9{(II>OUG7Te@?YMsLoCP|T5#W7o z(K(USY~y?8!0Jb(0 zq*4#QNj}n5Vqxjywjr;3YO*LB$WfukP7Bx6wPdC11Hj1|j56OUl<2W7GJ{mqN74d| zz5m#9tEU;HJ#AqkMSSDjRrxT@SKpnpke48rKgzLxr{)sceS0cA8jM&s(DkRHxVdV? 
z=d>;bAI<0SfIZGsR{_sR){xt5;0H8_fc1#m_&xsI1kQRVK!RxrL*fL+Z``*UbWEik z^N*zN*sDA$4ZF4Dg7^)WhprzzAK`%5DsRJNF%)1AoA*&aUJ?nvg$6hcTRg#(@E;r^ z)=g{77=?ceK=HY+Z$kY5r-kr&mK!|0|8P-=JePSL=TKO5T=G=??wWqfvmNHR*p0k0 zIiD%P3RXl9&rXR9pXuh5t%)krsWvw0M2>E}t2qK5!u_*C1@@4nq}RkEM;~nCHibgw z4;`lBz??-BE{%fzLrp$)(>eqkF15^f#H!ymb@&Iw{ZQs)`}WH7dm>`yihd(qM#yyj zkDwf9uKw|1$w*9(OqWiwb+2j&UmwKWq<%L7b8MLyKJ;4DF& z+-($tX>_~@0C}ovb$PXg)pkQqV5avUZsKdn4ok#)feM+ClF%r ziobd{i-l;J@De++6O^-EGGUUN%Y{qCz4Cvnze!8pLpwamnD&K~G9?rAS!U@KD?q0I z7zTz1pbanM^5N|?NU>OIha4H#RYVoiontre!(_AGlib4IO2)%_zTS@u{_F(7)C8{i z9|*p#+uCli@tv207wbdsvXBk!a_mBa0tM-q(~EhdM{t%JYrXb<5&|L{pU{8hD%+gt8n4C9>56zWjU^ zM6Fh9>3VRcG8S} zlS_-?D)*sV$bZKzYsWnc6_L<-6et~+Ik3su#e4VeWdx6x`SHwmvEK0L8=k4p6Z1*; z&2x8_0G_n3xXyK_8o}C9wY=BQjYbtS?T<}ktgiou1cSi_e;glHOXWFahfugGxR2Fe zp9(J^!CKHOVRdRh^bmLo$MVv1Rtsy1!GVJt)b zpW)A1Wc7r(WiS$bqpD}Gs^xpvbJ*AhZOih~cv!zlBksi`&MiV+;EM@IlIDka%X0Bh zBOK0caA9LxQO+ni9EeA)(Z)A2cUhs)q`z3DUPt73ER<3GlT|)Low0veV1aCGY*6Y$ zV1nDNjmU@#HMZ27Z8Bx39b2`Jj*ceGoYv7nnhtgK^%v*o&_lUwmUzw?urJkr5S)iW zBW52d-j|F=Gw9zNoc~eh{NLS?*U0_~ z&5$J-Cr{ouO8%W~^Kue^n4v&>q1#s;ywOT@iFAqKlmmB6rih~X6UWCk)pPU-15p0C z(@q>XPX1Sn?B7FZLy=_AFtSvb=(}0qLl*dnJcU>ZmuLGlIG;jT?1yfD?KiJkc9eG|?1{-` z*kx}rd$!kDj6o1~l#`tlHZt-^9gwpc{Su}RC8{!FdZP?o+F~8hA|6qi#WBrMvWI`r zpdLB;CHSeXwF;K3!3pZH&PGn5hP6bMc}oafA>F7X9Y}v1l8x$F^MN_xH(+2^fu{8) z9X4LS<8I)!s6wY=zSZ+{^~ndL7xFeex1&L{kZyv&>Nt0zF1Ei9cXz}XO;$m% zQK5Q$86CBd9U4l012yNSD3;^(;bF$NuR$LDwTNl~LHbA0MaV$lzU(%2-Qe1_haiUF z9@o2y9e{MRZXOn42K{aIP!)Q6`dzN70gPuOV@sdc@X=LUMYx@HqaY|iortIipMbN2 zK2Am#gpSzS%A*Y}^ukJ%7sb%hvf`IojohO++v*Oz78pwx7muK@Fh=lRYM!ACqfaS!(T2@3iDN2;Pqr z8Ssh?_<}SRoA>`w*Zd#drVYy26xR9jJ&%0-j(O5%d~4jRO}V9pNJM!acv1ww&n^~D zQv^ygv{sIvW6i2nk(=@0(y+1a^MB zsB!Wj_RRNMKatqNOh$Be;Ej)l_gJu5EalZ4e+(Otifn(6eP4$YI z+YqqZ%G=*jgJoAfXjm?-e42MMwo^Q~US?7E^(?SVqKE9(LVNd4uk@37_1$`(o)0>{ z+LO%x8vgUA-KOmYl-{|GSAeYLTT=&p-N)at0r}mD4a|9I6>)i^&3^ToKc5sWtk9r| 
zbZD|=T}UO&W~HYMyuK-6*0p9f6HgteV*)pyY0~O8J?RviGPojwovCqxPtys!1`79JhDqof0`Nqx2l2ul%8_N^6Aa(KajVkO z*|?(dsH}d@_PUPt9A8t))H}LeL(D6jacL`udV7+JwTAU#t>nMM!vD|jru+E1De7}5 zmm5Qsggwsw%VhPpMp8v-w8bbkyeIus{xNrS9CWJpylvi34bQn}k`I znM!PL-0^Bjq8MSXN{8rT)@C?Ao5_P|^Pz$w7zyea7XZ+FkL;fA;{m<)E)wSyUAU2YdO&9|`-A0b( z#O~+3ouea<^R@tzl^OVyTR=axWlmFPb_7nGgeM)d~f{hrO}q+ z6#m{4j&e(3ik=?k9{j~DPYCk?nb`QjdHcN3liT3!*xWCE_#~aJ6N*ytB?UQvoJeN7 z_BhS56|r?f-RtQqldRNdfSIcOh1w?eKqeu)5N>5{Smsz%Yz|bkeTDSgqQx1K9pUxW z6ABw$cRf*L5yN(CiJWInC=JJ2LYth*59_MkO@$OVcu48-)w4D;9J)h2H@Tg78}?u@ z*tgWYzXm^N2t%D6y)~*KD~+*ImC&z5uWEi>Ss~kGMH1|!ke9PNmsBmX)OOs%8bF%t z@7*u1@3rY$9?n##Y2}gt&G{k2gf=}_TBXLUnF#eb`J)3*b)@tOSD>=6O@K}9Li#Zk zeW2K5>X6KWda0t_qH3~5j#vvU#k79og2aB;?Ng)SEB22w-1$z#@$=OG^OOz-z@ zW|f*DV-p4A7X8q-HeneyV+2Z?PQV9Ko{A`tZ^wL7M$hr+6$Z#(U)*(c_WpRHV`l5| zNb_^Tt7rFTFCTfh1JG2Fm!5tlV;xBJbg{JlIojg%6 z=6lw%nl57EY0FWatprQ6s5kSl12G#R&z&m^l%E}CrgPzEk-moh#eSgCC^{F zWg`jXIp0#uAGoG)!#=C`ENU2(0~z)G16LYO{?5Fkex) zn7C*i@W8jBcPuBrQ046-qOTGFWWB%}^VPwiu*kZ5Ms+|NRD`zSuebKoXSqSseXyp| zh4(|1%`b7+Uy(+nd-HeTO;63Gy8O0uF_zFwIecy}EB1`Q@k}t%Cd2su(ao-1+lJZX zxzEm@XZ|=NIjyEKxfkc3BOO|bT@YW}nsUn4`;->XpBROvxj9XA-D#v$GEnuq4Wfml zp`WILxVcTp++;JZ1Aqpz|1h20$qTwy=(6*<}xHJ(|w>C?sR6(ZWh-TlixE{B^_0NVi1%j_T=H35p>e!^*4X!ETslTCfbu63I z=g}vV6s9N%KBIOJ3D^bnX#=EslE@Vg%Ex$6B8De8oHjxtNx~No!QCNjvMEAE_EX92 zX#tDLoD>QJu6nIPw^c>?oic*{&bvot5lnK+8XBzC+L2_g(Gw1b!)CLY%;%3HN13gz zifU4#i6c&XC?P>X&3=X!ed}vY9WstGR4MRB9UM^w&bYY-gF{9bxsp$J|I$G>wo&1u zE6uJrIHj)t=GtbE3JhEnlcD8&c4Wcl7;z&zPl9`*s;;hYVva?q5C{YxYTXH?_>lz! zxB-QrJexWLLH|lLHc$LehSpI^VUJ${13Giyn3aP9u_!*ktHhn2g&*B}Q4@6g*X6V# z#^XT*_10%SU8w_y6ZM@3H50xuRDdG@-!A}SGzcsKf;~6s$u(Kns~m2ryfGV``~wcy LAF`uid@ue1$?cJO diff --git a/performance/duration-test.yaml b/performance/duration-test.yaml new file mode 100644 index 00000000000..8a50e0de3b5 --- /dev/null +++ b/performance/duration-test.yaml @@ -0,0 +1,82 @@ +# Possible values: cassandra or dse +server_type: cassandra +# Server version (e.g. 
3.11.7 or 6.8.8) +server_version: 3.11.7 +# The driver Git revision to checkout and build (can be a branch name, a tag name or a commit SHA) +driver_rev: 4.x +# A distinctive driver label to use, for reporting purposes (will appear in Graphite metric names) +driver_label: 4.10.0 +# The IP of a running Graphite server, see graphite-setup.yaml +graphite_host: 1.2.3.4 +# How long to run the duration test, default: 2 days +duration: 2d +# Cloud-specific settings +cloud_provider: nebula +cloud_tenant: drivers-automation +instance_type: m4.4xlarge + +--- + +ensemble: + server: + node.count: 3 + provisioner: + name: ctool + properties: + mark_for_reuse: false + cloud.provider: {{cloud_provider}} + cloud.tenant: {{cloud_tenant}} + cloud.instance.type: {{instance_type}} + configuration_manager: + - name: ctool + properties: + java.version: openjdk8 + product.install.type: tarball + product.type: {{server_type}} + product.version: {{server_version}} + cassandra.yaml: + hinted_handoff_enabled: false + datacenters: + datacenter1: + size: 3 + workload: cassandra + client: + node.count: 1 + provisioner: + name: ctool + properties: + mark_for_reuse: false + cloud.provider: {{cloud_provider}} + cloud.tenant: {{cloud_tenant}} + cloud.instance.type: {{instance_type}} + configuration_manager: + - name: ctool + properties: + java.version: openjdk8 + install.maven: true + - name: java_driver + properties: + oss.git.repository: git@github.com:datastax/java-driver.git + oss.git.branch: {{driver_rev}} + type: FOUR_X_OSS + - name: java_driver_duration_test + properties: + git.branch: java-driver-4.x +workload: + phases: + - run-duration-test: + module: java_driver_duration_test + properties: + is.four: true + duration: {{duration}} + graphite.host: {{graphite_host}} + graphite.prefix: duration-test-java-driver-{{driver_label}}-{{server_type}}-{{server_version}} + kill-nodes: + module: killnode_rhino + properties: + target.strategy: whitelist + target.number_of_nodes: 1 + target.selector: 
"*:*" + repeat.delay: 120 + repeat.iterations: 0 + graceful: true diff --git a/performance/graphite-setup.yaml b/performance/graphite-setup.yaml index be6a55b7919..04c37aecfd9 100644 --- a/performance/graphite-setup.yaml +++ b/performance/graphite-setup.yaml @@ -1,102 +1,44 @@ -dse_version: 6.0.11 -# Driver branch to use -driver_oss_branch: 4.x -# Driver dse branch to use -driver_dse_branch: 4.x -# Driver version identifier (used as part of graphite prefix) -driver_version: 4.8.0 -# Driver examples branch to use -driver_examples_branch: java-driver-4.x -# How long to run test for -duration: 2d +# How long should the Graphite server be kept alive, default: 15 days +keep_alive: 15d +# Cloud-specific settings +cloud_provider: nebula +cloud_tenant: drivers-automation +instance_type: m4.2xlarge --- ensemble: - observer: + server: node.count: 1 provisioner: name: ctool properties: mark_for_reuse: true - cloud.provider: openstack - cloud.tenant: performance - cloud.instance.type: ms1.small + cluster_ttl: {{keep_alive}} + cloud.provider: {{cloud_provider}} + cloud.tenant: {{cloud_tenant}} + cloud.instance.type: {{instance_type}} configuration_manager: - name: ctool_monitoring properties: graphite.create_server: true - server: - node.count: 3 - provisioner: - name: ctool - properties: - mark_for_reuse: true - cloud.provider: openstack - cloud.tenant: performance - cloud.instance.type: ms1.small - configuration_manager: - - name: ctool - properties: - product.type: dse - product.install.type: tarball - product.version: {{dse_version}} - cassandra.yaml: - hinted_handoff_enabled: false - datacenters: - datacenter1: - size: 3 - workload: cassandra client: node.count: 1 provisioner: name: ctool properties: - mark_for_reuse: true - cloud.provider: openstack - cloud.tenant: performance - cloud.instance.type: ms1.small - configuration_manager: - - name: ctool - properties: - java.version: openjdk8 - install.maven: true - - name: java_driver - properties: - oss.git.repository: 
git@github.com:datastax/java-driver.git - oss.git.branch: {{driver_oss_branch}} - dse.git.branch: {{driver_dse_branch}} - type: FOUR_X_OSS - - name: java_driver_duration_test - properties: - git.branch: {{driver_examples_branch}} + mark_for_reuse: false + cloud.provider: {{cloud_provider}} + cloud.tenant: {{cloud_tenant}} + cloud.instance.type: {{instance_type}} workload: phases: - upload-dashboards-to-grafana: module: bash properties: script: | - echo "BASH SCRIPT FALLOUT HOST IS..." - echo "${FALLOUT_OBSERVER_NODE0_GRAPHITE_HOST}" - + echo "Graphite server IP: ${FALLOUT_SERVER_NODE0_MONITORING_GRAPHITE_HOST}" git clone git@github.com:riptano/testeng-devtools.git ${FALLOUT_SCRATCH_DIR}/dashboard - curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/aggregate.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ - curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/aggregate4.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ - curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/focus.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ - curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/focus4.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_OBSERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/aggregate4.json" -X POST -H "Content-Type: application/json" http://${FALLOUT_SERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ + curl --user admin:admin -d "@${FALLOUT_SCRATCH_DIR}/dashboard/duration-tests/java/grafana/focus4.json" -X POST -H "Content-Type: 
application/json" http://${FALLOUT_SERVER_NODE0_MONITORING_GRAPHITE_HOST}:3000/api/dashboards/db/ target.group: client - - run-endurance: - module: java_driver_duration_test - properties: - duration: {{duration}} - is.four: true - graphite.prefix: endurance-test-java-{{driver_version}}-DSE-{{dse_version}} - - kill_nodes: - module: killnode_rhino - properties: - target.strategy: whitelist - target.number_of_nodes: 1 - target.selector: "*:*" - repeat.delay: 120 - repeat.iterations: 0 - graceful: true diff --git a/performance/metrics-dashboards.png b/performance/metrics-dashboards.png deleted file mode 100644 index 6ffb85b9f180c63ee80376283250828ff27c790e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 80419 zcmd?RcUV*J)-4Kz-jUuBREqS_q<2)Li!|va^q^EBbfgz6(yIbW7Z4N(AWfPyDbh)# z*H8llI4k(u``h>2bH8)$bN{?NPsk!!b*}fFbBr;^B2Hgdi-MGe6bA=~;@(|#LmV6e zI$*him=O4_l@k6K2M1TpMNLiro|+n${!^%vi@PHZ4&O_A8=Gr)1^Is1+S=Is7`V+( z`qbYrHa5}FCiHvjPp{EhkPX3CUB!&y_g8@p_o!_?3@nxh!}|?#Z*qQPeTw%cv#^KGqqaAeMRS zqyNg>0Iwi>pG+V@d=z}2jxGH9n@DGQIF+zwl4nQ^CP{UdLA3z zHj)t@z6Mqmy#6NK){Ptls+3ZF;HC(Qw7CUpx4nqyC{S+zF|}0F6*H?L!d@1~hKC<^ zUtZ=qGBZv3i-}@{PA@NyXD%-LVYm7F4tf=hLj?SF z4R{6Q;r(lE0=hiVzm{<-fbVcrjMVPk1Ky47pE^2vdOJgXj07K?10Rrh-8J*Z!C|!)Xq^jz{3l>9~}7rSzyt_(Z`l6z{B0sTQ)#} z`)UnYU>W)I^^F?jlCV8szE(~EqxUKE9(Dj{GVU` z&l_GH)9gQvDJ}7zhy0Ik{@GGq1bgZK5sJTYzWNjhv?8gz$iHuzBB}Y@E$p3Sb5YlO z2)qNM?CQq^_{|5ru)h>6>p7!fIk5lE z+-$Jie$dX`$BOm!PwQZBh^t(`xzF%#J-VLiAyu+x>o2u9h2r0UwWCHpzqhi5(GNvD zBwp#y70lQ&3>xED9waw!o^Y<3=pS_EPrFIVaw0e%epDd2N0WjGmp#BYDOs$+lX)Zc z1Cu|>_DqKB3%Im6h5N`QaL6O8h*qoHGJ5*m(n@;|EKzTv414WA{XfiL=hru8j zhwK3TC-#AcdEoqRzm($j<1{nwwmoJJGdN+2F1^$Ts`R@GXwon=*=h^vj*;EvQM>s!?!|926S{AFdyeQ5M z@rySnuQh5cmvO*ObOX(}D+XP|A3tb>=0Yoa9%L$m|Lta#Yzydt93&gNCuB|RDRU#oZ@x^XQ22G-LPw$w+~;>M-zXY zO8yg;$I#nvYha{Z(=IrOc`^id)j2;RoM3V$y%sW+hw2QwmtW^G`9p!*@7OwH zY5(w=2%ltC;&GP9Qx+V*rsCpz__ZMVm!hnW5xn2+7gUNeau*f8S{FRx$z4j)^f0mR zcf{;%y>aP+-n3Q=kCtF_VeUP2MwC9joTbL4s3LsRS&HT}dbaFe$ef~seuO`Lo`vFH 
z&h)jTM0!k0#6%u~l?v}{dMT^*G%;(qQP*nyezvT~FyQyG*_CP^RQ@-_u-r_XlK5v3lP8ZL&r%M8yJ@pmq^@fh2jJJ9(WSTxw^V(GMeE_JUwogU7)+eMH6?5V)|dZGrY{dx z9NrAj(|b+)%eQ19X_pCVI))+NfhpYzcEmJJ+36-vQ2(q(!#|CQ^nu}-o`Zt9zD18Y z&d=o%9(3b%ahgCbyj;3ACee>vnxN%Pxy(Y!8QwcyYQ=dRM@ zu`?G?f8~3lhP#f@jP3j*D)ulHQqr`+fW$pJgR%vB36z=j5t;VsNLuW3!}l~jH&0*n zq=X$MBVG#Gg=kKb84OXQ^%@#K3^C8&YY0tjL5(Duj6>?7-pcahGyLphr<}ZHnJA<)pB`7 z;yzuAJ#TGZd+G~~{0SScok)QSUMUe*t zM!a|<)8mcq%=`x3wie~jH*@kQr^LVM7(mq(l2}x zRu1dC1SyHuL)*Etdg+5`&NRwY=EnrNfz6Wy0%`pVnTrNG=?Ba|vN zd`1~3ju|LcsCj+}PcF(lVTYP>{@YpAM%y}>b-z7KYIsw)519OF`>5CLjCan>`0*U& z-APN&f@^^eX{A-(D`yx1y9^B6J4|Rf)78AFhgDIu=S6IyPoN(>B(}4OlEty4W z2W@zZZlabEoKF`a{f3zTR$K%ekiC?|+FsS*P<}ss@Ym?e5N{$cBoHmiJie*`pQRd@ z_P0;%FyoJdn7cK~NuX*xvIjKg{i`jq8hJ&>Iz(6iut|7Bvb&?D)PCd?ZmdT#J)&NiVh>h%&&vC;-x zI34uy*F@>%r+EI863Fz!AUJq<_XZ)0d~F=xzX$rh z?S}If8^+EfQKcbwA#LDXKjcM0O`Ih5d1Jq9)0{I8B-fEqO=Lt!$o1b64~6;U>c2rV zx89tK=3S<`3&Ht9*q~DMH8K+|sC3_aIwEN^Z*n%&H$F`BVPVhP^xR=X(I;q1iVntp zoBTS)++C2peg-Ov5H_0LeT_RQZST%gpj_{)#l-?C;E1rk4Re2ro@Guwe7*0Qc)7KH zWOOYY+E^4K!DY%+NmM<_y883E!?UBtm$}BPWJPxPWsPo$eb#qD>aM8UI3qN{{EEZs zuKFDgy-|w6UC_2 z>+-oaaRhC022O<@)2#9gY2~FTbA~J81>>I*AN;RA5LM&RZ|nni7wYjnRS*33zgZi* zzt;W)<&7vrKbg+XlmB+t|30`cOAm;m3;8 zN%@LF%$ka~&+l~s#ooy?b91^$sqK-l9&o5~2F2$ibxt71u|q)Rs)(?O*)O<8X?(3~ zRqF{;7o|q{GN+Hf#;1HXgnq+o&*T$@^EnSdPBg!n+oVdMJ7~u+&Q-g895*S)cFl4? 
z6F&w84hUXThkt2y3t(VPVt_Hvwff(u*a48PE?pp5S@SrWq5beSOUcdiV~Hdj(Xs#s zMyB~YN_YFOQRw~HsUA+8xTV-v|9${hOs2cu)RVH!UW;!a*+sbvF@Z8Q(bEHT2HM*8 z2L9!DE?sW;>Ouc7P1CLG?g&^18gyaNMSG{R7QMbi7(QD=~VPdX)0x4Gar#Mz5Vwa%yJds`?~^5s)3 z(a91q1uz0#T$oRTFJERSMtmUP;2vG^_iKnmND*`HLSty665LX1Ka#UxZa%qr$xd+t zeEb|ZOe)7b+e?yyuWAj<%T(_wzWY~&?R4i$xLJ~tL+#*gF()7Zjkt+`@0Kxw4o<0S z5bv0o67EA5O1H1s7tyfbQ%;t+B+J!XcbfQg*NddO9_ZqeLkn8XAs6_=#^__6dqFvs zSj6R!(p_Z1OUD^p8yHnEmAB>XjjiKpt6|<@WzGFuC}sBhCxMhS%QeWZiTmcQ5&vsi zE7}wX)O{t>F31UGEo1r1!CiBJ1BI925EfI2hptL!aZtwA^slewYbf)Sd4=1(;UB4)(%(3e<)!FOFNzUO!t-p9q~u z*3UwbbW&HMd(N5fEcT;k+5~@G;WzIvW^nxST-ZJza`o3pL91u$HHFuZi!cdRhpjlp zWvvX>GBMFdHsLhGBM&YmM~Ef97k@mq_@01bS<4Hfz40I>Djuwv^CAN+IDZ#nP=8wn zfLx=zc!|(2@`_b47fvVW?;l}pr86Sn5DG=+$_HRYZ@8&2VZ@H!VNy7|TLhFg_wexG zO(G6b8vcB2!FX;&Lgn>heVjFbpE#cLAf@MaA|EfvZPbwIe(y)S7Zf|}^y3#cfPO;& ziWL*HV`JiOSwYoav#Y_?y@5t!0tQa+xB}29*EsDXL3MX2Pb+44gl68IefQ%GT9K6B z)VW?wY+@H5*ZUMR{=g;>Qzw%urh_*Ycc#zgF4CoKzZm$Y;jCb1&gT0Nn8 zDC**;)vm=}Caqyav3loA7a50FfJmPblAo`1KbAo%1eGx9l4943V_`h5XQBz6vy z%o8;4)JpFKx3CTbByka+mZI~07g^zvvt}EHtpTi31B*82Sd=L@V+2W)E_9}dKqO^D zE*ys4-%XWbF;=hR+O@4Ha=n5sbU`bcRvm2hRRQ2?jZjKad%Lw5PR~24PcS4Je(!ti zcj+k~`=1W2;|5JC0Jjw;~5Bbo~FsVpa@>;@%oi6t4o?glsTXr#N^P?WzqT6$h z^4f*tasUkVem?iw~$K9(1k+ZTt)cIMu1?3D*^*|1j}-svVLaSRI2kp+dDIu+n9&SH)WG!g^zpm8czH`cIXmlP#B))ZDCWwOvR>$UaiTuolN>z#ux-O@Su=VDeGcuT$Pc`N2|iO=S^5${QW>{~%Gc2f`%AJ|nf@ z^)FjLTRT+eabMt-UaGiUUs(6W)lFgPtC8w(ojCDA^Ao%46ZIE%R8u2toJnEh5cse_ zEB&p=M;110^-#jTd%z7S<}JrERx&JO#Y0x#9+>Ss+&FQ^@rNd9=}>q@s^yO{OqM9T zN`10(a1g+gH$3KCqqq#IrDJ*wAp8W87vV2QRa9VY}SM{{2)z)lD_qwA*_N5W#zc(RW(IAd*6>Wxu-UhdyT{*0MX z-q+JBB#tuqndZfx`ESZP-+`2ybo8Q!tsmMkme`0#LZhLm8p?J-Yf5dsBtg6>m8t8_ zOpf=#Ef2sUNS)iipnvM&p)q0T@!1$lsz=snI}T(`oRhy<4p3V%QgYegKh_5C)9@ib zcI>Isb2_pu^yKrv3zxFXXpfnov?^ysu2XL05Fe4OCk^RJ6GPWT$*ASJu*Icf(T6%{ zR5oAcf`4$cm8&)*Wx?w_j)cjLU9y~`w0=hMAKFR!&>E&pp4fF-R(W$^mLLc=R@TM> z(ovypF}Qx4iAHGP08j;6oBtB5A(G3ATY&Q|IbrHMxyU`kK|sE;SI~be^;GKtet)oRCVaMKX8E0*kKn)=f|f+9zzL 
z*h2hL^MwT0W(%SImDcPI2!aFSpXsnsB8TcP^d>q1z-nBf03g`aev6Cd8XNqBuXFVs zrZeRUtqZ$-i>b;vO6uM3Ty4-=pnSLn(r{DUaJvV=kpfL}75y#*KzYim@h>(!a(w?t ztz*tl9kFBd0UKNmgu+!cb>4DhWMcjG%1J&S*V=d;D_1^cF(@CzlD-Pq z-W;N$P`%hJJ>#MPzrzjSQRzoUom$NPtqVRwk1TUhS(w%!HRQe;z*UVg%yS(IZaM-? z02^v=!)|5wE;v2>k+_;(gVr7LD^%$^5$-PJW*efZjf?d7o^n5Mw}18Q{P_j z#v)Eciujk})a5X3(5*oXu8UNx+#8_M`~7~$?0j>HcDK3Qu8Kd3sn@j4VYKSD!eq`H zY44w3k7Y*<+jei|*Q^2pY{sI)mhVMpbokHMk}#p18|E2XKqmk=Hi=xG<{a|ZY8HnIlLH5c2fp~IR%9z?l9e7ltP)a=@-wk>v2%?Oe{GXZ6=0?XL^G)Gx(Q1B5&{_PJ`C`+63;6+Ot6wY;>Rj=BgTpvzC11hs}K4jCx zo@xTWj6cuvWCr)K(yoQxGtW%@GM{<+FN6{?5Io#1V-uFm#FAu_L|5AR8`Dy)q`VQ> zEW05cx%3{;>b-$1i*&9n(og{=dcQNEx_J9*o|nC7lwLd=jfyV0KzM<8idS@DIs}zN z$au&Xtsxtrp+b?xl#3z&o9Yizkru*yK{}XCJO`>mT()cZl6EXFS*qayhCF%Ah0UE^ z5`P)h9x02I?-TK0)t5Z8qqMZ))ii9@cMnO4Z$|6sQj2?#wCT@J=vERfPmHt$1!q-l z^!zz@yv2F4Pcxqb&WL24b=1{Mk-)P@j>^h!=x?{Wej}Z4YsRmv$YaL!%!_8gXT*PwXj8o6(43u9v z4hjcdYyrBvGd&xtQ;8_--_}j6C()K!3WzBWR8WtMAVLKi!eaJr@KBTfQn8(WcgOkW zdC!|fteDNj+Jg~ixBfw34KxWdP}bT+jZs^km5Gq$cWZ3(_WNdvcX@MJgekPXuj|Dy zUr~QwaEn|LO9r)$QI2yQDjP*n`5ziUq-hi*fd2}=4~5k+QrLm|Y9}47SvVgdo#Rrr9oBTJdLEq z=eShx+pJOxV4O@`ck0niTUN#OTF)?gU1}lMvr1i>lC|K`wkv{J^Gm`~4PKMk%o!?6 zK>@^{f#TgdBt(yjACJX`LZ;L)MEaYU?cIQZ-9a?%{;w(Wlm$6j=1?SnTCY;Htt3&} z@aPh;Pdh6T5&(ozUa1wMTyw~$!R#L@Bm@YE|DXT~nFdy+-}yh%MGDeo(PUw!J`Y5tJrcms+<2DHKMi1z(ta z0@Pjw1!;{uf$KO?EQ;+$UO=Lbf(0swwxm)T(EByzJb+{5Id<8#M3>O1R;2GN<{TB^ z`)LpE#{qgZF>4#f(}y0eM)ow+BWXwf_7~wpT9_&~!sO2fJRt8nVf>b3mjCIpcoBxM zr#M0+%2h_fo{FYn=YQtbH5dyVq*S8|RQUqUW_G&4qLjmlA*}qB6qDqX$0TpxBEiS@ zq9<=GRf@giXw09h_LGFROF}Q8qUg(i$=Q?Nj4F8c!i-P2nrPF7cK**-T>Bp+K;zG; z(Om8aeKC7dY-znarXfg;j&DI>O%cEmVYPSkv4Kt{eZ)mG6t*goj4}&(C~^EikBSFx zCaCN+Hq4?jbCGzud7=Z14*~d#WY6oep?u({-~dSVbg?+dw>qB?g@SVz5mAfx=zi^M zSVB4`F0DGq4F7E6mBl&HwiKCL z%Wo3j#R?-K*oNt#SVHQ;>_>?vGrVXN+h%c>t%GXZ)&N<+m>u}^wr z4DTpx<_Pb0&>NW>SY;m35Nw8D_FgGHkj8nsaVArf3{DR98s?X50wi2ngVjjqt-3Dh zPDPfxi8)!GG&s@pDeiJexfPI>$UQ0+4BjxW2rmGCtM`(d8xtkF@08_Uta0U&m5;bmjXo-;!~!503dg 
z7v3c_LpW}&W_R{uw82&zD0U$3?N%lhq?;>N{U%VL&E}ui?3^Tz9NVOMmMqZczYL*Y z9>=4|J1mirWv-pVqiqOI4qecKft7|ny{uyv0(!0nMtu+f6Kq!e9$i(go2NCN3hedH z)n_XsYiKbe+m{2-63#6Oa@JiWNFwoJf-AWLoZyqUdPDg2VJH32k?X(+eEb$Gu#}^O zDEOwP6o#TCnG7V~K|pv(L?NN6Cd3SBZHDu^mgOQ=oRWJ86O1^Oy+Nqd61VlPcLuC8k+kx?(n;q9~O_YpG9v^8v&|_2y$-6oK+8`DMPw=)|ksf3;3z ztgHHW*7&c5J4F$yeN`jij55hpe?l+OmbUozPYun;Lh(f8qO|FChbAk)6R|VaOH@%W zuG9{0WbHsgXqE0CL6GsS=v^1~xyiPf{BS4CXE;#%KiAPz3Y`>S2lEvgSW!fHJw%O} zEf4USN4Da!N)D*39F?Nu=g@wghDmc`RvO`TKQSZ)rMp|1ua4xcu`<994n4tH*?ecC zoKB9Kcc94*$VsXMQOR2L_q}Y;Ps$9{ZR#zJ(pDNKngPFRh5lS-iDSy3zY=IBUlovC z*=j4zjpi440hh{i7LfgOE}WPV!gM8{44xF?PvHKIHCwE58t~leuW}mEhT^2Fy@wNU zFDRUpVV%tEDB6LrI$Y9+K_Bo0g^7Gi8yhvcg*T-(mFb?QXxN3X{6ha(=etFt)0sbR zyUu$;CyCY-oampdk%^B=fi?E)&0vovcR0078u(Z+9v8Z{*jviHYd$uo|CQl&XO!|KK1$d--mxKKR& ze_{E-d%osJ!U);S)6+NmwMF_nAwqDq0J`KmbEW^i+vnXz z`>r62CU%fFAJ!zHWL2Y>%yKnBE<^$UrgFX3OcQvRYM^nXrYrUlTX25 zW)b?B;p5>??Jgy`Ibbs6_5He|6P8qZjg z&BaWgSuqp+(LGdA6JMg0MW3M@QTSjfeLl&lNs}7?+!!e`xmpYN8K)Szi826W#frHPQY5pKGGU#D6m)(Po%nn6S;xvz?%L<40y60!~MB z=a`bMXO0pOdwn7z7pFtk&6^Q}W>uzc<#qAz;F}+kd^v+Qs}X1OXCbKPgQ0Cbu9Z(y zbfgA6tAN}o=e$jE6pzI~t&C`{S`*{K6cqcRHvwws+kdN`ink{$(yir*{YyK2R?YWl zm-MfEdNbJNL5C{;NbH|$!99Ppl!LTTKNtXsZ|gaqGcl|_?$wOQZCX6tSgbYzh#s3q z9+a`&8MZdvl3W6S`jT@CFq&Z5eR(B1+T?zi|n+k*d9v*mR>n`J}v9zWyGcE7IABb%=;*LqyD`nPU95;%+!q)w%tM|Gck% zb$6CRQ@^bi+ap0Z8e!iH-v4yFQa~8K%FX}fMdAyFmEeRKMT==*X0P3IQ~zag6mfMw z0ygi1L%xP6vyqNnk=Lo{)g5oJg z!%pZ>E1FGYfJ$O$#ljFH&X&1L_RKdma%6Or4{z};_}1gzqHadRHeMuS?HL5^pok2$ zk-d^$UB7n;>$f=&&Xdn0Aneg*aJJMO7Bj3hOKnKB_l;8szzVuDAC&+WsV1Q7jo(qf zt+*Ljb^4ugpD9-dsBT!Z#SjehXIDcv^ijMrv);{A-cM*gB#s^>JBnRs$056+J0O#I zO4@kT%3h{0Xfd&U0P{6EHOwq*QtZkAM^PL($?ADiQ3Ri|58UFbDprtOXHH#Nm8Xj{JJp?Tti{$AL-$+79S49X?`sCi?dwwV*?jxCdj2 z%r9X4d^Slo^p{jlSGE?F1hW2ncEVD9!1UZFfoufNvQ%C-=eBwh3IXnk|L1Aw>qZqz zjKc|_dCO{H^;=;;zeNVkM@x-c7cmbC?wnM_FWw`U2#Edm|Jh~4B0U?}yN{-gceD`s zx;gSxMSH!$KE<~>%XqV4UsuTOO()zN9YSRz7Y2ldhCr`-OsaKv}He zlH~>-i$T(@4FE}LhhMdZ(KsATvui`jK$jt~ta+Q%4A+9~1isdfbbZF3WX%w6`YSg{ 
z9)Q>GIR6rH#~UMBVh8i`ldXae#}Yj1&rF~E{0(8WKJPWrCeg62(X^JxGY?&mv$EkO zbi;k-2{=UVHSuQh?7rt!_}H-eDj~tb^Xmth%@=A3m*+d718=+d^P7tU9DGm6T*6{| zsov=OB#kf1jhaM4Dh8ESfLjP@VLdT>4!3#A-AcTz35Ls!3ZZ)HwYwlcsfOteeIR&S01BkFV^2_>Z;>sVQIaZdP`ZmuaO%x=6_P9*)qYWU{ zQW9+dP$9wc$dS;zhg@NiV@Xu>N@lPm`dnA`97`t>>kUSkb24Y32yd=vY&&i04a8*< zXs1b|G%jBC2{ZqAj!ahg*~viC$xigCWqWEPI&fJziRDW6J>>gi&_#MHGum>k?dW~e zTIj{EV#PZ2am(fGD)F%u&}W;ZP&cprUrWH*VU=G@)5401o9_Qh2=2ZTg0J782~knG zh@h+B3bt_s9Nz#AP>3h|b@0-%O%h`&Z@GK8T;KFz^0d$*s6BKyl9Nu0{fUE*9ZN2* z%9O=sbgMTxd_nJqsTB|kGNn`9Z;5)ydR^)_8(=H5+&eae@!f%!{>d%QFB$2(bG;&{ z+MuEPnRYAL{;3eUWT1akNt`!YN;CJLim{qHeR85Td{*3Jn;HHg#?XAQZN%%nqv3~3 z7DvDwU8~mM@$cUIe^!$KhZ9Z37@hWygeIts071Dz7m8PeZz9|NHsDS2Sj%Z}0iDwT ziaa+xsK&fl)r4iLxaSRlk4md!-0;TfhALp&IDo~eZ54n(kjJXSh&y6oGMHGN_!SWh zOsUtVMn}rG{0WiCuq#=jJ)BvP7Z^>PO3jE;6G27z`B6IegXSRg4i)3^#o8R=)9ngD z&zAF}2SwU#$#Po{%Ra9Q5Qu(VTDCxc4E8;gv|~#eujS*L9kNUx27FDL{=%1gO9_^K zzdIZE>RZmsCl`^^)5L|7c>%7JQG`@Vd$D$l&l!Jy52J{?IGt!=kUsxYgt*Um7J7MR zU1p>1<20pD>i3lAt%;ehamsTcscF~)LGll6$@jlZ1#TdGD?=^2%~fEH9!ihoF}v+a zzTr5}L1biXN%QHcq=x}icv@_}?-!}0+Wpv2;fkiF8aw{27oFF%1s9ht4+ldTEE?Q0 ztW9|?+{o{EgJVjnNegQUPUbI@Xb2+edWdf-Q%06lPi0)a8yRse2>VUfX zCz1Um68l=(T1}MB*}y{ww2~}I0ZI%|z@$<8Txobk@A(l-A%-6-hna-7J@HLrAIsyz z&hLaR+mImHL%6Rhjhi^dewu($4r`+czPa_#wHq746(R>_D_O?!8)k!hBO7IPx+}+y z?a)GyGoGsJOQ`s(KrK6X><6Xc{1SMZbj#8}=OYWKqvVDz2f7c`2|wGmm9M$9T&d9Nqolb}38e^Te2OO9crAvM(Q|S5lT$b<8 z-_gb2?%!R8ZP(=97H(R=qjgDy6W4Yr^`*SFS~RW{NVV`ZgY1+cMzlqQI3{kyF&(7i zlf~eW2|rn<0ylKE--26Gna``<;tf<7Lq#&Za_keTXEX8C2=yr~*ZO9~BHFMRBYn{G zPHv~E6us>aAMMp8w$*vuoExrqlx82#+7iU0+bv&pYo=INX}}?D*mvA~dbNl)34Ww0 zaQV}}xdteu$Csp-AHmz71_K%Tf~JC**wk?k2Bha<1Qoa0T;I(J>sXNr+Da0#PwnN3 zwY|N4uvHCrwka7rScAa61^o2RL27wGo0lGS&P&pipEi$Q*uS9(Xd|=IZ#pT5qIBkV zw^-vFcfOT+?~hFTNk>G`Dm^pAot5#Mb6G)1d)C^;hyF5<)n4e^DZlxisS;~e15EJ4 zTu{Fvtheb?r=GVc!y&{-49}QYcgrCpJz3;We!Cpt4mOvad|%(kwu5cMPN2`Y;Nn*4 z3}R=lh)kPMaX9fCRO*IZh(C+uK}g?@T;fV1G=L(ta~zi%khaL7h_EYrn*#i170+e7 
z0!Ky;pyWwMiK#iaK~#-NqyoWQR$GBghQVE-nl;`!i?%2H+TK*v`HmqQ{DZ-&V)&og z6#ca!t*ZTRC2<3NNHo5f;@E~^dm#7?szTYf=)?kdw;OkkKJO1Ig`%2P6eBK$bTyRP9^nfOWEt=J)$?p>t zw7)U4e!I?dy?*oaT|S1+ipEXznf(oGBC?LW@@s;y{-pS`Tvmz=DzGV?cut!hs{`;T zKpkq7n>s<@=iKR+diqMsr8;KF#}BCjITr`TXM5Iix#){{b)2Fy#S1jjY&K4y^_JP) zaU^;R8e##%GZL`i$kKPWxJ1giTieVkDM&mi_8FBv_7u2G-yhr$4QQ>az4*5AjG@nD zt|j~Z(Y8T1n`INaQ$#ofQ^-lL#TKF-(!xk^LgqY_N5X!gVsr8cLsv-xH!-z+ZdVz6 zbaY_}8`kJtdS{t)iSr?RZBXl4xPQxvLg%eaPsH>oFzu{i%URxX^?d=)BOu?D)N__D z&{09MnrAy=kmUB#1s!vJjKBpsbg_tW$pSI4PQ?dhd?SM&jQi7^E4rrrKG`(9U*-0i zUXcS(!1ShkrZtq0IqhA+Pb|)kUG%nr5j7%q!fT-c0ABowQjp4BiuBq0 zxHz(Vv+Tkr?XQ$7jWw@M+=4-1UAbw+VnO+{gSWH} zz1#JtwTZgB(Xt;E^{%YM3#<(LJlPT@U2UR%Tnu3wl(c*d)-V*Jiifg3#?DJk^$Ezo z=zgI*+9go}S_yg}2 z4s?wj&KYQN8u#ZYOeYNf4o*(8>izQ}3^kcUNxJ#Gk3&DQGsCNuwnUFFWce|46dHOq zZTBu)X;EwWBqEydPJXPcg?qIIsy^moy>x{$|C~$&Ue(I-Jg_5*8_hzo{}d zIxB0VC#5LQNc&wrO^>CT;G^mKfX>@ii)8h)PMHX}(!2@7>c@O_Y^x}iNpXq8SR1T( zedj{#B4ceK`McP=ca0wwq<6CqRaeldyL`EYV|eqAKNRzFU)gSE2!WAdjHVQdIs(Q( z^xs|)&?Tlwp@lUjQ<+L0GS8Rg7Iy8uh56pHKOzvrV|gFf27wViJfE4H^yzS`mB|u= ztGN{EkYX)$WwwAHgxP8!tbXSGHF1@oTyT&L)4Ukz7#-3BW*B_rT3S|E!PSC!f!B=+wIOe!ZM@8Goo7yG%^GmDxb zHc^hfG0g$Yt-ppK{j{1EcBH}xiJ&99PVqirqnXovi*q^&?~?oBCUhdy4@@50gcCGb z+T@GGWa~%G8C%!8h9B}i;UeTb+12b+n$7SH?{Qjz;KV?6?Md$vR%F{`Ohr@ZrxLKjO>t=b?*(3Y8C^T7!G^x|xLg*N`K z@ z{_`H9{kI%9@`+)>mXUglpKMYj@+7L_qYe?;g4+{f=Sw1?YaUwePq;iAMC51JG%s`4 z$H_ZTF-n;HNw*XQg?a0kPx`0_KE|r3jfvo9nZq|mO>m6;qv~bWaY0dWX$6}>Q==Tl z$6jp+$38)${*6 z_;g%qebFnbQDFb7!J6IYN1N3$854oOCzGwYNzY|YS%V!~;mL8sr}@{l6$sz!(%;!8 zHg_HuFf*wtwLxbppLlB1H1w7f`b&3gzpaEhqTHqLpWk9MDqD$1ra!H0+o3-tIw$;H zti+Qzb-seUJUEVG9L(9$akvqwRUbvuswA;EcER{BuE^OSf zSD0HQvwJeC7jn)0se1}h@!b5S50Rsb$V1_#!P`xw$HK>Qz+mizZujh1o?0aEA8(X% zrj|FHmYOg!W`&0M?L2HocNN`km{7QwM0V;y;lFK5LekSeWX)H2Fuua(_xJ8l!%o-HRgCNVQVy$#jJ$@wf8}TIa2^_-tkGb`IVOd9yiqO9F2s zf82J430po^ypA-7i{xd;Kaz0qVqoTmFkg{?a3(c=t-H{ zd%V%K&+mEYwB7jPF}FNF{j!*;$PRGPrn&(dl-(~wSj=OJL^NaU-wYb%?9pA?eQ671 
zMKj8SVwuu<%UZTi{nTIsy|)=TXO|1V_~m|i{;tWb0$wC@R#sD2g7Kid2l|Qh=>==@ zaXgdpV@|^4EyUR|zoVYKYz4{3y{k`>!c@sok*3@^TD7A&wo3+i@KBzLdiQj`qceFN z@(gwF-HQ%7SGEjT!!9D>6;l~h3BNkHgTHr`?(S;Xg{A-W0B>Tg&>Q69WsvloDO>0a z>VnNtMrzIhiLl%WsaLuzv&h_H-+{&GSZjwf{XjqCqPd-&T66URGwYVqzdtU6(Z-io}?(r4-q75p+1v09kCx8$;8 zTP}CQlA4j-nCwEVn*;fpzoa$TPi(4TlQgCNt@YWISe0L0C0X!YVq1@nZ%D=Zqh-Ho z&yC-S1aRC7rTYA?9AW;G0>(%co{2T?5+}=btF0;<*e*!VPqrIn!6^${wL`nz78RO67|Kvpmml>YJ zDo8YL%z24K3|wMXkexBot#y{cSzaS?(Hb}OMBmbeJX~Z94V(1dX(oq!qB;IJm=)xS zS`rEV&Dgwexg#Jo>u%%-nV$$nvUc|5%6~O=ukMWJndLKJYi%R5Mys}*kUG!2R%~jE zmJt$e#)$9q%|GTQ&+K%5Q&(SokMP=ItbZUE|Fid*+`%b{C1Yp3ud`;fx>w)nWygn{ zgde6Km5xJLG8ehT;_RnAzWDNtedo+F4H+GfzCCw3UjOJKC+k@kbLZiF!`wy{)mM&3 zW{&jrG(V0$1Xkfrws9I;UF^L(6L1UOReMCwi=O>{OYClK5m87BHKPq>_pv7?XV%TD zvb#i)&BXGVad;HlD{q?O_09Z!=c%g^)c-W<$Fq9fv08iqRpqQidb zNX@;M!X@b!unLasAwu4?0Vjq=-6f08c4+p7~GN)9>qf z_2MT(SN4FLuBW4M$zgsGeoXbF9Duas$xkm6kKF)S#~Y^2X6S#& zCI~!sb<6m)(pFO$RE7(g?dKF82q^FNue$w(GRV?{pSodc%doizIOf&=nMgFL(fV`e z)87X#EXX9n#tRXv4nwYF4;GH@T!eF?1~AH{n&VEj#7sHrI)EELM!lxdNv{wz7NR#c za%7YPpb*zK5Lcm&)gfQAg>t&}G_FZOshguVOdG*!7j zM!e%G%dbBv%A^lq?$ka|zezVLgBiZr={FNX1pRe;hOeOc@pnR^Yb;vq>AZA$a_0wj zZ7YJS);6P2h$r2gCewiaO7{v72aI!`Zm||n*=*g2sVj;?oFC8c_*ZKjR$q$`_^5R9 z%jokB_!wGw#QtC!+)%zPu7g*KkZ9$6@ACo*yEzC?bk1Ka;# z>@B#We86{II;5mK1QZk`BnGJwkdhXZW=H{PX&4wvx>Z1t5-Dlv9J(6>q=xPo8ioPR zi@*O_XP>>+UTc4aH=exj>&XSmc}pcu=8eR(H>nMaGvL!ojmthIXLBmf+`VPoXv)Z1 z7Pdl_hd}7NKAocF$~aL@-h?H?r3)=Jb3f3Pdv_$%g4YyhLzYP;_B9**tNwhzeT@YX z)-NKA4>pGL&X4fx6&@;riF+3=0C#rhi8t-{14T3Tgz9pYCzp)Pc97Y25Jg^v^)OB0 zFE|n(hx2^b`E=pD`J9^3&ys_OVTV(Vo82XBG>Z;%+HEn8w&g*r8fbRl9P%bjIIFgH zmK;q`Q`m&+ISL)tw{UwkbqY3KLuis5vdq?Ob2V$< zpmzAEW`KaS<9_*SaE3TFM5zH;; z9Zk`wI~zEGfE?hzOO!3hq0fl-yrqNfdu)-BJGEMSm%f$Ggb#qu`}e;>uGfh*)d~Cfi-5Rmt?6lR`J0d{@t~i+@6~nz#8} z-_hmBEC$4UTQ76HdIOc`-=F%UnC%JJ8YxA7@h_BOTl}zbWh?K_$R*wUPe*Ed^hZ1^ zTus!On8BHJgj|tX>#hyk**z-X;qo|b?fTqW(p?Jki-WcDOpTkYvV7O8cO>ZqMxlGS z3ZE8RY+{FD`^TMAC3Rg7`;&O2FIveAevmh+N&hZS_U+_>EGd|Mw7gGai?A+j@~iV_ 
z(+YWWHCJmjQ?q9Do`TLx(&g%?(Qopfe%BnpRZ(iWB zGtcyz_sx*R%df37+)$gMfllv5M{Z4}jG?x_m>An%l{+|u4AeQwTm$v93d=2>a`32- z4;$to4~QL)ze$PvX0;n^d-}rbhsq=Nf^du5>ob1GhP~?LM&RRVHk8w1UL+#$R3e)X zK;(*0osymsgL3ZkH;Or{IYn}YNF0SMk2V`I9B!W4Y} z;!tt9c;N+$xOhoL*^@q=61Rv-r`q}4;xEXBQSm(DbEGZL?12 zE>2|oT*y?)$o#mibFe2PjTEv4nj$@yrH*)QJcWGY`0J3xH~0F3{9%lf5rp?(f3J3<%b(uYGS5jzdZPJ%X60cidFUGW z3iCcoO&x!md$93BY2EsJ`0)i$Nhn1G7_VGbpEtsN3nrX_|*Elf7M&JnzJhjw&&<Mwm_z6nMu}0c_GW|{b z3j@Eymp1;Ay_?GWVP9t@J?|?9UC!H1(2+J%&uc-Xq=Q-UG|&OHcN~Piso)Jeo00fR z-PFqW%W{gaS^^tQ5mP3r^|++BFV`nOCi?gzIoh*p^ZqxK(>{$@rg3XyUF2J?s_sQ5 zV1Euc44K@jCBu zH;Zc#>mOadjrf?*5*2Q9U9F`pfe7 zis3Wy@ep<>Vzd(@%!EZu!@*FIB63l;^M%titr3+)Syt*0fp@g296Juac`$j9UK9~N z-~Gl#*1g0{}u*;5jX$eYl!#WLa>L zZ$a0nc$U(=*+*0^5_fx8zaJQSqXme0%E$)^M5*IAOo>h@kG`{PRXUoI5C)tmeF{=S znQhmwsHOh?={}y|pk%bU`Gm7WpiQ{xk-!XzcCCnDpmAyWl}PMF1_JO7cMb)~R1v}k zT4o%@_!>^0o!`EOEv#j$$&z1mR4OpF&?C=lRempG_y=kj@keTKhGCXE%snER#%1oV zX)(u=>f^&PKwns1*{~Td~B|xXr)uQRs-7QYw53#U|&DJclj%b%wIul!0&SX6KDO z`4utu@s)PjdJDwPgZ6_dBg5)!e}i3iwhprhCDM+H&=WVzbKDTlxiJp4plxpXXl~z? z)P>bfS@`#qw}jX1*(hiJG1iSlP1(gb@><&E779ALw4rXn?)t4Oeo{CWT}!0A8SY6MzuO<7@!o%+iuh0@5iWNaZD#`W z1PVX}Mpy?I(*xg5YF#Q?W9J9zg9o4H8M1~AH@%yQ`w5_f*X1uS4}69jqSQqHc8ve! z8zybO+GPa2yLc2532p4)Kdmy#7Ap=OGopFok?pXV$Pn9rwn( zfG!cA7b3+-Q?c$8qE|V~4GF_X3SnXdwij>E$H%X1hN~V8^PJRXtyf5#o~(Mb6Fa6! 
z#LEFncj1);%$7S^L!`G4ytLc;z7BN#Vz0{@`oE|PF;`+%{`sy;?P>QBeK9q1O0BXD zvr%=STm-e*NyC80#UgmDnJ#mR>{v)#Cp9o*3{(F_5&pwR*_nT6gnRc~rT}nK^S4PF z0Mrno;b;970JAl<=|}O28@gWveWWqyfmys?EF&v2tP1iKY;6M&HVXdpyOZ^=$av3g zD83pbCmml}I4+&1`X5;&e<$Mia~{EBZ>tiAV^KNR>t z$RRWA$>DsVG{w{*43S*?sh!R*c8dkt-l#BgP@RcWbI|%WehJxIg-GzX*sjkzBd+F16 zzOT8bRh{!2yZfM5m@$^@iIH1@3yrH|D(I>XW%#dt=H!6_a2~ z>1?Pr{v?A9obQqfUgk3*q?}v)@ZArveefz)>Tq#+*bdpA>FPt)u!i}yQ;?fR%~@fI zRCro81Q|r@b=XcOF`(9ZL#}&D=xtmrn-8lj;TQ(1#KMSgM z>^r{PNP|Gneihcf4+M9ZO|T_)C>b3mIKBh_#dF6N*CDgXMX7)dpJW#pZ+rDP(JXGEBsyK2VX#& z=7C@`?Q+Ym!#4x>O}D;$&q?ZqoO%7G(~r@vf05i867SN%kW%3#_9J zaJx=7{GxsfUt!-2{~Z%uK`6Oh!QONTj`CMNe9_l;!!hWWmAG~WS47ozwi36=_h?Nc zr}iwv-NAO8RoV%Ol9%G>spTC$NtFprIgMwn%%OlwvEVqT}Q&4}57_?z_%gA42 zP9zK)CNP2c1iZD5X9_9H9~Bf?QTKZB1-c03RrMEClXhEJBg-&JSsf@M-*6n=x7Z^? zW3EepLcrg0P!B4)zS&){tNo7Y&eZ`aSe2k>zRa@<(0oW0{)p&3S@`oXODvziKuQe0 z8`*y?5=!1e_owe7hX3bM#7Bs!kiSEl{5Dxn4N)I4F^boW&vGBbcW!)~2WxW4+~eyz z$r%zAjCLStO5O|jMHyRCEu4WU4feJ;^m|hXNwS7s&6ZMX`7J}y=8cN#Vs>{VgsGRA z(KSZl{~tP{k^$rX!7xDK`VV^O*a2GkvL%W1i-^NG=MMY$&l}|q?^r_lJtuCLm|rqyFX=CwpyP~l>fL6m6q81CcEeIRe_Fr?L2lYfBtd-q zS*Sve8OgQh!q}>2-#AE%3UYTE*0j@)MP3yuaLZ~uCuE|Cph|$sCbCIwC-zUvv+$E#$Pp;ePcpyxpmy_9!rWq&MpSETVael{FHwVJcg(Z( zUD3SStNjM(A03wLj{2ozv1Cia-mN#f?hVKFc7}Q0zl>}7%GOu(9XrZzv7?s9JNvk( ztRh7;yoZpg7n9_XHr-sAOt&q;q`W5U3PnpJgZg8GdIARScAzSnut&E@j5KxQDV#*w z_lF6O;5uAf3|WuTjs~=I#@|(5yPa?(ZR@>J3~umkBY8$4Uaafc9J!OsJQqG%@Be%} zW=JjX;nbGQ^GPqicEJiBRs>EG>IS3HcsWqqA} zzv1TA`-7tAgzCnrqj5Cu60@mNLASiU1&n86H%E?gKbeg)K=TdtoK(sB9{Mmp9R?Gv z{AK~7e%(ySp`y0&P>7X}PZ7aS&p9J?2w>98pnR)VxfpdgnMPV2QBMr_2#l0)^BsZ& zoCLi+|9c9c49-t*0OPBYL`bTczk2q|LqmUaeCky@z;~1wcfRpY>z;b}&p~*8vF^|M zkomsOy!ro+6w>$zsxF)XSYmh67x;u52Qmt6#0p3&EEq7>?x3uIjmk-}0%X*GJRi2C zjZ@^VHS%rv=w(gDk>qN~@?z@m-DSC;D!c}kb2KX5{CwVHCEg?Rp-*$DyK)S@#qkbB z_Z92ax5JUIS1#4>% z3bx>e(3&$}hiKWLkSinw7cC6m>9#~XWD`mR{IH!tlAQKKVmrj(Z}+y#6QWA=a|g|4 zI7~!MiE@LN0q}hPso!j?wxC{@?rey;__f6RQ*U<4$SngH5?@d5^$uoDIbJTM1<1Uy 
za)r)AmOR33-m44rf>j$(WRRQABfX;=V%xYK7a6s81ao|;Yg{^@l`DJ{QPYG|3f^`1 zVhH8Qn%+ZoA5xVk&CB4ku5y&@u;xl|tdJ3=^u=IvxH>RM=a%$m5lY2Fi}R!XZx3L% z6*$lP9zDrQK?%HN*VtQjI_WpCD}6J|KTY@Ry|w-1Cq8QD*2@%jv_8$_F*W^`j$Dxo zxh^>}sZEbb|02C(5<7#}WlyrguYQy~@9!(5h)lJwwmszgtB0E!Vv55>s*+htJEq$} z^3khL=t2!h$)s~uQ^rlwbAa=v%$k#xt~lzHXM8@}{e3OGTlVe#usdNVZPVsQ|0&NZ z*3_oWgVlBzYl)ak7bS(qULD88@rphT`t9QzoF1+_|Q+~;_M~|Ah{ad!5n@N*x+v+ zvwV~P4~E&Dz)6)x_+-|*Xf=P?N>K;?)gUH(7tL}CB;JW=W9e>na5Np=YKyp+F)6^oTB;wwj|mwc zPd}>!R17SY?!~q3LqNG&W`+^#KF2Mw#v%AKmHzvx2`?~1E+fr6I(Or^a?|qRq=G%P z;|_}3(yixMz)feEMe>UfVy; z4(roS*J@{GxG!JCW7807E-peJL1N#Bgx`J)jgykzKQc-`vL~5T^(IAMuKOlItcf0( zZ)kHCo4~(yv^elCT43Z#kUl?&H&;#;>&zPeDm97}-2A)&Y;)%)JM3(JbdcJqc3C<{ zJbsz3S(NzXDtb_(jPPFUYuQn?mCo2cvDZ-D7)=~RY1U?L>dHmg>@+4nYSK_0L&9gr zC$xou;?ZN)2K${Z#ukJ5&l>_#XDo5k&$$PQ`Jv?0~^2A{$^8SF~7 zCPkp7d(ap=e&aAlI_-3`_}I7Yb?Q>xmv0U2w?L5A@F$fq97ZrRDKF; zT~C^q=~x|rANsTP+dM)wW?(*Hv0-9-d-}TO0hvcZ$(~=UA+%MNNMo94N@(3%p*;5> z`TWgU)J+1%B$+Pf<`3eKzlM1BLUXea1D!y_6?ai}(kTSK6)@?kZ zZstxqipCN2*BJ4i$;kR}QS|KyoeP5Z7Bkls(;`LdVqRVJBziiA@W%=t>WZvAH*R>H zCseWj#79zeuL3eBhFdO2JkBw1TL=72nvk(-7yrvIx)Pdg zd6rkgY%5hZ9#WtjFdO)6khH|MwG&L1w}Wz59Lg5(@SJ0!)U_t@NsHrNhQ_Whhp;oW zxvANE1k-v%Z_0c)0<=SJQ7f>C*(~FuEur=k8!A-CJp4v~8Gg;|R8v(Jnx-kaK5Y>x zZ^>oHd3`o(4Vm(28NZ;2#B}OTbB4MT&y{3tm4BHPz);p?N-@ckP|JJvW+NDouU=O? zCXXFe@iC>uV4~|h&EfY~j^csO0?AulAzG%lnsvS_POt5`DC+qMvfX)GvDy(n-+f(Y z(z?l9WQ~7?RU&83f(BNW^w#N1d0BF1Z`#Q9+Di~(pVYZ{=1^#_q?!yIhi4YQ#92-<~ULOXp*n)B5@heg={MRbb)u! 
z4(W^jpopx3yl$R)deP~GUn0-z8n}|1F{i_{?178t@3Kk6KRMlgn16M>;04URG=9@H zBK)uZ5;O>Bd7%lp^j6ATkbks=ur;Lj$3vY7qPC z;A}_jq+cQItL}p@P9A&YOb!;c6iReiP*N<{D?GkIc4o694u?EzeWz%ew}6?m4qZp` zM7Qwch#aw4sr8i4d&AUmsLC#^|7W6acj5%SU&@SP9s|7$ZK4$`$3DKnM{{S}wWkUN}9vT1Bj>H#RJ-xG#-H}#?kaMg4 zlo{B9@|%-@WTe*F5|o+5+=bb+`dbC>9%gRJNAgGsQXMeog{koQG}5Ng#@2ju&UMJW z_X%h4kDI4+J@A@F5zc$a#Nu8?NlptjPCvbyX;D^J!i=lI;i;m=59P;P9-`6|55c#W z!!dD`jf-qcUs!ZaT&!FO6Cd^_sGrA(;$f;5D40`77%gAa8yekKMs~c6kIi^v$F1N< zClfL636Z?&Y^DQyAf=c^Qq;UXvge&UHtR&Fj1y%%&;A%kYD=4!*{Tual$fx3+)6IC zU^n`6nu7NgTy8pJY)xhepB*@NSMD4?`X^68Ppc-(+@Ul}abjM*YO{ zFjbiI`|-D_Wd>I_GN4A2>LbcqRxn0sN%X=yHRE-@U5(iuoa36iL-V#k!|$7+;UQKj zrD$o*kWYy%5(~_19C76K(`o!$@Yck<>+tiu&rGRSf+x)L;W$(t zTk2#iS^1Jhj5V#Hj6M|Ok~OZ`+U~+k_XJV*JhtjTq%e2`+1B^+5zLFElfx?HOX=Lv zN{=N;r<6yA_ME$}o}6HDEY1n0BeN`13m0@GM!(WCxpU?-X-3|6=g6YgRq*jhEP546g8( zxzr1b&Fml$iC$DA>tKRqj!koTZ0br0e7vA+*foc$nFqoD+a%K7(BVm#IWd5vq21r| zeKxrjDnB+)>|B<~kv=V3iaw(eMaw4Y9U!K)Sk50HdGaL3iNO2RbW5?=5*0g+C)*rU zOCY@NEJshzImK29U~e?==rh7?T+IYex;6JiI*4;aOEL7PP5tBhH;X5o{Ls;tv`3yd8X1u%sF%Gk~rt5*HAMVni ze>c|Avf%${cwD@Z|7%I9KdA4<^(Wvb9wA-oy0fuMZXf@^3b>?q@FMZ(oJ>XX9KDuV zEQzBeMOaC%lnZ6F=Pv047hbqaCZ&y5;wfYIwE~=lt#I9Ho{^Zz@PH+?iTs*Fe_oUx zKvdO5S3s-Ig|aHk!|D{d!q@c}gHi-_>PLOstO4(YtA?yro}_CSahw7>He(_12QlXs znZT|=^F*7UqCA_A(>ejgX|FhQ6cXY(D57BT5+{*atju)=vMi z3~&7?B8a^3Wc3L~OvQ}od(>Gq5h~4?&X+O*=GBi7Bou7Sl*+=Ev}lI?33k1MpiI_u z^Y|iaMK6#QUbW?HN*VaK!kRYxDrh)@gZ1`o3VummhChWVv86cnV7 zdfFWaE54u|+aKowO>()(OyOJAvULyZroC#z`OjCn7slHCdQlCo?fAQ@h1s;Q^H~H< zUdGMVvP%u#ij)5_W0?xTfEFS{O8Y4=(MJ>K18)wzdU z2s;~iC?dt}7Z<48NK`u(zb?bIETU z1BRW*1gh4VOdr#|A%_~mar0A>Ounq}hG%X&&+d~+7!Er#_L=IQttCN*;shqiXO*CS zs;BinwhdDdSQ8@VdMS+c@xJGW6=m+WIN_yG|L3=j$9J|Si}D|atVrequdtZ?b}1&o zuE4Y`!6sIGk7fn9(H_bw@X{DwRPJL1_gWH^cB6WFKr$@ghm$37>@4i-1?Q$gd8T`|QL+#$A;kB?! zQE>htCTstr-KGr_u~V?Xv6OG~vVC$$N1(e!`MWL1eABr{jRzmgG-T1wr5{|Yx1tFt zJ6m~DSYpt~*vxw(IjrJ$h`W#7`;wE|g-sbnsUJ2?d&0^Q`J$E;MOthl*c=}!-F5Kv zJ+Y)LY1KTqGkO7*x@G!(u9;`sx9SM<+Ujx_SEyclsmfE6N~hv#OiSL>9%>K<@(}R! 
z$^>*ul7;IP&<>}3>f$e`v&IMiky&?H`awr^4Ps1y9*PrdQor;dh#;Dqml}6bq|r5QH*j1;C-}#|+5g>fku4W* z{v}N)VAeISnJ{A!uc|2=IxNXbiLuEJuyi)_S3&*!r17Nbr_LpP?$ti6UJzO1=9c;1 z8~3qi(IaAKHfifWF?b@e$)w9#LJ7of7EROz7`ROYS}{3k)vqTCMEkmkrl5!%~7vz!CIR-B+4w81B+}Z=;-HP{d{hau8YQ4b#agN|6=bWht6n~8Z;B+1{D#BOhkad3Rz z;AeP5vp^PK;;U5U#PwBe^=h298CHW?7ZLUfBPudG1yH{k&=H zi757pNlpC5LYEe$)n~!{T~pO+e3iM+9Ut=D&MEBxRr_9>2ALo**I&#mLE|rwdkGWx_maW(TA~tia zSY0PZ-$q-C({lZdtH8*gbqcg zkw1TLG(;(!3Z~=dTyCV_9c}vKeVv5BZ@{$izLn}^&-Ap}6A+ENv=Z;+dGC??>8iN& z4q}#u-Ei~)tKu6q^a*#^kCGGQ9$*okL8x_2^iY3=j2y8e2M^X6EkX8^*-cxD?7cmc zIsgTG6G%#L0h4Xpvd3Zj2-`iX13K@jGjY3FqQ`DB-DEJN9O%jZrT?DFh%Mk0Hs?bv z%llR6kx^9_O7|?MG)vXCFz zF&#}M<=uqxJwp-)lK%HrDfso-kGIxSojk6V|5y@fgZW|}8j7Q%=rtCRi#bPB^R(K_%3t91G#lQA50z38z#m3pA$!lyi>HfyW zd~HTi*L;3Cct~;HjWvg;@}q87YGJ+JRO}?|{&*FUwBma|Rg>(zWZq+TP=V+Nad*c4 zwYE#Gp(l-YvI~HaFKtK%PsM?1f9_U){;Xz!tHfKks`8!R^_QOuqb1<)(30HdxCtWS zwP7sF$)=@;xBnozY9&Z-O~{oU25r8BhKAP3&5TCtUXpu0k-Juq zUeWkrS}e5Cay}(@Uk81-^5XwJ`e9z<_Z9o?ScVR8P~y41FyqwSVI3P8`l*r}d4N^? 
zKS!bP$Y@wfL~v~h!*K)DEN?D0NeMd@`;!EM`sPB+v9axtB2-~j{jQ$?{%uERsCMIt zDQJl+{Q`2>4?&N?bwqRy?KvT3{XDrx^Zr+R4!?*wgC9@oY45>rFP#EM#k~Z_f_+PY zgD1Oq?2%9F0W3oGPEM{gU;)dQ1x#ef_>(B{SeV#>PYmWtLsL|De@=fcdTJRX4oWUk_ZTA4X zUYCmJyti)Ps0S}A$NkCAhJD4iVOM0i=C`e#{WIklwjIK_I ztWDhzWFGBHs1YpIcUJD-sZosKarIl>jC!(%0OII_cG3!gW#`vrV_=?mbl7yFl6Xh-EZ=(Hx6RHg3dXu}&7%^ux zZ_udWYWLZ^-j(@X0!Hacj_ejoO61;+P3Q&GGCSr6g8H{mUH4X(Upq`&9zB0P<-mw9`@8sJ(^)D`-z$4v{bR&Zf)rVR2Wxf{-S0dO3uFinIr?h^VBJnqO^8+&6ghEvHg` z?T(kvA|D9cGzzkD#|ZXw27n8VH@=fUqK4k4`URFeJ?Y`_lO4skA{DvBctJEpOiU@c<0F;q+);>OT0HM<|4WPlRd~7{rp+dvXhFB)+ zi=VW2ci;quBn1(izn!2-#wjAMrlM0VtD(^3hzZhUdr1I)B>Pkc>pYPlLW(Y$v)W{$ z1B;TE_rOz`H_ak8qi#}YQqr#Y8b17$3vy<10PSXSMK{`|H%-sMDoHZ`{| zmTq=GWfbkN&VC2KUHHfa{(#9g^JzHu`oik> zH$7Lbi4cE9UmSiorTXr}+ylRZ1~M@0dSj0^{V#v#Pg17?ts7VQn|?fl3P{Vg)tVdUm6HrgbpGV%4~Kr?H*wv%>w^OXusilH&yCa@(sK{;avp zhV5KF{Csky#(s8Q4ySd}1>H?4K5OWQ&0Wiw06rc@4##HS$3QlcM78b12kMYAMmOykK8 zueV%RO);*=ok=*0>`nXnddz7;88WgqVZXLwDo|ch+hx)6asy>i`ld6V!1skR@5f(q zWR!JMYb4xfR;9?Ny0lvghcfNvV>EOYs5C}@l(dwkCusT_yg1nUF8!>~>o1!Lo724D z2E7`^v)fh5>W-DiPwsQ9ES^0nt7_PNXS$#nPZc#jB5{d**kSf`*~AaFw{v8TIw&~M z`;_W?Yq-#A07IiSD=!a?4}5g5U0ovNn&a0}VON7FE6(M`VE9!l3RmdHdI52H=E>r( z*WA(zlfM26hh7vP#p>1ly`Cv7O-lWKs|UZ>06N+3U}$edBes$M>hPzvsFGYn^S-qD zH8e`lVNgqW*XQ$d$j!Dci;4f`ngIgL3k&6k#e&!J{I3%WeX(PFs9lmX%D7-`u^4+H z1E}i!n5p9|jBbVQPh4m^#7bMQm>3MVk#IqvM9Dw40P%?6R8iV&yQQ8Q2-=p|*od!q^Ag zIMV;%Bpo+rXg%ub*+9pFy`}d^jzsefr^u-v5+{ z_0eu4)7%iq)b&1=5F(ET?Glv4CiO!-U^QDoQ8%23C7dJEr7Rn3mnAO|_;N&dW@((O z9DHT658fvh{7uK?PNGGha%+qbu$!i`qD;=FhT?!TSbx&l+eL+A9ZRs99Dn;7+oRso zrhGI1lw2}$(T*$!QLP!cwj)m&uvcB9c>_i5oLoktFT;PgV* z^JVFU_m8+nV5n{wh3E&Q#2RDf?dA$=RMZFhPs3ln`%4!`Up7##UCe=N=*E-27a-sl zCwa?;d20&$;OQ8nDSp36+LO+R^P;AfDf->5CmBOw4nD)P2T;EwLLot5CHK;Qk+C2E z)Wg_S)-x=4s^|2Biq@q)6+{qGpu5x$7h?^YswH+@v~`-gN8^8WZF=d0IzF*a{HvN_ z177PoB;;uc9TB-4Uw-Ikhw~($Ysbc59#7VZ#=j>F9hh?EQk0DKd-Evu&;b-&a8rv?y=d$`_;WSpx59@R|KXUHrB}D0n3GdHA2+3_|`bIG@$dn}xWCT?Fy; 
z2*$IYk`KDT8#M?`-V8q-n7zNS*M6x3R`bp0^A*7O3PX;Dk=-tY(a&dJ!2345{kDg} zZyavwp>*|7-_7(4*2c5oBP(v0lXemoRbTL2zYxU`*R7;LVQ*0{mpyhC$o2l8hcKKH zZHeP?yit-&N;YOC*>T+745o^s#DIq??-xJ4Y2n%ZK;HdqJq6k(>RfyCN2WPoj35_8 z^~=i0bnx=S#~hJE-EiOXp>)aLSd_n5?j8JL10i4Iu2zVEZ$I)+L@mLdLe>v+jy}qD z-$0Az=vI?4BQ!<_flhWM%;}30MzSZJ56G|S{IjP@A4rqmPofUpeJA=~LwUDpU%!D* z9@mJ4aui__bf8An2tL7c zh*`y`t*(VB8ZO;?(EV&h80B#af;qsZYUN^cOSP9Sa4olvq4|x;3bOj+=M1G=(cNA# z>$n!mjhjOdng({iQ<}8c6YnZjlra82{~`0WR-CWkY2W24rDMcS0_S2e+xgEjt}W(I ztCjgMKgCg$ajr2l-ku{XV3&W}kgRN0}B#4U15)wp>v$25bY3Ak-zLkQ6B;!HmpS(KelqSv4; z4k%bb?mK zEc>duH59n#RsmV8mk!RmR*?Kt*m!mHC9&VPpG5DnEots${jHmB|_;qqOSN@u_ixTei3ltj=m`kZJE<*~yMEnPcSG6oxrMeT2{2G3)(uE3F z^U!ZdBm8{OWFXqgWt?Xvk!0krht!j|6lV5uG>)6f#C;mcdDjunh9i?T7-%SaMwz;A zXxwCgPW)!(pH+%!i-d=DEQS?b2+^*xiQ!zWL?H8GtX``tvVD3iu_)(){+Q(RT4QDZ(e=_RYnk8szy_(VEy z8E})?8kA)46vA<|S%@vj10Q%~A^sz759eEK?21HTfajDaW9UJ$unVRmo@2&|FOuZ( zDNt10uW`WG z-oeb9{$SAG7i6Ym)mSb4$iepLh4~^gwOD=XI@?Llemo-PxeRUi>2lEcP_3MxM%jFd ze6km>$7}42O03sb2&b)SiLx(weu&hsOIGMJnQo~0?rh`OjA^tgKETtG$W%M#m%rFj1%>yHiXfwRrSaA>*U!j3P@fT^eOx*yLpRWq+i2^pNwSEYldvDAb@t7=#|h> zUgyNl3d+OIyU7AY=4UteSp45})IHr>Q^WkdfIZC95;Um4z2=kRQogiGEz}U=z;^Q9 zu}$JqXh{uqwg_aI5v=ub292xK&U9f)ak2#$xlw7kQKD`ysH7A`G^<@h53l*TjCv!_ zz`lUjQA4>}ILQWqQJ%-TYp~6ZOOO@ao`^IP0i^32wt6i^wN>}HGEdpW^%mAD?V%2z z=+~nk-x=Piy@b3_0|Bya!A9(vfPqk?bVvk4&uwwEgj<2Kb*pngi;-^O9`N(a!Ts`#P zeAA)tFKV%UYg*(|$24`4S`E>N+Y*0!zL#(oL$3%#H6Y8kb@p2Ga+K(rAT$vXc_sz7 ziy|e}^pT&*#C*RKDCpDasPDE!aAikQ)* zGSA`w9R0ACb~xi~$bEF+G_GA~*bLz$VSaHp z?%#zs_}`rscFTJx7815no=T;wzr`#q(M^s0pWF?6fVe*?=nI2A0t-Br_%-tJ2GcP| z5Fw>ul)UM4;L}O0k$^D^%ajyBX9?;uM@3J1`pXbC)~uR++=A^aP8C6AvI|TvIaUp7{u56c!wVY<3y(=tqi-At+8}y8&H#2 zUNQ*9zN1dru~m3e*3nN#1ZAE(nP*2(r{yR%rD0Fyi0>39RpjI;@`5h8f{RVH*d8g| z7^JwQ<;o0%7FJFX^^N$0oJh~M!87Au574oUkyDq%@-$xj@36%(pl!UF&k3IIt=YoK zfiQ6D+|>L_+d=v=)OJKUQ!TSK#+1lHa_`rRysM>PpqRPO7(WQrxwTTCcJ*%)A2Dg! 
z#&Yog_9ECJ1?5O2kWE^>zYOL_W9~4XUWUr>}h*WAk4sl1@xKb;tep=8vVCiJcii$~WsjtM8W{A_5vIY;REz zeRSY#{@pkENUJx+;>cCi@1QI7^w*zf#~-iL&`d&UmDeRXh5IucdyGP{9=fK#K!9C2 zhSzB^ok4r}1ae7XzS)aK?5%f^>OJGs;d}b@P-ki#{6~aBX+Vf6p50p}rI~gFs4%)Z zy?S`wRaOGoiDcl!xC`qOm@@V(H&boG(*B#HT>f&2e@1r`uh&gbJJYJ2Pl&V`Gvhp8!6R|sVa$MLktEp30B@^ z2Gi%FO1|q&eR+fyi!?m<_x5p;`IX+V8()75U}?(Iqsh66?VMBJii5TdfGIcykq&PH z?k$8B9L(2NAYQ}DZwRt>}9oL_#GFB$kyYveITi zH06u=ls+{ES`F zV>z$_6xn6gFz3@j8GE+3)kwvg7pm7?J`FGnm4Jug>jC5t!r9*|TR*4;p1(Gk6+Gn~ zcT2x>9rnYnayi>D5z?GKO}%h@(A`1F-(Q^{c^*hSUMUTqZf~iVv-ay8Z97VuG-54KIT2t3MNx)2f}3uAJG7obS}c{&@-MdK-Y|v{88Lw$>(A>v8*@p< z#ugI6firrpzVxdmQjN(RE^L1XZ){=6P=E1m8p~9f*tfHhow-5~`sbdPXQLtUyEJ~9 zHOS`w!`@rPRr!5;qjX4j3L+BH-66A`E8l;i#5Lm#% zMZR-^KllH?-`M-+obz6s8$VnK^Lgf+V~lT%XUbel^-XQI7+?tk?h-MXp&llrSU{e4>R%a$+Pd(NaZYi z%C?#3(;u$R6&|vtTHo|}FP$>}z7oFN=g&a|LoQP7JaN(^IwB7-0jUhI zofTx33>DBPI8~TH8c5p=p1Z_}MWzjtyG#ev#C+w&6zoXd6pNZ1lx>$Zxrpu%+{j#SP85nM6P=<8})r6{fH@vVE3tq-=cVlB=xU$$Q^%2aBeU z7d3iC2k%0gGP+NE)AA$>%C3??$&KElB%j#17_*Q$!t`0_+sgdi_E2BJvZ}9X1ZGuu z!M>w~-GF4*i2gt~ZXhQ_O*4sL&+r!CgUdz+d7#(Ja7=vMm-yVxj!PW!IFqEpwB}tp zm70BW=MTSD<>(y-KVisC<9Pu71dAxR^?w9aHy+s@!N+IMe+4_a^2+pYG6C5zb#p+r|WXSFI$-tXH`Z2w+#4aIjny%OzGCYb&#N?30x*H4sFy zq9l$u4)So~RUwHHH)1J&2a=WiQmC#GfCB`!Y>NNBZa+|8VHoFkdx4yOER>>E*4w4e zACG_kVSd|)?|f>ZzuUl3*R90N`lVsqnOXbw*R#$e>}wHQ+LCBZPX@epme4ALO?@RJ zNxq{9XTW;B5>-h$n_bwNL-=kqd|dQY7Ji;T&ZL`!B^f_gXc5J}Y$AXRfB z44i((G|_p2Qn=q*9LcoC^G+<}suo6RLI$me{nj;e3@aWNlQd=!gxI1S)ldLnDam~a z5wl~k8KFON7BOGd&8#u0@e_id>-4x!l5K^H?V<9WhcZm{g7 z%SVT5@!8+au%MCZK|>as$~`~x`3qgYsByA$47o+|lPLJj49|}~@r%;eZ$J?^R- zZiM)=3LL(nL8(wZAv@iG4-T;(Jrd>GKkEHN;MeA76H+%8nf@NgVaS3nY40R}TG?f72;UTSPTH)K?=9x+af& zKAjg|yud~*z@gm2oL840CVEp|Pf|7v#T`LV~nV=mv z_M%(Z2B*_sy%}e*$uThR{3DMz|K44;=3hfB@ShXRYBAHLVqFwjY-)Zj10>WX<1AncJS$F{LxCZ2woWv~dED{eN6vhX zK85~C$Xhs?hD!**ZZj9rq77kNprY{BRRN$g&#!K16`IZ^4~Sj{g*_{Ia&~nMvYe!~ zvh5MRJ;skU>eUP7Q>I3)739ZC%-{Z3;@!$Ow zdBWb>4UU&KkVHM}*gZP&HKA?tUtf4V?}LJkb^Z?ZIQdZZno57#4g0ozHE)h``y~Tv 
zhp+K{VBYo3hX5NurW1gu`QE=urWfeD<}#)FH^Pv4OrXv@3+(b2FKQCLQTkS(Y?DVN zyHnwue-Gv(^yJ5ENe$|g*wNfCldYGT{c6(B|L%_I1%TskzCC+>>p^GBj=G_>+hkvJ zZt-C12d%V_Sx~%K@U4kV8v0Hi?CAL4!l59gDq{XhVMgPa zFSL)4AdZj3ap-NYzZJ7D8Qq8>Wpbr2gPDvzPYT#7#lCkQrwZ}h+0?0 z=?jX|Jg%Y!-4R~BR}vW&DEfxnBIyazoVog^0K89a65FTPqV8|awjSFM|1&q^U~aV4 zr*>CIg7d`Y(IcZ@ydQ6-iwJH$()L?PZR}=ujX5Nrz`dusn%gsSaUZhrK%xMfW6ei8 zM>#ewK^kAW7MwucV?=V#(1Q3%IzA`eMtdmbTwZrucmBQz3N@xU*maI&y&-Y=fv)Lve$z5<3MtUa<$Z z{VbCFdV=;b*52i`3JU7;B0dk=G--N6*m8Y0iud2$$+7kP*_DdA>4e8GIaLhZCh93| z_bR(mDM}2JJ2?p2qTJluej))HOeqat z+C$(+Hb3aL^&@4vIcFPc>pi=9)Mt#M%zm7`H4^$Up=Srp84a32+K3&xNC6Rb@Y|pO zX2AT*sZv$X9gIQjSo}NL8-dHJ(b^u5n&X)S_qy5U$y{s0OsJfI0!AaEd%0=YgXjN} zib9P#+aUHM`v$f7J+ON{uaku)fb*?2IyU3xT^O)%{QK0w73TEXqCz3AG_-p#G*@)8NvtWcuSD z6)a?1C-?;$?^~>bHV?`IcdNf`Opnf;!7oydK0OhZqz_Hu8ZW3%ewt(~>I{d~bPD%a z_%6#I1(wn-b4TdKiHhCaIC!TpocD3q)#v`kh+oTtb)(mIeACvjTrhYv3%-9)jbFVnbr-z+lr8nHQm9s4ivw zbB>jJv(m_=nTEA}NBDc0B>bm{x@7-+dt|`rC#0jku4x?C9pRm6^t^*y>1l2f`{Mae zbAmvs$e1qQ6C`djHZ-<|>Md>9gK~~^G+opx=BYkPDoUk#4aHX+MV~g=Hff5lC>?GD zm-9W#wEoRpcR(9xjGDp^_xbI76}Uf|_RV9DJZUqJcWb5P6BIUm?v1tyX21r>tE0J- z{Soy=I41!^e7Z=XL-BSbOs6JWlb2N*XpJnbwz4{NC=>NfTNM_+E%6MIivuOet>jqq zM-l`Z`SIEsZ#SUlO4<4%bj*@ulW%w(ZBlY^=yBd=9K`0xVJUk+7WyMd&S3MT$v%e@WIkAV|F*2iO>Y-6e72GWFOG5>U zxwxtZ)I|+cQ<-*WLvM+=Ch};12-2GXx99WgcJG1)8SCVHg;&w^ZjD(N?q}Ub@JCn+^CPv6GNygM6H9*VT^WT{-dnza>JR2*@tujQM!x0`#5MOq69t z%93S9F2O@W|67G^&^mtu?~e-Ee@=O^dcs}KnO3ODxn|;>PJOTj`*A!4Q$GEnEH2r3 zkEOff@o6`ce%fRGCG>ziv5A?u~|B)bKmx!OG)R5 z!3iFY-^e?yV4-EQZgP|56SBQuHFQ)&@ZhDuhmx!YjrY4@J#h4`?)7o_ID)*>2aGLS}emaP27wZiAI`H8q=55`*~Blcr~ zm4s)Z-Y5~Q-*-zx81qUR8p?Ls(yyN6Npm_`nt`uj}mK3%qd#&5^vh zk~btj1X7k$4{beQ0?MZz+8AH8(CcMq3AMFAHOMsCth9Y0=9jqUJM*)t@|MdHlyFV1 zS9rEHM{1mBbcT@s7OUB)lW$}fQ;P8v$1GBE8Pnmk zP*RQ2vV*E(=(N0-QI15!yx8L)2%e@@F~!^LbGupE?NPsqJbu4K3*&U?msvGuX~aqT z=X>u5Y>>bKojf>}`0wew5SpA4UcvuY%Hw21w&98X(g-|@=xK4gW8jN>))G5w+GNWc zo&oVKlr>R^O=(1&7%9pu{q4uGUA=bk3r>E6iMj3p-jPSsyDz?O7D)~_@-A7rq33!o 
zE!WO=K{0-P;yqHnOyp(=$>3;?4Sn5|0{psq%{gLK7u$5^czL<0*nSypYoo z)iyE|P(&qmDE4%-@|+n54=@}oW8kWgH@55q7C x8lZ~IYNX}Q`Y1l< z7VE8COjW<&>Q7n%Mkm?|I9qoC)#Q< z)+9H@oO*c5Cm}xN>0!k9-idci@zAay%y zXxyemHHo03Xec*TR?;WumKX6CYPC*NroKTsrUQ2Ct-2WPuqY`4Enb2djIKbU3|*o@ zv{$XeJOjH^HOsy|2Ql`^qfkP#^jQhNNxB}!-bYJ&CEmd#jLiozb$AImQ6z)3nAUBl z2i{y~{>S$ZPOawqY+NoTy%QEvgf|SKA&|Bfm=>MUf*JjT!50^DO%4x4aXB(sd+UdZ zyKE+J^e}SZNz9#e4tW<*`zEHL@>=m)pxqzpM76gKtU z*t9-Aw8n~(gB5w3C%pN;_~9({f#GwtGlIuwmJ|QI-w=2s^Gde#&Hj_jCuk3dDCG@F z1OAI2em7ACeCHgv6b_Z79{n$#=kGIvVVaMDE~ntB`2( z=W4XnNl%vkd&6B$66NcuooyBP;SsTW;JL5pADR2vmngOOpCA*Pzp1N5+=RZkpT`JK!+zii;;FO3%HNWFN zyj&f*=YAkq*mQCaS4&DlWlFA6+xi`=R%6!u6mGdU?zTJ=G^~i2*k^T=8v~oEJtHye zdtK|cFrH=^(Q9Tn8frQ{c?Pi;kq*A*he1tLg|PE7Zwal_R)rTx6`?Q3%~>#}`t6x~ z2sNO}Uo{Kw2n6aG(UG2-cu*Iavfr>>OWu*U@#^7L@J}fLuA=V6+3^09n^ZTi>{Er+tXjS_i!+9MJs{*325nLL+ipMq&;l)T z7_vAyRup=VSU6FG8ENB4?A+zsMwxhu_benp_w=(~*U`)=6OU##b=xuME4MQDh`>#S z%WUGH+{`f1CL(j=1dJXe##VBMXf3&3RP$2~)wYZVaqUKkp7BMY-y5k=%#%&=C@aku z_a?Af38q9E^_I++XgV7$?|zk%x6C0zRfiYEYlaj&{5-y!NVFK{9Gr^hN$FMLk}lm} z?UrxKL&`X4qq}qT^r>>bZExhHRL@9>#V%OJ4f8QGcV_!AQ3D(9C|imbe76|8q_k$q z%$uwrlMdY6ZY&Yitj_5MIC;dT#tlYHS&Zi2IPAkGi%y<-frp&Z`Qli-zEMw+}+5*jm?7YCx;_;uH_8$yj^?PqP)*+*Q@p+k?j`;)R&1339G|tUrL-SCL3T^u*`QKF&C4?!#&E(1KRQZ$vS<;RcV9;jMTlelgUMZX-w<8Zw z-2G=oo2X8uJk=lMe>{U~@fiFN?int9!J6)Kb9Fpb14TInyR=>%mv}Q4k6kv=?e(O+ zxchtG#>JDPxM7D%{xl4YxBja!kN*iES?-DrOV7UIcw}rwB%Zj?_^0=JU#~poomTgI z8M9)gOfnpAziUeqDYW;>-bpdVOC(wj z6Kt2*u0T>RQ|gUbX}X`8M|0Tlnb`6{rk9I|!BZA5}JWBjuX9%ekdF$W>+@oGyXB4$>uv7NC2`ii)AeTr=;Tz#$& zay+Vp(o9m}GSlbIHtF_uj#WZJBIAO?9?uDFg6C?_+zg5fiSRl)IUwurX>DuwY*P-q zS$>tj9GGaHte;)_Pn!sg4a;(Zq6bJSPnEb-)c?)zCTfl=B!>b44RCJNHa6HSkhMUjolxpYii0!t+v z8L5rWhbf|49C{zKL1H#{1YC;%xu)`YMR*ISDheK(iB8&;1pK8FUwi^59uF~u6Gb#UY zr0~wyetM84@aQ?qO}?y$;GmCxvb)II^?4)r%RszIx#Ah_Ftw`d9aO%nDz)$@<9mRv zR^x(&=Yete4Db|XuuBb+Vlb@@eyjO)2*J|lTA!y1R#blnKn#>Tk@r3LDU{2KA!ZH6 zOfA9Fof53q>(H2U20aU#fF&73Bk>#kt%CJ4=^V4~4a*g9A@vMJKIeQxD!dpU8O;^= 
z3={rYC-#%e1n2xzRXV9-3yB}4-C7TymW?t-8TICw&_M#Ai{bU|dI%)>;1eQKjd zEVeW$-19A{lG{7K$(ZJ}U+%+H;euCPLWd6N^QM9=;W(+0bfm)l$}orE7p)Qkuy0U* z5HT)^(y83aS-AYAvpgim->$*|?DPj$b6^ky7As=o)1UHOO!50cs&SHVhXgZX9eQmB zt^FHV9i%pm@iC8&K|vzk!6iuQ^kBWq{6on6-PP8QZa@bWTuIY}n+pIS1e_3C;u7Fx zy~#VBjGX$PVA9}_>(PPm4GNMiJQn$`(o*oKFAa}gbJ?i&{y0;Mc!~FOD7U8IY|`5Z z#jIwvX6hL?qbU)w=2Ay5WT#XDpg0P25Q1r{kmiU}1sh%oFf-FOYAn>MRejL?g=NkG zTXIra?NAE66294|wUNmr8m}AVqR6}A9QB=aPL^Y(lD*9&r=%|neJgl2xP?j6! zdbmIOShjTBA+g5HLRKZj71zsPc)biShRhq}uT~W!$l%xRiXsNvgInvWGun^tGVoA^ z#i~6M-Ku__D&On{FotkBnddbMDMnP>ina&%D1*R+Gn-Ew0sqV8WR?W7GUdfjZh&8M^@l2SL; zud}a32}4O>*&p5#k{JEPWz=J{Hhm>m=+3(pP2XmBj-@G)D4|&gjf8hC;&#MZ6 z5OX2C11p$fD^d5zx@%Sk+5(C#?VPK_E7|?x;_v*VDVfIj=)V)YwW^;1AZ@rEiI`X# z9xszG*_RBC>F%w$h9GRpd{@ql0p4>$t80}oOjq5ESwSv%i>Svds6Ta6jgn#l8uIVc zmGYy+nZ^rt`LnUG66AAn(F;-L3yys@)r^n%3Kv8%!%u+_Yxp247!t`Q4FNq5$XMCO z26?929Wp(CELcsGz7pP8*yoduDajTRJbanQu&1$dw>w0l;I{VRh~ z=nph&zl`0P%#|y|eZ2yW0p$|&ieP266Pq6P9}cfO6-V;~@`&0`q)Fo=A^0;$+Y_xM zfaL^&3fqHa-8lWWfV16yhae^are-@fj3|LO{-Xrm*TjA!b+AdmjttnC_SIV=CjP@R z0bLGUZP6c0mSEbH^waeMS!`AYSPuffewMEM1{El57^OaD>!Dc7yK!*TeZmpL118-m z_-m`gt-i1mz{#$+DIz=dQyj-k1p3EE1p5fFVn>9D*wY+9hEbZgC&H6KeoUQ=Ku!>D zN6dQZYzb}`R;kI~vR?4zlpBhuX&D6MQV4{66p|=2J9C6NupS$&Hq-V>s5WbupDT7lw^VDe*ew4TOzg(+^v6^qOu-#j#t@fRkBp< zL1^j2;yW{`5?ADorsEO(R?OO?R{RLiiDW=05-`|XstyEuGfrCPwWTC(A*gLZ_0}_N z9)=D;8Vk|h-V(na6H#dm$L-~MN20*&5vlws4|;3K-#%RYkiG(%ID^gsZSORuCApV^1mJu2fGgJ)_ zE1bec9{s904vg9N0IhaJYEvYGoR-2hZQa=zrH^1%Lf6~jzs;=5GDDN>dDUKv8UT3< zv6MUtmNseiE4Al=#xL%_4^|XTM~ai+ZBfU5J(D{8`{PhL5+_qWkPd!q(0{_zgGdqG{ z|0hbuwcF(I5jHZR5=_3a{`{0x5 zd{En{jXUSzHGh3>WZ+ZIakJp{)2jE)<2ri1@I`4ZkP0PE;Q7m)wdi&MA92oJFSNpAC} zW>%Ylmqmuv%Sv?CrAt)8gg{kjO~9csNr++h6(5kW}y zeoOsT^bMi$U!ip>hZQ7u8QY}=M&gcmMd}A@|?*Le%e8r^sept0U-muly zgbr`|I`dvhBP{)@H(-tJ3|r&<+$1}=nhzdIIMq*g_F+KUXYXoDxeGAC`Ofddn>2lS z^NS9?!NkAY8~c8Sb}tzEBq`KP?!8!NLJq@nJge*M3whezSbWCk6Rgx-ib&$`=>IK= z1EruY<9^G7D^W=2_Xoc#Ac_t0&J`9e!76Wip~d-G@R!8NC5b{ldvDx$WWID#p3zR< 
z7;eP9>2Vf7LZ|~|yL-5smTAl(Mh`AU&0v#jgS@{(Ce2g({9JX^f|bnHf0C&vDy--& z1opIQ!ZZls#d6$QEWB05{v^*S)-YZNfHa)|h+tj&OB&vXII$o>AKt$r(?<31rHfgx z*Z-AuUm+j%$uN|)9IjPw$ICFz)<4Mb{ni;2@+0#EOy<9eHf}D!ZBH3rZb5A4mW8$!QQfj05auA zW;qXDJU#*h5jH#1z1U|H9NkT0jH8;}bjc1w#+9VHRyKBbYP*K zzY%A@%haxXtx8RlI|50{8HDbHAM5b?aaIRBP^m&YIJnKd6jDl1Wf2&_-8Ml+28h#1 zzddQj?N5)6(gFNgm1S(&j4;~}`6~6XppVYc2o6NA%#yXoQGQq(K{r2tLbGY74anG2 zdzBc~OLue7JckIqCsT_fvE27I3}1e{jt6tmp;hYbTVmte|B_u1KnX#wz}oSSn0XOB zVU#6()X-$+$OA_qC(nf1HpwaSaxif7o#8KIW2>P{g z?h#=Wj&&NR;CXsmWrupmw+@(AMo5ZJ@W*QrhRv@L2on{zB0rJI->*oScjryGdJ0)w z&Lb&SM{*H^_gN=#2y9P8cz#tNNr0TAjTZc9Tx;gXSw4U=u0Bvpty#iThr3k2=N(3A zHN7?!!?7!)Qh2uuc6F0<)wW%GoK*5Jgl|5dMmF{8wQ-lxE{IQOoj@NVJV3MX>Sg~b zbNjUv4Uc7%@adPo`@h*H>Myt@&p=4jX+n}pV%3wn58UhF)MExd0n$C8F^|0Rg{G54shA>L@Du=D3SY*n7}E)1iM{DTZBmd(S)aFSCJG` zfn-TATGXd<#kdrl6Jr5JNRJr`330TIB2s-*j>{s8ntc7FLt43Ij4Ou%_s|DPA`zruqW zkD2gmoAB3Gl9@aBNkws8ctR`Kbwi&O6+9|I3wy{S^L8&jSo@N=mif7TSz#vsJDWKX z1AtHdP%F;A3U$Bm!8-{EBGQR;5NdD=Ua)03kWS#X2)H9TZQ>CuSB)(~5TO=_0Z@#5 z=EmH=CVpOK$R9ABzQ3LNSB^z4cwVXfXYM}FjSgu3`!H%as?U9SGk>^owtiuEYMe&` zgs{l5JOIzcpn_B{^LE8h7;;@B&Ej_lvPVZJ4Se@#tw8>ntnkYxe?#}#Md=dx{&s~+=S%t$%6-#7F|2JoVJJu0s_iKOg>u1<} z0OS)7g0%!fhK=U=CrLzI{W4agd3c~Mrqy^wVhbc{vwm+D6yMOK3pA!vY-Z!crvwlT zIU?n0E#)$@y3wv$cXKFU#|bdGNBj#_yqNww2xjDpchCV}_Qp$Or&3eOk@i;mQpE<+ zapyIZ{hN;Oj~5RLpWp9=-}Cz~#d*L_8()WF-$mFS?6MJ93i(XH^zh>#fJ|nS*^cQT zgc?u@f3IBpi|+lF7$qS93tJE*WStsqG`0q?*RPAV`S$6$I#qTOw8x~y`^g-K(zln{ z&M+5{KONC=sb)X{Mfbjzw1VYS}Q0{A?h0T-QxW9+YCq6d3^$nG4J1{{1@8#5b^lk*g?5Z7tm6CTjv1o2mcCKE)qZqGsr*z!c(K zto3Lw*qY`b%Md|}zJ1QNdG+Xb?y+hQ#XCKgHsf39UZce4Q*g8Q=|&8?kJK0^5`D{& zHNZ1AM_DU%=V@{t9vk4)uzouZ2x=3&wM~Mx(0?%Pt!$XTV9cJltpt4{-1H!;mx1J2 z>*oRJScw=A-XMRGC$;CLi=x=nLoN*Q0d5kqGiVRV1+F~4l_rGI|7|{Z(u$TKN5E}9 zjvFo{T_whVw#pwpjQ+F|}qFBV^kyHLUJP6-d!uuEf|CUDe z+Z%oEqn^_F<8aUpnT0CjzG7-C(=WYdQKW!H0SkQYiLZz6{r|HAKHlh?VwrbUOBw$! 
zsHJ`av`in+QnawI{aAz0cWUtu)G8uXyOFXG=lohIaW*&$oga=6RDOvX(S4rj4ej1u zjNtlyv3Dx{dsVn;rEfBX#`bL=|9}Kn&E{h-T;E3MclJq1x48s8mzBhzK>b+r`=YLI zMujjsFG2`30K%yl%oTYzI4mod!8U-0LHCEuuj@u)J(~w23}6dIri|C3QpY)C6~s`H zix@VUb;r50-2Pv^lF)~h+HJ=`MyO`Z_uV|QI)9D76jpRa>8!nhl@IyW2SYFglP^Wk zGxk#%@a}GGC1#V6Yb@)B(h64`oDHK_;`tk-VND z_6IHgl_-5@ii(aS>HwkN*!Bus3kzO22*BmGD>-6QTvI(dJDYy@?FSCwMmbDD~Hz$Bvrta(FA+cZWX5$~^rC zUAONPbw;G{0!RYpa$@D0;^3U}mZ&vs45!Y*G<1CQjS*_9SV|SeEFmi~A5d zyRYI3-)x`I9EXlu!%;{%o0ZRMibWY}lY@UUo#$_($uy^a?f0Vf@G>RtJ}!~Vz=hWS z7j=py@-@L8i4x8Qz-#vo80~-uWBb-NA&l=2Lj;{-k_dj)|DX8P|Jwz<-VOK<{5shd zpr`4e1NN?THzyevf;lL*T58ysZ=dwhq*6`nlzNr_JH4p?%$4nas6E4lf$h?jblvk( zeN*Czv;q6UbpIZ4p+jR0&OfRiu+#eAI z0#~XT`dO6cJ75<+P#r2>?0R_~BB#b0X++b+7HFCF zhdK|Bvo>o2#!sp$d0t`DNoApIqCF_KJ=o^WdMrJU^Lp&CSjV>XHS74}W(WGVVZ9%; zC-lVQZ&+-u6&lU{;{V{PP8ZsAz^d-Y{8}r7kZ^J*GUmhdsL%6tF7d>Wm}i8z=&2~d zc^99-g5HG(?W}tiJ2yw7u^a6eydU!^CpNt2v$Viy2IQs{Inu}m%5_QLUdl`KaFm;Z zlU(S_5&syI1^pl@X9Qj{=(xF3Sm8n1qKZM*##8O?ex#IPb{e1WDoU0nPGuP1LLsH3m?lITH;1PCt1YkwUS z6kZO$J*_1BetNLuuYBk8kndP;9x>}u@bmu~xbpD+u_LdVV}J;4q`gHM4T3`VQT|_p zZRiN&%Aoyx$m#u$K><)TNz%C>4Z@x%IRgcbzZZHX_cFqOJH#hmkNn{WgEf}Xm3b;g zN8v@K0xEH~{$=7q+`=7u54*Z7cUbM(2Zf3(kF)WZ11E5hHjK3**Drl9(6|lF>7GYm ziA3uL!=`q50gjgat(Tdh=pHE?`bi>wS|*!r ziUA$I3CL%wST3(UWOmT9;%(c!Up2BHh}*a;Mqe4%`*!~cZmh2SGKCYOkM3@sta$Sm zUpPDsroDNE>}PD+6Sw7CpyK%3bxDWCxT@s~m4ixHn#8pBzA0?iIS^b`^%cIl=K%D# zkUZebrs8ehU+S*<54Ai=L5mvUV6O?c-s7mv(wc$Np5*pnNz=KNA%`uM4^XMfIx1G3 z($zWHBY9T?Nn-3 zb&#i8*t@sU)Z=ULa8(T6z7SuFK)jr?P5eB#_vy^$0>0+v2ua$h7-1TW#o^;zkk48VkCY!D8&uA1`AbM3E{;4t7Enim)^SI=mj5MZe6mSM5`+ zY0??=pu43PzhTgPOg^Bv`m2aArMsgawJC8HYcuA;u2(nzR8UUC6GysBDxhL)rEqjr>_oAcZgBjn30q6=Em=Mqv^vN01 zNg+7*NXSS4InaA)^I7aYqH)5lOz2POjmM9q8?xNNOPZGtNO{4)DZKpPh{zSa`-dn? 
zHC3X>Kgj<${(ftkww76;i^jg1x{e*&U5qhdk@FPe(q{_7vqW)&PJ28fEOMS1mpn!q-ktkCB8yY>r0TbhV6LCyGIE5T%7AiC_`LAgc~I#J}bl z(}Q0c20oA7ex5Zxb>l!E=tQ)36z1I3Zz2@zpr~o>qvgBw-PKR41S5H=zEqvTKNmUZ z9q}2$yjK{kKzAyRaX@zVp7Z|t$yRaC>`{@M+40<^4D5}n&C-;%G}%EI2Py|;vyG3; zx__tJeg{PlxJRJ;tO50SYE(n#r|rRcLbs|EJarP&C!g#u>P_QCL__L4V9966}dITJjP5^1MdhQdy3TB zSM-{wuU>G(IIIgHuYJeDW{J=O&cPtiq5@ZfCd=y?VDRUBdo2quh3d)wh#a4<+bBlw z-PHj}S_q+VQ1F`HL$uF;map5&HC(Iv4t_~_3+op>HZ+KeKLq@(vAvH&J2Fbxtg^2E zM%PpF&sNcfJBQLWfcac0U&ydZR00H$1x46jbLc;LI8&Vg_W>=0C#@}@ik<4(eEaN2 zdwEn`XKM6%(Y;y%b1oxYd=iaFk=!oZ9-tAHiJj_89Gx=Sf&Xvnk=%rPBXE@pPW*a#fZ@NfKckE1=9N(Wn z690}hO?KX%(+P2>O1T7()W$Np#(=1Q@`pDq66UIrU~DEvR6ek>wUfc7U{NC+pDQ|0z@}H z9dMTz+Ue;+kCFnbXFUtH!AXu5u+zCqDe;}dksyMbt+#v*>DDu}6*Y z{crRGLQ|F`mkpT-ghKbW6aSJ|L@Ta$c;IUT1c$HMdvy7)5_0!HhQbSjkWy~yup)yE zHQ`J^8-Nu{0o1&7Mu8_alUeMOLcJF17q>-6z8$K+;;_Ei7N^hsZ#d;ozci2nftp7y zwLxzLimUm(ZOF<}%M^taN9%S8IikHvT;Zy{>M?W!(QrnOr+|G|7W~g^^z^4Y5um3Q z_$QFg$NoSkXwq6KPIy>2pDBHtoS4;(@HDmT;+!{JmSd{z3qaIV-=4#pJOhS%ZqsP4IY*@7rLSue;+H{%vrRs#5 z>W0~8JWj1Aplj2BhLdTVjX^yK@lkxN?vAgB_ovEg(XUi-Xb!Fn~SwXr1GOK|) ziASIYuwU(1Ku!+dpM|WR;hs`|trtA0s`W}e|6EW+T*+P5b-U{5@C{4G9`_I{TYzv? 
zq4DdDiO~4Luo9u3p8a0_CSJKxx^K@^&jqFpgYbR|j~@5EPB_7N#l;$OL8E0q%0;I1>ORa768sG0A(36iNxG14t$1C1+5O-oV@fC)a-hMO}7oq z!j>E(;CkOwG}vv$5}?uaYIuw2`Q`K$2|rV4dJ$B{-#njwX;v8q=Md^cE2eKmzT)OYN|?I{0f%>Fn{&rX8L+Qla+N0 zKqu!*o0Ac^jUIRT8BIN7$?F$(fukkDmhb zBCBktYWj`d5qGTA^Ut%7Xnzf;APk<*bBCUSURNmgQBV(-POykE9xnkUY!vjYJHD0P zc)8tyj_`;!qAz9-e}~3OO=Q1QY<|#7w2n={q&eBOJ%ns4ME#`1`%TAF(JdC^T{Y(h z9H(3OEZcIRagVw6x+Qe8c%rkhpcH0q9Ri^JG$FqpsNjMqaUa$Ilf$M5OzxoMnlB?h zGBBUM;>!}T1-k;ddRDx$u(rQ_r;mROXf|e~X!?PsRbbszy8DQYyyW!gIq{9}piv1dy^PaDmd(}&#J z>17bK7O6;Ver+_l14*ESY;)RsSi_d+rB^jrC0BMnrP((}8kK0N)lZUum$~qMy9qXm?U>sVH$)I>WxR`yEk2g}(+8L!(B!F3;Aaz3W!#;prDg48Np0 zi@Uyy<{2dMONv_fA#sEjO58Hp+F3X_%hU%PewBJVV?Hh zOTClf>_zj9sy)9Nj-{%_VAVeivV3key_}}rQPP%u;*}+gA0-XtUy9`aYP%OvYmIzx zU?eRa@oEe+!|v<&*TAFtM$2=rV~NgCI`1yqoC||XdV$@Zbce_D7yi7n4s8U3oL;a= zi=Q|WtvH3?>}S$Os}w)lTIDIqq^yQ=a-mH?-z-*$%e|~ z{%wGuDxC?4+FW;1%-;=FKJK=ArSpl1zc2)U;8WjB!@@vWxJ$TKJ<{rgfN=7~YSoR) zpY~MX_|wE0N!a)W?+qRa@Z7(vZ_5=q;qP>y9{7Pz#J;a)oSyrR9yT1$vfHcC_0peM z`WmbmM@!%772mD@b@Dr?XDvvITODo+ziqd&GHKyAJ|a3ob|c0SU5*yQUF=k!>vkC} zbz+%Prwywe*jedxtsB4FvqkIFyO8mv#=q&?A`>%&`x9m-|870~;qiT477t$x&fp1) zUl#}A62}6|s*D2XhxQKR3tMOWA#La0YSCxLTpVB5TQ6qe6DBdITMP7;gUM?pzDauE zRVHuaet>uu|XtUBTk5r?8KR2L6nYRCH;ls^N9{8jCm_!qv&t0s-}?GWf5J}FA?xxC@lM<4F*$xAf)8%4fq)96~HSGWDXe- z&7aCx(L?4$gv@#-j?WHq0G&LHH((0lb*On6T_EXl)j899ozY5Ps8w~Nca&C3r{`7w(1Z1-MFy_#GGE_rMr5DSX6GLJ_C0&57~f#{(kvh<4zX-{#`rR$;_H= zwhfK_*6Camj}8zTbR8`)3|eJxV}_HuZ=gkkOey3w;5EC`#66$U@C{QxfB%cU z&j=auDm7s?tLL|yZYQ>dtRhxZadS%CcuOrJ-JAMB!O!&f>Msn3{ z-yQJZpXDk9j_~?PLDhrtMXdo3T>Ebg&y;cLv+O(`4B;Jf=qj+1bC%wpDAGSHo-3}0 z-Vml(J0z42yVz>F=rM5Wkoa8fR%5}L;SH@mgPH8as4o9n&xvuAuPpaD7}K#&->IH! 
zlur!UMbvHBYi`>4h(gxQ#b$o3Czh?cEdgG`ixJ!lP0}apj8eW@R<&nP_7fq$cyD^2 zA93JA&qCh4hsa&~p_xEo_({=oppxw#QJ)17B#QnSwdHV)JtW z9_b#TL1e+UmVE=BuF?5jBGqm+<#T9q(=yI^hZO6)AHYxXUw*JrrR{~CT1b`WHMxDl zka~jKu3HCwyU=6OEZLDeOl)+C#;&HN*p$9vRima(x~zBgRkXE3Bb=PFMyk{p4&JXq zTJe+7`o%ovxW(!2CA#o&E4_q+2Hqj!RxBjj9KpfMqmv1qTq2&1=%)Mt+ zQ|tOKDqT8A6RGM_qm+m!MFIg48`3O*^df>%0@4X2fFex<3q_DFg3<*EJ#X`3ecPdYyrIDCH-&k$`)nN zi!xwgM;kHy9}=`s9h7cd17wOrfMUB#wa_U3>)6jc8VzM+%KVjN5AE(Lj>Y*wd+xQHL`3Lrp{l! zP*HVs1I$$)sTN+9U^GRBI@oROXJ7Ku;4K5?=i+DD=Et$6K2!8a;X%gotsjvk#G2KB z+GIUj0e(#m{e%uP4LZ5{$BKl3j+MjSQk(eD#Q3mvR9ayxC)lK(V>e4kbBPLD<|hc$@cKM;fqpXAM=$^neBwKTSUZ$wde?<*hum9y|@KpCzv03VYvp>Ten~W9Q zhX>b#MN~nsT>4h+zt?jVI%J=RBducl$D#T$=6(xy%y$Ya&p-#b4l-Je!FOj~i(Ahw zdSj7>cV?ssjcmJMsBtNA=~P-(Xx?kFS+-nTvE33edF;jAH@9`yf7KS@TO8rg=C*vy zh!eURBFGG@7P#Kb1;rgP`xuDN9!VhjNSqb~V{22{ETg4V-*Y+fMWu0I+7rgdFAu>>Dkj@1D1XsId;O;!2#o?I;gCInVW3C(AZRr4KX?WOfc*=!oxR@zS>e{C0DW z=Krk;T^g)Alz#w8OP*0voEi32Kw&)ombH#5igH+%$WG{Ts5K5S5~J;|ml zmnIGEDIyp;j!5n8o)MEC4+nrB`JTA-NyFgR>=Bi-!d_>YIn~cwx!pMve*i4!n_LlU zQ@*T1wzb5Va4;ntKZ6Pqe!!a>Q9BW#Ll!?b0katk7>vWqlnrzU+$C&YG>-C_dpl0R z?o0_$#Joqk1&+Wc>;}rJJI=LNuJ(-J&!Gea83)*Q*Q3!50VNzeu_xN4H?6};Y6yGP zQgh_G!tw?~Q&HpmkX|>R7vmiA9SP1Vp2JJFxdp%Ml&p~hV1}X+o#`)h@_ZR=I^?vH z%S##DC(E?yS&*G01g>+z_rsP(HotH3vFA1dll_WnE?r~cr}Hfn4ql z5s2`i+@C9Rr9cC7*za#-C&`V(c68|~U0MjnY9AdL#v3zTSblV9bR z*PqD&AC4T8G@P)$n;{NShwV%}5snDo$s&cq_tW`i*SY=VTA=%oGgAo{J&|}&j@#{B zHPml<)Z_ZaH&#v$GoFH-y=UZ{@kB3LWr6cPGlpqPocQ3rDCClKNylRf>2do}l&=Oa zg4CS8tBKAt`x$KGvtl1(=_UL)PAEw8J7@x4yfgRxLUdouS41MM3j*LO_;;K8UOztG z&aRxYQtLOg*v%XSRx75KPFLtAq%4(N1QG<-0*XV#hcn&GC5@DdOfFcaAC53~xN6z) z+Ph}aJkVvf5Ahj_0zn8qQ0c!`&$7E*6l0C75*!Tjg(bKfYwT{O!tq;WYtfQjv3bKT zD!uspi{nM_B${P#kc+YT`mj0y8^w5gOPdbTQb{2(w6`==+%KmSm-*u2P$zD6&p*=# z_2c^Z+IM}P<}-Kr13=0?t%xi8(wk__H2JD_Z4dAsz6x1*_KHWG18~c0%zD!Jz8(R3 zFJ0v18^;d|{@E~1-rPY@H1@bK4SlU$y{)&! 
zfTMqLe-pR5l8Z;GzB>zj$pEwD&eg>!5NOfx!L*ym~37LCz2VI&Ov`>8AP~gadI&q5RU|wb_i=YuIS^tS=^C1diI)%G;|0Vai5mr$| zgHC_x(PWxHE%(gA;kM|k(Zy1?06$AD`aLoFKk9y)OF)!yoJJ=iERBBMI5FB$$$|R= ztYsB1U(#m2&p658EJ)`zbU{#2KqBY#gt{1O>l<(@HLp&H6fGYN739C`u$5JRXyOZd zqg};BR-DC|;ehBJA=U-8t!Y0@^1`rv%7Wc(Oj2V0Er=iIvU*f&h2^<5c3W&HN5V<< zdm6k_@ncj95g9S?im+6IJ2-k8C3Tcd7ZNv77r_tX<7gc%xb_|kv#lD#QnFC%9T?<( zBsYbC!43*G;VSaWhMo8#T(ZlT?N3!M7b=zZf9apmsshKg-6e7^L_?M>Zk4{^wmxhk zknG%i1jX8$-`gB~j7=Hf&9y{R7^f8EQbDuQ{^#$e*wrd%bp2E$x-F@ZO<_McoT2z0 zI8y<|^@`q@q3T?3K2sChu`Ul z-9Kz~Ph-8FMPiL;yJn7D_B1Qr#VemCa!S1id;%^ar>V2zX7e{_z=AdiDs*u!!ImshSDo=LrV1qPkxa0s4LL~L zbS=fNSzqCzvEB;jV!D&8xJNz217<*+W-rO@V?vaJUjR~6x%y1=jY8vq$;Tqv|)s>IKGfgB26_cjr2960pAIOfTchaf`l>498+;mB-Z*cbx+u98*Fc zthu9F<0_a&88+RVYPL`9M_l#?J!$4h*~pria-L1YBVo}=-;iw5sT7A-Np_2Dj!HMrWvh6h83DduK9q*DH^a{L2V? zgnS8Afi#>2#I;FJK>))nSyK1=>*DP5o+^{KtcIl)!m~@ts`RRF9)M!&3{5(8_|F+6 zG`X$Tc#|5=qTINp9Q@s9A5LUR+t!}&PLz5#x@?5Eez9;vvpOa_3X})>A@M~sbcY=a zj$#t*t5!ZoN=*B@Wc^AHefBPG^0|DGm?D3m;+W6tC05_|5)-@(&QH~rDX-w$-pna) zFPwJu=&+IU26osb^}E{cSG_&LVj)&-cvHUMz`{WR4D7gK<&hs!l|5d=I-XF>!R8y) zkb_`Ofqd`eYvDcWIr0tTI!eQDUB1j29al862;WuCMzjVnGQAr09v_X8%9FjU;W6rI z9)xjya>;CMgG1DZV3mNJJaJ-g>6^dZ(Q|hK=Hu);PUpomOGxva5(USqSc*-22HQBq zyjX~U+E#!^HuCGUm^tc_94AxdF5a?>?d+*{uEC~9Yd)`5QT9a2mw&>y$6vxl+fGHG z%HxV6^i$mrD$i92@(Wq`ba!cd->j0sjmOL2Kzo-~75~gWgA1a(HekCh&u^dPY)CzJ zV%|n;N6>U8H}(tliiq=k>eYjJxqE~G+q2NxHH)2a{*4U{1U>pbt8e#Cdm#IsWB!*o zL4 zC`ShaCm`Uhz{m96(mZdsm$eD7Qfd4!5k*%<<0t>Zhi!m=9L>VO9R=o+F_wS$!4s_g z3UeA+Oo4;Nu~SrU$<4hLnzX0Cwx|(+?~i@JW@WAyU6&^xJShYgT5;UvZqCo{sWB5W z4*51w;Idc)##bUdw>X8oU%4j18fd;JJxZ}&Ir!Le;xd>d6)+~uZ?w3kIrs6UC2~*>FyK25c zSI5NBn@n27_722IyXY|mPQEr)oc*;miLc>?U@YHjF>QEsYWJCspfFqEz^gC@xyEne zqRqimq>)D#`KMiB^J6m+%Lt_xDl3jFLL1RLt6Bl$lG(HIma?SztMFgW*k$LE9f26* z`Vh?8)OM4#o`tz!$IK1r1*&m}Z& z4mcb)`U^G1N<5UJvJ7gH;A1K4Vf5l$uv2aW+!FvN?!m^eYWc{B9>acQ(rWVh^>ev~ zTMd*{HofoFhUlPD_3oE$bLA1RyAQs%JEOiFD--61-*4F9J{$_C?$+!JdTp_5sf}E< 
zAs4en^ypFjybJy7s#^N2^U9>fA32+162*v4qC3;=09Tp@CN(WeMdtpmbrhm%BI#xr zST+<>f{%GY<~LpB`cYGqx5#ATwHqohf+y1MMxleQA6xGZXK_?nGqHi{0no!9$1;xxI_P@5Eh3( z9|NW=G0yU2lfSW=cwWHd6Fdt^r}2>i&B~1O@18&ZP%wA_mHt;>21NY?L*5bB*D^T8 z^1GMqD`PJzeQ?1fgM@e^zetq0Jw38H-2hcz5W8^vUHbauq-n~N!sU^HqD%Vhp>}5L zc}=13+t?I*P97V(5_#;KO=O6SlX1W~fG+LHyadCkkviy%@yDRpD3gxUo73zOq3vwV zul#K<*|u-iZ*VjxAzor&jyr8ihK&pIoPcd6wW3owzJ4Sz6x9sg#Ba>v=jGJxdUtWErQT#}av0bDxKgZ{T zFc%C_ytAZMIw|Clr?2J4sQhy+7Ian?>23X~Q~g+!pQ+pyVBriRET4|s#k)i8gr|ig zx-?$iNKG%FxWqmTU22wT(3UQq>3WxbFp&PlcVf+M}m0pEoF* z70&G|5;zPsudj|q2R;o`+h_v~r@&U6w_nfxCbPl27CB={&U!9H6PKxif&PdyW+V6* z9w*|NJr#%788uCjK<~F&-1&(G9JwXoq#;C}9!z38(eaX--*zv2&S=SI)?&S`h7R`& z_A_6ph2W87rTcL1?Iy+%FU22O(~ez?vv)|744YONuQwcO-uIprv}ghL^Us7P^Qt$l zlD(a_TOT#($C$NYCf}RB$WNcr_8Aj@^P$}F^gz>>^L=snqPvOg5!R?2bRMcH;-P4( zI5IE)X^ZOQr`*D_RJ=vH>r`sI&)crzR`F${JAyy=RfqA-on-qIUxhvHP~{lb;6>qK zq~d@T17gm7ZzE}nF}P{c+BBtCkuu;h72b*r!=x;_%ik?X^*PN`pe6bV^NF+@3GbVz zyP*l2KChM#Q~M^Xd{($0HzBqX*Pwkb0;lg!UbMth0ykBu95)Hu4eZp#6i>ebq}p*q zIM>kaQBbaRQk|G6l_Y9lt>~56{jW>F)x-}y>z{4&p`DNr-*0s8Wf6|~MEd$O#2+s- z+uvO64(1x>fj=@kS63<|6=X=6gSL;{fFsFGdOCeeP|uD9gHe7~Zio?0DpAmZxH!x8 z+X*S@znqZb;09gYAI8x6Sf%@B!s7oQY6%jt6M8jIrXw6^-qHkV4uLf5ETEAPCq`eh z{cnv#67W(c0xH)JA3!m6$pQwDqPdOHum)`JYgR++>NiYF(N1f(;_xHx1qAQhL%+ms zn4G$-q9hApHuM>LQ~QQV#yIwA17Fz*>ereureVo-ZDlT52#QdW+F3U0B?27=O-za9 z{!~0gOUj{+?FC@`4X{Pf*r!O@DJ=r*N;PbPLXudc6g9iQ)DrY+{*YCj4_qRy6r}mA z1=Z@^)hIUZ820SfYu{{N6YSg1lGb617>M{JR|h3wU7_(x)csA^sU~%dVJ01}E6KC3 zXpc2FN5A9nV%?cE0E5+lK{hUjG|7$F&iy#SVtT+AIr&2yy6jFbJHQn&4OpBWW{9=% z*!0dT53k8UY)ms;s9Xqbi_u{*VC(v!H@)Ww1X8*D#qW*)zrnkznL{a##C9`Mrs>_X zBBZ#3TK^DewwXiixdkba$4WIBS=gfi-r!mKwgmIM=1xX?uuYiv-hNZkc<~`#7MHaF zlW3XI+!{HleV5kUJuu=kUrB0N*&V`i#3+a&(%xI zsFsCKcFUm4?vazkVJaHVXcY zt9un$z~i{(95Y`|{^lJD0EtELu&@rF)D88k9SZ;0Z>9+id7Q7AGx{3%l=nbF8;yT6 z*YNLdK{OkX`Kdid&1AS*>J8@q=Yi9y|JQ-j1vua~CUz}eolZ8>pK(TOW!j=8bQhM~ zal%!m;afgL7^LHSCDg;fou*wuZKpO4&YlrHTezxwr0&|veV>P|B4cL*o(U+8?>{gO 
zrwdqSUmTbU6;M{%m*=-ro18r|5D{)GK|B`%gvZ>YM9!5vW`nJ!3B@|XrmAkRHFXGA zLB)fe*#J-Ctq991nQ^~&kH2A-gx!tIJ!6MkRE!-)0qVG9cd$0i0jGb5eQ)9Ftooij zf28SagMqpmuz>K6tauA21P5p?jDEfh5X8WQ#vM39c%Bpm@W|~*7{!>k+Z0_PNQKii zM8(Pu+fc?%$i!0SW$Q9ozVIQ5JvVA&p9>0k@g;-1@53%WYhjb4u=g#k2yyJX_pP6F3_yt+X1l2cu5%dh z6WpvKVQgFFDWNXD-xv0RZD!-}!L{D%*>7ZUJ{F(m&N$hl67$~I#b5!i?E$ql?Bi|B z7e&kIp&omOV-k(*5x$fsn90MYt>arC6n$f-BF>Haan;|D%3mtWw!o(b2^ktdcj#6Gi_wF0C7ls(kbF?Ge+T{VHPuiwm5snaQljkSL*%NQ} zr}3zH_^A;b_BomXm{PAw_0%g2Q@k^6*~5Uo58!A|a-SV+h@NHRmVd#@1X}&EUUP2< zJ{28g*6xOC8QR#7!&DEZKqKycT&%*IK5IO-G|8pnv}%FkOA(2%@67NCkDc%%!}jKF z39s_srSEsRb80!F+DioZ5s#xe24t{m-GiJ#sp(46Sc~3;De>A)1qpAcg~=#I{h7VK zI5_%oi1{g{^(MZhbs@)cMIfITOeGP*RorTUU(s*(Kz{iKwVq;=JB}l_3l-ZX_|5($ zi=`pHTY!B=)Ag3gMTY@-1?UT*U$5Q(#+i5)d^8Yy{9j;g*7H}YRZc(J&c{dThzsYb zc5PR{`}Jav==oeS+V}os>r)|qzC-&HPlzP@pVrOK)p~Lh^kGIz>7nATLS+8Ip~|P` z&}g+oDSY8KRzJ5_2;6?759_cnatE1Z{j%JBfO2k`&KG5{G+*aefbVtRWogJaebTMu zbVAbb#tN2-s?{3?P_nYGi!b3s1ZvXxoi%ebChcHDyd~=1bq>Q~*M?y*Qk=m&%)}9E zy}i7C8eAP@QtD%Z<)s;SK$qOmrPd@oD2bjFX0aGT7lQ77R4R&CtQse$C~%W+o`%R) z&S!l8eyow%hxW5V%$TuA0KhA+?)3=(U#KzqW8-4K}ok6~L27H~S! z2=eYjd8#MCZmbD={WaeOSkVb$%PBSGF;)HZpWH?q4NaY+=25$i5@R+9GH|4Et(j^* zXX;d(g+c8DAP7!Bzx|xF`qLTHNf;w-S^&}v<=dcn=L-JD0w7JTOJxM%lMxh{*{3ej zDSRaBZ@lDU>c7xm?ae3qlMWG5=-M-MC*LcTrv{cB$~mTFq0AlixAkSn_sw<>;Cb0f z(`8TiV1W7TLpm!jr)FSg@-OZE+#=9T*yXdhn4SRxA!d^IIHgXno30 zv%x!5BrKBx99RG4eEVBbMmypEX{-5<=j?xI5IHxK%=AlTa8t%T!&}dK{X?eQDB0sH zD^YKCUgXaRy*e4tPLfp(m<;ah^TTQLMlfCF*`M>XtOKvYF{r6JXwOcw*F-&~Kn!tM?VEEU zysIWl(3H%R{s5=nvB`~Y-i?`FW1ia=jwt9dbL;OD;V~@x#a2A^NhgX5YNKD6ADOO! 
z({o8LQ%9GDVAb#F5KhfO)qsa+L%B^Vta)K-ei|WqMZ>_TOhe71-QBtzq{S!BGVotH znJf{R364P2b6k)$hwzHbyk(n8&yqSsZWFvKwstT=Xo6Iwh$DX zH?UEFJI#rc^B)P%Wq%JgM+q@6C7(6KO_uC%Gy_~<1pCuQ|2Tgd?&6~K`19Rx{HiLo zfn7Y;lRxl<@*KolPq_=xdR1+$m0o5)V`mdok{78V^f|0bh6M%csu4X0{9NF6ehOK+ntbH?Hlw)_ZnqG4$sa z*0u-?X&UH;Zh~pS)^?MgyAc*K(@Lr7*eq5 z2<)4Gdro>Zfgti9X6!4mdz;-GP-bEG+D|iixaz$|(RASbc1=EmVyq?#T265R{!o?} zv~`dmoA9u+Vy|}{^fG`l!=)#g4PFMhdn*7G?u}LWjb7%?6%5n+71*7EXB| z5d85ZE4vIqk)y3pd;528%(X*7`0Jzi2zrR_R3O!EMEf*lDdlAj%ZE0_k-kCzv(zh+ z!Ed?aZdJ8xSB(oG;s^-;zDw-$iiQ6=kw7I@jr7^&J$klu4Rh=e4nZEor4;z1tidIsbXAt`pe^gY zMSZ;RpIkhh)*)atuf{xVJQ&IllF>LaEmY*kEyVs_!Hw+?wRTKrB~-oY~HF?>wYP>B2@msGGO~Ib^c%+gOLFLx}Y;G^DfxA zI+%Yd#pjjf;0uxjTbbYTG`?3E|8B80um`s3Uq|+@8dzfz@*k(xA3#F}P^6Ejl?>=y z&kt?%K#i#M$dkxixtGyBdhkZ9IxAdc{khI|D9S{i5gRIj>auXyEJ7X?MwN{%q>y)t zqXRm~&-b1}2ZB3_1d<^4e@QHzWih%pYpd(Nvn?zecJ3`_zu#gnH>=-i@i^}Jw*2$o z?BAkXcxt98e}tv>QR!D0`I(RX^$s{AIZr;&v|r@~0)PKM)^7f1Xx47w;}rOBrPOBI zZzG&P{-8AfFWPbon~tZ)LTZnkIsVt>1XuHp=}8KZLwA$k(A0Zx;2G?oQ=in%91e;* zq#wK#{7)I-m#%YF-e9+r^?g*PNYrO#w^4b<{NO8gMEM^iyfnW^YI^ z`C~4}xRi^Civ?bxHnp?&lAj~g`M%8hYGa(aSoCtvi0YgbS8Lu$bS(J19Prnc!0bq{5P+l!(w7L)VE}fmL5z6~O+4TybMk zGv4VTf*xAk@+@p|7NE0mpH>*KYP_WV74*c+Y!R_Ied&2_(#7&wUwOC*&knhL|Zqb|^lrGEv7+qCIkE);jdN zIR7!vkR^Wre}2k%PF?E+z||lTiw^RH@2?# z4WM1q@(-@HvF52Ym)vusHt)p5SIm9-)*cc9_yE?CIpqg;N_%wInOx4VfPX(n9D(#p z)XoZD^s2Z&>_5dv8N@a(N;}Np<#GFE<7T5&Mx5{ej-8}bP$`yevQ_(! 
z-FMcb5zf&++b&Ea%KNR_OTKCFZSxy`Q6$ZQFT{w102gs$ z#2vSZeg`3lP?D!{CjWAoqB+dX-sX2A{MN08EF6XbL-C)Aadg5OJ6A>c0PoZd`21#1 zS}#SaZc~PSleNnu-usci^Cm(@oVsG2e%rshQ_9gz;yp%RwHnGqCax6DDiXa3&wb8>gY6m8>bip3y&K4aA$s9t26i?PlYPEJ zsDgmKo+=VvFoSdY%F!BVg& z&H18t+buVs+u?ZpX01{xPA_VQAM4C7rCuCc;j{lW|7SB9uQI+{;Alur3yOW~Q=42c zcG=NRqx)^0Jj{~#dA4-b1BD&iAN|SyFvx`^(e9CZw1|FM>oQQA>yV$$W=4OEuXmVpitp8L$H}f-F zGd-Vf&KE!|R&w@*8P(B}^a-6J>ESivb~Qd%WxZD-F~9)_eoE2#-gbMvZogF?hTZEe zj4Q}##ECxFHhPr-e;j`G+EQ_={YVLPw)QxeTfj7DukpgYYT0gliwI&%xlxs=+*tqV zD3f5qIM$2_Rsl3gV~cFDEF7>a3<0iI%@+)Jr)z7Q4(;W&NlL+mj-S6734E2UYjcMa zIQ`N(AA~{%$c8unH^(HJcdBpTA2Y2?p`nh--)j?STZ#x*cD0%=h=jeglVr@B9Q<8= z56CUjd~e@;7UlrXZ<>_p)fUFVWB*fsqpEisfoe65D^9Vg+XHO3$w2@M@E6xKJEk9< z`b|a$+r$2l(GkDRiw!Q`01Eir2A~q3wh9n8f3tcmJ)c=zZdmM1wh;fQp&t4TW6Y>) zps|{7O1jeFn0+{tmp0F@&h_DOiUL*Ar~2#R+(X67va{;xi1ZMl@2KUn4b_dd##-&B zvcDu?M_U_wQbuf6LXJ{*TE|NyMm;gmBK7ek!_d=DPhGys`UsW4*xuTbDCN|sS?}el z=J5BL7u5mNtak4Zj0mXe5^n$r3Fp^ZCmN_8nnj8NlZfG3mDb#Roa&0F#E6(HxT!c4dhzcx6HgEJ> zc<)|+!hb~Z8p2-dJVY$&#KlP2hPZ3N{P%0=m@E%m62MCxI26J2_>8_JlUoM=W!(k6 zz!SGZS{{eUSXW8f6e0`96W=;^qE_G~tH|%|ReHttDB_-X{ue@rvN#r}jNVkF_A^8i zZC5W4SS**YKghY*y;K%T9u`m9wj3YNe!hMczGtxe^bE-XdbD z(Q&9&w>@Vph)2DO}3%jhh|RyS^PdJ3|!mw7pdTnX*iym^Wec7HiJj!H0M> z9MD=)eUEa+$ij~OUozgxl&UnLST4A|g}!7~|17PRqub_ta3pt7aQY#lCj?d1RDE5g z9M|T3R=gzFOJqSBO8=Q_x#av}*~1@af0>V4?)shVzu3vz`D1CmsUII_GME?PDVa=I zTwdQkHA>M-N%c=Ne(Z?4@|Y@cnHY+}TV`?dBh2AwFRZ%X`;bb_=a|nE{OAdE)f3~u z%Mv<^%s($bUqxkD~@`+(b!4QkAw>WiWoRYNcosBBl7m6mRgitZmR6lJZ zGTPK}wLFVekb#edq)4eCD6Id8lBf_58<>6f6)jo{LHlx_aWOp zIvI8P;~F=+bN(jv@TgLmFZ$EKb3x;WY?z4r_qKxkpmY31a8865W0HE}{1DDcvE zl2`dt(mrzJY;#5{Dhb6r@cvGY`SC(*fag5#gTuqCGM0|8#g)YFtqj$N@Ch|P&*!HE zBwsFamy92`S|;7BdOXOHK;3=Ee-z=D)t{>Dq)|hXr`~F3Kc_ayb}f+6^b_YLb!ln! 
zLHmb&&OLXJc>n$rcuC8Q+45J7pBQw*JRhP9mW{3tzj$UyAQ4R7DM>~bJjyQPFX zFXrL{7mYSG?Gl%ylbo3jdPCgh(H3IU+DzAG{0331m&J!HyCc3FskSu6=tyOLmOE2J zO>7kWSq0yOeyNt6zO6!kNV5Lq0s+0@zrhkgrEpWy_6y3#YEjgxahVcQJeE{dlRjH` zjk38+Y#cP|AX#ac8S9K6S~1=l*Mm>p^?rBq(v=}_`J)$dyL111!=E~S8pwt<#P9D& zIy)qCvrg`)u9JR=^;~v9|ND~LDv|Ro-UeCjtNG zQC%Q5?_73LOqh7(HX9`A(OROgjQc{q5eTPl!llrGZ7BO+i2WA$26{*qJ$JX}IB5l% zTm3fc&n3)ISBRD*^S1<{-evFTAu4jO6G3U*(4!8v!a7;M|5%b#P}ht{^`n+ix0Yom zdXg)kQdaiwR-IJ)2ITihHy5@STIO5$nii4zMy;OQU$%TbFTHE~OSNxH-cKc|20q4} zm1dO}d*JZmVm0K?+dh|Fqa(Dpd}xzU)V8u55}@yH+0lPRrLkRvO}#HIAnw7ag3ggE z0wk|+->SwXa*GnvwIVx{1HTeN9!m6fT3b zsgzB0CHYHtnU%(3+}o_{jee!8l{cojs_Bpy5c)0d*e$d!cIpkEjSlSrI`@P;0MS-e zdWEZ%law}BE1miX^Rl+RUE~FGn+7T8!Pvk7PVU5B@#jg`R8Mcy_V7*73*S$7HQim3KApxsqTsnxq`ci z-7d7`oYmiSPz!@$fRDa z;D1G@i=LlHy9=ED6&=%N&5Nz!WZ#7S-C*%vI@g~RPT~{Q<3*FBdT$5@on^KR=V1

          bnVTFOv2T?6xU(cLB5{_Ng@Kst3_{9z?jL^G`_6{aL2v zsvdc3LOGQj0u`6q{d8~N_4A&n_4E7^P}UmHu$=u+H!%G7Q!*EM+0vY|wHc$*1|O`c zSs9A&O{ zqgx*(-Ea@1@3E8~VD%_FnMnIeL!N% zzJN}1Kp*v>+JqY}PpXvO;=tu%eHRW22t+BKIdgkG~Xt<_^AX zUj4HC$!Y7;j|y5OW|+v4tY?%)55+&E<8kONNz@mWL!2aHYAlrG>=I7qUqy^PdFo_1E7V4T+=Nq1H{C!T-)PlButdQbn? zV`ozAYEXNkt;X6j71NTWGP;--_kNH*{#$~qMPeC~L3Z(8C}z$r9SMyc{9c|aXnspB zb@4K87_OXmkw-G_<^Z3`LoWBv4o`$5Btt!L7LJFfln`8P&2KC^Ch9Zbt$`TMZg3rYbC8t&~KcQ=A+4F~&J_wg zI$_OBr)hts>cIuS?vzpqSBaX7Ew9;P+O|UnS9#Z-2Yia_4BSn{UFY5SVC2_$Ayg*E z8?cZc`D(PBGNS{*NBfJ$!LSe}$nQ&`MYoZwg)i1cXS7R2jUF3&*0C1LE*Cm9n?#k- zK`xDrdi;JcdgZjV*MYj9xq3(DAngb+ZuX3Q$lY403)p_N62BhqHHcECdqq21zY}Xj z00@nvM_0u4cnBJu#4f7iL-yCs%C*PCIMEWZyU)vAL3mFNB+d8A)!qrbToAt;uRfDb z7RWcwttW+e4ws%gBa-=pRQPYtQ0>ccP?vPj_OCwoI$l6W>{9E&je%lx#VIe=%^W#SLY@?1{o?!g9`sjDkb^MQ{ zOS^fTy;X4A<>9ayo#;a~<~Cz1lGJ;J<5&ErYBd~d<#=`H|LW{BjwYDDeqm8U5F)bE zy*OVV_mIB$?7t2%?YkFVZ8>F9rOFilT#B19y+5+IO#b1})eA@))X?zxhtj`-#>7DJ z(&=`#c7Vy22VZLc7MN7>gZHqhbpMl%X_c&S87QDMpNwfg{gYjFVU0cDDRiv<&r|r5 zU!PPaZ<7_HBCXmzdY&pICLBEVGyQJ+r374$hZ-dF^S$!q>$oAy5h$b5kK}-4Jc^N@ zZ{t?sqeJo%%g&YHbqDWI?dVw*p=rMSFQ>r6+Emv%uQvw@`3Ix+LgOzF91!Wgep%zr z|0MJ$mCFI-GU{u4lDdJ1;rTO%)-xY!@a;Ht*1k<8m>VsO92Hc{sesN&dDR~_5ut2; z&{Y+96PWPscNy=IJ}4`P;J&R`106VyUTOdJ5i~kg)#cE~v;?K7px*f8!55b4lRwl( zj($c44^Y$QP_Iu8gm%B1knVi;JFx#q=>C3eNbLKV#Aa-8uw2-Iv+LiX8mw)`;wNWZ zKlf0jRTZAxqP@e{7ky~Or_g3pcVXDZv%$KB#F_ze#-BpHn#4?eO9-WZ00jx={!Wj7 zfxjYcAn_xY(yZq&}DZz<|u?RT_m4 z#)}7fcRZs54r23fpx+^njFj zcrcq6_>e5O`G*tU5pE4pp8R?WI}wDu@b$1Bso?rm=+J8d!%C)+JN7zX#}FTfbGD<@ zd*5>vlK21I|7Qk%0nN1?*6eJ{tQNg;YV@?;Ot5Z=ybhM{7C!?KjfYfnZup+V5Ii$- zp)O?}_Yj&DfU?p*bB&vYixxDO_fGgB?C+;u0n{r`nA6qiKCrr6C%4X`h@1+8`=|N9 zr&^z4R*x;s-zEFg=jlRaW|l(ESrQvq_Y`QQv{Tm#OK-;VhgmltVmU1-R=``v!}jlq zpxBb!tu(uy8mC{>;MU*IfrDOda9#`f163EW_3NH56|OAat9~@d;eb{H z+hda~7RddgWocvNx9E?vOv0(IOn<2d%kq8k|NS|qOrmZFezoomI>6Xe z`ET!<$>8Bw3#@n@`KN~`mR>T37LDkB9kRTC#6lBB) zls&lZ%pfUN!VfBRa=B&~GPsEQ(Td_j@X?^k`#Y4;h7Nf|%VFA;`NEra{(huxSCO+N 
z7er`*%p>+s6eG*2b%Q^y-6GF@=kn%#mp>);du#sX>m)kbT^fJSO87ulK&S0!Q^g{# zB3zwf7;Lc;+QWVDIZm9&8+R!JXePW^Jl#tR*NF5N#HNpe@1gcw(CX$>zKaIAaas3O zp-)(}+C=7d(2z%@kAvZw=phgn&Y362ymkoQf@?=#Fw0+|1vAh~q|Q5=fvbsS3GSj8 zCxMEv`ct-l>8gpLZ_nRt+YGvf`WT!YMLv@xx?c18~RBo%*lF`N67U z%jw^5f_^JFWjPNldT*-@4VPxbMj2ck=e8YBN&8R%-RnuAJ#AfwxCfd;VZXY^J_vL= zrH^Wm94M!7$0Z^K6=?oDf9|^vOJu$NQ0}IlXEL{-^yLtvf z_wwm~#`N~u$+F4X`bg3_M$jiH_v8-sG%;+3Oiq54uSABH!WFN0>G6;xe)hVJXa?lq zX(9cCGs7pXVV!nO(z(dw>)B*K^;ISZdisnCsyvaKX0d43Jaf<%58-4oufHXwx-rTF6}> z$CvUEO>a5+JD2lSGp6}|X8ik>8^1~ZnB=`oWbW!~g%$pKTJ3QYwC@ampB-QZ1!f#J z{>AJqUATvZ&IePsf2ZlEMgNti+s5{Trl3=)F!pn|P;oEDJY)&W$0YZ%6-B)dQm-Vt z6e-Xui0p7B29bpPRE+LLzs?~EBDI+4c>4$H$J2dvum4rqna4x9{(XEHC3$R3DOuBr zex)Q^))`x+vYnEWOq*pahmzee=-8!mRF-6SpYEKF<8&5ifMn4vg#pdYjCj}n-Ti^+`May;2kM-<#qGbe(TP3K|j#3{&A zNhQDMpi9vI^p*=8=p7@k#UPLDUc+Nd0N@mPr34r}HMu+tG1T)Ra`>-8h^s+U_Xzu8 zoQ`R&MkA3IjR_jcPZIOwCOqw1-$;Vy9g5wH&KAL4(3!^}&IFOs?QfSp29gEi9P*gb zU+!bAbD8+Dt{LD|%hRj2_34bFsS#=^u=ngi zf1ooC8h@{jp<>|bnZYXtsU$SxUeH6(Yn*OqlXx=Mh(mC!?$ds1cy$D5LqD07a&VFr z&-xQ~0LXid@Dr)r`yA-D*UkUXyrbVo|8D^RDB#=GKmci31jQE2hkW#GoRyH6_)vtjx(H$Istp(Ty5$+Q(gj-L2tC(AX9{c-YS}Kiev++; ztXQ`E9KdA0V?UjV77u_Dq95EUW20K5s=-^&i~shML|lrpE6I%&91*UoOLn=;6a_f` zI9)rDBKO(EfC8pM%adih3v#vCAlE(vM@0rQ@2M5AzN7);V1kvwDsfpK{`i@r<)62V z?W$?cmIh2Zu)#!+E$#cC(W)HN^^*}^!Q$6(YmC2L7LX`VdE5eAbZk>dtOo3Frzs;} z<7Sf~$G4>bK;7Cs1186UT#Mab6*-l9x}^7@OQ6Ix-(~q1WkX-!yzW+i+QqH72b8M} z%Q{{=+ZMpbh-j~*C)MC?B(C+R3o?hvnPEbP*3Xm&ZUsIA^NKu9vO8`g{0vz}!Q26U z;)4GoIRYbwyUP$_oYe>yp>i3#7Wd|}%YSM@t-HHbZz8gSYPaY3*0uUWp{Olfc2V&I zyGZ%8^MQc32^lXD!*H_4@fSb$Q?g*G7<7397NB87FK=S%bv0kfJIRU@lHYUdbcvV) zP5=5^{auHG#zN^A;WaS>Y+s~A@tX7+7xymDZhVgaVgNRXoRy0O(sjF}EB*Y{TV@C|^JBZ|x(4 z9>C+hA6-%EoZ7xtwiGIDw@df=i2EjFu@PnuS8ozXhlr?e!- zJ!YY{1Kwg(E|PI_p~{){1u!tkK7)>{SSo$Q`&IcgO1xtDvr?lQh{01I^C#B-ZX#tIj_Xs5ybAma`l@?jb5$L#Lfc1HC-Esi(D;+n+$``D!k_H$efGcI+3Sk{Va5UG1 z*5`=r^F+<3XudxMvK_-y$?@};L0{nC0^kg{fvg18Upi~`!NOyei8H4i`!?#h0^BYQ 
z^)v!G@*)Bg2BB;tYM6Cy_xbe-tq0m630Bp++H`Z%ITwiuQ5SmBe3b*eS`0B=+gPuJ zvFe?#VJnf`s&Ds=_qCfoo9SU)IIyw^^)6O#D}luv`3alOCm~bJF%A@>`ZY5cS^nrU zTuL^rmEo0o~=m ze})-0XbXuwg2}Ivhy1fHYJ94?r-IX(qg`g0|7jaf?!Qd?l8aryk@2+6Ov>m4@#_b}Ptbrwb@tfWhc~VN#NZ1;s z!o`o;o*Tr*WRwRK432OPu-1k($dAIjgWI(G#25)RQ7$fOTPRrD;PikqReSXHAGCo& z-oz{92`0ZTA$1F;PgQN~vVo_XAO@2_ppm8FoHij zi|+DSc+dvYvCjPb34*}@jflUm!>7b0Hh!L_n>XHVdv^fpGEByOdP-Ki2J?k6@m(8O z-1!jmo+jqfQ_5{{QJv|C;sw9`HFlb;bHH0@j3qN7-_x^3_Nk!Fb0X>uat)kiTljwi zb=Q?Qn%G^M-(dD+$8BiQ>%y+CU3a8r zIu{FZzT@K*5rEjnW=BPh=-hOY#$Gq$2^EvZ%>`}VVaJ{IhC}Tyo4*KojKKDxUKP)w zaQx>2A@51Tb55f z2FjEUi;{%zAvdN|FQ_Zjww5|b$0*hR6LB3Pc&7=QNh?9AIS5J@>Zr^$0S$(>P0lro z?^o!(}DbtmvvLsw3uFzkru z9_{ait=~fbL)WUM(rST6shyFK8m%jU2B;R(t_Ix`QT7ew*;&}<+mMk0^ zlrO=>9rk8o&-~e;<|AypEZ8@#AQ&_>mrabr3e!9%Jj8xp;b|`g{PYh9hAaVEHW+en ztBjNGToqei2GaYRPi)~ueEByuUxy>T93`f;R6&5IO%$ArDi{L+S|o?4Oo}Fr1tBFA zy#ZDjFNMOK zI&FDF!RX3gx9L&oYrf|)p-S7xFEiF$)$D@=TQUM4*lEUs_Rdo}ckQ?wbSk!si~v&< zn5CGm?8C!bAje<%k|6e_!GGMA?y6MJEwSNdiKatyw@AD@G|bYd=Rgg@=cd)vN) zy_0yxW!jxAfqE)$#R$IzF>`QgMcQmcNf7Ar3%gZm@iGu0OB89D%sM&2g**N+)Jz3n zBHG7+r5e49vGl&_Jm?8mOmr$D#P~%67aW3q%p8msnjboUE}57+5wd-}7oh64M`SN$ zrmnwy&BRs870Wj2OWH0BMeGz-&mAkaCsy0p6I zrG$2-Uevoh?a|Kyw_|~-Y)|g+ z?58}W_!*S!%MVNMcc=+pTv-+M9F++_$#llM(Q~?m0BW|gld(Mz)O=2?&fegzd5q3U zC>ghJeccd~`($v}=nQ);4h3xxS!E3m>1>XI!cZWfqyNHg4yTC?Tz!+`3US^EB;Y+q z*Ol8PCrlCP>6gFid`;#pjoJc(+7`cxQsZ>x0d#(Ss`>Zgy<*-XFv49t}vwq0YLpT-}F$&7a5 z+B^kBJ13C+iG^xqjh)`J@inMiBN@m9<h=8%Cp~W?kxTT0M2G#01J52Sxw@Z*)Sn z#5_T3alIC$CMrv)$k;bRE^E#v4D2)^w8no;%QB&~4EZKB35V|QCIoaAOSN!9z&lE{ zNi!m93@6c3)~;49+41AyknLb4yWf_?;ZCIS5FMx*0RC@iXM0=Y=GQ%HcrXXZ5(>r- z(9-w%D?yjMk$JQ|1lxvZK#j)-fE($y{l~c52AqW_z5-TP5fo3Gt4XMamO5Ll(B#16 z0R)s-0fg_7#)#sN`Ez-DLX?dQX^G4^yi4^ZYfHlc1<1Pa?_xV+lrPNq+X76Hn_5yqU-BMDfV^X)kV<87bxe|E4M0$;8 zxI6z(e-UD)>^*0fsgpDgH?#cM_S4V&aFnVbgewO%m9L$iKm30j^@qQ>sx-M1y|nOT z4F8w>yaVOmdm3^rRihiGZFsl1& diff --git a/performance/oss-performance-setup.yaml b/performance/oss-performance-setup.yaml deleted file mode 100644 
index 46eafb238b6..00000000000 --- a/performance/oss-performance-setup.yaml +++ /dev/null @@ -1,79 +0,0 @@ -cassandra_version: cassandra-3.11.4 -cassandra_install_type: git -# Driver branch to use -driver_oss_branch: 4.x -# Driver dse branch to use -driver_dse_branch: 4.x -# Driver version identifier (used as part of graphite prefix) -driver_version: 4.8.0 -# Driver examples branch to use -driver_examples_branch: java-driver-4.x -# How long to run test for -duration: 2d -# The ip of the observer node from graphite-setup.yaml step -graphite_host: {provide-graphite-host-ip} - ---- - -ensemble: - server: - node.count: 3 - provisioner: - name: ctool - properties: - mark_for_reuse: true - cloud.provider: openstack - cloud.tenant: performance - cloud.instance.type: ms1.small - cluster_ttl: 2d - configuration_manager: - - name: ctool - properties: - java.version: openjdk8 - product.type: cassandra - product.install.type: {{cassandra_install_type}} - product.version: {{cassandra_version}} - cassandra.yaml: - hinted_handoff_enabled: false - client: - node.count: 1 - provisioner: - name: ctool - properties: - mark_for_reuse: true - cluster_ttl: 2d - cloud.provider: openstack - cloud.tenant: performance - cloud.instance.type: ms1.small - configuration_manager: - - name: ctool - properties: - java.version: openjdk8 - install.maven: true - - name: java_driver - properties: - oss.git.repository: git@github.com:datastax/java-driver.git - oss.git.branch: {{driver_oss_branch}} - dse.git.branch: {{driver_dse_branch}} - type: FOUR_X_OSS - - name: java_driver_duration_test - properties: - git.branch: {{driver_examples_branch}} -workload: - phases: - - run-endurance: - module: java_driver_duration_test - properties: - duration: {{duration}} - is.four: true - graphite.host: {{graphite_host}} - graphite.prefix: endurance-test-java-{{driver_version}}-OSS-{{cassandra_version}} - kill_nodes: - module: killnode_rhino - properties: - target.strategy: whitelist - target.number_of_nodes: 1 - 
target.selector: "*:*" - repeat.delay: 120 - repeat.iterations: 0 - graceful: true From 146edde75a567c08e3026c34b719f358b776d6bd Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 12 Jan 2021 17:25:57 +0100 Subject: [PATCH 636/979] Exclude ByteOrderedTokenIT when running C* 4.0-beta4+ (CASSANDRA-13701) --- .../oss/driver/core/metadata/ByteOrderedTokenIT.java | 6 ++++++ .../oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java index dbc5dc06c2a..63473704c8a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -27,6 +28,11 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@CassandraRequirement( + max = "4.0-beta4", + description = + "Token allocation is not compatible with this partitioner, " + + "but is enabled by default in C* 4.0 (see CASSANDRA-7032 and CASSANDRA-13701)") public class ByteOrderedTokenIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java index 76e9e08fff4..62fd20719dd 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -27,6 +28,11 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@CassandraRequirement( + max = "4.0-beta4", + description = + "Token allocation is not compatible with this partitioner, " + + "but is enabled by default in C* 4.0 (see CASSANDRA-7032 and CASSANDRA-13701)") public class ByteOrderedTokenVnodesIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = From a39e4895e3d600e549411eee4bd3b8392ba6e403 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 12 Jan 2021 17:27:23 +0100 Subject: [PATCH 637/979] Temporarily suspend vnodes tests when running C* 4.0-beta4+ (CASSANDRA-16364) --- .../oss/driver/core/metadata/Murmur3TokenVnodesIT.java | 5 +++++ .../oss/driver/core/metadata/RandomTokenVnodesIT.java | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java index 54bb1d0db26..28c219e8a91 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; 
+import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -27,6 +28,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@CassandraRequirement( + max = "4.0-beta4", + // TODO Re-enable when CASSANDRA-16364 is fixed + description = "TODO Re-enable when CASSANDRA-16364 is fixed") public class Murmur3TokenVnodesIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java index 924ed515169..08b226bdc51 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -27,6 +28,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@CassandraRequirement( + max = "4.0-beta4", + // TODO Re-enable when CASSANDRA-16364 is fixed + description = "TODO Re-enable when CASSANDRA-16364 is fixed") public class RandomTokenVnodesIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = From 5d037ccdd42732ae7e1145011bfe45027f21142f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 13 Jan 2021 15:12:45 +0100 Subject: [PATCH 638/979] Remove dead link in 
DseGssApiAuthProvider --- .../dse/driver/internal/core/auth/DseGssApiAuthProvider.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java index be1b64fad7c..10501af8c01 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseGssApiAuthProvider.java @@ -150,10 +150,6 @@ * } * } * - * - * @see Authenticating - * a DSE cluster with Kerberos */ @ThreadSafe public class DseGssApiAuthProvider extends DseGssApiAuthProviderBase { From 2e896c1c8cadbdd677590cdf2706fc55a423a0da Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 13 Jan 2021 17:45:10 +0100 Subject: [PATCH 639/979] Include Tinkerpop dependencies when generating distribution javadocs (JAVA-2907 follow-up) --- distribution/pom.xml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/distribution/pom.xml b/distribution/pom.xml index dd1e4e8c7b4..70acdb59f06 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -140,6 +140,16 @@ snappy-java ${snappy.version} + + org.apache.tinkerpop + gremlin-core + ${tinkerpop.version} + + + org.apache.tinkerpop + tinkergraph-gremlin + ${tinkerpop.version} + From 889fe572e3720cfd4bcff06ce5cac3274243d110 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 13 Jan 2021 17:59:56 +0100 Subject: [PATCH 640/979] [maven-release-plugin] prepare release 4.10.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files 
changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 7e2aa86d4a5..308a76f75e3 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-core-shaded - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-mapper-processor - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-mapper-runtime - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-query-builder - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-test-infra - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-metrics-micrometer - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss java-driver-metrics-microprofile - 4.10.0-SNAPSHOT + 4.10.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index ba8101ea31f..df8600563d7 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 40090ea75a9..333d92bdc6f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 70acdb59f06..727c3c15a01 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 3255b92e6ae..3b24e714472 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.10.0-SNAPSHOT + 4.10.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 553199ea830..a6fb63f7093 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 9cbd8fe224c..ecb6b308967 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 0c881a55311..5574d4694b8 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 5dde458d947..da70e6f654e 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 3559b2af797..09d29e0c631 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index cfbc99e7368..efd69a623e5 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 6d5d7f4c3d6..a20234999ff 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -951,7 +951,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.10.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index ddf668c530a..153f9db9bd3 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index bc259d4ac83..a91fdc388c4 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0-SNAPSHOT + 4.10.0 java-driver-test-infra bundle From 2e300fe1cc822d5234c69ba539c4e05b828b85b9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 13 Jan 2021 18:00:11 +0100 Subject: [PATCH 641/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 308a76f75e3..1527d39f924 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.10.0 + 
4.11.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.10.0 + 4.11.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index df8600563d7..dd07c3c8be8 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 333d92bdc6f..5e545545541 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 727c3c15a01..65de3f5bbb2 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 3b24e714472..a8aa460b95d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.10.0 + 4.11.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a6fb63f7093..5dbcaf6996c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index ecb6b308967..5de6def1d9d 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 5574d4694b8..f65a8d9f584 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index da70e6f654e..8b0134f3307 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 09d29e0c631..138deb6f11a 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index efd69a623e5..1fb3438eff1 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index a20234999ff..add6f984b6e 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -951,7 +951,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.10.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 153f9db9bd3..5e0e4cf0a89 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index a91fdc388c4..4bd0d80bfe2 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.10.0 + 4.11.0-SNAPSHOT java-driver-test-infra bundle From e4c4569fa56a35337fd478b705989182f7de5634 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 13 Jan 2021 16:43:31 +0100 Subject: [PATCH 642/979] Update version in docs --- README.md | 4 +- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++-- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 6 +- manual/core/load_balancing/README.md | 10 +-- 
manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/non_blocking/README.md | 44 +++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 16 ++-- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 22 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 26 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 
+- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- upgrade_guide/README.md | 6 +- 84 files changed, 390 insertions(+), 390 deletions(-) diff --git a/README.md b/README.md index fc8427f7f73..5b7c323ec85 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.9.0](https://github.com/datastax/java-driver/tree/4.9.0).* +[4.10.0](https://github.com/datastax/java-driver/tree/4.10.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.9 +[API docs]: https://docs.datastax.com/en/drivers/java/4.10 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index d54e6d8713a..5dd62b63f38 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.10.0 (in progress) +### 4.10.0 - [improvement] JAVA-2907: Switch Tinkerpop to an optional dependency - [improvement] JAVA-2904: Upgrade Jackson to 2.12.0 and Tinkerpop to 3.4.9 diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index a2a16ebfdbd..576ee41823d 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index aeb21167fea..7ff1fc3ab0e 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 7d83c8ff748..b3b0ff0dd17 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 2b70c64d1a4..d3d936a2758 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 4e77c3d3a61..90181f3b98e 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 922bcffba24..cc896b138a1 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.9.0 + 4.10.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.9.0 + 4.10.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 50432c499cf..8d8b11e1065 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index f688aa88172..77977089641 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 5bda597058b..53d2a785d02 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html 
-[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- 
-[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC +[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- +[ExtraTypeCodecs.json(Class, ObjectMapper)]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index ec033d6e522..f81f376831c 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
a37704eede2..a7c676e8cdf 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 145f8a84f38..1bf8d7825ba 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 072c2193c71..2d907231e58 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 04b447c7919..5ba18bc6891 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 7ac4a6c4e8f..e40f29d7f9a 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 929f80531ca..2869ce22a4b 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index fdddaff26f8..505ad2a40be 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 382983a106d..87be4598121 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -589,6 +589,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git 
a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 3210c916a61..e65c7ef50d9 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -423,11 +423,11 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [`nodetool 
status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 79cfc96524f..51b7c4621ea 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 49da884802b..31503c10be4 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 4259e56c107..fc02e8a8cdc 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -307,16 +307,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git a/manual/core/metadata/token/README.md 
b/manual/core/metadata/token/README.md index 38765047f86..8c98f389cef 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 6704b8394c4..616b04beeda 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- 
+[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 8f0aace18f3..8ddbcfd8069 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block. 
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 9dec6c80f78..ffae1dfb6fb 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 9e200a3ffb5..691b4735aea 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's internal 
code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index ef6f1068f2a..8fb17c02f8b 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index ac50c53da90..a3264c1f0e1 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 1c88908f4cd..57b3a98cebb 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a demonstration 
of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 24290baf9ab..08a1e30f4c9 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index dfcf23d4b4b..1d52fbceb26 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index aa324c53c4f..698ac2f42a4 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index de3a05f59ad..cdbc5ca817c 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. 
See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 8587aa7ffc6..90dec7a960d 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). 
-[Statement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index b691e1ea292..b1cb7c70967 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 +61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. 
-[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index cc61e732e2b..66e9d451387 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 17ffefd39bf..e74813d8e6d 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index a9599d9ed12..deff971fea9 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 17467e8d6c5..a9985099fd7 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 7eee629ae5a..6e779811aa0 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index adb267bf07a..3708988ad39 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 763f6d3ebba..2de2788c87a 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 12584c140cb..6cadf225623 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index be51e184ac5..545eda62533 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index 533ccb62e57..1ff4fa80910 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index 3a3b1d6e39e..de7e3159816 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example. 
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index 46749d10286..b925895c80c 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index a29af184550..091f669f269 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 18b3900eafc..705549502de 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 5fc0550fb02..00831156973 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index b4228a0fce1..87ab6f7f826 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -69,14 +69,14 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index 5c1f2713e2d..ede46a8a629 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
1cf6800ade6..0daf347c5b3 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index b88b61d4e9e..265f2b5a278 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperException.html +[DO_NOT_SET]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index d053928b379..e019ce9c5e8 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -110,17 +110,17 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html -[Row]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html 
-[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 8a64b94a10f..79423b674c3 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ 
b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html 
+[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 5e1cdcdc79d..d31c1ce9faa 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -142,19 +142,19 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/PagingIterable.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 5232afcb277..f1f5646265b 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html 
+[SettableByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 4f26a84004f..be9c2a2a23a 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 2b39db5c4a0..22c974ff894 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
29a83725750..27039d76d51 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Query.html +[@ClusteringColumn]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index e2834b0d99f..8d6ee979a75 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 0983292d859..65b1efdb85a 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index c3f7d5f5e11..ab7c39ffb1a 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 9b25e53235b..f4d406da249 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 413366779b0..c349f6c8c44 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index d038fd9d44a..ae17f467843 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 20d0b184cf5..8b6862e4fed 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 1210491a1c6..140b1cf430f 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 9a6fba24819..b2d005495a6 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index e1a17cf7d92..7a782dcd6d6 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 2aa435e5b7c..0c2603ef0fe 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index f283ba5b5ee..9347b3bbb56 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index d240c6bd282..609395d2b6e 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index ab0664d0280..d743f584002 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index b5d6646fcc7..cd23078f1eb 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index e1819f0c128..1b71ea8434c 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 66f8e4dfaac..eb95162e3d3 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 3b8e1518e80..b1f87d276e9 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 19f9c2f8bb1..2c75f869220 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 3b3125de12f..5dcaa3ef8f9 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -84,8 +84,8 @@ empty replicas and token ranges for them. If you need the driver to keep computi token map for these keyspaces, you now must modify the following configuration option: `datastax-java-driver.advanced.metadata.schema.refreshed-keyspaces`. -[Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/latest/com/datastax/oss/driver/api/core/metadata/TokenMap.html +[Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/TokenMap.html #### DSE Graph dependencies are now optional @@ -213,7 +213,7 @@ you can obtain in most web environments by calling `Thread.getContextClassLoader See the javadocs of [SessionBuilder.withClassLoader] for more information. 
-[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.9/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- ### 4.1.0 From 89e01fb320a9ed402179a6760501f18ea8f6bbcb Mon Sep 17 00:00:00 2001 From: Patrick Decat Date: Wed, 20 Jan 2021 15:12:26 +0100 Subject: [PATCH 643/979] Fix incorrect link to https://datastax-oss.atlassian.net/browse/JAVA-2899 (#1527) --- upgrade_guide/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 5dcaa3ef8f9..e36a9bebf44 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -4,7 +4,7 @@ #### Cross-datacenter failover -[JAVA-2899](https://datastax-oss.atlassian.net/browse/JAVA-2676) re-introduced the ability to +[JAVA-2899](https://datastax-oss.atlassian.net/browse/JAVA-2899) re-introduced the ability to perform cross-datacenter failover using the driver's built-in load balancing policies. See [Load balancing](../manual/core/loadbalancing/) in the manual for details. 
From 3c3c3ff61250844be667e9e18d5aa85536c59fd5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 22 Jan 2021 10:33:07 +0100 Subject: [PATCH 644/979] JAVA-2907 follow-up: Add more revapi exceptions --- core/revapi.json | 54 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/core/revapi.json b/core/revapi.json index b20a307277c..4dfc79dc859 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -5485,6 +5485,60 @@ "code": "java.missing.newClass", "new": "missing-class org.apache.tinkerpop.gremlin.structure.VertexProperty", "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.process.remote.RemoteConnection", + "new": "missing-class org.apache.tinkerpop.gremlin.process.remote.RemoteConnection", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.process.traversal.P", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.P", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.process.traversal.Path", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.Path", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", + "new": "missing-class org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource", + "new": "missing-class 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.structure.Edge", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Edge", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.structure.Property", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Property", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.structure.Vertex", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.Vertex", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" + }, + { + "code": "java.missing.oldClass", + "old": "missing-class org.apache.tinkerpop.gremlin.structure.VertexProperty", + "new": "missing-class org.apache.tinkerpop.gremlin.structure.VertexProperty", + "justification": "JAVA-2907: switched Tinkerpop dependency to optional" } ] } From a86cb80d1182353a13b525f5b79e62fa6e825efc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 22 Jan 2021 14:24:36 +0100 Subject: [PATCH 645/979] JAVA-2907 follow-up: Add TinkerPop version compatibility matrix --- manual/core/integration/README.md | 52 ++++++++++++++++++++++++++----- pom.xml | 4 +++ upgrade_guide/README.md | 13 +++++--- 3 files changed, 56 insertions(+), 13 deletions(-) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 87be4598121..37b28810105 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -440,7 +440,8 @@ If all of these metrics are disabled, you can remove the dependency: * when Insights monitoring is enabled; * when [Json codecs](../custom_codecs) are 
being used. -If you don't use either of those features, you can safely exclude the dependency: +Jackson is declared as a required dependency, but the driver can operate normally without it. If you +don't use any of the above features, you can safely exclude the dependency: ```xml @@ -461,7 +462,8 @@ If you don't use either of those features, you can safely exclude the dependency Our [geospatial types](../dse/geotypes/) implementation is based on the [Esri Geometry API](https://github.com/Esri/geometry-api-java). -If you don't use geospatial types anywhere in your application, you can exclude the dependency: +Esri is declared as a required dependency, but the driver can operate normally without it. If you +don't use geospatial types anywhere in your application, you can exclude the dependency: ```xml @@ -471,7 +473,7 @@ If you don't use geospatial types anywhere in your application, you can exclude com.esri.geometry - esri-geometry-api + * @@ -479,9 +481,13 @@ If you don't use geospatial types anywhere in your application, you can exclude #### TinkerPop -[Apache TinkerPop™](http://tinkerpop.apache.org/) is used in our [graph API](../dse/graph/). +[Apache TinkerPop™](http://tinkerpop.apache.org/) is used in our [graph API](../dse/graph/), +introduced in the OSS driver in version 4.4.0 (it was previously a feature only available in the +now-retired DSE driver). -If you don't use DSE graph at all, you can exclude the dependencies: +For driver versions ranging from 4.4.0 to 4.9.0 inclusive, TinkerPop is declared as a required +dependency, but the driver can operate normally without it. If you don't use the graph API at all, +you can exclude the TinkerPop dependencies: ```xml @@ -497,17 +503,47 @@ If you don't use DSE graph at all, you can exclude the dependencies: ``` +Starting with driver 4.10 however, TinkerPop switched to an optional dependency. Excluding TinkerPop +explicitly is not required anymore if you don't use it. 
_If you do use the graph API though, you now +need to explicitly include the dependencies below in your application_: + +```xml + + org.apache.tinkerpop + gremlin-core + ${tinkerpop.version} + + + org.apache.tinkerpop + tinkergraph-gremlin + ${tinkerpop.version} + +``` + If you do use graph, it is important to keep the precise TinkerPop version that the driver depends on: unlike the driver, TinkerPop does not follow semantic versioning, so even a patch version change -(e.g. 3.3.0 vs 3.3.3) could introduce incompatibilities. So do not declare an explicit dependency in -your application, let the driver pull it transitively. +(e.g. 3.3.0 vs 3.3.3) could introduce incompatibilities. + +Here are the recommended TinkerPop versions for each driver version: + + + + + + + + + + +
          Driver versionTinkerPop version
          4.10.03.4.9
          4.9.03.4.8
          4.8.03.4.5
          4.7.03.4.5
          4.6.03.4.5
          4.5.03.4.5
          4.4.03.3.3
          #### Reactive Streams [Reactive Streams](https://www.reactive-streams.org/) types are referenced in our [reactive API](../reactive/). -If you never call any of the `executeReactive` methods, you can exclude the dependency: +The Reactive Streams API is declared as a required dependency, but the driver can operate normally +without it. If you never call any of the `executeReactive` methods, you can exclude the dependency: ```xml diff --git a/pom.xml b/pom.xml index add6f984b6e..81587e11aa4 100644 --- a/pom.xml +++ b/pom.xml @@ -50,6 +50,10 @@ 4.1.16 4.1.51.Final 1.2.1 + 3.4.9 1.7.26 1.0.3 diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index e36a9bebf44..80909ad15da 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -35,7 +35,7 @@ The following methods were deprecated and will be removed in the next major vers Driver 4.10.0 also re-introduced a retry policy whose behavior is equivalent to the `DowngradingConsistencyRetryPolicy` from driver 3.x. See this -[FAQ entry](https://docs.datastax.com/en/developer/java-driver/latest/faq/#where-is-downgrading-consistency-retry-policy) +[FAQ entry](https://docs.datastax.com/en/developer/java-driver/4.10/faq/#where-is-downgrading-consistency-retry-policy) for more information. [`RetryVerdict`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html @@ -89,16 +89,16 @@ token map for these keyspaces, you now must modify the following configuration o #### DSE Graph dependencies are now optional -Until driver 4.9.0, the driver declared a mandatory dependency to Apache Tinkerpop, a library +Until driver 4.9.0, the driver declared a mandatory dependency to Apache TinkerPop, a library required only when connecting to DSE Graph. The vast majority of Apache Cassandra users did not need that library, but were paying the price of having that heavy-weight library in their application's classpath. 
-_Starting with driver 4.10.0, Tinkerpop is now considered an optional dependency_. +_Starting with driver 4.10.0, TinkerPop is now considered an optional dependency_. Regular users of Apache Cassandra that do not use DSE Graph will not notice any disruption. -DSE Graph users, however, will now have to explicitly declare a dependency to Apache Tinkerpop. This +DSE Graph users, however, will now have to explicitly declare a dependency to Apache TinkerPop. This can be achieved with Maven by adding the following dependencies to the `` section of your POM file: @@ -115,6 +115,9 @@ your POM file: ``` +See the [integration](../manual/core/integration/#tinker-pop) section in the manual for more details +as well as a driver vs. TinkerPop version compatibility matrix. + ### 4.5.x - 4.6.0 These versions are subject to [JAVA-2676](https://datastax-oss.atlassian.net/browse/JAVA-2676), a @@ -135,7 +138,7 @@ Apart from that, the only visible change is that DSE-specific features are now e * new execution methods: `CqlSession.executeGraph`, `CqlSession.executeContinuously*`. They all have default implementations so this doesn't break binary compatibility. You can just ignore them. -* new driver dependencies: Tinkerpop, ESRI, Reactive Streams. If you want to keep your classpath +* new driver dependencies: TinkerPop, ESRI, Reactive Streams. If you want to keep your classpath lean, you can exclude some dependencies when you don't use the corresponding DSE features; see the [Integration>Driver dependencies](../manual/core/integration/#driver-dependencies) section. 
From 346db046074f35f9b02993f1b607d0300f7c1be0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 1 Feb 2021 18:11:14 +0100 Subject: [PATCH 646/979] Fix wrong link to the cross-DC failover example --- manual/core/load_balancing/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index e65c7ef50d9..3d09f0282b6 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -251,7 +251,7 @@ possibly scaling up its bandwidth to cope with the network traffic spike. This i solution for the cross-datacenter failover issue in general, but we acknowledge that it also requires a purpose-built infrastructure. To help you explore this option, read our [white paper]. -[application-level failover example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/retry/CrossDatacenterFailover.java +[application-level failover example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/failover/CrossDatacenterFailover.java [white paper]: https://www.datastax.com/sites/default/files/content/whitepaper/files/2019-09/Designing-Fault-Tolerant-Applications-DataStax.pdf #### Token-aware From 73b8cc196a3cce4403b7ee5e2db8c2c4227225c4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 10 Feb 2021 16:19:22 +0100 Subject: [PATCH 647/979] Use Entry.comparingByKey instead of Comparator.comparing(Entry::getKey) --- .../core/config/typesafe/TypesafeDriverExecutionProfile.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java index 63fe6de2bd8..a920ab30df6 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverExecutionProfile.java @@ -30,7 +30,6 @@ import java.time.Duration; import java.util.AbstractMap; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; @@ -278,7 +277,7 @@ public Object getComparisonKey(@NonNull DriverOption option) { @Override public SortedSet> entrySet() { ImmutableSortedSet.Builder> builder = - ImmutableSortedSet.orderedBy(Comparator.comparing(Map.Entry::getKey)); + ImmutableSortedSet.orderedBy(Map.Entry.comparingByKey()); for (Map.Entry entry : getEffectiveOptions().entrySet()) { builder.add(new AbstractMap.SimpleEntry<>(entry.getKey(), entry.getValue().unwrapped())); } From fd9cb23d369c0e749ddf4feaf6947ee0faf54769 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 12 Feb 2021 13:36:21 +0100 Subject: [PATCH 648/979] Remove unused Maven property --- pom.xml | 1 - 1 file changed, 1 deletion(-) diff --git a/pom.xml b/pom.xml index 81587e11aa4..0152ac1a420 100644 --- a/pom.xml +++ b/pom.xml @@ -42,7 +42,6 @@ bom - true UTF-8 UTF-8 1.4.1 From 257c7fafccb701496b844a3d96766379ac4efb23 Mon Sep 17 00:00:00 2001 From: Annamalai Muthalagappan <41874936+annamalai87@users.noreply.github.com> Date: Fri, 12 Feb 2021 11:51:56 -0600 Subject: [PATCH 649/979] JAVA-2918: Exclude invalid peers from schema agreement checks (#1528) Co-authored-by: Alexandre Dutra --- changelog/README.md | 4 + .../core/metadata/DefaultTopologyMonitor.java | 16 +-- .../core/metadata/PeerRowValidator.java | 41 ++++++ .../core/metadata/SchemaAgreementChecker.java | 52 ++++---- .../core/metadata/PeerRowValidatorTest.java | 122 ++++++++++++++++++ .../metadata/SchemaAgreementCheckerTest.java | 109 +++++++++++----- .../core/metadata/TestNodeFactory.java | 7 + 7 files changed, 280 insertions(+), 71 deletions(-) 
create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidator.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidatorTest.java diff --git a/changelog/README.md b/changelog/README.md index 5dd62b63f38..3c1554bdb12 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.11.0 (in progress) + +- [bug] JAVA-2918: Exclude invalid peers from schema agreement checks + ### 4.10.0 - [improvement] JAVA-2907: Switch Tinkerpop to an optional dependency diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 87585199b77..da5fc2115eb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -539,24 +539,16 @@ protected InetSocketAddress getBroadcastRpcAddress( * node's broadcast RPC address and host ID; otherwise the driver may not work properly. */ protected boolean isPeerValid(AdminRow peerRow) { - boolean hasPeersRpcAddress = !peerRow.isNull("rpc_address"); - boolean hasPeersV2RpcAddress = - !peerRow.isNull("native_address") && !peerRow.isNull("native_port"); - boolean hasRpcAddress = hasPeersV2RpcAddress || hasPeersRpcAddress; - boolean valid = - hasRpcAddress - && !peerRow.isNull("host_id") - && !peerRow.isNull("data_center") - && !peerRow.isNull("rack") - && !peerRow.isNull("tokens"); - if (!valid) { + if (PeerRowValidator.isValid(peerRow)) { + return true; + } else { LOG.warn( "[{}] Found invalid row in {} for peer: {}. 
" + "This is likely a gossip or snitch issue, this node will be ignored.", logPrefix, getPeerTableName(), peerRow.getInetAddress("peer")); + return false; } - return valid; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidator.java new file mode 100644 index 00000000000..735eb4ee1cd --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidator.java @@ -0,0 +1,41 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public class PeerRowValidator { + + /** Returns {@code true} if the given peer row is valid, and {@code false} otherwise. 
*/ + public static boolean isValid(@NonNull AdminRow peerRow) { + + boolean hasPeersRpcAddress = !peerRow.isNull("rpc_address"); + boolean hasPeersV2RpcAddress = + !peerRow.isNull("native_address") && !peerRow.isNull("native_port"); + boolean hasRpcAddress = hasPeersRpcAddress || hasPeersV2RpcAddress; + + return hasRpcAddress + && !peerRow.isNull("host_id") + && !peerRow.isNull("data_center") + && !peerRow.isNull("rack") + && !peerRow.isNull("tokens") + && !peerRow.isNull("schema_version"); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementChecker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementChecker.java index 61f75c573ab..006ce380449 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementChecker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementChecker.java @@ -32,6 +32,7 @@ import java.time.Duration; import java.util.Iterator; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.CompletableFuture; @@ -105,8 +106,7 @@ private void sendQueries() { } else { CompletionStage localQuery = query("SELECT schema_version FROM system.local WHERE key='local'"); - CompletionStage peersQuery = - query("SELECT host_id, schema_version FROM system.peers"); + CompletionStage peersQuery = query("SELECT * FROM system.peers"); localQuery .thenCombine(peersQuery, this::extractSchemaVersions) @@ -142,31 +142,10 @@ private Set extractSchemaVersions(AdminResult controlNodeResult, AdminResu Map nodes = context.getMetadataManager().getMetadata().getNodes(); for (AdminRow peerRow : peersResult) { - UUID hostId = peerRow.getUuid("host_id"); - if (hostId == null) { - LOG.warn( - "[{}] Missing host_id in system.peers row, excluding from schema agreement check", - logPrefix); - continue; - } - UUID schemaVersion = peerRow.getUuid("schema_version"); - if 
(schemaVersion == null) { - LOG.warn( - "[{}] Missing schema_version in system.peers row for {}, " - + "excluding from schema agreement check", - logPrefix, - hostId); - continue; - } - Node node = nodes.get(hostId); - if (node == null) { - LOG.warn("[{}] Unknown peer {}, excluding from schema agreement check", logPrefix, hostId); - continue; - } else if (node.getState() != NodeState.UP) { - LOG.debug("[{}] Peer {} is down, excluding from schema agreement check", logPrefix, hostId); - continue; + if (isPeerValid(peerRow, nodes)) { + UUID schemaVersion = Objects.requireNonNull(peerRow.getUuid("schema_version")); + schemaVersions.add(schemaVersion); } - schemaVersions.add(schemaVersion); } return schemaVersions.build(); } @@ -207,4 +186,25 @@ protected CompletionStage query(String queryString) { channel, queryString, queryTimeout, INFINITE_PAGE_SIZE, logPrefix) .start(); } + + protected boolean isPeerValid(AdminRow peerRow, Map nodes) { + if (PeerRowValidator.isValid(peerRow)) { + UUID hostId = peerRow.getUuid("host_id"); + Node node = nodes.get(hostId); + if (node == null) { + LOG.warn("[{}] Unknown peer {}, excluding from schema agreement check", logPrefix, hostId); + return false; + } else if (node.getState() != NodeState.UP) { + LOG.debug("[{}] Peer {} is down, excluding from schema agreement check", logPrefix, hostId); + return false; + } + return true; + } else { + LOG.warn( + "[{}] Found invalid system.peers row for peer: {}, excluding from schema agreement check.", + logPrefix, + peerRow.getInetAddress("peer")); + return false; + } + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidatorTest.java new file mode 100644 index 00000000000..f02b7169d30 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/PeerRowValidatorTest.java @@ -0,0 +1,122 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.datastax.oss.driver.internal.core.metadata; + +import static com.datastax.oss.driver.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class PeerRowValidatorTest { + + @DataProvider + public static Object[][] nullColumnsV1() { + return new Object[][] { + {"rpc_address"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"}, {"schema_version"} + }; + } + + @DataProvider + public static Object[][] nullColumnsV2() { + return new Object[][] { + {"native_address"}, + {"native_port"}, + {"host_id"}, + {"data_center"}, + {"rack"}, + {"tokens"}, + {"schema_version"} + }; + } + + @Test + @UseDataProvider("nullColumnsV1") + public void should_fail_for_invalid_peer_v1(String nullColumn) { + assertThat(PeerRowValidator.isValid(mockRowV1(nullColumn))).isFalse(); + } + + @Test + @UseDataProvider("nullColumnsV2") + public void should_fail_for_invalid_peer_v2(String nullColumn) { + assertThat(PeerRowValidator.isValid(mockRowV2(nullColumn))).isFalse(); + } + + @Test + public void 
should_succeed_for_valid_peer_v1() { + AdminRow peerRow = mock(AdminRow.class); + when(peerRow.isNull("host_id")).thenReturn(false); + when(peerRow.isNull("rpc_address")).thenReturn(false); + when(peerRow.isNull("native_address")).thenReturn(true); + when(peerRow.isNull("native_port")).thenReturn(true); + when(peerRow.isNull("data_center")).thenReturn(false); + when(peerRow.isNull("rack")).thenReturn(false); + when(peerRow.isNull("tokens")).thenReturn(false); + when(peerRow.isNull("schema_version")).thenReturn(false); + + assertThat(PeerRowValidator.isValid(peerRow)).isTrue(); + } + + @Test + public void should_succeed_for_valid_peer_v2() { + AdminRow peerRow = mock(AdminRow.class); + when(peerRow.isNull("host_id")).thenReturn(false); + when(peerRow.isNull("rpc_address")).thenReturn(true); + when(peerRow.isNull("native_address")).thenReturn(false); + when(peerRow.isNull("native_port")).thenReturn(false); + when(peerRow.isNull("data_center")).thenReturn(false); + when(peerRow.isNull("rack")).thenReturn(false); + when(peerRow.isNull("tokens")).thenReturn(false); + when(peerRow.isNull("schema_version")).thenReturn(false); + + assertThat(PeerRowValidator.isValid(peerRow)).isTrue(); + } + + private AdminRow mockRowV1(String nullColumn) { + AdminRow peerRow = mock(AdminRow.class); + when(peerRow.isNull("host_id")).thenReturn(nullColumn.equals("host_id")); + when(peerRow.isNull("rpc_address")).thenReturn(nullColumn.equals("rpc_address")); + when(peerRow.isNull("native_address")).thenReturn(true); + when(peerRow.isNull("native_port")).thenReturn(true); + when(peerRow.isNull("data_center")).thenReturn(nullColumn.equals("data_center")); + when(peerRow.isNull("rack")).thenReturn(nullColumn.equals("rack")); + when(peerRow.isNull("tokens")).thenReturn(nullColumn.equals("tokens")); + when(peerRow.isNull("schema_version")).thenReturn(nullColumn.equals("schema_version")); + + return peerRow; + } + + private AdminRow mockRowV2(String nullColumn) { + AdminRow peerRow = 
mock(AdminRow.class); + when(peerRow.isNull("host_id")).thenReturn(nullColumn.equals("host_id")); + when(peerRow.isNull("native_address")).thenReturn(nullColumn.equals("native_address")); + when(peerRow.isNull("native_port")).thenReturn(nullColumn.equals("native_port")); + when(peerRow.isNull("rpc_address")).thenReturn(true); + when(peerRow.isNull("data_center")).thenReturn(nullColumn.equals("data_center")); + when(peerRow.isNull("rack")).thenReturn(nullColumn.equals("rack")); + when(peerRow.isNull("tokens")).thenReturn(nullColumn.equals("tokens")); + when(peerRow.isNull("schema_version")).thenReturn(nullColumn.equals("schema_version")); + + return peerRow; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementCheckerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementCheckerTest.java index dc143327ecb..26d4aafea03 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementCheckerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/SchemaAgreementCheckerTest.java @@ -35,11 +35,15 @@ import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import io.netty.channel.EventLoop; import java.time.Duration; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Map; +import java.util.Objects; import java.util.Queue; import java.util.UUID; import java.util.concurrent.CompletableFuture; @@ -49,14 +53,16 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.MockitoAnnotations; 
-@RunWith(MockitoJUnitRunner.class) +@RunWith(DataProviderRunner.class) public class SchemaAgreementCheckerTest { private static final UUID VERSION1 = UUID.randomUUID(); private static final UUID VERSION2 = UUID.randomUUID(); + private static final UUID NODE_2_HOST_ID = UUID.randomUUID(); + @Mock private InternalDriverContext context; @Mock private DriverConfig config; @Mock private DriverExecutionProfile defaultConfig; @@ -70,10 +76,11 @@ public class SchemaAgreementCheckerTest { @Before public void setup() { + MockitoAnnotations.initMocks(this); when(context.getMetricsFactory()).thenReturn(metricsFactory); node1 = TestNodeFactory.newNode(1, context); - node2 = TestNodeFactory.newNode(2, context); + node2 = TestNodeFactory.newNode(2, NODE_2_HOST_ID, context); when(defaultConfig.getDuration(DefaultDriverOption.CONTROL_CONNECTION_TIMEOUT)) .thenReturn(Duration.ofSeconds(1)); @@ -86,7 +93,12 @@ public void setup() { when(config.getDefaultProfile()).thenReturn(defaultConfig); when(context.getConfig()).thenReturn(config); - Map nodes = ImmutableMap.of(node1.getHostId(), node1, node2.getHostId(), node2); + Map nodes = + ImmutableMap.of( + Objects.requireNonNull(node1.getHostId()), + node1, + Objects.requireNonNull(node2.getHostId()), + node2); when(metadata.getNodes()).thenReturn(nodes); when(metadataManager.getMetadata()).thenReturn(metadata); when(context.getMetadataManager()).thenReturn(metadataManager); @@ -124,9 +136,8 @@ public void should_succeed_if_only_one_node() { checker.stubQueries( new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", mockResult(/*empty*/ ))); + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(/*empty*/ ))); // When CompletionStage future = checker.run(); @@ -142,10 +153,8 @@ public void should_succeed_if_versions_match_on_first_try() { checker.stubQueries( 
new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(node2.getHostId(), VERSION1)))); + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(mockValidPeerRow(VERSION1)))); // When CompletionStage future = checker.run(); @@ -162,10 +171,8 @@ public void should_ignore_down_peers() { checker.stubQueries( new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(node2.getHostId(), VERSION2)))); + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(mockValidPeerRow(VERSION2)))); // When CompletionStage future = checker.run(); @@ -174,17 +181,34 @@ public void should_ignore_down_peers() { assertThatStage(future).isSuccess(b -> assertThat(b).isTrue()); } + @DataProvider + public static Object[][] malformedPeer() { + return new Object[][] { + // missing host id + {mockPeerRow(null, VERSION2, true, true, true, true)}, + // missing schema version + {mockPeerRow(NODE_2_HOST_ID, null, true, true, true, true)}, + // missing datacenter + {mockPeerRow(NODE_2_HOST_ID, VERSION2, false, true, true, true)}, + // missing rack + {mockPeerRow(NODE_2_HOST_ID, VERSION2, true, false, true, true)}, + // missing RPC address + {mockPeerRow(NODE_2_HOST_ID, VERSION2, true, true, false, true)}, + // missing tokens + {mockPeerRow(NODE_2_HOST_ID, VERSION2, true, true, true, false)}, + }; + } + @Test - public void should_ignore_malformed_rows() { + @UseDataProvider("malformedPeer") + public void should_ignore_malformed_rows(AdminRow malformedPeer) { // Given TestSchemaAgreementChecker checker = new TestSchemaAgreementChecker(channel, context); checker.stubQueries( new StubbedQuery( "SELECT 
schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(null, VERSION2)))); // missing host_id + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(malformedPeer))); // When CompletionStage future = checker.run(); @@ -201,18 +225,14 @@ public void should_reschedule_if_versions_do_not_match_on_first_try() { // First round new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(node2.getHostId(), VERSION2))), + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(mockValidPeerRow(VERSION2))), // Second round new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(node2.getHostId(), VERSION1)))); + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(mockValidPeerRow(VERSION1)))); // When CompletionStage future = checker.run(); @@ -230,10 +250,8 @@ public void should_fail_if_versions_do_not_match_after_timeout() { checker.stubQueries( new StubbedQuery( "SELECT schema_version FROM system.local WHERE key='local'", - mockResult(mockRow(null, VERSION1))), - new StubbedQuery( - "SELECT host_id, schema_version FROM system.peers", - mockResult(mockRow(node2.getHostId(), VERSION1)))); + mockResult(mockLocalRow(VERSION1))), + new StubbedQuery("SELECT * FROM system.peers", mockResult(mockValidPeerRow(VERSION1)))); // When CompletionStage future = checker.run(); @@ -274,10 +292,35 @@ private StubbedQuery(String queryString, AdminResult result) { } } - private AdminRow mockRow(UUID hostId, UUID schemaVersion) 
{ + private AdminRow mockLocalRow(@SuppressWarnings("SameParameterValue") UUID schemaVersion) { + AdminRow row = mock(AdminRow.class); + when(row.getUuid("host_id")).thenReturn(node1.getHostId()); + when(row.getUuid("schema_version")).thenReturn(schemaVersion); + return row; + } + + private AdminRow mockValidPeerRow(UUID schemaVersion) { + return mockPeerRow(node2.getHostId(), schemaVersion, true, true, true, true); + } + + private static AdminRow mockPeerRow( + UUID hostId, + UUID schemaVersion, + boolean hasDatacenter, + boolean hasRack, + boolean hasRpcAddress, + boolean hasTokens) { AdminRow row = mock(AdminRow.class); when(row.getUuid("host_id")).thenReturn(hostId); + when(row.isNull("host_id")).thenReturn(hostId == null); when(row.getUuid("schema_version")).thenReturn(schemaVersion); + when(row.isNull("schema_version")).thenReturn(schemaVersion == null); + when(row.isNull("data_center")).thenReturn(!hasDatacenter); + when(row.isNull("rack")).thenReturn(!hasRack); + when(row.isNull("tokens")).thenReturn(!hasTokens); + when(row.isNull("rpc_address")).thenReturn(!hasRpcAddress); + when(row.isNull("native_address")).thenReturn(true); + when(row.isNull("native_port")).thenReturn(true); return row; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java index 54ab7755c51..c98f5943c70 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/TestNodeFactory.java @@ -28,6 +28,13 @@ public static DefaultNode newNode(int lastIpByte, InternalDriverContext context) return node; } + public static DefaultNode newNode(int lastIpByte, UUID hostId, InternalDriverContext context) { + DefaultNode node = newContactPoint(lastIpByte, context); + node.hostId = hostId; + node.broadcastRpcAddress = ((InetSocketAddress) 
node.getEndPoint().resolve()); + return node; + } + public static DefaultNode newContactPoint(int lastIpByte, InternalDriverContext context) { DefaultEndPoint endPoint = newEndPoint(lastIpByte); return new DefaultNode(endPoint, context); From abcbdde52a887c62048b209f89e1a2e6c61f062b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 3 Mar 2021 22:10:42 +0100 Subject: [PATCH 650/979] Make field PoolManager.repreparePayloads final --- .../datastax/oss/driver/internal/core/session/PoolManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java index 6127c00226d..68f3519cf52 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java @@ -82,7 +82,7 @@ public class PoolManager implements AsyncAutoCloseable { // (e.g. DefaultPreparedStatement) which are handled at the protocol level (e.g. // CqlPrepareAsyncProcessor). We keep the two separate to avoid introducing a dependency from the // session to a particular processor implementation. 
- private ConcurrentMap repreparePayloads = + private final ConcurrentMap repreparePayloads = new MapMaker().weakValues().makeMap(); private final String logPrefix; From c72d5b9cb90867d3860629c2be95fa849645d7c6 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 14 Mar 2021 18:41:08 +0100 Subject: [PATCH 651/979] Rename test in OsgiShadedIT --- .../com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java index 21d029faa27..a03b7fa796b 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiShadedIT.java @@ -47,7 +47,7 @@ public Option[] config() { } @Test - public void test_shaded_reactive() throws Exception { + public void test_shaded() throws Exception { DefaultServiceChecks.checkService(service); } } From f3ee4847464aef151fbc2cc5cf5455de48a5d72a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 14 Mar 2021 20:34:04 +0100 Subject: [PATCH 652/979] Replace deprecated AssertJ method --- .../graph/ContinuousGraphRequestHandlerTest.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java index a5d0c5934d8..de92761e45e 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/ContinuousGraphRequestHandlerTest.java @@ -20,6 +20,7 @@ import static com.datastax.dse.driver.internal.core.graph.GraphTestUtils.tenGraphRows; import static 
com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.times; @@ -182,9 +183,10 @@ public void should_honor_default_timeout() throws Exception { // will trigger the global timeout and complete it exceptionally globalTimeout.task().run(globalTimeout); - assertThat(page1Future.toCompletableFuture()) - .hasFailedWithThrowableThat() - .isInstanceOf(DriverTimeoutException.class) + assertThat(page1Future.toCompletableFuture()).isCompletedExceptionally(); + + assertThatThrownBy(() -> page1Future.toCompletableFuture().get()) + .hasRootCauseExactlyInstanceOf(DriverTimeoutException.class) .hasMessageContaining("Query timed out after " + defaultTimeout); } } @@ -233,9 +235,10 @@ public void should_honor_statement_timeout() throws Exception { // will trigger the global timeout and complete it exceptionally globalTimeout.task().run(globalTimeout); - assertThat(page1Future.toCompletableFuture()) - .hasFailedWithThrowableThat() - .isInstanceOf(DriverTimeoutException.class) + assertThat(page1Future.toCompletableFuture()).isCompletedExceptionally(); + + assertThatThrownBy(() -> page1Future.toCompletableFuture().get()) + .hasRootCauseExactlyInstanceOf(DriverTimeoutException.class) .hasMessageContaining("Query timed out after " + statementTimeout); } } From 890a8c8c3daff480922017d3d998f2c54eceba95 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 14 Mar 2021 21:06:56 +0100 Subject: [PATCH 653/979] Use DependencyCheck in CompressorSubstitutions --- .../core/protocol/CompressorSubstitutions.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java index fe43bea0863..c760344940c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; import com.datastax.oss.protocol.internal.Compressor; import com.oracle.svm.core.annotate.Delete; import com.oracle.svm.core.annotate.Substitute; @@ -33,6 +34,7 @@ * BuiltInCompressors#newInstance(String, DriverContext)} to throw an error if the user attempts to * configure it. */ +@SuppressWarnings("unused") public class CompressorSubstitutions { @TargetClass(value = BuiltInCompressors.class, onlyWith = Lz4Present.class) @@ -87,17 +89,9 @@ public static final class DeleteLz4Compressor {} public static final class DeleteSnappyCompressor {} public static class Lz4Present implements BooleanSupplier { - - private static final String LZ4_CLZ_NAME = "net.jpountz.lz4.LZ4Compressor"; - @Override public boolean getAsBoolean() { - try { - Class.forName(LZ4_CLZ_NAME); - return true; - } catch (ClassNotFoundException e) { - return false; - } + return DependencyCheck.LZ4.isPresent(); } } From d75ddadf5bf326c9530ff90c949f4c5ff3a6055e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 14 Mar 2021 19:04:53 +0100 Subject: [PATCH 654/979] JAVA-2917: Include GraalVM substitutions for request processors and geo codecs --- changelog/README.md | 1 + .../type/codec/DseTypeCodecsRegistrar.java | 36 ++++++ .../DseTypeCodecsRegistrarSubstitutions.java | 43 +++++++ .../core/context/DefaultDriverContext.java | 88 ++------------ .../session/BuiltInRequestProcessors.java | 111 ++++++++++++++++++ ...BuiltInRequestProcessorsSubstitutions.java | 
88 ++++++++++++++ 6 files changed, 286 insertions(+), 81 deletions(-) create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java create mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java diff --git a/changelog/README.md b/changelog/README.md index 3c1554bdb12..9255ba3df72 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2917: Include GraalVM substitutions for request processors and geo codecs - [bug] JAVA-2918: Exclude invalid peers from schema agreement checks ### 4.10.0 diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java new file mode 100644 index 00000000000..5075caa68b2 --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java @@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.core.type.codec; + +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DseTypeCodecsRegistrar { + + private static final Logger LOG = LoggerFactory.getLogger(DseTypeCodecsRegistrar.class); + + public static void registerDseCodecs(MutableCodecRegistry registry) { + registry.register(DseTypeCodecs.DATE_RANGE); + if (DependencyCheck.ESRI.isPresent()) { + registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); + } else { + LOG.debug("ESRI was not found on the classpath: geo codecs will not be available"); + } + } +} diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java new file mode 100644 index 00000000000..51c4958824d --- /dev/null +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java @@ -0,0 +1,43 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.dse.driver.internal.core.type.codec; + +import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import java.util.function.BooleanSupplier; + +@SuppressWarnings("unused") +public class DseTypeCodecsRegistrarSubstitutions { + + @TargetClass(value = DseTypeCodecsRegistrar.class, onlyWith = EsriMissing.class) + public static final class DseTypeCodecsRegistrarEsriMissing { + + @Substitute + public static void registerDseCodecs(MutableCodecRegistry registry) { + registry.register(DseTypeCodecs.DATE_RANGE); + } + } + + public static class EsriMissing implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + return !DependencyCheck.ESRI.isPresent(); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 5857b0b9be7..f37c2eae3dc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -16,17 +16,9 @@ package com.datastax.oss.driver.internal.core.context; import com.datastax.dse.driver.api.core.config.DseDriverOption; -import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; -import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; -import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; -import 
com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; -import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; -import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; -import com.datastax.dse.driver.internal.core.graph.GraphSupportChecker; -import com.datastax.dse.driver.internal.core.graph.reactive.ReactiveGraphRequestProcessor; import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; +import com.datastax.dse.driver.internal.core.type.codec.DseTypeCodecsRegistrar; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; import com.datastax.dse.protocol.internal.ProtocolV4ClientCodecsForDse; @@ -60,10 +52,6 @@ import com.datastax.oss.driver.internal.core.channel.DefaultWriteCoalescer; import com.datastax.oss.driver.internal.core.channel.WriteCoalescer; import com.datastax.oss.driver.internal.core.control.ControlConnection; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.internal.core.metadata.CloudTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; @@ -83,6 +71,7 @@ import com.datastax.oss.driver.internal.core.protocol.ByteBufPrimitiveCodec; import com.datastax.oss.driver.internal.core.servererrors.DefaultWriteTypeRegistry; import com.datastax.oss.driver.internal.core.servererrors.WriteTypeRegistry; +import com.datastax.oss.driver.internal.core.session.BuiltInRequestProcessors; import com.datastax.oss.driver.internal.core.session.PoolManager; import 
com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; @@ -105,7 +94,6 @@ import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; import java.net.InetSocketAddress; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -503,65 +491,10 @@ protected ControlConnection buildControlConnection() { } protected RequestProcessorRegistry buildRequestProcessorRegistry() { - String logPrefix = getSessionName(); - - List> processors = new ArrayList<>(); - - // regular requests (sync and async) - CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); - CqlRequestSyncProcessor cqlRequestSyncProcessor = - new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); - processors.add(cqlRequestAsyncProcessor); - processors.add(cqlRequestSyncProcessor); - - // prepare requests (sync and async) - CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); - CqlPrepareSyncProcessor cqlPrepareSyncProcessor = - new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); - processors.add(cqlPrepareAsyncProcessor); - processors.add(cqlPrepareSyncProcessor); - - // continuous requests (sync and async) - ContinuousCqlRequestAsyncProcessor continuousCqlRequestAsyncProcessor = - new ContinuousCqlRequestAsyncProcessor(); - ContinuousCqlRequestSyncProcessor continuousCqlRequestSyncProcessor = - new ContinuousCqlRequestSyncProcessor(continuousCqlRequestAsyncProcessor); - processors.add(continuousCqlRequestAsyncProcessor); - processors.add(continuousCqlRequestSyncProcessor); - - // graph requests (sync and async) - GraphRequestAsyncProcessor graphRequestAsyncProcessor = null; - if (DependencyCheck.TINKERPOP.isPresent()) { - graphRequestAsyncProcessor = new GraphRequestAsyncProcessor(this, new GraphSupportChecker()); - GraphRequestSyncProcessor graphRequestSyncProcessor = - new 
GraphRequestSyncProcessor(graphRequestAsyncProcessor); - processors.add(graphRequestAsyncProcessor); - processors.add(graphRequestSyncProcessor); - } else { - LOG.info( - "Could not register Graph extensions; " - + "this is normal if Tinkerpop was explicitly excluded from classpath"); - } - - // reactive requests (regular, continuous and graph) - if (DependencyCheck.REACTIVE_STREAMS.isPresent()) { - CqlRequestReactiveProcessor cqlRequestReactiveProcessor = - new CqlRequestReactiveProcessor(cqlRequestAsyncProcessor); - ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = - new ContinuousCqlRequestReactiveProcessor(continuousCqlRequestAsyncProcessor); - processors.add(cqlRequestReactiveProcessor); - processors.add(continuousCqlRequestReactiveProcessor); - if (graphRequestAsyncProcessor != null) { - ReactiveGraphRequestProcessor reactiveGraphRequestProcessor = - new ReactiveGraphRequestProcessor(graphRequestAsyncProcessor); - processors.add(reactiveGraphRequestProcessor); - } - } else { - LOG.info( - "Could not register Reactive extensions; " - + "this is normal if Reactive Streams was explicitly excluded from classpath"); - } - return new RequestProcessorRegistry(logPrefix, processors.toArray(new RequestProcessor[0])); + List> processors = + BuiltInRequestProcessors.createDefaultProcessors(this); + return new RequestProcessorRegistry( + getSessionName(), processors.toArray(new RequestProcessor[0])); } protected CodecRegistry buildCodecRegistry(ProgrammaticArguments arguments) { @@ -570,14 +503,7 @@ protected CodecRegistry buildCodecRegistry(ProgrammaticArguments arguments) { registry = new DefaultCodecRegistry(this.sessionName); } registry.register(arguments.getTypeCodecs()); - registry.register(DseTypeCodecs.DATE_RANGE); - if (DependencyCheck.ESRI.isPresent()) { - registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); - } else { - LOG.info( - "Could not register Geo codecs; " - + "this is normal if ESRI was 
explicitly excluded from classpath"); - } + DseTypeCodecsRegistrar.registerDseCodecs(registry); return registry; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java new file mode 100644 index 00000000000..a4690847838 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java @@ -0,0 +1,111 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.session; + +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; +import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.cql.reactive.CqlRequestReactiveProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestAsyncProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphRequestSyncProcessor; +import com.datastax.dse.driver.internal.core.graph.GraphSupportChecker; +import com.datastax.dse.driver.internal.core.graph.reactive.ReactiveGraphRequestProcessor; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import java.util.ArrayList; +import java.util.List; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class BuiltInRequestProcessors { + + private static final Logger LOG = LoggerFactory.getLogger(BuiltInRequestProcessors.class); + + public static List> createDefaultProcessors(DefaultDriverContext context) { + List> processors = new ArrayList<>(); + addBasicProcessors(processors); + if (DependencyCheck.TINKERPOP.isPresent()) { + addGraphProcessors(context, processors); + } else { + LOG.debug("Tinkerpop was not found on the classpath: graph extensions will not be available"); + } + if (DependencyCheck.REACTIVE_STREAMS.isPresent()) { + addReactiveProcessors(processors); + } else { + LOG.debug( + "Reactive Streams was not found on the classpath: 
reactive extensions will not be available"); + } + if (DependencyCheck.REACTIVE_STREAMS.isPresent() && DependencyCheck.TINKERPOP.isPresent()) { + addGraphReactiveProcessors(context, processors); + } + return processors; + } + + public static void addBasicProcessors(List> processors) { + // regular requests (sync and async) + CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); + CqlRequestSyncProcessor cqlRequestSyncProcessor = + new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); + processors.add(cqlRequestAsyncProcessor); + processors.add(cqlRequestSyncProcessor); + + // prepare requests (sync and async) + CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); + CqlPrepareSyncProcessor cqlPrepareSyncProcessor = + new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareAsyncProcessor); + processors.add(cqlPrepareSyncProcessor); + + // continuous requests (sync and async) + ContinuousCqlRequestAsyncProcessor continuousCqlRequestAsyncProcessor = + new ContinuousCqlRequestAsyncProcessor(); + ContinuousCqlRequestSyncProcessor continuousCqlRequestSyncProcessor = + new ContinuousCqlRequestSyncProcessor(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestAsyncProcessor); + processors.add(continuousCqlRequestSyncProcessor); + } + + public static void addGraphProcessors( + DefaultDriverContext context, List> processors) { + GraphRequestAsyncProcessor graphRequestAsyncProcessor = + new GraphRequestAsyncProcessor(context, new GraphSupportChecker()); + GraphRequestSyncProcessor graphRequestSyncProcessor = + new GraphRequestSyncProcessor(graphRequestAsyncProcessor); + processors.add(graphRequestAsyncProcessor); + processors.add(graphRequestSyncProcessor); + } + + public static void addReactiveProcessors(List> processors) { + CqlRequestReactiveProcessor cqlRequestReactiveProcessor = + new CqlRequestReactiveProcessor(new CqlRequestAsyncProcessor()); + 
ContinuousCqlRequestReactiveProcessor continuousCqlRequestReactiveProcessor = + new ContinuousCqlRequestReactiveProcessor(new ContinuousCqlRequestAsyncProcessor()); + processors.add(cqlRequestReactiveProcessor); + processors.add(continuousCqlRequestReactiveProcessor); + } + + public static void addGraphReactiveProcessors( + DefaultDriverContext context, List> processors) { + ReactiveGraphRequestProcessor reactiveGraphRequestProcessor = + new ReactiveGraphRequestProcessor( + new GraphRequestAsyncProcessor(context, new GraphSupportChecker())); + processors.add(reactiveGraphRequestProcessor); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java new file mode 100644 index 00000000000..e0afbb06892 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java @@ -0,0 +1,88 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.session; + +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BooleanSupplier; + +@SuppressWarnings("unused") +public class BuiltInRequestProcessorsSubstitutions { + + @TargetClass(value = BuiltInRequestProcessors.class, onlyWith = GraphMissingReactiveMissing.class) + public static final class BuiltInRequestProcessorsGraphMissingReactiveMissing { + + @Substitute + public static List> createDefaultProcessors( + DefaultDriverContext context) { + List> processors = new ArrayList<>(); + BuiltInRequestProcessors.addBasicProcessors(processors); + return processors; + } + } + + @TargetClass(value = BuiltInRequestProcessors.class, onlyWith = GraphMissingReactivePresent.class) + public static final class BuiltInRequestProcessorsGraphMissingReactivePresent { + + @Substitute + public static List> createDefaultProcessors( + DefaultDriverContext context) { + List> processors = new ArrayList<>(); + BuiltInRequestProcessors.addBasicProcessors(processors); + BuiltInRequestProcessors.addReactiveProcessors(processors); + return processors; + } + } + + @TargetClass(value = BuiltInRequestProcessors.class, onlyWith = GraphPresentReactiveMissing.class) + public static final class BuiltInRequestProcessorsGraphPresentReactiveMissing { + + @Substitute + public static List> createDefaultProcessors( + DefaultDriverContext context) { + List> processors = new ArrayList<>(); + BuiltInRequestProcessors.addBasicProcessors(processors); + BuiltInRequestProcessors.addGraphProcessors(context, processors); + return processors; + } + } + + public static class GraphMissingReactiveMissing implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + return 
!DependencyCheck.TINKERPOP.isPresent() + && !DependencyCheck.REACTIVE_STREAMS.isPresent(); + } + } + + public static class GraphMissingReactivePresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + return !DependencyCheck.TINKERPOP.isPresent() && DependencyCheck.REACTIVE_STREAMS.isPresent(); + } + } + + public static class GraphPresentReactiveMissing implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + return DependencyCheck.TINKERPOP.isPresent() && !DependencyCheck.REACTIVE_STREAMS.isPresent(); + } + } +} From 0df1d2ea5ce398b73405ff4482070ed1e9ddf180 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sun, 14 Mar 2021 16:45:55 +0100 Subject: [PATCH 655/979] JAVA-2927: Make Dropwizard truly optional --- changelog/README.md | 1 + .../driver/api/core/config/OptionsMap.java | 2 +- .../core/metrics/DefaultMetricsFactory.java | 58 +++++ .../DefaultMetricsFactorySubstitutions.java | 55 +++++ .../internal/core/metrics/MetricPaths.java | 2 +- .../core/metrics/NoopMetricsFactory.java | 64 +++++ .../internal/core/util/DependencyCheck.java | 1 + core/src/main/resources/reference.conf | 53 ++-- .../context/MockedDriverContextFactory.java | 2 +- .../core/metrics/NoopMetricsFactoryTest.java | 62 +++++ manual/core/integration/README.md | 33 ++- manual/core/metrics/README.md | 233 ++++++++++++------ metrics/micrometer/pom.xml | 4 + metrics/microprofile/pom.xml | 4 + pom.xml | 2 +- 15 files changed, 469 insertions(+), 107 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactory.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactoryTest.java diff --git a/changelog/README.md 
b/changelog/README.md index 9255ba3df72..968219ce36d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [bug] JAVA-2927: Make Dropwizard truly optional - [improvement] JAVA-2917: Include GraalVM substitutions for request processors and geo codecs - [bug] JAVA-2918: Exclude invalid peers from schema agreement checks diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index c5eb7829deb..b0fd39b57c2 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -322,7 +322,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); - map.put(TypedDriverOption.METRICS_FACTORY_CLASS, "DropwizardMetricsFactory"); + map.put(TypedDriverOption.METRICS_FACTORY_CLASS, "DefaultMetricsFactory"); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(12)); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java new file mode 100644 index 00000000000..e6d78d30dc1 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import java.util.Optional; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class DefaultMetricsFactory implements MetricsFactory { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultMetricsFactory.class); + + private final MetricsFactory delegate; + + @SuppressWarnings("unused") + public DefaultMetricsFactory(DriverContext context) { + if (DependencyCheck.DROPWIZARD.isPresent()) { + this.delegate = new DropwizardMetricsFactory(context); + } else { + this.delegate = new NoopMetricsFactory(context); + } + LOG.debug("[{}] Using {}", context.getSessionName(), delegate.getClass().getSimpleName()); + } + + @Override + public Optional getMetrics() { + return delegate.getMetrics(); + } + + @Override + public SessionMetricUpdater getSessionUpdater() { + return delegate.getSessionUpdater(); + } + + @Override + public NodeMetricUpdater newNodeUpdater(Node node) { + return delegate.newNodeUpdater(node); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java new file mode 100644 index 
00000000000..3965efc8354 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java @@ -0,0 +1,55 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.oracle.svm.core.annotate.Alias; +import com.oracle.svm.core.annotate.Delete; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import com.oracle.svm.core.annotate.TargetElement; +import java.util.function.BooleanSupplier; + +@SuppressWarnings("unused") +public class DefaultMetricsFactorySubstitutions { + + @TargetClass(value = DefaultMetricsFactory.class, onlyWith = DropwizardMissing.class) + public static final class DefaultMetricsFactoryDropwizardMissing { + + @Alias + @TargetElement(name = "delegate") + @SuppressWarnings({"FieldCanBeLocal", "FieldMayBeFinal"}) + private MetricsFactory delegate; + + @Substitute + @TargetElement(name = TargetElement.CONSTRUCTOR_NAME) + public DefaultMetricsFactoryDropwizardMissing(DriverContext context) { + this.delegate = new NoopMetricsFactory(context); + } + } + + @TargetClass(value = DropwizardMetricsFactory.class, onlyWith = DropwizardMissing.class) + @Delete + public static final class DeleteDropwizardMetricsFactory {} + 
+ public static class DropwizardMissing implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + return !DependencyCheck.DROPWIZARD.isPresent(); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java index b95edc74f73..1b513884bc5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricPaths.java @@ -30,7 +30,7 @@ public class MetricPaths { - private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); + private static final Logger LOG = LoggerFactory.getLogger(MetricPaths.class); public static Set parseSessionMetricPaths(List paths, String logPrefix) { Set result = new HashSet<>(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactory.java new file mode 100644 index 00000000000..501f554dd2f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactory.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import java.util.List; +import java.util.Optional; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@ThreadSafe +public class NoopMetricsFactory implements MetricsFactory { + + private static final Logger LOG = LoggerFactory.getLogger(NoopMetricsFactory.class); + + @SuppressWarnings("unused") + public NoopMetricsFactory(DriverContext context) { + String logPrefix = context.getSessionName(); + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + List enabledSessionMetrics = + config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED); + List enabledNodeMetrics = + config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED); + if (!enabledSessionMetrics.isEmpty() || !enabledNodeMetrics.isEmpty()) { + LOG.warn( + "[{}] Some session-level or node-level metrics were enabled, " + + "but NoopMetricsFactory is being used: all metrics will be empty", + logPrefix); + } + } + + @Override + public Optional getMetrics() { + return Optional.empty(); + } + + @Override + public SessionMetricUpdater getSessionUpdater() { + return NoopSessionMetricUpdater.INSTANCE; + } + + @Override + public NodeMetricUpdater newNodeUpdater(Node node) { + return NoopNodeMetricUpdater.INSTANCE; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java index 1f3b6ae4480..8af213b71ed 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java @@ -36,6 +36,7 @@ public enum DependencyCheck { "com.fasterxml.jackson.core.JsonParser", // jackson-databind "com.fasterxml.jackson.databind.ObjectMapper"), + DROPWIZARD("com.codahale.metrics.MetricRegistry"), ; /** diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 23e3fc2d14e..53b8e8afdf3 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1313,23 +1313,40 @@ datastax-java-driver { factory { # The class for the metrics factory. # - # The driver provides Dropwizard, Micrometer and MicroProfile metrics out of the box. - # To use Dropwizard, this value should be set to "DropwizardMetricsFactory". To use - # Micrometer, set the value to "MicrometerMetricsFactory". To use MicroProfile, set the value - # to "MicroProfileMetricsFactory". + # The driver provides out-of-the-box support for three metrics libraries: Dropwizard, + # Micrometer and MicroProfile Metrics. # - # For Micrometer and MicroProfile, you will also need to add an additional dependency: - # com.datastax.oss:java-driver-metrics-micrometer and - # com.datastax.oss:java-driver-metrics-microprofile respectively. + # Dropwizard is the default metrics library in the driver; to use Dropwizard, this value + # should be left to its default, "DefaultMetricsFactory", or set to + # "DropwizardMetricsFactory". The only difference between the two is that the former will work + # even if Dropwizard is not present on the classpath (in which case it will silently disable + # metrics), while the latter requires its presence. # - # If you would like to use a different metrics framework, change the factory class to the - # fully-qualified name of a class that implements - # com.datastax.oss.driver.internal.core.metrics.MetricsFactory. 
+ # To select Micrometer, set the value to "MicrometerMetricsFactory", and to select + # MicroProfile Metrics, set the value to "MicroProfileMetricsFactory". For these libraries to + # be used, you will also need to add an additional dependency: + # - Micrometer: com.datastax.oss:java-driver-metrics-micrometer + # - MicroProfile: com.datastax.oss:java-driver-metrics-microprofile + # + # If you would like to use another metrics library, set this value to the fully-qualified name + # of a class that implements com.datastax.oss.driver.internal.core.metrics.MetricsFactory. + # + # It is also possible to use "NoopMetricsFactory", which forcibly disables metrics completely. + # In fact, "DefaultMetricsFactory" delegates to "DropwizardMetricsFactory" if Dropwizard is + # present on the classpath, or to "NoopMetricsFactory" if it isn't. + # + # Note: specifying a metrics factory is not enough to enable metrics; for the driver to + # actually start collecting metrics, you also need to specify which metrics to collect. See + # the following options for more information: + # - advanced.metrics.session.enabled + # - advanced.metrics.node.enabled + # + # See also the driver online manual for extensive instructions about how to configure metrics. # # Required: yes # Modifiable at runtime: no # Overridable in a profile: no - class = DropwizardMetricsFactory + class = DefaultMetricsFactory } # The session-level metrics (all disabled by default). # @@ -1417,7 +1434,7 @@ datastax-java-driver { # Extra configuration (for the metrics that need it) - # Required: if the 'cql-requests' metric is enabled + # Required: if the 'cql-requests' metric is enabled, and Dropwizard / HdrHistogram is used. 
# Modifiable at runtime: no # Overridable in a profile: no cql-requests { @@ -1457,7 +1474,8 @@ datastax-java-driver { refresh-interval = 5 minutes } - # Required: if the 'throttling.delay' metric is enabled + # Required: if the 'throttling.delay' metric is enabled, and Dropwizard / HdrHistogram is + # used. # Modifiable at runtime: no # Overridable in a profile: no throttling.delay { @@ -1466,7 +1484,8 @@ datastax-java-driver { refresh-interval = 5 minutes } - # Required: if the 'continuous-cql-requests' metric is enabled + # Required: if the 'continuous-cql-requests' metric is enabled, and Dropwizard / HdrHistogram + # is used # Modifiable at runtime: no # Overridable in a profile: no continuous-cql-requests { @@ -1504,7 +1523,7 @@ datastax-java-driver { refresh-interval = 5 minutes } - # Required: if the 'graph-requests' metric is enabled + # Required: if the 'graph-requests' metric is enabled, and Dropwizard / HdrHistogram is used # Modifiable at runtime: no # Overridable in a profile: no graph-requests { @@ -1695,7 +1714,7 @@ datastax-java-driver { # See cql-requests in the `session` section # - # Required: if the 'cql-messages' metric is enabled + # Required: if the 'cql-messages' metric is enabled, and Dropwizard / HdrHistogram is used # Modifiable at runtime: no # Overridable in a profile: no cql-messages { @@ -1706,7 +1725,7 @@ datastax-java-driver { # See graph-requests in the `session` section # - # Required: if the 'graph-messages' metric is enabled + # Required: if the 'graph-messages' metric is enabled, and Dropwizard / HdrHistogram is used # Modifiable at runtime: no # Overridable in a profile: no graph-messages { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index 15d6d296fc4..580f558dc33 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -51,7 +51,7 @@ public static DefaultDriverContext defaultDriverContext( when(blankProfile.isDefined(DefaultDriverOption.METRICS_FACTORY_CLASS)) .thenReturn(true); when(blankProfile.getString(DefaultDriverOption.METRICS_FACTORY_CLASS)) - .thenReturn("DropwizardMetricsFactory"); + .thenReturn("DefaultMetricsFactory"); return blankProfile; }); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactoryTest.java new file mode 100644 index 00000000000..bcf40ef301f --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/NoopMetricsFactoryTest.java @@ -0,0 +1,62 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import java.util.Collections; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class NoopMetricsFactoryTest { + + @Test + public void should_log_warning_when_metrics_enabled() { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverConfig config = mock(DriverConfig.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + when(context.getSessionName()).thenReturn("MockSession"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) + .thenReturn(Collections.singletonList(DefaultSessionMetric.CQL_REQUESTS.getPath())); + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(NoopMetricsFactory.class, Level.WARN); + + // when + new NoopMetricsFactory(context); + + // then + verify(logger.appender, times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains("[MockSession] Some 
session-level or node-level metrics were enabled"); + } +} diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 37b28810105..4927089f98c 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -56,6 +56,20 @@ right dependencies: See this page. + + + Instrumenting the driver and gathering metrics using the Micrometer metrics library. + + java‑driver‑metrics‑micrometer + See this page. + + + + Instrumenting the driver and gathering metrics using the MicroProfile Metrics library. + + java‑driver‑metrics‑microprofile + See this page. + "Bill Of Materials": can help manage versions if you use multiple driver artifacts. @@ -390,12 +404,11 @@ enable compression. See the [Compression](../compression/) page for more details #### Metrics The driver exposes [metrics](../metrics/) through the -[Dropwizard](http://metrics.dropwizard.io/4.0.0/manual/index.html) library. +[Dropwizard](http://metrics.dropwizard.io/4.1.2/) library. The dependency is declared as required, but metrics are optional. If you've disabled all metrics, or -if you are using a [different metrics framework](../metrics/#changing-the-metrics-frameworks), and -you never call [Session.getMetrics] anywhere in your application, then you can remove the -dependency: +if you are using a different metrics library, and you never call [Session.getMetrics] anywhere in +your application, then you can remove the dependency: ```xml @@ -411,12 +424,14 @@ dependency: ``` -In addition, "timer" metrics use [HdrHistogram](http://hdrhistogram.github.io/HdrHistogram/) to -record latency percentiles. At the time of writing, these metrics are: `cql-requests`, -`throttling.delay` and `cql-messages`; you can also identify them by reading the comments in the -[configuration reference](../configuration/reference/) (look for "exposed as a Timer"). 
+In addition, when using Dropwizard, "timer" metrics use +[HdrHistogram](http://hdrhistogram.github.io/HdrHistogram/) to record latency percentiles. At the +time of writing, these metrics are: `cql-requests`, `throttling.delay` and `cql-messages`; you can +also identify them by reading the comments in the [configuration +reference](../configuration/reference/) (look for "exposed as a Timer"). -If all of these metrics are disabled, you can remove the dependency: +If all of these metrics are disabled, or if you use a different metrics library, you can remove the +dependency: ```xml diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 4a15a95f2eb..276245dc524 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -2,69 +2,72 @@ ### Quick overview -* `advanced.metrics` in the configuration. All disabled by default, can be selected individually. +* `advanced.metrics` in the configuration. All metrics disabled by default. To enable, select the + metrics library to use, then define which individual metrics to activate. * some metrics are per node, others global to the session, or both. * unlike driver 3, JMX is not provided out of the box. You need to add the dependency manually. ----- -The driver exposes measurements of its internal behavior through a choice of three popular metrics -frameworks: [Dropwizard Metrics], [Micrometer Metrics] or [MicroProfile Metrics]. Application -developers can select a metrics framework, which metrics are enabled, and export them to a -monitoring tool. +The driver is able to report measurements of its internal behavior to a variety of metrics +libraries, and ships with bindings for three popular ones: [Dropwizard Metrics] , [Micrometer +Metrics] and [MicroProfile Metrics]. -### Structure - -There are two categories of metrics: +### Selecting a Metrics Library -* session-level: the measured data is global to a `Session` instance. 
For example, `connected-nodes` - measures the number of nodes to which we have connections. -* node-level: the data is specific to a node (and therefore there is one metric instance per node). - For example, `pool.open-connections` measures the number of connections open to this particular - node. - -Metric names are path-like, dot-separated strings. The driver prefixes them with the name of the -session (see `session-name` in the configuration), and in the case of node-level metrics, `nodes` -followed by a textual representation of the node's address. For example: +#### Dropwizard Metrics -``` -s0.connected-nodes => 2 -s0.nodes.127_0_0_1:9042.pool.open-connections => 2 -s0.nodes.127_0_0_2:9042.pool.open-connections => 1 -``` +Dropwizard is the driver's default metrics library; there is no additional configuration nor any +extra dependency to add if you wish to use Dropwizard. -### Configuration +#### Micrometer -By default, all metrics are disabled. You can turn them on individually in the configuration, by -adding their name to these lists: +To use Micrometer you must: + +1. Define `MicrometerMetricsFactory` as the metrics factory to use in the driver configuration: ``` datastax-java-driver.advanced.metrics { - session.enabled = [ connected-nodes, cql-requests ] - node.enabled = [ pool.open-connections, pool.in-flight ] + factory.class = MicrometerMetricsFactory } ``` -To find out which metrics are available, see the [reference configuration]. It contains a -commented-out line for each metric, with detailed explanations on its intended usage. +2. Add a dependency to `java-driver-metrics-micrometer` in your application. This separate driver +module contains the actual bindings for Micrometer, and depends itself on the Micrometer library: -If you specify a metric that doesn't exist, it will be ignored and a warning will be logged. 
+```xml + + com.datastax.oss + java-driver-metrics-micrometer + ${driver.version} + +``` -The `metrics` section may also contain additional configuration for some specific metrics; again, -see the [reference configuration] for more details. +3. You should also exclude Dropwizard and HdrHistogram, which are two transitive dependencies of the +driver, because they are not relevant when using Micrometer: -#### Changing the Metrics Frameworks +```xml + + com.datastax.oss + java-driver-core + + + io.dropwizard.metrics + metrics-core + + + org.hdrhistogram + HdrHistogram + + + +``` -The default metrics framework is Dropwizard. You can change this to either Micrometer or -MicroProfile in the configuration: +#### MicroProfile Metrics -``` -datastax-java-driver.advanced.metrics { - factory.class = MicrometerMetricsFactory -} -``` +To use MicroProfile Metrics you must: -or +1. Define `MicroProfileMetricsFactory` as the metrics factory to use in the driver configuration: ``` datastax-java-driver.advanced.metrics { @@ -72,32 +75,100 @@ datastax-java-driver.advanced.metrics { } ``` -In addition to the configuration change above, you will also need to include the appropriate module -in your project. For Micrometer: +2. Add a dependency to `java-driver-metrics-microprofile` in your application. This separate driver +module contains the actual bindings for MicroProfile, and depends itself on the MicroProfile Metrics +library: ```xml com.datastax.oss - java-driver-metrics-micrometer + java-driver-metrics-microprofile ${driver.version} ``` -For MicroProfile: +3. 
You should also exclude Dropwizard and HdrHistogram, which are two transitive dependencies of the +driver, because they are not relevant when using MicroProfile Metrics: ```xml com.datastax.oss - java-driver-metrics-microprofile - ${driver.version} + java-driver-core + + + io.dropwizard.metrics + metrics-core + + + org.hdrhistogram + HdrHistogram + + ``` -#### Metric Registry +#### Other Metrics libraries + +Other metrics libraries can also be used. However, you will need to provide a custom +metrics factory. Simply implement the +`com.datastax.oss.driver.internal.core.metrics.MetricsFactory` interface for your library of choice, +then pass the fully-qualified name of that implementation class to the driver using the +`advanced.metrics.factory.class` option. See the [reference configuration]. + +You will certainly need to add the metrics library as a dependency to your application as well. +It is also recommended excluding Dropwizard and HdrHistogram, as shown above. + +### Enabling specific driver metrics + +Now that the metrics library is configured, you need to activate the driver metrics you are +interested in. + +There are two categories of driver metrics: + +* session-level: the measured data is global to a `Session` instance. For example, `connected-nodes` + measures the number of nodes to which we have connections. +* node-level: the data is specific to a node (and therefore there is one metric instance per node). + For example, `pool.open-connections` measures the number of connections open to this particular + node. + +Metric names are path-like, dot-separated strings. The driver prefixes them with the name of the +session (see `session-name` in the configuration), and in the case of node-level metrics, `nodes` +followed by a textual representation of the node's address. 
For example: + +``` +s0.connected-nodes => 2 +s0.nodes.127_0_0_1:9042.pool.open-connections => 2 +s0.nodes.127_0_0_2:9042.pool.open-connections => 1 +``` + +To find out which metrics are available, see the [reference configuration]. It contains a +commented-out line for each metric, with detailed explanations on its intended usage. + +By default, all metrics are disabled. You can turn them on individually in the configuration, by +adding their name to these lists: + +``` +datastax-java-driver.advanced.metrics { + session.enabled = [ connected-nodes, cql-requests ] + node.enabled = [ pool.open-connections, pool.in-flight ] +} +``` + +If you specify a metric that doesn't exist, it will be ignored, and a warning will be logged. -For any of the three metrics frameworks, you can provide an external Metric Registry object when -building a Session. This will easily allow your application to export the driver's operational -metrics to whatever reporting system you want to use. +Finally, if you are using Dropwizard and enabled any metric of timer type, such as `cql-requests`, +it is also possible to provide additional configuration to fine-tune the underlying histogram's +characteristics and precision, such as its highest expected latency, its number of significant +digits to use, and its refresh interval. Again, see the [reference configuration] for more details. + +### Using an external metric registry + +Regardless of which metrics library is used, you can provide an external metric registry object when +building a session. This allows the driver to transparently export its operational metrics to +whatever reporting system you want to use. 
+ +To pass a metric registry object to the session, use the `CqlSessionBuilder.withMetricRegistry()` +method: ```java CqlSessionBuilder builder = CqlSession.builder(); @@ -105,43 +176,51 @@ builder.withMetricRegistry(myRegistryObject); CqlSession session = builder.build(); ``` -In the above example, `myRegistryObject` should be an instance of the base registry type for the -metrics framework you are using: +Beware that the driver does not inspect the provided object, it simply passes it to the metrics +factory in use; it is the user's responsibility to provide registry objects compatible with the +metrics library in use. For reference, here are the expected base types for the three built-in +metrics libraries: -``` -Dropwizard: com.codahale.metrics.MetricRegistry -Micrometer: io.micrometer.core.instrument.MeterRegistry -MicroProfile: org.eclipse.microprofile.metrics.MetricRegistry -``` +* Dropwizard: `com.codahale.metrics.MetricRegistry` +* Micrometer: `io.micrometer.core.instrument.MeterRegistry` +* MicroProfile: `org.eclipse.microprofile.metrics.MetricRegistry` + +**NOTE:** MicroProfile **requires** an external instance of its registry to be provided. For +Micrometer, if no registry object is provided, Micrometer's `globalRegistry` will be used. For +Dropwizard, if no registry object is provided, an instance of `MetricRegistry` will be created and +used (in which case, it can be retrieved programmatically if needed, see below). -**NOTE:** Only MicroProfile **requires** an external instance of its Registry to be provided. For -Micrometer, if no Registry object is provided, Micrometer's `globalRegistry` will be used. For -Dropwizard, if no Registry object is provided, an instance of `MetricRegistry` will be created and -used. +### Programmatic access to driver metrics -### Export +Programmatic access to driver metrics is only available when using Dropwizard Metrics. 
Users of +other libraries are encouraged to provide an external registry when creating the driver session (see +above), then use it to gain programmatic access to the driver metrics. -The Dropwizard `MetricRegistry` is exposed via `session.getMetrics().getRegistry()`. You can -retrieve it and configure a `Reporter` to send the metrics to a monitoring tool. +The Dropwizard `MetricRegistry` object is exposed in the driver API via +`session.getMetrics().getRegistry()`. You can retrieve it and, for example, configure a `Reporter` +to send the metrics to a monitoring tool. -**NOTE:** At this time, `session.getMetrics()` is not available when using Micrometer or -MicroProfile metrics. If you wish to use either of those metrics frameworks, it is recommended to -provide a Registry implementation to the driver as described in the [Metric Registry -section](#metric-registry), and follow best practices for exporting that registry to your desired -reporting framework. +**NOTE:** Beware that `session.getMetrics()` is not available when using other metrics libraries, +and will throw a `NoClassDefFoundError` at runtime if accessed in such circumstances. -#### JMX +### Exposing driver metrics with JMX Unlike previous driver versions, JMX support is not included out of the box. +The way to add JMX support to your application depends largely on the metrics library being used. We +show below instructions for Dropwizard only. Micrometer also has support for JMX: please refer to +its [official documentation][Micrometer JMX]. 
+ +#### Dropwizard Metrics + Add the following dependency to your application (make sure the version matches the `metrics-core` dependency of the driver): -``` +```xml io.dropwizard.metrics metrics-jmx - 4.0.2 + 4.1.2 ``` @@ -201,14 +280,14 @@ JmxReporter reporter = reporter.start(); ``` -#### Other protocols +### Exporting metrics with other protocols Dropwizard Metrics has built-in reporters for other output formats: JSON (via a servlet), stdout, CSV files, SLF4J logs and Graphite. Refer to their [manual][Dropwizard manual] for more details. - -[Dropwizard Metrics]: http://metrics.dropwizard.io/4.0.0/manual/index.html -[Dropwizard Manual]: http://metrics.dropwizard.io/4.0.0/getting-started.html#reporting-via-http +[Dropwizard Metrics]: https://metrics.dropwizard.io/4.1.2 +[Dropwizard Manual]: https://metrics.dropwizard.io/4.1.2/getting-started.html [Micrometer Metrics]: https://micrometer.io/docs +[Micrometer JMX]: https://micrometer.io/docs/registry/jmx [MicroProfile Metrics]: https://github.com/eclipse/microprofile-metrics [reference configuration]: ../configuration/reference/ \ No newline at end of file diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8b0134f3307..5593610f010 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -51,6 +51,10 @@ io.dropwizard.metrics metrics-core + + org.hdrhistogram + HdrHistogram + diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 138deb6f11a..f6549e4d73c 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -51,6 +51,10 @@ io.dropwizard.metrics metrics-core + + org.hdrhistogram + HdrHistogram + diff --git a/pom.xml b/pom.xml index 0152ac1a420..b834259994b 100644 --- a/pom.xml +++ b/pom.xml @@ -81,7 +81,7 @@ 4.0.3 2.0.0-M19 2.22.2 - 20.3.0 + 21.0.0.2 false ${skipTests} From 367284fd0aa2bc3583f3afc87e11825129c2c8b9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Mar 2021 16:21:54 +0100 Subject: [PATCH 656/979] 
Memoize DependencyCheck.isPresent --- .../internal/core/util/DependencyCheck.java | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java index 8af213b71ed..0accb5388a0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java @@ -15,7 +15,8 @@ */ package com.datastax.oss.driver.internal.core.util; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.shaded.guava.common.base.Supplier; +import com.datastax.oss.driver.shaded.guava.common.base.Suppliers; /** * A set of driver optional dependencies and a common mechanism to test the presence of such @@ -39,21 +40,32 @@ public enum DependencyCheck { DROPWIZARD("com.codahale.metrics.MetricRegistry"), ; + @SuppressWarnings("ImmutableEnumChecker") + private final Supplier present; + /** - * The fully-qualified name of classes that must exist for the dependency to work properly; we use - * them to test the presence of the whole dependency on the classpath, including its transitive - * dependencies if applicable. This assumes that if these classes are present, then the entire - * library is present and functional, and vice versa. + * We use the given fully-qualified names of classes to test the presence of the whole dependency + * on the classpath, including its transitive dependencies if applicable. This assumes that if + * these classes are present, then the entire library is present and functional, and vice versa. * *

          Note: some of the libraries declared here may be shaded; in these cases the shade plugin * will replace the package names listed above with names starting with {@code * com.datastax.oss.driver.shaded.*}, but the presence check would still work as expected. */ - @SuppressWarnings("ImmutableEnumChecker") - private final ImmutableSet fqcns; - - DependencyCheck(String... fqcns) { - this.fqcns = ImmutableSet.copyOf(fqcns); + DependencyCheck(String... classNamesToTest) { + this.present = + Suppliers.memoize( + () -> { + for (String classNameToTest : classNamesToTest) { + // Always use the driver class loader, assuming that the driver classes and + // the dependency classes are either being loaded by the same class loader, + // or – as in OSGi deployments – by two distinct, but compatible class loaders. + if (Reflection.loadClass(null, classNameToTest) == null) { + return false; + } + } + return true; + }); } /** @@ -62,14 +74,6 @@ public enum DependencyCheck { * @return true if the dependency is present and loadable, false otherwise. */ public boolean isPresent() { - for (String fqcn : fqcns) { - // Always use the driver class loader, assuming that the driver classes and - // the dependency classes are either being loaded by the same class loader, - // or – as in OSGi deployments – by two distinct, but compatible class loaders. 
- if (Reflection.loadClass(null, fqcn) == null) { - return false; - } - } - return true; + return present.get(); } } From 8b88f280b419d28e0b09e8a38deb41d76f91c445 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Mar 2021 17:01:10 +0100 Subject: [PATCH 657/979] JAVA-2916: Annotate generated classes with `@SuppressWarnings` (#1530) --- changelog/README.md | 1 + .../mapper/processor/dao/DaoImplementationGenerator.java | 5 +++++ .../mapper/processor/entity/EntityHelperGenerator.java | 5 +++++ .../mapper/processor/mapper/MapperBuilderGenerator.java | 5 +++++ .../processor/mapper/MapperImplementationGenerator.java | 5 +++++ 5 files changed, 21 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 968219ce36d..7fe757e4343 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2916: Annotate generated classes with `@SuppressWarnings` - [bug] JAVA-2927: Make Dropwizard truly optional - [improvement] JAVA-2917: Include GraalVM substitutions for request processors and geo codecs - [bug] JAVA-2918: Exclude invalid peers from schema agreement checks diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java index 479da635e20..ef4d516a275 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoImplementationGenerator.java @@ -37,6 +37,7 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import com.squareup.javapoet.AnnotationSpec; import 
com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.FieldSpec; @@ -301,6 +302,10 @@ protected JavaFile.Builder getContents() { TypeSpec.Builder classBuilder = TypeSpec.classBuilder(implementationName) .addJavadoc(JAVADOC_GENERATED_WARNING) + .addAnnotation( + AnnotationSpec.builder(SuppressWarnings.class) + .addMember("value", "\"all\"") + .build()) .addModifiers(Modifier.PUBLIC) .addSuperinterface(ClassName.get(interfaceElement)); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java index 2ac1e9adade..dc33d7deb12 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGenerator.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.internal.mapper.processor.util.generation.BindableHandlingSharedCode; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GenericTypeConstantGenerator; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.CodeBlock; import com.squareup.javapoet.FieldSpec; @@ -87,6 +88,10 @@ protected JavaFile.Builder getContents() { TypeSpec.Builder classContents = TypeSpec.classBuilder(helperName) .addJavadoc(JAVADOC_GENERATED_WARNING) + .addAnnotation( + AnnotationSpec.builder(SuppressWarnings.class) + .addMember("value", "\"all\"") + .build()) .addModifiers(Modifier.PUBLIC) .superclass( ParameterizedTypeName.get( diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java index 93ec4f5acfc..c1a4bb078b4 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperBuilderGenerator.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.internal.mapper.processor.GeneratedNames; import com.datastax.oss.driver.internal.mapper.processor.ProcessorContext; import com.datastax.oss.driver.internal.mapper.processor.SingleFileCodeGenerator; +import com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; @@ -62,6 +63,10 @@ protected JavaFile.Builder getContents() { getSessionClass()) .addJavadoc(JAVADOC_PARAGRAPH_SEPARATOR) .addJavadoc(JAVADOC_GENERATED_WARNING) + .addAnnotation( + AnnotationSpec.builder(SuppressWarnings.class) + .addMember("value", "\"all\"") + .build()) .addModifiers(Modifier.PUBLIC) .addMethod( MethodSpec.constructorBuilder() diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java index bd2bf69428f..ca050f291e1 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/mapper/MapperImplementationGenerator.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.internal.mapper.processor.SingleFileCodeGenerator; import com.datastax.oss.driver.internal.mapper.processor.util.NameIndex; import com.datastax.oss.driver.internal.mapper.processor.util.generation.GeneratedCodePatterns; +import 
com.squareup.javapoet.AnnotationSpec; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.JavaFile; @@ -91,6 +92,10 @@ protected JavaFile.Builder getContents() { GeneratedNames.mapperBuilder(interfaceElement)) .addJavadoc(JAVADOC_PARAGRAPH_SEPARATOR) .addJavadoc(JAVADOC_GENERATED_WARNING) + .addAnnotation( + AnnotationSpec.builder(SuppressWarnings.class) + .addMember("value", "\"all\"") + .build()) .addModifiers(Modifier.PUBLIC) .addSuperinterface(ClassName.get(interfaceElement)); From a70705d1fc1231c7b57388aa89ab6a9976918bd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eduard=20Tudenh=C3=B6fner?= Date: Mon, 15 Mar 2021 18:52:29 +0100 Subject: [PATCH 658/979] JAVA-2704: Remove protocol v5 beta status (#1437) Co-authored-by: Alexandre Dutra --- bom/pom.xml | 2 +- changelog/README.md | 1 + .../api/core/DefaultProtocolVersion.java | 7 +++++-- .../oss/driver/api/core/ProtocolVersion.java | 3 ++- .../core/DefaultProtocolVersionRegistry.java | 13 ++++++++++-- .../DefaultProtocolVersionRegistryTest.java | 15 +++++++++++-- .../oss/driver/core/cql/NowInSecondsIT.java | 21 ++----------------- 7 files changed, 35 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 1527d39f924..6aaae045061 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -71,7 +71,7 @@ com.datastax.oss native-protocol - 1.4.12 + 1.4.13-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index 7fe757e4343..420fd72b325 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2704: Remove protocol v5 beta status, add v6-beta - [improvement] JAVA-2916: Annotate generated classes with `@SuppressWarnings` - [bug] JAVA-2927: Make Dropwizard truly optional - [improvement] JAVA-2917: Include GraalVM substitutions for request processors and geo codecs diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java 
b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java index 1a207a9ede9..9d0135dec8f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/DefaultProtocolVersion.java @@ -30,14 +30,17 @@ public enum DefaultProtocolVersion implements ProtocolVersion { /** Version 4, supported by Cassandra 2.2 and above. */ V4(ProtocolConstants.Version.V4, false), + /** Version 5, supported by Cassandra 4.0 and above. */ + V5(ProtocolConstants.Version.V5, false), + /** - * Version 5, currently supported as a beta preview in Cassandra 3.10 and above. + * Version 6, currently supported as a beta preview in Cassandra 4.0 and above. * *

          Do not use this in production. * * @see ProtocolVersion#isBeta() */ - V5(ProtocolConstants.Version.V5, true), + V6(ProtocolConstants.Version.V6, true), ; // Note that, for the sake of convenience, we also expose shortcuts to these constants on the // ProtocolVersion interface. If you add a new enum constant, remember to update the interface as diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java index a633bcf892f..9aacf85c0a2 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ProtocolVersion.java @@ -31,13 +31,14 @@ public interface ProtocolVersion { ProtocolVersion V3 = DefaultProtocolVersion.V3; ProtocolVersion V4 = DefaultProtocolVersion.V4; ProtocolVersion V5 = DefaultProtocolVersion.V5; + ProtocolVersion V6 = DefaultProtocolVersion.V6; ProtocolVersion DSE_V1 = DseProtocolVersion.DSE_V1; ProtocolVersion DSE_V2 = DseProtocolVersion.DSE_V2; /** The default version used for {@link Detachable detached} objects. */ // Implementation note: we can't use the ProtocolVersionRegistry here, this has to be a // compile-time constant. - ProtocolVersion DEFAULT = DefaultProtocolVersion.V4; + ProtocolVersion DEFAULT = DefaultProtocolVersion.V5; /** * A numeric code that uniquely identifies the version (this is the code used in network frames). 
diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java index 2dff6ff22f8..08826758e98 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistry.java @@ -61,6 +61,9 @@ public class DefaultProtocolVersionRegistry implements ProtocolVersionRegistry { @VisibleForTesting static final Version DSE_6_0_0 = Objects.requireNonNull(Version.parse("6.0.0")); + @VisibleForTesting + static final Version DSE_7_0_0 = Objects.requireNonNull(Version.parse("7.0.0")); + private final String logPrefix; public DefaultProtocolVersionRegistry(String logPrefix) { @@ -150,9 +153,12 @@ public ProtocolVersion highestCommon(Collection nodes) { } else if (dseVersion.compareTo(DSE_6_0_0) < 0) { // DSE 5.1 removeHigherThan(DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V1, candidates); - } else { + } else if (dseVersion.compareTo(DSE_7_0_0) < 0) { // DSE 6 removeHigherThan(DefaultProtocolVersion.V4, DseProtocolVersion.DSE_V2, candidates); + } else { + // DSE 7.0 + removeHigherThan(DefaultProtocolVersion.V5, DseProtocolVersion.DSE_V2, candidates); } } else { // not DSE Version cassandraVersion = node.getCassandraVersion(); @@ -181,9 +187,12 @@ public ProtocolVersion highestCommon(Collection nodes) { } else if (cassandraVersion.compareTo(Version.V2_2_0) < 0) { // 2.1.0 removeHigherThan(DefaultProtocolVersion.V3, null, candidates); - } else { + } else if (cassandraVersion.compareTo(Version.V4_0_0) < 0) { // 2.2, 3.x removeHigherThan(DefaultProtocolVersion.V4, null, candidates); + } else { + // 4.0 + removeHigherThan(DefaultProtocolVersion.V5, null, candidates); } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java 
b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java index c86d7c824c5..9d81a3bdd3d 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.api.core.ProtocolVersion.V3; import static com.datastax.oss.driver.api.core.ProtocolVersion.V4; import static com.datastax.oss.driver.api.core.ProtocolVersion.V5; +import static com.datastax.oss.driver.api.core.ProtocolVersion.V6; import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.DATE_TYPE; import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.SMALLINT_AND_TINYINT_TYPES; import static org.assertj.core.api.Assertions.assertThat; @@ -64,7 +65,13 @@ public void should_not_downgrade_if_no_lower_version() { @Test public void should_downgrade_from_dse_to_oss() { - assertThat(registry.downgrade(DseProtocolVersion.DSE_V1).get()).isEqualTo(ProtocolVersion.V4); + assertThat(registry.downgrade(DseProtocolVersion.DSE_V1).get()).isEqualTo(ProtocolVersion.V5); + } + + @Test + public void should_pick_dse_v2_as_highest_common_when_all_nodes_are_dse_7() { + assertThat(registry.highestCommon(ImmutableList.of(mockDseNode("7.0"), mockDseNode("7.1")))) + .isEqualTo(DseProtocolVersion.DSE_V2); } @Test @@ -124,6 +131,7 @@ public void should_support_date_type_on_oss_v4_and_later() { assertThat(registry.supports(V3, DATE_TYPE)).isFalse(); assertThat(registry.supports(V4, DATE_TYPE)).isTrue(); assertThat(registry.supports(V5, DATE_TYPE)).isTrue(); + assertThat(registry.supports(V6, DATE_TYPE)).isTrue(); assertThat(registry.supports(DSE_V1, DATE_TYPE)).isTrue(); assertThat(registry.supports(DSE_V2, DATE_TYPE)).isTrue(); } @@ -133,6 +141,7 @@ public void should_support_smallint_and_tinyint_types_on_oss_v4_and_later() { assertThat(registry.supports(V3, 
SMALLINT_AND_TINYINT_TYPES)).isFalse(); assertThat(registry.supports(V4, SMALLINT_AND_TINYINT_TYPES)).isTrue(); assertThat(registry.supports(V5, SMALLINT_AND_TINYINT_TYPES)).isTrue(); + assertThat(registry.supports(V6, SMALLINT_AND_TINYINT_TYPES)).isTrue(); assertThat(registry.supports(DSE_V1, SMALLINT_AND_TINYINT_TYPES)).isTrue(); assertThat(registry.supports(DSE_V2, SMALLINT_AND_TINYINT_TYPES)).isTrue(); } @@ -152,7 +161,9 @@ private Node mockDseNode(String rawDseVersion) { .thenReturn(ImmutableMap.of(DseNodeProperties.DSE_VERSION, dseVersion)); Version cassandraVersion; - if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_6_0_0) >= 0) { + if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_7_0_0) >= 0) { + cassandraVersion = Version.parse("5.0"); + } else if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_6_0_0) >= 0) { cassandraVersion = Version.parse("4.0"); } else if (dseVersion.compareTo(DefaultProtocolVersionRegistry.DSE_5_1_0) >= 0) { cassandraVersion = Version.parse("3.11"); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java index 255875f4d54..2b570329d51 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java @@ -18,9 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.DefaultProtocolVersion; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.BatchStatement; import com.datastax.oss.driver.api.core.cql.BatchType; import com.datastax.oss.driver.api.core.cql.PreparedStatement; @@ -45,27 +42,13 @@ @CassandraRequirement(min = "4.0") 
@DseRequirement( // Use next version -- not sure if it will be in by then, but as a reminder to check - min = "6.9", + min = "7.0", description = "Feature not available in DSE yet") public class NowInSecondsIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static final SessionRule SESSION_RULE = buildSessionRule(); - - private static SessionRule buildSessionRule() { - // Reminder to revisit the test when V5 comes out of beta: remove the custom config loader and - // inline this method. - assertThat(DefaultProtocolVersion.V5.isBeta()) - .as("This test can be simplified now that protocol v5 is stable") - .isTrue(); - return SessionRule.builder(CCM_RULE) - .withConfigLoader( - DriverConfigLoader.programmaticBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") - .build()) - .build(); - } + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); From 0f522118e68e632d0d9fab9ce057e81c4e3993c9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Mar 2021 22:17:23 +0100 Subject: [PATCH 659/979] Add loggers for CCM and Simulacron --- integration-tests/src/test/resources/logback-test.xml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index 77fa051841e..078ca8a1911 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -28,6 +28,8 @@ + + - + From 632424655af70a3edbfacf21a0c493f923e9d215 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 15 Mar 2021 22:17:46 +0100 Subject: [PATCH 660/979] Upgrade to Simulacron 0.11.0 --- .../ProtocolVersionInitialNegotiationIT.java | 233 ++++++++++++++++-- .../core/cql/BoundStatementSimulacronIT.java | 10 +- .../datastax/oss/driver/mapper/ProfileIT.java | 
34 ++- .../driver/mapper/StatementAttributesIT.java | 32 ++- pom.xml | 2 +- 5 files changed, 261 insertions(+), 50 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index 1bc10fa6e83..64e38e7268e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; -import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; +import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.DefaultProtocolVersion; @@ -26,8 +26,6 @@ import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; @@ -46,7 +44,7 @@ public class ProtocolVersionInitialNegotiationIT { @CassandraRequirement( min = "2.1", max = "2.2", - description = "required to downgrade to an older version") + description = "Only C* in [2.1,2.2[ has V3 as its highest version") @Test public void should_downgrade_to_v3() { try (CqlSession session = SessionUtils.newSession(ccm)) { @@ -56,18 +54,36 @@ public void should_downgrade_to_v3() { } 
@CassandraRequirement( - min = "2.1", - max = "2.2", - description = "required to downgrade to an older version") + min = "2.2", + max = "4.0", + description = "Only C* in [2.2,4.0[ has V4 as its highest version") + @Test + public void should_downgrade_to_v4() { + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); + session.execute("select * from system.local"); + } + } + + @DseRequirement( + max = "6.0", + description = "Only DSE in [*,6.0[ has DSE_V1 as its highest version") @Test - public void should_fail_if_provided_version_isnt_supported() { + public void should_downgrade_to_dse_v1() { + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion()).isEqualTo(DseProtocolVersion.DSE_V1); + session.execute("select * from system.local"); + } + } + + @CassandraRequirement(max = "2.2", description = "Only C* in [*,2.2[ has V4 unsupported") + @Test + public void should_fail_if_provided_v4_is_not_supported() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); - session.execute("select * from system.local"); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { fail("Expected an AllNodesFailedException"); } catch (AllNodesFailedException anfe) { Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); @@ -79,24 +95,110 @@ public void should_fail_if_provided_version_isnt_supported() { } } + @CassandraRequirement( + min = "2.1", + max = "4.0", + description = "Only C* in [2.1,4.0[ has V5 unsupported or supported as beta") + @Test + public void should_fail_if_provided_v5_is_not_supported() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + 
.withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") + .build(); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException anfe) { + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); + assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); + UnsupportedProtocolVersionException unsupportedException = + (UnsupportedProtocolVersionException) cause; + assertThat(unsupportedException.getAttemptedVersions()) + .containsOnly(DefaultProtocolVersion.V5); + } + } + + @DseRequirement( + max = "7.0", + description = "Only DSE in [*,7.0[ has V5 unsupported or supported as beta") + @Test + public void should_fail_if_provided_v5_is_not_supported_dse() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") + .build(); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException anfe) { + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); + assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); + UnsupportedProtocolVersionException unsupportedException = + (UnsupportedProtocolVersionException) cause; + assertThat(unsupportedException.getAttemptedVersions()) + .containsOnly(DefaultProtocolVersion.V5); + } + } + + @DseRequirement(max = "5.1", description = "Only DSE in [*,5.1[ has DSE_V1 unsupported") + @Test + public void should_fail_if_provided_dse_v1_is_not_supported() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "DSE_V1") + .build(); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException anfe) { + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); + 
assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); + UnsupportedProtocolVersionException unsupportedException = + (UnsupportedProtocolVersionException) cause; + assertThat(unsupportedException.getAttemptedVersions()) + .containsOnly(DseProtocolVersion.DSE_V1); + } + } + + @DseRequirement(max = "6.0", description = "Only DSE in [*,6.0[ has DSE_V2 unsupported") + @Test + public void should_fail_if_provided_dse_v2_is_not_supported() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "DSE_V2") + .build(); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException anfe) { + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); + assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); + UnsupportedProtocolVersionException unsupportedException = + (UnsupportedProtocolVersionException) cause; + assertThat(unsupportedException.getAttemptedVersions()) + .containsOnly(DseProtocolVersion.DSE_V2); + } + } + /** Note that this test will need to be updated as new protocol versions are introduced. */ - @CassandraRequirement(min = "2.2", description = "required to meet default protocol version") - @DseRequirement(min = "6.0", description = "required to meet default protocol version") + @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") @Test public void should_not_downgrade_if_server_supports_latest_version() { try (CqlSession session = SessionUtils.newSession(ccm)) { - Metadata metadata = session.getMetadata(); - Node node = metadata.getNodes().values().iterator().next(); - boolean isDse = node.getExtras().containsKey(DseNodeProperties.DSE_VERSION); - assertThat(session.getContext().getProtocolVersion()) - .isEqualTo(isDse ? 
ProtocolVersion.DSE_V2 : ProtocolVersion.V4); + assertThat(session.getContext().getProtocolVersion()).isEqualTo(ProtocolVersion.V5); session.execute("select * from system.local"); } } - @CassandraRequirement(min = "2.2", description = "required to use an older protocol version") + /** Note that this test will need to be updated as new protocol versions are introduced. */ + @DseRequirement(min = "6.0", description = "Only DSE in [6.0,*[ has DSE_V2 supported") + @Test + public void should_not_downgrade_if_server_supports_latest_version_dse() { + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion()).isEqualTo(ProtocolVersion.DSE_V2); + session.execute("select * from system.local"); + } + } + + @CassandraRequirement(min = "2.1", description = "Only C* in [2.1,*[ has V3 supported") @Test - public void should_use_explicitly_provided_protocol_version() { + public void should_use_explicitly_provided_v3() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") @@ -106,4 +208,95 @@ public void should_use_explicitly_provided_protocol_version() { session.execute("select * from system.local"); } } + + @DseRequirement(min = "4.8", description = "Only DSE in [4.8,*[ has V3 supported") + @Test + public void should_use_explicitly_provided_v3_dse() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") + .build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); + session.execute("select * from system.local"); + } + } + + @CassandraRequirement(min = "2.2", description = "Only C* in [2.2,*[ has V4 supported") + @Test + public void should_use_explicitly_provided_v4() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") + 
.build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); + session.execute("select * from system.local"); + } + } + + @DseRequirement(min = "5.0", description = "Only DSE in [5.0,*[ has V4 supported") + @Test + public void should_use_explicitly_provided_v4_dse() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") + .build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); + session.execute("select * from system.local"); + } + } + + @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") + @Test + public void should_use_explicitly_provided_v5() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") + .build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(5); + session.execute("select * from system.local"); + } + } + + @DseRequirement(min = "7.0", description = "Only DSE in [7.0,*[ has V5 supported") + @Test + public void should_use_explicitly_provided_v5_dse() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") + .build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(5); + session.execute("select * from system.local"); + } + } + + @DseRequirement(min = "5.1", description = "Only DSE in [5.1,*[ has DSE_V1 supported") + @Test + public void should_use_explicitly_provided_dse_v1() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "DSE_V1") + .build(); + try 
(CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion()).isEqualTo(DseProtocolVersion.DSE_V1); + session.execute("select * from system.local"); + } + } + + @DseRequirement(min = "6.0", description = "Only DSE in [6.0,*[ has DSE_V2 supported") + @Test + public void should_use_explicitly_provided_dse_v2() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "DSE_V2") + .build(); + try (CqlSession session = SessionUtils.newSession(ccm, loader)) { + assertThat(session.getContext().getProtocolVersion()).isEqualTo(DseProtocolVersion.DSE_V2); + session.execute("select * from system.local"); + } + } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java index 4f31dff1717..71fc5eb429e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementSimulacronIT.java @@ -36,8 +36,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import java.time.Duration; +import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.ClassRule; @@ -125,8 +125,8 @@ public void should_use_consistencies() { @Test public void should_use_timeout_from_simple_statement() { try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { - Map params = ImmutableMap.of("k", 0); - Map paramTypes = ImmutableMap.of("k", "int"); + LinkedHashMap params = new LinkedHashMap<>(ImmutableMap.of("k", 0)); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("k", "int")); SIMULACRON_RULE .cluster() .prime( @@ -156,8 +156,8 @@ public void 
should_use_timeout_from_simple_statement() { @Test public void should_use_timeout() { try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE)) { - Map params = ImmutableMap.of("k", 0); - Map paramTypes = ImmutableMap.of("k", "int"); + LinkedHashMap params = new LinkedHashMap<>(ImmutableMap.of("k", 0)); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("k", "int")); // set timeout on simple statement, but will be unused since overridden by bound statement. SIMULACRON_RULE .cluster() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java index d03f280704d..eac0410759e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ProfileIT.java @@ -49,8 +49,8 @@ import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.UUID; import java.util.concurrent.TimeUnit; @@ -216,9 +216,11 @@ private void assertServerSideCl(ConsistencyLevel expectedCl) { } private static void primeInsertQuery() { - Map params = - ImmutableMap.of("pk", SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData()); - Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + LinkedHashMap params = + new LinkedHashMap<>( + ImmutableMap.of("pk", SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData())); + LinkedHashMap paramTypes = + new LinkedHashMap<>(ImmutableMap.of("pk", "uuid", "data", "ascii")); SIMULACRON_RULE .cluster() .prime( @@ -234,8 +236,9 @@ private static void primeInsertQuery() { } private static void primeDeleteQuery() { - Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); - Map paramTypes = ImmutableMap.of("pk", "uuid"); + 
LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", SAMPLE_ENTITY.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -252,8 +255,9 @@ private static void primeDeleteQuery() { } private static void primeSelectQuery() { - Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); - Map paramTypes = ImmutableMap.of("pk", "uuid"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", SAMPLE_ENTITY.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -270,8 +274,9 @@ private static void primeSelectQuery() { } private static void primeCountQuery() { - Map params = ImmutableMap.of("pk", SAMPLE_ENTITY.getPk()); - Map paramTypes = ImmutableMap.of("pk", "uuid"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", SAMPLE_ENTITY.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -288,9 +293,11 @@ private static void primeCountQuery() { } private static void primeUpdateQuery() { - Map params = - ImmutableMap.of("pk", SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData()); - Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + LinkedHashMap params = + new LinkedHashMap<>( + ImmutableMap.of("pk", SAMPLE_ENTITY.getPk(), "data", SAMPLE_ENTITY.getData())); + LinkedHashMap paramTypes = + new LinkedHashMap<>(ImmutableMap.of("pk", "uuid", "data", "ascii")); SIMULACRON_RULE .cluster() .prime( @@ -329,6 +336,7 @@ public interface SimpleDao { void delete(Simple simple); @Select + @SuppressWarnings("UnusedReturnValue") Simple findByPk(UUID pk); @Query("SELECT count(*) FROM ks.simple WHERE pk=:pk") diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java index 
d32fd1f9517..46a10d465ad 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java @@ -47,7 +47,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import java.nio.ByteBuffer; -import java.util.Map; +import java.util.LinkedHashMap; import java.util.Objects; import java.util.UUID; import java.util.concurrent.TimeUnit; @@ -194,8 +194,10 @@ public void should_fail_runtime_attributes_bad() { } private static void primeInsertQuery() { - Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); - Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", simple.getPk(), "data", simple.getData())); + LinkedHashMap paramTypes = + new LinkedHashMap<>(ImmutableMap.of("pk", "uuid", "data", "ascii")); SIMULACRON_RULE .cluster() .prime( @@ -210,8 +212,9 @@ private static void primeInsertQuery() { } private static void primeDeleteQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); - Map paramTypes = ImmutableMap.of("pk", "uuid"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", simple.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -227,8 +230,9 @@ private static void primeDeleteQuery() { } private static void primeSelectQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); - Map paramTypes = ImmutableMap.of("pk", "uuid"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", simple.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -244,8 +248,9 @@ private static void primeSelectQuery() { } private static void primeCountQuery() { - Map params = ImmutableMap.of("pk", simple.getPk()); - Map 
paramTypes = ImmutableMap.of("pk", "uuid"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", simple.getPk())); + LinkedHashMap paramTypes = new LinkedHashMap<>(ImmutableMap.of("pk", "uuid")); SIMULACRON_RULE .cluster() .prime( @@ -261,8 +266,10 @@ private static void primeCountQuery() { } private static void primeUpdateQuery() { - Map params = ImmutableMap.of("pk", simple.getPk(), "data", simple.getData()); - Map paramTypes = ImmutableMap.of("pk", "uuid", "data", "ascii"); + LinkedHashMap params = + new LinkedHashMap<>(ImmutableMap.of("pk", simple.getPk(), "data", simple.getData())); + LinkedHashMap paramTypes = + new LinkedHashMap<>(ImmutableMap.of("pk", "uuid", "data", "ascii")); SIMULACRON_RULE .cluster() .prime( @@ -319,10 +326,12 @@ public interface SimpleDao { void delete2(Simple simple); @Select + @SuppressWarnings("UnusedReturnValue") Simple findByPk(UUID pk, Function function); @Select @StatementAttributes(consistencyLevel = "ANY", serialConsistencyLevel = "QUORUM", pageSize = 13) + @SuppressWarnings("UnusedReturnValue") Simple findByPk2(UUID pk); @Query("SELECT count(*) FROM ks.simple WHERE pk=:pk") @@ -330,6 +339,7 @@ public interface SimpleDao { @Query("SELECT count(*) FROM ks.simple WHERE pk=:pk") @StatementAttributes(consistencyLevel = "ANY", serialConsistencyLevel = "QUORUM", pageSize = 13) + @SuppressWarnings("UnusedReturnValue") long count2(UUID pk); @Update diff --git a/pom.xml b/pom.xml index b834259994b..eb6ab69f887 100644 --- a/pom.xml +++ b/pom.xml @@ -71,7 +71,7 @@ 6.0.0 6.0.3 4.13.4 - 0.10.0 + 0.11.0 1.1.4 2.28 2.5.0 From c3cb29664057e5fb470465ac36fd58649d14b630 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 16 Mar 2021 10:18:37 +0100 Subject: [PATCH 661/979] JAVA-2925: Consider protocol version unsupported when server requires USE_BETA flag for it (#1537) --- changelog/README.md | 1 + .../core/channel/ProtocolInitHandler.java | 8 ++-- ...ChannelFactoryProtocolNegotiationTest.java | 38 +++++++++++++++++++ 3 
files changed, 44 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 420fd72b325..984dd9259c5 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [bug] JAVA-2925: Consider protocol version unsupported when server requires USE_BETA flag for it - [improvement] JAVA-2704: Remove protocol v5 beta status, add v6-beta - [improvement] JAVA-2916: Annotate generated classes with `@SuppressWarnings` - [bug] JAVA-2927: Make Dropwizard truly optional diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java index 1f9c0e2c689..99462ce3462 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ProtocolInitHandler.java @@ -326,9 +326,11 @@ void onResponse(Message response) { (step == Step.OPTIONS && querySupportedOptions) || step == Step.STARTUP; boolean serverOrProtocolError = error.code == ErrorCode.PROTOCOL_ERROR || error.code == ErrorCode.SERVER_ERROR; - if (firstRequest - && serverOrProtocolError - && error.message.contains("Invalid or unsupported protocol version")) { + boolean badProtocolVersionMessage = + error.message.contains("Invalid or unsupported protocol version") + // JAVA-2925: server is behind driver and considers the proposed version as beta + || error.message.contains("Beta version of the protocol used"); + if (firstRequest && serverOrProtocolError && badProtocolVersionMessage) { fail( UnsupportedProtocolVersionException.forSingleAttempt( endPoint, initialProtocolVersion)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java index 
189561c161b..2d7aeec2b62 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/ChannelFactoryProtocolNegotiationTest.java @@ -94,6 +94,44 @@ public void should_fail_if_version_specified_and_not_supported_by_server(int err }); } + @Test + public void should_fail_if_version_specified_and_considered_beta_by_server() { + // Given + when(defaultProfile.isDefined(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn(true); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_VERSION)).thenReturn("V5"); + when(protocolVersionRegistry.fromName("V5")).thenReturn(DefaultProtocolVersion.V5); + ChannelFactory factory = newChannelFactory(); + + // When + CompletionStage channelFuture = + factory.connect( + SERVER_ADDRESS, DriverChannelOptions.DEFAULT, NoopNodeMetricUpdater.INSTANCE); + + Frame requestFrame = readOutboundFrame(); + assertThat(requestFrame.message).isInstanceOf(Options.class); + writeInboundFrame(requestFrame, TestResponses.supportedResponse("mock_key", "mock_value")); + + requestFrame = readOutboundFrame(); + assertThat(requestFrame.protocolVersion).isEqualTo(DefaultProtocolVersion.V5.getCode()); + // Server considers v5 beta, e.g. 
C* 3.10 or 3.11 + writeInboundFrame( + requestFrame, + new Error( + ProtocolConstants.ErrorCode.PROTOCOL_ERROR, + "Beta version of the protocol used (5/v5-beta), but USE_BETA flag is unset")); + + // Then + assertThatStage(channelFuture) + .isFailed( + e -> { + assertThat(e) + .isInstanceOf(UnsupportedProtocolVersionException.class) + .hasMessageContaining("Host does not support protocol version V5"); + assertThat(((UnsupportedProtocolVersionException) e).getAttemptedVersions()) + .containsExactly(DefaultProtocolVersion.V5); + }); + } + @Test public void should_succeed_if_version_not_specified_and_server_supports_latest_supported() { // Given From 39be4f71aa974d783a11b94ff1fa70beaa7e6848 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 16 Mar 2021 10:21:45 +0100 Subject: [PATCH 662/979] Fix failing tests in ProtocolVersionInitialNegotiationIT --- .../ProtocolVersionInitialNegotiationIT.java | 68 ++++++++++++++++--- 1 file changed, 59 insertions(+), 9 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index 64e38e7268e..366e9fe5669 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.Assume; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -46,7 +47,17 @@ public class ProtocolVersionInitialNegotiationIT { max = "2.2", description = "Only C* in [2.1,2.2[ has V3 as its highest version") @Test - public void should_downgrade_to_v3() { + public 
void should_downgrade_to_v3_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); + session.execute("select * from system.local"); + } + } + + @DseRequirement(max = "5.0", description = "Only DSE in [*,5.0[ has V3 as its highest version") + @Test + public void should_downgrade_to_v3_dse() { try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); session.execute("select * from system.local"); @@ -58,7 +69,20 @@ public void should_downgrade_to_v3() { max = "4.0", description = "Only C* in [2.2,4.0[ has V4 as its highest version") @Test - public void should_downgrade_to_v4() { + public void should_downgrade_to_v4_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); + session.execute("select * from system.local"); + } + } + + @DseRequirement( + min = "5.0", + max = "5.1", + description = "Only DSE in [5.0,5.1[ has V4 as its highest version") + @Test + public void should_downgrade_to_v4_dse() { try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); session.execute("select * from system.local"); @@ -66,8 +90,9 @@ public void should_downgrade_to_v4() { } @DseRequirement( + min = "5.1", max = "6.0", - description = "Only DSE in [*,6.0[ has DSE_V1 as its highest version") + description = "Only DSE in [5.1,6.0[ has DSE_V1 as its highest version") @Test public void should_downgrade_to_dse_v1() { try (CqlSession session = SessionUtils.newSession(ccm)) { @@ -78,7 +103,27 @@ public void should_downgrade_to_dse_v1() { @CassandraRequirement(max = "2.2", description = 
"Only C* in [*,2.2[ has V4 unsupported") @Test - public void should_fail_if_provided_v4_is_not_supported() { + public void should_fail_if_provided_v4_is_not_supported_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") + .build(); + try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { + fail("Expected an AllNodesFailedException"); + } catch (AllNodesFailedException anfe) { + Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); + assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); + UnsupportedProtocolVersionException unsupportedException = + (UnsupportedProtocolVersionException) cause; + assertThat(unsupportedException.getAttemptedVersions()) + .containsOnly(DefaultProtocolVersion.V4); + } + } + + @DseRequirement(max = "5.0", description = "Only DSE in [*,5.0[ has V4 unsupported") + @Test + public void should_fail_if_provided_v4_is_not_supported_dse() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") @@ -100,7 +145,8 @@ public void should_fail_if_provided_v4_is_not_supported() { max = "4.0", description = "Only C* in [2.1,4.0[ has V5 unsupported or supported as beta") @Test - public void should_fail_if_provided_v5_is_not_supported() { + public void should_fail_if_provided_v5_is_not_supported_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") @@ -179,7 +225,8 @@ public void should_fail_if_provided_dse_v2_is_not_supported() { /** Note that this test will need to be updated as new protocol versions are introduced. 
*/ @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") @Test - public void should_not_downgrade_if_server_supports_latest_version() { + public void should_not_downgrade_if_server_supports_latest_version_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion()).isEqualTo(ProtocolVersion.V5); session.execute("select * from system.local"); @@ -198,7 +245,8 @@ public void should_not_downgrade_if_server_supports_latest_version_dse() { @CassandraRequirement(min = "2.1", description = "Only C* in [2.1,*[ has V3 supported") @Test - public void should_use_explicitly_provided_v3() { + public void should_use_explicitly_provided_v3_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") @@ -224,7 +272,8 @@ public void should_use_explicitly_provided_v3_dse() { @CassandraRequirement(min = "2.2", description = "Only C* in [2.2,*[ has V4 supported") @Test - public void should_use_explicitly_provided_v4() { + public void should_use_explicitly_provided_v4_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") @@ -250,7 +299,8 @@ public void should_use_explicitly_provided_v4_dse() { @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") @Test - public void should_use_explicitly_provided_v5() { + public void should_use_explicitly_provided_v5_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") From 
85b784bab7a2f775e1d7433dbcf50df8aad8125b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 17 Mar 2021 00:09:44 +0100 Subject: [PATCH 663/979] Set CCM logger level to ERROR --- integration-tests/src/test/resources/logback-test.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index 078ca8a1911..36dd79c1040 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -29,7 +29,7 @@ - + From faa7f7c379ca5ffaeaffc31f2cd9ace0a5f5bc42 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 17 Mar 2021 00:12:47 +0100 Subject: [PATCH 664/979] Add note in the upgrade guide about protocol v5 (JAVA-2704) --- upgrade_guide/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 80909ad15da..54d1e86c6a7 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,16 @@ ## Upgrade guide +### 4.11.0 + +#### Native protocol V5 is now production-ready + +Thanks to [JAVA-2704](https://datastax-oss.atlassian.net/browse/JAVA-2704), 4.11.0 is the first +version in the driver 4.x series to fully support Cassandra's native protocol version 5, which has +been promoted from beta to production-ready in the upcoming Cassandra 4.0 release. + +Users should not experience any disruption. When connecting to Cassandra 4.0, V5 will be +transparently selected as the protocol version to use. 
+ ### 4.10.0 #### Cross-datacenter failover From 17f61165c3b2580979da10a98b12899417fe0a8a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 17 Mar 2021 22:15:44 +0100 Subject: [PATCH 665/979] JAVA-2872: Ability to customize metric names and tags (#1540) --- changelog/README.md | 1 + .../api/core/config/DefaultDriverOption.java | 14 + .../driver/api/core/config/OptionsMap.java | 1 + .../api/core/config/TypedDriverOption.java | 8 + .../driver/api/core/metrics/NodeMetric.java | 5 +- .../api/core/metrics/SessionMetric.java | 3 +- .../core/context/DefaultDriverContext.java | 23 ++ .../core/context/InternalDriverContext.java | 4 + .../internal/core/metadata/SniEndPoint.java | 3 + .../core/metrics/AbstractMetricUpdater.java | 120 +++++++ .../core/metrics/DefaultMetricId.java | 68 ++++ .../metrics/DefaultMetricIdGenerator.java | 64 ++++ .../core/metrics/DropwizardMetricUpdater.java | 164 +++++++--- .../metrics/DropwizardMetricsFactory.java | 4 +- .../metrics/DropwizardNodeMetricUpdater.java | 115 +++---- .../DropwizardSessionMetricUpdater.java | 116 ++----- .../internal/core/metrics/MetricId.java | 47 +++ .../core/metrics/MetricIdGenerator.java | 47 +++ .../internal/core/metrics/MetricUpdater.java | 16 +- .../metrics/TaggingMetricIdGenerator.java | 71 +++++ core/src/main/resources/reference.conf | 72 ++++- .../metrics/DefaultMetricIdGeneratorTest.java | 108 +++++++ .../core/metrics/DefaultMetricIdTest.java | 61 ++++ .../metrics/DropwizardMetricsFactoryTest.java | 10 +- .../metrics/TaggingMetricIdGeneratorTest.java | 114 +++++++ .../common/AbstractMetricsTestBase.java | 217 ++++++++++--- .../micrometer/MicrometerMetricsIT.java | 280 ++++++++++------- .../microprofile/MicroProfileMetricsIT.java | 295 ++++++++++-------- manual/core/metrics/README.md | 55 ++++ .../micrometer/MicrometerMetricUpdater.java | 109 +++++-- .../micrometer/MicrometerMetricsFactory.java | 4 +- .../MicrometerNodeMetricUpdater.java | 108 +++---- .../MicrometerSessionMetricUpdater.java | 126 
++------ .../metrics/micrometer/MicrometerTags.java | 33 ++ .../MicrometerMetricsFactoryTest.java | 10 +- .../MicroProfileMetricUpdater.java | 123 ++++++-- .../MicroProfileMetricsFactory.java | 4 +- .../MicroProfileNodeMetricUpdater.java | 112 +++---- .../MicroProfileSessionMetricUpdater.java | 122 ++------ .../microprofile/MicroProfileTags.java | 33 ++ .../MicroProfileMetricsFactoryTest.java | 10 +- upgrade_guide/README.md | 13 + 42 files changed, 1995 insertions(+), 918 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricId.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGenerator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricId.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGenerator.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGeneratorTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGeneratorTest.java create mode 100644 metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerTags.java create mode 100644 metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileTags.java diff --git a/changelog/README.md b/changelog/README.md index 984dd9259c5..2d3d1d0c280 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [new feature] JAVA-2872: Ability to customize metric names and tags - 
[bug] JAVA-2925: Consider protocol version unsupported when server requires USE_BETA flag for it - [improvement] JAVA-2704: Remove protocol v5 beta status, add v6-beta - [improvement] JAVA-2916: Annotate generated classes with `@SuppressWarnings` diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index d9bc504bea4..150305dfea4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -838,6 +838,20 @@ public enum DefaultDriverOption implements DriverOption { */ LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS( "advanced.load-balancing-policy.dc-failover.allow-for-local-consistency-levels"), + + /** + * The classname of the desired {@code MetricIdGenerator} implementation. + * + *

          Value-type: {@link String} + */ + METRICS_ID_GENERATOR_CLASS("advanced.metrics.id-generator.class"), + + /** + * The value of the prefix to prepend to all metric names. + * + *

          Value-type: {@link String} + */ + METRICS_ID_GENERATOR_PREFIX("advanced.metrics.id-generator.prefix"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index b0fd39b57c2..9c4758f531f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -323,6 +323,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_FACTORY_CLASS, "DefaultMetricsFactory"); + map.put(TypedDriverOption.METRICS_ID_GENERATOR_CLASS, "DefaultMetricIdGenerator"); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(12)); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index bf4223bf45c..d2687da68d7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -709,6 +709,14 @@ public String toString() { public static final TypedDriverOption METRICS_FACTORY_CLASS = new TypedDriverOption<>(DefaultDriverOption.METRICS_FACTORY_CLASS, GenericType.STRING); + /** The classname of the desired {@code MetricIdGenerator} implementation. */ + public static final TypedDriverOption METRICS_ID_GENERATOR_CLASS = + new TypedDriverOption<>(DefaultDriverOption.METRICS_ID_GENERATOR_CLASS, GenericType.STRING); + + /** The value of the prefix to prepend to all metric names. 
*/ + public static final TypedDriverOption METRICS_ID_GENERATOR_PREFIX = + new TypedDriverOption<>(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, GenericType.STRING); + /** The maximum number of nodes from remote DCs to include in query plans. */ public static final TypedDriverOption LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC = diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/NodeMetric.java b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/NodeMetric.java index 6f7c3c8e7f6..5fac2ef30da 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/NodeMetric.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/NodeMetric.java @@ -21,8 +21,9 @@ /** * A node-level metric exposed through {@link Session#getMetrics()}. * - *

          All metrics exposed out of the box by the driver are instances of {@link DefaultNodeMetric} - * (this interface only exists to allow custom metrics in driver extensions). + *

          All metrics exposed out of the box by the driver are instances of {@link DefaultNodeMetric} or + * {@link com.datastax.dse.driver.api.core.metrics.DseNodeMetric DseNodeMetric} (this interface only + * exists to allow custom metrics in driver extensions). * * @see SessionMetric */ diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/SessionMetric.java b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/SessionMetric.java index 4b591e14085..c0da00df070 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metrics/SessionMetric.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metrics/SessionMetric.java @@ -22,7 +22,8 @@ * A session-level metric exposed through {@link Session#getMetrics()}. * *

          All metrics exposed out of the box by the driver are instances of {@link DefaultSessionMetric} - * (this interface only exists to allow custom metrics in driver extensions). + * or {@link com.datastax.dse.driver.api.core.metrics.DseSessionMetric DseSessionMetric} (this + * interface only exists to allow custom metrics in driver extensions). * * @see NodeMetric */ diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index f37c2eae3dc..8cbe488253b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -65,6 +65,7 @@ import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenFactoryRegistry; import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategyFactory; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.pool.ChannelPoolFactory; import com.datastax.oss.driver.internal.core.protocol.BuiltInCompressors; @@ -200,6 +201,8 @@ public class DefaultDriverContext implements InternalDriverContext { new LazyReference<>("poolManager", this::buildPoolManager, cycleDetector); private final LazyReference metricsFactoryRef = new LazyReference<>("metricsFactory", this::buildMetricsFactory, cycleDetector); + private final LazyReference metricIdGeneratorRef = + new LazyReference<>("metricIdGenerator", this::buildMetricIdGenerator, cycleDetector); private final LazyReference requestThrottlerRef = new LazyReference<>("requestThrottler", this::buildRequestThrottler, cycleDetector); private final LazyReference> startupOptionsRef = @@ -543,6 
+546,20 @@ protected MetricsFactory buildMetricsFactory() { DefaultDriverOption.METRICS_FACTORY_CLASS))); } + protected MetricIdGenerator buildMetricIdGenerator() { + return Reflection.buildFromConfig( + this, + DefaultDriverOption.METRICS_ID_GENERATOR_CLASS, + MetricIdGenerator.class, + "com.datastax.oss.driver.internal.core.metrics") + .orElseThrow( + () -> + new IllegalArgumentException( + String.format( + "Missing metric descriptor, check your config (%s)", + DefaultDriverOption.METRICS_ID_GENERATOR_CLASS))); + } + protected RequestThrottler buildRequestThrottler() { return Reflection.buildFromConfig( this, @@ -853,6 +870,12 @@ public MetricsFactory getMetricsFactory() { return metricsFactoryRef.get(); } + @NonNull + @Override + public MetricIdGenerator getMetricIdGenerator() { + return metricIdGeneratorRef.get(); + } + @NonNull @Override public RequestThrottler getRequestThrottler() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index 3b17b98deef..0bfc07d73a2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; import com.datastax.oss.driver.internal.core.metadata.token.ReplicationStrategyFactory; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.pool.ChannelPoolFactory; import com.datastax.oss.driver.internal.core.servererrors.WriteTypeRegistry; @@ -125,6 +126,9 @@ public interface InternalDriverContext extends DriverContext { @NonNull 
MetricsFactory getMetricsFactory(); + @NonNull + MetricIdGenerator getMetricIdGenerator(); + /** * The value that was passed to {@link SessionBuilder#withLocalDatacenter(String,String)} for this * particular profile. If it was specified through the configuration instead, this method will diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java index 5c2918e1f34..475e1b56b95 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.shaded.guava.common.primitives.UnsignedBytes; +import edu.umd.cs.findbugs.annotations.NonNull; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; @@ -48,6 +49,7 @@ public String getServerName() { return serverName; } + @NonNull @Override public InetSocketAddress resolve() { try { @@ -93,6 +95,7 @@ public String toString() { return proxyAddress.toString() + ":" + serverName; } + @NonNull @Override public String asMetricPrefix() { String hostString = proxyAddress.getHostString(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java new file mode 100644 index 00000000000..04fdc2c58bc --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java @@ -0,0 +1,120 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; +import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class AbstractMetricUpdater implements MetricUpdater { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractMetricUpdater.class); + + protected final InternalDriverContext context; + protected final Set enabledMetrics; + + protected AbstractMetricUpdater(InternalDriverContext context, Set enabledMetrics) { + this.context = context; + this.enabledMetrics = enabledMetrics; + } + + @Override + public boolean isEnabled(MetricT metric, String profileName) { + return enabledMetrics.contains(metric); + } + + 
protected int connectedNodes() { + int count = 0; + for (Node node : context.getMetadataManager().getMetadata().getNodes().values()) { + if (node.getOpenConnections() > 0) { + count++; + } + } + return count; + } + + protected int throttlingQueueSize() { + RequestThrottler requestThrottler = context.getRequestThrottler(); + String logPrefix = context.getSessionName(); + if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { + return ((ConcurrencyLimitingRequestThrottler) requestThrottler).getQueueSize(); + } + if (requestThrottler instanceof RateLimitingRequestThrottler) { + return ((RateLimitingRequestThrottler) requestThrottler).getQueueSize(); + } + LOG.warn( + "[{}] Metric {} does not support {}, it will always return 0", + logPrefix, + DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), + requestThrottler.getClass().getName()); + return 0; + } + + protected long preparedStatementCacheSize() { + Cache cache = getPreparedStatementCache(); + if (cache == null) { + LOG.warn( + "[{}] Metric {} is enabled in the config, " + + "but it looks like no CQL prepare processor is registered. " + + "The gauge will always return 0", + context.getSessionName(), + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); + return 0L; + } + return cache.size(); + } + + @Nullable + protected Cache getPreparedStatementCache() { + // By default, both the sync processor and the async ones are registered and they share the same + // cache. But with a custom processor registry, there could be only one of the two present. 
+ for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { + if (processor instanceof CqlPrepareAsyncProcessor) { + return ((CqlPrepareAsyncProcessor) processor).getCache(); + } else if (processor instanceof CqlPrepareSyncProcessor) { + return ((CqlPrepareSyncProcessor) processor).getCache(); + } + } + return null; + } + + protected int availableStreamIds(Node node) { + ChannelPool pool = context.getPoolManager().getPools().get(node); + return (pool == null) ? 0 : pool.getAvailableIds(); + } + + protected int inFlightRequests(Node node) { + ChannelPool pool = context.getPoolManager().getPools().get(node); + return (pool == null) ? 0 : pool.getInFlight(); + } + + protected int orphanedStreamIds(Node node) { + ChannelPool pool = context.getPoolManager().getPools().get(node); + return (pool == null) ? 0 : pool.getOrphanedIds(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricId.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricId.java new file mode 100644 index 00000000000..bd200854b24 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricId.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; +import java.util.Objects; +import net.jcip.annotations.ThreadSafe; + +@ThreadSafe +public final class DefaultMetricId implements MetricId { + + private final String name; + private final ImmutableMap tags; + + public DefaultMetricId(String name, Map tags) { + this.name = Objects.requireNonNull(name, "name cannot be null"); + this.tags = ImmutableMap.copyOf(Objects.requireNonNull(tags, "tags cannot be null")); + } + + @NonNull + @Override + public String getName() { + return name; + } + + @NonNull + @Override + public Map getTags() { + return tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DefaultMetricId that = (DefaultMetricId) o; + return name.equals(that.name) && tags.equals(that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(name, tags); + } + + @Override + public String toString() { + return name + tags; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGenerator.java new file mode 100644 index 00000000000..614c7d6eb9a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGenerator.java @@ -0,0 +1,64 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +/** + * The default {@link MetricIdGenerator}. + * + *

          This generator generates unique names, containing the session name, the node endpoint (for + * node metrics), and the metric prefix. It does not generate tags. + */ +public class DefaultMetricIdGenerator implements MetricIdGenerator { + + private final String sessionPrefix; + private final String nodePrefix; + + @SuppressWarnings("unused") + public DefaultMetricIdGenerator(DriverContext context) { + String sessionName = context.getSessionName(); + String prefix = + Objects.requireNonNull( + context + .getConfig() + .getDefaultProfile() + .getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")); + sessionPrefix = prefix.isEmpty() ? sessionName + '.' : prefix + '.' + sessionName + '.'; + nodePrefix = sessionPrefix + "nodes."; + } + + @NonNull + @Override + public MetricId sessionMetricId(@NonNull SessionMetric metric) { + return new DefaultMetricId(sessionPrefix + metric.getPath(), ImmutableMap.of()); + } + + @NonNull + @Override + public MetricId nodeMetricId(@NonNull Node node, @NonNull NodeMetric metric) { + return new DefaultMetricId( + nodePrefix + node.getEndPoint().asMetricPrefix() + '.' 
+ metric.getPath(), + ImmutableMap.of()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java index 0c47637d780..7605d770069 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java @@ -15,110 +15,172 @@ */ package com.datastax.oss.driver.internal.core.metrics; +import com.codahale.metrics.Counter; +import com.codahale.metrics.Histogram; +import com.codahale.metrics.Meter; import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Reservoir; import com.codahale.metrics.Timer; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @ThreadSafe -public abstract class DropwizardMetricUpdater implements MetricUpdater { +public abstract class DropwizardMetricUpdater extends AbstractMetricUpdater { private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricUpdater.class); - protected final Set enabledMetrics; protected final MetricRegistry registry; - protected DropwizardMetricUpdater(Set enabledMetrics, MetricRegistry registry) { - this.enabledMetrics = enabledMetrics; + protected final ConcurrentMap metrics = new ConcurrentHashMap<>(); + + protected final ConcurrentMap reservoirs = new 
ConcurrentHashMap<>(); + + protected DropwizardMetricUpdater( + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry) { + super(context, enabledMetrics); this.registry = registry; } - protected abstract String buildFullName(MetricT metric, String profileName); + @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + public T getMetric(MetricT metric, String profileName) { + return (T) metrics.get(metric); + } @Override - public void incrementCounter(MetricT metric, String profileName, long amount) { + public void incrementCounter(MetricT metric, @Nullable String profileName, long amount) { if (isEnabled(metric, profileName)) { - registry.counter(buildFullName(metric, profileName)).inc(amount); + getOrCreateCounterFor(metric).inc(amount); } } @Override - public void updateHistogram(MetricT metric, String profileName, long value) { + public void updateHistogram(MetricT metric, @Nullable String profileName, long value) { if (isEnabled(metric, profileName)) { - registry.histogram(buildFullName(metric, profileName)).update(value); + getOrCreateHistogramFor(metric).update(value); } } @Override - public void markMeter(MetricT metric, String profileName, long amount) { + public void markMeter(MetricT metric, @Nullable String profileName, long amount) { if (isEnabled(metric, profileName)) { - registry.meter(buildFullName(metric, profileName)).mark(amount); + getOrCreateMeterFor(metric).mark(amount); } } @Override - public void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit) { + public void updateTimer( + MetricT metric, @Nullable String profileName, long duration, TimeUnit unit) { if (isEnabled(metric, profileName)) { - registry.timer(buildFullName(metric, profileName)).update(duration, unit); + getOrCreateTimerFor(metric).update(duration, unit); } } - @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) - public T getMetric(MetricT metric, String profileName) { - return (T) 
registry.getMetrics().get(buildFullName(metric, profileName)); + protected abstract MetricId getMetricId(MetricT metric); + + protected void initializeGauge( + MetricT metric, DriverExecutionProfile profile, Supplier supplier) { + if (isEnabled(metric, profile.getName())) { + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + return registry.gauge(id.getName(), () -> supplier::get); + }); + } } - @Override - public boolean isEnabled(MetricT metric, String profileName) { - return enabledMetrics.contains(metric); + protected void initializeCounter(MetricT metric, DriverExecutionProfile profile) { + if (isEnabled(metric, profile.getName())) { + getOrCreateCounterFor(metric); + } } - protected void initializeDefaultCounter(MetricT metric, String profileName) { - if (isEnabled(metric, profileName)) { - // Just initialize eagerly so that the metric appears even when it has no data yet - registry.counter(buildFullName(metric, profileName)); + protected void initializeHdrTimer( + MetricT metric, + DriverExecutionProfile profile, + DriverOption highestLatency, + DriverOption significantDigits, + DriverOption interval) { + if (isEnabled(metric, profile.getName())) { + reservoirs.computeIfAbsent( + metric, m -> createHdrReservoir(m, profile, highestLatency, significantDigits, interval)); + getOrCreateTimerFor(metric); } } - protected void initializeHdrTimer( + protected Counter getOrCreateCounterFor(MetricT metric) { + return (Counter) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + return registry.counter(id.getName()); + }); + } + + protected Meter getOrCreateMeterFor(MetricT metric) { + return (Meter) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + return registry.meter(id.getName()); + }); + } + + protected Histogram getOrCreateHistogramFor(MetricT metric) { + return (Histogram) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + return 
registry.histogram(id.getName()); + }); + } + + protected Timer getOrCreateTimerFor(MetricT metric) { + return (Timer) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Reservoir reservoir = reservoirs.get(metric); + Timer timer = reservoir == null ? new Timer() : new Timer(reservoir); + return registry.timer(id.getName(), () -> timer); + }); + } + + protected HdrReservoir createHdrReservoir( MetricT metric, - DriverExecutionProfile config, + DriverExecutionProfile profile, DriverOption highestLatencyOption, DriverOption significantDigitsOption, DriverOption intervalOption) { - String profileName = config.getName(); - if (isEnabled(metric, profileName)) { - String fullName = buildFullName(metric, profileName); - - Duration highestLatency = config.getDuration(highestLatencyOption); - final int significantDigits; - int d = config.getInt(significantDigitsOption); - if (d >= 0 && d <= 5) { - significantDigits = d; - } else { - LOG.warn( - "[{}] Configuration option {} is out of range (expected between 0 and 5, found {}); " - + "using 3 instead.", - fullName, - significantDigitsOption, - d); - significantDigits = 3; - } - Duration refreshInterval = config.getDuration(intervalOption); - - // Initialize eagerly to use the custom implementation - registry.timer( - fullName, - () -> - new Timer( - new HdrReservoir(highestLatency, significantDigits, refreshInterval, fullName))); + MetricId id = getMetricId(metric); + Duration highestLatency = profile.getDuration(highestLatencyOption); + int significantDigits = profile.getInt(significantDigitsOption); + if (significantDigits < 0 || significantDigits > 5) { + LOG.warn( + "[{}] Configuration option {} is out of range (expected between 0 and 5, " + + "found {}); using 3 instead.", + id.getName(), + significantDigitsOption, + significantDigits); + significantDigits = 3; } + Duration refreshInterval = profile.getDuration(intervalOption); + return new HdrReservoir(highestLatency, significantDigits, 
refreshInterval, id.getName()); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 5b81166668d..8cfac64fbe4 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -96,7 +96,7 @@ public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { if (possibleMetricRegistry instanceof MetricRegistry) { this.registry = (MetricRegistry) possibleMetricRegistry; DropwizardSessionMetricUpdater dropwizardSessionUpdater = - new DropwizardSessionMetricUpdater(enabledSessionMetrics, registry, context); + new DropwizardSessionMetricUpdater(context, enabledSessionMetrics, registry); this.sessionUpdater = dropwizardSessionUpdater; this.metrics = new DefaultMetrics(registry, dropwizardSessionUpdater); } else { @@ -144,7 +144,7 @@ public NodeMetricUpdater newNodeUpdater(Node node) { } else { DropwizardNodeMetricUpdater dropwizardNodeMetricUpdater = new DropwizardNodeMetricUpdater( - node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); metricsCache.put(node, dropwizardNodeMetricUpdater); return dropwizardNodeMetricUpdater; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java index 7961d102659..ca50f57d1c1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java @@ -15,97 +15,80 @@ */ package 
com.datastax.oss.driver.internal.core.metrics; -import com.codahale.metrics.Gauge; import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.pool.ChannelPool; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import net.jcip.annotations.ThreadSafe; @ThreadSafe public class DropwizardNodeMetricUpdater extends DropwizardMetricUpdater implements NodeMetricUpdater { - private final String metricNamePrefix; + private final Node node; private final Runnable signalMetricUpdated; public DropwizardNodeMetricUpdater( Node node, + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry, - InternalDriverContext context, Runnable signalMetricUpdated) { - super(enabledMetrics, registry); + super(context, enabledMetrics, registry); + this.node = node; this.signalMetricUpdated = signalMetricUpdated; - this.metricNamePrefix = buildPrefix(context.getSessionName(), node.getEndPoint()); - DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); + + initializeGauge(DefaultNodeMetric.OPEN_CONNECTIONS, profile, node::getOpenConnections); + initializeGauge(DefaultNodeMetric.AVAILABLE_STREAMS, profile, () -> availableStreamIds(node)); + initializeGauge(DefaultNodeMetric.IN_FLIGHT, profile, () -> 
inFlightRequests(node)); + initializeGauge(DefaultNodeMetric.ORPHANED_STREAMS, profile, () -> orphanedStreamIds(node)); + + initializeCounter(DefaultNodeMetric.UNSENT_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.ABORTED_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.WRITE_TIMEOUTS, profile); + initializeCounter(DefaultNodeMetric.READ_TIMEOUTS, profile); + initializeCounter(DefaultNodeMetric.UNAVAILABLES, profile); + initializeCounter(DefaultNodeMetric.OTHER_ERRORS, profile); + initializeCounter(DefaultNodeMetric.RETRIES, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.IGNORES, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, profile); + initializeCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, profile); + initializeCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, profile); - if (enabledMetrics.contains(DefaultNodeMetric.OPEN_CONNECTIONS)) { - this.registry.register( - buildFullName(DefaultNodeMetric.OPEN_CONNECTIONS, null), - (Gauge) node::getOpenConnections); - } - initializePoolGauge( - DefaultNodeMetric.AVAILABLE_STREAMS, node, ChannelPool::getAvailableIds, context); - initializePoolGauge(DefaultNodeMetric.IN_FLIGHT, node, ChannelPool::getInFlight, context); - initializePoolGauge( - 
DefaultNodeMetric.ORPHANED_STREAMS, node, ChannelPool::getOrphanedIds, context); initializeHdrTimer( DefaultNodeMetric.CQL_MESSAGES, - config, + profile, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_INTERVAL); - initializeDefaultCounter(DefaultNodeMetric.UNSENT_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.ABORTED_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.WRITE_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.READ_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.UNAVAILABLES, null); - initializeDefaultCounter(DefaultNodeMetric.OTHER_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); - initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); initializeHdrTimer( DseNodeMetric.GRAPH_MESSAGES, - context.getConfig().getDefaultProfile(), + profile, DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, 
DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL); } - @Override - public String buildFullName(NodeMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } - - private String buildPrefix(String sessionName, EndPoint endPoint) { - return sessionName + ".nodes." + endPoint.asMetricPrefix() + "."; - } - @Override public void incrementCounter(NodeMetric metric, String profileName, long amount) { signalMetricUpdated.run(); @@ -131,29 +114,27 @@ public void updateTimer(NodeMetric metric, String profileName, long duration, Ti } @Override - @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + @SuppressWarnings("TypeParameterUnusedInFormals") public T getMetric(NodeMetric metric, String profileName) { signalMetricUpdated.run(); return super.getMetric(metric, profileName); } - private void initializePoolGauge( - NodeMetric metric, - Node node, - Function reading, - InternalDriverContext context) { - if (enabledMetrics.contains(metric)) { - registry.register( - buildFullName(metric, null), - (Gauge) - () -> { - ChannelPool pool = context.getPoolManager().getPools().get(node); - return (pool == null) ? 
0 : reading.apply(pool); - }); + public void cleanupNodeMetrics() { + for (NodeMetric metric : metrics.keySet()) { + MetricId id = getMetricId(metric); + registry.remove(id.getName()); } + metrics.clear(); + reservoirs.clear(); } - public void cleanupNodeMetrics() { - registry.removeMatching((name, metric) -> name.startsWith(metricNamePrefix)); + @Override + protected MetricId getMetricId(NodeMetric metric) { + MetricId id = context.getMetricIdGenerator().nodeMetricId(node, metric); + if (!id.getTags().isEmpty()) { + throw new IllegalStateException("Cannot use metric tags with Dropwizard"); + } + return id; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java index 95d1a4fbaab..9013c2d2749 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardSessionMetricUpdater.java @@ -15,140 +15,68 @@ */ package com.datastax.oss.driver.internal.core.metrics; -import com.codahale.metrics.Gauge; import com.codahale.metrics.MetricRegistry; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import 
com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; -import com.datastax.oss.driver.internal.core.session.RequestProcessor; -import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; -import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Set; import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; @ThreadSafe public class DropwizardSessionMetricUpdater extends DropwizardMetricUpdater implements SessionMetricUpdater { - private static final Logger LOG = LoggerFactory.getLogger(DropwizardSessionMetricUpdater.class); + public DropwizardSessionMetricUpdater( + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry) { + super(context, enabledMetrics, registry); - private final String metricNamePrefix; + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); - public DropwizardSessionMetricUpdater( - Set enabledMetrics, MetricRegistry registry, InternalDriverContext context) { - super(enabledMetrics, registry); - this.metricNamePrefix = context.getSessionName() + "."; + initializeGauge(DefaultSessionMetric.CONNECTED_NODES, profile, this::connectedNodes); + initializeGauge(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, profile, this::throttlingQueueSize); + initializeGauge( + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, profile, this::preparedStatementCacheSize); + + initializeCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, profile); + initializeCounter(DefaultSessionMetric.THROTTLING_ERRORS, profile); + initializeCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, profile); - if (enabledMetrics.contains(DefaultSessionMetric.CONNECTED_NODES)) { - this.registry.gauge( - buildFullName(DefaultSessionMetric.CONNECTED_NODES, null), - () -> - () -> { - int count = 0; - for 
(Node node : context.getMetadataManager().getMetadata().getNodes().values()) { - if (node.getOpenConnections() > 0) { - count += 1; - } - } - return count; - }); - } - if (enabledMetrics.contains(DefaultSessionMetric.THROTTLING_QUEUE_SIZE)) { - this.registry.gauge( - buildFullName(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, null), - () -> buildQueueGauge(context.getRequestThrottler(), context.getSessionName())); - } - if (enabledMetrics.contains(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) { - this.registry.gauge( - buildFullName(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, null), - () -> { - Cache cache = getPreparedStatementCache(context); - Gauge gauge; - if (cache == null) { - LOG.warn( - "[{}] Metric {} is enabled in the config, " - + "but it looks like no CQL prepare processor is registered. " - + "The gauge will always return 0", - context.getSessionName(), - DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); - gauge = () -> 0L; - } else { - gauge = cache::size; - } - return gauge; - }); - } initializeHdrTimer( DefaultSessionMetric.CQL_REQUESTS, - context.getConfig().getDefaultProfile(), + profile, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_INTERVAL); - initializeDefaultCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, null); initializeHdrTimer( DefaultSessionMetric.THROTTLING_DELAY, - context.getConfig().getDefaultProfile(), + profile, DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS, DefaultDriverOption.METRICS_SESSION_THROTTLING_INTERVAL); - initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); initializeHdrTimer( DseSessionMetric.CONTINUOUS_CQL_REQUESTS, - context.getConfig().getDefaultProfile(), + profile, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, 
DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL); - initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); initializeHdrTimer( DseSessionMetric.GRAPH_REQUESTS, - context.getConfig().getDefaultProfile(), + profile, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL); } @Override - public String buildFullName(SessionMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } - - private Gauge buildQueueGauge(RequestThrottler requestThrottler, String logPrefix) { - if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { - return ((ConcurrencyLimitingRequestThrottler) requestThrottler)::getQueueSize; - } else if (requestThrottler instanceof RateLimitingRequestThrottler) { - return ((RateLimitingRequestThrottler) requestThrottler)::getQueueSize; - } else { - LOG.warn( - "[{}] Metric {} does not support {}, it will always return 0", - logPrefix, - DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), - requestThrottler.getClass().getName()); - return () -> 0; - } - } - - @Nullable - private static Cache getPreparedStatementCache(InternalDriverContext context) { - // By default, both the sync processor and the async one are registered and they share the same - // cache. But with a custom processor registry, there could be only one of the two present. 
- for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { - if (processor instanceof CqlPrepareAsyncProcessor) { - return ((CqlPrepareAsyncProcessor) processor).getCache(); - } else if (processor instanceof CqlPrepareSyncProcessor) { - return ((CqlPrepareSyncProcessor) processor).getCache(); - } + protected MetricId getMetricId(SessionMetric metric) { + MetricId id = context.getMetricIdGenerator().sessionMetricId(metric); + if (!id.getTags().isEmpty()) { + throw new IllegalStateException("Cannot use metric tags with Dropwizard"); } - return null; + return id; } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricId.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricId.java new file mode 100644 index 00000000000..6f8c308472c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricId.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; + +/** + * The identifier of a metric. + * + *

          The driver will use the reported name and tags to register the described metric against the + * current metric registry. + * + *

          A metric identifier is unique, that is, the combination of its name and its tags is expected + * to be unique for a given metric registry. + */ +public interface MetricId { + + /** + * Returns this metric name. + * + *

          Metric names can be any non-empty string, but it is recommended to create metric names that + * have path-like structures separated by a dot, e.g. {@code path.to.my.custom.metric}. Driver + * built-in implementations of this interface abide by this rule. + * + * @return The metric name; cannot be empty nor null. + */ + @NonNull + String getName(); + + /** @return The metric tags, or empty if no tag is defined; cannot be null. */ + @NonNull + Map getTags(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java new file mode 100644 index 00000000000..7cfd39bf37b --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java @@ -0,0 +1,47 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * A {@link MetricIdGenerator} is used to generate the unique identifiers by which a metric should + * be registered against the current metrics registry. + * + *

          The driver ships with two implementations of this interface; {@code DefaultMetricIdGenerator} + * and {@code TaggingMetricIdGenerator}. + * + *

          {@code DefaultMetricIdGenerator} is the default implementation; it generates metric + * identifiers with unique names and no tags. + * + *

          {@code TaggingMetricIdGenerator} generates metric identifiers whose uniqueness stems from the + * combination of their names and tags. + * + *

          See the driver's {@code reference.conf} file. + */ +public interface MetricIdGenerator { + + /** Generates a {@link MetricId} for the given {@link SessionMetric}. */ + @NonNull + MetricId sessionMetricId(@NonNull SessionMetric metric); + + /** Generates a {@link MetricId} for the given {@link Node} and {@link NodeMetric}. */ + @NonNull + MetricId nodeMetricId(@NonNull Node node, @NonNull NodeMetric metric); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java index f8dc93460b5..e545e4baf79 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.internal.core.metrics; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.concurrent.TimeUnit; /** @@ -25,21 +26,22 @@ */ public interface MetricUpdater { - void incrementCounter(MetricT metric, String profileName, long amount); + void incrementCounter(MetricT metric, @Nullable String profileName, long amount); - default void incrementCounter(MetricT metric, String profileName) { + default void incrementCounter(MetricT metric, @Nullable String profileName) { incrementCounter(metric, profileName, 1); } - void updateHistogram(MetricT metric, String profileName, long value); + // note: currently unused + void updateHistogram(MetricT metric, @Nullable String profileName, long value); - void markMeter(MetricT metric, String profileName, long amount); + void markMeter(MetricT metric, @Nullable String profileName, long amount); - default void markMeter(MetricT metric, String profileName) { + default void markMeter(MetricT metric, @Nullable String profileName) { markMeter(metric, profileName, 1); } - void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit); + void 
updateTimer(MetricT metric, @Nullable String profileName, long duration, TimeUnit unit); - boolean isEnabled(MetricT metric, String profileName); + boolean isEnabled(MetricT metric, @Nullable String profileName); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGenerator.java new file mode 100644 index 00000000000..d49d1abb357 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGenerator.java @@ -0,0 +1,71 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +/** + * A {@link MetricIdGenerator} that generates metric identifiers using a combination of names and + * tags. + * + *

          Session metric identifiers contain a name starting with "session." and ending with the metric + * path, and a tag with the key "session" and the value of the current session name. + * + *

          Node metric identifiers contain a name starting with "nodes." and ending with the metric path, + * and two tags: one with the key "session" and the value of the current session name, the other + * with the key "node" and the value of the current node endpoint. + */ +public class TaggingMetricIdGenerator implements MetricIdGenerator { + + private final String sessionName; + private final String sessionPrefix; + private final String nodePrefix; + + @SuppressWarnings("unused") + public TaggingMetricIdGenerator(DriverContext context) { + sessionName = context.getSessionName(); + String prefix = + Objects.requireNonNull( + context + .getConfig() + .getDefaultProfile() + .getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")); + sessionPrefix = prefix.isEmpty() ? "session." : prefix + ".session."; + nodePrefix = prefix.isEmpty() ? "nodes." : prefix + ".nodes."; + } + + @NonNull + @Override + public MetricId sessionMetricId(@NonNull SessionMetric metric) { + return new DefaultMetricId( + sessionPrefix + metric.getPath(), ImmutableMap.of("session", sessionName)); + } + + @NonNull + @Override + public MetricId nodeMetricId(@NonNull Node node, @NonNull NodeMetric metric) { + return new DefaultMetricId( + nodePrefix + metric.getPath(), + ImmutableMap.of("session", sessionName, "node", node.getEndPoint().toString())); + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 53b8e8afdf3..49ae947d556 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1348,6 +1348,64 @@ datastax-java-driver { # Overridable in a profile: no class = DefaultMetricsFactory } + + # This section configures how metric ids are generated. A metric id is a unique combination of + # a metric name and metric tags. + id-generator { + + # The class name of a component implementing + # com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator. 
If it is not qualified, the + # driver assumes that it resides in the package com.datastax.oss.driver.internal.core.metrics. + # + # The driver ships with two built-in implementations: + # + # - DefaultMetricIdGenerator: generates identifiers composed solely of (unique) metric names; + # it does not generate tags. It is mostly suitable for use with metrics libraries that do + # not support tags, like Dropwizard. + # - TaggingMetricIdGenerator: generates identifiers composed of name and tags. It is mostly + # suitable for use with metrics libraries that support tags, like Micrometer or MicroProfile + # Metrics. + # + # For example, here is how each one of them generates identifiers for the session metric + # "bytes-sent", assuming that the session is named "s0": + # - DefaultMetricIdGenerator: name "s0.bytes-sent", tags: {}. + # - TaggingMetricIdGenerator: name "session.bytes-sent", tags: {"session":"s0"} + # + # Here is how each one of them generates identifiers for the node metric "bytes-sent", + # assuming that the session is named "s0", and the node's broadcast address is 10.1.2.3:9042: + # - DefaultMetricIdGenerator: name "s0.nodes.10_1_2_3:9042.bytes-sent", tags: {}. + # - TaggingMetricIdGenerator: name "nodes.bytes-sent", tags: { "session" : "s0", + # "node" : "/10.1.2.3:9042" } + # + # As shown above, both built-in implementations generate names that are path-like structures + # separated by dots. This is indeed the most common expected format by reporting tools. + # + # Required: yes + # Modifiable at runtime: no + # Overridable in a profile: no + class = DefaultMetricIdGenerator + + # An optional prefix to prepend to each generated metric name. + # + # The prefix should not start nor end with a dot or any other path separator; the following + # are two valid examples: "cassandra" or "myapp.prod.cassandra". 
+ # + # For example, if this prefix is set to "cassandra", here is how the session metric + # "bytes-sent" would be named, assuming that the session is named "s0": + # - with DefaultMetricIdGenerator: "cassandra.s0.bytes-sent" + # - with TaggingMetricIdGenerator: "cassandra.session.bytes-sent" + # + # Here is how the node metric "bytes-sent" would be named, assuming that the session is named + # "s0", and the node's broadcast address is 10.1.2.3:9042: + # - with DefaultMetricIdGenerator: "cassandra.s0.nodes.10_1_2_3:9042.bytes-sent" + # - with TaggingMetricIdGenerator: "cassandra.nodes.bytes-sent" + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // prefix = "cassandra" + } + # The session-level metrics (all disabled by default). # # Required: yes @@ -1355,10 +1413,12 @@ datastax-java-driver { # Overridable in a profile: no session { enabled = [ - # The number and rate of bytes sent for the entire session (exposed as a Meter). + # The number and rate of bytes sent for the entire session (exposed as a Meter if available, + # otherwise as a Counter). // bytes-sent, - # The number and rate of bytes received for the entire session (exposed as a Meter). + # The number and rate of bytes received for the entire session (exposed as a Meter if + # available, otherwise as a Counter). // bytes-received # The number of nodes to which the driver has at least one active connection (exposed as a @@ -1375,7 +1435,7 @@ datastax-java-driver { # with a DriverTimeoutException (exposed as a Counter). // cql-client-timeouts, - # The size of the driver-side cache of CQL prepared statements. + # The size of the driver-side cache of CQL prepared statements (exposed as a Gauge). # # The cache uses weak values eviction, so this represents the number of PreparedStatement # instances that your application has created, and is still holding a reference to. 
Note @@ -1596,10 +1656,12 @@ datastax-java-driver { # See the description of the connection.max-orphan-requests option for more details. // pool.orphaned-streams, - # The number and rate of bytes sent to this node (exposed as a Meter). + # The number and rate of bytes sent to this node (exposed as a Meter if available, otherwise + # as a Counter). // bytes-sent, - # The number and rate of bytes received from this node (exposed as a Meter). + # The number and rate of bytes received from this node (exposed as a Meter if available, + # otherwise as a Counter). // bytes-received, # The throughput and latency percentiles of individual CQL messages sent to this node as diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGeneratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGeneratorTest.java new file mode 100644 index 00000000000..851f7b843c2 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdGeneratorTest.java @@ -0,0 +1,108 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class DefaultMetricIdGeneratorTest { + + @Mock private InternalDriverContext context; + + @Mock private DriverConfig config; + + @Mock private DriverExecutionProfile profile; + + @Mock private Node node; + + @Mock private EndPoint endpoint; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + given(context.getConfig()).willReturn(config); + given(context.getSessionName()).willReturn("s0"); + given(config.getDefaultProfile()).willReturn(profile); + given(node.getEndPoint()).willReturn(endpoint); + given(endpoint.asMetricPrefix()).willReturn("10_1_2_3:9042"); + } + + @Test + @UseDataProvider("sessionMetrics") + public void should_generate_session_metric(String prefix, String expectedName) { + // given + given(profile.getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")) + .willReturn(prefix); + DefaultMetricIdGenerator generator = new 
DefaultMetricIdGenerator(context); + // when + MetricId id = generator.sessionMetricId(DefaultSessionMetric.CONNECTED_NODES); + // then + assertThat(id.getName()).isEqualTo(expectedName); + assertThat(id.getTags()).isEmpty(); + } + + @Test + @UseDataProvider("nodeMetrics") + public void should_generate_node_metric(String prefix, String expectedName) { + // given + given(profile.getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")) + .willReturn(prefix); + DefaultMetricIdGenerator generator = new DefaultMetricIdGenerator(context); + // when + MetricId id = generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES); + // then + assertThat(id.getName()).isEqualTo(expectedName); + assertThat(id.getTags()).isEmpty(); + } + + @DataProvider + public static Object[][] sessionMetrics() { + String suffix = DefaultSessionMetric.CONNECTED_NODES.getPath(); + return new Object[][] { + new Object[] {"", "s0." + suffix}, + new Object[] {"cassandra", "cassandra.s0." + suffix}, + new Object[] {"app.cassandra", "app.cassandra.s0." + suffix} + }; + } + + @DataProvider + public static Object[][] nodeMetrics() { + String suffix = DefaultNodeMetric.CQL_MESSAGES.getPath(); + return new Object[][] { + new Object[] {"", "s0.nodes.10_1_2_3:9042." + suffix}, + new Object[] {"cassandra", "cassandra.s0.nodes.10_1_2_3:9042." + suffix}, + new Object[] {"app.cassandra", "app.cassandra.s0.nodes.10_1_2_3:9042." + suffix} + }; + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdTest.java new file mode 100644 index 00000000000..91b7e9490af --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricIdTest.java @@ -0,0 +1,61 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.google.common.collect.ImmutableMap; +import org.junit.Test; + +public class DefaultMetricIdTest { + + @Test + public void testGetName() { + DefaultMetricId id = new DefaultMetricId("metric1", ImmutableMap.of()); + assertThat(id.getName()).isEqualTo("metric1"); + } + + @Test + public void testGetTags() { + DefaultMetricId id = + new DefaultMetricId("metric1", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + assertThat(id.getTags()) + .hasSize(2) + .containsEntry("tag1", "value1") + .containsEntry("tag2", "value2"); + } + + @Test + public void testEquals() { + DefaultMetricId id1 = + new DefaultMetricId("metric1", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + DefaultMetricId id2 = + new DefaultMetricId("metric1", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + DefaultMetricId id3 = + new DefaultMetricId("metric2", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + DefaultMetricId id4 = new DefaultMetricId("metric1", ImmutableMap.of("tag2", "value2")); + assertThat(id1).isEqualTo(id2).isNotEqualTo(id3).isNotEqualTo(id4); + } + + @Test + public void testHashCode() { + DefaultMetricId id1 = + new DefaultMetricId("metric1", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + DefaultMetricId id2 = + new DefaultMetricId("metric1", ImmutableMap.of("tag1", "value1", "tag2", "value2")); + assertThat(id1).hasSameHashCodeAs(id2); + } +} diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java index 44d0131283f..51886d712a6 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java @@ -35,7 +35,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; @@ -102,8 +102,8 @@ public void should_throw_if_registry_of_wrong_type() { InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); DriverConfig config = mock(DriverConfig.class); - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + List enabledMetrics = + Collections.singletonList(DefaultSessionMetric.CQL_REQUESTS.getPath()); // when when(config.getDefaultProfile()).thenReturn(profile); when(context.getConfig()).thenReturn(config); @@ -111,14 +111,14 @@ public void should_throw_if_registry_of_wrong_type() { // registry object is not a registry type when(context.getMetricRegistry()).thenReturn(Integer.MAX_VALUE); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); + .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then try { new DropwizardMetricsFactory(context); fail( - "MetricsFactory should require correct registy object type: " + "MetricsFactory should require correct registry object type: " + 
MetricRegistry.class.getName()); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGeneratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGeneratorTest.java new file mode 100644 index 00000000000..02fdc9fb0ab --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/TaggingMetricIdGeneratorTest.java @@ -0,0 +1,114 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.google.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.Map; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +@RunWith(DataProviderRunner.class) +public class TaggingMetricIdGeneratorTest { + + @Mock private InternalDriverContext context; + + @Mock private DriverConfig config; + + @Mock private DriverExecutionProfile profile; + + @Mock private Node node; + + @Mock private EndPoint endpoint; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + given(context.getConfig()).willReturn(config); + given(context.getSessionName()).willReturn("s0"); + given(config.getDefaultProfile()).willReturn(profile); + given(node.getEndPoint()).willReturn(endpoint); + given(endpoint.toString()).willReturn("/10.1.2.3:9042"); + } + + @Test + @UseDataProvider("sessionMetrics") + public void should_generate_session_metric( + String prefix, String expectedName, Map expectedTags) { + // given + given(profile.getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")) 
+ .willReturn(prefix); + TaggingMetricIdGenerator generator = new TaggingMetricIdGenerator(context); + // when + MetricId id = generator.sessionMetricId(DefaultSessionMetric.CONNECTED_NODES); + // then + assertThat(id.getName()).isEqualTo(expectedName); + assertThat(id.getTags()).isEqualTo(expectedTags); + } + + @Test + @UseDataProvider("nodeMetrics") + public void should_generate_node_metric( + String prefix, String expectedName, Map expectedTags) { + // given + given(profile.getString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, "")) + .willReturn(prefix); + TaggingMetricIdGenerator generator = new TaggingMetricIdGenerator(context); + // when + MetricId id = generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES); + // then + assertThat(id.getName()).isEqualTo(expectedName); + assertThat(id.getTags()).isEqualTo(expectedTags); + } + + @DataProvider + public static Object[][] sessionMetrics() { + String suffix = DefaultSessionMetric.CONNECTED_NODES.getPath(); + ImmutableMap tags = ImmutableMap.of("session", "s0"); + return new Object[][] { + new Object[] {"", "session." + suffix, tags}, + new Object[] {"cassandra", "cassandra.session." + suffix, tags}, + new Object[] {"app.cassandra", "app.cassandra.session." + suffix, tags} + }; + } + + @DataProvider + public static Object[][] nodeMetrics() { + String suffix = DefaultNodeMetric.CQL_MESSAGES.getPath(); + ImmutableMap tags = ImmutableMap.of("session", "s0", "node", "/10.1.2.3:9042"); + return new Object[][] { + new Object[] {"", "nodes." + suffix, tags}, + new Object[] {"cassandra", "cassandra.nodes." + suffix, tags}, + new Object[] {"app.cassandra", "app.cassandra.nodes." 
+ suffix, tags} + }; + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java index 1748e91028d..f6bd9c23c5e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java @@ -15,80 +15,215 @@ */ package com.datastax.oss.driver.metrics.common; -import static org.assertj.core.api.Assertions.assertThat; - import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.metrics.NodeMetric; -import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import java.util.Collection; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.core.metrics.FakeTicker; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.DefaultMetricIdGenerator; +import 
com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.core.metrics.TaggingMetricIdGenerator; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.InetSocketAddress; +import java.time.Duration; +import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; -import java.util.stream.Stream; -import org.junit.ClassRule; +import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public abstract class AbstractMetricsTestBase { - @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); + protected static final List ENABLED_SESSION_METRICS = + Arrays.asList(DefaultSessionMetric.values()); - private static final List ENABLED_SESSION_METRICS = - Stream.of(DefaultSessionMetric.values()) - .map(DefaultSessionMetric::getPath) - .collect(Collectors.toList()); - private static final List ENABLED_NODE_METRICS = - Stream.of(DefaultNodeMetric.values()) - .map(DefaultNodeMetric::getPath) - .collect(Collectors.toList()); + protected static final List ENABLED_NODE_METRICS = + Arrays.asList(DefaultNodeMetric.values()); - protected Object getMetricRegistry() { - return null; - } + protected abstract SimulacronRule simulacron(); + + protected abstract Object newMetricRegistry(); - protected abstract String getMetricFactoryClass(); + protected abstract String getMetricsFactoryClass(); + + protected abstract MetricsFactory newTickingMetricsFactory( + InternalDriverContext context, Ticker ticker); protected abstract void assertMetrics(CqlSession session); - protected abstract Collection getRegistryMetrics(); + protected abstract void assertNodeMetricsEvicted(CqlSession session, Node node) 
throws Exception; + + protected abstract void assertNodeMetricsNotEvicted(CqlSession session, Node node) + throws Exception; + + @Before + public void clearPrimes() { + simulacron().cluster().clearLogs(); + simulacron().cluster().clearPrimes(true); + } @Test - public void should_expose_metrics() { + @UseDataProvider("descriptorsAndPrefixes") + public void should_expose_metrics(Class descriptorClass, String prefix) { + DriverConfigLoader loader = SessionUtils.configLoaderBuilder() - .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, ENABLED_SESSION_METRICS) - .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, ENABLED_NODE_METRICS) - .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricFactoryClass()) + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + ENABLED_SESSION_METRICS.stream() + .map(DefaultSessionMetric::getPath) + .collect(Collectors.toList())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + ENABLED_NODE_METRICS.stream() + .map(DefaultNodeMetric::getPath) + .collect(Collectors.toList())) + .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricsFactoryClass()) + .withString( + DefaultDriverOption.METRICS_ID_GENERATOR_CLASS, descriptorClass.getSimpleName()) + .withString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, prefix) .build(); - CqlSessionBuilder builder = - CqlSession.builder().addContactEndPoints(CCM_RULE.getContactPoints()); + try (CqlSession session = - (CqlSession) - builder.withConfigLoader(loader).withMetricRegistry(getMetricRegistry()).build()) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); + CqlSession.builder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .withMetricRegistry(newMetricRegistry()) + .build()) { + + for (Node node : session.getMetadata().getNodes().values()) { + for (int i = 0; i < 10; i++) { + session.execute( + SimpleStatement.newInstance("SELECT release_version 
FROM system.local") + .setNode(node)); + } } - // Should have 10 requests. Assert all applicable metrics. - assertMetricsSize(getRegistryMetrics()); assertMetrics(session); } } - protected String buildSessionMetricPattern(SessionMetric metric, CqlSession s) { - return s.getContext().getSessionName() + "\\." + metric.getPath(); + @DataProvider + public static Object[][] descriptorsAndPrefixes() { + return new Object[][] { + new Object[] {DefaultMetricIdGenerator.class, ""}, + new Object[] {DefaultMetricIdGenerator.class, "cassandra"}, + new Object[] {TaggingMetricIdGenerator.class, ""}, + new Object[] {TaggingMetricIdGenerator.class, "cassandra"}, + }; } - protected String buildNodeMetricPattern(NodeMetric metric, CqlSession s) { - return s.getContext().getSessionName() + "\\.nodes\\.\\S*\\." + metric.getPath(); + @Test + public void should_evict_node_level_metrics() throws Exception { + // given + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + ENABLED_SESSION_METRICS.stream() + .map(DefaultSessionMetric::getPath) + .collect(Collectors.toList())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + ENABLED_NODE_METRICS.stream() + .map(DefaultNodeMetric::getPath) + .collect(Collectors.toList())) + .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) + .build(); + FakeTicker fakeTicker = new FakeTicker(); + try (CqlSession session = + new TestSessionBuilder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .withMetricRegistry(newMetricRegistry()) + .withTicker(fakeTicker) + .build()) { + + for (Node node : session.getMetadata().getNodes().values()) { + for (int i = 0; i < 10; i++) { + session.execute( + SimpleStatement.newInstance("SELECT release_version FROM system.local") + .setNode(node)); + } + } + + Node node1 = findNode(session, 0); + Node node2 = findNode(session, 1); + Node node3 = 
findNode(session, 2); + + // when advance time to before eviction + fakeTicker.advance(Duration.ofMinutes(59)); + // execute query that updates only node1 + session.execute( + SimpleStatement.newInstance("SELECT release_version FROM system.local").setNode(node1)); + // advance time to after eviction + fakeTicker.advance(Duration.ofMinutes(2)); + + // then no node-level metrics should be evicted from node1 + assertNodeMetricsNotEvicted(session, node1); + // node2 and node3 metrics should have been evicted + assertNodeMetricsEvicted(session, node2); + assertNodeMetricsEvicted(session, node3); + } + } + + private Node findNode(CqlSession session, int id) { + InetSocketAddress address1 = simulacron().cluster().node(id).inetSocketAddress(); + return session.getMetadata().findNode(address1).orElseThrow(IllegalStateException::new); + } + + private class TestSessionBuilder extends SessionBuilder { + + private Ticker ticker; + + @Override + protected CqlSession wrap(@NonNull CqlSession defaultSession) { + return defaultSession; + } + + public TestSessionBuilder withTicker(Ticker ticker) { + this.ticker = ticker; + return this; + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new TestDriverContext(configLoader, programmaticArguments, ticker); + } } - private void assertMetricsSize(Collection metrics) { - assertThat(metrics).hasSize(ENABLED_SESSION_METRICS.size() + ENABLED_NODE_METRICS.size()); + private class TestDriverContext extends DefaultDriverContext { + + private final Ticker ticker; + + public TestDriverContext( + @NonNull DriverConfigLoader configLoader, + @NonNull ProgrammaticArguments programmaticArguments, + @NonNull Ticker ticker) { + super(configLoader, programmaticArguments); + this.ticker = ticker; + } + + @Override + protected MetricsFactory buildMetricsFactory() { + return newTickingMetricsFactory(this, ticker); + } } } diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java index c9717475cc2..d4261c2d967 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -16,153 +16,211 @@ package com.datastax.oss.driver.metrics.micrometer; import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerNodeMetricUpdater; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerTags; import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import io.micrometer.core.instrument.Counter; import io.micrometer.core.instrument.Gauge; import io.micrometer.core.instrument.Meter; import 
io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Tag; import io.micrometer.core.instrument.Timer; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; -import java.util.Collection; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.regex.Pattern; -import org.assertj.core.api.Condition; +import java.lang.reflect.Field; +import org.junit.ClassRule; import org.junit.experimental.categories.Category; @Category(ParallelizableTests.class) public class MicrometerMetricsIT extends AbstractMetricsTestBase { - private static final MeterRegistry METER_REGISTRY = new SimpleMeterRegistry(); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); @Override - protected void assertMetrics(CqlSession session) { - await() - .pollInterval(500, TimeUnit.MILLISECONDS) - .atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> - assertThat(METER_REGISTRY.getMeters()) - .haveExactly( - 1, - buildTimerCondition( - "CQL_REQUESTS should be a SESSION Timer with count 10", - buildSessionMetricPattern(DefaultSessionMetric.CQL_REQUESTS, session), - a -> a == 10)) - .haveExactly( - 1, - buildTimerCondition( - "CQL_MESSAGESS should be a NODE Timer with count 10", - buildNodeMetricPattern(DefaultNodeMetric.CQL_MESSAGES, session), - a -> a == 10)) - .haveExactly( - 1, - buildGaugeCondition( - "CONNECTED_NODES should be a SESSION Gauge with count 1", - buildSessionMetricPattern( - DefaultSessionMetric.CONNECTED_NODES, session), - a -> a == 1)) - .haveExactly( - 1, - buildCounterCondition( - "RETRIES should be a NODE Counter with count 0", - buildNodeMetricPattern(DefaultNodeMetric.RETRIES, session), - a -> a == 0)) - .haveExactly( - 1, - buildCounterCondition( - "BYTES_SENT should be a SESSION Counter with count > 0", - buildSessionMetricPattern(DefaultSessionMetric.BYTES_SENT, session), - a -> a > 0)) - .haveExactly( - 1, - 
buildCounterCondition( - "BYTES_SENT should be a SESSION Counter with count > 0", - buildNodeMetricPattern(DefaultNodeMetric.BYTES_SENT, session), - a -> a > 0)) - .haveExactly( - 1, - buildCounterCondition( - "BYTES_RECEIVED should be a SESSION Counter with count > 0", - buildSessionMetricPattern(DefaultSessionMetric.BYTES_RECEIVED, session), - a -> a > 0)) - .haveExactly( - 1, - buildGaugeCondition( - "AVAILABLE_STREAMS should be a NODE Gauge with count 1024", - buildNodeMetricPattern(DefaultNodeMetric.AVAILABLE_STREAMS, session), - a -> a == 1024)) - .haveExactly( - 1, - buildCounterCondition( - "BYTES_RECEIVED should be a NODE Counter with count > 0", - buildNodeMetricPattern(DefaultNodeMetric.BYTES_RECEIVED, session), - a -> a > 0))); + protected SimulacronRule simulacron() { + return SIMULACRON_RULE; } @Override - protected Object getMetricRegistry() { - return METER_REGISTRY; + protected MeterRegistry newMetricRegistry() { + return new SimpleMeterRegistry(); } @Override - protected String getMetricFactoryClass() { + protected String getMetricsFactoryClass() { return "MicrometerMetricsFactory"; } @Override - protected Collection getRegistryMetrics() { - return METER_REGISTRY.getMeters(); + protected MetricsFactory newTickingMetricsFactory(InternalDriverContext context, Ticker ticker) { + return new MicrometerMetricsFactory(context, ticker); } - private Condition buildTimerCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition(description) { - @Override - public boolean matches(Meter obj) { - if (!(obj instanceof Timer)) { - return false; - } - Timer timer = (Timer) obj; - return Pattern.matches(metricPattern, timer.getId().getName()) - && verifyFunction.apply(timer.count()); + @Override + protected void assertMetrics(CqlSession session) { + + MeterRegistry registry = + (MeterRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); + assertThat(registry).isNotNull(); + + 
assertThat(registry.getMeters()) + .hasSize(ENABLED_SESSION_METRICS.size() + ENABLED_NODE_METRICS.size() * 3); + + MetricIdGenerator metricIdGenerator = + ((InternalDriverContext) session.getContext()).getMetricIdGenerator(); + + for (DefaultSessionMetric metric : ENABLED_SESSION_METRICS) { + MetricId id = metricIdGenerator.sessionMetricId(metric); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + Meter m = registry.find(id.getName()).tags(tags).meter(); + assertThat(m).isNotNull(); + switch (metric) { + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isGreaterThan(0.0); + break; + case CONNECTED_NODES: + assertThat(m).isInstanceOf(Gauge.class); + assertThat(((Gauge) m).value()).isEqualTo(3.0); + break; + case CQL_REQUESTS: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).count()).isEqualTo(30); + break; + case CQL_CLIENT_TIMEOUTS: + case THROTTLING_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isZero(); + break; + case THROTTLING_DELAY: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).count()).isZero(); + break; + case THROTTLING_QUEUE_SIZE: + case CQL_PREPARED_CACHE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat(((Gauge) m).value()).isZero(); + break; } - }; - } + } - private Condition buildCounterCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition(description) { - @Override - public boolean matches(Meter obj) { - if (!(obj instanceof Counter)) { - return false; + for (Node node : session.getMetadata().getNodes().values()) { + + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + Meter m = registry.find(id.getName()).tags(tags).meter(); + assertThat(m).isNotNull(); + switch (metric) { + case 
OPEN_CONNECTIONS: + assertThat(m).isInstanceOf(Gauge.class); + // control node has 2 connections + assertThat(((Gauge) m).value()).isBetween(1.0, 2.0); + break; + case CQL_MESSAGES: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).count()).isEqualTo(10); + break; + case AVAILABLE_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + assertThat(((Gauge) m).value()).isGreaterThan(100); + break; + case IN_FLIGHT: + assertThat(m).isInstanceOf(Gauge.class); + break; + case ORPHANED_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + assertThat(((Gauge) m).value()).isZero(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isGreaterThan(0.0); + break; + case UNSENT_REQUESTS: + case ABORTED_REQUESTS: + case WRITE_TIMEOUTS: + case READ_TIMEOUTS: + case UNAVAILABLES: + case OTHER_ERRORS: + case RETRIES: + case RETRIES_ON_ABORTED: + case RETRIES_ON_READ_TIMEOUT: + case RETRIES_ON_WRITE_TIMEOUT: + case RETRIES_ON_UNAVAILABLE: + case RETRIES_ON_OTHER_ERROR: + case IGNORES: + case IGNORES_ON_ABORTED: + case IGNORES_ON_READ_TIMEOUT: + case IGNORES_ON_WRITE_TIMEOUT: + case IGNORES_ON_UNAVAILABLE: + case IGNORES_ON_OTHER_ERROR: + case SPECULATIVE_EXECUTIONS: + case CONNECTION_INIT_ERRORS: + case AUTHENTICATION_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isZero(); + break; } - Counter counter = (Counter) obj; - return Pattern.matches(metricPattern, counter.getId().getName()) - && verifyFunction.apply(counter.count()); } - }; + } } - private Condition buildGaugeCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition(description) { - @Override - public boolean matches(Meter obj) { - if (!(obj instanceof Gauge)) { - return false; - } - Gauge gauge = (Gauge) obj; - return Pattern.matches(metricPattern, gauge.getId().getName()) - && verifyFunction.apply(gauge.value()); - } - }; + @Override + 
protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) throws Exception { + InternalDriverContext context = (InternalDriverContext) session.getContext(); + MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); + MeterRegistry registry = (MeterRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + // FIXME see JAVA-2929 + triggerCacheCleanup(context.getMetricsFactory()); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + Meter m = registry.find(id.getName()).tags(tags).meter(); + assertThat(m).isNotNull(); + } + } + + @Override + protected void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception { + InternalDriverContext context = (InternalDriverContext) session.getContext(); + MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); + MeterRegistry registry = (MeterRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + // FIXME see JAVA-2929 + triggerCacheCleanup(context.getMetricsFactory()); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + Meter m = registry.find(id.getName()).tags(tags).meter(); + assertThat(m).isNull(); + } + } + + private void triggerCacheCleanup(MetricsFactory metricsFactory) throws Exception { + Field metricsCache = MicrometerMetricsFactory.class.getDeclaredField("metricsCache"); + metricsCache.setAccessible(true); + @SuppressWarnings("unchecked") + Cache cache = + (Cache) metricsCache.get(metricsFactory); + cache.cleanUp(); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java index 
446bf9c309b..52a05f7d593 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -16,171 +16,216 @@ package com.datastax.oss.driver.metrics.microprofile; import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; +import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerNodeMetricUpdater; +import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory; +import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileTags; import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; +import com.datastax.oss.driver.shaded.guava.common.base.Ticker; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import io.smallrye.metrics.MetricsRegistryImpl; -import java.util.Collection; -import java.util.Map.Entry; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import java.util.regex.Pattern; -import org.assertj.core.api.Condition; +import java.lang.reflect.Field; import org.eclipse.microprofile.metrics.Counter; import 
org.eclipse.microprofile.metrics.Gauge; import org.eclipse.microprofile.metrics.Meter; import org.eclipse.microprofile.metrics.Metric; import org.eclipse.microprofile.metrics.MetricID; import org.eclipse.microprofile.metrics.MetricRegistry; +import org.eclipse.microprofile.metrics.Tag; import org.eclipse.microprofile.metrics.Timer; +import org.junit.ClassRule; import org.junit.experimental.categories.Category; @Category(ParallelizableTests.class) public class MicroProfileMetricsIT extends AbstractMetricsTestBase { - private static final MetricRegistry METRIC_REGISTRY = new MetricsRegistryImpl(); + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); @Override - protected void assertMetrics(CqlSession session) { - await() - .pollInterval(500, TimeUnit.MILLISECONDS) - .atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> - assertThat(METRIC_REGISTRY.getMetrics()) - .hasEntrySatisfying( - buildTimerCondition( - "CQL_REQUESTS should be a SESSION Timer with count 10", - buildSessionMetricPattern(DefaultSessionMetric.CQL_REQUESTS, session), - a -> a == 10)) - .hasEntrySatisfying( - buildGaugeCondition( - "CONNECTED_NODES should be a SESSION Gauge with count 1", - buildSessionMetricPattern( - DefaultSessionMetric.CONNECTED_NODES, session), - a -> a == 1)) - .hasEntrySatisfying( - buildMeterCondition( - "BYTES_SENT should be a SESSION Meter with count > 0", - buildSessionMetricPattern(DefaultSessionMetric.BYTES_SENT, session), - a -> a > 0)) - .hasEntrySatisfying( - buildMeterCondition( - "BYTES_SENT should be a SESSION Meter with count > 0", - buildNodeMetricPattern(DefaultNodeMetric.BYTES_SENT, session), - a -> a > 0)) - .hasEntrySatisfying( - buildMeterCondition( - "BYTES_RECEIVED should be a SESSION Meter with count > 0", - buildSessionMetricPattern(DefaultSessionMetric.BYTES_RECEIVED, session), - a -> a > 0)) - .hasEntrySatisfying( - buildMeterCondition( - "BYTES_RECEIVED should be a NODE Meter 
with count > 0", - buildNodeMetricPattern(DefaultNodeMetric.BYTES_RECEIVED, session), - a -> a > 0)) - .hasEntrySatisfying( - buildTimerCondition( - "CQL_MESSAGESS should be a NODE Timer with count 10", - buildNodeMetricPattern(DefaultNodeMetric.CQL_MESSAGES, session), - a -> a == 10)) - .hasEntrySatisfying( - buildGaugeCondition( - "AVAILABLE_STREAMS should be a NODE Gauge with count 1024", - buildNodeMetricPattern(DefaultNodeMetric.AVAILABLE_STREAMS, session), - a -> a == 1024)) - .hasEntrySatisfying( - buildCounterCondition( - "RETRIES should be a NODE Counter with count 0", - buildNodeMetricPattern(DefaultNodeMetric.RETRIES, session), - a -> a == 0))); + protected SimulacronRule simulacron() { + return SIMULACRON_RULE; } @Override - protected Object getMetricRegistry() { - return METRIC_REGISTRY; + protected MetricRegistry newMetricRegistry() { + return new MetricsRegistryImpl(); } @Override - protected String getMetricFactoryClass() { + protected String getMetricsFactoryClass() { return "MicroProfileMetricsFactory"; } @Override - protected Collection getRegistryMetrics() { - return METRIC_REGISTRY.getMetrics().entrySet(); + protected MetricsFactory newTickingMetricsFactory(InternalDriverContext context, Ticker ticker) { + return new MicroProfileMetricsFactory(context, ticker); } - private Condition> buildTimerCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition>(description) { - @Override - public boolean matches(Entry metric) { - if (!(metric.getValue() instanceof Timer)) { - // Metric is not a Timer - return false; - } - final Timer timer = (Timer) metric.getValue(); - final MetricID id = metric.getKey(); - return verifyFunction.apply(timer.getCount()) - && Pattern.matches(metricPattern, id.getName()); + @Override + protected void assertMetrics(CqlSession session) { + + MetricRegistry registry = + (MetricRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); + 
assertThat(registry).isNotNull(); + + assertThat(registry.getMetrics()) + .hasSize(ENABLED_SESSION_METRICS.size() + ENABLED_NODE_METRICS.size() * 3); + + MetricIdGenerator metricIdGenerator = + ((InternalDriverContext) session.getContext()).getMetricIdGenerator(); + + for (DefaultSessionMetric metric : ENABLED_SESSION_METRICS) { + MetricId metricId = metricIdGenerator.sessionMetricId(metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(metricId.getTags()); + MetricID id = new MetricID(metricId.getName(), tags); + Metric m = registry.getMetrics().get(id); + assertThat(m).isNotNull(); + switch (metric) { + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0L); + break; + case CONNECTED_NODES: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isEqualTo(3); + break; + case CQL_REQUESTS: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isEqualTo(30L); + break; + case CQL_CLIENT_TIMEOUTS: + case THROTTLING_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).getCount()).isZero(); + break; + case THROTTLING_DELAY: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isZero(); + break; + case THROTTLING_QUEUE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isZero(); + break; + case CQL_PREPARED_CACHE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Long) ((Gauge) m).getValue()).isZero(); + break; } - }; - } + } - private Condition> buildCounterCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition>(description) { - @Override - public boolean matches(Entry metric) { - if (!(metric.getValue() instanceof Counter)) { - // Metric is not a Counter - return false; + for (Node node : session.getMetadata().getNodes().values()) { + + for (DefaultNodeMetric 
metric : ENABLED_NODE_METRICS) { + MetricId description = metricIdGenerator.nodeMetricId(node, metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(description.getTags()); + MetricID id = new MetricID(description.getName(), tags); + Metric m = registry.getMetrics().get(id); + assertThat(m).isNotNull(); + switch (metric) { + case OPEN_CONNECTIONS: + assertThat(m).isInstanceOf(Gauge.class); + // control node has 2 connections + assertThat((Integer) ((Gauge) m).getValue()).isBetween(1, 2); + break; + case CQL_MESSAGES: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isEqualTo(10L); + break; + case AVAILABLE_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isGreaterThan(100); + break; + case IN_FLIGHT: + assertThat(m).isInstanceOf(Gauge.class); + break; + case ORPHANED_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isZero(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0L); + break; + case UNSENT_REQUESTS: + case ABORTED_REQUESTS: + case WRITE_TIMEOUTS: + case READ_TIMEOUTS: + case UNAVAILABLES: + case OTHER_ERRORS: + case RETRIES: + case RETRIES_ON_ABORTED: + case RETRIES_ON_READ_TIMEOUT: + case RETRIES_ON_WRITE_TIMEOUT: + case RETRIES_ON_UNAVAILABLE: + case RETRIES_ON_OTHER_ERROR: + case IGNORES: + case IGNORES_ON_ABORTED: + case IGNORES_ON_READ_TIMEOUT: + case IGNORES_ON_WRITE_TIMEOUT: + case IGNORES_ON_UNAVAILABLE: + case IGNORES_ON_OTHER_ERROR: + case SPECULATIVE_EXECUTIONS: + case CONNECTION_INIT_ERRORS: + case AUTHENTICATION_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).getCount()).isZero(); + break; } - final Counter counter = (Counter) metric.getValue(); - final MetricID id = metric.getKey(); - return verifyFunction.apply(counter.getCount()) - && Pattern.matches(metricPattern, 
id.getName()); } - }; + } } - private Condition> buildMeterCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition>(description) { - @Override - public boolean matches(Entry metric) { - if (!(metric.getValue() instanceof Meter)) { - // Metric is not a Meter - return false; - } - final Meter meter = (Meter) metric.getValue(); - final MetricID id = metric.getKey(); - return verifyFunction.apply(meter.getCount()) - && Pattern.matches(metricPattern, id.getName()); - } - }; + @Override + protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) throws Exception { + InternalDriverContext context = (InternalDriverContext) session.getContext(); + MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); + MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + // FIXME see JAVA-2929 + triggerCacheCleanup(context.getMetricsFactory()); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + assertThat(registry.getMetrics()).containsKey(new MetricID(id.getName(), tags)); + } } - private Condition> buildGaugeCondition( - String description, String metricPattern, Function verifyFunction) { - return new Condition>(description) { - @Override - public boolean matches(Entry metric) { - if (!(metric.getValue() instanceof Gauge)) { - // Metric is not a Gauge - return false; - } - final Gauge gauge = (Gauge) metric.getValue(); - final Number gaugeValue = (Number) gauge.getValue(); - final MetricID id = metric.getKey(); - return verifyFunction.apply(gaugeValue.doubleValue()) - && Pattern.matches(metricPattern, id.getName()); - } - }; + @Override + protected void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception { + InternalDriverContext context = (InternalDriverContext) session.getContext(); + 
MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); + MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + // FIXME see JAVA-2929 + triggerCacheCleanup(context.getMetricsFactory()); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + assertThat(registry.getMetrics()).doesNotContainKey(new MetricID(id.getName(), tags)); + } + } + + private void triggerCacheCleanup(MetricsFactory metricsFactory) throws Exception { + Field metricsCache = MicroProfileMetricsFactory.class.getDeclaredField("metricsCache"); + metricsCache.setAccessible(true); + @SuppressWarnings("unchecked") + Cache cache = + (Cache) metricsCache.get(metricsFactory); + cache.cleanUp(); } } diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 276245dc524..46bfe7689bd 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -161,6 +161,61 @@ it is also possible to provide additional configuration to fine-tune the underly characteristics and precision, such as its highest expected latency, its number of significant digits to use, and its refresh interval. Again, see the [reference configuration] for more details. +### Selecting a metric identifier style + +Most metric libraries uniquely identify a metric by a name and, optionally, by a set of key-value +pairs, usually called tags. + +The `advanced.metrics.id-generator.class` option is used to customize how the driver generates +metric identifiers. The driver ships with two built-in implementations: + +- `DefaultMetricIdGenerator`: generates identifiers composed solely of (unique) metric names; it + does not generate tags. It is mostly suitable for use with metrics libraries that do not support + tags, like Dropwizard. +- `TaggingMetricIdGenerator`: generates identifiers composed of name and tags. 
It is mostly suitable + for use with metrics libraries that support tags, like Micrometer or MicroProfile Metrics. + +For example, here is how each one of them generates identifiers for the session metric "bytes-sent", +assuming that the session is named "s0": + +- `DefaultMetricIdGenerator`: + - name:`s0.bytes-sent` + - tags: `{}` +- `TaggingMetricIdGenerator`: + - name: `session.bytes-sent` + - tags: `{ "session" : "s0" }` + +Here is how each one of them generates identifiers for the node metric "bytes-sent", assuming that +the session is named "s0", and the node's broadcast address is 10.1.2.3:9042: + +- `DefaultMetricIdGenerator`: + - name : `s0.nodes.10_1_2_3:9042.bytes-sent` + - tags: `{}` +- `TaggingMetricIdGenerator`: + - name `nodes.bytes-sent` + - tags: `{ "session" : "s0", "node" : "\10.1.2.3:9042" }` + +As shown above, both built-in implementations generate names that are path-like structures separated +by dots. This is indeed the most common expected format by reporting tools. + +Finally, it is also possible to define a global prefix for all metric names; this can be done with +the `advanced.metrics.id-generator.prefix` option. + +The prefix should not start nor end with a dot or any other path separator; the following are two +valid examples: `cassandra` or `myapp.prod.cassandra`. 
+ +For example, if this prefix is set to `cassandra`, here is how the session metric "bytes-sent" would +be named, assuming that the session is named "s0": + +- with `DefaultMetricIdGenerator`: `cassandra.s0.bytes-sent` +- with `TaggingMetricIdGenerator`: `cassandra.session.bytes-sent` + +Here is how the node metric "bytes-sent" would be named, assuming that the session is named "s0", +and the node's broadcast address is 10.1.2.3:9042: + +- with `DefaultMetricIdGenerator`: `cassandra.s0.nodes.10_1_2_3:9042.bytes-sent` +- with `TaggingMetricIdGenerator`: `cassandra.nodes.bytes-sent` + ### Using an external metric registry Regardless of which metrics library is used, you can provide an external metric registry object when diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java index b5214c9e4f8..d00539df191 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -16,70 +16,123 @@ package com.datastax.oss.driver.internal.metrics.micrometer; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.internal.core.metrics.MetricUpdater; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import edu.umd.cs.findbugs.annotations.Nullable; +import io.micrometer.core.instrument.Counter; +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.Gauge; +import io.micrometer.core.instrument.Meter; import io.micrometer.core.instrument.MeterRegistry; +import 
io.micrometer.core.instrument.Tag; +import io.micrometer.core.instrument.Timer; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; import net.jcip.annotations.ThreadSafe; @ThreadSafe -public abstract class MicrometerMetricUpdater implements MetricUpdater { - protected final Set enabledMetrics; +public abstract class MicrometerMetricUpdater extends AbstractMetricUpdater { + protected final MeterRegistry registry; + protected final ConcurrentMap metrics = new ConcurrentHashMap<>(); - protected MicrometerMetricUpdater(Set enabledMetrics, MeterRegistry registry) { - this.enabledMetrics = enabledMetrics; + protected MicrometerMetricUpdater( + InternalDriverContext context, Set enabledMetrics, MeterRegistry registry) { + super(context, enabledMetrics); this.registry = registry; } - protected abstract String buildFullName(MetricT metric, String profileName); - @Override - public void incrementCounter(MetricT metric, String profileName, long amount) { + public void incrementCounter(MetricT metric, @Nullable String profileName, long amount) { if (isEnabled(metric, profileName)) { - registry.counter(buildFullName(metric, profileName)).increment(amount); + getOrCreateCounterFor(metric).increment(amount); } } @Override - public void updateHistogram(MetricT metric, String profileName, long value) { + public void updateHistogram(MetricT metric, @Nullable String profileName, long value) { if (isEnabled(metric, profileName)) { - registry.summary(buildFullName(metric, profileName)).record(value); + getOrCreateDistributionSummaryFor(metric).record(value); } } @Override - public void markMeter(MetricT metric, String profileName, long amount) { + public void markMeter(MetricT metric, @Nullable String profileName, long amount) { if (isEnabled(metric, profileName)) { - registry.counter(buildFullName(metric, profileName)).increment(amount); + // There 
is no meter type in Micrometer, so use a counter + getOrCreateCounterFor(metric).increment(amount); } } @Override - public void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit) { + public void updateTimer( + MetricT metric, @Nullable String profileName, long duration, TimeUnit unit) { if (isEnabled(metric, profileName)) { - registry.timer(buildFullName(metric, profileName)).record(duration, unit); + getOrCreateTimerFor(metric).record(duration, unit); } } - @Override - public boolean isEnabled(MetricT metric, String profileName) { - return enabledMetrics.contains(metric); - } + protected abstract MetricId getMetricId(MetricT metric); - protected void initializeDefaultCounter(MetricT metric, String profileName) { - if (isEnabled(metric, profileName)) { - // Just initialize eagerly so that the metric appears even when it has no data yet - registry.counter(buildFullName(metric, profileName)); + protected void initializeGauge( + MetricT metric, DriverExecutionProfile profile, Supplier supplier) { + if (isEnabled(metric, profile.getName())) { + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + return Gauge.builder(id.getName(), supplier).tags(tags).register(registry); + }); } } - protected void initializeTimer(MetricT metric, DriverExecutionProfile config) { - String profileName = config.getName(); - if (isEnabled(metric, profileName)) { - String fullName = buildFullName(metric, profileName); + protected void initializeCounter(MetricT metric, DriverExecutionProfile profile) { + if (isEnabled(metric, profile.getName())) { + getOrCreateCounterFor(metric); + } + } - registry.timer(fullName); + protected void initializeTimer(MetricT metric, DriverExecutionProfile profile) { + if (isEnabled(metric, profile.getName())) { + getOrCreateTimerFor(metric); } } + + protected Counter getOrCreateCounterFor(MetricT metric) { + return (Counter) + 
metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + return Counter.builder(id.getName()).tags(tags).register(registry); + }); + } + + protected DistributionSummary getOrCreateDistributionSummaryFor(MetricT metric) { + return (DistributionSummary) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + return DistributionSummary.builder(id.getName()).tags(tags).register(registry); + }); + } + + protected Timer getOrCreateTimerFor(MetricT metric) { + return (Timer) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); + return Timer.builder(id.getName()).tags(tags).register(registry); + }); + } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java index 90d8badb226..f326b308733 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java @@ -101,7 +101,7 @@ public MicrometerMetricsFactory(InternalDriverContext context, Ticker ticker) { if (possibleMetricRegistry instanceof MeterRegistry) { this.registry = (MeterRegistry) possibleMetricRegistry; this.sessionUpdater = - new MicrometerSessionMetricUpdater(enabledSessionMetrics, this.registry, this.context); + new MicrometerSessionMetricUpdater(this.context, enabledSessionMetrics, this.registry); } else { // Metrics are enabled, but the registry object is not an expected type throw new IllegalArgumentException( @@ -147,7 +147,7 @@ public NodeMetricUpdater newNodeUpdater(Node node) { 
} MicrometerNodeMetricUpdater updater = new MicrometerNodeMetricUpdater( - node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); metricsCache.put(node, updater); return updater; } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java index f15a5366144..3c77839c4b5 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java @@ -17,79 +17,66 @@ import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; -import com.datastax.oss.driver.internal.core.pool.ChannelPool; +import io.micrometer.core.instrument.Meter; import io.micrometer.core.instrument.MeterRegistry; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.function.Function; +import net.jcip.annotations.ThreadSafe; +@ThreadSafe public class MicrometerNodeMetricUpdater extends MicrometerMetricUpdater implements NodeMetricUpdater { - private final String metricNamePrefix; + private final Node node; private 
final Runnable signalMetricUpdated; public MicrometerNodeMetricUpdater( Node node, + InternalDriverContext context, Set enabledMetrics, MeterRegistry registry, - DriverContext driverContext, Runnable signalMetricUpdated) { - super(enabledMetrics, registry); + super(context, enabledMetrics, registry); + this.node = node; this.signalMetricUpdated = signalMetricUpdated; - InternalDriverContext context = (InternalDriverContext) driverContext; - this.metricNamePrefix = buildPrefix(driverContext.getSessionName(), node.getEndPoint()); - DriverExecutionProfile config = driverContext.getConfig().getDefaultProfile(); + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); - if (enabledMetrics.contains(DefaultNodeMetric.OPEN_CONNECTIONS)) { - this.registry.gauge( - buildFullName(DefaultNodeMetric.OPEN_CONNECTIONS, null), node.getOpenConnections()); - } - initializePoolGauge( - DefaultNodeMetric.AVAILABLE_STREAMS, node, ChannelPool::getAvailableIds, context); - initializePoolGauge(DefaultNodeMetric.IN_FLIGHT, node, ChannelPool::getInFlight, context); - initializePoolGauge( - DefaultNodeMetric.ORPHANED_STREAMS, node, ChannelPool::getOrphanedIds, context); - initializeTimer(DefaultNodeMetric.CQL_MESSAGES, config); - initializeDefaultCounter(DefaultNodeMetric.UNSENT_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.ABORTED_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.WRITE_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.READ_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.UNAVAILABLES, null); - initializeDefaultCounter(DefaultNodeMetric.OTHER_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, null); - 
initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); - initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); - initializeTimer(DseNodeMetric.GRAPH_MESSAGES, driverContext.getConfig().getDefaultProfile()); - } + initializeGauge(DefaultNodeMetric.OPEN_CONNECTIONS, profile, node::getOpenConnections); + initializeGauge(DefaultNodeMetric.AVAILABLE_STREAMS, profile, () -> availableStreamIds(node)); + initializeGauge(DefaultNodeMetric.IN_FLIGHT, profile, () -> inFlightRequests(node)); + initializeGauge(DefaultNodeMetric.ORPHANED_STREAMS, profile, () -> orphanedStreamIds(node)); - @Override - public String buildFullName(NodeMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } + initializeCounter(DefaultNodeMetric.UNSENT_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.ABORTED_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.WRITE_TIMEOUTS, profile); + initializeCounter(DefaultNodeMetric.READ_TIMEOUTS, profile); + initializeCounter(DefaultNodeMetric.UNAVAILABLES, profile); + initializeCounter(DefaultNodeMetric.OTHER_ERRORS, profile); + initializeCounter(DefaultNodeMetric.RETRIES, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, profile); 
+ initializeCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.IGNORES, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, profile); + initializeCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, profile); + initializeCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, profile); - private String buildPrefix(String sessionName, EndPoint endPoint) { - return sessionName + ".nodes." + endPoint.asMetricPrefix() + "."; + initializeTimer(DefaultNodeMetric.CQL_MESSAGES, profile); + initializeTimer(DseNodeMetric.GRAPH_MESSAGES, profile); } @Override @@ -116,24 +103,15 @@ public void updateTimer(NodeMetric metric, String profileName, long duration, Ti super.updateTimer(metric, profileName, duration, unit); } - private void initializePoolGauge( - NodeMetric metric, - Node node, - Function reading, - InternalDriverContext context) { - if (enabledMetrics.contains(metric)) { - final String metricName = buildFullName(metric, null); - registry.gauge( - metricName, - context, - c -> { - ChannelPool pool = c.getPoolManager().getPools().get(node); - return (pool == null) ? 
0 : reading.apply(pool); - }); + public void cleanupNodeMetrics() { + for (Meter meter : metrics.values()) { + registry.remove(meter); } + metrics.clear(); } - public void cleanupNodeMetrics() { - registry.getMeters().removeIf(metric -> metric.getId().getName().startsWith(metricNamePrefix)); + @Override + protected MetricId getMetricId(NodeMetric metric) { + return context.getMetricIdGenerator().nodeMetricId(node, metric); } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java index e1ff7bb7122..93106ea77a9 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java @@ -16,129 +16,43 @@ package com.datastax.oss.driver.internal.metrics.micrometer; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; -import com.datastax.oss.driver.internal.core.session.RequestProcessor; -import 
com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; -import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import edu.umd.cs.findbugs.annotations.Nullable; import io.micrometer.core.instrument.MeterRegistry; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import net.jcip.annotations.ThreadSafe; +@ThreadSafe public class MicrometerSessionMetricUpdater extends MicrometerMetricUpdater implements SessionMetricUpdater { - private static final Logger LOG = LoggerFactory.getLogger(MicrometerSessionMetricUpdater.class); - - private final String metricNamePrefix; - public MicrometerSessionMetricUpdater( - Set enabledMetrics, MeterRegistry registry, DriverContext driverContext) { - super(enabledMetrics, registry); - InternalDriverContext context = (InternalDriverContext) driverContext; - this.metricNamePrefix = context.getSessionName() + "."; + InternalDriverContext context, Set enabledMetrics, MeterRegistry registry) { + super(context, enabledMetrics, registry); - if (enabledMetrics.contains(DefaultSessionMetric.CONNECTED_NODES)) { - registerConnectedNodeGauge(context); - } - if (enabledMetrics.contains(DefaultSessionMetric.THROTTLING_QUEUE_SIZE)) { - registerThrottlingQueueGauge(context); - } - if (enabledMetrics.contains(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) { - registerPreparedCacheGauge(context); - } - initializeTimer(DefaultSessionMetric.CQL_REQUESTS, context.getConfig().getDefaultProfile()); - initializeDefaultCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, null); - initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, context.getConfig().getDefaultProfile()); - initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); - initializeTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, context.getConfig().getDefaultProfile()); - 
initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); - initializeTimer(DseSessionMetric.GRAPH_REQUESTS, context.getConfig().getDefaultProfile()); - } + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); - private void registerConnectedNodeGauge(InternalDriverContext context) { - this.registry.gauge( - buildFullName(DefaultSessionMetric.CONNECTED_NODES, null), - context, - c -> { - int count = 0; - for (Node node : c.getMetadataManager().getMetadata().getNodes().values()) { - if (node.getOpenConnections() > 0) { - ++count; - } - } - return count; - }); - } + initializeGauge(DefaultSessionMetric.CONNECTED_NODES, profile, this::connectedNodes); + initializeGauge(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, profile, this::throttlingQueueSize); + initializeGauge( + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, profile, this::preparedStatementCacheSize); - private void registerThrottlingQueueGauge(InternalDriverContext context) { - this.registry.gauge( - buildFullName(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, null), - context, - c -> { - RequestThrottler requestThrottler = c.getRequestThrottler(); - String logPrefix = c.getSessionName(); - if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { - return ((ConcurrencyLimitingRequestThrottler) requestThrottler).getQueueSize(); - } - if (requestThrottler instanceof RateLimitingRequestThrottler) { - return ((RateLimitingRequestThrottler) requestThrottler).getQueueSize(); - } - LOG.warn( - "[{}] Metric {} does not support {}, it will always return 0", - logPrefix, - DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), - requestThrottler.getClass().getName()); - return 0; - }); - } + initializeCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, profile); + initializeCounter(DefaultSessionMetric.THROTTLING_ERRORS, profile); + initializeCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, profile); - private void registerPreparedCacheGauge(InternalDriverContext context) { - 
this.registry.gauge( - buildFullName(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, null), - context, - c -> { - Cache cache = getPreparedStatementCache(c); - if (cache == null) { - LOG.warn( - "[{}] Metric {} is enabled in the config, " - + "but it looks like no CQL prepare processor is registered. " - + "The gauge will always return 0", - context.getSessionName(), - DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); - return 0L; - } - return cache.size(); - }); + initializeTimer(DefaultSessionMetric.CQL_REQUESTS, profile); + initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, profile); + initializeTimer(DseSessionMetric.CONTINUOUS_CQL_REQUESTS, profile); + initializeTimer(DseSessionMetric.GRAPH_REQUESTS, profile); } @Override - public String buildFullName(SessionMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } - - @Nullable - private static Cache getPreparedStatementCache(InternalDriverContext context) { - // By default, both the sync processor and the async one are registered and they share the same - // cache. But with a custom processor registry, there could be only one of the two present. 
- for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { - if (processor instanceof CqlPrepareAsyncProcessor) { - return ((CqlPrepareAsyncProcessor) processor).getCache(); - } else if (processor instanceof CqlPrepareSyncProcessor) { - return ((CqlPrepareSyncProcessor) processor).getCache(); - } - } - return null; + protected MetricId getMetricId(SessionMetric metric) { + return context.getMetricIdGenerator().sessionMetricId(metric); } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerTags.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerTags.java new file mode 100644 index 00000000000..7c6e54d59bf --- /dev/null +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerTags.java @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import io.micrometer.core.instrument.Tag; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +public class MicrometerTags { + + public static Iterable toMicrometerTags(Map tags) { + List micrometerTags = new ArrayList<>(tags.size()); + for (Entry entry : tags.entrySet()) { + micrometerTags.add(Tag.of(entry.getKey(), entry.getValue())); + } + return micrometerTags; + } +} diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java index abaf780f5dd..17532ff30bd 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java @@ -35,7 +35,7 @@ import com.tngtech.java.junit.dataprovider.UseDataProvider; import io.micrometer.core.instrument.MeterRegistry; import java.time.Duration; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; @@ -104,8 +104,8 @@ public void should_throw_if_wrong_or_missing_registry_type( InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); DriverConfig config = mock(DriverConfig.class); - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + List enabledMetrics = + Collections.singletonList(DefaultSessionMetric.CQL_REQUESTS.getPath()); // when when(config.getDefaultProfile()).thenReturn(profile); when(context.getConfig()).thenReturn(config); @@ -113,14 +113,14 @@ public void 
should_throw_if_wrong_or_missing_registry_type( // registry object is not a registry type when(context.getMetricRegistry()).thenReturn(registryObj); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); + .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then try { new MicrometerMetricsFactory(context); fail( - "MetricsFactory should require correct registy object type: " + "MetricsFactory should require correct registry object type: " + MeterRegistry.class.getName()); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo(expectedMsg); diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java index b712d1db8bd..2d5ea9013c5 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java @@ -16,71 +16,138 @@ package com.datastax.oss.driver.internal.metrics.microprofile; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.internal.core.metrics.MetricUpdater; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import net.jcip.annotations.ThreadSafe; +import org.eclipse.microprofile.metrics.Counter; 
+import org.eclipse.microprofile.metrics.Gauge; +import org.eclipse.microprofile.metrics.Histogram; +import org.eclipse.microprofile.metrics.Metadata; +import org.eclipse.microprofile.metrics.Meter; +import org.eclipse.microprofile.metrics.Metric; import org.eclipse.microprofile.metrics.MetricRegistry; +import org.eclipse.microprofile.metrics.MetricType; +import org.eclipse.microprofile.metrics.Tag; +import org.eclipse.microprofile.metrics.Timer; @ThreadSafe -public abstract class MicroProfileMetricUpdater implements MetricUpdater { +public abstract class MicroProfileMetricUpdater extends AbstractMetricUpdater { - protected final Set enabledMetrics; protected final MetricRegistry registry; + protected final ConcurrentMap metrics = new ConcurrentHashMap<>(); - protected MicroProfileMetricUpdater(Set enabledMetrics, MetricRegistry registry) { - this.enabledMetrics = enabledMetrics; + protected MicroProfileMetricUpdater( + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry) { + super(context, enabledMetrics); this.registry = registry; } - protected abstract String buildFullName(MetricT metric, String profileName); - @Override - public void incrementCounter(MetricT metric, String profileName, long amount) { + public void incrementCounter(MetricT metric, @Nullable String profileName, long amount) { if (isEnabled(metric, profileName)) { - registry.counter(buildFullName(metric, profileName)).inc(amount); + getOrCreateCounterFor(metric).inc(amount); } } @Override - public void updateHistogram(MetricT metric, String profileName, long value) { + public void updateHistogram(MetricT metric, @Nullable String profileName, long value) { if (isEnabled(metric, profileName)) { - registry.histogram(buildFullName(metric, profileName)).update(value); + getOrCreateHistogramFor(metric).update(value); } } @Override - public void markMeter(MetricT metric, String profileName, long amount) { + public void markMeter(MetricT metric, @Nullable String profileName, long 
amount) { if (isEnabled(metric, profileName)) { - registry.meter(buildFullName(metric, profileName)).mark(amount); + getOrCreateMeterFor(metric).mark(amount); } } @Override - public void updateTimer(MetricT metric, String profileName, long duration, TimeUnit unit) { + public void updateTimer( + MetricT metric, @Nullable String profileName, long duration, TimeUnit unit) { if (isEnabled(metric, profileName)) { - registry.timer(buildFullName(metric, profileName)).update(duration, unit); + getOrCreateTimerFor(metric).update(duration, unit); } } - @Override - public boolean isEnabled(MetricT metric, String profileName) { - return enabledMetrics.contains(metric); - } + protected abstract MetricId getMetricId(MetricT metric); - protected void initializeDefaultCounter(MetricT metric, String profileName) { - if (isEnabled(metric, profileName)) { - // Just initialize eagerly so that the metric appears even when it has no data yet - registry.counter(buildFullName(metric, profileName)); + protected void initializeGauge( + MetricT metric, DriverExecutionProfile profile, Gauge supplier) { + if (isEnabled(metric, profile.getName())) { + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + String name = id.getName(); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + Metadata metadata = + Metadata.builder().withName(name).withType(MetricType.GAUGE).build(); + return registry.register(metadata, supplier, tags); + }); } } - protected void initializeTimer(MetricT metric, DriverExecutionProfile config) { - String profileName = config.getName(); - if (isEnabled(metric, profileName)) { - String fullName = buildFullName(metric, profileName); + protected void initializeCounter(MetricT metric, DriverExecutionProfile profile) { + if (isEnabled(metric, profile.getName())) { + getOrCreateCounterFor(metric); + } + } - registry.timer(fullName); + protected void initializeTimer(MetricT metric, DriverExecutionProfile profile) { + if (isEnabled(metric, 
profile.getName())) { + getOrCreateTimerFor(metric); } } + + protected Counter getOrCreateCounterFor(MetricT metric) { + return (Counter) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + return registry.counter(id.getName(), tags); + }); + } + + protected Meter getOrCreateMeterFor(MetricT metric) { + return (Meter) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + return registry.meter(id.getName(), tags); + }); + } + + protected Histogram getOrCreateHistogramFor(MetricT metric) { + return (Histogram) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + return registry.histogram(id.getName(), tags); + }); + } + + protected Timer getOrCreateTimerFor(MetricT metric) { + return (Timer) + metrics.computeIfAbsent( + metric, + m -> { + MetricId id = getMetricId(m); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + return registry.timer(id.getName(), tags); + }); + } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java index 6cb9f5de47b..f06974a20d4 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java @@ -102,7 +102,7 @@ public MicroProfileMetricsFactory(InternalDriverContext context, Ticker ticker) this.registry = (MetricRegistry) possibleMetricRegistry; this.sessionUpdater = new MicroProfileSessionMetricUpdater( - enabledSessionMetrics, this.registry, this.context); 
+ this.context, enabledSessionMetrics, this.registry); } else { // Metrics are enabled, but the registry object is not an expected type throw new IllegalArgumentException( @@ -148,7 +148,7 @@ public NodeMetricUpdater newNodeUpdater(Node node) { } MicroProfileNodeMetricUpdater updater = new MicroProfileNodeMetricUpdater( - node, enabledNodeMetrics, registry, context, () -> metricsCache.getIfPresent(node)); + node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); metricsCache.put(node, updater); return updater; } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java index d622210250e..5693819ac56 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java @@ -17,81 +17,67 @@ import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; -import com.datastax.oss.driver.internal.core.pool.ChannelPool; import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.function.Function; -import org.eclipse.microprofile.metrics.Gauge; +import 
net.jcip.annotations.ThreadSafe; +import org.eclipse.microprofile.metrics.MetricID; import org.eclipse.microprofile.metrics.MetricRegistry; +import org.eclipse.microprofile.metrics.Tag; +@ThreadSafe public class MicroProfileNodeMetricUpdater extends MicroProfileMetricUpdater implements NodeMetricUpdater { - private final String metricNamePrefix; + private final Node node; private final Runnable signalMetricUpdated; public MicroProfileNodeMetricUpdater( Node node, + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry, - DriverContext driverContext, Runnable signalMetricUpdated) { - super(enabledMetrics, registry); + super(context, enabledMetrics, registry); + this.node = node; this.signalMetricUpdated = signalMetricUpdated; - InternalDriverContext context = (InternalDriverContext) driverContext; - this.metricNamePrefix = buildPrefix(driverContext.getSessionName(), node.getEndPoint()); - DriverExecutionProfile config = driverContext.getConfig().getDefaultProfile(); + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); - if (enabledMetrics.contains(DefaultNodeMetric.OPEN_CONNECTIONS)) { - this.registry.register( - buildFullName(DefaultNodeMetric.OPEN_CONNECTIONS, null), - (Gauge) node::getOpenConnections); - } - initializePoolGauge( - DefaultNodeMetric.AVAILABLE_STREAMS, node, ChannelPool::getAvailableIds, context); - initializePoolGauge(DefaultNodeMetric.IN_FLIGHT, node, ChannelPool::getInFlight, context); - initializePoolGauge( - DefaultNodeMetric.ORPHANED_STREAMS, node, ChannelPool::getOrphanedIds, context); - initializeTimer(DefaultNodeMetric.CQL_MESSAGES, config); - initializeDefaultCounter(DefaultNodeMetric.UNSENT_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.ABORTED_REQUESTS, null); - initializeDefaultCounter(DefaultNodeMetric.WRITE_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.READ_TIMEOUTS, null); - initializeDefaultCounter(DefaultNodeMetric.UNAVAILABLES, null); - 
initializeDefaultCounter(DefaultNodeMetric.OTHER_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, null); - initializeDefaultCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, null); - initializeDefaultCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, null); - initializeDefaultCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, null); - initializeDefaultCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, null); - initializeTimer(DseNodeMetric.GRAPH_MESSAGES, driverContext.getConfig().getDefaultProfile()); - } + initializeGauge(DefaultNodeMetric.OPEN_CONNECTIONS, profile, node::getOpenConnections); + initializeGauge(DefaultNodeMetric.AVAILABLE_STREAMS, profile, () -> availableStreamIds(node)); + initializeGauge(DefaultNodeMetric.IN_FLIGHT, profile, () -> inFlightRequests(node)); + initializeGauge(DefaultNodeMetric.ORPHANED_STREAMS, profile, () -> orphanedStreamIds(node)); - @Override - public String buildFullName(NodeMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } + initializeCounter(DefaultNodeMetric.UNSENT_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.ABORTED_REQUESTS, profile); + initializeCounter(DefaultNodeMetric.WRITE_TIMEOUTS, profile); + 
initializeCounter(DefaultNodeMetric.READ_TIMEOUTS, profile); + initializeCounter(DefaultNodeMetric.UNAVAILABLES, profile); + initializeCounter(DefaultNodeMetric.OTHER_ERRORS, profile); + initializeCounter(DefaultNodeMetric.RETRIES, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_READ_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.RETRIES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.IGNORES, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_ABORTED, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_WRITE_TIMEOUT, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_UNAVAILABLE, profile); + initializeCounter(DefaultNodeMetric.IGNORES_ON_OTHER_ERROR, profile); + initializeCounter(DefaultNodeMetric.SPECULATIVE_EXECUTIONS, profile); + initializeCounter(DefaultNodeMetric.CONNECTION_INIT_ERRORS, profile); + initializeCounter(DefaultNodeMetric.AUTHENTICATION_ERRORS, profile); - private String buildPrefix(String sessionName, EndPoint endPoint) { - return sessionName + ".nodes." + endPoint.asMetricPrefix() + "."; + initializeTimer(DefaultNodeMetric.CQL_MESSAGES, profile); + initializeTimer(DseNodeMetric.GRAPH_MESSAGES, profile); } @Override @@ -118,23 +104,17 @@ public void updateTimer(NodeMetric metric, String profileName, long duration, Ti super.updateTimer(metric, profileName, duration, unit); } - private void initializePoolGauge( - NodeMetric metric, - Node node, - Function reading, - InternalDriverContext context) { - if (enabledMetrics.contains(metric)) { - registry.register( - buildFullName(metric, null), - (Gauge) - () -> { - ChannelPool pool = context.getPoolManager().getPools().get(node); - return (pool == null) ? 
0 : reading.apply(pool); - }); + public void cleanupNodeMetrics() { + for (NodeMetric metric : metrics.keySet()) { + MetricId id = getMetricId(metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + registry.remove(new MetricID(id.getName(), tags)); } + metrics.clear(); } - public void cleanupNodeMetrics() { - registry.removeMatching((id, metric) -> id.getName().startsWith(metricNamePrefix)); + @Override + protected MetricId getMetricId(NodeMetric metric) { + return context.getMetricIdGenerator().nodeMetricId(node, metric); } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java index e46286b453c..7fddc8f150e 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java @@ -16,121 +16,43 @@ package com.datastax.oss.driver.internal.metrics.microprofile; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; -import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; -import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.metrics.MetricId; import 
com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; -import com.datastax.oss.driver.internal.core.session.RequestProcessor; -import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; -import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Set; -import org.eclipse.microprofile.metrics.Gauge; +import net.jcip.annotations.ThreadSafe; import org.eclipse.microprofile.metrics.MetricRegistry; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +@ThreadSafe public class MicroProfileSessionMetricUpdater extends MicroProfileMetricUpdater implements SessionMetricUpdater { - private static final Logger LOG = LoggerFactory.getLogger(MicroProfileSessionMetricUpdater.class); - - private final String metricNamePrefix; - public MicroProfileSessionMetricUpdater( - Set enabledMetrics, MetricRegistry registry, DriverContext driverContext) { - super(enabledMetrics, registry); - InternalDriverContext context = (InternalDriverContext) driverContext; - this.metricNamePrefix = driverContext.getSessionName() + "."; - if (enabledMetrics.contains(DefaultSessionMetric.CONNECTED_NODES)) { - this.registry.register( - buildFullName(DefaultSessionMetric.CONNECTED_NODES, null), - (Gauge) - () -> { - int count = 0; - for (Node node : context.getMetadataManager().getMetadata().getNodes().values()) { - if (node.getOpenConnections() > 0) { - count += 1; - } - } - return count; - }); - } - ; - if (enabledMetrics.contains(DefaultSessionMetric.THROTTLING_QUEUE_SIZE)) { - this.registry.register( - buildFullName(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, null), - buildQueueGauge(context.getRequestThrottler(), context.getSessionName())); - } - if (enabledMetrics.contains(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) { - this.registry.register( - 
buildFullName(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, null), - createPreparedStatementsGauge(context)); - } - initializeTimer(DefaultSessionMetric.CQL_REQUESTS, context.getConfig().getDefaultProfile()); - initializeDefaultCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, null); - initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, context.getConfig().getDefaultProfile()); - initializeDefaultCounter(DefaultSessionMetric.THROTTLING_ERRORS, null); - initializeTimer( - DseSessionMetric.CONTINUOUS_CQL_REQUESTS, context.getConfig().getDefaultProfile()); - initializeDefaultCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, null); - initializeTimer(DseSessionMetric.GRAPH_REQUESTS, context.getConfig().getDefaultProfile()); - } + InternalDriverContext context, Set enabledMetrics, MetricRegistry registry) { + super(context, enabledMetrics, registry); - private Gauge createPreparedStatementsGauge(InternalDriverContext context) { - Cache cache = getPreparedStatementCache(context); - Gauge gauge; - if (cache == null) { - LOG.warn( - "[{}] Metric {} is enabled in the config, " - + "but it looks like no CQL prepare processor is registered. 
" - + "The gauge will always return 0", - context.getSessionName(), - DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath()); - gauge = () -> 0L; - } else { - gauge = cache::size; - } - return gauge; - } + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); - @Override - public String buildFullName(SessionMetric metric, String profileName) { - return metricNamePrefix + metric.getPath(); - } + initializeGauge(DefaultSessionMetric.CONNECTED_NODES, profile, this::connectedNodes); + initializeGauge(DefaultSessionMetric.THROTTLING_QUEUE_SIZE, profile, this::throttlingQueueSize); + initializeGauge( + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, profile, this::preparedStatementCacheSize); - private Gauge buildQueueGauge(RequestThrottler requestThrottler, String logPrefix) { - if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { - return ((ConcurrencyLimitingRequestThrottler) requestThrottler)::getQueueSize; - } else if (requestThrottler instanceof RateLimitingRequestThrottler) { - return ((RateLimitingRequestThrottler) requestThrottler)::getQueueSize; - } else { - LOG.warn( - "[{}] Metric {} does not support {}, it will always return 0", - logPrefix, - DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), - requestThrottler.getClass().getName()); - return () -> 0; - } + initializeTimer(DefaultSessionMetric.CQL_REQUESTS, profile); + initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, profile); + initializeTimer(DseSessionMetric.CONTINUOUS_CQL_REQUESTS, profile); + initializeTimer(DseSessionMetric.GRAPH_REQUESTS, profile); + + initializeCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, profile); + initializeCounter(DefaultSessionMetric.THROTTLING_ERRORS, profile); + initializeCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, profile); } - @Nullable - private static Cache getPreparedStatementCache(InternalDriverContext context) { - // By default, both the sync processor and the async one are registered and they share the same - // 
cache. But with a custom processor registry, there could be only one of the two present. - for (RequestProcessor processor : context.getRequestProcessorRegistry().getProcessors()) { - if (processor instanceof CqlPrepareAsyncProcessor) { - return ((CqlPrepareAsyncProcessor) processor).getCache(); - } else if (processor instanceof CqlPrepareSyncProcessor) { - return ((CqlPrepareSyncProcessor) processor).getCache(); - } - } - return null; + @Override + protected MetricId getMetricId(SessionMetric metric) { + return context.getMetricIdGenerator().sessionMetricId(metric); } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileTags.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileTags.java new file mode 100644 index 00000000000..fe87ee7c3e6 --- /dev/null +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileTags.java @@ -0,0 +1,33 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import org.eclipse.microprofile.metrics.Tag; + +public class MicroProfileTags { + + public static Tag[] toMicroProfileTags(Map tags) { + List micrometerTags = new ArrayList<>(tags.size()); + for (Entry entry : tags.entrySet()) { + micrometerTags.add(new Tag(entry.getKey(), entry.getValue())); + } + return micrometerTags.toArray(new Tag[0]); + } +} diff --git a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java index 453bf281284..e8a00c2dc8c 100644 --- a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java +++ b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java @@ -34,7 +34,7 @@ import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; -import java.util.Arrays; +import java.util.Collections; import java.util.List; import org.eclipse.microprofile.metrics.MetricRegistry; import org.junit.Test; @@ -104,8 +104,8 @@ public void should_throw_if_wrong_or_missing_registry_type( InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); DriverConfig config = mock(DriverConfig.class); - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - List enabledMetrics = Arrays.asList(DefaultSessionMetric.CQL_REQUESTS.getPath()); + List enabledMetrics = + Collections.singletonList(DefaultSessionMetric.CQL_REQUESTS.getPath()); // when when(config.getDefaultProfile()).thenReturn(profile); 
when(context.getConfig()).thenReturn(config); @@ -113,14 +113,14 @@ public void should_throw_if_wrong_or_missing_registry_type( // registry object is not a registry type when(context.getMetricRegistry()).thenReturn(registryObj); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); + .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then try { new MicroProfileMetricsFactory(context); fail( - "MetricsFactory should require correct registy object type: " + "MetricsFactory should require correct registry object type: " + MetricRegistry.class.getName()); } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()).isEqualTo(expectedMsg); diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 54d1e86c6a7..55166b5b60b 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -11,6 +11,19 @@ been promoted from beta to production-ready in the upcoming Cassandra 4.0 releas Users should not experience any disruption. When connecting to Cassandra 4.0, V5 will be transparently selected as the protocol version to use. +#### Customizable metric names, support for metric tags + +[JAVA-2872](https://datastax-oss.atlassian.net/browse/JAVA-2872) introduced the ability to configure +how metric identifiers are generated. Metric names can now be configured, but most importantly, +metric tags are now supported. See the [metrics](../manual/core/metrics/) section of the online +manual, or the `advanced.metrics.id-generator` section in the +[reference.conf](../manual/core/configuration/reference/) file for details. + +Users should not experience any disruption. 
However, those using metrics libraries that support tags +are encouraged to try out the new `TaggingMetricIdGenerator`, as it generates metric names and tags +that will look more familiar to users of libraries such as Micrometer or MicroProfile Metrics (and +look nicer when exported to Prometheus or Graphite). + ### 4.10.0 #### Cross-datacenter failover From 9251657c8e0acc37a2d960c6e0fe7c8c664e6ca5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 19 Mar 2021 19:12:23 +0100 Subject: [PATCH 666/979] JAVA-2928: Generate counter increment/decrement constructs compatible with legacy C* versions (#1538) --- changelog/README.md | 2 + .../api/querybuilder/update/Assignment.java | 15 +++--- .../update/CounterAssignment.java | 53 +++++++++++++++++-- .../update/DecrementAssignment.java | 29 ++++++++++ .../update/IncrementAssignment.java | 29 ++++++++++ .../update/UpdateFluentAssignmentTest.java | 12 ++--- .../update/UpdateIdempotenceTest.java | 4 +- 7 files changed, 124 insertions(+), 20 deletions(-) create mode 100644 query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/DecrementAssignment.java create mode 100644 query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/IncrementAssignment.java diff --git a/changelog/README.md b/changelog/README.md index 2d3d1d0c280..57b7d637d08 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.11.0 (in progress) +- [bug] JAVA-2928: Generate counter increment/decrement constructs compatible with legacy C* + versions - [new feature] JAVA-2872: Ability to customize metric names and tags - [bug] JAVA-2925: Consider protocol version unsupported when server requires USE_BETA flag for it - [improvement] JAVA-2704: Remove protocol v5 beta status, add v6-beta diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java index 
f2590766767..a05d48ceebd 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/update/Assignment.java @@ -28,8 +28,9 @@ import com.datastax.oss.driver.internal.querybuilder.update.AppendListElementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.AppendMapEntryAssignment; import com.datastax.oss.driver.internal.querybuilder.update.AppendSetElementAssignment; -import com.datastax.oss.driver.internal.querybuilder.update.CounterAssignment; +import com.datastax.oss.driver.internal.querybuilder.update.DecrementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.DefaultAssignment; +import com.datastax.oss.driver.internal.querybuilder.update.IncrementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.PrependAssignment; import com.datastax.oss.driver.internal.querybuilder.update.PrependListElementAssignment; import com.datastax.oss.driver.internal.querybuilder.update.PrependMapEntryAssignment; @@ -109,10 +110,10 @@ static Assignment setListValue( return setListValue(CqlIdentifier.fromCql(columnName), index, value); } - /** Increments a counter, as in {@code SET c+=?}. */ + /** Increments a counter, as in {@code SET c=c+?}. */ @NonNull static Assignment increment(@NonNull CqlIdentifier columnId, @NonNull Term amount) { - return new CounterAssignment(new ColumnLeftOperand(columnId), "+=", amount); + return new IncrementAssignment(columnId, amount); } /** @@ -124,7 +125,7 @@ static Assignment increment(@NonNull String columnName, @NonNull Term amount) { return increment(CqlIdentifier.fromCql(columnName), amount); } - /** Increments a counter by 1, as in {@code SET c+=1} . */ + /** Increments a counter by 1, as in {@code SET c=c+1} . 
*/ @NonNull static Assignment increment(@NonNull CqlIdentifier columnId) { return increment(columnId, QueryBuilder.literal(1)); @@ -136,10 +137,10 @@ static Assignment increment(@NonNull String columnName) { return increment(CqlIdentifier.fromCql(columnName)); } - /** Decrements a counter, as in {@code SET c-=?}. */ + /** Decrements a counter, as in {@code SET c=c-?}. */ @NonNull static Assignment decrement(@NonNull CqlIdentifier columnId, @NonNull Term amount) { - return new CounterAssignment(new ColumnLeftOperand(columnId), "-=", amount); + return new DecrementAssignment(columnId, amount); } /** @@ -151,7 +152,7 @@ static Assignment decrement(@NonNull String columnName, @NonNull Term amount) { return decrement(CqlIdentifier.fromCql(columnName), amount); } - /** Decrements a counter by 1, as in {@code SET c-=1} . */ + /** Decrements a counter by 1, as in {@code SET c=c-1} . */ @NonNull static Assignment decrement(@NonNull CqlIdentifier columnId) { return decrement(columnId, QueryBuilder.literal(1)); diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CounterAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CounterAssignment.java index ff1280de5dd..99c30e134fa 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CounterAssignment.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/CounterAssignment.java @@ -15,21 +15,64 @@ */ package com.datastax.oss.driver.internal.querybuilder.update; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.querybuilder.term.Term; -import com.datastax.oss.driver.internal.querybuilder.lhs.LeftOperand; +import com.datastax.oss.driver.api.querybuilder.update.Assignment; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.Immutable; 
@Immutable -public class CounterAssignment extends DefaultAssignment { +public abstract class CounterAssignment implements Assignment { - public CounterAssignment( - @NonNull LeftOperand leftOperand, @NonNull String operator, @NonNull Term rightOperand) { - super(leftOperand, operator, rightOperand); + public enum Operator { + INCREMENT("%1$s=%1$s+%2$s"), + DECREMENT("%1$s=%1$s-%2$s"), + ; + + public final String pattern; + + Operator(String pattern) { + this.pattern = pattern; + } + } + + private final CqlIdentifier columnId; + private final Operator operator; + private final Term value; + + protected CounterAssignment( + @NonNull CqlIdentifier columnId, @NonNull Operator operator, @NonNull Term value) { + Preconditions.checkNotNull(columnId); + Preconditions.checkNotNull(value); + this.columnId = columnId; + this.operator = operator; + this.value = value; + } + + @Override + public void appendTo(@NonNull StringBuilder builder) { + builder.append(String.format(operator.pattern, columnId.asCql(true), buildRightOperand())); + } + + private String buildRightOperand() { + StringBuilder builder = new StringBuilder(); + value.appendTo(builder); + return builder.toString(); } @Override public boolean isIdempotent() { return false; } + + @NonNull + public CqlIdentifier getColumnId() { + return columnId; + } + + @NonNull + public Term getValue() { + return value; + } } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/DecrementAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/DecrementAssignment.java new file mode 100644 index 00000000000..e91c6b2dbd1 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/DecrementAssignment.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.querybuilder.update; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public class DecrementAssignment extends CounterAssignment { + + public DecrementAssignment(@NonNull CqlIdentifier columnId, @NonNull Term value) { + super(columnId, Operator.DECREMENT, value); + } +} diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/IncrementAssignment.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/IncrementAssignment.java new file mode 100644 index 00000000000..8c5bf6b40bb --- /dev/null +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/update/IncrementAssignment.java @@ -0,0 +1,29 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.querybuilder.update; + +import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.querybuilder.term.Term; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.Immutable; + +@Immutable +public class IncrementAssignment extends CounterAssignment { + + public IncrementAssignment(@NonNull CqlIdentifier columnId, @NonNull Term value) { + super(columnId, Operator.INCREMENT, value); + } +} diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java index 77cf9ed00ca..ad68d04b3ea 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateFluentAssignmentTest.java @@ -73,18 +73,18 @@ public void should_generate_list_value_assignment() { @Test public void should_generate_counter_operations() { assertThat(update("foo").increment("c").whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c+=1 WHERE k=?"); + .hasCql("UPDATE foo SET c=c+1 WHERE k=?"); assertThat(update("foo").increment("c", literal(2)).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c+=2 WHERE k=?"); + .hasCql("UPDATE foo SET c=c+2 WHERE k=?"); assertThat(update("foo").increment("c", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c+=? WHERE k=?"); + .hasCql("UPDATE foo SET c=c+? 
WHERE k=?"); assertThat(update("foo").decrement("c").whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c-=1 WHERE k=?"); + .hasCql("UPDATE foo SET c=c-1 WHERE k=?"); assertThat(update("foo").decrement("c", literal(2)).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c-=2 WHERE k=?"); + .hasCql("UPDATE foo SET c=c-2 WHERE k=?"); assertThat(update("foo").decrement("c", bindMarker()).whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c-=? WHERE k=?"); + .hasCql("UPDATE foo SET c=c-? WHERE k=?"); } @Test diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java index 1a3b05614ea..fb48b5deca8 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/update/UpdateIdempotenceTest.java @@ -90,10 +90,10 @@ public void should_not_be_idempotent_if_using_non_idempotent_term_in_relation() @Test public void should_not_be_idempotent_if_updating_counter() { assertThat(update("foo").increment("c").whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c+=1 WHERE k=?") + .hasCql("UPDATE foo SET c=c+1 WHERE k=?") .isNotIdempotent(); assertThat(update("foo").decrement("c").whereColumn("k").isEqualTo(bindMarker())) - .hasCql("UPDATE foo SET c-=1 WHERE k=?") + .hasCql("UPDATE foo SET c=c-1 WHERE k=?") .isNotIdempotent(); } From ad278f5091c2a8f64693a0a6f6ddf99a8ef004af Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 19 Mar 2021 19:15:01 +0100 Subject: [PATCH 667/979] JAVA-2830: Add mapper support for Java streams (#1539) --- changelog/README.md | 1 + .../oss/driver/mapper/GetEntityIT.java | 12 ++++++ .../oss/driver/mapper/QueryProviderIT.java | 20 +++++++++ .../oss/driver/mapper/QueryReturnTypesIT.java | 10 +++++ 
.../datastax/oss/driver/mapper/SelectIT.java | 43 +++++++++++++++++++ manual/mapper/daos/getentity/README.md | 17 +++++++- manual/mapper/daos/query/README.md | 5 +++ manual/mapper/daos/select/README.md | 15 +++++++ .../dao/DaoGetEntityMethodGenerator.java | 32 ++++++++++++-- .../dao/DaoSelectMethodGenerator.java | 2 + .../dao/DefaultDaoReturnTypeKind.java | 20 +++++++++ .../dao/DefaultDaoReturnTypeParser.java | 2 + .../dao/DaoGetEntityMethodGeneratorTest.java | 18 ++++++-- .../dao/DaoQueryMethodGeneratorTest.java | 2 +- .../dao/DaoSelectMethodGeneratorTest.java | 4 +- .../oss/driver/internal/mapper/DaoBase.java | 7 +++ 16 files changed, 199 insertions(+), 11 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 57b7d637d08..bedafa80225 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [new feature] JAVA-2830: Add mapper support for Java streams - [bug] JAVA-2928: Generate counter increment/decrement constructs compatible with legacy C* versions - [new feature] JAVA-2872: Ability to customize metric names and tags diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java index dd789a70925..2ca29a688a9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java @@ -38,6 +38,7 @@ import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import java.util.stream.Stream; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -116,6 +117,14 @@ public void should_get_iterable_from_result_set() { assertThat(Sets.newHashSet(products)).containsOnly(FLAMETHROWER, MP3_DOWNLOAD); } + @Test + public 
void should_get_stream_from_result_set() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = session.execute("SELECT * FROM product"); + Stream products = dao.getAsStream(rs); + assertThat(products).containsOnly(FLAMETHROWER, MP3_DOWNLOAD); + } + @Test public void should_get_async_iterable_from_async_result_set() { CqlSession session = SESSION_RULE.session(); @@ -141,6 +150,9 @@ public interface ProductDao { @GetEntity PagingIterable get(ResultSet resultSet); + @GetEntity + Stream getAsStream(ResultSet resultSet); + @GetEntity MappedAsyncPagingIterable get(AsyncResultSet resultSet); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java index 6cb5c37f2ac..1fb92637b22 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java @@ -43,6 +43,8 @@ import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.Objects; import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -115,9 +117,15 @@ public interface SensorMapper { @Dao @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface SensorDao { + @QueryProvider(providerClass = FindSliceProvider.class, entityHelpers = SensorReading.class) PagingIterable findSlice(int id, Integer month, Integer day); + @QueryProvider( + providerClass = FindSliceStreamProvider.class, + entityHelpers = SensorReading.class) + Stream findSliceAsStream(int id, Integer month, Integer day); + @Insert void save(SensorReading reading); } @@ -164,6 +172,18 @@ public PagingIterable findSlice(int id, Integer month, Integer da } } + public static class FindSliceStreamProvider extends FindSliceProvider { 
+ + public FindSliceStreamProvider( + MapperContext context, EntityHelper sensorReadingHelper) { + super(context, sensorReadingHelper); + } + + public Stream findSliceAsStream(int id, Integer month, Integer day) { + return StreamSupport.stream(findSlice(id, month, day).spliterator(), false); + } + } + @Entity public static class SensorReading { @PartitionKey private int id; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java index 716b35faebc..8002bf19f6a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java @@ -47,6 +47,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.stream.Stream; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -224,6 +225,12 @@ public void should_execute_query_and_map_to_iterable() { assertThat(iterable.all()).hasSize(10); } + @Test + public void should_execute_query_and_map_to_stream() { + Stream stream = dao.findByIdAsStream(1); + assertThat(stream).hasSize(10); + } + @Test public void should_execute_async_query_and_map_to_iterable() { MappedAsyncPagingIterable iterable = @@ -288,6 +295,9 @@ public interface TestDao { @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") PagingIterable findById(int id); + @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") + Stream findByIdAsStream(int id); + @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") CompletableFuture> findByIdAsync(int id); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java index e47b86f74f3..2dda4b7e63a 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java @@ -35,6 +35,7 @@ import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletionStage; +import java.util.stream.Stream; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -99,6 +100,11 @@ public void should_select_all() { assertThat(dao.all().all()).hasSize(2); } + @Test + public void should_select_all_stream() { + assertThat(dao.stream()).hasSize(2); + } + @Test public void should_select_by_primary_key_asynchronously() { assertThat(CompletableFutures.getUninterruptibly(dao.findByIdAsync(FLAMETHROWER.getId()))) @@ -141,6 +147,18 @@ public void should_select_all_sales() { MP3_DOWNLOAD_SALE_1); } + @Test + public void should_select_all_sales_stream() { + assertThat(saleDao.stream()) + .containsOnly( + FLAMETHROWER_SALE_1, + FLAMETHROWER_SALE_3, + FLAMETHROWER_SALE_4, + FLAMETHROWER_SALE_2, + FLAMETHROWER_SALE_5, + MP3_DOWNLOAD_SALE_1); + } + @Test public void should_select_by_partition_key() { assertThat(saleDao.salesByIdForDay(FLAMETHROWER.getId(), DATE_1).all()) @@ -148,12 +166,25 @@ public void should_select_by_partition_key() { FLAMETHROWER_SALE_1, FLAMETHROWER_SALE_3, FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_4); } + @Test + public void should_select_by_partition_key_stream() { + assertThat(saleDao.salesByIdForDayStream(FLAMETHROWER.getId(), DATE_1)) + .containsOnly( + FLAMETHROWER_SALE_1, FLAMETHROWER_SALE_3, FLAMETHROWER_SALE_2, FLAMETHROWER_SALE_4); + } + @Test public void should_select_by_partition_key_and_partial_clustering() { assertThat(saleDao.salesByIdForCustomer(FLAMETHROWER.getId(), DATE_1, 1).all()) .containsOnly(FLAMETHROWER_SALE_1, FLAMETHROWER_SALE_3, FLAMETHROWER_SALE_4); } + @Test + public void should_select_by_partition_key_and_partial_clustering_stream() { + assertThat(saleDao.salesByIdForCustomerStream(FLAMETHROWER.getId(), 
DATE_1, 1)) + .containsOnly(FLAMETHROWER_SALE_1, FLAMETHROWER_SALE_3, FLAMETHROWER_SALE_4); + } + @Test public void should_select_by_primary_key_sales() { assertThat( @@ -180,6 +211,9 @@ public interface ProductDao { @Select PagingIterable all(); + @Select + Stream stream(); + @Select Optional findOptionalById(UUID productId); @@ -203,14 +237,23 @@ public interface ProductSaleDao { @Select PagingIterable all(); + @Select + Stream stream(); + // partition key provided @Select PagingIterable salesByIdForDay(UUID id, String day); + @Select + Stream salesByIdForDayStream(UUID id, String day); + // partition key and partial clustering key @Select PagingIterable salesByIdForCustomer(UUID id, String day, int customerId); + @Select + Stream salesByIdForCustomerStream(UUID id, String day, int customerId); + // full primary key @Select ProductSale salesByIdForCustomerAtTime(UUID id, String day, int customerId, UUID ts); diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 87ab6f7f826..6eff3b1e88b 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -56,7 +56,20 @@ The method can return: @GetEntity PagingIterable asProducts(ResultSet resultSet); ``` - + +* a [Stream] of an entity class. In that case, the type of the parameter **must** be [ResultSet]. + Each row in the result set will be converted into an entity instance. + + Note: even if streams are lazily evaluated, results are fetched synchronously; therefore, as the + returned stream is traversed, blocking calls may occur, as more results are fetched from the + server in the background. For details about the stream's characteristics, see + [PagingIterable.spliterator]. + + ```java + @GetEntity + Stream asProducts(ResultSet resultSet); + ``` + * a [MappedAsyncPagingIterable] of an entity class. In that case, the type of the parameter **must** be [AsyncResultSet]. Each row in the result set will be converted into an entity instance. 
@@ -74,10 +87,12 @@ If the return type doesn't match the parameter type (for example [PagingIterable [GettableByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/GettableByName.html [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- [ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html [Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html [UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index e019ce9c5e8..81ac435b9cd 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -59,6 +59,9 @@ The method can return: * a [PagingIterable]. The method will convert each row into an entity instance. +* a [Stream]. The method will convert each row into an entity instance. For details about the + stream's characteristics, see [PagingIterable.spliterator]. + * a [CompletionStage] or [CompletableFuture] of any of the above. The method will execute the query asynchronously. Note that for result sets and iterables, you need to switch to the asynchronous equivalent [AsyncResultSet] and [MappedAsyncPagingIterable] respectively. 
@@ -117,6 +120,7 @@ Then: [ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- [Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html [BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html [ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html @@ -125,3 +129,4 @@ Then: [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html [Optional]: https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html +[Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index d31c1ce9faa..1b00ae17438 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -108,6 +108,19 @@ In all cases, the method can return: PagingIterable findByDescription(String searchString); ``` +* a [Stream] of the entity class. It behaves like a result set, except that each element is a mapped + entity instead of a row. 
+ + Note: even if streams are lazily evaluated, the query will be executed synchronously; also, as + the returned stream is traversed, more blocking calls may occur, as more results are fetched + from the server in the background. For details about the stream's characteristics, see + [PagingIterable.spliterator]. + + ```java + @Select(customWhereClause = "description LIKE :searchString") + Stream findByDescription(String searchString); + ``` + * a [CompletionStage] or [CompletableFuture] of any of the above. The method will execute the query asynchronously. Note that for iterables, you need to switch to the asynchronous equivalent [MappedAsyncPagingIterable]. @@ -154,8 +167,10 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). [perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- [MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html [PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- [MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html [Optional]: https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html +[Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java index 03fd61008f5..1d4d52fe940 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java @@ -29,6 +29,8 @@ import com.squareup.javapoet.MethodSpec; import java.util.Map; import java.util.Optional; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; import javax.lang.model.element.Element; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Name; @@ -48,6 +50,8 @@ private enum Transformation { ONE, /** Iterable of rows to iterable of entity. */ MAP, + /** Iterable of rows to stream of entity. */ + STREAM, } public DaoGetEntityMethodGenerator( @@ -106,7 +110,7 @@ public Optional generate() { .getMessager() .error( methodElement, - "Invalid return type: %s methods must return %s if the argument is %s", + "Invalid return type: %s methods returning %s must have an argument of type %s", GetEntity.class.getSimpleName(), PagingIterable.class.getSimpleName(), ResultSet.class.getSimpleName()); @@ -114,13 +118,27 @@ public Optional generate() { } entityElement = EntityUtils.typeArgumentAsEntityElement(returnType, typeParameters); transformation = Transformation.MAP; + } else if (context.getClassUtils().isSame(element, Stream.class)) { + if (!parameterIsResultSet) { + context + .getMessager() + .error( + methodElement, + "Invalid return type: %s methods returning %s must have an argument of type %s", + GetEntity.class.getSimpleName(), + Stream.class.getSimpleName(), + ResultSet.class.getSimpleName()); + return Optional.empty(); + } + entityElement = EntityUtils.typeArgumentAsEntityElement(returnType, typeParameters); + transformation = Transformation.STREAM; } else if (context.getClassUtils().isSame(element, 
MappedAsyncPagingIterable.class)) { if (!parameterIsAsyncResultSet) { context .getMessager() .error( methodElement, - "Invalid return type: %s methods must return %s if the argument is %s", + "Invalid return type: %s methods returning %s must have an argument of type %s", GetEntity.class.getSimpleName(), MappedAsyncPagingIterable.class.getSimpleName(), AsyncResultSet.class.getSimpleName()); @@ -136,10 +154,11 @@ public Optional generate() { .error( methodElement, "Invalid return type: " - + "%s methods must return a %s-annotated class, or a %s or %s thereof", + + "%s methods must return a %s-annotated class, or a %s, a %s or %s thereof", GetEntity.class.getSimpleName(), Entity.class.getSimpleName(), PagingIterable.class.getSimpleName(), + Stream.class.getSimpleName(), MappedAsyncPagingIterable.class.getSimpleName()); return Optional.empty(); } @@ -162,6 +181,13 @@ public Optional generate() { overridingMethodBuilder.addStatement( "return $L.map($L::get)", parameterName, helperFieldName); break; + case STREAM: + overridingMethodBuilder.addStatement( + "return $T.stream($L.map($L::get).spliterator(), false)", + StreamSupport.class, + parameterName, + helperFieldName); + break; } return Optional.of(overridingMethodBuilder.build()); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index d29f8e68099..8c8878b133b 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -23,6 +23,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.MAPPED_REACTIVE_RESULT_SET; import static 
com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.OPTIONAL_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.PAGING_ITERABLE; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.STREAM; import com.datastax.oss.driver.api.core.cql.BoundStatement; import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; @@ -68,6 +69,7 @@ protected Set getSupportedReturnTypes() { FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, + STREAM, FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET, CUSTOM); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index e4501f06ef3..41c841cd2e7 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -455,6 +455,26 @@ public CodeBlock wrapWithErrorHandling( throw new AssertionError("Should never get here"); } }, + + STREAM { + @Override + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { + methodBuilder.addStatement( + "return executeAndMapToEntityStream(boundStatement, $L)", helperFieldName); + } + + @Override + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { + return innerBlock; + } + }, ; @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index 25f71c4bda6..5d7c18c63cf 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -32,6 +32,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.stream.Stream; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.Name; @@ -75,6 +76,7 @@ public class DefaultDaoReturnTypeParser implements DaoReturnTypeParser { .put(CompletionStage.class, DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY) .put(CompletableFuture.class, DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY) .put(PagingIterable.class, DefaultDaoReturnTypeKind.PAGING_ITERABLE) + .put(Stream.class, DefaultDaoReturnTypeKind.STREAM) .put(MappedReactiveResultSet.class, DefaultDaoReturnTypeKind.MAPPED_REACTIVE_RESULT_SET) .build(); diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGeneratorTest.java index 01a08149893..cf7591c1020 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGeneratorTest.java @@ -27,6 +27,7 @@ import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.stream.Stream; import javax.lang.model.element.Modifier; import 
org.junit.Test; import org.junit.runner.RunWith; @@ -69,7 +70,7 @@ public static Object[][] invalidSignatures() { .build(), }, { - "Invalid return type: GetEntity methods must return a Entity-annotated class, or a PagingIterable or MappedAsyncPagingIterable thereof", + "Invalid return type: GetEntity methods must return a Entity-annotated class, or a PagingIterable, a Stream or MappedAsyncPagingIterable thereof", MethodSpec.methodBuilder("get") .addAnnotation(GetEntity.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) @@ -77,7 +78,7 @@ public static Object[][] invalidSignatures() { .build(), }, { - "Invalid return type: GetEntity methods must return a Entity-annotated class, or a PagingIterable or MappedAsyncPagingIterable thereof", + "Invalid return type: GetEntity methods must return a Entity-annotated class, or a PagingIterable, a Stream or MappedAsyncPagingIterable thereof", MethodSpec.methodBuilder("get") .addAnnotation(GetEntity.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) @@ -86,7 +87,7 @@ public static Object[][] invalidSignatures() { .build(), }, { - "Invalid return type: GetEntity methods must return PagingIterable if the argument is ResultSet", + "Invalid return type: GetEntity methods returning PagingIterable must have an argument of type ResultSet", MethodSpec.methodBuilder("get") .addAnnotation(GetEntity.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) @@ -96,7 +97,16 @@ public static Object[][] invalidSignatures() { .build(), }, { - "Invalid return type: GetEntity methods must return MappedAsyncPagingIterable if the argument is AsyncResultSet", + "Invalid return type: GetEntity methods returning Stream must have an argument of type ResultSet", + MethodSpec.methodBuilder("get") + .addAnnotation(GetEntity.class) + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .addParameter(ParameterSpec.builder(Row.class, "source").build()) + .returns(ParameterizedTypeName.get(ClassName.get(Stream.class), ENTITY_CLASS_NAME)) + .build(), 
+ }, + { + "Invalid return type: GetEntity methods returning MappedAsyncPagingIterable must have an argument of type AsyncResultSet", MethodSpec.methodBuilder("get") .addAnnotation(GetEntity.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java index 882d8fd26e6..ea0f28badce 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java @@ -45,7 +45,7 @@ public static Object[][] invalidSignatures() { + "ENTITY, OPTIONAL_ENTITY, RESULT_SET, BOUND_STATEMENT, PAGING_ITERABLE, FUTURE_OF_VOID, " + "FUTURE_OF_BOOLEAN, FUTURE_OF_LONG, FUTURE_OF_ROW, FUTURE_OF_ENTITY, " + "FUTURE_OF_OPTIONAL_ENTITY, FUTURE_OF_ASYNC_RESULT_SET, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE, REACTIVE_RESULT_SET, MAPPED_REACTIVE_RESULT_SET]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, REACTIVE_RESULT_SET, MAPPED_REACTIVE_RESULT_SET, STREAM]", MethodSpec.methodBuilder("select") .addAnnotation( AnnotationSpec.builder(Query.class) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java index 01e2f6aa9dd..c133d19e41a 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java @@ -41,7 +41,7 @@ public static Object[][] invalidSignatures() { return new Object[][] { { "Invalid return 
type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " - + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, " + + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, STREAM, " + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) @@ -51,7 +51,7 @@ public static Object[][] invalidSignatures() { }, { "Invalid return type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " - + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, " + + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, STREAM, " + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index 27539ce0482..4af39a59b84 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -40,6 +40,8 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; /** Base class for generated implementations of {@link Dao}-annotated interfaces. 
*/ public class DaoBase { @@ -240,6 +242,11 @@ protected PagingIterable executeAndMapToEntityIterable( return execute(statement).map(entityHelper::get); } + protected Stream executeAndMapToEntityStream( + Statement statement, EntityHelper entityHelper) { + return StreamSupport.stream(execute(statement).map(entityHelper::get).spliterator(), false); + } + protected CompletableFuture executeAsync(Statement statement) { CompletionStage stage = context.getSession().executeAsync(statement); // We allow DAO interfaces to return CompletableFuture instead of CompletionStage. This method From 02da73841c4aaefa8daa76fcd871d1203f322f69 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 19 Mar 2021 19:35:57 +0100 Subject: [PATCH 668/979] JAVA-2929: Revisit node-level metric eviction (#1541) --- changelog/README.md | 1 + .../core/metrics/AbstractMetricUpdater.java | 67 ++++- .../core/metrics/DropwizardMetricUpdater.java | 13 +- .../metrics/DropwizardMetricsFactory.java | 94 +++---- .../metrics/DropwizardNodeMetricUpdater.java | 47 +--- .../metrics/DropwizardMetricsFactoryTest.java | 73 +---- .../DropwizardNodeMetricUpdaterTest.java | 160 +++++++++++ .../core/metrics/DropwizardMetricsIT.java | 216 ++++++++++++++ .../oss/driver/core/metrics/FakeTicker.java | 40 --- .../oss/driver/core/metrics/MetricsIT.java | 133 --------- .../driver/core/metrics/MetricsITBase.java | 263 ++++++++++++++++++ .../core/metrics/MetricsSimulacronIT.java | 206 -------------- .../common/AbstractMetricsTestBase.java | 229 --------------- .../micrometer/MicrometerMetricsIT.java | 89 +++--- .../microprofile/MicroProfileMetricsIT.java | 115 +++----- .../micrometer/MicrometerMetricUpdater.java | 9 + .../micrometer/MicrometerMetricsFactory.java | 83 ++---- .../MicrometerNodeMetricUpdater.java | 40 +-- .../MicrometerMetricsFactoryTest.java | 91 +----- .../MicrometerNodeMetricUpdaterTest.java | 150 ++++++++++ metrics/microprofile/pom.xml | 5 + .../MicroProfileMetricUpdater.java | 12 + 
.../MicroProfileMetricsFactory.java | 85 +++--- .../MicroProfileNodeMetricUpdater.java | 43 +-- .../MicroProfileSessionMetricUpdater.java | 8 +- .../MicroProfileMetricsFactoryTest.java | 62 +---- .../MicroProfileNodeMetricsUpdaterTest.java | 152 ++++++++++ 27 files changed, 1248 insertions(+), 1238 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdaterTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java delete mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java create mode 100644 metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java create mode 100644 metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricsUpdaterTest.java diff --git a/changelog/README.md b/changelog/README.md index bedafa80225..707ec720e58 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2929: Revisit node-level metric eviction - [new feature] JAVA-2830: Add mapper support for Java streams - [bug] JAVA-2928: Generate counter increment/decrement constructs compatible with legacy C* versions diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java 
index 04fdc2c58bc..53f704bfb98 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.core.metrics; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; @@ -27,7 +29,11 @@ import com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler; import com.datastax.oss.driver.shaded.guava.common.cache.Cache; import edu.umd.cs.findbugs.annotations.Nullable; +import io.netty.util.Timeout; +import java.time.Duration; import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,12 +41,30 @@ public abstract class AbstractMetricUpdater implements MetricUpdater enabledMetrics; + private final AtomicReference metricsExpirationTimeoutRef = new AtomicReference<>(); + private final Duration expireAfter; + protected AbstractMetricUpdater(InternalDriverContext context, Set enabledMetrics) { this.context = context; this.enabledMetrics = enabledMetrics; + DriverExecutionProfile config = context.getConfig().getDefaultProfile(); + Duration expireAfter = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); + if (expireAfter.compareTo(MIN_EXPIRE_AFTER) < 0) { + LOG.warn( + "[{}] Value too low for {}: {}. 
Forcing to {} instead.", + context.getSessionName(), + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + MIN_EXPIRE_AFTER); + expireAfter = MIN_EXPIRE_AFTER; + } + this.expireAfter = expireAfter; } @Override @@ -48,6 +72,10 @@ public boolean isEnabled(MetricT metric, String profileName) { return enabledMetrics.contains(metric); } + public Duration getExpireAfter() { + return expireAfter; + } + protected int connectedNodes() { int count = 0; for (Node node : context.getMetadataManager().getMetadata().getNodes().values()) { @@ -60,7 +88,6 @@ protected int connectedNodes() { protected int throttlingQueueSize() { RequestThrottler requestThrottler = context.getRequestThrottler(); - String logPrefix = context.getSessionName(); if (requestThrottler instanceof ConcurrencyLimitingRequestThrottler) { return ((ConcurrencyLimitingRequestThrottler) requestThrottler).getQueueSize(); } @@ -69,7 +96,7 @@ protected int throttlingQueueSize() { } LOG.warn( "[{}] Metric {} does not support {}, it will always return 0", - logPrefix, + context.getSessionName(), DefaultSessionMetric.THROTTLING_QUEUE_SIZE.getPath(), requestThrottler.getClass().getName()); return 0; @@ -117,4 +144,40 @@ protected int orphanedStreamIds(Node node) { ChannelPool pool = context.getPoolManager().getPools().get(node); return (pool == null) ? 
0 : pool.getOrphanedIds(); } + + protected void startMetricsExpirationTimeout() { + metricsExpirationTimeoutRef.accumulateAndGet( + newTimeout(), + (current, update) -> { + if (current == null) { + return update; + } else { + update.cancel(); + return current; + } + }); + } + + protected void cancelMetricsExpirationTimeout() { + Timeout t = metricsExpirationTimeoutRef.getAndSet(null); + if (t != null) { + t.cancel(); + } + } + + protected Timeout newTimeout() { + return context + .getNettyOptions() + .getTimer() + .newTimeout( + t -> { + if (t.isExpired()) { + clearMetrics(); + } + }, + expireAfter.toNanos(), + TimeUnit.NANOSECONDS); + } + + protected abstract void clearMetrics(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java index 7605d770069..275cfa1185d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java @@ -54,7 +54,8 @@ protected DropwizardMetricUpdater( } @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) - public T getMetric(MetricT metric, String profileName) { + public T getMetric( + MetricT metric, @SuppressWarnings("unused") String profileName) { return (T) metrics.get(metric); } @@ -87,6 +88,16 @@ public void updateTimer( } } + @Override + protected void clearMetrics() { + for (MetricT metric : metrics.keySet()) { + MetricId id = getMetricId(metric); + registry.remove(id.getName()); + } + metrics.clear(); + reservoirs.clear(); + } + protected abstract MetricId getMetricId(MetricT metric); protected void initializeGauge( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java index 
8cfac64fbe4..96ccceae270 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactory.java @@ -20,18 +20,15 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; -import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; +import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; +import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import edu.umd.cs.findbugs.annotations.Nullable; -import java.time.Duration; -import java.util.List; +import io.netty.util.concurrent.EventExecutor; import java.util.Optional; import java.util.Set; import net.jcip.annotations.ThreadSafe; @@ -42,44 +39,23 @@ public class DropwizardMetricsFactory implements MetricsFactory { private static final Logger LOG = LoggerFactory.getLogger(DropwizardMetricsFactory.class); - static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); private final InternalDriverContext context; private final Set enabledNodeMetrics; private final MetricRegistry registry; @Nullable private final Metrics metrics; private final SessionMetricUpdater sessionUpdater; - private final 
Cache metricsCache; public DropwizardMetricsFactory(DriverContext context) { - this((InternalDriverContext) context, Ticker.systemTicker()); - } - - public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { - this.context = context; + this.context = (InternalDriverContext) context; String logPrefix = context.getSessionName(); DriverExecutionProfile config = context.getConfig().getDefaultProfile(); Set enabledSessionMetrics = - parseSessionMetricPaths(config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)); - Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); + MetricPaths.parseSessionMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED), logPrefix); this.enabledNodeMetrics = - parseNodeMetricPaths(config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED)); - - metricsCache = - CacheBuilder.newBuilder() - .ticker(ticker) - .expireAfterAccess(evictionTime) - .removalListener( - (RemovalNotification notification) -> { - LOG.debug( - "[{}] Removing metrics for node: {} from cache after {}", - logPrefix, - notification.getKey(), - evictionTime); - notification.getValue().cleanupNodeMetrics(); - }) - .build(); - + MetricPaths.parseNodeMetricPaths( + config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED), logPrefix); if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { LOG.debug("[{}] All metrics are disabled, Session.getMetrics will be empty", logPrefix); this.registry = null; @@ -87,7 +63,7 @@ public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { this.metrics = null; } else { // try to get the metric registry from the context - Object possibleMetricRegistry = context.getMetricRegistry(); + Object possibleMetricRegistry = this.context.getMetricRegistry(); if (possibleMetricRegistry == null) { // metrics are enabled, but a metric registry was not supplied to the context // create a registry object @@ -96,7 +72,7 @@ public 
DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { if (possibleMetricRegistry instanceof MetricRegistry) { this.registry = (MetricRegistry) possibleMetricRegistry; DropwizardSessionMetricUpdater dropwizardSessionUpdater = - new DropwizardSessionMetricUpdater(context, enabledSessionMetrics, registry); + new DropwizardSessionMetricUpdater(this.context, enabledSessionMetrics, registry); this.sessionUpdater = dropwizardSessionUpdater; this.metrics = new DefaultMetrics(registry, dropwizardSessionUpdater); } else { @@ -108,25 +84,18 @@ public DropwizardMetricsFactory(InternalDriverContext context, Ticker ticker) { + possibleMetricRegistry.getClass().getName() + "'"); } + if (!enabledNodeMetrics.isEmpty()) { + EventExecutor adminEventExecutor = + this.context.getNettyOptions().adminEventExecutorGroup().next(); + this.context + .getEventBus() + .register( + NodeStateEvent.class, + RunOrSchedule.on(adminEventExecutor, this::processNodeStateEvent)); + } } } - @VisibleForTesting - static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { - Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); - - if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { - LOG.warn( - "[{}] Value too low for {}: {}. 
Forcing to {} instead.", - logPrefix, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - evictionTime, - LOWEST_ACCEPTABLE_EXPIRE_AFTER); - } - - return evictionTime; - } - @Override public Optional getMetrics() { return Optional.ofNullable(metrics); @@ -142,19 +111,20 @@ public NodeMetricUpdater newNodeUpdater(Node node) { if (registry == null) { return NoopNodeMetricUpdater.INSTANCE; } else { - DropwizardNodeMetricUpdater dropwizardNodeMetricUpdater = - new DropwizardNodeMetricUpdater( - node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); - metricsCache.put(node, dropwizardNodeMetricUpdater); - return dropwizardNodeMetricUpdater; + return new DropwizardNodeMetricUpdater(node, context, enabledNodeMetrics, registry); } } - protected Set parseSessionMetricPaths(List paths) { - return MetricPaths.parseSessionMetricPaths(paths, context.getSessionName()); - } - - protected Set parseNodeMetricPaths(List paths) { - return MetricPaths.parseNodeMetricPaths(paths, context.getSessionName()); + protected void processNodeStateEvent(NodeStateEvent event) { + if (event.newState == NodeState.DOWN + || event.newState == NodeState.FORCED_DOWN + || event.newState == null) { + // node is DOWN or REMOVED + ((DropwizardNodeMetricUpdater) event.node.getMetricUpdater()).startMetricsExpirationTimeout(); + } else if (event.newState == NodeState.UP || event.newState == NodeState.UNKNOWN) { + // node is UP or ADDED + ((DropwizardNodeMetricUpdater) event.node.getMetricUpdater()) + .cancelMetricsExpirationTimeout(); + } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java index ca50f57d1c1..e5de076b548 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdater.java @@ -15,7 +15,6 @@ */ package com.datastax.oss.driver.internal.core.metrics; -import com.codahale.metrics.Metric; import com.codahale.metrics.MetricRegistry; import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; @@ -26,7 +25,6 @@ import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import java.util.Set; -import java.util.concurrent.TimeUnit; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -34,17 +32,14 @@ public class DropwizardNodeMetricUpdater extends DropwizardMetricUpdater enabledMetrics, - MetricRegistry registry, - Runnable signalMetricUpdated) { + MetricRegistry registry) { super(context, enabledMetrics, registry); this.node = node; - this.signalMetricUpdated = signalMetricUpdated; DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); @@ -89,46 +84,6 @@ public DropwizardNodeMetricUpdater( DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL); } - @Override - public void incrementCounter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.incrementCounter(metric, profileName, amount); - } - - @Override - public void updateHistogram(NodeMetric metric, String profileName, long value) { - signalMetricUpdated.run(); - super.updateHistogram(metric, profileName, value); - } - - @Override - public void markMeter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.markMeter(metric, profileName, amount); - } - - @Override - public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { - signalMetricUpdated.run(); - super.updateTimer(metric, profileName, duration, unit); - } - - @Override - @SuppressWarnings("TypeParameterUnusedInFormals") - public T getMetric(NodeMetric metric, 
String profileName) { - signalMetricUpdated.run(); - return super.getMetric(metric, profileName); - } - - public void cleanupNodeMetrics() { - for (NodeMetric metric : metrics.keySet()) { - MetricId id = getMetricId(metric); - registry.remove(id.getName()); - } - metrics.clear(); - reservoirs.clear(); - } - @Override protected MetricId getMetricId(NodeMetric metric) { MetricId id = context.getMetricIdGenerator().nodeMetricId(node, metric); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java index 51886d712a6..7434bb5b74b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricsFactoryTest.java @@ -15,87 +15,24 @@ */ package com.datastax.oss.driver.internal.core.metrics; -import static com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.timeout; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import ch.qos.logback.classic.Level; import com.codahale.metrics.MetricRegistry; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.util.LoggerTest; -import com.tngtech.java.junit.dataprovider.DataProvider; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import 
com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.Duration; import java.util.Collections; import java.util.List; import org.junit.Test; -import org.junit.runner.RunWith; -@RunWith(DataProviderRunner.class) public class DropwizardMetricsFactoryTest { - private static final String LOG_PREFIX = "prefix"; - - @Test - public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { - // given - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(DropwizardMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - DropwizardMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); - assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); - assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) - .contains( - String.format( - "[%s] Value too low for %s: %s. 
Forcing to %s instead.", - LOG_PREFIX, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - expireAfter, - LOWEST_ACCEPTABLE_EXPIRE_AFTER)); - } - - @Test - @UseDataProvider(value = "acceptableEvictionTimes") - public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( - Duration expireAfter) { - // given - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(DropwizardMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - DropwizardMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); - } - - @DataProvider - public static Object[][] acceptableEvictionTimes() { - return new Object[][] { - {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} - }; - } - @Test public void should_throw_if_registry_of_wrong_type() { // given @@ -111,7 +48,7 @@ public void should_throw_if_registry_of_wrong_type() { // registry object is not a registry type when(context.getMetricRegistry()).thenReturn(Integer.MAX_VALUE); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); + .thenReturn(Duration.ofHours(1)); when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then @@ -123,11 +60,9 @@ public void should_throw_if_registry_of_wrong_type() { } catch (IllegalArgumentException iae) { assertThat(iae.getMessage()) .isEqualTo( - "Unexpected Metrics registry object. Expected registry object to be of type '" - + MetricRegistry.class.getName() - + "', but was '" - + Integer.class.getName() - + "'"); + "Unexpected Metrics registry object. 
" + + "Expected registry object to be of type '%s', but was '%s'", + MetricRegistry.class.getName(), Integer.class.getName()); } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdaterTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdaterTest.java new file mode 100644 index 00000000000..38ce2b45b19 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metrics/DropwizardNodeMetricUpdaterTest.java @@ -0,0 +1,160 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.codahale.metrics.MetricRegistry; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.time.Duration; +import java.util.Collections; +import java.util.Set; +import java.util.function.Supplier; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class DropwizardNodeMetricUpdaterTest { + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + Duration expireAfter = 
AbstractMetricUpdater.MIN_EXPIRE_AFTER.minusMinutes(1); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + DropwizardNodeMetricUpdater updater = + new DropwizardNodeMetricUpdater(node, context, enabledMetrics, new MetricRegistry()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeHdrTimer( + NodeMetric metric, + DriverExecutionProfile profile, + DriverOption highestLatency, + DriverOption significantDigits, + DriverOption interval) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(AbstractMetricUpdater.MIN_EXPIRE_AFTER); + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[prefix] Value too low for %s: %s. 
Forcing to %s instead.", + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + AbstractMetricUpdater.MIN_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + DropwizardNodeMetricUpdater updater = + new DropwizardNodeMetricUpdater(node, context, enabledMetrics, new MetricRegistry()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeHdrTimer( + NodeMetric metric, + DriverExecutionProfile profile, + DriverOption highestLatency, + DriverOption significantDigits, + DriverOption interval) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(expireAfter); + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {AbstractMetricUpdater.MIN_EXPIRE_AFTER}, + 
{AbstractMetricUpdater.MIN_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java new file mode 100644 index 00000000000..57bda625ce2 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java @@ -0,0 +1,216 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.codahale.metrics.Counter; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Meter; +import com.codahale.metrics.Metric; +import com.codahale.metrics.MetricRegistry; +import com.codahale.metrics.Timer; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.Metrics; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; +import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import java.util.ArrayList; +import java.util.List; +import org.junit.ClassRule; +import org.junit.experimental.categories.Category; + +@Category(ParallelizableTests.class) +public class DropwizardMetricsIT extends MetricsITBase { + + @ClassRule + public static final SimulacronRule SIMULACRON_RULE = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); + + @Override + protected SimulacronRule simulacron() { + return SIMULACRON_RULE; + } + + @Override + protected MetricRegistry newMetricRegistry() { + return new MetricRegistry(); + } + + @Override + protected String getMetricsFactoryClass() { + return "DropwizardMetricsFactory"; + } + + @Override + protected void assertMetricsPresent(CqlSession session) { + + MetricRegistry registry = + (MetricRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); + assertThat(registry).isNotNull(); + + assertThat(registry.getMetrics()) + 
.hasSize(ENABLED_SESSION_METRICS.size() + ENABLED_NODE_METRICS.size() * 3); + + MetricIdGenerator metricIdGenerator = + ((InternalDriverContext) session.getContext()).getMetricIdGenerator(); + + assertThat(session.getMetrics()).isPresent(); + Metrics metrics = session.getMetrics().get(); + + for (DefaultSessionMetric metric : ENABLED_SESSION_METRICS) { + + MetricId id = metricIdGenerator.sessionMetricId(metric); + Metric m = registry.getMetrics().get(id.getName()); + assertThat(m).isNotNull(); + + // assert that the same metric is retrievable through the registry and through the driver API + assertThat(metrics.getSessionMetric(metric)) + .isPresent() + .hasValueSatisfying(v -> assertThat(v).isSameAs(m)); + + switch (metric) { + case CONNECTED_NODES: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isEqualTo(3); + break; + case CQL_REQUESTS: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isEqualTo(30); + break; + case CQL_PREPARED_CACHE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Long) ((Gauge) m).getValue()).isOne(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0); + break; + case CQL_CLIENT_TIMEOUTS: + case THROTTLING_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).getCount()).isZero(); + break; + case THROTTLING_DELAY: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isZero(); + break; + case THROTTLING_QUEUE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Integer) ((Gauge) m).getValue()).isZero(); + break; + } + } + + for (Node node : session.getMetadata().getNodes().values()) { + + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + + MetricId id = metricIdGenerator.nodeMetricId(node, metric); + Metric m = registry.getMetrics().get(id.getName()); + assertThat(m).isNotNull(); + + // assert 
that the same metric is retrievable through the registry and through the driver + // API + assertThat(metrics.getNodeMetric(node, metric)) + .isPresent() + .hasValueSatisfying(v -> assertThat(v).isSameAs(m)); + + switch (metric) { + case OPEN_CONNECTIONS: + assertThat(m).isInstanceOf(Gauge.class); + // control node has 2 connections + assertThat((Integer) ((Gauge) m).getValue()).isBetween(1, 2); + break; + case CQL_MESSAGES: + assertThat(m).isInstanceOf(Timer.class); + assertThat(((Timer) m).getCount()).isEqualTo(10); + break; + case READ_TIMEOUTS: + case WRITE_TIMEOUTS: + case UNAVAILABLES: + case OTHER_ERRORS: + case ABORTED_REQUESTS: + case UNSENT_REQUESTS: + case RETRIES: + case IGNORES: + case RETRIES_ON_READ_TIMEOUT: + case RETRIES_ON_WRITE_TIMEOUT: + case RETRIES_ON_UNAVAILABLE: + case RETRIES_ON_OTHER_ERROR: + case RETRIES_ON_ABORTED: + case IGNORES_ON_READ_TIMEOUT: + case IGNORES_ON_WRITE_TIMEOUT: + case IGNORES_ON_UNAVAILABLE: + case IGNORES_ON_OTHER_ERROR: + case IGNORES_ON_ABORTED: + case SPECULATIVE_EXECUTIONS: + case CONNECTION_INIT_ERRORS: + case AUTHENTICATION_ERRORS: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).getCount()).isZero(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0L); + break; + case AVAILABLE_STREAMS: + case IN_FLIGHT: + case ORPHANED_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + break; + } + } + } + } + + @Override + protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { + InternalDriverContext context = (InternalDriverContext) session.getContext(); + MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + for (String id : nodeMetricIds(context, node)) { + assertThat(registry.getMetrics()).containsKey(id); + } + } + + @Override + protected void assertNodeMetricsEvicted(CqlSession session, Node node) { + InternalDriverContext 
context = (InternalDriverContext) session.getContext(); + MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); + assertThat(registry).isNotNull(); + for (String id : nodeMetricIds(context, node)) { + assertThat(registry.getMetrics()).doesNotContainKey(id); + } + } + + private List nodeMetricIds(InternalDriverContext context, Node node) { + List ids = new ArrayList<>(); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = context.getMetricIdGenerator().nodeMetricId(node, metric); + ids.add(id.getName()); + } + return ids; + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java deleted file mode 100644 index 0ad0e3b31d5..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/FakeTicker.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.core.metrics; - -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import java.time.Duration; -import java.util.concurrent.atomic.AtomicLong; - -/** A Ticker whose value can be advanced programmatically in test. 
*/ -public class FakeTicker extends Ticker { - - private final AtomicLong nanos = new AtomicLong(); - - public FakeTicker advance(long nanoseconds) { - nanos.addAndGet(nanoseconds); - return this; - } - - public FakeTicker advance(Duration duration) { - return advance(duration.toNanos()); - } - - @Override - public long read() { - return nanos.get(); - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java deleted file mode 100644 index 299b8ea7de2..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsIT.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.core.metrics; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; - -import com.codahale.metrics.Meter; -import com.codahale.metrics.Timer; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; -import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.categories.ParallelizableTests; -import com.google.common.collect.Lists; -import java.util.Collections; -import java.util.concurrent.TimeUnit; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(ParallelizableTests.class) -public class MetricsIT { - - @ClassRule public static final CcmRule CCM_RULE = CcmRule.getInstance(); - - @Test - public void should_expose_metrics() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - Collections.singletonList("cql-requests")) - .build(); - try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); - } - - // Should have 10 requests, check within 5 seconds as metric increments after - // caller is notified. 
- await() - .pollInterval(500, TimeUnit.MILLISECONDS) - .atMost(5, TimeUnit.SECONDS) - .untilAsserted( - () -> - assertThat(session.getMetrics()) - .hasValueSatisfying( - metrics -> - assertThat( - metrics.getSessionMetric( - DefaultSessionMetric.CQL_REQUESTS)) - .hasValueSatisfying( - cqlRequests -> { - // No need to be very sophisticated, metrics are already - // covered individually in unit tests. - assertThat(cqlRequests.getCount()).isEqualTo(10); - }))); - } - } - - @Test - public void should_expose_bytes_sent_and_received() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - Lists.newArrayList("bytes-sent", "bytes-received")) - .withStringList( - DefaultDriverOption.METRICS_NODE_ENABLED, - Lists.newArrayList("bytes-sent", "bytes-received")) - .build(); - try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); - } - - assertThat(session.getMetrics()) - .hasValueSatisfying( - metrics -> { - assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_SENT)) - .hasValueSatisfying( - // Can't be precise here as payload can be dependent on protocol version. - bytesSent -> assertThat(bytesSent.getCount()).isGreaterThan(0)); - assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_RECEIVED)) - .hasValueSatisfying( - bytesReceived -> assertThat(bytesReceived.getCount()).isGreaterThan(0)); - - // get only node in cluster and evaluate its metrics. 
- Node node = session.getMetadata().getNodes().values().iterator().next(); - assertThat(metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_SENT)) - .hasValueSatisfying( - bytesSent -> assertThat(bytesSent.getCount()).isGreaterThan(0)); - assertThat(metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_RECEIVED)) - .hasValueSatisfying( - bytesReceived -> assertThat(bytesReceived.getCount()).isGreaterThan(0)); - }); - } - } - - @Test - public void should_not_expose_metrics_if_disabled() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, Collections.emptyList()) - .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()) - .build(); - try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); - } - - assertThat(session.getMetrics()).isEmpty(); - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java new file mode 100644 index 00000000000..b8ee7ed1b03 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java @@ -0,0 +1,263 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.metrics; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; + +import com.codahale.metrics.MetricRegistry; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.ProgrammaticDriverConfigLoaderBuilder; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.internal.core.context.EventBus; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.DefaultMetricIdGenerator; +import com.datastax.oss.driver.internal.core.metrics.TaggingMetricIdGenerator; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.net.InetSocketAddress; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import org.junit.Assume; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public abstract class 
MetricsITBase { + + protected static final List ENABLED_SESSION_METRICS = + Arrays.asList(DefaultSessionMetric.values()); + + protected static final List ENABLED_NODE_METRICS = + Arrays.asList(DefaultNodeMetric.values()); + + protected abstract SimulacronRule simulacron(); + + protected abstract Object newMetricRegistry(); + + protected abstract String getMetricsFactoryClass(); + + protected abstract void assertMetricsPresent(CqlSession session); + + protected abstract void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception; + + protected abstract void assertNodeMetricsNotEvicted(CqlSession session, Node node) + throws Exception; + + @Before + public void resetSimulacron() { + simulacron().cluster().clearLogs(); + simulacron().cluster().clearPrimes(true); + } + + @Test + @UseDataProvider("descriptorsAndPrefixes") + public void should_expose_metrics_if_enabled(Class metricIdGenerator, String prefix) { + + Assume.assumeFalse( + "Cannot use metric tags with Dropwizard", + metricIdGenerator.getSimpleName().contains("Tagging") + && getMetricsFactoryClass().contains("Dropwizard")); + + DriverConfigLoader loader = + allMetricsEnabled() + .withString( + DefaultDriverOption.METRICS_ID_GENERATOR_CLASS, metricIdGenerator.getSimpleName()) + .withString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, prefix) + .build(); + + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .withMetricRegistry(newMetricRegistry()) + .build()) { + + session.prepare("irrelevant"); + queryAllNodes(session); + assertMetricsPresent(session); + } + } + + @DataProvider + public static Object[][] descriptorsAndPrefixes() { + return new Object[][] { + new Object[] {DefaultMetricIdGenerator.class, ""}, + new Object[] {DefaultMetricIdGenerator.class, "cassandra"}, + new Object[] {TaggingMetricIdGenerator.class, ""}, + new Object[] {TaggingMetricIdGenerator.class, "cassandra"}, + }; + } + + @Test + 
public void should_not_expose_metrics_if_disabled() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList(DefaultDriverOption.METRICS_SESSION_ENABLED, Collections.emptyList()) + .withStringList(DefaultDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()) + .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricsFactoryClass()) + .build(); + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .build()) { + queryAllNodes(session); + MetricRegistry registry = + (MetricRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); + assertThat(registry).isNull(); + assertThat(session.getMetrics()).isEmpty(); + } + } + + @Test + public void should_evict_down_node_metrics_when_timeout_fires() throws Exception { + // given + Duration expireAfter = Duration.ofSeconds(1); + DriverConfigLoader loader = + allMetricsEnabled() + .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, expireAfter) + .build(); + + AbstractMetricUpdater.MIN_EXPIRE_AFTER = expireAfter; + + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .withMetricRegistry(newMetricRegistry()) + .build()) { + + queryAllNodes(session); + + DefaultNode node1 = findNode(session, 0); + DefaultNode node2 = findNode(session, 1); + DefaultNode node3 = findNode(session, 2); + + EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); + + // trigger node1 UP -> DOWN + eventBus.fire(NodeStateEvent.changed(NodeState.UP, NodeState.DOWN, node1)); + + Thread.sleep(expireAfter.toMillis()); + + // then node-level metrics should be evicted from node1, but + // node2 and node3 metrics should not have been evicted + await().untilAsserted(() -> assertNodeMetricsEvicted(session, node1)); + assertNodeMetricsNotEvicted(session, node2); + assertNodeMetricsNotEvicted(session, 
node3); + + } finally { + AbstractMetricUpdater.MIN_EXPIRE_AFTER = Duration.ofMinutes(5); + } + } + + @Test + public void should_not_evict_down_node_metrics_when_node_is_back_up_before_timeout() + throws Exception { + // given + Duration expireAfter = Duration.ofSeconds(2); + DriverConfigLoader loader = + allMetricsEnabled() + .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, expireAfter) + .build(); + + AbstractMetricUpdater.MIN_EXPIRE_AFTER = expireAfter; + + try (CqlSession session = + CqlSession.builder() + .addContactEndPoints(simulacron().getContactPoints()) + .withConfigLoader(loader) + .withMetricRegistry(newMetricRegistry()) + .build()) { + + queryAllNodes(session); + + DefaultNode node1 = findNode(session, 0); + DefaultNode node2 = findNode(session, 1); + DefaultNode node3 = findNode(session, 2); + + EventBus eventBus = ((InternalDriverContext) session.getContext()).getEventBus(); + + // trigger nodes UP -> DOWN + eventBus.fire(NodeStateEvent.changed(NodeState.UP, NodeState.DOWN, node1)); + eventBus.fire(NodeStateEvent.changed(NodeState.UP, NodeState.FORCED_DOWN, node2)); + eventBus.fire(NodeStateEvent.removed(node3)); + + Thread.sleep(500); + + // trigger nodes DOWN -> UP, should cancel the timeouts + eventBus.fire(NodeStateEvent.changed(NodeState.DOWN, NodeState.UP, node1)); + eventBus.fire(NodeStateEvent.changed(NodeState.FORCED_DOWN, NodeState.UP, node2)); + eventBus.fire(NodeStateEvent.added(node3)); + + Thread.sleep(expireAfter.toMillis()); + + // then no node-level metrics should be evicted + assertNodeMetricsNotEvicted(session, node1); + assertNodeMetricsNotEvicted(session, node2); + assertNodeMetricsNotEvicted(session, node3); + + } finally { + AbstractMetricUpdater.MIN_EXPIRE_AFTER = Duration.ofMinutes(5); + } + } + + private ProgrammaticDriverConfigLoaderBuilder allMetricsEnabled() { + return SessionUtils.configLoaderBuilder() + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + ENABLED_SESSION_METRICS.stream() + 
.map(DefaultSessionMetric::getPath) + .collect(Collectors.toList())) + .withStringList( + DefaultDriverOption.METRICS_NODE_ENABLED, + ENABLED_NODE_METRICS.stream() + .map(DefaultNodeMetric::getPath) + .collect(Collectors.toList())) + .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricsFactoryClass()); + } + + private void queryAllNodes(CqlSession session) { + for (Node node : session.getMetadata().getNodes().values()) { + for (int i = 0; i < 10; i++) { + session.execute(SimpleStatement.newInstance("irrelevant").setNode(node)); + } + } + } + + private DefaultNode findNode(CqlSession session, int id) { + InetSocketAddress address1 = simulacron().cluster().node(id).inetSocketAddress(); + return (DefaultNode) + session.getMetadata().findNode(address1).orElseThrow(IllegalStateException::new); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java deleted file mode 100644 index ecb549382f9..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsSimulacronIT.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.core.metrics; - -import static org.assertj.core.api.Assertions.assertThat; -import static org.awaitility.Awaitility.await; - -import com.codahale.metrics.Meter; -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.metrics.Metrics; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.api.core.session.SessionBuilder; -import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; -import com.datastax.oss.driver.categories.ParallelizableTests; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.simulacron.common.cluster.ClusterSpec; -import com.google.common.collect.Lists; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.time.Duration; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(ParallelizableTests.class) -public class MetricsSimulacronIT { - - @ClassRule - public static final SimulacronRule SIMULACRON_RULE = - new SimulacronRule(ClusterSpec.builder().withNodes(1)); - - @Before - public void clearPrimes() { - 
SIMULACRON_RULE.cluster().clearLogs(); - SIMULACRON_RULE.cluster().clearPrimes(true); - } - - @Test - public void should_remove_node_metrics_and_not_remove_session_metrics_after_eviction_time() { - - // given - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - Lists.newArrayList("bytes-sent", "bytes-received")) - .withStringList( - DefaultDriverOption.METRICS_NODE_ENABLED, - Lists.newArrayList("bytes-sent", "bytes-received")) - .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) - .build(); - FakeTicker fakeTicker = new FakeTicker(); - try (CqlSession session = - new MetricsTestContextBuilder() - .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) - .withConfigLoader(loader) - .withTicker(fakeTicker) - .build()) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); - } - - // when - fakeTicker.advance(Duration.ofHours(2)); - - // then session metrics are not evicted - assertThat(session.getMetrics()) - .hasValueSatisfying( - metrics -> { - assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_SENT)) - .hasValueSatisfying( - bytesSent -> assertThat(bytesSent.getCount()).isGreaterThan(0)); - assertThat(metrics.getSessionMetric(DefaultSessionMetric.BYTES_RECEIVED)) - .hasValueSatisfying( - bytesReceived -> assertThat(bytesReceived.getCount()).isGreaterThan(0)); - }); - - // and node metrics are evicted - await() - .until( - () -> { - // get only node in a cluster and evaluate its metrics. 
- Node node = session.getMetadata().getNodes().values().iterator().next(); - Metrics metrics = session.getMetrics().get(); - return !metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_SENT).isPresent() - && !metrics - .getNodeMetric(node, DefaultNodeMetric.BYTES_RECEIVED) - .isPresent(); - }); - } - } - - @Test - public void - should_not_evict_not_updated_node_metric_if_any_other_node_level_metric_was_updated() { - // given - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_NODE_ENABLED, - Lists.newArrayList("bytes-sent", "errors.request.aborted")) - .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) - .build(); - FakeTicker fakeTicker = new FakeTicker(); - try (CqlSession session = - new MetricsTestContextBuilder() - .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) - .withConfigLoader(loader) - .withTicker(fakeTicker) - .build()) { - for (int i = 0; i < 10; i++) { - session.execute("SELECT release_version FROM system.local"); - } - - // when advance time to before eviction - fakeTicker.advance(Duration.ofMinutes(59)); - // execute query that update only bytes-sent - session.execute("SELECT release_version FROM system.local"); - // advance time to after eviction - fakeTicker.advance(Duration.ofMinutes(2)); - - // then all node-level metrics should not be evicted - await() - .until( - () -> { - // get only node in a cluster and evaluate its metrics. 
- Node node = session.getMetadata().getNodes().values().iterator().next(); - Metrics metrics = session.getMetrics().get(); - return metrics.getNodeMetric(node, DefaultNodeMetric.BYTES_SENT).isPresent() - && metrics - .getNodeMetric(node, DefaultNodeMetric.ABORTED_REQUESTS) - .isPresent(); - }); - } - } - - private static class MetricsTestContextBuilder - extends SessionBuilder { - - private Ticker ticker; - - @Override - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - return defaultSession; - } - - public MetricsTestContextBuilder withTicker(Ticker ticker) { - this.ticker = ticker; - return this; - } - - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { - return new MetricsTestContext(configLoader, programmaticArguments, ticker); - } - } - - private static class MetricsTestContext extends DefaultDriverContext { - private final Ticker ticker; - - public MetricsTestContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments, - @NonNull Ticker ticker) { - super(configLoader, programmaticArguments); - this.ticker = ticker; - } - - @Override - protected MetricsFactory buildMetricsFactory() { - return new DropwizardMetricsFactoryCustomTicker(this, ticker); - } - - private static class DropwizardMetricsFactoryCustomTicker extends DropwizardMetricsFactory { - - public DropwizardMetricsFactoryCustomTicker(InternalDriverContext context, Ticker ticker) { - super(context, ticker); - } - } - } -} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java deleted file mode 100644 index f6bd9c23c5e..00000000000 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/common/AbstractMetricsTestBase.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright DataStax, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.metrics.common; - -import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; -import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; -import com.datastax.oss.driver.api.core.session.SessionBuilder; -import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; -import com.datastax.oss.driver.core.metrics.FakeTicker; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.metrics.DefaultMetricIdGenerator; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.internal.core.metrics.TaggingMetricIdGenerator; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.tngtech.java.junit.dataprovider.DataProvider; -import 
com.tngtech.java.junit.dataprovider.DataProviderRunner; -import com.tngtech.java.junit.dataprovider.UseDataProvider; -import edu.umd.cs.findbugs.annotations.NonNull; -import java.net.InetSocketAddress; -import java.time.Duration; -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -@RunWith(DataProviderRunner.class) -public abstract class AbstractMetricsTestBase { - - protected static final List ENABLED_SESSION_METRICS = - Arrays.asList(DefaultSessionMetric.values()); - - protected static final List ENABLED_NODE_METRICS = - Arrays.asList(DefaultNodeMetric.values()); - - protected abstract SimulacronRule simulacron(); - - protected abstract Object newMetricRegistry(); - - protected abstract String getMetricsFactoryClass(); - - protected abstract MetricsFactory newTickingMetricsFactory( - InternalDriverContext context, Ticker ticker); - - protected abstract void assertMetrics(CqlSession session); - - protected abstract void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception; - - protected abstract void assertNodeMetricsNotEvicted(CqlSession session, Node node) - throws Exception; - - @Before - public void clearPrimes() { - simulacron().cluster().clearLogs(); - simulacron().cluster().clearPrimes(true); - } - - @Test - @UseDataProvider("descriptorsAndPrefixes") - public void should_expose_metrics(Class descriptorClass, String prefix) { - - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - ENABLED_SESSION_METRICS.stream() - .map(DefaultSessionMetric::getPath) - .collect(Collectors.toList())) - .withStringList( - DefaultDriverOption.METRICS_NODE_ENABLED, - ENABLED_NODE_METRICS.stream() - .map(DefaultNodeMetric::getPath) - .collect(Collectors.toList())) - .withString(DefaultDriverOption.METRICS_FACTORY_CLASS, getMetricsFactoryClass()) - .withString( 
- DefaultDriverOption.METRICS_ID_GENERATOR_CLASS, descriptorClass.getSimpleName()) - .withString(DefaultDriverOption.METRICS_ID_GENERATOR_PREFIX, prefix) - .build(); - - try (CqlSession session = - CqlSession.builder() - .addContactEndPoints(simulacron().getContactPoints()) - .withConfigLoader(loader) - .withMetricRegistry(newMetricRegistry()) - .build()) { - - for (Node node : session.getMetadata().getNodes().values()) { - for (int i = 0; i < 10; i++) { - session.execute( - SimpleStatement.newInstance("SELECT release_version FROM system.local") - .setNode(node)); - } - } - - assertMetrics(session); - } - } - - @DataProvider - public static Object[][] descriptorsAndPrefixes() { - return new Object[][] { - new Object[] {DefaultMetricIdGenerator.class, ""}, - new Object[] {DefaultMetricIdGenerator.class, "cassandra"}, - new Object[] {TaggingMetricIdGenerator.class, ""}, - new Object[] {TaggingMetricIdGenerator.class, "cassandra"}, - }; - } - - @Test - public void should_evict_node_level_metrics() throws Exception { - // given - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withStringList( - DefaultDriverOption.METRICS_SESSION_ENABLED, - ENABLED_SESSION_METRICS.stream() - .map(DefaultSessionMetric::getPath) - .collect(Collectors.toList())) - .withStringList( - DefaultDriverOption.METRICS_NODE_ENABLED, - ENABLED_NODE_METRICS.stream() - .map(DefaultNodeMetric::getPath) - .collect(Collectors.toList())) - .withDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)) - .build(); - FakeTicker fakeTicker = new FakeTicker(); - try (CqlSession session = - new TestSessionBuilder() - .addContactEndPoints(simulacron().getContactPoints()) - .withConfigLoader(loader) - .withMetricRegistry(newMetricRegistry()) - .withTicker(fakeTicker) - .build()) { - - for (Node node : session.getMetadata().getNodes().values()) { - for (int i = 0; i < 10; i++) { - session.execute( - SimpleStatement.newInstance("SELECT release_version FROM system.local") - 
.setNode(node)); - } - } - - Node node1 = findNode(session, 0); - Node node2 = findNode(session, 1); - Node node3 = findNode(session, 2); - - // when advance time to before eviction - fakeTicker.advance(Duration.ofMinutes(59)); - // execute query that updates only node1 - session.execute( - SimpleStatement.newInstance("SELECT release_version FROM system.local").setNode(node1)); - // advance time to after eviction - fakeTicker.advance(Duration.ofMinutes(2)); - - // then no node-level metrics should be evicted from node1 - assertNodeMetricsNotEvicted(session, node1); - // node2 and node3 metrics should have been evicted - assertNodeMetricsEvicted(session, node2); - assertNodeMetricsEvicted(session, node3); - } - } - - private Node findNode(CqlSession session, int id) { - InetSocketAddress address1 = simulacron().cluster().node(id).inetSocketAddress(); - return session.getMetadata().findNode(address1).orElseThrow(IllegalStateException::new); - } - - private class TestSessionBuilder extends SessionBuilder { - - private Ticker ticker; - - @Override - protected CqlSession wrap(@NonNull CqlSession defaultSession) { - return defaultSession; - } - - public TestSessionBuilder withTicker(Ticker ticker) { - this.ticker = ticker; - return this; - } - - @Override - protected DriverContext buildContext( - DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { - return new TestDriverContext(configLoader, programmaticArguments, ticker); - } - } - - private class TestDriverContext extends DefaultDriverContext { - - private final Ticker ticker; - - public TestDriverContext( - @NonNull DriverConfigLoader configLoader, - @NonNull ProgrammaticArguments programmaticArguments, - @NonNull Ticker ticker) { - super(configLoader, programmaticArguments); - this.ticker = ticker; - } - - @Override - protected MetricsFactory buildMetricsFactory() { - return newTickingMetricsFactory(this, ticker); - } - } -} diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java index d4261c2d967..ddfc8913d63 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -23,16 +23,11 @@ import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.core.metrics.MetricsITBase; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory; -import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerNodeMetricUpdater; import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerTags; -import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import io.micrometer.core.instrument.Counter; import io.micrometer.core.instrument.Gauge; @@ -41,12 +36,11 @@ import io.micrometer.core.instrument.Tag; import io.micrometer.core.instrument.Timer; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; -import java.lang.reflect.Field; import org.junit.ClassRule; import org.junit.experimental.categories.Category; @Category(ParallelizableTests.class) -public class MicrometerMetricsIT extends 
AbstractMetricsTestBase { +public class MicrometerMetricsIT extends MetricsITBase { @ClassRule public static final SimulacronRule SIMULACRON_RULE = @@ -68,12 +62,7 @@ protected String getMetricsFactoryClass() { } @Override - protected MetricsFactory newTickingMetricsFactory(InternalDriverContext context, Ticker ticker) { - return new MicrometerMetricsFactory(context, ticker); - } - - @Override - protected void assertMetrics(CqlSession session) { + protected void assertMetricsPresent(CqlSession session) { MeterRegistry registry = (MeterRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); @@ -91,19 +80,23 @@ protected void assertMetrics(CqlSession session) { Meter m = registry.find(id.getName()).tags(tags).meter(); assertThat(m).isNotNull(); switch (metric) { - case BYTES_SENT: - case BYTES_RECEIVED: - assertThat(m).isInstanceOf(Counter.class); - assertThat(((Counter) m).count()).isGreaterThan(0.0); - break; case CONNECTED_NODES: assertThat(m).isInstanceOf(Gauge.class); - assertThat(((Gauge) m).value()).isEqualTo(3.0); + assertThat(((Gauge) m).value()).isEqualTo(3); break; case CQL_REQUESTS: assertThat(m).isInstanceOf(Timer.class); assertThat(((Timer) m).count()).isEqualTo(30); break; + case CQL_PREPARED_CACHE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat(((Gauge) m).value()).isOne(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isGreaterThan(0); + break; case CQL_CLIENT_TIMEOUTS: case THROTTLING_ERRORS: assertThat(m).isInstanceOf(Counter.class); @@ -114,7 +107,6 @@ protected void assertMetrics(CqlSession session) { assertThat(((Timer) m).count()).isZero(); break; case THROTTLING_QUEUE_SIZE: - case CQL_PREPARED_CACHE_SIZE: assertThat(m).isInstanceOf(Gauge.class); assertThat(((Gauge) m).value()).isZero(); break; @@ -138,59 +130,51 @@ protected void assertMetrics(CqlSession session) { assertThat(m).isInstanceOf(Timer.class); 
assertThat(((Timer) m).count()).isEqualTo(10); break; - case AVAILABLE_STREAMS: - assertThat(m).isInstanceOf(Gauge.class); - assertThat(((Gauge) m).value()).isGreaterThan(100); - break; - case IN_FLIGHT: - assertThat(m).isInstanceOf(Gauge.class); - break; - case ORPHANED_STREAMS: - assertThat(m).isInstanceOf(Gauge.class); - assertThat(((Gauge) m).value()).isZero(); - break; - case BYTES_SENT: - case BYTES_RECEIVED: - assertThat(m).isInstanceOf(Counter.class); - assertThat(((Counter) m).count()).isGreaterThan(0.0); - break; - case UNSENT_REQUESTS: - case ABORTED_REQUESTS: - case WRITE_TIMEOUTS: case READ_TIMEOUTS: + case WRITE_TIMEOUTS: case UNAVAILABLES: case OTHER_ERRORS: + case ABORTED_REQUESTS: + case UNSENT_REQUESTS: case RETRIES: - case RETRIES_ON_ABORTED: + case IGNORES: case RETRIES_ON_READ_TIMEOUT: case RETRIES_ON_WRITE_TIMEOUT: case RETRIES_ON_UNAVAILABLE: case RETRIES_ON_OTHER_ERROR: - case IGNORES: - case IGNORES_ON_ABORTED: + case RETRIES_ON_ABORTED: case IGNORES_ON_READ_TIMEOUT: case IGNORES_ON_WRITE_TIMEOUT: case IGNORES_ON_UNAVAILABLE: case IGNORES_ON_OTHER_ERROR: + case IGNORES_ON_ABORTED: case SPECULATIVE_EXECUTIONS: case CONNECTION_INIT_ERRORS: case AUTHENTICATION_ERRORS: assertThat(m).isInstanceOf(Counter.class); assertThat(((Counter) m).count()).isZero(); break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Counter.class); + assertThat(((Counter) m).count()).isGreaterThan(0.0); + break; + case AVAILABLE_STREAMS: + case IN_FLIGHT: + case ORPHANED_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + break; } } } } @Override - protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) throws Exception { + protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); MeterRegistry registry = (MeterRegistry) context.getMetricRegistry(); 
assertThat(registry).isNotNull(); - // FIXME see JAVA-2929 - triggerCacheCleanup(context.getMetricsFactory()); for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { MetricId id = metricIdGenerator.nodeMetricId(node, metric); Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); @@ -200,13 +184,11 @@ protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) throws } @Override - protected void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception { + protected void assertNodeMetricsEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); MeterRegistry registry = (MeterRegistry) context.getMetricRegistry(); assertThat(registry).isNotNull(); - // FIXME see JAVA-2929 - triggerCacheCleanup(context.getMetricsFactory()); for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { MetricId id = metricIdGenerator.nodeMetricId(node, metric); Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); @@ -214,13 +196,4 @@ protected void assertNodeMetricsEvicted(CqlSession session, Node node) throws Ex assertThat(m).isNull(); } } - - private void triggerCacheCleanup(MetricsFactory metricsFactory) throws Exception { - Field metricsCache = MicrometerMetricsFactory.class.getDeclaredField("metricsCache"); - metricsCache.setAccessible(true); - @SuppressWarnings("unchecked") - Cache cache = - (Cache) metricsCache.get(metricsFactory); - cache.cleanUp(); - } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java index 52a05f7d593..67a7f83c982 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -23,19 +23,15 @@ import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.core.metrics.MetricsITBase; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; -import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; -import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerNodeMetricUpdater; -import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory; import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileTags; -import com.datastax.oss.driver.metrics.common.AbstractMetricsTestBase; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import io.smallrye.metrics.MetricsRegistryImpl; -import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.List; import org.eclipse.microprofile.metrics.Counter; import org.eclipse.microprofile.metrics.Gauge; import org.eclipse.microprofile.metrics.Meter; @@ -48,7 +44,7 @@ import org.junit.experimental.categories.Category; @Category(ParallelizableTests.class) -public class MicroProfileMetricsIT extends AbstractMetricsTestBase { +public class MicroProfileMetricsIT extends MetricsITBase { @ClassRule public static final SimulacronRule SIMULACRON_RULE = @@ -70,12 +66,7 @@ protected String getMetricsFactoryClass() { } @Override - protected MetricsFactory newTickingMetricsFactory(InternalDriverContext context, Ticker ticker) { - return new MicroProfileMetricsFactory(context, 
ticker); - } - - @Override - protected void assertMetrics(CqlSession session) { + protected void assertMetricsPresent(CqlSession session) { MetricRegistry registry = (MetricRegistry) ((InternalDriverContext) session.getContext()).getMetricRegistry(); @@ -94,18 +85,22 @@ protected void assertMetrics(CqlSession session) { Metric m = registry.getMetrics().get(id); assertThat(m).isNotNull(); switch (metric) { - case BYTES_SENT: - case BYTES_RECEIVED: - assertThat(m).isInstanceOf(Meter.class); - assertThat(((Meter) m).getCount()).isGreaterThan(0L); - break; case CONNECTED_NODES: assertThat(m).isInstanceOf(Gauge.class); assertThat((Integer) ((Gauge) m).getValue()).isEqualTo(3); break; case CQL_REQUESTS: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(30L); + assertThat(((Timer) m).getCount()).isEqualTo(30); + break; + case CQL_PREPARED_CACHE_SIZE: + assertThat(m).isInstanceOf(Gauge.class); + assertThat((Long) ((Gauge) m).getValue()).isOne(); + break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0); break; case CQL_CLIENT_TIMEOUTS: case THROTTLING_ERRORS: @@ -120,10 +115,6 @@ protected void assertMetrics(CqlSession session) { assertThat(m).isInstanceOf(Gauge.class); assertThat((Integer) ((Gauge) m).getValue()).isZero(); break; - case CQL_PREPARED_CACHE_SIZE: - assertThat(m).isInstanceOf(Gauge.class); - assertThat((Long) ((Gauge) m).getValue()).isZero(); - break; } } @@ -143,89 +134,73 @@ protected void assertMetrics(CqlSession session) { break; case CQL_MESSAGES: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(10L); - break; - case AVAILABLE_STREAMS: - assertThat(m).isInstanceOf(Gauge.class); - assertThat((Integer) ((Gauge) m).getValue()).isGreaterThan(100); - break; - case IN_FLIGHT: - assertThat(m).isInstanceOf(Gauge.class); - break; - case ORPHANED_STREAMS: - assertThat(m).isInstanceOf(Gauge.class); - 
assertThat((Integer) ((Gauge) m).getValue()).isZero(); + assertThat(((Timer) m).getCount()).isEqualTo(10); break; - case BYTES_SENT: - case BYTES_RECEIVED: - assertThat(m).isInstanceOf(Meter.class); - assertThat(((Meter) m).getCount()).isGreaterThan(0L); - break; - case UNSENT_REQUESTS: - case ABORTED_REQUESTS: - case WRITE_TIMEOUTS: case READ_TIMEOUTS: + case WRITE_TIMEOUTS: case UNAVAILABLES: case OTHER_ERRORS: + case ABORTED_REQUESTS: + case UNSENT_REQUESTS: case RETRIES: - case RETRIES_ON_ABORTED: + case IGNORES: case RETRIES_ON_READ_TIMEOUT: case RETRIES_ON_WRITE_TIMEOUT: case RETRIES_ON_UNAVAILABLE: case RETRIES_ON_OTHER_ERROR: - case IGNORES: - case IGNORES_ON_ABORTED: + case RETRIES_ON_ABORTED: case IGNORES_ON_READ_TIMEOUT: case IGNORES_ON_WRITE_TIMEOUT: case IGNORES_ON_UNAVAILABLE: case IGNORES_ON_OTHER_ERROR: + case IGNORES_ON_ABORTED: case SPECULATIVE_EXECUTIONS: case CONNECTION_INIT_ERRORS: case AUTHENTICATION_ERRORS: assertThat(m).isInstanceOf(Counter.class); assertThat(((Counter) m).getCount()).isZero(); break; + case BYTES_SENT: + case BYTES_RECEIVED: + assertThat(m).isInstanceOf(Meter.class); + assertThat(((Meter) m).getCount()).isGreaterThan(0L); + break; + case AVAILABLE_STREAMS: + case IN_FLIGHT: + case ORPHANED_STREAMS: + assertThat(m).isInstanceOf(Gauge.class); + break; } } } } @Override - protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) throws Exception { + protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); - MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); assertThat(registry).isNotNull(); - // FIXME see JAVA-2929 - triggerCacheCleanup(context.getMetricsFactory()); - for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { - MetricId id = metricIdGenerator.nodeMetricId(node, metric); - Tag[] tags = 
MicroProfileTags.toMicroProfileTags(id.getTags()); - assertThat(registry.getMetrics()).containsKey(new MetricID(id.getName(), tags)); + for (MetricID id : nodeMetricIds(context, node)) { + assertThat(registry.getMetrics()).containsKey(id); } } @Override - protected void assertNodeMetricsEvicted(CqlSession session, Node node) throws Exception { + protected void assertNodeMetricsEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); - MetricIdGenerator metricIdGenerator = context.getMetricIdGenerator(); MetricRegistry registry = (MetricRegistry) context.getMetricRegistry(); assertThat(registry).isNotNull(); - // FIXME see JAVA-2929 - triggerCacheCleanup(context.getMetricsFactory()); - for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { - MetricId id = metricIdGenerator.nodeMetricId(node, metric); - Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); - assertThat(registry.getMetrics()).doesNotContainKey(new MetricID(id.getName(), tags)); + for (MetricID id : nodeMetricIds(context, node)) { + assertThat(registry.getMetrics()).doesNotContainKey(id); } } - private void triggerCacheCleanup(MetricsFactory metricsFactory) throws Exception { - Field metricsCache = MicroProfileMetricsFactory.class.getDeclaredField("metricsCache"); - metricsCache.setAccessible(true); - @SuppressWarnings("unchecked") - Cache cache = - (Cache) metricsCache.get(metricsFactory); - cache.cleanUp(); + private List nodeMetricIds(InternalDriverContext context, Node node) { + List ids = new ArrayList<>(); + for (DefaultNodeMetric metric : ENABLED_NODE_METRICS) { + MetricId id = context.getMetricIdGenerator().nodeMetricId(node, metric); + ids.add(new MetricID(id.getName(), MicroProfileTags.toMicroProfileTags(id.getTags()))); + } + return ids; } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java 
b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java index d00539df191..89f6c03bff2 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -38,6 +38,7 @@ public abstract class MicrometerMetricUpdater extends AbstractMetricUpdater { protected final MeterRegistry registry; + protected final ConcurrentMap metrics = new ConcurrentHashMap<>(); protected MicrometerMetricUpdater( @@ -76,6 +77,14 @@ public void updateTimer( } } + @Override + protected void clearMetrics() { + for (Meter metric : metrics.values()) { + registry.remove(metric); + } + metrics.clear(); + } + protected abstract MetricId getMetricId(MetricT metric); protected void initializeGauge( diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java index f326b308733..a39a4924612 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactory.java @@ -19,23 +19,21 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import 
com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; import com.datastax.oss.driver.internal.core.metrics.MetricPaths; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; -import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; +import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import io.micrometer.core.instrument.MeterRegistry; -import java.time.Duration; +import io.netty.util.concurrent.EventExecutor; import java.util.Optional; import java.util.Set; import net.jcip.annotations.ThreadSafe; @@ -46,20 +44,14 @@ public class MicrometerMetricsFactory implements MetricsFactory { private static final Logger LOG = LoggerFactory.getLogger(MicrometerMetricsFactory.class); - static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); private final InternalDriverContext context; private final Set enabledNodeMetrics; private final MeterRegistry registry; private final SessionMetricUpdater sessionUpdater; - private final Cache metricsCache; public MicrometerMetricsFactory(DriverContext context) { - this((InternalDriverContext) context, Ticker.systemTicker()); - } - - public MicrometerMetricsFactory(InternalDriverContext context, Ticker ticker) { - this.context = context; + this.context = (InternalDriverContext) context; String logPrefix = context.getSessionName(); DriverExecutionProfile config = 
context.getConfig().getDefaultProfile(); Set enabledSessionMetrics = @@ -68,31 +60,13 @@ public MicrometerMetricsFactory(InternalDriverContext context, Ticker ticker) { this.enabledNodeMetrics = MetricPaths.parseNodeMetricPaths( config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED), logPrefix); - - Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); - - metricsCache = - CacheBuilder.newBuilder() - .ticker(ticker) - .expireAfterAccess(evictionTime) - .removalListener( - (RemovalNotification notification) -> { - LOG.debug( - "[{}] Removing metrics for node: {} from cache after {}", - logPrefix, - notification.getKey(), - evictionTime); - notification.getValue().cleanupNodeMetrics(); - }) - .build(); - if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { LOG.debug("[{}] All metrics are disabled, Session.getMetrics will be empty", logPrefix); this.registry = null; this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; } else { // try to get the metric registry from the context - Object possibleMetricRegistry = context.getMetricRegistry(); + Object possibleMetricRegistry = this.context.getMetricRegistry(); if (possibleMetricRegistry == null) { // metrics are enabled, but a metric registry was not supplied to the context // use the global registry @@ -111,25 +85,18 @@ public MicrometerMetricsFactory(InternalDriverContext context, Ticker ticker) { + possibleMetricRegistry.getClass().getName() + "'"); } + if (!enabledNodeMetrics.isEmpty()) { + EventExecutor adminEventExecutor = + this.context.getNettyOptions().adminEventExecutorGroup().next(); + this.context + .getEventBus() + .register( + NodeStateEvent.class, + RunOrSchedule.on(adminEventExecutor, this::processNodeStateEvent)); + } } } - @VisibleForTesting - static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { - Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); - - if 
(evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { - LOG.warn( - "[{}] Value too low for {}: {}. Forcing to {} instead.", - logPrefix, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - evictionTime, - LOWEST_ACCEPTABLE_EXPIRE_AFTER); - } - - return evictionTime; - } - @Override public Optional getMetrics() { return Optional.empty(); @@ -144,11 +111,21 @@ public SessionMetricUpdater getSessionUpdater() { public NodeMetricUpdater newNodeUpdater(Node node) { if (registry == null) { return NoopNodeMetricUpdater.INSTANCE; + } else { + return new MicrometerNodeMetricUpdater(node, context, enabledNodeMetrics, registry); + } + } + + protected void processNodeStateEvent(NodeStateEvent event) { + if (event.newState == NodeState.DOWN + || event.newState == NodeState.FORCED_DOWN + || event.newState == null) { + // node is DOWN or REMOVED + ((MicrometerNodeMetricUpdater) event.node.getMetricUpdater()).startMetricsExpirationTimeout(); + } else if (event.newState == NodeState.UP || event.newState == NodeState.UNKNOWN) { + // node is UP or ADDED + ((MicrometerNodeMetricUpdater) event.node.getMetricUpdater()) + .cancelMetricsExpirationTimeout(); } - MicrometerNodeMetricUpdater updater = - new MicrometerNodeMetricUpdater( - node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); - metricsCache.put(node, updater); - return updater; } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java index 3c77839c4b5..a0c4cbd05a6 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java @@ -23,10 +23,8 @@ import 
com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; -import io.micrometer.core.instrument.Meter; import io.micrometer.core.instrument.MeterRegistry; import java.util.Set; -import java.util.concurrent.TimeUnit; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -34,17 +32,14 @@ public class MicrometerNodeMetricUpdater extends MicrometerMetricUpdater enabledMetrics, - MeterRegistry registry, - Runnable signalMetricUpdated) { + MeterRegistry registry) { super(context, enabledMetrics, registry); this.node = node; - this.signalMetricUpdated = signalMetricUpdated; DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); @@ -80,38 +75,17 @@ public MicrometerNodeMetricUpdater( } @Override - public void incrementCounter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.incrementCounter(metric, profileName, amount); - } - - @Override - public void updateHistogram(NodeMetric metric, String profileName, long value) { - signalMetricUpdated.run(); - super.updateHistogram(metric, profileName, value); - } - - @Override - public void markMeter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.markMeter(metric, profileName, amount); + protected MetricId getMetricId(NodeMetric metric) { + return context.getMetricIdGenerator().nodeMetricId(node, metric); } @Override - public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { - signalMetricUpdated.run(); - super.updateTimer(metric, profileName, duration, unit); - } - - public void cleanupNodeMetrics() { - for (Meter meter : metrics.values()) { - registry.remove(meter); - } - metrics.clear(); + protected void startMetricsExpirationTimeout() { + super.startMetricsExpirationTimeout(); } @Override - protected MetricId getMetricId(NodeMetric metric) { - 
return context.getMetricIdGenerator().nodeMetricId(node, metric); + protected void cancelMetricsExpirationTimeout() { + super.cancelMetricsExpirationTimeout(); } } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java index 17532ff30bd..e2ad28f08e6 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricsFactoryTest.java @@ -15,91 +15,26 @@ */ package com.datastax.oss.driver.internal.metrics.micrometer; -import static com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.timeout; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import ch.qos.logback.classic.Level; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.util.LoggerTest; -import com.tngtech.java.junit.dataprovider.DataProvider; -import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import com.tngtech.java.junit.dataprovider.UseDataProvider; import io.micrometer.core.instrument.MeterRegistry; import java.time.Duration; import java.util.Collections; import java.util.List; import org.junit.Test; -import 
org.junit.runner.RunWith; -@RunWith(DataProviderRunner.class) public class MicrometerMetricsFactoryTest { - private static final String LOG_PREFIX = "prefix"; - @Test - public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { - // given - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(MicrometerMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - MicrometerMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); - assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); - assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) - .contains( - String.format( - "[%s] Value too low for %s: %s. 
Forcing to %s instead.", - LOG_PREFIX, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - expireAfter, - LOWEST_ACCEPTABLE_EXPIRE_AFTER)); - } - - @Test - @UseDataProvider(value = "acceptableEvictionTimes") - public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( - Duration expireAfter) { - // given - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(MicrometerMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - MicrometerMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); - } - - @DataProvider - public static Object[][] acceptableEvictionTimes() { - return new Object[][] { - {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} - }; - } - - @Test - @UseDataProvider(value = "invalidRegistryTypes") - public void should_throw_if_wrong_or_missing_registry_type( - Object registryObj, String expectedMsg) { + public void should_throw_if_wrong_or_missing_registry_type() { // given InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); @@ -111,9 +46,9 @@ public void should_throw_if_wrong_or_missing_registry_type( when(context.getConfig()).thenReturn(config); when(context.getSessionName()).thenReturn("MockSession"); // registry object is not a registry type - when(context.getMetricRegistry()).thenReturn(registryObj); + when(context.getMetricRegistry()).thenReturn(Integer.MAX_VALUE); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); + .thenReturn(Duration.ofHours(1)); 
when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then @@ -123,21 +58,11 @@ public void should_throw_if_wrong_or_missing_registry_type( "MetricsFactory should require correct registry object type: " + MeterRegistry.class.getName()); } catch (IllegalArgumentException iae) { - assertThat(iae.getMessage()).isEqualTo(expectedMsg); + assertThat(iae.getMessage()) + .isEqualTo( + "Unexpected Metrics registry object. " + + "Expected registry object to be of type '%s', but was '%s'", + MeterRegistry.class.getName(), Integer.class.getName()); } } - - @DataProvider - public static Object[][] invalidRegistryTypes() { - return new Object[][] { - { - Integer.MAX_VALUE, - "Unexpected Metrics registry object. Expected registry object to be of type '" - + MeterRegistry.class.getName() - + "', but was '" - + Integer.class.getName() - + "'" - }, - }; - } } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java new file mode 100644 index 00000000000..badea84e6db --- /dev/null +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java @@ -0,0 +1,150 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import java.time.Duration; +import java.util.Collections; +import java.util.Set; +import java.util.function.Supplier; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class MicrometerNodeMetricUpdaterTest { + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + Duration expireAfter = 
AbstractMetricUpdater.MIN_EXPIRE_AFTER.minusMinutes(1); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + MicrometerNodeMetricUpdater updater = + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(AbstractMetricUpdater.MIN_EXPIRE_AFTER); + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[prefix] Value too low for %s: %s. 
Forcing to %s instead.", + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + AbstractMetricUpdater.MIN_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + MicrometerNodeMetricUpdater updater = + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(expireAfter); + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {AbstractMetricUpdater.MIN_EXPIRE_AFTER}, + {AbstractMetricUpdater.MIN_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml 
index f6549e4d73c..454562e3ed2 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -57,6 +57,11 @@ + + io.smallrye + smallrye-metrics + test + ch.qos.logback logback-classic diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java index 2d5ea9013c5..ea06e2bff47 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java @@ -31,6 +31,7 @@ import org.eclipse.microprofile.metrics.Metadata; import org.eclipse.microprofile.metrics.Meter; import org.eclipse.microprofile.metrics.Metric; +import org.eclipse.microprofile.metrics.MetricID; import org.eclipse.microprofile.metrics.MetricRegistry; import org.eclipse.microprofile.metrics.MetricType; import org.eclipse.microprofile.metrics.Tag; @@ -40,6 +41,7 @@ public abstract class MicroProfileMetricUpdater extends AbstractMetricUpdater { protected final MetricRegistry registry; + protected final ConcurrentMap metrics = new ConcurrentHashMap<>(); protected MicroProfileMetricUpdater( @@ -77,6 +79,16 @@ public void updateTimer( } } + @Override + protected void clearMetrics() { + for (MetricT metric : metrics.keySet()) { + MetricId id = getMetricId(metric); + Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); + registry.remove(new MetricID(id.getName(), tags)); + } + metrics.clear(); + } + protected abstract MetricId getMetricId(MetricT metric); protected void initializeGauge( diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java 
b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java index f06974a20d4..11987af7cfa 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactory.java @@ -19,22 +19,20 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; import com.datastax.oss.driver.internal.core.metrics.MetricPaths; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.NoopNodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.NoopSessionMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; -import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; -import com.datastax.oss.driver.shaded.guava.common.base.Ticker; -import com.datastax.oss.driver.shaded.guava.common.cache.Cache; -import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; -import com.datastax.oss.driver.shaded.guava.common.cache.RemovalNotification; -import java.time.Duration; +import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; +import io.netty.util.concurrent.EventExecutor; import java.util.Optional; import 
java.util.Set; import net.jcip.annotations.ThreadSafe; @@ -44,21 +42,16 @@ @ThreadSafe public class MicroProfileMetricsFactory implements MetricsFactory { + private static final Logger LOG = LoggerFactory.getLogger(MicroProfileMetricsFactory.class); - static final Duration LOWEST_ACCEPTABLE_EXPIRE_AFTER = Duration.ofMinutes(5); private final InternalDriverContext context; private final Set enabledNodeMetrics; private final MetricRegistry registry; private final SessionMetricUpdater sessionUpdater; - private final Cache metricsCache; public MicroProfileMetricsFactory(DriverContext context) { - this((InternalDriverContext) context, Ticker.systemTicker()); - } - - public MicroProfileMetricsFactory(InternalDriverContext context, Ticker ticker) { - this.context = context; + this.context = (InternalDriverContext) context; String logPrefix = context.getSessionName(); DriverExecutionProfile config = context.getConfig().getDefaultProfile(); Set enabledSessionMetrics = @@ -67,30 +60,12 @@ public MicroProfileMetricsFactory(InternalDriverContext context, Ticker ticker) this.enabledNodeMetrics = MetricPaths.parseNodeMetricPaths( config.getStringList(DefaultDriverOption.METRICS_NODE_ENABLED), logPrefix); - - Duration evictionTime = getAndValidateEvictionTime(config, logPrefix); - - metricsCache = - CacheBuilder.newBuilder() - .ticker(ticker) - .expireAfterAccess(evictionTime) - .removalListener( - (RemovalNotification notification) -> { - LOG.debug( - "[{}] Removing metrics for node: {} from cache after {}", - logPrefix, - notification.getKey(), - evictionTime); - notification.getValue().cleanupNodeMetrics(); - }) - .build(); - if (enabledSessionMetrics.isEmpty() && enabledNodeMetrics.isEmpty()) { LOG.debug("[{}] All metrics are disabled.", logPrefix); this.registry = null; this.sessionUpdater = NoopSessionMetricUpdater.INSTANCE; } else { - Object possibleMetricRegistry = context.getMetricRegistry(); + Object possibleMetricRegistry = this.context.getMetricRegistry(); if 
(possibleMetricRegistry == null) { // metrics are enabled, but a metric registry was not supplied to the context throw new IllegalArgumentException( @@ -112,25 +87,18 @@ public MicroProfileMetricsFactory(InternalDriverContext context, Ticker ticker) + possibleMetricRegistry.getClass().getName() + "'"); } + if (!enabledNodeMetrics.isEmpty()) { + EventExecutor adminEventExecutor = + this.context.getNettyOptions().adminEventExecutorGroup().next(); + this.context + .getEventBus() + .register( + NodeStateEvent.class, + RunOrSchedule.on(adminEventExecutor, this::processNodeStateEvent)); + } } } - @VisibleForTesting - static Duration getAndValidateEvictionTime(DriverExecutionProfile config, String logPrefix) { - Duration evictionTime = config.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER); - - if (evictionTime.compareTo(LOWEST_ACCEPTABLE_EXPIRE_AFTER) < 0) { - LOG.warn( - "[{}] Value too low for {}: {}. Forcing to {} instead.", - logPrefix, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - evictionTime, - LOWEST_ACCEPTABLE_EXPIRE_AFTER); - } - - return evictionTime; - } - @Override public Optional getMetrics() { return Optional.empty(); @@ -145,11 +113,22 @@ public SessionMetricUpdater getSessionUpdater() { public NodeMetricUpdater newNodeUpdater(Node node) { if (registry == null) { return NoopNodeMetricUpdater.INSTANCE; + } else { + return new MicroProfileNodeMetricUpdater(node, context, enabledNodeMetrics, registry); + } + } + + protected void processNodeStateEvent(NodeStateEvent event) { + if (event.newState == NodeState.DOWN + || event.newState == NodeState.FORCED_DOWN + || event.newState == null) { + // node is DOWN or REMOVED + ((MicroProfileNodeMetricUpdater) event.node.getMetricUpdater()) + .startMetricsExpirationTimeout(); + } else if (event.newState == NodeState.UP || event.newState == NodeState.UNKNOWN) { + // node is UP or ADDED + ((MicroProfileNodeMetricUpdater) event.node.getMetricUpdater()) + .cancelMetricsExpirationTimeout(); } - 
MicroProfileNodeMetricUpdater updater = - new MicroProfileNodeMetricUpdater( - node, context, enabledNodeMetrics, registry, () -> metricsCache.getIfPresent(node)); - metricsCache.put(node, updater); - return updater; } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java index 5693819ac56..0994425f2e9 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricUpdater.java @@ -24,28 +24,22 @@ import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import java.util.Set; -import java.util.concurrent.TimeUnit; import net.jcip.annotations.ThreadSafe; -import org.eclipse.microprofile.metrics.MetricID; import org.eclipse.microprofile.metrics.MetricRegistry; -import org.eclipse.microprofile.metrics.Tag; @ThreadSafe public class MicroProfileNodeMetricUpdater extends MicroProfileMetricUpdater implements NodeMetricUpdater { private final Node node; - private final Runnable signalMetricUpdated; public MicroProfileNodeMetricUpdater( Node node, InternalDriverContext context, Set enabledMetrics, - MetricRegistry registry, - Runnable signalMetricUpdated) { + MetricRegistry registry) { super(context, enabledMetrics, registry); this.node = node; - this.signalMetricUpdated = signalMetricUpdated; DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); @@ -81,40 +75,17 @@ public MicroProfileNodeMetricUpdater( } @Override - public void incrementCounter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.incrementCounter(metric, profileName, amount); - } 
- - @Override - public void updateHistogram(NodeMetric metric, String profileName, long value) { - signalMetricUpdated.run(); - super.updateHistogram(metric, profileName, value); - } - - @Override - public void markMeter(NodeMetric metric, String profileName, long amount) { - signalMetricUpdated.run(); - super.markMeter(metric, profileName, amount); + protected MetricId getMetricId(NodeMetric metric) { + return context.getMetricIdGenerator().nodeMetricId(node, metric); } @Override - public void updateTimer(NodeMetric metric, String profileName, long duration, TimeUnit unit) { - signalMetricUpdated.run(); - super.updateTimer(metric, profileName, duration, unit); - } - - public void cleanupNodeMetrics() { - for (NodeMetric metric : metrics.keySet()) { - MetricId id = getMetricId(metric); - Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); - registry.remove(new MetricID(id.getName(), tags)); - } - metrics.clear(); + protected void startMetricsExpirationTimeout() { + super.startMetricsExpirationTimeout(); } @Override - protected MetricId getMetricId(NodeMetric metric) { - return context.getMetricIdGenerator().nodeMetricId(node, metric); + protected void cancelMetricsExpirationTimeout() { + super.cancelMetricsExpirationTimeout(); } } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java index 7fddc8f150e..f507f3e25d2 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileSessionMetricUpdater.java @@ -41,14 +41,14 @@ public MicroProfileSessionMetricUpdater( initializeGauge( DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE, profile, 
this::preparedStatementCacheSize); + initializeCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, profile); + initializeCounter(DefaultSessionMetric.THROTTLING_ERRORS, profile); + initializeCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, profile); + initializeTimer(DefaultSessionMetric.CQL_REQUESTS, profile); initializeTimer(DefaultSessionMetric.THROTTLING_DELAY, profile); initializeTimer(DseSessionMetric.CONTINUOUS_CQL_REQUESTS, profile); initializeTimer(DseSessionMetric.GRAPH_REQUESTS, profile); - - initializeCounter(DefaultSessionMetric.CQL_CLIENT_TIMEOUTS, profile); - initializeCounter(DefaultSessionMetric.THROTTLING_ERRORS, profile); - initializeCounter(DseSessionMetric.GRAPH_CLIENT_TIMEOUTS, profile); } @Override diff --git a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java index e8a00c2dc8c..07b7e107ba3 100644 --- a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java +++ b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricsFactoryTest.java @@ -15,25 +15,20 @@ */ package com.datastax.oss.driver.internal.metrics.microprofile; -import static com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory.LOWEST_ACCEPTABLE_EXPIRE_AFTER; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.timeout; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import ch.qos.logback.classic.Level; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import 
com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; -import java.time.Duration; import java.util.Collections; import java.util.List; import org.eclipse.microprofile.metrics.MetricRegistry; @@ -43,59 +38,6 @@ @RunWith(DataProviderRunner.class) public class MicroProfileMetricsFactoryTest { - private static final String LOG_PREFIX = "prefix"; - - @Test - public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { - // given - Duration expireAfter = LOWEST_ACCEPTABLE_EXPIRE_AFTER.minusMinutes(1); - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(MicroProfileMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - MicroProfileMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); - assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); - assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) - .contains( - String.format( - "[%s] Value too low for %s: %s. 
Forcing to %s instead.", - LOG_PREFIX, - DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), - expireAfter, - LOWEST_ACCEPTABLE_EXPIRE_AFTER)); - } - - @Test - @UseDataProvider(value = "acceptableEvictionTimes") - public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( - Duration expireAfter) { - // given - LoggerTest.LoggerSetup logger = - LoggerTest.setupTestLogger(MicroProfileMetricsFactory.class, Level.WARN); - DriverExecutionProfile driverExecutionProfile = mock(DriverExecutionProfile.class); - - // when - when(driverExecutionProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(expireAfter); - MicroProfileMetricsFactory.getAndValidateEvictionTime(driverExecutionProfile, LOG_PREFIX); - - // then - verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); - } - - @DataProvider - public static Object[][] acceptableEvictionTimes() { - return new Object[][] { - {LOWEST_ACCEPTABLE_EXPIRE_AFTER}, {LOWEST_ACCEPTABLE_EXPIRE_AFTER.plusMinutes(1)} - }; - } - @Test @UseDataProvider(value = "invalidRegistryTypes") public void should_throw_if_wrong_or_missing_registry_type( @@ -113,7 +55,7 @@ public void should_throw_if_wrong_or_missing_registry_type( // registry object is not a registry type when(context.getMetricRegistry()).thenReturn(registryObj); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(LOWEST_ACCEPTABLE_EXPIRE_AFTER); + .thenReturn(AbstractMetricUpdater.MIN_EXPIRE_AFTER); when(profile.getStringList(DefaultDriverOption.METRICS_SESSION_ENABLED)) .thenReturn(enabledMetrics); // then diff --git a/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricsUpdaterTest.java b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricsUpdaterTest.java new file mode 100644 index 00000000000..81414b5de8e --- /dev/null +++ 
b/metrics/microprofile/src/test/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileNodeMetricsUpdaterTest.java @@ -0,0 +1,152 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.microprofile; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.timeout; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import ch.qos.logback.classic.Level; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; +import com.datastax.oss.driver.api.core.metrics.NodeMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.smallrye.metrics.MetricsRegistryImpl; +import java.time.Duration; +import java.util.Collections; +import 
java.util.Set; +import org.eclipse.microprofile.metrics.Gauge; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class MicroProfileNodeMetricsUpdaterTest { + + @Test + public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + Duration expireAfter = AbstractMetricUpdater.MIN_EXPIRE_AFTER.minusMinutes(1); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + MicroProfileNodeMetricUpdater updater = + new MicroProfileNodeMetricUpdater( + node, context, enabledMetrics, new MetricsRegistryImpl()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Gauge supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(AbstractMetricUpdater.MIN_EXPIRE_AFTER); + verify(logger.appender, timeout(500).times(1)).doAppend(logger.loggingEventCaptor.capture()); + assertThat(logger.loggingEventCaptor.getValue().getMessage()).isNotNull(); + assertThat(logger.loggingEventCaptor.getValue().getFormattedMessage()) + .contains( + String.format( + "[prefix] Value 
too low for %s: %s. Forcing to %s instead.", + DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER.getPath(), + expireAfter, + AbstractMetricUpdater.MIN_EXPIRE_AFTER)); + } + + @Test + @UseDataProvider(value = "acceptableEvictionTimes") + public void should_not_log_warning_when_provided_eviction_time_setting_is_acceptable( + Duration expireAfter) { + // given + LoggerTest.LoggerSetup logger = + LoggerTest.setupTestLogger(AbstractMetricUpdater.class, Level.WARN); + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(expireAfter); + + MicroProfileNodeMetricUpdater updater = + new MicroProfileNodeMetricUpdater( + node, context, enabledMetrics, new MetricsRegistryImpl()) { + @Override + protected void initializeGauge( + NodeMetric metric, DriverExecutionProfile profile, Gauge supplier) { + // do nothing + } + + @Override + protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + + @Override + protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { + // do nothing + } + }; + + // then + assertThat(updater.getExpireAfter()).isEqualTo(expireAfter); + verify(logger.appender, timeout(500).times(0)).doAppend(logger.loggingEventCaptor.capture()); + } + + @DataProvider + public static Object[][] acceptableEvictionTimes() { + return new Object[][] { + {AbstractMetricUpdater.MIN_EXPIRE_AFTER}, + {AbstractMetricUpdater.MIN_EXPIRE_AFTER.plusMinutes(1)} + }; + } +} From 130fd257467f05e838bf23ce904733bc14936b79 Mon 
Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Mar 2021 17:39:34 +0100 Subject: [PATCH 669/979] JAVA-2914: Transform node filter into a more flexible node distance evaluator (#1524) --- changelog/README.md | 1 + .../insights/ExecutionProfilesInfoFinder.java | 9 +- .../api/core/config/DefaultDriverOption.java | 11 ++ .../api/core/config/TypedDriverOption.java | 14 ++- .../loadbalancing/NodeDistanceEvaluator.java | 52 ++++++++ .../core/session/ProgrammaticArguments.java | 42 +++++++ .../api/core/session/SessionBuilder.java | 76 +++++++++++- .../core/context/DefaultDriverContext.java | 9 +- .../core/context/InternalDriverContext.java | 12 +- .../BasicLoadBalancingPolicy.java | 33 +++--- .../DefaultNodeDistanceEvaluatorHelper.java | 112 ++++++++++++++++++ .../helper/DefaultNodeFilterHelper.java | 96 --------------- ....java => NodeDistanceEvaluatorHelper.java} | 15 +-- .../NodeFilterToDistanceEvaluatorAdapter.java | 38 ++++++ core/src/main/resources/reference.conf | 21 ++++ .../insights/ExecutionProfileMockUtil.java | 4 +- .../context/MockedDriverContextFactory.java | 2 +- .../BasicLoadBalancingPolicyDistanceTest.java | 24 ++-- .../BasicLoadBalancingPolicyEventsTest.java | 23 ++-- .../BasicLoadBalancingPolicyInitTest.java | 15 +-- ...cInferringLoadBalancingPolicyInitTest.java | 14 +-- .../DefaultLoadBalancingPolicyInitTest.java | 16 +-- manual/core/load_balancing/README.md | 32 ++--- upgrade_guide/README.md | 44 +++++++ 24 files changed, 517 insertions(+), 198 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeDistanceEvaluatorHelper.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java rename core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/{NodeFilterHelper.java => 
NodeDistanceEvaluatorHelper.java} (76%) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterToDistanceEvaluatorAdapter.java diff --git a/changelog/README.md b/changelog/README.md index 707ec720e58..4a5c5399f04 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2914: Transform node filter into a more flexible node distance evaluator - [improvement] JAVA-2929: Revisit node-level metric eviction - [new feature] JAVA-2830: Add mapper support for Java streams - [bug] JAVA-2928: Generate counter increment/decrement constructs compatible with legacy C* diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java index a255b5b0de0..8e7a8e59982 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfilesInfoFinder.java @@ -158,9 +158,12 @@ private LoadBalancingInfo getLoadBalancingInfo(DriverExecutionProfile driverExec "localDataCenter", driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)); } - options.put( - "filterFunction", - driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS)); + @SuppressWarnings("deprecation") + boolean hasNodeFiltering = + driverExecutionProfile.isDefined(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS) + || driverExecutionProfile.isDefined( + DefaultDriverOption.LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS); + options.put("filterFunction", hasNodeFiltering); ClassSettingDetails loadBalancingDetails = PackageUtil.getLoadBalancingDetails( driverExecutionProfile.getString(DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS)); diff --git 
a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 150305dfea4..fde3e87857a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -97,7 +97,10 @@ public enum DefaultDriverOption implements DriverOption { * A custom filter to include/exclude nodes. * *

          Value-Type: {@link String} + * + * @deprecated use {@link #LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS} instead. */ + @Deprecated LOAD_BALANCING_FILTER_CLASS("basic.load-balancing-policy.filter.class"), /** @@ -852,6 +855,14 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link String} */ METRICS_ID_GENERATOR_PREFIX("advanced.metrics.id-generator.prefix"), + + /** + * The class name of a custom {@link + * com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator}. + * + *

          Value-Type: {@link String} + */ + LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS("basic.load-balancing-policy.evaluator.class"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index d2687da68d7..094f928d83c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -137,9 +137,21 @@ public String toString() { public static final TypedDriverOption LOAD_BALANCING_LOCAL_DATACENTER = new TypedDriverOption<>( DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER, GenericType.STRING); - /** A custom filter to include/exclude nodes. */ + /** + * A custom filter to include/exclude nodes. + * + * @deprecated Use {@link #LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS} instead. + */ + @Deprecated public static final TypedDriverOption LOAD_BALANCING_FILTER_CLASS = new TypedDriverOption<>(DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, GenericType.STRING); + /** + * The class name of a custom {@link + * com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator}. + */ + public static final TypedDriverOption LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS, GenericType.STRING); /** The timeout to use for internal queries that run as part of the initialization process. 
*/ public static final TypedDriverOption CONNECTION_INIT_QUERY_TIMEOUT = new TypedDriverOption<>( diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.java b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.java new file mode 100644 index 00000000000..21f610e3f21 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.java @@ -0,0 +1,52 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.loadbalancing; + +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * A pluggable {@link NodeDistance} evaluator. + * + *

          Node distance evaluators are recognized by all the driver built-in load balancing policies. + * They can be specified {@linkplain + * com.datastax.oss.driver.api.core.session.SessionBuilder#withNodeDistanceEvaluator(String, + * NodeDistanceEvaluator) programmatically} or through the configuration (with the {@code + * load-balancing-policy.evaluator.class} option). + * + * @see com.datastax.oss.driver.api.core.session.SessionBuilder#withNodeDistanceEvaluator(String, + * NodeDistanceEvaluator) + */ +@FunctionalInterface +public interface NodeDistanceEvaluator { + + /** + * Evaluates the distance to apply to the given node. + * + *

          This method will be invoked each time the {@link LoadBalancingPolicy} processes a topology + * or state change, and will be passed the node being inspected, and the local datacenter name (or + * null if none is defined). If it returns a non-null {@link NodeDistance}, the policy will + * suggest that distance for the node; if it returns null, the policy will assign a default + * distance instead, based on its internal algorithm for computing node distances. + * + * @param node The node to assign a new distance to. + * @param localDc The local datacenter name, if defined, or null otherwise. + * @return The {@link NodeDistance} to assign to the node, or null to let the policy decide. + */ + @Nullable + NodeDistance evaluateDistance(@NonNull Node node, @Nullable String localDc); +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 75a49fb3a59..9e4f034ef00 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.core.session; import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; @@ -23,6 +24,7 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.NodeFilterToDistanceEvaluatorAdapter; import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; @@ -30,6 +32,7 @@ import java.net.InetSocketAddress; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.UUID; import java.util.function.Predicate; @@ -52,6 +55,7 @@ public static Builder builder() { private final RequestTracker requestTracker; private final Map localDatacenters; private final Map> nodeFilters; + private final Map nodeDistanceEvaluators; private final ClassLoader classLoader; private final AuthProvider authProvider; private final SslEngineFactory sslEngineFactory; @@ -69,6 +73,7 @@ private ProgrammaticArguments( @Nullable RequestTracker requestTracker, @NonNull Map localDatacenters, @NonNull Map> nodeFilters, + @NonNull Map nodeDistanceEvaluators, @Nullable ClassLoader classLoader, @Nullable AuthProvider authProvider, @Nullable SslEngineFactory sslEngineFactory, @@ -85,6 +90,7 @@ private ProgrammaticArguments( this.requestTracker = requestTracker; this.localDatacenters = localDatacenters; this.nodeFilters = nodeFilters; + this.nodeDistanceEvaluators = nodeDistanceEvaluators; this.classLoader = classLoader; this.authProvider = authProvider; this.sslEngineFactory = sslEngineFactory; @@ -122,10 +128,17 @@ public Map getLocalDatacenters() { } @NonNull + @Deprecated + @SuppressWarnings("DeprecatedIsStillUsed") public Map> getNodeFilters() { return nodeFilters; } + @NonNull + public Map getNodeDistanceEvaluators() { + return nodeDistanceEvaluators; + } + @Nullable public ClassLoader getClassLoader() { return classLoader; @@ -180,6 +193,8 @@ public static class Builder { private ImmutableMap.Builder localDatacentersBuilder = ImmutableMap.builder(); private final ImmutableMap.Builder> nodeFiltersBuilder = ImmutableMap.builder(); + private final ImmutableMap.Builder + nodeDistanceEvaluatorsBuilder = ImmutableMap.builder(); private 
ClassLoader classLoader; private AuthProvider authProvider; private SslEngineFactory sslEngineFactory; @@ -236,16 +251,42 @@ public Builder withLocalDatacenters(Map localDatacenters) { } @NonNull + public Builder withNodeDistanceEvaluator( + @NonNull String profileName, @NonNull NodeDistanceEvaluator nodeDistanceEvaluator) { + this.nodeDistanceEvaluatorsBuilder.put(profileName, nodeDistanceEvaluator); + return this; + } + + @NonNull + public Builder withNodeDistanceEvaluators( + Map nodeDistanceReporters) { + for (Entry entry : nodeDistanceReporters.entrySet()) { + this.nodeDistanceEvaluatorsBuilder.put(entry.getKey(), entry.getValue()); + } + return this; + } + + /** + * @deprecated Use {@link #withNodeDistanceEvaluator(String, NodeDistanceEvaluator)} instead. + */ + @NonNull + @Deprecated public Builder withNodeFilter( @NonNull String profileName, @NonNull Predicate nodeFilter) { this.nodeFiltersBuilder.put(profileName, nodeFilter); + this.nodeDistanceEvaluatorsBuilder.put( + profileName, new NodeFilterToDistanceEvaluatorAdapter(nodeFilter)); return this; } + /** @deprecated Use {@link #withNodeDistanceEvaluators(Map)} instead. 
*/ @NonNull + @Deprecated public Builder withNodeFilters(Map> nodeFilters) { for (Map.Entry> entry : nodeFilters.entrySet()) { this.nodeFiltersBuilder.put(entry.getKey(), entry.getValue()); + this.nodeDistanceEvaluatorsBuilder.put( + entry.getKey(), new NodeFilterToDistanceEvaluatorAdapter(entry.getValue())); } return this; } @@ -313,6 +354,7 @@ public ProgrammaticArguments build() { requestTracker, localDatacentersBuilder.build(), nodeFiltersBuilder.build(), + nodeDistanceEvaluatorsBuilder.build(), classLoader, authProvider, sslEngineFactory, diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index abe28786b62..990044b66c9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; @@ -389,6 +390,36 @@ public SelfT withLocalDatacenter(@NonNull String localDatacenter) { return withLocalDatacenter(DriverExecutionProfile.DEFAULT_NAME, localDatacenter); } + /** + * Adds a custom {@link NodeDistanceEvaluator} for a particular execution profile. This assumes + * that you're also using a dedicated load balancing policy for that profile. + * + *

          Node distance evaluators are honored by all the driver built-in load balancing policies. If + * you use a custom policy implementation however, you'll need to explicitly invoke the evaluator + * whenever appropriate. + * + *

          If an evaluator is specified programmatically with this method, it overrides the + * configuration (that is, the {@code load-balancing-policy.evaluator.class} option will be + * ignored). + * + * @see #withNodeDistanceEvaluator(NodeDistanceEvaluator) + */ + @NonNull + public SelfT withNodeDistanceEvaluator( + @NonNull String profileName, @NonNull NodeDistanceEvaluator nodeDistanceEvaluator) { + this.programmaticArgumentsBuilder.withNodeDistanceEvaluator(profileName, nodeDistanceEvaluator); + return self; + } + + /** + * Alias to {@link #withNodeDistanceEvaluator(String, NodeDistanceEvaluator)} for the default + * profile. + */ + @NonNull + public SelfT withNodeDistanceEvaluator(@NonNull NodeDistanceEvaluator nodeDistanceEvaluator) { + return withNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME, nodeDistanceEvaluator); + } + /** * Adds a custom filter to include/exclude nodes for a particular execution profile. This assumes * that you're also using a dedicated load balancing policy for that profile. @@ -398,21 +429,60 @@ public SelfT withLocalDatacenter(@NonNull String localDatacenter) { * policy will suggest distance IGNORED (meaning the driver won't ever connect to it if all * policies agree), and never included in any query plan. * - *

          Note that this behavior is implemented in the default load balancing policy. If you use a - * custom policy implementation, you'll need to explicitly invoke the filter. + *

          Note that this behavior is implemented in the driver built-in load balancing policies. If + * you use a custom policy implementation, you'll need to explicitly invoke the filter. * *

          If the filter is specified programmatically with this method, it overrides the configuration * (that is, the {@code load-balancing-policy.filter.class} option will be ignored). * + *

          This method has been deprecated in favor of {@link + * #withNodeDistanceEvaluator(String, NodeDistanceEvaluator)}. If you were using node + * filters, you can easily replace your filters with the following implementation of {@link + * NodeDistanceEvaluator}: + * + *

          {@code
          +   * public class NodeFilterToDistanceEvaluatorAdapter implements NodeDistanceEvaluator {
          +   *
          +   *   private final Predicate nodeFilter;
          +   *
          +   *   public NodeFilterToDistanceEvaluatorAdapter(Predicate nodeFilter) {
          +   *     this.nodeFilter = nodeFilter;
          +   *   }
          +   *
          +   *   public NodeDistance evaluateDistance(Node node, String localDc) {
          +   *     return nodeFilter.test(node) ? null : NodeDistance.IGNORED;
          +   *   }
          +   * }
          +   * }
          + * + * The same can be achieved using a lambda + closure: + * + *
          {@code
          +   * Predicate nodeFilter = ...
          +   * NodeDistanceEvaluator evaluator =
          +   *   (node, localDc) -> nodeFilter.test(node) ? null : NodeDistance.IGNORED;
          +   * }
          + * * @see #withNodeFilter(Predicate) + * @deprecated Use {@link #withNodeDistanceEvaluator(String, NodeDistanceEvaluator)} instead. */ + @Deprecated @NonNull public SelfT withNodeFilter(@NonNull String profileName, @NonNull Predicate nodeFilter) { this.programmaticArgumentsBuilder.withNodeFilter(profileName, nodeFilter); return self; } - /** Alias to {@link #withNodeFilter(String, Predicate)} for the default profile. */ + /** + * Alias to {@link #withNodeFilter(String, Predicate)} for the default profile. + * + *

          This method has been deprecated in favor of {@link + * #withNodeDistanceEvaluator(NodeDistanceEvaluator)}. See the javadocs of {@link + * #withNodeFilter(String, Predicate)} to understand how to migrate your legacy node filters. + * + * @deprecated Use {@link #withNodeDistanceEvaluator(NodeDistanceEvaluator)} instead. + */ + @Deprecated @NonNull public SelfT withNodeFilter(@NonNull Predicate nodeFilter) { return withNodeFilter(DriverExecutionProfile.DEFAULT_NAME, nodeFilter); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 8cbe488253b..64925699a64 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.connection.ReconnectionPolicy; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; @@ -223,7 +224,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final SchemaChangeListener schemaChangeListenerFromBuilder; private final RequestTracker requestTrackerFromBuilder; private final Map localDatacentersFromBuilder; - private final Map> nodeFiltersFromBuilder; + private final Map nodeDistanceEvaluatorsFromBuilder; private final ClassLoader classLoader; private final InetSocketAddress cloudProxyAddress; private final LazyReference requestLogFormatterRef = @@ -275,7 +276,7 @@ public DefaultDriverContext( 
"sslEngineFactory", () -> buildSslEngineFactory(programmaticArguments.getSslEngineFactory()), cycleDetector); - this.nodeFiltersFromBuilder = programmaticArguments.getNodeFilters(); + this.nodeDistanceEvaluatorsFromBuilder = programmaticArguments.getNodeDistanceEvaluators(); this.classLoader = programmaticArguments.getClassLoader(); this.cloudProxyAddress = programmaticArguments.getCloudProxyAddress(); this.startupClientId = programmaticArguments.getStartupClientId(); @@ -908,8 +909,8 @@ public String getLocalDatacenter(@NonNull String profileName) { @Nullable @Override - public Predicate getNodeFilter(@NonNull String profileName) { - return nodeFiltersFromBuilder.get(profileName); + public NodeDistanceEvaluator getNodeDistanceEvaluator(@NonNull String profileName) { + return nodeDistanceEvaluatorsFromBuilder.get(profileName); } @Nullable diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index 0bfc07d73a2..3e3f21d0e41 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -16,7 +16,7 @@ package com.datastax.oss.driver.internal.core.context; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; @@ -49,7 +49,6 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.function.Predicate; /** Extends the driver context with additional components that are not exposed by our public API. 
*/ public interface InternalDriverContext extends DriverContext { @@ -138,12 +137,13 @@ public interface InternalDriverContext extends DriverContext { String getLocalDatacenter(@NonNull String profileName); /** - * This is the filter from {@link SessionBuilder#withNodeFilter(String, Predicate)}. If the filter - * for this profile was specified through the configuration instead, this method will return - * {@code null}. + * This is the node distance evaluator from {@link + * SessionBuilder#withNodeDistanceEvaluator(String, NodeDistanceEvaluator)}. If the evaluator for + * this profile was specified through the configuration instead, this method will return {@code + * null}. */ @Nullable - Predicate getNodeFilter(@NonNull String profileName); + NodeDistanceEvaluator getNodeDistanceEvaluator(@NonNull String profileName); /** * The {@link ClassLoader} to use to reflectively load class names defined in configuration. If diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index dd9b4145b18..395412272ce 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.core.metadata.TokenMap; @@ -30,7 +31,7 @@ import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; 
import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.loadbalancing.helper.DefaultNodeFilterHelper; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.DefaultNodeDistanceEvaluatorHelper; import com.datastax.oss.driver.internal.core.loadbalancing.helper.OptionalLocalDcHelper; import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.DcAgnosticNodeSet; import com.datastax.oss.driver.internal.core.loadbalancing.nodeset.MultiDcNodeSet; @@ -53,7 +54,6 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntUnaryOperator; -import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -111,7 +111,7 @@ public class BasicLoadBalancingPolicy implements LoadBalancingPolicy { // private because they should be set in init() and never be modified after private volatile DistanceReporter distanceReporter; - private volatile Predicate filter; + private volatile NodeDistanceEvaluator nodeDistanceEvaluator; private volatile String localDc; private volatile NodeSet liveNodes; @@ -155,7 +155,7 @@ protected NodeSet getLiveNodes() { public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { this.distanceReporter = distanceReporter; localDc = discoverLocalDc(nodes).orElse(null); - filter = createNodeFilter(localDc, nodes); + nodeDistanceEvaluator = createNodeDistanceEvaluator(localDc, nodes); liveNodes = localDc == null ? new DcAgnosticNodeSet() @@ -200,7 +200,7 @@ protected Optional discoverLocalDc(@NonNull Map nodes) { } /** - * Creates a new node filter to use with this policy. + * Creates a new node distance evaluator to use with this policy. * *

          This method is called only once, during {@linkplain LoadBalancingPolicy#init(Map, * LoadBalancingPolicy.DistanceReporter) initialization}, and only after local datacenter @@ -209,14 +209,14 @@ protected Optional discoverLocalDc(@NonNull Map nodes) { * @param localDc The local datacenter that was just discovered, or null if none found. * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) * when the load balancing policy was initialized. This argument is provided in case - * implementors need to inspect the cluster topology to create the node filter. - * @return the node filter to use. + * implementors need to inspect the cluster topology to create the evaluator. + * @return the distance evaluator to use. */ @NonNull - protected Predicate createNodeFilter( + protected NodeDistanceEvaluator createNodeDistanceEvaluator( @Nullable String localDc, @NonNull Map nodes) { - return new DefaultNodeFilterHelper(context, profile, logPrefix) - .createNodeFilter(localDc, nodes); + return new DefaultNodeDistanceEvaluatorHelper(context, profile, logPrefix) + .createNodeDistanceEvaluator(localDc, nodes); } @NonNull @@ -399,20 +399,21 @@ public void onRemove(@NonNull Node node) { * a node {@linkplain #onAdd(Node) is added}, and when a node {@linkplain #onUp(Node) is back UP}. */ protected NodeDistance computeNodeDistance(@NonNull Node node) { - // We interrogate the filter every time since it could be dynamic + // We interrogate the custom evaluator every time since it could be dynamic // and change its verdict between two invocations of this method. - if (!filter.test(node)) { - return NodeDistance.IGNORED; + NodeDistance distance = nodeDistanceEvaluator.evaluateDistance(node, localDc); + if (distance != null) { + return distance; } - // no local DC is defined, all nodes accepted by the filter are LOCAL. + // no local DC defined: all nodes are considered LOCAL. 
if (localDc == null) { return NodeDistance.LOCAL; } - // the node is LOCAL if its datacenter is the local datacenter. + // otherwise, the node is LOCAL if its datacenter is the local datacenter. if (Objects.equals(node.getDatacenter(), localDc)) { return NodeDistance.LOCAL; } - // otherwise the node will be either REMOTE or IGNORED, depending + // otherwise, the node will be either REMOTE or IGNORED, depending // on how many remote nodes we accept per DC. if (maxNodesPerRemoteDc > 0) { Object[] remoteNodes = liveNodes.dc(node.getDatacenter()).toArray(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeDistanceEvaluatorHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeDistanceEvaluatorHelper.java new file mode 100644 index 00000000000..446533b23c7 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeDistanceEvaluatorHelper.java @@ -0,0 +1,112 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.util.Reflection; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Map; +import java.util.UUID; +import java.util.function.Predicate; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A {@link NodeDistanceEvaluatorHelper} implementation that fetches the user-supplied evaluator, if + * any, from the programmatic configuration API, or else, from the driver configuration. If no + * user-supplied evaluator can be retrieved, a dummy evaluator will be used which always evaluates + * null distances. 
+ */ +@ThreadSafe +public class DefaultNodeDistanceEvaluatorHelper implements NodeDistanceEvaluatorHelper { + + private static final Logger LOG = + LoggerFactory.getLogger(DefaultNodeDistanceEvaluatorHelper.class); + + @NonNull protected final InternalDriverContext context; + @NonNull protected final DriverExecutionProfile profile; + @NonNull protected final String logPrefix; + + public DefaultNodeDistanceEvaluatorHelper( + @NonNull InternalDriverContext context, + @NonNull DriverExecutionProfile profile, + @NonNull String logPrefix) { + this.context = context; + this.profile = profile; + this.logPrefix = logPrefix; + } + + @NonNull + @Override + public NodeDistanceEvaluator createNodeDistanceEvaluator( + @Nullable String localDc, @NonNull Map nodes) { + NodeDistanceEvaluator nodeDistanceEvaluatorFromConfig = nodeDistanceEvaluatorFromConfig(); + return (node, dc) -> { + NodeDistance distance = nodeDistanceEvaluatorFromConfig.evaluateDistance(node, dc); + if (distance != null) { + LOG.debug("[{}] Evaluator assigned distance {} to node {}", logPrefix, distance, node); + } else { + LOG.debug("[{}] Evaluator did not assign a distance to node {}", logPrefix, node); + } + return distance; + }; + } + + @NonNull + protected NodeDistanceEvaluator nodeDistanceEvaluatorFromConfig() { + NodeDistanceEvaluator evaluator = context.getNodeDistanceEvaluator(profile.getName()); + if (evaluator != null) { + LOG.debug("[{}] Node distance evaluator set programmatically", logPrefix); + } else { + evaluator = + Reflection.buildFromConfig( + context, + profile.getName(), + DefaultDriverOption.LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS, + NodeDistanceEvaluator.class) + .orElse(null); + if (evaluator != null) { + LOG.debug("[{}] Node distance evaluator set from configuration", logPrefix); + } else { + @SuppressWarnings({"unchecked", "deprecation"}) + Predicate nodeFilterFromConfig = + Reflection.buildFromConfig( + context, + profile.getName(), + DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, 
+ Predicate.class) + .orElse(null); + if (nodeFilterFromConfig != null) { + evaluator = new NodeFilterToDistanceEvaluatorAdapter(nodeFilterFromConfig); + LOG.debug( + "[{}] Node distance evaluator set from deprecated node filter configuration", + logPrefix); + } + } + } + if (evaluator == null) { + evaluator = PASS_THROUGH_DISTANCE_EVALUATOR; + } + return evaluator; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java deleted file mode 100644 index 9eae21589ed..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/DefaultNodeFilterHelper.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.internal.core.loadbalancing.helper; - -import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.internal.core.context.InternalDriverContext; -import com.datastax.oss.driver.internal.core.util.Reflection; -import edu.umd.cs.findbugs.annotations.NonNull; -import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.Map; -import java.util.UUID; -import java.util.function.Predicate; -import net.jcip.annotations.ThreadSafe; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A {@link NodeFilterHelper} implementation that fetches the user-supplied filter, if any, from the - * programmatic configuration API, or else, from the driver configuration. If no user-supplied - * filter can be retrieved, a dummy filter will be used which accepts all nodes unconditionally. - * - *

          Note that, regardless of the filter supplied by the end user, if a local datacenter is defined - * the filter returned by this implementation will always reject nodes that report a datacenter - * different from the local one. - */ -@ThreadSafe -public class DefaultNodeFilterHelper implements NodeFilterHelper { - - private static final Logger LOG = LoggerFactory.getLogger(DefaultNodeFilterHelper.class); - - @NonNull protected final InternalDriverContext context; - @NonNull protected final DriverExecutionProfile profile; - @NonNull protected final String logPrefix; - - public DefaultNodeFilterHelper( - @NonNull InternalDriverContext context, - @NonNull DriverExecutionProfile profile, - @NonNull String logPrefix) { - this.context = context; - this.profile = profile; - this.logPrefix = logPrefix; - } - - @NonNull - @Override - public Predicate createNodeFilter( - @Nullable String localDc, @NonNull Map nodes) { - Predicate filterFromConfig = nodeFilterFromConfig(); - return node -> { - if (!filterFromConfig.test(node)) { - LOG.debug( - "[{}] Ignoring {} because it doesn't match the user-provided predicate", - logPrefix, - node); - return false; - } else { - return true; - } - }; - } - - @NonNull - protected Predicate nodeFilterFromConfig() { - Predicate filter = context.getNodeFilter(profile.getName()); - if (filter != null) { - LOG.debug("[{}] Node filter set programmatically", logPrefix); - } else { - @SuppressWarnings("unchecked") - Predicate filterFromConfig = - Reflection.buildFromConfig( - context, - profile.getName(), - DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS, - Predicate.class) - .orElse(INCLUDE_ALL_NODES); - filter = filterFromConfig; - LOG.debug("[{}] Node filter set from configuration", logPrefix); - } - return filter; - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeDistanceEvaluatorHelper.java similarity index 76% rename from core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeDistanceEvaluatorHelper.java index 2b082bfe324..76256db94e1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterHelper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeDistanceEvaluatorHelper.java @@ -16,30 +16,31 @@ package com.datastax.oss.driver.internal.core.loadbalancing.helper; import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.Node; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; import java.util.UUID; -import java.util.function.Predicate; import net.jcip.annotations.ThreadSafe; @FunctionalInterface @ThreadSafe -public interface NodeFilterHelper { +public interface NodeDistanceEvaluatorHelper { - Predicate INCLUDE_ALL_NODES = n -> true; + NodeDistanceEvaluator PASS_THROUGH_DISTANCE_EVALUATOR = (node, localDc) -> null; /** - * Creates a new node filter. + * Creates a new node distance evaluator. * * @param localDc The local datacenter, or null if none defined. * @param nodes All the nodes that were known to exist in the cluster (regardless of their state) * when the load balancing policy was {@linkplain LoadBalancingPolicy#init(Map, * LoadBalancingPolicy.DistanceReporter) initialized}. This argument is provided in case - * implementors need to inspect the cluster topology to create the node filter. - * @return the node filter to use. 
+ * implementors need to inspect the cluster topology to create the node distance evaluator. + * @return the node distance evaluator to use. */ @NonNull - Predicate createNodeFilter(@Nullable String localDc, @NonNull Map nodes); + NodeDistanceEvaluator createNodeDistanceEvaluator( + @Nullable String localDc, @NonNull Map nodes); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterToDistanceEvaluatorAdapter.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterToDistanceEvaluatorAdapter.java new file mode 100644 index 00000000000..f95ad626f5a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/NodeFilterToDistanceEvaluatorAdapter.java @@ -0,0 +1,38 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.loadbalancing.helper; + +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.function.Predicate; + +public class NodeFilterToDistanceEvaluatorAdapter implements NodeDistanceEvaluator { + + private final Predicate nodeFilter; + + public NodeFilterToDistanceEvaluatorAdapter(@NonNull Predicate nodeFilter) { + this.nodeFilter = nodeFilter; + } + + @Nullable + @Override + public NodeDistance evaluateDistance(@NonNull Node node, @Nullable String localDc) { + return nodeFilter.test(node) ? null : NodeDistance.IGNORED; + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 49ae947d556..ffbc6caebac 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -194,6 +194,27 @@ datastax-java-driver { # are specified, the programmatic value takes precedence. // local-datacenter = datacenter1 + # The class of a custom node distance evaluator. + # + # This option is not required; if present, it must be the fully-qualified name of a class that + # implements `com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator`, and has a + # public constructor taking two arguments: the DriverContext and a String representing the + # profile name. + # + # Alternatively, you can pass an instance of your distance evaluator to + # CqlSession.builder().withNodeDistanceEvaluator(). In that case, this option will be ignored. + # + # The evaluator will be invoked each time the policy processes a topology or state change. The + # evaluator's `evaluateDistance` method will be called with the node affected by the change, and + # the local datacenter name (or null if none is defined). 
If it returns a non-null distance, the + # policy will suggest that distance for the node; if the function returns null, the policy will + # will assign a default distance instead, based on its internal algorithm for computing node + # distances. + // evaluator.class= + + # DEPRECATED. Use evaluator.class instead (see above). If both evaluator.class and filter.class + # are defined, the former wins. + # # A custom filter to include/exclude nodes. # # This option is not required; if present, it must be the fully-qualified name of a class that diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java index de8a4693d5e..ac176ebcc64 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/ExecutionProfileMockUtil.java @@ -20,7 +20,7 @@ import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.CONNECTION_POOL_REMOTE_SIZE; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.HEARTBEAT_INTERVAL; -import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_FILTER_CLASS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS; import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.PROTOCOL_COMPRESSION; @@ -50,7 +50,7 @@ static DriverExecutionProfile mockDefaultExecutionProfile() { when(profile.getDuration(REQUEST_TIMEOUT)).thenReturn(Duration.ofMillis(100)); 
when(profile.getString(LOAD_BALANCING_POLICY_CLASS)).thenReturn("LoadBalancingPolicyImpl"); - when(profile.isDefined(LOAD_BALANCING_FILTER_CLASS)).thenReturn(true); + when(profile.isDefined(LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS)).thenReturn(true); when(profile.isDefined(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(true); when(profile.getString(LOAD_BALANCING_LOCAL_DATACENTER)).thenReturn(DEFAULT_LOCAL_DC); when(profile.isDefined(SPECULATIVE_EXECUTION_MAX)).thenReturn(true); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index 580f558dc33..126345f4716 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -67,7 +67,7 @@ public static DefaultDriverContext defaultDriverContext( .withSchemaChangeListener(mock(SchemaChangeListener.class)) .withRequestTracker(mock(RequestTracker.class)) .withLocalDatacenters(Maps.newHashMap()) - .withNodeFilters(Maps.newHashMap()) + .withNodeDistanceEvaluators(Maps.newHashMap()) .build(); return new DefaultDriverContext(configLoader, args); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java index 762720ac014..d327410a93f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyDistanceTest.java @@ -23,12 +23,12 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import 
com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import java.util.function.Predicate; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -39,7 +39,7 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class BasicLoadBalancingPolicyDistanceTest extends LoadBalancingPolicyTestBase { - @Mock private Predicate filter; + @Mock private NodeDistanceEvaluator nodeDistanceEvaluator; private ImmutableMap nodes; @@ -47,11 +47,8 @@ public class BasicLoadBalancingPolicyDistanceTest extends LoadBalancingPolicyTes @Override public void setup() { super.setup(); - when(filter.test(node1)).thenReturn(true); - when(filter.test(node2)).thenReturn(true); - when(filter.test(node3)).thenReturn(true); - when(filter.test(node4)).thenReturn(true); - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(filter); + when(context.getNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn(nodeDistanceEvaluator); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2, node3)); nodes = ImmutableMap.of( @@ -59,15 +56,20 @@ public void setup() { } @Test - public void should_report_IGNORED_when_excluded_by_filter() { + public void should_report_distance_reported_by_user_distance_reporter() { // Given - given(filter.test(node1)).willReturn(false); + given(node2.getDatacenter()).willReturn("dc2"); + given(nodeDistanceEvaluator.evaluateDistance(node1, "dc1")).willReturn(NodeDistance.LOCAL); + given(nodeDistanceEvaluator.evaluateDistance(node2, "dc1")).willReturn(NodeDistance.REMOTE); + given(nodeDistanceEvaluator.evaluateDistance(node3, 
"dc1")).willReturn(NodeDistance.IGNORED); BasicLoadBalancingPolicy policy = createPolicy(); // When policy.init(nodes, distanceReporter); // Then - verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node2, node3); + verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node2, NodeDistance.REMOTE); + verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java index 6640b2b946c..07723446027 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyEventsTest.java @@ -25,12 +25,11 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; -import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import java.util.function.Predicate; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -41,7 +40,7 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class BasicLoadBalancingPolicyEventsTest extends LoadBalancingPolicyTestBase { - @Mock private Predicate filter; + @Mock private NodeDistanceEvaluator nodeDistanceEvaluator; private BasicLoadBalancingPolicy policy; @@ 
-49,8 +48,8 @@ public class BasicLoadBalancingPolicyEventsTest extends LoadBalancingPolicyTestB @Override public void setup() { super.setup(); - when(filter.test(any(Node.class))).thenReturn(true); - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)).thenReturn(filter); + when(context.getNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn(nodeDistanceEvaluator); when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1)); policy = createAndInitPolicy(); reset(distanceReporter); @@ -65,7 +64,7 @@ public void should_remove_down_node_from_live_set() { assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); // should have been called only once, during initialization, but not during onDown - verify(filter).test(node2); + verify(nodeDistanceEvaluator).evaluateDistance(node2, "dc1"); } @Test @@ -77,7 +76,7 @@ public void should_remove_removed_node_from_live_set() { assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); verify(distanceReporter, never()).setDistance(eq(node2), any(NodeDistance.class)); // should have been called only once, during initialization, but not during onRemove - verify(filter).test(node2); + verify(nodeDistanceEvaluator).evaluateDistance(node2, "dc1"); } @Test @@ -87,7 +86,7 @@ public void should_set_added_node_to_local() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); - verify(filter).test(node3); + verify(nodeDistanceEvaluator).evaluateDistance(node3, "dc1"); // Not added to the live set yet, we're waiting for the pool to open assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); } @@ -95,7 +94,7 @@ public void should_set_added_node_to_local() { @Test public void should_ignore_added_node_when_filtered() { // Given - when(filter.test(node3)).thenReturn(false); + when(nodeDistanceEvaluator.evaluateDistance(node3, 
"dc1")).thenReturn(NodeDistance.IGNORED); // When policy.onAdd(node3); @@ -126,21 +125,21 @@ public void should_add_up_node_to_live_set() { // Then verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); - verify(filter).test(node3); + verify(nodeDistanceEvaluator).evaluateDistance(node3, "dc1"); assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3); } @Test public void should_ignore_up_node_when_filtered() { // Given - when(filter.test(node3)).thenReturn(false); + when(nodeDistanceEvaluator.evaluateDistance(node3, "dc1")).thenReturn(NodeDistance.IGNORED); // When policy.onUp(node3); // Then verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - verify(filter).test(node3); + verify(nodeDistanceEvaluator).evaluateDistance(node3, "dc1"); assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java index 56caff5c0aa..7fd16361817 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyInitTest.java @@ -195,10 +195,11 @@ public void should_not_ignore_nodes_from_remote_dcs_if_local_dc_not_set() { } @Test - public void should_ignore_nodes_excluded_by_filter() { + public void should_ignore_nodes_excluded_by_distance_reporter() { // Given - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) - .thenReturn(node -> node.equals(node1)); + when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); + when(context.getNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn((node, dc) -> node.equals(node1) ? 
NodeDistance.IGNORED : null); BasicLoadBalancingPolicy policy = createPolicy(); @@ -209,10 +210,10 @@ public void should_ignore_nodes_excluded_by_filter() { distanceReporter); // Then - verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); - verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); + verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node2, node3); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java index b57f0050985..e58d0e8b6bd 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -215,11 +215,11 @@ public void should_ignore_nodes_from_remote_dcs() { } @Test - public void should_ignore_nodes_excluded_by_filter() { + public void should_ignore_nodes_excluded_by_distance_reporter() { // Given when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) - .thenReturn(node -> node.equals(node1)); + when(context.getNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn((node, dc) -> node.equals(node1) ? 
NodeDistance.IGNORED : null); BasicLoadBalancingPolicy policy = createPolicy(); @@ -230,10 +230,10 @@ public void should_ignore_nodes_excluded_by_filter() { distanceReporter); // Then - verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); - verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); + verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node2, node3); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index c6202c3432b..6efe9661d89 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -185,13 +185,13 @@ public void should_ignore_nodes_from_remote_dcs() { } @Test - public void should_ignore_nodes_excluded_by_filter() { + public void should_ignore_nodes_excluded_by_distance_reporter() { // Given when(metadataManager.getContactPoints()).thenReturn(ImmutableSet.of(node1, node2)); - when(context.getNodeFilter(DriverExecutionProfile.DEFAULT_NAME)) - .thenReturn(node -> node.equals(node1)); + when(context.getNodeDistanceEvaluator(DriverExecutionProfile.DEFAULT_NAME)) + .thenReturn((node, dc) -> node.equals(node1) ? 
NodeDistance.IGNORED : null); - DefaultLoadBalancingPolicy policy = createPolicy(); + BasicLoadBalancingPolicy policy = createPolicy(); // When policy.init( @@ -200,10 +200,10 @@ public void should_ignore_nodes_excluded_by_filter() { distanceReporter); // Then - verify(distanceReporter).setDistance(node1, NodeDistance.LOCAL); - verify(distanceReporter).setDistance(node2, NodeDistance.IGNORED); - verify(distanceReporter).setDistance(node3, NodeDistance.IGNORED); - assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1); + verify(distanceReporter).setDistance(node1, NodeDistance.IGNORED); + verify(distanceReporter).setDistance(node2, NodeDistance.LOCAL); + verify(distanceReporter).setDistance(node3, NodeDistance.LOCAL); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node2, node3); } @NonNull diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 3d09f0282b6..cb9dde4196c 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -346,37 +346,40 @@ nodes in query plans will likely fail, causing the query plans to eventually try instead. If the local datacenter unavailability persists, local nodes will be eventually marked down and will be removed from query plans completely from query plans, until they are back up again. -#### Optional node filtering +#### Customizing node distance assignment -Finally, the default policy accepts an optional node filter that gets applied just after the test -for inclusion in the local DC. If a node doesn't pass this test, it will be set at distance -`IGNORED` and the driver will never try to connect to it. This is a good way to exclude nodes on -some custom criteria. +Finally, all the driver the built-in policies accept an optional node distance evaluator that gets +invoked each time a node is added to the cluster or comes back up. 
If the evaluator returns a +non-null distance for the node, that distance will be used, otherwise the driver will use its +built-in logic to assign a default distance to it. This is a good way to exclude nodes or to adjust +their distance according to custom, dynamic criteria. -You can pass the filter through the configuration: +You can pass the node distance evaluator through the configuration: ``` datastax-java-driver.basic.load-balancing-policy { class = DefaultLoadBalancingPolicy local-datacenter = datacenter1 - filter-class = com.acme.MyNodeFilter + evaluator.class = com.acme.MyNodeDistanceEvaluator } ``` -The filter class must implement `java.util.function.predicate`, and have a public constructor -that takes a [DriverContext] argument: `public MyNodeFilter(DriverContext context)`. +The node distance evaluator class must implement [NodeDistanceEvaluator], and have a public +constructor that takes a [DriverContext] argument: `public MyNodeDistanceEvaluator(DriverContext +context)`. -Sometimes it's more convenient to pass the filter programmatically; you can do that with -`SessionBuilder.withNodeFilter`: +Sometimes it's more convenient to pass the evaluator programmatically; you can do that with +`SessionBuilder.withNodeDistanceEvaluator`: ```java -List whiteList = ... +Map distances = ... CqlSession session = CqlSession.builder() - .withNodeFilter(whiteList::contains) + .withNodeDistanceEvaluator((node, dc) -> distances.get(node)) .build(); ``` -If a programmatic filter is provided, the configuration option is ignored. +If a programmatic node distance evaluator evaluator is provided, the configuration option is +ignored. ### Custom implementation @@ -429,5 +432,6 @@ Then it uses the "closest" distance for any given node. 
For example: [getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- [getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- [getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 55166b5b60b..bf40f0fce47 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -24,6 +24,50 @@ are encouraged to try out the new `TaggingMetricIdGenerator`, as it generates me that will look more familiar to users of libraries such as Micrometer or MicroProfile Metrics (and look nicer when exported to Prometheus or Graphite). +#### New `NodeDistanceEvaluator` API + +All driver built-in load-balancing policies now accept a new optional component called +[NodeDistanceEvaluator]. This component gets invoked each time a node is added to the cluster or +comes back up. If the evaluator returns a non-null distance for the node, that distance will be +used, otherwise the driver will use its built-in logic to assign a default distance to it. + +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html + +This component replaces the old "node filter" component. As a consequence, all `withNodeFilter` +methods in `SessionBuilder` are now deprecated and should be replaced by the equivalent +`withNodeDistanceEvaluator` methods. 
+ +If you have an existing node filter implementation, it can be converted to a `NodeDistanceEvaluator` +very easily: + +```java +Predicate nodeFilter = ... +NodeDistanceEvaluator nodeEvaluator = + (node, dc) -> nodeFilter.test(node) ? null : NodeDistance.IGNORED; +``` + +The above can also be achieved by an adapter class as shown below: + +```java +public class NodeFilterToDistanceEvaluatorAdapter implements NodeDistanceEvaluator { + + private final Predicate nodeFilter; + + public NodeFilterToDistanceEvaluatorAdapter(@NonNull Predicate nodeFilter) { + this.nodeFilter = nodeFilter; + } + + @Nullable @Override + public NodeDistance evaluateDistance(@NonNull Node node, @Nullable String localDc) { + return nodeFilter.test(node) ? null : NodeDistance.IGNORED; + } +} +``` + +Finally, the `datastax-java-driver.basic.load-balancing-policy.filter.class` configuration option +has been deprecated; it should be replaced with a node distance evaluator class defined by the +`datastax-java-driver.basic.load-balancing-policy.evaluator.class` option instead. 
+ ### 4.10.0 #### Cross-datacenter failover From f84648f9d77ac62ea5fbf493f52a1e823b337b87 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 22 Mar 2021 20:10:59 +0100 Subject: [PATCH 670/979] Fix flaky metrics integration tests --- .../oss/driver/core/metrics/DropwizardMetricsIT.java | 5 +++-- .../oss/driver/metrics/micrometer/MicrometerMetricsIT.java | 5 +++-- .../driver/metrics/microprofile/MicroProfileMetricsIT.java | 5 +++-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java index 57bda625ce2..9a769c3c02d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.metrics; import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import com.codahale.metrics.Counter; import com.codahale.metrics.Gauge; @@ -95,7 +96,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_REQUESTS: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(30); + await().untilAsserted(() -> assertThat(((Timer) m).getCount()).isEqualTo(30)); break; case CQL_PREPARED_CACHE_SIZE: assertThat(m).isInstanceOf(Gauge.class); @@ -144,7 +145,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_MESSAGES: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(10); + await().untilAsserted(() -> assertThat(((Timer) m).getCount()).isEqualTo(10)); break; case READ_TIMEOUTS: case WRITE_TIMEOUTS: diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java index ddfc8913d63..8c546c239cd 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.metrics.micrometer; import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Node; @@ -86,7 +87,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_REQUESTS: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).count()).isEqualTo(30); + await().untilAsserted(() -> assertThat(((Timer) m).count()).isEqualTo(30)); break; case CQL_PREPARED_CACHE_SIZE: assertThat(m).isInstanceOf(Gauge.class); @@ -128,7 +129,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_MESSAGES: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).count()).isEqualTo(10); + await().untilAsserted(() -> assertThat(((Timer) m).count()).isEqualTo(10)); break; case READ_TIMEOUTS: case WRITE_TIMEOUTS: diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java index 67a7f83c982..13d28936ad1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.metrics.microprofile; import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import 
com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Node; @@ -91,7 +92,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_REQUESTS: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(30); + await().untilAsserted(() -> assertThat(((Timer) m).getCount()).isEqualTo(30)); break; case CQL_PREPARED_CACHE_SIZE: assertThat(m).isInstanceOf(Gauge.class); @@ -134,7 +135,7 @@ protected void assertMetricsPresent(CqlSession session) { break; case CQL_MESSAGES: assertThat(m).isInstanceOf(Timer.class); - assertThat(((Timer) m).getCount()).isEqualTo(10); + await().untilAsserted(() -> assertThat(((Timer) m).getCount()).isEqualTo(10)); break; case READ_TIMEOUTS: case WRITE_TIMEOUTS: From ef56d561d97adcae48e0e6e8807f334aedc0d783 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 23 Mar 2021 16:53:56 +0100 Subject: [PATCH 671/979] Minor enhancement to the documentation of metrics.node.expire-after option --- core/src/main/resources/reference.conf | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index ffbc6caebac..26a6dbc3dcd 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1820,9 +1820,8 @@ datastax-java-driver { # The time after which the node level metrics will be evicted. # # This is used to unregister stale metrics if a node leaves the cluster or gets a new address. - # The eviction will happen only if none of the enabled node-level metrics is updated for a - # given node for a given time. When this interval elapses, all metrics for the idle node are - # removed. + # If the node does not come back up when this interval elapses, all its metrics are removed + # from the registry. # # The lowest allowed value is 5 minutes. If you try to set it lower, the driver will log a # warning and use 5 minutes. 
From 1f60c94e7af8c10bd0230b1fe649014602bfc42e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 11:58:24 +0100 Subject: [PATCH 672/979] JAVA-2930: Allow Micrometer to record histograms for timers (#1542) --- changelog/README.md | 1 + .../api/core/config/DseDriverOption.java | 41 ++++++ .../api/core/config/DefaultDriverOption.java | 39 +++++ .../driver/api/core/config/OptionsMap.java | 8 ++ .../api/core/config/TypedDriverOption.java | 57 ++++++++ core/src/main/resources/reference.conf | 107 +++++--------- manual/core/metrics/README.md | 9 +- .../micrometer/MicrometerMetricUpdater.java | 19 ++- .../MicrometerNodeMetricUpdater.java | 40 ++++++ .../MicrometerSessionMetricUpdater.java | 84 +++++++++++ .../MicrometerNodeMetricUpdaterTest.java | 124 +++++++++++----- .../MicrometerSessionMetricUpdaterTest.java | 134 ++++++++++++++++++ 12 files changed, 552 insertions(+), 111 deletions(-) create mode 100644 metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java diff --git a/changelog/README.md b/changelog/README.md index 4a5c5399f04..74a96b61d02 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.0 (in progress) +- [improvement] JAVA-2930: Allow Micrometer to record histograms for timers - [improvement] JAVA-2914: Transform node filter into a more flexible node distance evaluator - [improvement] JAVA-2929: Revisit node-level metric eviction - [new feature] JAVA-2830: Add mapper support for Java streams diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index ac493719ef6..74907c177b6 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -247,6 +247,47 @@ public enum DseDriverOption implements DriverOption { *

          Value-type: {@link java.time.Duration Duration} */ METRICS_NODE_GRAPH_MESSAGES_INTERVAL("advanced.metrics.node.graph-messages.refresh-interval"), + + /** + * The shortest latency that we expect to record for continuous requests. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST( + "advanced.metrics.session.continuous-cql-requests.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO( + "advanced.metrics.session.continuous-cql-requests.slo"), + + /** + * The shortest latency that we expect to record for graph requests. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_GRAPH_REQUESTS_LOWEST("advanced.metrics.session.graph-requests.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_GRAPH_REQUESTS_SLO("advanced.metrics.session.graph-requests.slo"), + + /** + * The shortest latency that we expect to record for graph requests. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_GRAPH_MESSAGES_LOWEST("advanced.metrics.node.graph-messages.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_GRAPH_MESSAGES_SLO("advanced.metrics.node.graph-messages.slo"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index fde3e87857a..8a916705b1b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -863,6 +863,45 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-Type: {@link String} */ LOAD_BALANCING_DISTANCE_EVALUATOR_CLASS("basic.load-balancing-policy.evaluator.class"), + + /** + * The shortest latency that we expect to record for requests. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_CQL_REQUESTS_LOWEST("advanced.metrics.session.cql-requests.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: List of {@link java.time.Duration Duration} + */ + METRICS_SESSION_CQL_REQUESTS_SLO("advanced.metrics.session.cql-requests.slo"), + + /** + * The shortest latency that we expect to record for throttling. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_SESSION_THROTTLING_LOWEST("advanced.metrics.session.throttling.delay.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: List of {@link java.time.Duration Duration} + */ + METRICS_SESSION_THROTTLING_SLO("advanced.metrics.session.throttling.delay.slo"), + + /** + * The shortest latency that we expect to record for requests. + * + *

          Value-type: {@link java.time.Duration Duration} + */ + METRICS_NODE_CQL_MESSAGES_LOWEST("advanced.metrics.node.cql-messages.lowest-latency"), + /** + * Optional service-level objectives to meet, as a list of latencies to track. + * + *

          Value-type: List of {@link java.time.Duration Duration} + */ + METRICS_NODE_CQL_MESSAGES_SLO("advanced.metrics.node.cql-messages.slo"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 9c4758f531f..90e66126dd9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -310,14 +310,19 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.MONITOR_REPORTING_ENABLED, true); map.put(TypedDriverOption.METRICS_SESSION_ENABLED, Collections.emptyList()); map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_LOWEST, Duration.ofMillis(1)); map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_CQL_REQUESTS_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_LOWEST, Duration.ofMillis(1)); map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_THROTTLING_INTERVAL, Duration.ofMinutes(5)); map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, Duration.ofMinutes(2)); + map.put( + TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST, + Duration.ofMillis(10)); map.put(TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, 3); map.put( TypedDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_INTERVAL, @@ -325,13 +330,16 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.METRICS_FACTORY_CLASS, "DefaultMetricsFactory"); 
map.put(TypedDriverOption.METRICS_ID_GENERATOR_CLASS, "DefaultMetricIdGenerator"); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, Duration.ofSeconds(12)); + map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_LOWEST, Duration.ofMillis(1)); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, 3); map.put(TypedDriverOption.METRICS_SESSION_GRAPH_REQUESTS_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_NODE_ENABLED, Collections.emptyList()); map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST, Duration.ofMillis(1)); map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, 3); map.put(TypedDriverOption.METRICS_NODE_CQL_MESSAGES_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, Duration.ofSeconds(3)); + map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_LOWEST, Duration.ofMillis(1)); map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, 3); map.put(TypedDriverOption.METRICS_NODE_GRAPH_MESSAGES_INTERVAL, Duration.ofMinutes(5)); map.put(TypedDriverOption.METRICS_NODE_EXPIRE_AFTER, Duration.ofHours(1)); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 094f928d83c..25d6ac97136 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -363,6 +363,15 @@ public String toString() { public static final TypedDriverOption METRICS_SESSION_CQL_REQUESTS_HIGHEST = new TypedDriverOption<>( DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for requests. 
*/ + public static final TypedDriverOption METRICS_SESSION_CQL_REQUESTS_LOWEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_LOWEST, GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> METRICS_SESSION_CQL_REQUESTS_SLO = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for * requests. @@ -378,6 +387,15 @@ public String toString() { public static final TypedDriverOption METRICS_SESSION_THROTTLING_HIGHEST = new TypedDriverOption<>( DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for throttling. */ + public static final TypedDriverOption METRICS_SESSION_THROTTLING_LOWEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_LOWEST, GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> METRICS_SESSION_THROTTLING_SLO = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for * throttling. @@ -393,6 +411,15 @@ public String toString() { public static final TypedDriverOption METRICS_NODE_CQL_MESSAGES_HIGHEST = new TypedDriverOption<>( DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for requests. 
*/ + public static final TypedDriverOption METRICS_NODE_CQL_MESSAGES_LOWEST = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST, GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> METRICS_NODE_CQL_MESSAGES_SLO = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for * requests. @@ -621,6 +648,18 @@ public String toString() { new TypedDriverOption<>( DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for continuous requests. */ + public static final TypedDriverOption + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST, + GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for * continuous requests. @@ -686,6 +725,15 @@ public String toString() { public static final TypedDriverOption METRICS_SESSION_GRAPH_REQUESTS_HIGHEST = new TypedDriverOption<>( DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for graph requests. 
*/ + public static final TypedDriverOption METRICS_SESSION_GRAPH_REQUESTS_LOWEST = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_LOWEST, GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> METRICS_SESSION_GRAPH_REQUESTS_SLO = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for graph * requests. @@ -701,6 +749,15 @@ public String toString() { public static final TypedDriverOption METRICS_NODE_GRAPH_MESSAGES_HIGHEST = new TypedDriverOption<>( DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, GenericType.DURATION); + /** The shortest latency that we expect to record for graph requests. */ + public static final TypedDriverOption METRICS_NODE_GRAPH_MESSAGES_LOWEST = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_LOWEST, GenericType.DURATION); + /** Optional service-level objectives to meet, as a list of latencies to track. */ + public static final TypedDriverOption> METRICS_NODE_GRAPH_MESSAGES_SLO = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO, + GenericType.listOf(GenericType.DURATION)); /** * The number of significant decimal digits to which internal structures will maintain for graph * requests. diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 26a6dbc3dcd..0d56febf841 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1515,10 +1515,11 @@ datastax-java-driver { # Extra configuration (for the metrics that need it) - # Required: if the 'cql-requests' metric is enabled, and Dropwizard / HdrHistogram is used. + # Required: if the 'cql-requests' metric is enabled, and Dropwizard or Micrometer is used. 
# Modifiable at runtime: no # Overridable in a profile: no cql-requests { + # The largest latency that we expect to record. # # This should be slightly higher than request.timeout (in theory, readings can't be higher @@ -1526,14 +1527,21 @@ datastax-java-driver { # # This is used to scale internal data structures. If a higher recording is encountered at # runtime, it is discarded and a warning is logged. + # Valid for: Dropwizard, Micrometer. highest-latency = 3 seconds + # The shortest latency that we expect to record. This is used to scale internal data + # structures. + # Valid for: Micrometer. + lowest-latency = 1 millisecond + # The number of significant decimal digits to which internal structures will maintain # value resolution and separation (for example, 3 means that recordings up to 1 second # will be recorded with a resolution of 1 millisecond or better). # - # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a - # warning is logged. + # For Dropwizard, this must be between 0 and 5. If the value is out of range, it defaults to + # 3 and a warning is logged. + # Valid for: Dropwizard, Micrometer. significant-digits = 3 # The interval at which percentile data is refreshed. @@ -1552,96 +1560,51 @@ datastax-java-driver { # # Note that this does not apply to the total count and rates (those are updated in real # time). + # Valid for: Dropwizard. refresh-interval = 5 minutes + + # An optional list of latencies to track as part of the application's service-level + # objectives (SLOs). + # + # If defined, the histogram is guaranteed to contain these boundaries alongside other + # buckets used to generate aggregable percentile approximations. + # Valid for: Micrometer. + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + } - # Required: if the 'throttling.delay' metric is enabled, and Dropwizard / HdrHistogram is - # used. + # Required: if the 'throttling.delay' metric is enabled, and Dropwizard or Micrometer is used. 
# Modifiable at runtime: no # Overridable in a profile: no throttling.delay { highest-latency = 3 seconds + lowest-latency = 1 millisecond significant-digits = 3 refresh-interval = 5 minutes + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] } - # Required: if the 'continuous-cql-requests' metric is enabled, and Dropwizard / HdrHistogram - # is used + # Required: if the 'continuous-cql-requests' metric is enabled, and Dropwizard or Micrometer + # is used. # Modifiable at runtime: no # Overridable in a profile: no continuous-cql-requests { - - # The largest latency that we expect to record for a continuous paging request. - # - # This is used to scale internal data structures. If a higher recording is encountered at - # runtime, it is discarded and a warning is logged. highest-latency = 120 seconds - - # The number of significant decimal digits to which internal structures will maintain - # value resolution and separation (for example, 3 means that recordings up to 1 second - # will be recorded with a resolution of 1 millisecond or better). - # - # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a - # warning is logged. + lowest-latency = 10 milliseconds significant-digits = 3 - - # The interval at which percentile data is refreshed. - # - # The driver records latency data in a "live" histogram, and serves results from a cached - # snapshot. Each time the snapshot gets older than the interval, the two are switched. - # Note that this switch happens upon fetching the metrics, so if you never fetch the - # recording interval might grow higher (that shouldn't be an issue in a production - # environment because you would typically have a metrics reporter that exports to a - # monitoring tool at a regular interval). - # - # In practice, this means that if you set this to 5 minutes, you're looking at data from a - # 5-minute interval in the past, that is at most 5 minutes old. 
If you fetch the metrics - # at a faster pace, you will observe the same data for 5 minutes until the interval - # expires. - # - # Note that this does not apply to the total count and rates (those are updated in real - # time). refresh-interval = 5 minutes + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] } - # Required: if the 'graph-requests' metric is enabled, and Dropwizard / HdrHistogram is used + # Required: if the 'graph-requests' metric is enabled, and Dropwizard or Micrometer is used. # Modifiable at runtime: no # Overridable in a profile: no graph-requests { - # The largest latency that we expect to record. - # - # This should be slightly higher than basic.graph.timeout (in theory, readings can't be higher - # than the timeout, but there might be a small overhead due to internal scheduling). - # - # This is used to scale internal data structures. If a higher recording is encountered at - # runtime, it is discarded and a warning is logged. highest-latency = 12 seconds - - # The number of significant decimal digits to which internal structures will maintain - # value resolution and separation (for example, 3 means that recordings up to 1 second - # will be recorded with a resolution of 1 millisecond or better). - # - # This must be between 0 and 5. If the value is out of range, it defaults to 3 and a - # warning is logged. + lowest-latency = 1 millisecond significant-digits = 3 - - # The interval at which percentile data is refreshed. - # - # The driver records latency data in a "live" histogram, and serves results from a cached - # snapshot. Each time the snapshot gets older than the interval, the two are switched. - # Note that this switch happens upon fetching the metrics, so if you never fetch the - # recording interval might grow higher (that shouldn't be an issue in a production - # environment because you would typically have a metrics reporter that exports to a - # monitoring tool at a regular interval). 
- # - # In practice, this means that if you set this to 5 minutes, you're looking at data from a - # 5-minute interval in the past, that is at most 5 minutes old. If you fetch the metrics - # at a faster pace, you will observe the same data for 5 minutes until the interval - # expires. - # - # Note that this does not apply to the total count and rates (those are updated in real - # time). refresh-interval = 5 minutes + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] } } # The node-level metrics (all disabled by default). @@ -1797,24 +1760,28 @@ datastax-java-driver { # See cql-requests in the `session` section # - # Required: if the 'cql-messages' metric is enabled, and Dropwizard / HdrHistogram is used + # Required: if the 'cql-messages' metric is enabled, and Dropwizard or Micrometer is used. # Modifiable at runtime: no # Overridable in a profile: no cql-messages { highest-latency = 3 seconds + lowest-latency = 1 millisecond significant-digits = 3 refresh-interval = 5 minutes + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] } # See graph-requests in the `session` section # - # Required: if the 'graph-messages' metric is enabled, and Dropwizard / HdrHistogram is used + # Required: if the 'graph-messages' metric is enabled, and Dropwizard or Micrometer is used. # Modifiable at runtime: no # Overridable in a profile: no graph-messages { highest-latency = 3 seconds + lowest-latency = 1 millisecond significant-digits = 3 refresh-interval = 5 minutes + // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] } # The time after which the node level metrics will be evicted. diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 46bfe7689bd..ab6eb8c7a9f 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -156,10 +156,11 @@ datastax-java-driver.advanced.metrics { If you specify a metric that doesn't exist, it will be ignored, and a warning will be logged. 
-Finally, if you are using Dropwizard and enabled any metric of timer type, such as `cql-requests`, -it is also possible to provide additional configuration to fine-tune the underlying histogram's -characteristics and precision, such as its highest expected latency, its number of significant -digits to use, and its refresh interval. Again, see the [reference configuration] for more details. +Finally, if you are using Dropwizard or Micrometer and enabled any metric of timer type, such as +`cql-requests`, it is also possible to provide additional configuration to fine-tune the underlying +histogram's characteristics and precision, such as its highest expected latency, its number of +significant digits to use, and its refresh interval. Again, see the [reference configuration] for +more details. ### Selecting a metric identifier style diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java index 89f6c03bff2..c30dcc121ab 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -130,7 +130,10 @@ protected DistributionSummary getOrCreateDistributionSummaryFor(MetricT metric) m -> { MetricId id = getMetricId(m); Iterable tags = MicrometerTags.toMicrometerTags(id.getTags()); - return DistributionSummary.builder(id.getName()).tags(tags).register(registry); + DistributionSummary.Builder builder = + DistributionSummary.builder(id.getName()).tags(tags); + builder = configureDistributionSummary(builder, metric, id); + return builder.register(registry); }); } @@ -141,7 +144,19 @@ protected Timer getOrCreateTimerFor(MetricT metric) { m -> { MetricId id = getMetricId(m); Iterable tags = 
MicrometerTags.toMicrometerTags(id.getTags()); - return Timer.builder(id.getName()).tags(tags).register(registry); + Timer.Builder builder = Timer.builder(id.getName()).tags(tags); + builder = configureTimer(builder, metric, id); + return builder.register(registry); }); } + + protected Timer.Builder configureTimer(Timer.Builder builder, MetricT metric, MetricId id) { + return builder.publishPercentileHistogram(); + } + + @SuppressWarnings("unused") + protected DistributionSummary.Builder configureDistributionSummary( + DistributionSummary.Builder builder, MetricT metric, MetricId id) { + return builder.publishPercentileHistogram(); + } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java index a0c4cbd05a6..0f5dada2bf3 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.internal.metrics.micrometer; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; @@ -24,6 +26,8 @@ import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Timer; +import java.time.Duration; import java.util.Set; import net.jcip.annotations.ThreadSafe; @@ 
-88,4 +92,40 @@ protected void startMetricsExpirationTimeout() { protected void cancelMetricsExpirationTimeout() { super.cancelMetricsExpirationTimeout(); } + + @Override + protected Timer.Builder configureTimer(Timer.Builder builder, NodeMetric metric, MetricId id) { + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); + if (metric == DefaultNodeMetric.CQL_MESSAGES) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST)) + .maximumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO) + ? profile + .getDurationList(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision( + profile.getInt(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS)); + } else if (metric == DseNodeMetric.GRAPH_MESSAGES) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_LOWEST)) + .maximumExpectedValue( + profile.getDuration(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO) + ? 
profile + .getDurationList(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision(profile.getInt(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS)); + } + return super.configureTimer(builder, metric, id); + } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java index 93106ea77a9..bb361b85f22 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.internal.metrics.micrometer; +import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.metrics.SessionMetric; @@ -23,6 +25,8 @@ import com.datastax.oss.driver.internal.core.metrics.MetricId; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Timer; +import java.time.Duration; import java.util.Set; import net.jcip.annotations.ThreadSafe; @@ -55,4 +59,84 @@ public MicrometerSessionMetricUpdater( protected MetricId getMetricId(SessionMetric metric) { return context.getMetricIdGenerator().sessionMetricId(metric); } + + @Override + protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metric, MetricId id) { + DriverExecutionProfile profile = 
context.getConfig().getDefaultProfile(); + if (metric == DefaultSessionMetric.CQL_REQUESTS) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_LOWEST)) + .maximumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO) + ? profile + .getDurationList(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision( + profile.isDefined(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS) + ? profile.getInt(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS) + : null); + } else if (metric == DefaultSessionMetric.THROTTLING_DELAY) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_SESSION_THROTTLING_LOWEST)) + .maximumExpectedValue( + profile.getDuration(DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO) + ? profile + .getDurationList(DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision( + profile.isDefined(DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS) + ? profile.getInt(DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS) + : null); + } else if (metric == DseSessionMetric.CONTINUOUS_CQL_REQUESTS) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST)) + .maximumExpectedValue( + profile.getDuration( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO) + ? 
profile + .getDurationList( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision( + profile.isDefined( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS) + ? profile.getInt( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS) + : null); + } else if (metric == DseSessionMetric.GRAPH_REQUESTS) { + return builder + .publishPercentileHistogram() + .minimumExpectedValue( + profile.getDuration(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_LOWEST)) + .maximumExpectedValue( + profile.getDuration(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST)) + .serviceLevelObjectives( + profile.isDefined(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO) + ? profile + .getDurationList(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO) + .toArray(new Duration[0]) + : null) + .percentilePrecision( + profile.isDefined(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS) + ? profile.getInt(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS) + : null); + } + return super.configureTimer(builder, metric, id); + } } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java index badea84e6db..a874344fef0 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java @@ -17,34 +17,47 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import 
ch.qos.logback.classic.Level; +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseNodeMetric; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.NodeMetric; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; +import com.datastax.oss.driver.internal.core.metrics.DefaultMetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; import com.datastax.oss.driver.internal.core.util.LoggerTest; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.micrometer.core.instrument.Timer; +import io.micrometer.core.instrument.distribution.HistogramSnapshot; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; import java.time.Duration; +import java.util.Arrays; import java.util.Collections; import java.util.Set; -import java.util.function.Supplier; +import java.util.concurrent.TimeUnit; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(DataProviderRunner.class) public class MicrometerNodeMetricUpdaterTest { + private static final MetricId METRIC_ID = new DefaultMetricId("irrelevant", ImmutableMap.of()); + @Test public void should_log_warning_when_provided_eviction_time_setting_is_too_low() { // given @@ -54,6 +67,7 @@ public 
void should_log_warning_when_provided_eviction_time_setting_is_too_low() InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator generator = mock(MetricIdGenerator.class); Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); Duration expireAfter = AbstractMetricUpdater.MIN_EXPIRE_AFTER.minusMinutes(1); @@ -61,27 +75,13 @@ public void should_log_warning_when_provided_eviction_time_setting_is_too_low() when(context.getSessionName()).thenReturn("prefix"); when(context.getConfig()).thenReturn(config); when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) .thenReturn(expireAfter); + when(generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES)).thenReturn(METRIC_ID); MicrometerNodeMetricUpdater updater = - new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()) { - @Override - protected void initializeGauge( - NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { - // do nothing - } - - @Override - protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { - // do nothing - } - - @Override - protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { - // do nothing - } - }; + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()); // then assertThat(updater.getExpireAfter()).isEqualTo(AbstractMetricUpdater.MIN_EXPIRE_AFTER); @@ -107,33 +107,20 @@ public void should_not_log_warning_when_provided_eviction_time_setting_is_accept InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator 
generator = mock(MetricIdGenerator.class); Set enabledMetrics = Collections.singleton(DefaultNodeMetric.CQL_MESSAGES); // when when(context.getSessionName()).thenReturn("prefix"); when(context.getConfig()).thenReturn(config); when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) .thenReturn(expireAfter); + when(generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES)).thenReturn(METRIC_ID); MicrometerNodeMetricUpdater updater = - new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()) { - @Override - protected void initializeGauge( - NodeMetric metric, DriverExecutionProfile profile, Supplier supplier) { - // do nothing - } - - @Override - protected void initializeCounter(NodeMetric metric, DriverExecutionProfile profile) { - // do nothing - } - - @Override - protected void initializeTimer(NodeMetric metric, DriverExecutionProfile profile) { - // do nothing - } - }; + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()); // then assertThat(updater.getExpireAfter()).isEqualTo(expireAfter); @@ -147,4 +134,71 @@ public static Object[][] acceptableEvictionTimes() { {AbstractMetricUpdater.MIN_EXPIRE_AFTER.plusMinutes(1)} }; } + + @Test + @UseDataProvider(value = "timerMetrics") + public void should_create_timer( + NodeMetric metric, + DriverOption lowest, + DriverOption highest, + DriverOption digits, + DriverOption sla) { + // given + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator generator = mock(MetricIdGenerator.class); + Set enabledMetrics = Collections.singleton(metric); + + // when + when(context.getSessionName()).thenReturn("prefix"); + 
when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofHours(1)); + when(profile.getDuration(lowest)).thenReturn(Duration.ofMillis(10)); + when(profile.getDuration(highest)).thenReturn(Duration.ofSeconds(1)); + when(profile.getInt(digits)).thenReturn(5); + when(profile.isDefined(sla)).thenReturn(true); + when(profile.getDurationList(sla)) + .thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + when(generator.nodeMetricId(node, metric)).thenReturn(METRIC_ID); + + SimpleMeterRegistry registry = spy(new SimpleMeterRegistry()); + MicrometerNodeMetricUpdater updater = + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, registry); + + for (int i = 0; i < 10; i++) { + updater.updateTimer(metric, null, 100, TimeUnit.MILLISECONDS); + } + + // then + Timer timer = registry.find(METRIC_ID.getName()).timer(); + assertThat(timer).isNotNull(); + assertThat(timer.count()).isEqualTo(10); + HistogramSnapshot snapshot = timer.takeSnapshot(); + assertThat(snapshot.histogramCounts()).hasSize(2); + } + + @DataProvider + public static Object[][] timerMetrics() { + return new Object[][] { + { + DefaultNodeMetric.CQL_MESSAGES, + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST, + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO, + }, + { + DseNodeMetric.GRAPH_MESSAGES, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_LOWEST, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO, + }, + }; + } } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java 
b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java new file mode 100644 index 00000000000..09b3e44bac4 --- /dev/null +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.metrics.micrometer; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import com.datastax.dse.driver.api.core.config.DseDriverOption; +import com.datastax.dse.driver.api.core.metrics.DseSessionMetric; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.metrics.SessionMetric; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.metrics.DefaultMetricId; +import com.datastax.oss.driver.internal.core.metrics.MetricId; +import 
com.datastax.oss.driver.internal.core.metrics.MetricIdGenerator; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import io.micrometer.core.instrument.Timer; +import io.micrometer.core.instrument.distribution.HistogramSnapshot; +import io.micrometer.core.instrument.simple.SimpleMeterRegistry; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class MicrometerSessionMetricUpdaterTest { + + private static final MetricId METRIC_ID = new DefaultMetricId("irrelevant", ImmutableMap.of()); + + @Test + @UseDataProvider(value = "timerMetrics") + public void should_create_timer( + SessionMetric metric, + DriverOption lowest, + DriverOption highest, + DriverOption digits, + DriverOption sla) { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator generator = mock(MetricIdGenerator.class); + Set enabledMetrics = Collections.singleton(metric); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofHours(1)); + when(profile.getDuration(lowest)).thenReturn(Duration.ofMillis(10)); + when(profile.getDuration(highest)).thenReturn(Duration.ofSeconds(1)); + when(profile.getInt(digits)).thenReturn(5); + 
when(profile.isDefined(sla)).thenReturn(true); + when(profile.getDurationList(sla)) + .thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + when(generator.sessionMetricId(metric)).thenReturn(METRIC_ID); + + SimpleMeterRegistry registry = spy(new SimpleMeterRegistry()); + MicrometerSessionMetricUpdater updater = + new MicrometerSessionMetricUpdater(context, enabledMetrics, registry); + + for (int i = 0; i < 10; i++) { + updater.updateTimer(metric, null, 100, TimeUnit.MILLISECONDS); + } + + // then + Timer timer = registry.find(METRIC_ID.getName()).timer(); + assertThat(timer).isNotNull(); + assertThat(timer.count()).isEqualTo(10); + HistogramSnapshot snapshot = timer.takeSnapshot(); + assertThat(snapshot.histogramCounts()).hasSize(2); + } + + @DataProvider + public static Object[][] timerMetrics() { + return new Object[][] { + { + DefaultSessionMetric.CQL_REQUESTS, + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_LOWEST, + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO, + }, + { + DseSessionMetric.GRAPH_REQUESTS, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_LOWEST, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO, + }, + { + DseSessionMetric.CONTINUOUS_CQL_REQUESTS, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO, + }, + { + DefaultSessionMetric.THROTTLING_DELAY, + DefaultDriverOption.METRICS_SESSION_THROTTLING_LOWEST, + DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, + DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS, + 
DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO, + }, + }; + } +} From 9b42e3894cc5cb797f707114c8f487a4a7c3dde7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 14:37:17 +0100 Subject: [PATCH 673/979] Upgrade native-protocol to 1.5.0 --- bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bom/pom.xml b/bom/pom.xml index 6aaae045061..cd1abf49230 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -71,7 +71,7 @@ com.datastax.oss native-protocol - 1.4.13-SNAPSHOT + 1.5.0 com.datastax.oss From a6e1deebc731c5fc21fd0c80289ea8017486192d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 15:25:51 +0100 Subject: [PATCH 674/979] Fix failing test in MicrometerNodeMetricUpdaterTest --- .../micrometer/MicrometerNodeMetricUpdaterTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java index a874344fef0..fbdfb7b2355 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java @@ -79,6 +79,11 @@ public void should_log_warning_when_provided_eviction_time_setting_is_too_low() when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) .thenReturn(expireAfter); when(generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES)).thenReturn(METRIC_ID); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST)) + .thenReturn(Duration.ofSeconds(10)); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST)) + .thenReturn(Duration.ofMillis(1)); + 
when(profile.getInt(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS)).thenReturn(5); MicrometerNodeMetricUpdater updater = new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()); @@ -118,6 +123,11 @@ public void should_not_log_warning_when_provided_eviction_time_setting_is_accept when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) .thenReturn(expireAfter); when(generator.nodeMetricId(node, DefaultNodeMetric.CQL_MESSAGES)).thenReturn(METRIC_ID); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST)) + .thenReturn(Duration.ofSeconds(10)); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST)) + .thenReturn(Duration.ofMillis(1)); + when(profile.getInt(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS)).thenReturn(5); MicrometerNodeMetricUpdater updater = new MicrometerNodeMetricUpdater(node, context, enabledMetrics, new SimpleMeterRegistry()); From adbdb93727a15f6f110d6cb60bfabc65ed8deecb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 15:30:32 +0100 Subject: [PATCH 675/979] Update version in docs --- README.md | 4 +- changelog/README.md | 2 +- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++-- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/load_balancing/README.md | 
10 +-- manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/non_blocking/README.md | 44 +++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 36 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 18 ++--- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 24 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 28 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- 
manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- upgrade_guide/README.md | 38 +++++----- 84 files changed, 424 insertions(+), 424 deletions(-) diff --git a/README.md b/README.md index 5b7c323ec85..5a137ac947a 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.10.0](https://github.com/datastax/java-driver/tree/4.10.0).* +[4.11.0](https://github.com/datastax/java-driver/tree/4.11.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.10 +[API docs]: https://docs.datastax.com/en/drivers/java/4.11 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index 74a96b61d02..342336ae918 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.11.0 (in progress) +### 4.11.0 - [improvement] JAVA-2930: Allow Micrometer to record histograms for timers - [improvement] JAVA-2914: Transform node filter into a more flexible node distance evaluator diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 576ee41823d..4138ae42d89 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 7ff1fc3ab0e..6a316245c50 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index b3b0ff0dd17..61954f38c43 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index d3d936a2758..aeaaa53e5ce 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 90181f3b98e..8b3a2a61099 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index cc896b138a1..690ed1ce3cc 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.10.0 + 4.11.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.10.0 + 4.11.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 8d8b11e1065..55e7bf6d91c 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 77977089641..a1d9a345cee 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 53d2a785d02..b90ea31ad10 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC +[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- 
+[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index f81f376831c..737dd1b41c9 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
a7c676e8cdf..f6329a2fd5a 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 1bf8d7825ba..d18b1959c93 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 2d907231e58..090ccc248fb 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 5ba18bc6891..533a257d218 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index e40f29d7f9a..7c9356b19b6 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 2869ce22a4b..f26ab58bdf4 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 505ad2a40be..1bb97d92c64 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index cb9dde4196c..fdac96702fe 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -426,12 +426,12 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md 
index 51b7c4621ea..8b5557dd994 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 31503c10be4..715b26df732 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file 
+[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff 
--git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index fc02e8a8cdc..4fb70ac326c 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -307,16 +307,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html 
-[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git 
a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 8c98f389cef..e3dc6d903c3 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 616b04beeda..e2ed651de6d 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 8ddbcfd8069..efb068912b2 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block. 
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index ffae1dfb6fb..288127679a0 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 691b4735aea..6f6cfb9f2fb 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's 
internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index 8fb17c02f8b..e6ba907b95f 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index a3264c1f0e1..c98d1030392 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 57b3a98cebb..c8485c982bd 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a 
demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 08a1e30f4c9..36b2d55b832 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 1d52fbceb26..e959c09dfd7 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index fbcd8e92720..df8ac5b9003 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -231,21 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 698ac2f42a4..f5ba057d431 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index cdbc5ca817c..bac3786c624 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 90dec7a960d..2e1dd1aa75f 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index b1cb7c70967..df917e33af5 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 
+61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 66e9d451387..152816071d2 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index e74813d8e6d..afb24ac38ed 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index deff971fea9..e614885039a 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index a9985099fd7..e30f8fca947 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 6e779811aa0..3b5c025032f 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 3708988ad39..07d4416efba 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 2de2788c87a..1662aa3beb7 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 6cadf225623..8fdd72050a2 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index 545eda62533..2ab2ab7db35 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace--
-[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html`
+[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace--
+[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html
 
 `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which
 could introduce deadlocks.
diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md
index 1ff4fa80910..58e104513de 100644
--- a/manual/mapper/config/kotlin/README.md
+++ b/manual/mapper/config/kotlin/README.md
@@ -106,4 +106,4 @@ before compilation:
 
 [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle
 [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html
+[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html
diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md
index de7e3159816..d296439870f 100644
--- a/manual/mapper/config/record/README.md
+++ b/manual/mapper/config/record/README.md
@@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the
 runtime JVM. See [pom.xml] in the example.
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index b925895c80c..f5d2e120637 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 091f669f269..6bbd1b9f35a 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 705549502de..6b8a6707b48 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 00831156973..1526079f611 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 6eff3b1e88b..207f84e136d 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -82,15 +82,15 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html [Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index ede46a8a629..834bdb752f1 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
0daf347c5b3..f7bf250b304 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index 265f2b5a278..bf754441703 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperException.html 
+[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 81ac435b9cd..b95b118c48f 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -113,18 +113,18 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[Row]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 79423b674c3..e1e71bcc608 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 1b00ae17438..3cb6cc168c5 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -155,20 +155,20 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index f1f5646265b..54925fac574 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -63,8 +63,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index be9c2a2a23a..1793c8ac806 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 22c974ff894..44fcdec02c6 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
27039d76d51..35b457f9bb9 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Query.html 
+[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 8d6ee979a75..bebb2a62133 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 65b1efdb85a..be8430c31e7 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index ab7c39ffb1a..f4f1c12db73 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index f4d406da249..8a58139085e 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index c349f6c8c44..36f0400c43a 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index ae17f467843..3442ed55bab 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 8b6862e4fed..ba55a2c38b8 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 140b1cf430f..38fc701d798 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index b2d005495a6..78de7419719 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 7a782dcd6d6..5cf564f99eb 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` 
-[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 0c2603ef0fe..183f4e35fec 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 9347b3bbb56..643a1354ac7 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 609395d2b6e..20617636769 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index d743f584002..b7f923d90cc 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index cd23078f1eb..f08f3305bcc 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 1b71ea8434c..9ed26281765 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index eb95162e3d3..ca650d5d196 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index b1f87d276e9..b513b6a52ab 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 2c75f869220..2069bb54541 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index bf40f0fce47..0f5d46a9ca8 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -68,7 +68,7 @@ Finally, the `datastax-java-driver.basic.load-balancing-policy.filter.class` con has been deprecated; it should be replaced with a node distance evaluator class defined by the `datastax-java-driver.basic.load-balancing-policy.evaluator.class` option instead. -### 4.10.0 +### 4.11.0 #### Cross-datacenter failover @@ -87,26 +87,26 @@ interface that allows custom retry policies to customize the request before it i For this reason, the following methods in the `RetryPolicy` interface were added; they all return a `RetryVerdict` instance: -1. [`onReadTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) -2. [`onWriteTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) -3. 
[`onUnavailableVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailableVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) -4. [`onRequestAbortedVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAbortedVerdict-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) -5. [`onErrorResponseVerdict`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponseVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) +1. [`onReadTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) +2. [`onWriteTimeoutVerdict`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeoutVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) +3. [`onUnavailableVerdict`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailableVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) +4. [`onRequestAbortedVerdict`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAbortedVerdict-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) +5. 
[`onErrorResponseVerdict`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponseVerdict-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) The following methods were deprecated and will be removed in the next major version: -1. [`onReadTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) -2. [`onWriteTimeout`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) -3. [`onUnavailable`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailable-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) -4. [`onRequestAborted`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAborted-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) -5. [`onErrorResponse`](https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponse-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) +1. [`onReadTimeout`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onReadTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-boolean-int-) +2. 
[`onWriteTimeout`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onWriteTimeout-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-com.datastax.oss.driver.api.core.servererrors.WriteType-int-int-int-) +3. [`onUnavailable`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onUnavailable-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.ConsistencyLevel-int-int-int-) +4. [`onRequestAborted`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onRequestAborted-com.datastax.oss.driver.api.core.session.Request-java.lang.Throwable-int-) +5. [`onErrorResponse`](https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html#onErrorResponse-com.datastax.oss.driver.api.core.session.Request-com.datastax.oss.driver.api.core.servererrors.CoordinatorException-int-) Driver 4.10.0 also re-introduced a retry policy whose behavior is equivalent to the `DowngradingConsistencyRetryPolicy` from driver 3.x. See this -[FAQ entry](https://docs.datastax.com/en/developer/java-driver/4.10/faq/#where-is-downgrading-consistency-retry-policy) +[FAQ entry](https://docs.datastax.com/en/developer/java-driver/4.11/faq/#where-is-downgrading-consistency-retry-policy) for more information. -[`RetryVerdict`]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[`RetryVerdict`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryVerdict.html #### Enhancements to the `Uuids` utility class @@ -128,9 +128,9 @@ This release also introduces two new methods for random UUID generation: 2. [Uuids.random(SplittableRandom)]: similar to `Uuids.random()` but uses a `java.util.SplittableRandom` instead. 
-[Uuids.random()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- -[Uuids.random(Random)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.Random- -[Uuids.random(SplittableRandom)]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.SplittableRandom- +[Uuids.random()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[Uuids.random(Random)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.Random- +[Uuids.random(SplittableRandom)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-java.util.SplittableRandom- #### System and DSE keyspaces automatically excluded from metadata and token map computation @@ -152,8 +152,8 @@ empty replicas and token ranges for them. If you need the driver to keep computi token map for these keyspaces, you now must modify the following configuration option: `datastax-java-driver.advanced.metadata.schema.refreshed-keyspaces`. -[Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/metadata/TokenMap.html +[Metadata.getKeyspaces()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/TokenMap.html #### DSE Graph dependencies are now optional @@ -284,7 +284,7 @@ you can obtain in most web environments by calling `Thread.getContextClassLoader See the javadocs of [SessionBuilder.withClassLoader] for more information. 
-[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[SessionBuilder.withClassLoader]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- ### 4.1.0 From 667bc97f6012e522ced1aba7b0b156f4e60194c0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 15:44:02 +0100 Subject: [PATCH 676/979] Various dependency upgrades --- manual/core/integration/README.md | 7 ++++--- pom.xml | 28 ++++++++++++++-------------- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 4927089f98c..6fab5af9003 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -543,6 +543,7 @@ Here are the recommended TinkerPop versions for each driver version: + @@ -640,6 +641,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.10/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/pom.xml b/pom.xml index eb6ab69f887..9a570206c18 100644 --- a/pom.xml +++ b/pom.xml @@ -46,27 +46,27 @@ UTF-81.4.12.1.12 - 4.1.16 - 4.1.51.Final + 4.1.18 + 4.1.60.Final1.2.1 - 3.4.9 + 3.4.101.7.261.0.3 - 20201115 - 2.12.0 - 2.12.0 + 20210307 + 2.12.2 + 2.12.21.9.121.1.7.31.7.1 - 3.18.1 + 3.19.01.3 - 4.13.1 + 4.13.21.2.36.0.06.0.3 @@ -77,7 +77,7 @@ 2.5.02.1.11.1.4 - 2.2.20 + 2.2.24.0.32.0.0-M192.22.2 @@ -132,7 +132,7 @@ com.github.jnr jnr-posix - 3.1.2 + 3.1.5 io.dropwizard.metrics @@ -411,22 +411,22 @@ io.micrometer micrometer-core - 1.6.1 + 1.6.5 org.eclipse.microprofile.metrics microprofile-metrics-api - 2.3 + 2.3.3 io.smallrye smallrye-metrics - 2.4.4 + 2.4.6 io.projectreactor reactor-bom - 2020.0.2 + 2020.0.5 pom import From 9a273f053cd55bd60d133b5d91e2e05f7f2e4d52 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 15:52:14 +0100 Subject: [PATCH 677/979] [maven-release-plugin] prepare release 4.11.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index cd1abf49230..0877a5590e8 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-core-shaded - 
4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-mapper-processor - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-mapper-runtime - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-query-builder - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-test-infra - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-metrics-micrometer - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss java-driver-metrics-microprofile - 4.11.0-SNAPSHOT + 4.11.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index dd07c3c8be8..1926d3db213 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 5e545545541..1d0b5360d53 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 65de3f5bbb2..f9f9e615bc9 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a8aa460b95d..1451b27437a 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.0-SNAPSHOT + 4.11.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 5dbcaf6996c..c781b425074 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 5de6def1d9d..7f0b8592082 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f65a8d9f584..4ddc1f66919 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 5593610f010..5e2d62fb7f5 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 454562e3ed2..5978c80be5e 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1fb3438eff1..29d36d0b798 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 9a570206c18..df1bf41bf81 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.11.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 5e0e4cf0a89..01ba1b19102 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 4bd0d80bfe2..3094b1145b7 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0-SNAPSHOT + 4.11.0 java-driver-test-infra bundle From 0f7ba2703790b45941baddc2c04c2aeb8c8701c7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 15:52:23 +0100 Subject: [PATCH 678/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 0877a5590e8..f7cf0094d7a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.11.0 + 
4.11.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.11.0 + 4.11.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 1926d3db213..c220da6dc2c 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 1d0b5360d53..221b7966a02 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index f9f9e615bc9..8e67f2862c6 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1451b27437a..5e4abb3347b 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.0 + 4.11.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index c781b425074..677e1421c3c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 7f0b8592082..da37e82a52f 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 4ddc1f66919..476c480862c 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 5e2d62fb7f5..683b67dd2ca 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 5978c80be5e..e21a2379025 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 29d36d0b798..1d4412be248 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index df1bf41bf81..248b4ab5cfb 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.11.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 01ba1b19102..cd5ae434f65 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3094b1145b7..034d2c35049 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.0 + 4.11.1-SNAPSHOT java-driver-test-infra bundle From bb0a42b6b91d826f1430b4b92307f2f9b9e68f13 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 17:13:18 +0100 Subject: [PATCH 679/979] Fix wrong release version in upgrade guide --- upgrade_guide/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 0f5d46a9ca8..6ada0de2a13 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -68,7 +68,7 @@ Finally, the `datastax-java-driver.basic.load-balancing-policy.filter.class` con has been deprecated; it should be replaced with a node distance evaluator class defined by the `datastax-java-driver.basic.load-balancing-policy.evaluator.class` option instead. 
-### 4.11.0 +### 4.10.0 #### Cross-datacenter failover From eea59c6d0be60420ce10cde7be0e4be8f7e5453f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 24 Mar 2021 17:28:59 +0100 Subject: [PATCH 680/979] Improve metric name description --- manual/core/metrics/README.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index ab6eb8c7a9f..65a93eb1fbf 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -131,16 +131,6 @@ There are two categories of driver metrics: For example, `pool.open-connections` measures the number of connections open to this particular node. -Metric names are path-like, dot-separated strings. The driver prefixes them with the name of the -session (see `session-name` in the configuration), and in the case of node-level metrics, `nodes` -followed by a textual representation of the node's address. For example: - -``` -s0.connected-nodes => 2 -s0.nodes.127_0_0_1:9042.pool.open-connections => 2 -s0.nodes.127_0_0_2:9042.pool.open-connections => 1 -``` - To find out which metrics are available, see the [reference configuration]. It contains a commented-out line for each metric, with detailed explanations on its intended usage. @@ -171,10 +161,20 @@ The `advanced.metrics.id-generator.class` option is used to customize how the dr metric identifiers. The driver ships with two built-in implementations: - `DefaultMetricIdGenerator`: generates identifiers composed solely of (unique) metric names; it - does not generate tags. It is mostly suitable for use with metrics libraries that do not support - tags, like Dropwizard. -- `TaggingMetricIdGenerator`: generates identifiers composed of name and tags. It is mostly suitable - for use with metrics libraries that support tags, like Micrometer or MicroProfile Metrics. + does not generate tags. 
All metric names start with the name of the session (see `session-name` in + the configuration), and in the case of node-level metrics, this is followed by `.nodes.`, followed + by a textual representation of the node's address. All names end with the metric distinctive name. + See below for examples. This generator is mostly suitable for use with metrics libraries that do + not support tags, like Dropwizard. + +- `TaggingMetricIdGenerator`: generates identifiers composed of a name and one or two tags. + Session-level metric names start with the `session.` prefix followed by the metric distinctive + name; node-level metric names start with the `nodes.` prefix followed by the metric distinctive + name. Session-level tags will include a `session` tag whose value is the session name (see + `session-name` in the configuration); node-level tags will include the same `session` tag, and + also a `node` tag whose value is the node's address. See below for examples. This generator is + mostly suitable for use with metrics libraries that support tags, like Micrometer or MicroProfile + Metrics. 
For example, here is how each one of them generates identifiers for the session metric "bytes-sent", assuming that the session is named "s0": From b7e384ea2500c1d0ef9cc882604128e6a486b2da Mon Sep 17 00:00:00 2001 From: Dmitry Sysolyatin Date: Wed, 28 Apr 2021 10:59:55 +0300 Subject: [PATCH 681/979] JAVA-2934: Handle empty non-final pages in ReactiveResultSetSubscription (#1544) --- changelog/README.md | 4 +++ .../ReactiveResultSetSubscription.java | 5 +--- .../ReactiveResultSetSubscriptionTest.java | 30 +++++++++++++++++++ .../core/cql/reactive/TestSubscriber.java | 12 +++++++- 4 files changed, 46 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 342336ae918..1b58dacb630 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.12.0 (in progress) + +- [bug] JAVA-2934: Handle empty non-final pages in ReactiveResultSetSubscription + ### 4.11.0 - [improvement] JAVA-2930: Allow Micrometer to record histograms for timers diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java index 160e71296be..15afd55c06d 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscription.java @@ -277,13 +277,10 @@ private Object tryNext() { if (pages.poll() == null) { throw new AssertionError("Queue is empty, this should not happen"); } - current = pages.peek(); // if the next page is readily available, // serve its first row now, no need to wait // for the next drain. - if (current != null && current.hasMoreRows()) { - return current.nextRow(); - } + return tryNext(); } } // No items available right now. 
diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java index 9a57f9e03fb..1f6664e1662 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/ReactiveResultSetSubscriptionTest.java @@ -145,4 +145,34 @@ public void should_report_error_on_intermediary_page() { assertThat(wasAppliedSubscriber.getElements()).hasSize(1).containsExactly(true); assertThat(wasAppliedSubscriber.getError()).isNull(); } + + @Test + public void should_handle_empty_non_final_pages() { + CompletableFuture future1 = new CompletableFuture<>(); + CompletableFuture future2 = new CompletableFuture<>(); + CompletableFuture future3 = new CompletableFuture<>(); + MockAsyncResultSet page1 = new MockAsyncResultSet(10, future2); + MockAsyncResultSet page2 = new MockAsyncResultSet(0, future3); + MockAsyncResultSet page3 = new MockAsyncResultSet(10, null); + TestSubscriber mainSubscriber = new TestSubscriber<>(1); + TestSubscriber colDefsSubscriber = new TestSubscriber<>(); + TestSubscriber execInfosSubscriber = new TestSubscriber<>(); + TestSubscriber wasAppliedSubscriber = new TestSubscriber<>(); + ReactiveResultSetSubscription subscription = + new ReactiveResultSetSubscription<>( + mainSubscriber, colDefsSubscriber, execInfosSubscriber, wasAppliedSubscriber); + mainSubscriber.onSubscribe(subscription); + subscription.start(() -> future1); + future1.complete(page1); + future2.complete(page2); + // emulate backpressure + subscription.request(1); + future3.complete(page3); + subscription.request(Long.MAX_VALUE); + mainSubscriber.awaitTermination(); + assertThat(mainSubscriber.getError()).isNull(); + List expected = new ArrayList<>(page1.currentPage()); + expected.addAll(page3.currentPage()); + 
assertThat(mainSubscriber.getElements()).hasSize(20).extracting("row").isEqualTo(expected); + } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java index 607bf57aac5..143dff3486d 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java @@ -31,16 +31,25 @@ public class TestSubscriber implements Subscriber { private final List elements = new ArrayList<>(); private final CountDownLatch latch = new CountDownLatch(1); + private final long demand; private Subscription subscription; private Throwable error; + public TestSubscriber() { + this.demand = Long.MAX_VALUE; + } + + public TestSubscriber(long demand) { + this.demand = demand; + } + @Override public void onSubscribe(Subscription s) { if (subscription != null) { fail("already subscribed"); } subscription = s; - s.request(Long.MAX_VALUE); + subscription.request(demand); } @Override @@ -71,5 +80,6 @@ public List getElements() { public void awaitTermination() { Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES); + if (latch.getCount() > 0) fail("subscriber not terminated"); } } From 9f4939c58b901a02ca6c435de43757e5c7e4e3ab Mon Sep 17 00:00:00 2001 From: Erik Merkle Date: Wed, 28 Apr 2021 03:27:38 -0500 Subject: [PATCH 682/979] JAVA-2396: Support Protocol V6 (#1546) Co-authored-by: Alexandre Dutra --- changelog/README.md | 3 ++- .../oss/driver/internal/core/context/DefaultDriverContext.java | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 1b58dacb630..87a37b6c4c1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,9 @@ -### 4.12.0 (in progress) +### 4.11.1 +- [bug] JAVA-2936: Support Protocol V6 - [bug] JAVA-2934: Handle empty non-final 
pages in ReactiveResultSetSubscription ### 4.11.0 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 64925699a64..fd217f9c6c8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -91,6 +91,7 @@ import com.datastax.oss.protocol.internal.PrimitiveCodec; import com.datastax.oss.protocol.internal.ProtocolV3ClientCodecs; import com.datastax.oss.protocol.internal.ProtocolV5ClientCodecs; +import com.datastax.oss.protocol.internal.ProtocolV6ClientCodecs; import com.datastax.oss.protocol.internal.SegmentCodec; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -435,6 +436,7 @@ protected FrameCodec buildFrameCodec() { new ProtocolV3ClientCodecs(), new ProtocolV4ClientCodecsForDse(), new ProtocolV5ClientCodecs(), + new ProtocolV6ClientCodecs(), new DseProtocolV1ClientCodecs(), new DseProtocolV2ClientCodecs()); } From 7dca8e49881576019bd04c6005e59b7380910589 Mon Sep 17 00:00:00 2001 From: Kelvin Long Date: Sat, 24 Apr 2021 04:12:20 +0800 Subject: [PATCH 683/979] JAVA-2910: Add a configuration option to support strong values for prepared statements cache Co-authored-by: Alexandre Dutra --- changelog/README.md | 3 +- .../api/core/config/DefaultDriverOption.java | 7 ++ .../api/core/config/TypedDriverOption.java | 3 + .../internal/core/session/PoolManager.java | 11 ++- core/src/main/resources/reference.conf | 12 ++++ .../core/session/PoolManagerTest.java | 68 +++++++++++++++++++ 6 files changed, 101 insertions(+), 3 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/session/PoolManagerTest.java diff --git a/changelog/README.md b/changelog/README.md index 87a37b6c4c1..d64928b9350 100644 --- 
a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,9 @@ -### 4.11.1 +### 4.11.1 (in progress) +- [bug] JAVA-2910: Add a configuration option to support strong values for prepared statements cache - [bug] JAVA-2936: Support Protocol V6 - [bug] JAVA-2934: Handle empty non-final pages in ReactiveResultSetSubscription diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 8a916705b1b..3d2fde238e9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -902,6 +902,13 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: List of {@link java.time.Duration Duration} */ METRICS_NODE_CQL_MESSAGES_SLO("advanced.metrics.node.cql-messages.slo"), + + /** + * Whether the prepared statements cache use weak values. + * + *

          Value-type: boolean + */ + PREPARED_CACHE_WEAK_VALUES("advanced.prepared-statements.prepared-cache.weak-values"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 25d6ac97136..044a7b71de6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -521,6 +521,9 @@ public String toString() { /** The request timeout when repreparing. */ public static final TypedDriverOption REPREPARE_TIMEOUT = new TypedDriverOption<>(DefaultDriverOption.REPREPARE_TIMEOUT, GenericType.DURATION); + /** Whether the prepared statements cache use weak values. */ + public static final TypedDriverOption PREPARED_CACHE_WEAK_VALUES = + new TypedDriverOption<>(DefaultDriverOption.PREPARED_CACHE_WEAK_VALUES, GenericType.BOOLEAN); /** The number of threads in the I/O group. */ public static final TypedDriverOption NETTY_IO_SIZE = new TypedDriverOption<>(DefaultDriverOption.NETTY_IO_SIZE, GenericType.INTEGER); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java index 68f3519cf52..7580553b0c7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/PoolManager.java @@ -82,8 +82,7 @@ public class PoolManager implements AsyncAutoCloseable { // (e.g. DefaultPreparedStatement) which are handled at the protocol level (e.g. // CqlPrepareAsyncProcessor). We keep the two separate to avoid introducing a dependency from the // session to a particular processor implementation. 
- private final ConcurrentMap repreparePayloads = - new MapMaker().weakValues().makeMap(); + private final ConcurrentMap repreparePayloads; private final String logPrefix; private final EventExecutor adminExecutor; @@ -95,6 +94,14 @@ public PoolManager(InternalDriverContext context) { this.adminExecutor = context.getNettyOptions().adminEventExecutorGroup().next(); this.config = context.getConfig().getDefaultProfile(); this.singleThreaded = new SingleThreaded(context); + + if (config.getBoolean(DefaultDriverOption.PREPARED_CACHE_WEAK_VALUES, true)) { + LOG.debug("[{}] Prepared statements cache configured to use weak values", logPrefix); + this.repreparePayloads = new MapMaker().weakValues().makeMap(); + } else { + LOG.debug("[{}] Prepared statements cache configured to use strong values", logPrefix); + this.repreparePayloads = new MapMaker().makeMap(); + } } public CompletionStage init(CqlIdentifier keyspace) { diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 0d56febf841..ab5a33a028d 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -2161,6 +2161,18 @@ datastax-java-driver { # Overridable in a profile: no timeout = ${datastax-java-driver.advanced.connection.init-query-timeout} } + + # How to build the cache of prepared statements. + prepared-cache { + # Whether to use weak references for the prepared statements cache values. + # + # If this option is absent, weak references will be used. + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + // weak-values = true + } } # Options related to the Netty event loop groups used internally by the driver. 
diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/PoolManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/PoolManagerTest.java new file mode 100644 index 00000000000..f99824d9a24 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/PoolManagerTest.java @@ -0,0 +1,68 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.session; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfig; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.EventBus; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.context.NettyOptions; +import io.netty.channel.DefaultEventLoopGroup; +import java.util.concurrent.ConcurrentHashMap; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; + +public class PoolManagerTest { + @Mock private InternalDriverContext context; + @Mock private NettyOptions nettyOptions; + @Mock private DriverConfig config; + @Mock private DriverExecutionProfile defaultProfile; + + @Before + public void setup() 
{ + MockitoAnnotations.initMocks(this); + + DefaultEventLoopGroup adminEventLoopGroup = new DefaultEventLoopGroup(1); + when(nettyOptions.adminEventExecutorGroup()).thenReturn(adminEventLoopGroup); + when(context.getNettyOptions()).thenReturn(nettyOptions); + when(context.getEventBus()).thenReturn(new EventBus("test")); + when(config.getDefaultProfile()).thenReturn(defaultProfile); + when(context.getConfig()).thenReturn(config); + } + + @Test + public void should_use_weak_values_if_config_is_true_or_undefined() { + when(defaultProfile.getBoolean(DefaultDriverOption.PREPARED_CACHE_WEAK_VALUES, true)) + .thenReturn(true); + // As weak values map class is MapMakerInternalMap + assertThat(new PoolManager(context).getRepreparePayloads()) + .isNotInstanceOf(ConcurrentHashMap.class); + } + + @Test + public void should_not_use_weak_values_if_config_is_false() { + when(defaultProfile.getBoolean(DefaultDriverOption.PREPARED_CACHE_WEAK_VALUES, true)) + .thenReturn(false); + assertThat(new PoolManager(context).getRepreparePayloads()) + .isInstanceOf(ConcurrentHashMap.class); + } +} From 5f7b7fa389f97652ab8b5dc283d3ec46b734d8d8 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Apr 2021 11:21:50 +0200 Subject: [PATCH 684/979] Fix ProtocolVersionInitialNegotiationIT for C* 4.0-rc1 --- .../ProtocolVersionInitialNegotiationIT.java | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index 366e9fe5669..8ba8986b35b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -66,8 +66,8 @@ public void should_downgrade_to_v3_dse() { @CassandraRequirement( min = "2.2", - max = 
"4.0", - description = "Only C* in [2.2,4.0[ has V4 as its highest version") + max = "4.0-rc1", + description = "Only C* in [2.2,4.0-rc1[ has V4 as its highest version") @Test public void should_downgrade_to_v4_oss() { Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); @@ -77,6 +77,18 @@ public void should_downgrade_to_v4_oss() { } } + @CassandraRequirement( + min = "4.0-rc1", + description = "Only C* in [4.0-rc1,*[ has V5 as its highest version") + @Test + public void should_downgrade_to_v5_oss() { + Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + try (CqlSession session = SessionUtils.newSession(ccm)) { + assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(5); + session.execute("select * from system.local"); + } + } + @DseRequirement( min = "5.0", max = "5.1", @@ -142,8 +154,8 @@ public void should_fail_if_provided_v4_is_not_supported_dse() { @CassandraRequirement( min = "2.1", - max = "4.0", - description = "Only C* in [2.1,4.0[ has V5 unsupported or supported as beta") + max = "4.0-rc1", + description = "Only C* in [2.1,4.0-rc1[ has V5 unsupported or supported as beta") @Test public void should_fail_if_provided_v5_is_not_supported_oss() { Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); From a3c8f45cd7e6739079bf0ed620ca6f5781f94a33 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Apr 2021 11:28:23 +0200 Subject: [PATCH 685/979] Prepare changelog for 4.11.1 release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index d64928b9350..503c5e4f5f2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.11.1 (in progress) +### 4.11.1 - [bug] JAVA-2910: Add a configuration option to support strong values for prepared statements cache - [bug] JAVA-2936: Support Protocol V6 From f0c77ef67b23a8e99f318fb167bc568189fad054 Mon 
Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Apr 2021 11:41:03 +0200 Subject: [PATCH 686/979] [maven-release-plugin] prepare release 4.11.1 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f7cf0094d7a..b3d42573a99 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-core-shaded - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-mapper-processor - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-mapper-runtime - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-query-builder - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-test-infra - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-metrics-micrometer - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss java-driver-metrics-microprofile - 4.11.1-SNAPSHOT + 4.11.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index c220da6dc2c..7ae90273abd 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 221b7966a02..c74d2bda296 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-core bundle diff --git a/distribution/pom.xml 
b/distribution/pom.xml index 8e67f2862c6..ef08210816d 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 5e4abb3347b..f89fcffa4b3 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.1-SNAPSHOT + 4.11.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 677e1421c3c..8864d98b0a9 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index da37e82a52f..65df4b6d6d9 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 476c480862c..1f82eb873ed 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 683b67dd2ca..86b41e3db61 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index e21a2379025..9c5111c4c1c 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 ../../ 
java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1d4412be248..3fb14bc1d95 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 248b4ab5cfb..02d57ec0979 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.11.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index cd5ae434f65..4e4d96bc8c9 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 034d2c35049..9cf35c4f38a 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1-SNAPSHOT + 4.11.1 java-driver-test-infra bundle From 6c51faab0fce702c250cf7bbc652484fdfc16f6e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 28 Apr 2021 11:41:13 +0200 Subject: [PATCH 687/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- 
query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index b3d42573a99..970e0cbcd26 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.11.1 + 4.11.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 7ae90273abd..7e204b6726c 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c74d2bda296..44aeda012e4 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index ef08210816d..73891ec302b 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index f89fcffa4b3..08ee8f717e0 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.1 + 4.11.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - 
examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8864d98b0a9..87c80951e14 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 65df4b6d6d9..99f35eb1986 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 1f82eb873ed..f8e8bcd6c06 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 86b41e3db61..d5072187884 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 9c5111c4c1c..169d04622e7 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 3fb14bc1d95..58820c4ba3d 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 02d57ec0979..a306b90a655 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver 
for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.11.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 4e4d96bc8c9..8eac5a3ffbd 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 9cf35c4f38a..5cf30a2048b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.1 + 4.11.2-SNAPSHOT java-driver-test-infra bundle From 990c7c6ef5d18ed41c2d91e2fc6797dca5394e70 Mon Sep 17 00:00:00 2001 From: Francisco Bento da Silva Neto Date: Thu, 29 Apr 2021 14:58:32 -0300 Subject: [PATCH 688/979] JAVA-2938: OverloadedException message is misleading --- changelog/README.md | 4 ++++ .../driver/api/core/servererrors/OverloadedException.java | 8 ++++++-- .../oss/driver/internal/core/cql/Conversions.java | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 503c5e4f5f2..5e3c2819a51 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.11.2 (in progress) + +- [bug] JAVA-2938: OverloadedException message is misleading + ### 4.11.1 - [bug] JAVA-2910: Add a configuration option to support strong values for prepared statements cache diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/OverloadedException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/OverloadedException.java index 4b7b4bb6d9a..300f54a35d5 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/OverloadedException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/OverloadedException.java @@ -27,7 +27,7 @@ /** * Thrown when the coordinator reported itself as being overloaded. * - *

          This exception is processed by {@link RetryPolicy#onErrorResponse(Request, + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the * request should be retried. If all other tried nodes also fail, this exception will appear in the * {@link AllNodesFailedException} thrown to the client. @@ -35,7 +35,11 @@ public class OverloadedException extends QueryExecutionException { public OverloadedException(@NonNull Node coordinator) { - super(coordinator, String.format("%s is bootstrapping", coordinator), null, false); + super(coordinator, String.format("%s is overloaded", coordinator), null, false); + } + + public OverloadedException(@NonNull Node coordinator, @NonNull String message) { + super(coordinator, String.format("%s is overloaded: %s", coordinator, message), null, false); } private OverloadedException( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 242bf673a7a..1031ca01bd2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -462,7 +462,7 @@ public static CoordinatorException toThrowable( unavailable.required, unavailable.alive); case ProtocolConstants.ErrorCode.OVERLOADED: - return new OverloadedException(node); + return new OverloadedException(node, errorMessage.message); case ProtocolConstants.ErrorCode.IS_BOOTSTRAPPING: return new BootstrappingException(node); case ProtocolConstants.ErrorCode.TRUNCATE_ERROR: From 4d29623df9d3d71a382000bf922daa028914e530 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 May 2021 15:59:51 +0200 Subject: [PATCH 689/979] Protect against misbehaving listeners during session init --- .../internal/core/session/DefaultSession.java | 33 +++++++++++++++++-- 1 file changed, 30 insertions(+), 3 deletions(-) diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index b12c287ca60..b134ff027b5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -484,9 +484,36 @@ private void notifyListeners() { t); } } - context.getNodeStateListener().onSessionReady(DefaultSession.this); - schemaListenerNotifier.onSessionReady(DefaultSession.this); - context.getRequestTracker().onSessionReady(DefaultSession.this); + try { + context.getNodeStateListener().onSessionReady(DefaultSession.this); + } catch (Throwable t) { + Loggers.warnWithException( + LOG, + "[{}] Error while notifying {} of session ready", + logPrefix, + context.getNodeStateListener(), + t); + } + try { + schemaListenerNotifier.onSessionReady(DefaultSession.this); + } catch (Throwable t) { + Loggers.warnWithException( + LOG, + "[{}] Error while notifying {} of session ready", + logPrefix, + schemaListenerNotifier, + t); + } + try { + context.getRequestTracker().onSessionReady(DefaultSession.this); + } catch (Throwable t) { + Loggers.warnWithException( + LOG, + "[{}] Error while notifying {} of session ready", + logPrefix, + context.getRequestTracker(), + t); + } } private void onNodeStateChanged(NodeStateEvent event) { From feabdf70ac16bee38fbb1a35e99a4c12197f2768 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 May 2021 15:59:11 +0200 Subject: [PATCH 690/979] JAVA-2943: Prevent session leak with wrong keyspace name --- changelog/README.md | 1 + .../internal/core/session/DefaultSession.java | 81 ++++++++++--------- .../oss/driver/core/SessionLeakIT.java | 28 +++++++ 3 files changed, 72 insertions(+), 38 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 5e3c2819a51..b8f7bddc996 100644 --- a/changelog/README.md +++ b/changelog/README.md 
@@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2943: Prevent session leak with wrong keyspace name - [bug] JAVA-2938: OverloadedException message is misleading ### 4.11.1 diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index b134ff027b5..9c04f98466e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -384,16 +384,30 @@ private void init(CqlIdentifier keyspace) { .getTopologyMonitor() .init() .thenCompose(v -> metadataManager.refreshNodes()) - .thenAccept(v -> afterInitialNodeListRefresh(keyspace)) - .exceptionally( - error -> { - initFuture.completeExceptionally(error); - RunOrSchedule.on(adminExecutor, this::close); - return null; + .thenCompose(v -> checkProtocolVersion()) + .thenCompose(v -> initialSchemaRefresh()) + .thenCompose(v -> initializePools(keyspace)) + .whenComplete( + (v, error) -> { + if (error == null) { + LOG.debug("[{}] Initialization complete, ready", logPrefix); + notifyListeners(); + initFuture.complete(DefaultSession.this); + } else { + LOG.debug("[{}] Initialization failed, force closing", logPrefix, error); + forceCloseAsync() + .whenComplete( + (v1, error1) -> { + if (error1 != null) { + error.addSuppressed(error1); + } + initFuture.completeExceptionally(error); + }); + } }); } - private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { + private CompletionStage checkProtocolVersion() { try { boolean protocolWasForced = context.getConfig().getDefaultProfile().isDefined(DefaultDriverOption.PROTOCOL_VERSION); @@ -426,48 +440,39 @@ private void afterInitialNodeListRefresh(CqlIdentifier keyspace) { bestVersion); } } - metadataManager + return CompletableFuture.completedFuture(null); + } catch (Throwable throwable) { + return 
CompletableFutures.failedFuture(throwable); + } + } + + private CompletionStage initialSchemaRefresh() { + try { + return metadataManager .refreshSchema(null, false, true) - .whenComplete( - (metadata, error) -> { - if (error != null) { - Loggers.warnWithException( - LOG, - "[{}] Unexpected error while refreshing schema during initialization, " - + "keeping previous version", - logPrefix, - error); - } - afterInitialSchemaRefresh(keyspace); + .exceptionally( + error -> { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while refreshing schema during initialization, " + + "proceeding without schema metadata", + logPrefix, + error); + return null; }); } catch (Throwable throwable) { - initFuture.completeExceptionally(throwable); + return CompletableFutures.failedFuture(throwable); } } - private void afterInitialSchemaRefresh(CqlIdentifier keyspace) { + private CompletionStage initializePools(CqlIdentifier keyspace) { try { nodeStateManager.markInitialized(); context.getLoadBalancingPolicyWrapper().init(); context.getConfigLoader().onDriverInit(context); - LOG.debug("[{}] Initialization complete, ready", logPrefix); - poolManager - .init(keyspace) - .whenComplete( - (v, error) -> { - if (error != null) { - initFuture.completeExceptionally(error); - } else { - notifyListeners(); - initFuture.complete(DefaultSession.this); - } - }); + return poolManager.init(keyspace); } catch (Throwable throwable) { - forceCloseAsync() - .whenComplete( - (v, error) -> { - initFuture.completeExceptionally(throwable); - }); + return CompletableFutures.failedFuture(throwable); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java index ef8bf329174..fe70cebbc96 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/SessionLeakIT.java @@ -16,6 +16,7 @@ 
package com.datastax.oss.driver.core; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Fail.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; @@ -25,7 +26,9 @@ import ch.qos.logback.classic.Logger; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.InvalidKeyspaceException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.session.Session; @@ -34,6 +37,7 @@ import com.datastax.oss.driver.categories.IsolatedTests; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; import java.util.HashSet; import java.util.Set; import org.junit.Before; @@ -103,4 +107,28 @@ public void should_warn_when_session_count_exceeds_threshold() { verify(appender, never()).doAppend(any()); session.close(); } + + @Test + public void should_never_warn_when_session_init_fails() { + SIMULACRON_RULE + .cluster() + .prime(PrimeDsl.when("USE \"non_existent_keyspace\"").then(PrimeDsl.invalid("irrelevant"))); + int threshold = 4; + // Set the config option explicitly, in case it gets overridden in the test application.conf: + DriverConfigLoader configLoader = + DriverConfigLoader.programmaticBuilder() + .withInt(DefaultDriverOption.SESSION_LEAK_THRESHOLD, threshold) + .build(); + // Go over the threshold, no warnings expected + for (int i = 0; i < threshold + 1; i++) { + try (Session session = + SessionUtils.newSession( + SIMULACRON_RULE, CqlIdentifier.fromCql("non_existent_keyspace"), configLoader)) { + fail("Session %s should 
have failed to initialize", session.getName()); + } catch (InvalidKeyspaceException e) { + assertThat(e.getMessage()).isEqualTo("Invalid keyspace non_existent_keyspace"); + } + } + verify(appender, never()).doAppend(any()); + } } From e2f46f4820e515551fc7d7f06517a31ebe420a89 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 May 2021 15:50:52 +0200 Subject: [PATCH 691/979] JAVA-2941: Cannot add a single static column with the alter table API (#1549) --- changelog/README.md | 1 + .../schema/DefaultAlterTable.java | 54 +++++++++---------- .../querybuilder/schema/AlterTableTest.java | 6 +++ 3 files changed, 34 insertions(+), 27 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index b8f7bddc996..ab4d09914c1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2941: Cannot add a single static column with the alter table API - [bug] JAVA-2943: Prevent session leak with wrong keyspace name - [bug] JAVA-2938: OverloadedException message is misleading diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/DefaultAlterTable.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/DefaultAlterTable.java index d575ced177b..ae2171373c6 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/DefaultAlterTable.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/DefaultAlterTable.java @@ -46,8 +46,8 @@ public class DefaultAlterTable private final CqlIdentifier keyspace; private final CqlIdentifier tableName; - private final ImmutableMap columnsToAddInOrder; - private final ImmutableSet columnsToAdd; + private final ImmutableMap allColumnsToAddInOrder; + private final ImmutableSet columnsToAddRegular; private final ImmutableSet columnsToAddStatic; private final ImmutableSet columnsToDrop; private final ImmutableMap columnsToRename; @@ -79,8 +79,8 
@@ public DefaultAlterTable( @Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier tableName, boolean dropCompactStorage, - @NonNull ImmutableMap columnsToAddInOrder, - @NonNull ImmutableSet columnsToAdd, + @NonNull ImmutableMap allColumnsToAddInOrder, + @NonNull ImmutableSet columnsToAddRegular, @NonNull ImmutableSet columnsToAddStatic, @NonNull ImmutableSet columnsToDrop, @NonNull ImmutableMap columnsToRename, @@ -90,8 +90,8 @@ public DefaultAlterTable( this.keyspace = keyspace; this.tableName = tableName; this.dropCompactStorage = dropCompactStorage; - this.columnsToAddInOrder = columnsToAddInOrder; - this.columnsToAdd = columnsToAdd; + this.allColumnsToAddInOrder = allColumnsToAddInOrder; + this.columnsToAddRegular = columnsToAddRegular; this.columnsToAddStatic = columnsToAddStatic; this.columnsToDrop = columnsToDrop; this.columnsToRename = columnsToRename; @@ -108,8 +108,8 @@ public AlterTableAddColumnEnd addColumn( keyspace, tableName, dropCompactStorage, - ImmutableCollections.append(columnsToAddInOrder, columnName, dataType), - appendSet(columnsToAdd, columnName), + ImmutableCollections.append(allColumnsToAddInOrder, columnName, dataType), + appendSet(columnsToAddRegular, columnName), columnsToAddStatic, columnsToDrop, columnsToRename, @@ -126,8 +126,8 @@ public AlterTableAddColumnEnd addStaticColumn( keyspace, tableName, dropCompactStorage, - ImmutableCollections.append(columnsToAddInOrder, columnName, dataType), - columnsToAdd, + ImmutableCollections.append(allColumnsToAddInOrder, columnName, dataType), + columnsToAddRegular, appendSet(columnsToAddStatic, columnName), columnsToDrop, columnsToRename, @@ -143,8 +143,8 @@ public BuildableQuery dropCompactStorage() { keyspace, tableName, true, - columnsToAddInOrder, - columnsToAdd, + allColumnsToAddInOrder, + columnsToAddRegular, columnsToAddStatic, columnsToDrop, columnsToRename, @@ -166,8 +166,8 @@ public AlterTableDropColumnEnd dropColumns(@NonNull CqlIdentifier... 
columnNames keyspace, tableName, dropCompactStorage, - columnsToAddInOrder, - columnsToAdd, + allColumnsToAddInOrder, + columnsToAddRegular, columnsToAddStatic, builder.build(), columnsToRename, @@ -184,8 +184,8 @@ public AlterTableRenameColumnEnd renameColumn( keyspace, tableName, dropCompactStorage, - columnsToAddInOrder, - columnsToAdd, + allColumnsToAddInOrder, + columnsToAddRegular, columnsToAddStatic, columnsToDrop, ImmutableCollections.append(columnsToRename, from, to), @@ -201,8 +201,8 @@ public BuildableQuery alterColumn(@NonNull CqlIdentifier columnName, @NonNull Da keyspace, tableName, dropCompactStorage, - columnsToAddInOrder, - columnsToAdd, + allColumnsToAddInOrder, + columnsToAddRegular, columnsToAddStatic, columnsToDrop, columnsToRename, @@ -218,8 +218,8 @@ public AlterTableWithOptionsEnd withOption(@NonNull String name, @NonNull Object keyspace, tableName, dropCompactStorage, - columnsToAddInOrder, - columnsToAdd, + allColumnsToAddInOrder, + columnsToAddRegular, columnsToAddStatic, columnsToDrop, columnsToRename, @@ -242,13 +242,13 @@ public String asCql() { .append(" TYPE ") .append(columnToAlterType.asCql(true, true)) .toString(); - } else if (!columnsToAdd.isEmpty()) { + } else if (!allColumnsToAddInOrder.isEmpty()) { builder.append(" ADD "); - if (columnsToAdd.size() > 1) { + if (allColumnsToAddInOrder.size() > 1) { builder.append('('); } boolean first = true; - for (Map.Entry column : columnsToAddInOrder.entrySet()) { + for (Map.Entry column : allColumnsToAddInOrder.entrySet()) { if (first) { first = false; } else { @@ -263,7 +263,7 @@ public String asCql() { builder.append(" STATIC"); } } - if (columnsToAdd.size() > 1) { + if (allColumnsToAddInOrder.size() > 1) { builder.append(')'); } return builder.toString(); @@ -324,13 +324,13 @@ public CqlIdentifier getTable() { } @NonNull - public ImmutableMap getColumnsToAddInOrder() { - return columnsToAddInOrder; + public ImmutableMap getAllColumnsToAddInOrder() { + return allColumnsToAddInOrder; } 
@NonNull public ImmutableSet getColumnsToAddRegular() { - return columnsToAdd; + return columnsToAddRegular; } @NonNull diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java index 16db985ba9c..8a353095ead 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java @@ -40,6 +40,12 @@ public void should_generate_alter_table_with_add_single_column() { .hasCql("ALTER TABLE foo.bar ADD x text"); } + @Test + public void should_generate_alter_table_with_add_single_column_static() { + assertThat(alterTable("foo", "bar").addStaticColumn("x", DataTypes.TEXT)) + .hasCql("ALTER TABLE foo.bar ADD x text STATIC"); + } + @Test public void should_generate_alter_table_with_add_three_columns() { assertThat( From c6b6b7365cb27317bc85da8bc71e2ffda029bdfb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 May 2021 18:34:38 +0200 Subject: [PATCH 692/979] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective --- changelog/README.md | 1 + .../internal/core/graph/GraphConversions.java | 8 +++-- .../core/graph/GraphRequestHandlerTest.java | 32 +++++++++++++++++-- 3 files changed, 36 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index ab4d09914c1..8c50e896ce2 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective - [bug] JAVA-2941: Cannot add a single static column with the alter table API - [bug] JAVA-2943: Prevent session leak with wrong keyspace name - [bug] JAVA-2938: OverloadedException message is misleading diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java index 53f8e98b0ee..e7b5af29ecd 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphConversions.java @@ -179,9 +179,13 @@ static Message createMessageFromGraphStatement( } } + ConsistencyLevel consistency = statement.getConsistencyLevel(); int consistencyLevel = - DefaultConsistencyLevel.valueOf(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) - .getProtocolCode(); + (consistency == null) + ? context + .getConsistencyLevelRegistry() + .nameToCode(config.getString(DefaultDriverOption.REQUEST_CONSISTENCY)) + : consistency.getProtocolCode(); long timestamp = statement.getTimestamp(); if (timestamp == Statement.NO_DEFAULT_TIMESTAMP) { diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java index aed0eb4ade4..2e27dc79cd3 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandlerTest.java @@ -137,7 +137,7 @@ public void should_create_query_message_from_fluent_statement(GraphProtocol grap throws IOException { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); - GraphTraversal traversalTest = + GraphTraversal traversalTest = DseGraph.g.V().has("person", "name", "marko").has("p1", 1L).has("p2", Uuids.random()); GraphStatement graphStatement = FluentGraphStatement.newInstance(traversalTest); @@ -171,6 +171,7 @@ public void should_create_query_message_from_batch_statement(GraphProtocol graph throws IOException { // initialization GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + 
@SuppressWarnings("rawtypes") List traversalsTest = ImmutableList.of( // randomly testing some complex data types. Complete suite of data types test is in @@ -424,7 +425,7 @@ public void should_return_results_for_statements(GraphProtocol graphProtocol, Ve Mockito.spy(new GraphRequestAsyncProcessor(harness.getContext(), graphSupportChecker)); when(p.getGraphBinaryModule()).thenReturn(module); - GraphStatement graphStatement = + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery").setExecutionProfileName("test-graph"); GraphResultSet grs = new GraphRequestSyncProcessor(p) @@ -487,7 +488,7 @@ public void should_invoke_request_tracker_and_update_metrics( RequestTracker requestTracker = mock(RequestTracker.class); when(harness.getContext().getRequestTracker()).thenReturn(requestTracker); - GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); + GraphStatement graphStatement = ScriptGraphStatement.newInstance("mockQuery"); node1Behavior.setResponseSuccess(defaultDseFrameOf(singleGraphRow(graphProtocol, module))); @@ -549,6 +550,31 @@ public void should_invoke_request_tracker_and_update_metrics( verifyNoMoreInteractions(harness.getSession().getMetricUpdater()); } + @Test + public void should_honor_statement_consistency_level() { + // initialization + GraphRequestHandlerTestHarness harness = GraphRequestHandlerTestHarness.builder().build(); + ScriptGraphStatement graphStatement = + ScriptGraphStatement.builder("mockScript") + .setConsistencyLevel(DefaultConsistencyLevel.THREE) + .build(); + + GraphBinaryModule module = createGraphBinaryModule(harness.getContext()); + + // when + DriverExecutionProfile executionProfile = + Conversions.resolveExecutionProfile(graphStatement, harness.getContext()); + + Message m = + GraphConversions.createMessageFromGraphStatement( + graphStatement, GRAPH_BINARY_1_0, executionProfile, harness.getContext(), module); + + // checks + assertThat(m).isInstanceOf(Query.class); + Query q = ((Query) 
m); + assertThat(q.options.consistency).isEqualTo(DefaultConsistencyLevel.THREE.getProtocolCode()); + } + @DataProvider public static Object[][] dseVersionsWithDefaultGraphProtocol() { // Default GraphSON sub protocol version differs based on DSE version, so test with a version From 761dc9045922f2524e413eadb6f9580944859457 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 18 May 2021 18:35:02 +0200 Subject: [PATCH 693/979] Make field ScriptGraphStatementBuilder.queryParams final --- .../dse/driver/api/core/graph/ScriptGraphStatementBuilder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java index 9a8d0d262eb..768264426ec 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/ScriptGraphStatementBuilder.java @@ -34,7 +34,7 @@ public class ScriptGraphStatementBuilder private String script; private Boolean isSystemQuery; - private Map queryParams; + private final Map queryParams; public ScriptGraphStatementBuilder() { this.queryParams = Maps.newHashMap(); From 0e80c2573726c676296d78bacd4cabd825e47d88 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 May 2021 16:50:50 +0200 Subject: [PATCH 694/979] Fix typo in javadocs of MetricIdGenerator --- .../oss/driver/internal/core/metrics/MetricIdGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java index 7cfd39bf37b..176101034ae 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java @@ -41,7 +41,7 @@ public interface MetricIdGenerator { @NonNull MetricId sessionMetricId(@NonNull SessionMetric metric); - /** Generates a {@link MetricId} for the given {@link Node and }{@link NodeMetric}. */ + /** Generates a {@link MetricId} for the given {@link Node} and {@link NodeMetric}. */ @NonNull MetricId nodeMetricId(@NonNull Node node, @NonNull NodeMetric metric); } From 2e25a49ca2647e8fefdda084edd379e8018d9dcf Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 May 2021 17:00:42 +0200 Subject: [PATCH 695/979] Set version to 4.12.0-SNAPSHOT --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 22 insertions(+), 22 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 970e0cbcd26..f50076bb957 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT 
com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 7e204b6726c..414d305fb2a 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 44aeda012e4..5da7d297d88 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 73891ec302b..cfd4596ddb4 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 08ee8f717e0..cf67d0aa406 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 87c80951e14..d30a6cd9abe 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 99f35eb1986..53f2c874759 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f8e8bcd6c06..1890231c202 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index d5072187884..3f54eeb3959 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 169d04622e7..48782b475c9 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 58820c4ba3d..c88c033d4d6 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index a306b90a655..b6602f484a3 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT pom 
DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8eac5a3ffbd..7fc68474e2c 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 5cf30a2048b..f1000d40618 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.12.0-SNAPSHOT java-driver-test-infra bundle From c5595555dccefb93b7db6854d57af18da9fed1af Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 May 2021 16:50:50 +0200 Subject: [PATCH 696/979] Fix typo in javadocs of MetricIdGenerator --- .../oss/driver/internal/core/metrics/MetricIdGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java index 7cfd39bf37b..176101034ae 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricIdGenerator.java @@ -41,7 +41,7 @@ public interface MetricIdGenerator { @NonNull MetricId sessionMetricId(@NonNull SessionMetric metric); - /** Generates a {@link MetricId} for the given {@link Node and }{@link NodeMetric}. */ + /** Generates a {@link MetricId} for the given {@link Node} and {@link NodeMetric}. 
*/ @NonNull MetricId nodeMetricId(@NonNull Node node, @NonNull NodeMetric metric); } From 52f355039969a6ec1996088aedaad39a2eae465a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 24 May 2021 19:59:37 +0200 Subject: [PATCH 697/979] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader --- changelog/README.md | 1 + .../internal/mapper/DefaultMapperContext.java | 18 ++++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 8c50e896ce2..f08c735c369 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader - [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective - [bug] JAVA-2941: Cannot add a single static column with the alter table API - [bug] JAVA-2943: Prevent session leak with wrong keyspace name diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index 9ba08d8c65d..4db909488e8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -24,12 +24,12 @@ import com.datastax.oss.driver.api.mapper.entity.naming.NameConverter; import com.datastax.oss.driver.api.mapper.result.MapperResultProducer; import com.datastax.oss.driver.api.mapper.result.MapperResultProducerService; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import 
edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.InvocationTargetException; -import java.util.List; import java.util.Map; import java.util.Objects; import java.util.ServiceLoader; @@ -38,9 +38,7 @@ public class DefaultMapperContext implements MapperContext { - private static final List RESULT_PRODUCERS = getResultProducers(); - - private static final ConcurrentMap, MapperResultProducer> RESULT_PRODUCER_CACHE = + private final ConcurrentMap, MapperResultProducer> resultProducerCache = new ConcurrentHashMap<>(); private final CqlSession session; @@ -50,6 +48,7 @@ public class DefaultMapperContext implements MapperContext { private final DriverExecutionProfile executionProfile; private final ConcurrentMap, NameConverter> nameConverterCache; private final Map customState; + private final ImmutableList resultProducers; public DefaultMapperContext( @NonNull CqlSession session, @@ -86,6 +85,8 @@ private DefaultMapperContext( this.customState = customState; this.executionProfileName = executionProfileName; this.executionProfile = executionProfile; + this.resultProducers = + locateResultProducers(((InternalDriverContext) session.getContext()).getClassLoader()); } public DefaultMapperContext withDaoParameters( @@ -154,10 +155,10 @@ public Map getCustomState() { @NonNull @Override public MapperResultProducer getResultProducer(@NonNull GenericType resultToProduce) { - return RESULT_PRODUCER_CACHE.computeIfAbsent( + return resultProducerCache.computeIfAbsent( resultToProduce, k -> { - for (MapperResultProducer resultProducer : RESULT_PRODUCERS) { + for (MapperResultProducer resultProducer : resultProducers) { if (resultProducer.canProduce(k)) { return resultProducer; } @@ -185,10 +186,11 @@ private static NameConverter buildNameConverter(Class c } } - private static List getResultProducers() { + private static ImmutableList locateResultProducers( + ClassLoader classLoader) { ImmutableList.Builder result = ImmutableList.builder(); ServiceLoader loader = - 
ServiceLoader.load(MapperResultProducerService.class); + ServiceLoader.load(MapperResultProducerService.class, classLoader); loader.iterator().forEachRemaining(provider -> result.addAll(provider.getProducers())); return result.build(); } From dfb816e5f7476cd0417ca0741b4326c43b6d832f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 25 May 2021 16:56:36 +0200 Subject: [PATCH 698/979] Add debug logging and exception handling to DefaultMapperContext.locateResultProducers --- .../internal/mapper/DefaultMapperContext.java | 25 +++++++++++++++---- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java index 4db909488e8..92555495098 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DefaultMapperContext.java @@ -32,12 +32,17 @@ import java.lang.reflect.InvocationTargetException; import java.util.Map; import java.util.Objects; +import java.util.ServiceConfigurationError; import java.util.ServiceLoader; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class DefaultMapperContext implements MapperContext { + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultMapperContext.class); + private final ConcurrentMap, MapperResultProducer> resultProducerCache = new ConcurrentHashMap<>(); @@ -188,10 +193,20 @@ private static NameConverter buildNameConverter(Class c private static ImmutableList locateResultProducers( ClassLoader classLoader) { - ImmutableList.Builder result = ImmutableList.builder(); - ServiceLoader loader = - ServiceLoader.load(MapperResultProducerService.class, classLoader); - 
loader.iterator().forEachRemaining(provider -> result.addAll(provider.getProducers())); - return result.build(); + LOGGER.debug( + "Locating result producers with CL = {}, MapperResultProducerService CL = {}", + classLoader, + MapperResultProducerService.class.getClassLoader()); + ImmutableList.Builder builder = ImmutableList.builder(); + try { + ServiceLoader loader = + ServiceLoader.load(MapperResultProducerService.class, classLoader); + loader.iterator().forEachRemaining(provider -> builder.addAll(provider.getProducers())); + } catch (Exception | ServiceConfigurationError e) { + LOGGER.error("Failed to locate result producers", e); + } + ImmutableList producers = builder.build(); + LOGGER.debug("Located {} result producers: {}", producers.size(), producers); + return producers; } } From 58e79503de9a2e8b13464cb38a53c424b6ba2ccf Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 25 May 2021 18:57:55 +0200 Subject: [PATCH 699/979] Disable CloudIT --- .../java/com/datastax/oss/driver/api/core/cloud/CloudIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java index 7fbcd631e86..7874c4719d8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/api/core/cloud/CloudIT.java @@ -51,11 +51,13 @@ import java.util.List; import javax.net.ssl.SSLContext; import org.junit.ClassRule; +import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @Category(IsolatedTests.class) +@Ignore("Disabled because it is causing trouble in Jenkins CI") public class CloudIT { private static final String BUNDLE_URL_PATH = "/certs/bundles/creds.zip"; From d23ddafdcd84145f7cb622b6351bad8b345469ea Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 5 
Jun 2021 12:07:10 +0200 Subject: [PATCH 700/979] Fix failing compression tests for protocol v5 --- .../core/compression/DirectCompressionIT.java | 13 ++++++++++++- .../driver/core/compression/HeapCompressionIT.java | 13 ++++++++++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java index ef2d9803369..4f4ff89db47 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.offset; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; @@ -30,6 +31,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; import java.time.Duration; +import org.junit.Assume; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -69,6 +71,10 @@ public static void setup() { */ @Test public void should_execute_queries_with_snappy_compression() throws Exception { + Assume.assumeTrue( + "Snappy is not supported in OSS C* 4.0+ with protocol v5", + CCM_RULE.getDseVersion().isPresent() + || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); createAndCheckCluster("snappy"); } @@ -112,7 +118,12 @@ private void createAndCheckCluster(String compressorOption) { // We are testing with small responses, so the compressed payload is not even guaranteed to be // smaller. 
assertThat(executionInfo.getResponseSizeInBytes()).isGreaterThan(0); - assertThat(executionInfo.getCompressedResponseSizeInBytes()).isGreaterThan(0); + if (session.getContext().getProtocolVersion().getCode() == 5) { + // in protocol v5, compression is done at segment level + assertThat(executionInfo.getCompressedResponseSizeInBytes()).isEqualTo(-1); + } else { + assertThat(executionInfo.getCompressedResponseSizeInBytes()).isGreaterThan(0); + } } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java index 64b890fd7f6..85692edc481 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assertions.offset; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; @@ -30,6 +31,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; import java.time.Duration; +import org.junit.Assume; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -73,6 +75,10 @@ public static void setup() { */ @Test public void should_execute_queries_with_snappy_compression() throws Exception { + Assume.assumeTrue( + "Snappy is not supported in OSS C* 4.0+ with protocol v5", + CCM_RULE.getDseVersion().isPresent() + || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); createAndCheckCluster("snappy"); } @@ -115,7 +121,12 @@ private void createAndCheckCluster(String 
compressorOption) { // We are testing with small responses, so the compressed payload is not even guaranteed to be // smaller. assertThat(executionInfo.getResponseSizeInBytes()).isGreaterThan(0); - assertThat(executionInfo.getCompressedResponseSizeInBytes()).isGreaterThan(0); + if (session.getContext().getProtocolVersion().getCode() == 5) { + // in protocol v5, compression is done at segment level + assertThat(executionInfo.getCompressedResponseSizeInBytes()).isEqualTo(-1); + } else { + assertThat(executionInfo.getCompressedResponseSizeInBytes()).isGreaterThan(0); + } } } } From ad833714aae298e7a38c65ec3e32100e4961b9c4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 5 Jun 2021 12:07:34 +0200 Subject: [PATCH 701/979] Remove call to deprecated RetryPolicy method --- .../datastax/oss/driver/core/connection/FrameLengthIT.java | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java index c4f39711687..3c42bd8f630 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/FrameLengthIT.java @@ -29,7 +29,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.core.retry.RetryDecision; +import com.datastax.oss.driver.api.core.retry.RetryVerdict; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.testinfra.loadbalancing.SortingLoadBalancingPolicy; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -122,10 +122,9 @@ public AlwaysRetryAbortedPolicy(DriverContext context, String profileName) { } @Override - @Deprecated - 
public RetryDecision onRequestAborted( + public RetryVerdict onRequestAbortedVerdict( @NonNull Request request, @NonNull Throwable error, int retryCount) { - return RetryDecision.RETRY_NEXT; + return RetryVerdict.RETRY_NEXT; } } } From edb2dc277b07887200374301b637f9a31ac0346e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 5 Jun 2021 12:52:41 +0200 Subject: [PATCH 702/979] Fix failing test for C* 4.0 --- .../oss/driver/core/cql/QueryTraceIT.java | 32 +++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index 567a1263310..8c12e7154fc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.QueryTrace; +import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; @@ -86,7 +87,20 @@ public void should_fetch_trace_when_tracing_enabled() { assertThat(queryTrace.getRequestType()).isEqualTo("Execute CQL3 query"); assertThat(queryTrace.getDurationMicros()).isPositive(); assertThat(queryTrace.getCoordinatorAddress().getAddress()).isEqualTo(nodeAddress); - assertThat(queryTrace.getCoordinatorAddress().getPort()).isEqualTo(expectPorts ? 
7000 : 0); + if (expectPorts) { + Row row = + SESSION_RULE + .session() + .execute( + "SELECT coordinator_port FROM system_traces.sessions WHERE session_id = " + + queryTrace.getTracingId()) + .one(); + assertThat(row).isNotNull(); + int expectedPort = row.getInt(0); + assertThat(queryTrace.getCoordinatorAddress().getPort()).isEqualTo(expectedPort); + } else { + assertThat(queryTrace.getCoordinatorAddress().getPort()).isEqualTo(0); + } assertThat(queryTrace.getParameters()) .containsEntry("consistency_level", "LOCAL_ONE") .containsEntry("page_size", "5000") @@ -98,6 +112,20 @@ public void should_fetch_trace_when_tracing_enabled() { InetSocketAddress sourceAddress0 = queryTrace.getEvents().get(0).getSourceAddress(); assertThat(sourceAddress0).isNotNull(); assertThat(sourceAddress0.getAddress()).isEqualTo(nodeAddress); - assertThat(sourceAddress0.getPort()).isEqualTo(expectPorts ? 7000 : 0); + if (expectPorts) { + Row row = + SESSION_RULE + .session() + .execute( + "SELECT source_port FROM system_traces.events WHERE session_id = " + + queryTrace.getTracingId() + + " LIMIT 1") + .one(); + assertThat(row).isNotNull(); + int expectedPort = row.getInt(0); + assertThat(sourceAddress0.getPort()).isEqualTo(expectedPort); + } else { + assertThat(sourceAddress0.getPort()).isEqualTo(0); + } } } From 464d3d23ee0fef8285d48a7b9865f7d076e12842 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 5 Jun 2021 13:53:18 +0200 Subject: [PATCH 703/979] Fix failing test for C* 4.0 --- .../com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java index f524de74fad..9794cf27435 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java @@ -16,6 +16,7 @@ 
package com.datastax.oss.driver.internal.osgi; import com.datastax.oss.driver.api.osgi.service.MailboxService; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; import com.datastax.oss.driver.internal.osgi.support.BundleOptions; import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; @@ -30,6 +31,7 @@ @RunWith(CcmPaxExam.class) @ExamReactorStrategy(CcmExamReactorFactory.class) +@CassandraRequirement(max = "3.99") public class OsgiSnappyIT { @Inject MailboxService service; From b87c6f5afa15965de8b257758ff5e788bc46d9c1 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 26 May 2021 13:46:39 +0200 Subject: [PATCH 704/979] Fix unreliable TTL tests --- .../java/com/datastax/oss/driver/mapper/UpdateIT.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index 00df838b2a8..dd6993ee40d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -158,6 +158,7 @@ public void should_update_entity_with_timestamp() { "SELECT WRITETIME(description) FROM product WHERE id = ?", FLAMETHROWER.getId())) .one(); + assertThat(row).isNotNull(); long writeTime = row.getLong(0); assertThat(writeTime).isEqualTo(timestamp); } @@ -176,6 +177,7 @@ public void should_update_entity_with_timestamp_literal() { "SELECT WRITETIME(description) FROM product WHERE id = ?", FLAMETHROWER.getId())) .one(); + assertThat(row).isNotNull(); long writeTime = row.getLong(0); assertThat(writeTime).isEqualTo(1000L); } @@ -194,8 +196,9 @@ public void should_update_entity_with_ttl() { SimpleStatement.newInstance( "SELECT TTL(description) FROM product WHERE id = ?", FLAMETHROWER.getId())) .one(); + 
assertThat(row).isNotNull(); int writeTime = row.getInt(0); - assertThat(writeTime).isEqualTo(ttl); + assertThat(writeTime).isBetween(ttl - 10, ttl); } @Test @@ -211,8 +214,9 @@ public void should_update_entity_with_ttl_literal() { SimpleStatement.newInstance( "SELECT TTL(description) FROM product WHERE id = ?", FLAMETHROWER.getId())) .one(); + assertThat(row).isNotNull(); int writeTime = row.getInt(0); - assertThat(writeTime).isEqualTo(1000); + assertThat(writeTime).isBetween(990, 1000); } @Test @@ -231,6 +235,7 @@ public void should_update_entity_with_timestamp_asynchronously() { "SELECT WRITETIME(description) FROM product WHERE id = ?", FLAMETHROWER.getId())) .one(); + assertThat(row).isNotNull(); long writeTime = row.getLong(0); assertThat(writeTime).isEqualTo(timestamp); } From 9a34929bc2306eeb4e8727d9ff934cb81c61ab27 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 26 May 2021 15:00:37 +0200 Subject: [PATCH 705/979] Remove DSE 6.8.0 from list of available CCM backends --- Jenkinsfile | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 95eb2aa20b0..35d31bfcb4a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -220,8 +220,7 @@ pipeline { 'dse-5.1', // Legacy DataStax Enterprise 'dse-6.0', // Previous DataStax Enterprise 'dse-6.7', // Previous DataStax Enterprise - 'dse-6.8.0', // Current DataStax Enterprise - 'dse-6.8', // Development DataStax Enterprise + 'dse-6.8', // Current DataStax Enterprise 'ALL'], description: '''Apache Cassandra® and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS builds

          Driver versionTinkerPop version
          4.11.03.4.10
          4.10.03.4.9
          4.9.03.4.8
          4.8.03.4.5
          @@ -271,13 +270,9 @@ pipeline { - - - - - +
          dse-6.7 DataStax Enterprise v6.7.x
          dse-6.8.0DataStax Enterprise v6.8.0
          dse-6.8DataStax Enterprise v6.8.x (CURRENTLY UNDER DEVELOPMENT)DataStax Enterprise v6.8.x
          ''') choice( @@ -403,7 +398,7 @@ pipeline { name 'SERVER_VERSION' values '3.11', // Latest stable Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'dse-6.8.0' // Current DataStax Enterprise + 'dse-6.8' // Current DataStax Enterprise } } @@ -521,8 +516,7 @@ pipeline { 'dse-5.1', // Legacy DataStax Enterprise 'dse-6.0', // Previous DataStax Enterprise 'dse-6.7', // Previous DataStax Enterprise - 'dse-6.8.0', // Current DataStax Enterprise - 'dse-6.8' // Development DataStax Enterprise + 'dse-6.8' // Current DataStax Enterprise } } when { From 1d5b12ec5d3c3f7ccb163ff04f38450bb097f290 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Wed, 26 May 2021 15:17:26 +0200 Subject: [PATCH 706/979] Simplify nightly and weekend jobs --- Jenkinsfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 35d31bfcb4a..f922d9f7c5f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -358,11 +358,11 @@ pipeline { # Every weeknight (Monday - Friday) around 2:00 AM ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 - ### JDK11 tests against 3.11, 4.0, DSE 6.7 and DSE 6.8.0 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.7 dse-6.8.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + ### JDK11 tests against 3.11, 4.0 and DSE 6.8 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 # Every weekend (Sunday) around 12:00 PM noon - ### JDK14 tests against 3.11, 4.0, DSE 6.7, DSE 6.8.0 and DSE 6.8.X - H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.7 dse-6.8.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 + ### JDK14 tests against 3.11, 4.0, DSE 6.7 and DSE 6.8 + H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.7 
dse-6.8 ;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 """ : "") } From a54326fb48a4970fe61c98df99baaaab29a25a42 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 7 Jun 2021 11:52:26 +0200 Subject: [PATCH 707/979] Truncate generated Instants at millis --- .../dse/driver/api/core/graph/CoreGraphDataTypeITBase.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java index 333110096a7..5273367ace0 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphDataTypeITBase.java @@ -44,6 +44,7 @@ import java.time.LocalDate; import java.time.LocalTime; import java.time.ZoneId; +import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.Map; import org.junit.Test; @@ -83,7 +84,7 @@ public void should_create_and_retrieve_correct_data_with_types() { .put("Text", "test") .put("Time", LocalTime.now(ZoneId.systemDefault())) .put("Timeuuid", Uuids.timeBased()) - .put("Timestamp", Instant.now()) + .put("Timestamp", Instant.now().truncatedTo(ChronoUnit.MILLIS)) .put("Uuid", java.util.UUID.randomUUID()) .put("Varint", BigInteger.valueOf(3234)) .put("Blob", ByteBuffer.wrap(new byte[] {1, 2, 3})) From 67eb54e06e7089adf0e7d089699ca4fcd788fd53 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 7 Jun 2021 11:52:40 +0200 Subject: [PATCH 708/979] Fix failing BlockHound tests for Java 13+ --- integration-tests/pom.xml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 87c80951e14..8097819fde0 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -254,6 +254,7 @@ false 
${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml ${skipIsolatedITs} + ${blockhound.argline} @@ -316,4 +317,16 @@ + + + jdk 13+ + + [13,) + + + + -XX:+AllowRedefinitionToAddDeleteMethods + + +
          From f3334e2630a82c69bcdb8b171d631e834154e5e9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 7 Jun 2021 11:53:15 +0200 Subject: [PATCH 709/979] Revisit Nightly and Weekend schedules --- Jenkinsfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index f922d9f7c5f..cc8e0afc7da 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -356,13 +356,13 @@ pipeline { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron.matcher(env.BRANCH_NAME).matches() ? """ # Every weeknight (Monday - Friday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 + ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0, DSE 6.7 and DSE 6.8 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7 dse-6.8;CI_SCHEDULE_JABBA_VERSION=1.8 ### JDK11 tests against 3.11, 4.0 and DSE 6.8 H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 # Every weekend (Sunday) around 12:00 PM noon - ### JDK14 tests against 3.11, 4.0, DSE 6.7 and DSE 6.8 - H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.7 dse-6.8 ;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 + ### JDK14 tests against 3.11, 4.0 and DSE 6.8 + H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 """ : "") } From f082768cf7bcb032bfa83d7a6da4ee21d97c3afb Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:00:36 +0200 Subject: [PATCH 710/979] Remove unused method --- .../com/datastax/oss/driver/api/testinfra/ccm/CcmRule.java | 4 ---- 1 file changed, 4 deletions(-) diff --git 
a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmRule.java index eb12b6969e2..a5169c0aef8 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmRule.java @@ -97,10 +97,6 @@ public void evaluate() { return super.apply(base, description); } - public void reloadCore(int node, String keyspace, String table, boolean reindex) { - ccmBridge.reloadCore(node, keyspace, table, reindex); - } - public static CcmRule getInstance() { return INSTANCE; } From f20d22664bf0e22ac6c80a1847420267e2f7f697 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:00:57 +0200 Subject: [PATCH 711/979] Make field final --- .../oss/driver/api/testinfra/ccm/CustomCcmRule.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java index 1e502238e99..4ea1b3843f3 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java @@ -28,7 +28,7 @@ */ public class CustomCcmRule extends BaseCcmRule { - private static AtomicReference current = new AtomicReference<>(); + private static final AtomicReference CURRENT = new AtomicReference<>(); CustomCcmRule(CcmBridge ccmBridge) { super(ccmBridge); @@ -36,9 +36,9 @@ public class CustomCcmRule extends BaseCcmRule { @Override protected void before() { - if (current.get() == null && current.compareAndSet(null, this)) { + if (CURRENT.get() == null && CURRENT.compareAndSet(null, this)) { super.before(); - } else if (current.get() != this) { + } else if (CURRENT.get() != this) { throw new IllegalStateException( 
"Attempting to use a Ccm rule while another is in use. This is disallowed"); } @@ -47,7 +47,7 @@ protected void before() { @Override protected void after() { super.after(); - current.compareAndSet(this, null); + CURRENT.compareAndSet(this, null); } public CcmBridge getCcmBridge() { From 89a80c14cd08ab5cbc5e83481678c18cadbc1c0f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:07:59 +0200 Subject: [PATCH 712/979] Add a log prefix to Debouncer --- .../core/metadata/MetadataManager.java | 1 + .../core/metadata/NodeStateManager.java | 1 + .../core/util/concurrent/Debouncer.java | 34 ++++++++++++++++--- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 594c37430d4..0f28d354c46 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -314,6 +314,7 @@ private class SingleThreaded { private SingleThreaded(InternalDriverContext context, DriverExecutionProfile config) { this.schemaRefreshDebouncer = new Debouncer<>( + logPrefix + "|metadata debouncer", adminExecutor, this::coalesceSchemaRequests, this::startSchemaRequest, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java index dab95b58a3e..1412168d4f8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java @@ -103,6 +103,7 @@ private SingleThreaded(InternalDriverContext context) { DriverExecutionProfile config = context.getConfig().getDefaultProfile(); this.topologyEventDebouncer = new Debouncer<>( + 
logPrefix + "|topology debouncer", adminExecutor, this::coalesceTopologyEvents, this::flushTopologyEvents, diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/Debouncer.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/Debouncer.java index ded770a3d48..3e9cd4a0085 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/Debouncer.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/Debouncer.java @@ -44,6 +44,7 @@ public class Debouncer { private static final Logger LOG = LoggerFactory.getLogger(Debouncer.class); + private final String logPrefix; private final EventExecutor adminExecutor; private final Consumer onFlush; private final Duration window; @@ -69,6 +70,27 @@ public Debouncer( Consumer onFlush, Duration window, long maxEvents) { + this("debouncer", adminExecutor, coalescer, onFlush, window, maxEvents); + } + + /** + * Creates a new instance. + * + * @param logPrefix the log prefix to use in log messages. + * @param adminExecutor the executor that will be used to schedule all tasks. + * @param coalescer how to transform a batch of events into a result. + * @param onFlush what to do with a result. + * @param window the time window. + * @param maxEvents the maximum number of accumulated events before a flush is forced. 
+ */ + public Debouncer( + String logPrefix, + EventExecutor adminExecutor, + Function, CoalescedT> coalescer, + Consumer onFlush, + Duration window, + long maxEvents) { + this.logPrefix = logPrefix; this.coalescer = coalescer; Preconditions.checkArgument(maxEvents >= 1, "maxEvents should be at least 1"); this.adminExecutor = adminExecutor; @@ -85,7 +107,8 @@ public void receive(IncomingT element) { } if (window.isZero() || maxEvents == 1) { LOG.debug( - "Received {}, flushing immediately (window = {}, maxEvents = {})", + "[{}] Received {}, flushing immediately (window = {}, maxEvents = {})", + logPrefix, element, window, maxEvents); @@ -94,12 +117,13 @@ public void receive(IncomingT element) { currentBatch.add(element); if (currentBatch.size() == maxEvents) { LOG.debug( - "Received {}, flushing immediately (because {} accumulated events)", + "[{}] Received {}, flushing immediately (because {} accumulated events)", + logPrefix, element, maxEvents); flushNow(); } else { - LOG.debug("Received {}, scheduling next flush in {}", element, window); + LOG.debug("[{}] Received {}, scheduling next flush in {}", logPrefix, element, window); scheduleFlush(); } } @@ -107,7 +131,7 @@ public void receive(IncomingT element) { public void flushNow() { assert adminExecutor.inEventLoop(); - LOG.debug("Flushing now"); + LOG.debug("[{}] Flushing now", logPrefix); cancelNextFlush(); if (!currentBatch.isEmpty()) { onFlush.accept(coalescer.apply(currentBatch)); @@ -127,7 +151,7 @@ private void cancelNextFlush() { if (nextFlush != null && !nextFlush.isDone()) { boolean cancelled = nextFlush.cancel(true); if (cancelled) { - LOG.debug("Cancelled existing scheduled flush"); + LOG.debug("[{}] Cancelled existing scheduled flush", logPrefix); } } } From 48f56552d8cb46375b21cafe5c59822b5b9c941f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:27:26 +0200 Subject: [PATCH 713/979] Add exception for the removal of CcmRule::reloadCore --- test-infra/revapi.json | 5 +++++ 1 
file changed, 5 insertions(+) diff --git a/test-infra/revapi.json b/test-infra/revapi.json index cf79d3b87f6..3cfbc8b5337 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -166,6 +166,11 @@ "old": "missing-class com.datastax.oss.simulacron.server.Server", "new": "missing-class com.datastax.oss.simulacron.server.Server", "justification": "Dependency was made optional" + }, + { + "code": "java.method.removed", + "old": "method void com.datastax.oss.driver.api.testinfra.ccm.CcmRule::reloadCore(int, java.lang.String, java.lang.String, boolean)", + "justification": "Modifying the state of a globally shared CCM instance is dangerous" } ] } From cc037c7614b2ae1a59dc66cbb12f232d97db5616 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:36:32 +0200 Subject: [PATCH 714/979] Switch SchemaChangesIT to serial tests --- .../driver/core/metadata/SchemaChangesIT.java | 167 ++++++++---------- 1 file changed, 78 insertions(+), 89 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 77861a5c57f..d45176ca825 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -25,14 +25,12 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import 
com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; -import com.datastax.oss.driver.categories.ParallelizableTests; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.time.Duration; @@ -42,22 +40,19 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; -import org.assertj.core.api.Assertions; import org.junit.Before; -import org.junit.Rule; +import org.junit.ClassRule; import org.junit.Test; -import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@Category(ParallelizableTests.class) public class SchemaChangesIT { - private CcmRule ccmRule = CcmRule.getInstance(); + private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); // A client that we only use to set up the tests - private SessionRule adminSessionRule = - SessionRule.builder(ccmRule) + private static final SessionRule ADMIN_SESSION_RULE = + SessionRule.builder(CCM_RULE) .withConfigLoader( SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) @@ -65,15 +60,16 @@ public class SchemaChangesIT { .build()) .build(); - @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(adminSessionRule); + @ClassRule + public static TestRule chain = RuleChain.outerRule(CCM_RULE).around(ADMIN_SESSION_RULE); @Before public void setup() { // Always drop and re-create the keyspace to start from a clean state - adminSessionRule + ADMIN_SESSION_RULE .session() - .execute(String.format("DROP KEYSPACE %s", adminSessionRule.keyspace())); - SessionUtils.createKeyspace(adminSessionRule.session(), adminSessionRule.keyspace()); + .execute(String.format("DROP KEYSPACE %s", ADMIN_SESSION_RULE.keyspace())); + 
SessionUtils.createKeyspace(ADMIN_SESSION_RULE.session(), ADMIN_SESSION_RULE.keyspace()); } @Test @@ -87,9 +83,9 @@ public void should_handle_keyspace_creation() { newKeyspaceId), metadata -> metadata.getKeyspace(newKeyspaceId), keyspace -> { - Assertions.assertThat(keyspace.getName()).isEqualTo(newKeyspaceId); - Assertions.assertThat(keyspace.isDurableWrites()).isTrue(); - Assertions.assertThat(keyspace.getReplication()) + assertThat(keyspace.getName()).isEqualTo(newKeyspaceId); + assertThat(keyspace.isDurableWrites()).isTrue(); + assertThat(keyspace.getReplication()) .hasSize(2) .containsEntry("class", "org.apache.cassandra.locator.SimpleStrategy") .containsEntry("replication_factor", "1"); @@ -128,7 +124,7 @@ public void should_handle_keyspace_update() { + "AND durable_writes = 'false'", newKeyspaceId.asCql(true)), metadata -> metadata.getKeyspace(newKeyspaceId), - newKeyspace -> Assertions.assertThat(newKeyspace.isDurableWrites()).isFalse(), + newKeyspace -> assertThat(newKeyspace.isDurableWrites()).isFalse(), (listener, oldKeyspace, newKeyspace) -> verify(listener).onKeyspaceUpdated(newKeyspace, oldKeyspace), newKeyspaceId); @@ -141,22 +137,20 @@ public void should_handle_table_creation() { "CREATE TABLE foo(k int primary key)", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .orElseThrow(IllegalStateException::new) .getTable(CqlIdentifier.fromInternal("foo")), table -> { - Assertions.assertThat(table.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - Assertions.assertThat(table.getName().asInternal()).isEqualTo("foo"); - Assertions.assertThat(table.getColumns()) - .containsOnlyKeys(CqlIdentifier.fromInternal("k")); - Assertions.assertThat(table.getColumn(CqlIdentifier.fromInternal("k"))) + assertThat(table.getKeyspace()).isEqualTo(ADMIN_SESSION_RULE.keyspace()); + assertThat(table.getName().asInternal()).isEqualTo("foo"); + 
assertThat(table.getColumns()).containsOnlyKeys(CqlIdentifier.fromInternal("k")); + assertThat(table.getColumn(CqlIdentifier.fromInternal("k"))) .hasValueSatisfying( k -> { - Assertions.assertThat(k.getType()).isEqualTo(DataTypes.INT); - Assertions.assertThat(table.getPartitionKey()) - .containsExactly(k); + assertThat(k.getType()).isEqualTo(DataTypes.INT); + assertThat(table.getPartitionKey()).containsExactly(k); }); - Assertions.assertThat(table.getClusteringColumns()).isEmpty(); + assertThat(table.getClusteringColumns()).isEmpty(); }, (listener, table) -> verify(listener).onTableCreated(table)); } @@ -168,7 +162,7 @@ public void should_handle_table_drop() { "DROP TABLE foo", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getTable(CqlIdentifier.fromInternal("foo"))), (listener, oldTable) -> verify(listener).onTableDropped(oldTable)); } @@ -180,10 +174,9 @@ public void should_handle_table_update() { "ALTER TABLE foo ADD v int", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getTable(CqlIdentifier.fromInternal("foo"))), - newTable -> - Assertions.assertThat(newTable.getColumn(CqlIdentifier.fromInternal("v"))).isPresent(), + newTable -> assertThat(newTable.getColumn(CqlIdentifier.fromInternal("v"))).isPresent(), (listener, oldTable, newTable) -> verify(listener).onTableUpdated(newTable, oldTable)); } @@ -194,14 +187,13 @@ public void should_handle_type_creation() { "CREATE TYPE t(i int)", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("t"))), type -> { - Assertions.assertThat(type.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - Assertions.assertThat(type.getName().asInternal()).isEqualTo("t"); - Assertions.assertThat(type.getFieldNames()) - 
.containsExactly(CqlIdentifier.fromInternal("i")); - Assertions.assertThat(type.getFieldTypes()).containsExactly(DataTypes.INT); + assertThat(type.getKeyspace()).isEqualTo(ADMIN_SESSION_RULE.keyspace()); + assertThat(type.getName().asInternal()).isEqualTo("t"); + assertThat(type.getFieldNames()).containsExactly(CqlIdentifier.fromInternal("i")); + assertThat(type.getFieldTypes()).containsExactly(DataTypes.INT); }, (listener, type) -> verify(listener).onUserDefinedTypeCreated(type)); } @@ -213,7 +205,7 @@ public void should_handle_type_drop() { "DROP TYPE t", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("t"))), (listener, oldType) -> verify(listener).onUserDefinedTypeDropped(oldType)); } @@ -225,10 +217,10 @@ public void should_handle_type_update() { "ALTER TYPE t ADD j int", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getUserDefinedType(CqlIdentifier.fromInternal("t"))), newType -> - Assertions.assertThat(newType.getFieldNames()) + assertThat(newType.getFieldNames()) .containsExactly(CqlIdentifier.fromInternal("i"), CqlIdentifier.fromInternal("j")), (listener, oldType, newType) -> verify(listener).onUserDefinedTypeUpdated(newType, oldType)); @@ -246,16 +238,16 @@ public void should_handle_view_creation() { + "WITH CLUSTERING ORDER BY (score DESC, user DESC)", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getView(CqlIdentifier.fromInternal("highscores"))), view -> { - Assertions.assertThat(view.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - Assertions.assertThat(view.getName().asInternal()).isEqualTo("highscores"); - Assertions.assertThat(view.getBaseTable().asInternal()).isEqualTo("scores"); - Assertions.assertThat(view.includesAllColumns()).isFalse(); - 
Assertions.assertThat(view.getWhereClause()) + assertThat(view.getKeyspace()).isEqualTo(ADMIN_SESSION_RULE.keyspace()); + assertThat(view.getName().asInternal()).isEqualTo("highscores"); + assertThat(view.getBaseTable().asInternal()).isEqualTo("scores"); + assertThat(view.includesAllColumns()).isFalse(); + assertThat(view.getWhereClause()) .hasValue("game IS NOT NULL AND score IS NOT NULL AND user IS NOT NULL"); - Assertions.assertThat(view.getColumns()) + assertThat(view.getColumns()) .containsOnlyKeys( CqlIdentifier.fromInternal("game"), CqlIdentifier.fromInternal("score"), @@ -278,7 +270,7 @@ public void should_handle_view_drop() { "DROP MATERIALIZED VIEW highscores", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getView(CqlIdentifier.fromInternal("highscores"))), (listener, oldView) -> verify(listener).onViewDropped(oldView)); } @@ -297,10 +289,10 @@ public void should_handle_view_update() { "ALTER MATERIALIZED VIEW highscores WITH comment = 'The best score for each game'", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getView(CqlIdentifier.fromInternal("highscores"))), newView -> - Assertions.assertThat(newView.getOptions().get(CqlIdentifier.fromInternal("comment"))) + assertThat(newView.getOptions().get(CqlIdentifier.fromInternal("comment"))) .isEqualTo("The best score for each game"), (listener, oldView, newView) -> verify(listener).onViewUpdated(newView, oldView)); } @@ -314,17 +306,16 @@ public void should_handle_function_creation() { + "LANGUAGE java AS 'return i;'", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getFunction(CqlIdentifier.fromInternal("id"), DataTypes.INT)), function -> { - Assertions.assertThat(function.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - 
Assertions.assertThat(function.getSignature().getName().asInternal()).isEqualTo("id"); - Assertions.assertThat(function.getSignature().getParameterTypes()) - .containsExactly(DataTypes.INT); - Assertions.assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); - Assertions.assertThat(function.getLanguage()).isEqualTo("java"); - Assertions.assertThat(function.isCalledOnNullInput()).isFalse(); - Assertions.assertThat(function.getBody()).isEqualTo("return i;"); + assertThat(function.getKeyspace()).isEqualTo(ADMIN_SESSION_RULE.keyspace()); + assertThat(function.getSignature().getName().asInternal()).isEqualTo("id"); + assertThat(function.getSignature().getParameterTypes()).containsExactly(DataTypes.INT); + assertThat(function.getReturnType()).isEqualTo(DataTypes.INT); + assertThat(function.getLanguage()).isEqualTo("java"); + assertThat(function.isCalledOnNullInput()).isFalse(); + assertThat(function.getBody()).isEqualTo("return i;"); }, (listener, function) -> verify(listener).onFunctionCreated(function)); } @@ -339,7 +330,7 @@ public void should_handle_function_drop() { "DROP FUNCTION id", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getFunction(CqlIdentifier.fromInternal("id"), DataTypes.INT)), (listener, oldFunction) -> verify(listener).onFunctionDropped(oldFunction)); } @@ -356,9 +347,9 @@ public void should_handle_function_update() { + "LANGUAGE java AS 'return j;'", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getFunction(CqlIdentifier.fromInternal("id"), DataTypes.INT)), - newFunction -> Assertions.assertThat(newFunction.getBody()).isEqualTo("return j;"), + newFunction -> assertThat(newFunction.getBody()).isEqualTo("return j;"), (listener, oldFunction, newFunction) -> verify(listener).onFunctionUpdated(newFunction, oldFunction)); } @@ -372,20 +363,18 @@ public void 
should_handle_aggregate_creation() { "CREATE AGGREGATE sum(int) SFUNC plus STYPE int INITCOND 0", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getAggregate(CqlIdentifier.fromInternal("sum"), DataTypes.INT)), aggregate -> { - Assertions.assertThat(aggregate.getKeyspace()).isEqualTo(adminSessionRule.keyspace()); - Assertions.assertThat(aggregate.getSignature().getName().asInternal()).isEqualTo("sum"); - Assertions.assertThat(aggregate.getSignature().getParameterTypes()) - .containsExactly(DataTypes.INT); - Assertions.assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); - Assertions.assertThat(aggregate.getStateFuncSignature().getName().asInternal()) - .isEqualTo("plus"); - Assertions.assertThat(aggregate.getStateFuncSignature().getParameterTypes()) + assertThat(aggregate.getKeyspace()).isEqualTo(ADMIN_SESSION_RULE.keyspace()); + assertThat(aggregate.getSignature().getName().asInternal()).isEqualTo("sum"); + assertThat(aggregate.getSignature().getParameterTypes()).containsExactly(DataTypes.INT); + assertThat(aggregate.getStateType()).isEqualTo(DataTypes.INT); + assertThat(aggregate.getStateFuncSignature().getName().asInternal()).isEqualTo("plus"); + assertThat(aggregate.getStateFuncSignature().getParameterTypes()) .containsExactly(DataTypes.INT, DataTypes.INT); - Assertions.assertThat(aggregate.getFinalFuncSignature()).isEmpty(); - Assertions.assertThat(aggregate.getInitCond()).hasValue(0); + assertThat(aggregate.getFinalFuncSignature()).isEmpty(); + assertThat(aggregate.getInitCond()).hasValue(0); }, (listener, aggregate) -> verify(listener).onAggregateCreated(aggregate)); } @@ -401,7 +390,7 @@ public void should_handle_aggregate_drop() { "DROP AGGREGATE sum", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getAggregate(CqlIdentifier.fromInternal("sum"), DataTypes.INT)), (listener, oldAggregate) 
-> verify(listener).onAggregateDropped(oldAggregate)); } @@ -418,9 +407,9 @@ public void should_handle_aggregate_update() { "CREATE AGGREGATE sum(int) SFUNC plus STYPE int INITCOND 1", metadata -> metadata - .getKeyspace(adminSessionRule.keyspace()) + .getKeyspace(ADMIN_SESSION_RULE.keyspace()) .flatMap(ks -> ks.getAggregate(CqlIdentifier.fromInternal("sum"), DataTypes.INT)), - newAggregate -> Assertions.assertThat(newAggregate.getInitCond()).hasValue(1), + newAggregate -> assertThat(newAggregate.getInitCond()).hasValue(1), (listener, oldAggregate, newAggregate) -> verify(listener).onAggregateUpdated(newAggregate, oldAggregate)); } @@ -434,7 +423,7 @@ private void should_handle_creation( CqlIdentifier... keyspaces) { if (beforeStatement != null) { - adminSessionRule.session().execute(beforeStatement); + ADMIN_SESSION_RULE.session().execute(beforeStatement); } SchemaChangeListener listener1 = mock(SchemaChangeListener.class); @@ -456,9 +445,9 @@ private void should_handle_creation( try (CqlSession session1 = SessionUtils.newSession( - ccmRule, adminSessionRule.keyspace(), null, listener1, null, loader); + CCM_RULE, ADMIN_SESSION_RULE.keyspace(), null, listener1, null, loader); CqlSession session2 = - SessionUtils.newSession(ccmRule, null, null, listener2, null, loader)) { + SessionUtils.newSession(CCM_RULE, null, null, listener2, null, loader)) { session1.execute(createStatement); @@ -489,7 +478,7 @@ private void should_handle_drop( CqlIdentifier... 
keyspaces) { for (String statement : beforeStatements) { - adminSessionRule.session().execute(statement); + ADMIN_SESSION_RULE.session().execute(statement); } SchemaChangeListener listener1 = mock(SchemaChangeListener.class); @@ -507,9 +496,9 @@ private void should_handle_drop( try (CqlSession session1 = SessionUtils.newSession( - ccmRule, adminSessionRule.keyspace(), null, listener1, null, loader); + CCM_RULE, ADMIN_SESSION_RULE.keyspace(), null, listener1, null, loader); CqlSession session2 = - SessionUtils.newSession(ccmRule, null, null, listener2, null, loader)) { + SessionUtils.newSession(CCM_RULE, null, null, listener2, null, loader)) { T oldElement = extract.apply(session1.getMetadata()).orElseThrow(AssertionError::new); assertThat(oldElement).isNotNull(); @@ -539,7 +528,7 @@ private void should_handle_update( CqlIdentifier... keyspaces) { for (String statement : beforeStatements) { - adminSessionRule.session().execute(statement); + ADMIN_SESSION_RULE.session().execute(statement); } SchemaChangeListener listener1 = mock(SchemaChangeListener.class); @@ -556,9 +545,9 @@ private void should_handle_update( try (CqlSession session1 = SessionUtils.newSession( - ccmRule, adminSessionRule.keyspace(), null, listener1, null, loader); + CCM_RULE, ADMIN_SESSION_RULE.keyspace(), null, listener1, null, loader); CqlSession session2 = - SessionUtils.newSession(ccmRule, null, null, listener2, null, loader)) { + SessionUtils.newSession(CCM_RULE, null, null, listener2, null, loader)) { T oldElement = extract.apply(session1.getMetadata()).orElseThrow(AssertionError::new); assertThat(oldElement).isNotNull(); @@ -593,7 +582,7 @@ private void should_handle_update_via_drop_and_recreate( CqlIdentifier... 
keyspaces) { for (String statement : beforeStatements) { - adminSessionRule.session().execute(statement); + ADMIN_SESSION_RULE.session().execute(statement); } SchemaChangeListener listener1 = mock(SchemaChangeListener.class); @@ -609,9 +598,9 @@ private void should_handle_update_via_drop_and_recreate( .build(); try (CqlSession session1 = SessionUtils.newSession( - ccmRule, adminSessionRule.keyspace(), null, listener1, null, loader); + CCM_RULE, ADMIN_SESSION_RULE.keyspace(), null, listener1, null, loader); CqlSession session2 = - SessionUtils.newSession(ccmRule, null, null, listener2, null, loader)) { + SessionUtils.newSession(CCM_RULE, null, null, listener2, null, loader)) { T oldElement = extract.apply(session1.getMetadata()).orElseThrow(AssertionError::new); assertThat(oldElement).isNotNull(); From 4519c9f199be17f4af10c24b8440cd8c2befea9e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:47:41 +0200 Subject: [PATCH 715/979] Switch tests to parallel tests --- .../java/com/datastax/oss/driver/core/SerializationIT.java | 3 +++ .../core/retry/ConsistencyDowngradingRetryPolicyIT.java | 6 +++--- .../datastax/oss/driver/core/throttling/ThrottlingIT.java | 3 +++ .../datastax/oss/driver/core/tracker/RequestLoggerIT.java | 6 +++++- .../datastax/oss/driver/mapper/StatementAttributesIT.java | 3 +++ 5 files changed, 17 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java index afaffc44a0b..f9bdff1b656 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/SerializationIT.java @@ -29,14 +29,17 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import 
com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.SerializationHelper; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; +import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@Category(ParallelizableTests.class) public class SerializationIT { private static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java index 2a34a7cd639..a312d6162bf 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java @@ -60,6 +60,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.QueryCounter; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryPolicy; import com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryVerdict; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; @@ -83,12 +84,14 @@ import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; +import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.slf4j.LoggerFactory; import org.slf4j.helpers.MessageFormatter; @RunWith(DataProviderRunner.class) +@Category(ParallelizableTests.class) public class ConsistencyDowngradingRetryPolicyIT { 
@ClassRule @@ -130,7 +133,6 @@ public class ConsistencyDowngradingRetryPolicyIT { .setConsistencyLevel(DefaultConsistencyLevel.LOCAL_SERIAL) .build(); - @SuppressWarnings("deprecation") private final QueryCounter localQuorumCounter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter( @@ -139,7 +141,6 @@ public class ConsistencyDowngradingRetryPolicyIT { && l.getConsistency().equals(ConsistencyLevel.LOCAL_QUORUM)) .build(); - @SuppressWarnings("deprecation") private final QueryCounter oneCounter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter( @@ -147,7 +148,6 @@ public class ConsistencyDowngradingRetryPolicyIT { l.getQuery().equals(QUERY_STR) && l.getConsistency().equals(ConsistencyLevel.ONE)) .build(); - @SuppressWarnings("deprecation") private final QueryCounter localSerialCounter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter( diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java index ef90cac4e2e..f9491676fba 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java @@ -24,13 +24,16 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; import java.util.concurrent.TimeUnit; import org.junit.Rule; import org.junit.Test; +import org.junit.experimental.categories.Category; +@Category(ParallelizableTests.class) 
public class ThrottlingIT { private static final String QUERY = "select * from foo"; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java index 0c848fdb970..e67d0fdc462 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java @@ -40,6 +40,7 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.codec.ConsistencyLevel; @@ -52,6 +53,7 @@ import org.junit.Before; import org.junit.Rule; import org.junit.Test; +import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; import org.junit.runner.RunWith; @@ -64,6 +66,7 @@ import org.slf4j.LoggerFactory; @RunWith(MockitoJUnitRunner.class) +@Category(ParallelizableTests.class) public class RequestLoggerIT { private static final Pattern LOG_PREFIX_PER_REQUEST = Pattern.compile("\\[s\\d*\\|\\d*]"); @@ -80,7 +83,8 @@ public class RequestLoggerIT { private static final String QUERY = "SELECT release_version FROM system.local"; - private SimulacronRule simulacronRule = new SimulacronRule(ClusterSpec.builder().withNodes(3)); + private final SimulacronRule simulacronRule = + new SimulacronRule(ClusterSpec.builder().withNodes(3)); private final DriverConfigLoader requestLoader = SessionUtils.configLoaderBuilder() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java index 46a10d465ad..6f957c8a36c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/StatementAttributesIT.java @@ -38,6 +38,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Update; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; +import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.request.Execute; import com.datastax.oss.simulacron.common.cluster.ClusterQueryLogReport; @@ -56,9 +57,11 @@ import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; +import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; +@Category(ParallelizableTests.class) public class StatementAttributesIT { private static final SimulacronRule SIMULACRON_RULE = From e1d637532eb0ef3e04af528e87ce605041afaf75 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:48:36 +0200 Subject: [PATCH 716/979] Remove unnecessary SuppressWarnings annotation --- .../oss/driver/core/retry/DefaultRetryPolicyIT.java | 1 - .../oss/driver/core/retry/PerProfileRetryPolicyIT.java | 1 - .../oss/driver/core/specex/SpeculativeExecutionIT.java | 3 +-- .../oss/driver/api/testinfra/simulacron/QueryCounter.java | 6 +++--- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java index 8e496db350f..6474ee91a4c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/DefaultRetryPolicyIT.java @@ -103,7 +103,6 @@ public class DefaultRetryPolicyIT { private Level oldLevel; private String logPrefix; - @SuppressWarnings("deprecation") private final QueryCounter counter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(queryStr)) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java index 2e7665dcc6f..e3dd15e2f0e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/PerProfileRetryPolicyIT.java @@ -82,7 +82,6 @@ public class PerProfileRetryPolicyIT { private static String QUERY_STRING = "select * from foo"; private static final SimpleStatement QUERY = SimpleStatement.newInstance(QUERY_STRING); - @SuppressWarnings("deprecation") private final QueryCounter counter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(QUERY_STRING)) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java index f1cae68f0b0..95d1c8e9cb9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/specex/SpeculativeExecutionIT.java @@ -52,7 +52,7 @@ public class SpeculativeExecutionIT { // Note: it looks like shorter delays cause precision issues with Netty timers private static final long SPECULATIVE_DELAY = 1000; - private static String QUERY_STRING = "select * from foo"; + private static final String QUERY_STRING = "select * from foo"; private static final SimpleStatement 
QUERY = SimpleStatement.newInstance(QUERY_STRING); // Shared across all tests methods. @@ -60,7 +60,6 @@ public class SpeculativeExecutionIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(3)); - @SuppressWarnings("deprecation") private final QueryCounter counter = QueryCounter.builder(SIMULACRON_RULE.cluster()) .withFilter((l) -> l.getQuery().equals(QUERY_STRING)) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java index 5d305736da3..bad5a112431 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java @@ -112,11 +112,11 @@ public void assertNodeCounts(int... counts) { public static class QueryCounterBuilder { - @SuppressWarnings("deprecation") - private static Predicate DEFAULT_FILTER = (q) -> !q.getQuery().isEmpty(); + private static final Predicate DEFAULT_FILTER = (q) -> !q.getQuery().isEmpty(); + + private final BoundTopic topic; private Predicate queryLogFilter = DEFAULT_FILTER; - private BoundTopic topic; private NotificationMode notificationMode = NotificationMode.BEFORE_PROCESSING; private long beforeTimeout = 1; private TimeUnit beforeUnit = TimeUnit.SECONDS; From bb145ebb4174b8385dd65c2b1b5b8c04a67e3a3c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 16:48:53 +0200 Subject: [PATCH 717/979] Address minor compiler warnings --- .../core/heartbeat/HeartbeatDisabledIT.java | 2 +- .../oss/driver/core/heartbeat/HeartbeatIT.java | 2 +- .../PerProfileLoadBalancingPolicyIT.java | 15 ++++++++++----- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java index fd2f37d82af..f0afc0058c0 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatDisabledIT.java @@ -44,7 +44,7 @@ public void should_not_send_heartbeat_when_disabled() throws InterruptedExceptio SessionUtils.configLoaderBuilder() .withDuration(DefaultDriverOption.HEARTBEAT_INTERVAL, Duration.ofSeconds(0)) .build(); - try (CqlSession session = SessionUtils.newSession(SIMULACRON_RULE, loader)) { + try (CqlSession ignored = SessionUtils.newSession(SIMULACRON_RULE, loader)) { AtomicInteger heartbeats = registerHeartbeatListener(); SECONDS.sleep(35); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java index 14a72a43fa1..1dbc055a5af 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/heartbeat/HeartbeatIT.java @@ -119,7 +119,7 @@ public void should_send_heartbeat_on_control_connection() { ProgrammaticDriverConfigLoaderBuilder loader = SessionUtils.configLoaderBuilder() .withInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, 0); - try (CqlSession session = newSession(loader)) { + try (CqlSession ignored = newSession(loader)) { AtomicInteger heartbeats = countHeartbeatsOnControlConnection(); await() .pollInterval(500, TimeUnit.MILLISECONDS) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java index 2ee5aca6aee..12d5c800d88 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/loadbalancing/PerProfileLoadBalancingPolicyIT.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import java.util.Objects; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -93,7 +94,7 @@ public static void setup() { for (Node node : SESSION_RULE.session().getMetadata().getNodes().values()) { // if node is in dc2 it should be ignored, otherwise (dc1, dc3) it should be local. NodeDistance expectedDistance = - node.getDatacenter().equals("dc2") ? NodeDistance.IGNORED : NodeDistance.LOCAL; + Objects.equals(node.getDatacenter(), "dc2") ? NodeDistance.IGNORED : NodeDistance.LOCAL; assertThat(node.getDistance()).isEqualTo(expectedDistance); } } @@ -101,10 +102,12 @@ public static void setup() { @Test public void should_use_policy_from_request_profile() { // Since profile1 uses dc3 as localDC, only those nodes should receive these queries. - Statement statement = QUERY.setExecutionProfileName("profile1"); + Statement statement = QUERY.setExecutionProfileName("profile1"); for (int i = 0; i < 10; i++) { ResultSet result = SESSION_RULE.session().execute(statement); - assertThat(result.getExecutionInfo().getCoordinator().getDatacenter()).isEqualTo("dc3"); + Node coordinator = result.getExecutionInfo().getCoordinator(); + assertThat(coordinator).isNotNull(); + assertThat(coordinator.getDatacenter()).isEqualTo("dc3"); } assertQueryInDc(0, 0); @@ -115,10 +118,12 @@ public void should_use_policy_from_request_profile() { @Test public void should_use_policy_from_config_when_not_configured_in_request_profile() { // Since profile2 does not define an lbp config, it should use default which uses dc1. 
- Statement statement = QUERY.setExecutionProfileName("profile2"); + Statement statement = QUERY.setExecutionProfileName("profile2"); for (int i = 0; i < 10; i++) { ResultSet result = SESSION_RULE.session().execute(statement); - assertThat(result.getExecutionInfo().getCoordinator().getDatacenter()).isEqualTo("dc1"); + Node coordinator = result.getExecutionInfo().getCoordinator(); + assertThat(coordinator).isNotNull(); + assertThat(coordinator.getDatacenter()).isEqualTo("dc1"); } assertQueryInDc(0, 5); From 7233016ee2125a5fd658e87993cb8ba7a969cd1a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 17:04:17 +0200 Subject: [PATCH 718/979] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method (#1553) --- changelog/README.md | 1 + .../internal/core/context/DefaultDriverContext.java | 11 +++++++++++ .../core/context/InternalDriverContext.java | 13 +++++++++++++ 3 files changed, 25 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index f08c735c369..9312b8e8749 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method - [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader - [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective - [bug] JAVA-2941: Cannot add a single static column with the alter table API diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index fd217f9c6c8..e09e5ee3b5c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -225,6 +225,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final SchemaChangeListener 
schemaChangeListenerFromBuilder; private final RequestTracker requestTrackerFromBuilder; private final Map localDatacentersFromBuilder; + private final Map> nodeFiltersFromBuilder; private final Map nodeDistanceEvaluatorsFromBuilder; private final ClassLoader classLoader; private final InetSocketAddress cloudProxyAddress; @@ -277,6 +278,9 @@ public DefaultDriverContext( "sslEngineFactory", () -> buildSslEngineFactory(programmaticArguments.getSslEngineFactory()), cycleDetector); + @SuppressWarnings("deprecation") + Map> nodeFilters = programmaticArguments.getNodeFilters(); + this.nodeFiltersFromBuilder = nodeFilters; this.nodeDistanceEvaluatorsFromBuilder = programmaticArguments.getNodeDistanceEvaluators(); this.classLoader = programmaticArguments.getClassLoader(); this.cloudProxyAddress = programmaticArguments.getCloudProxyAddress(); @@ -909,6 +913,13 @@ public String getLocalDatacenter(@NonNull String profileName) { return localDatacentersFromBuilder.get(profileName); } + @Nullable + @Override + @Deprecated + public Predicate getNodeFilter(@NonNull String profileName) { + return nodeFiltersFromBuilder.get(profileName); + } + @Nullable @Override public NodeDistanceEvaluator getNodeDistanceEvaluator(@NonNull String profileName) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index 3e3f21d0e41..0997258c149 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.SessionBuilder; import 
com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; @@ -49,6 +50,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Predicate; /** Extends the driver context with additional components that are not exposed by our public API. */ public interface InternalDriverContext extends DriverContext { @@ -136,6 +138,17 @@ public interface InternalDriverContext extends DriverContext { @Nullable String getLocalDatacenter(@NonNull String profileName); + /** + * This is the filter from {@link SessionBuilder#withNodeFilter(String, Predicate)}. If the filter + * for this profile was specified through the configuration instead, this method will return + * {@code null}. + * + * @deprecated Use {@link #getNodeDistanceEvaluator(String)} instead. + */ + @Nullable + @Deprecated + Predicate getNodeFilter(@NonNull String profileName); + /** * This is the node distance evaluator from {@link * SessionBuilder#withNodeDistanceEvaluator(String, NodeDistanceEvaluator)}. 
If the evaluator for From 38f698d5736f8979e7cc5a3a3802338b4617a9ae Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 17:04:59 +0200 Subject: [PATCH 719/979] JAVA-2947: Release buffer after decoding multi-slice frame (#1554) --- changelog/README.md | 1 + .../core/protocol/SegmentToFrameDecoder.java | 43 +++-- .../protocol/SegmentToFrameDecoderTest.java | 3 +- .../NettyResourceLeakDetectionIT.java | 177 ++++++++++++++++++ 4 files changed, 207 insertions(+), 17 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java diff --git a/changelog/README.md b/changelog/README.md index f08c735c369..5620ac9f320 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2947: Release buffer after decoding multi-slice frame - [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader - [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective - [bug] JAVA-2941: Cannot add a single static column with the alter table API diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java index 13564e47bff..66b8d0876c0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoder.java @@ -69,14 +69,19 @@ protected void decode( private void decodeSelfContained(Segment segment, List out) { ByteBuf payload = segment.payload; int frameCount = 0; - do { - Frame frame = frameCodec.decode(payload); - LOG.trace( - "[{}] Decoded response frame {} from self-contained segment", logPrefix, frame.streamId); - out.add(frame); - frameCount += 1; - } while (payload.isReadable()); - payload.release(); + try { + do { + Frame 
frame = frameCodec.decode(payload); + LOG.trace( + "[{}] Decoded response frame {} from self-contained segment", + logPrefix, + frame.streamId); + out.add(frame); + frameCount += 1; + } while (payload.isReadable()); + } finally { + payload.release(); + } LOG.trace("[{}] Done processing self-contained segment ({} frames)", logPrefix, frameCount); } @@ -89,28 +94,34 @@ private void decodeSlice(Segment segment, ByteBufAllocator allocator, L } accumulatedSlices.add(slice); accumulatedLength += slice.readableBytes(); + int accumulatedSlicesSize = accumulatedSlices.size(); LOG.trace( "[{}] Decoded slice {}, {}/{} bytes", logPrefix, - accumulatedSlices.size(), + accumulatedSlicesSize, accumulatedLength, targetLength); assert accumulatedLength <= targetLength; if (accumulatedLength == targetLength) { // We've received enough data to reassemble the whole message - CompositeByteBuf encodedFrame = allocator.compositeBuffer(accumulatedSlices.size()); + CompositeByteBuf encodedFrame = allocator.compositeBuffer(accumulatedSlicesSize); encodedFrame.addComponents(true, accumulatedSlices); - Frame frame = frameCodec.decode(encodedFrame); + Frame frame; + try { + frame = frameCodec.decode(encodedFrame); + } finally { + encodedFrame.release(); + // Reset our state + targetLength = UNKNOWN_LENGTH; + accumulatedSlices.clear(); + accumulatedLength = 0; + } LOG.trace( "[{}] Decoded response frame {} from {} slices", logPrefix, frame.streamId, - accumulatedSlices.size()); + accumulatedSlicesSize); out.add(frame); - // Reset our state - targetLength = UNKNOWN_LENGTH; - accumulatedSlices.clear(); - accumulatedLength = 0; } } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java index 2bb93f0901b..363beb1b4af 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/SegmentToFrameDecoderTest.java @@ -73,7 +73,8 @@ public void should_decode_sequence_of_slices() { encodeFrame(new AuthResponse(Bytes.fromHexString("0x" + Strings.repeat("aa", 1011)))); int sliceLength = 100; do { - ByteBuf payload = encodedFrame.readSlice(Math.min(sliceLength, encodedFrame.readableBytes())); + ByteBuf payload = + encodedFrame.readRetainedSlice(Math.min(sliceLength, encodedFrame.readableBytes())); channel.writeInbound(new Segment<>(payload, false)); } while (encodedFrame.isReadable()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java new file mode 100644 index 00000000000..ada5ae9a61b --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java @@ -0,0 +1,177 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.connection; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.shaded.guava.common.base.Strings; +import com.datastax.oss.protocol.internal.Segment; +import com.datastax.oss.protocol.internal.util.Bytes; +import io.netty.util.ResourceLeakDetector; +import io.netty.util.ResourceLeakDetector.Level; +import java.nio.ByteBuffer; +import java.util.List; +import org.junit.After; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.LoggerFactory; + +@Category(IsolatedTests.class) +@RunWith(MockitoJUnitRunner.class) +public class NettyResourceLeakDetectionIT { + + static { + ResourceLeakDetector.setLevel(Level.PARANOID); + } + + private 
static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private static final ByteBuffer LARGE_PAYLOAD = + Bytes.fromHexString("0x" + Strings.repeat("ab", Segment.MAX_PAYLOAD_LENGTH + 100)); + + @Mock private Appender appender; + + @BeforeClass + public static void createTables() { + CqlSession session = SESSION_RULE.session(); + DriverExecutionProfile slowProfile = SessionUtils.slowProfile(session); + session.execute( + SimpleStatement.newInstance( + "CREATE TABLE IF NOT EXISTS leak_test_small (key int PRIMARY KEY, value int)") + .setExecutionProfile(slowProfile)); + session.execute( + SimpleStatement.newInstance( + "CREATE TABLE IF NOT EXISTS leak_test_large (key int PRIMARY KEY, value blob)") + .setExecutionProfile(slowProfile)); + } + + @Before + public void setupLogger() { + Logger logger = (Logger) LoggerFactory.getLogger(ResourceLeakDetector.class); + logger.setLevel(ch.qos.logback.classic.Level.ERROR); + logger.addAppender(appender); + } + + @After + public void resetLogger() { + Logger logger = (Logger) LoggerFactory.getLogger(ResourceLeakDetector.class); + logger.detachAppender(appender); + } + + @Test + public void should_not_leak_uncompressed() { + doLeakDetectionTest(SESSION_RULE.session()); + } + + @Test + public void should_not_leak_compressed_lz4() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "lz4") + .build(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, SESSION_RULE.keyspace(), loader)) { + doLeakDetectionTest(session); + } + } + + @Test + public void should_not_leak_compressed_snappy() { + Assume.assumeTrue( + "Snappy is not supported in OSS C* 4.0+ with protocol v5", + CCM_RULE.getDseVersion().isPresent() + || 
CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "snappy") + .build(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, SESSION_RULE.keyspace(), loader)) { + doLeakDetectionTest(session); + } + } + + private void doLeakDetectionTest(CqlSession session) { + for (int i = 0; i < 10; i++) { + testSmallMessages(session); + verify(appender, never()).doAppend(any()); + System.gc(); + testLargeMessages(session); + verify(appender, never()).doAppend(any()); + System.gc(); + } + } + + private void testSmallMessages(CqlSession session) { + // trigger some activity using small requests and responses; in v5, these messages should fit in + // one single, self-contained segment + for (int i = 0; i < 1000; i++) { + session.execute("INSERT INTO leak_test_small (key, value) VALUES (?,?)", i, i); + } + List rows = session.execute("SELECT value FROM leak_test_small").all(); + assertThat(rows).hasSize(1000); + for (Row row : rows) { + assertThat(row).isNotNull(); + int actual = row.getInt(0); + assertThat(actual).isGreaterThanOrEqualTo(0).isLessThan(1000); + } + } + + private void testLargeMessages(CqlSession session) { + // trigger some activity using large requests and responses; in v5, these messages are likely to + // be split in multiple segments + for (int i = 0; i < 100; i++) { + session.execute( + "INSERT INTO leak_test_large (key, value) VALUES (?,?)", i, LARGE_PAYLOAD.duplicate()); + } + List rows = session.execute("SELECT value FROM leak_test_large").all(); + assertThat(rows).hasSize(100); + for (Row row : rows) { + assertThat(row).isNotNull(); + ByteBuffer actual = row.getByteBuffer(0); + assertThat(actual).isEqualTo(LARGE_PAYLOAD.duplicate()); + } + } +} From 890f901f3112006022e3115ea738828608b17413 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 17:13:19 +0200 Subject: [PATCH 720/979] 
Address errorprone error on QueryCounter --- .../oss/driver/api/testinfra/simulacron/QueryCounter.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java index bad5a112431..1be6e219395 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/simulacron/QueryCounter.java @@ -112,6 +112,7 @@ public void assertNodeCounts(int... counts) { public static class QueryCounterBuilder { + @SuppressWarnings("UnnecessaryLambda") private static final Predicate DEFAULT_FILTER = (q) -> !q.getQuery().isEmpty(); private final BoundTopic topic; From 4ee99e0aec11187b24559812f54919dcbbed7a21 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 10 Jun 2021 17:04:17 +0200 Subject: [PATCH 721/979] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method (#1553) --- changelog/README.md | 1 + .../internal/core/context/DefaultDriverContext.java | 11 +++++++++++ .../core/context/InternalDriverContext.java | 13 +++++++++++++ 3 files changed, 25 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 5620ac9f320..52d34c9b02a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method - [bug] JAVA-2947: Release buffer after decoding multi-slice frame - [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader - [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index fd217f9c6c8..e09e5ee3b5c 100644 
--- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -225,6 +225,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final SchemaChangeListener schemaChangeListenerFromBuilder; private final RequestTracker requestTrackerFromBuilder; private final Map localDatacentersFromBuilder; + private final Map> nodeFiltersFromBuilder; private final Map nodeDistanceEvaluatorsFromBuilder; private final ClassLoader classLoader; private final InetSocketAddress cloudProxyAddress; @@ -277,6 +278,9 @@ public DefaultDriverContext( "sslEngineFactory", () -> buildSslEngineFactory(programmaticArguments.getSslEngineFactory()), cycleDetector); + @SuppressWarnings("deprecation") + Map> nodeFilters = programmaticArguments.getNodeFilters(); + this.nodeFiltersFromBuilder = nodeFilters; this.nodeDistanceEvaluatorsFromBuilder = programmaticArguments.getNodeDistanceEvaluators(); this.classLoader = programmaticArguments.getClassLoader(); this.cloudProxyAddress = programmaticArguments.getCloudProxyAddress(); @@ -909,6 +913,13 @@ public String getLocalDatacenter(@NonNull String profileName) { return localDatacentersFromBuilder.get(profileName); } + @Nullable + @Override + @Deprecated + public Predicate getNodeFilter(@NonNull String profileName) { + return nodeFiltersFromBuilder.get(profileName); + } + @Nullable @Override public NodeDistanceEvaluator getNodeDistanceEvaluator(@NonNull String profileName) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java index 3e3f21d0e41..0997258c149 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/InternalDriverContext.java 
@@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.ProtocolVersionRegistry; @@ -49,6 +50,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Predicate; /** Extends the driver context with additional components that are not exposed by our public API. */ public interface InternalDriverContext extends DriverContext { @@ -136,6 +138,17 @@ public interface InternalDriverContext extends DriverContext { @Nullable String getLocalDatacenter(@NonNull String profileName); + /** + * This is the filter from {@link SessionBuilder#withNodeFilter(String, Predicate)}. If the filter + * for this profile was specified through the configuration instead, this method will return + * {@code null}. + * + * @deprecated Use {@link #getNodeDistanceEvaluator(String)} instead. + */ + @Nullable + @Deprecated + Predicate getNodeFilter(@NonNull String profileName); + /** * This is the node distance evaluator from {@link * SessionBuilder#withNodeDistanceEvaluator(String, NodeDistanceEvaluator)}. 
If the evaluator for From 6f115e72f419aafb265617b7ebf9f4553c43921f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 15:10:49 +0200 Subject: [PATCH 722/979] Fix timeout issues --- .../cql/continuous/ContinuousPagingIT.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index 92bfaa42e06..3ba00e4095b 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -43,6 +43,7 @@ import java.time.Duration; import java.util.Collections; import java.util.Iterator; +import java.util.Objects; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -183,7 +184,10 @@ public void simple_statement_paging_should_be_resilient_to_schema_change() { .getDefaultProfile() .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 1) - .withInt(DefaultDriverOption.REQUEST_TIMEOUT, 120000000); + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE, Duration.ofSeconds(30)) + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofSeconds(30)); ContinuousResultSet result = session.executeContinuously(simple.setExecutionProfile(profile)); Iterator it = result.iterator(); // First row should have a non-null values. @@ -193,11 +197,7 @@ public void simple_statement_paging_should_be_resilient_to_schema_change() { // Make schema change to add b, its metadata should NOT be present in subsequent rows. 
CqlSession schemaChangeSession = SessionUtils.newSession( - ccmRule, - session.getKeyspace().orElseThrow(IllegalStateException::new), - SessionUtils.configLoaderBuilder() - .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) - .build()); + ccmRule, session.getKeyspace().orElseThrow(IllegalStateException::new)); SimpleStatement statement = SimpleStatement.newInstance("ALTER TABLE test_prepare add b int") .setExecutionProfile(sessionRule.slowProfile()); @@ -251,7 +251,11 @@ public void prepared_statement_paging_should_be_resilient_to_schema_change() { .getConfig() .getDefaultProfile() .withInt(DseDriverOption.CONTINUOUS_PAGING_MAX_ENQUEUED_PAGES, 1) - .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 1); + .withInt(DseDriverOption.CONTINUOUS_PAGING_PAGE_SIZE, 1) + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_FIRST_PAGE, Duration.ofSeconds(30)) + .withDuration( + DseDriverOption.CONTINUOUS_PAGING_TIMEOUT_OTHER_PAGES, Duration.ofSeconds(30)); ContinuousResultSet result = session.executeContinuously(prepared.bind("foo").setExecutionProfile(profile)); Iterator it = result.iterator(); @@ -262,11 +266,7 @@ public void prepared_statement_paging_should_be_resilient_to_schema_change() { // Make schema change to drop v, its metadata should be present, values will be null. 
CqlSession schemaChangeSession = SessionUtils.newSession( - ccmRule, - session.getKeyspace().orElseThrow(IllegalStateException::new), - SessionUtils.configLoaderBuilder() - .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) - .build()); + ccmRule, session.getKeyspace().orElseThrow(IllegalStateException::new)); schemaChangeSession.execute("ALTER TABLE test_prep DROP v;"); while (it.hasNext()) { // Each row should have a value for k, v should still be present, but null since column was @@ -276,7 +276,7 @@ public void prepared_statement_paging_should_be_resilient_to_schema_change() { if (ccmRule .getDseVersion() .orElseThrow(IllegalStateException::new) - .compareTo(Version.parse("6.0.0")) + .compareTo(Objects.requireNonNull(Version.parse("6.0.0"))) >= 0) { // DSE 6 only, v should be null here since dropped. // Not reliable for 5.1 since we may have gotten page queued before schema changed. From 12f329ed78b2b96aa0d9d7a2ab23caa20b634a96 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 15:23:34 +0200 Subject: [PATCH 723/979] Improve assertion messages --- .../cql/continuous/ContinuousPagingITBase.java | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java index a4c937d9311..c2bc7de9791 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java @@ -105,17 +105,21 @@ public static Object[][] pagingOptions() { protected void validateMetrics(CqlSession session) { Node node = session.getMetadata().getNodes().values().iterator().next(); - assertThat(session.getMetrics()).isPresent(); + 
assertThat(session.getMetrics()).as("assert session.getMetrics() present").isPresent(); Metrics metrics = session.getMetrics().get(); - assertThat(metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES)).isPresent(); + assertThat(metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES)) + .as("assert metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES) present") + .isPresent(); Timer messages = (Timer) metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES).get(); - assertThat(messages.getCount()).isGreaterThan(0); - assertThat(messages.getMeanRate()).isGreaterThan(0); - assertThat(metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS)).isPresent(); + assertThat(messages.getCount()).as("assert messages.getCount() >= 0").isGreaterThan(0); + assertThat(messages.getMeanRate()).as("assert messages.getMeanRate() >= 0").isGreaterThan(0); + assertThat(metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS)) + .as("assert metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS) present") + .isPresent(); Timer requests = (Timer) metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS).get(); - assertThat(requests.getCount()).isGreaterThan(0); - assertThat(requests.getMeanRate()).isGreaterThan(0); + assertThat(requests.getCount()).as("assert requests.getCount() >= 0").isGreaterThan(0); + assertThat(requests.getMeanRate()).as("assert requests.getMeanRate() >= 0").isGreaterThan(0); } public static class Options { From e70f0c9178ff8121891a6975c11998acc5bd7253 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 15:24:08 +0200 Subject: [PATCH 724/979] Fix failing assumptions --- .../driver/core/metadata/SchemaChangesIT.java | 43 ++++++++++++++----- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 
d45176ca825..12d817de091 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -16,19 +16,22 @@ package com.datastax.oss.driver.core.metadata; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assumptions.assumeThat; import static org.awaitility.Awaitility.await; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.metadata.Metadata; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule.Builder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.google.common.collect.ImmutableList; @@ -48,7 +51,16 @@ public class SchemaChangesIT { - private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); + static { + Builder builder = CustomCcmRule.builder(); + if (!CcmBridge.DSE_ENABLEMENT + && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { + builder.withCassandraConfiguration("enable_materialized_views", true); + } + CCM_RULE = builder.build(); + } + + private static final CustomCcmRule CCM_RULE; // A client that we only use to set up the tests private static final 
SessionRule ADMIN_SESSION_RULE = @@ -227,8 +239,9 @@ public void should_handle_type_update() { } @Test - @CassandraRequirement(min = "3.0") public void should_handle_view_creation() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V3_0_0) >= 0) + .isTrue(); should_handle_creation( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", "CREATE MATERIALIZED VIEW highscores " @@ -257,8 +270,9 @@ public void should_handle_view_creation() { } @Test - @CassandraRequirement(min = "3.0") public void should_handle_view_drop() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V3_0_0) >= 0) + .isTrue(); should_handle_drop( ImmutableList.of( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", @@ -276,8 +290,9 @@ public void should_handle_view_drop() { } @Test - @CassandraRequirement(min = "3.0") public void should_handle_view_update() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V3_0_0) >= 0) + .isTrue(); should_handle_update( ImmutableList.of( "CREATE TABLE scores(user text, game text, score int, PRIMARY KEY (user, game))", @@ -298,8 +313,9 @@ public void should_handle_view_update() { } @Test - @CassandraRequirement(min = "2.2") public void should_handle_function_creation() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_creation( null, "CREATE FUNCTION id(i int) RETURNS NULL ON NULL INPUT RETURNS int " @@ -321,8 +337,9 @@ public void should_handle_function_creation() { } @Test - @CassandraRequirement(min = "2.2") public void should_handle_function_drop() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_drop( ImmutableList.of( "CREATE FUNCTION id(i int) RETURNS NULL ON NULL INPUT RETURNS int " @@ -336,8 +353,9 @@ public void should_handle_function_drop() { } @Test - 
@CassandraRequirement(min = "2.2") public void should_handle_function_update() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_update_via_drop_and_recreate( ImmutableList.of( "CREATE FUNCTION id(i int) RETURNS NULL ON NULL INPUT RETURNS int " @@ -355,8 +373,9 @@ public void should_handle_function_update() { } @Test - @CassandraRequirement(min = "2.2") public void should_handle_aggregate_creation() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_creation( "CREATE FUNCTION plus(i int, j int) RETURNS NULL ON NULL INPUT RETURNS int " + "LANGUAGE java AS 'return i+j;'", @@ -380,8 +399,9 @@ public void should_handle_aggregate_creation() { } @Test - @CassandraRequirement(min = "2.2") public void should_handle_aggregate_drop() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_drop( ImmutableList.of( "CREATE FUNCTION plus(i int, j int) RETURNS NULL ON NULL INPUT RETURNS int " @@ -396,8 +416,9 @@ public void should_handle_aggregate_drop() { } @Test - @CassandraRequirement(min = "2.2") public void should_handle_aggregate_update() { + assumeThat(CCM_RULE.getCcmBridge().getCassandraVersion().compareTo(Version.V2_2_0) >= 0) + .isTrue(); should_handle_update_via_drop_and_recreate( ImmutableList.of( "CREATE FUNCTION plus(i int, j int) RETURNS NULL ON NULL INPUT RETURNS int " From da712c776ae48124e736299a5a5002806ec501b3 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 15:24:25 +0200 Subject: [PATCH 725/979] Use slow profile for DDL statement --- .../test/java/com/datastax/oss/driver/mapper/UpdateIT.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index 
dd6993ee40d..b531eaa6bd8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -68,7 +68,9 @@ public static void setup() { session.execute( SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } - session.execute("CREATE TABLE only_p_k(id uuid PRIMARY KEY)"); + session.execute( + SimpleStatement.newInstance("CREATE TABLE only_p_k(id uuid PRIMARY KEY)") + .setExecutionProfile(SESSION_RULE.slowProfile())); inventoryMapper = new UpdateIT_InventoryMapperBuilder(session).build(); dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); From 869eb5581dd70cc6a93e7425d556770ee9b19ef4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 15:24:40 +0200 Subject: [PATCH 726/979] Move test to serial tests --- .../com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java index bd0191f359a..089c4d4fa53 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java @@ -20,7 +20,6 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.protocol.internal.request.Query; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; @@ -31,10 +30,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; -import org.junit.experimental.categories.Category; /** Test 
for JAVA-2654. */ -@Category(ParallelizableTests.class) public class PeersV2NodeRefreshIT { private static Server peersV2Server; From 4d7de91daacae82d9be0673e471dee0926703cc2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 11 Jun 2021 17:23:32 +0200 Subject: [PATCH 727/979] Fix errorprone error --- .../com/datastax/oss/driver/core/metadata/SchemaChangesIT.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 12d817de091..fc6d1a84788 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -31,7 +31,6 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule.Builder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.google.common.collect.ImmutableList; @@ -52,7 +51,7 @@ public class SchemaChangesIT { static { - Builder builder = CustomCcmRule.builder(); + CustomCcmRule.Builder builder = CustomCcmRule.builder(); if (!CcmBridge.DSE_ENABLEMENT && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { builder.withCassandraConfiguration("enable_materialized_views", true); From 8bdd81f70ab18aaa6360b376abd17b1c5c4b4e30 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 12 Jun 2021 19:07:57 +0200 Subject: [PATCH 728/979] JAVA-2944: Upgrade MicroProfile Metrics to 3.0 (#1551) --- changelog/README.md | 4 ++++ .../microprofile/MicroProfileMetricUpdater.java | 3 ++- pom.xml | 4 ++-- upgrade_guide/README.md | 15 
+++++++++++++++ 4 files changed, 23 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 52d34c9b02a..07a35a9352e 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.12.0 (in progress) + +- [improvement] JAVA-2944: Upgrade MicroProfile Metrics to 3.0 + ### 4.11.2 (in progress) - [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java index ea06e2bff47..3dcf0512702 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.MetricId; import edu.umd.cs.findbugs.annotations.Nullable; +import java.time.Duration; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -75,7 +76,7 @@ public void markMeter(MetricT metric, @Nullable String profileName, long amount) public void updateTimer( MetricT metric, @Nullable String profileName, long duration, TimeUnit unit) { if (isEnabled(metric, profileName)) { - getOrCreateTimerFor(metric).update(duration, unit); + getOrCreateTimerFor(metric).update(Duration.ofNanos(unit.toNanos(duration))); } } diff --git a/pom.xml b/pom.xml index b6602f484a3..72e4194db9a 100644 --- a/pom.xml +++ b/pom.xml @@ -416,12 +416,12 @@ org.eclipse.microprofile.metrics microprofile-metrics-api - 2.3.3 + 3.0 io.smallrye smallrye-metrics - 2.4.6 + 3.0.3 io.projectreactor diff --git a/upgrade_guide/README.md 
b/upgrade_guide/README.md index 6ada0de2a13..1b92cb972d6 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,20 @@ ## Upgrade guide +### 4.12.0 + +#### MicroProfile Metrics upgraded to 3.0 + +The MicroProfile Metrics library has been upgraded from version 2.4 to 3.0. Since this upgrade +involves backwards-incompatible binary changes, users of this library and of the +`java-driver-metrics-microprofile` module are required to take the appropriate action: + +* If your application is still using MicroProfile Metrics < 3.0, you can still upgrade the core + driver to 4.12, but you now must keep `java-driver-metrics-microprofile` in version 4.11 or lower, + as newer versions will not work. + +* If your application is using MicroProfile Metrics >= 3.0, then you must upgrade to driver 4.12 or + higher, as previous versions of `java-driver-metrics-microprofile` will not work. + ### 4.11.0 #### Native protocol V5 is now production-ready From 390a01cfd115f1dba237f1b8c44bef3b90b3fb17 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 12 Jun 2021 19:11:02 +0200 Subject: [PATCH 729/979] JAVA-2935: Make GetEntity and SetEntity methods resilient to incomplete data (#1556) --- changelog/README.md | 1 + .../api/core/cql/BoundStatementBuilder.java | 6 + .../user/CreateUserQueryProvider.java | 4 +- .../killrvideo/user/LoginQueryProvider.java | 2 +- .../video/CreateVideoQueryProvider.java | 2 +- .../oss/driver/mapper/GetEntityIT.java | 111 ++++++++- .../mapper/GuavaFutureProducerService.java | 2 +- .../oss/driver/mapper/ImmutableEntityIT.java | 152 +++++++++++- .../oss/driver/mapper/InventoryITBase.java | 2 + .../oss/driver/mapper/NestedUdtIT.java | 216 ++++++++++++++---- .../oss/driver/mapper/QueryProviderIT.java | 4 +- .../oss/driver/mapper/SetEntityIT.java | 85 ++++++- manual/mapper/daos/getentity/README.md | 48 ++++ manual/mapper/daos/setentity/README.md | 49 ++++ .../dao/DaoGetEntityMethodGenerator.java | 16 +- .../dao/DaoInsertMethodGenerator.java 
| 2 +- .../dao/DaoSetEntityMethodGenerator.java | 7 +- .../dao/DaoUpdateMethodGenerator.java | 5 +- .../EntityHelperGetMethodGenerator.java | 123 +++++++--- .../EntityHelperSetMethodGenerator.java | 11 +- .../generation/GeneratedCodePatterns.java | 42 ++-- .../mapper/reactive/ReactiveDaoBase.java | 2 +- .../driver/api/mapper/annotations/Entity.java | 5 +- .../api/mapper/annotations/GetEntity.java | 16 +- .../api/mapper/annotations/SetEntity.java | 13 ++ .../api/mapper/entity/EntityHelper.java | 81 +++++-- .../oss/driver/internal/mapper/DaoBase.java | 11 +- .../mapper/entity/EntityHelperBase.java | 47 +++- upgrade_guide/README.md | 20 ++ 29 files changed, 946 insertions(+), 139 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 07a35a9352e..971a108e469 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.12.0 (in progress) +- [improvement] JAVA-2935: Make GetEntity and SetEntity methods resilient to incomplete data - [improvement] JAVA-2944: Upgrade MicroProfile Metrics to 3.0 ### 4.11.2 (in progress) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java index 5cdd07f2a61..090cea49bc6 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.java @@ -99,6 +99,12 @@ public BoundStatementBuilder(@NonNull BoundStatement template) { this.node = template.getNode(); } + /** The prepared statement that was used to create this statement. 
*/ + @NonNull + public PreparedStatement getPreparedStatement() { + return preparedStatement; + } + @NonNull @Override public List allIndicesOf(@NonNull CqlIdentifier id) { diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java index dd70ce39f95..0200c429e3a 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/CreateUserQueryProvider.java @@ -116,14 +116,14 @@ private boolean insertCredentialsIfNotExists(String email, char[] password, UUID UserCredentials credentials = new UserCredentials(Objects.requireNonNull(email), passwordHash, userId); BoundStatementBuilder insertCredentials = preparedInsertCredentials.boundStatementBuilder(); - credentialsHelper.set(credentials, insertCredentials, NullSavingStrategy.DO_NOT_SET); + credentialsHelper.set(credentials, insertCredentials, NullSavingStrategy.DO_NOT_SET, false); ResultSet resultSet = session.execute(insertCredentials.build()); return resultSet.wasApplied(); } private void insertUser(User user) { BoundStatementBuilder insertUser = preparedInsertUser.boundStatementBuilder(); - userHelper.set(user, insertUser, NullSavingStrategy.DO_NOT_SET); + userHelper.set(user, insertUser, NullSavingStrategy.DO_NOT_SET, false); session.execute(insertUser.build()); } } diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java index 7b968a65bc2..3790ea7dc68 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java +++ 
b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/user/LoginQueryProvider.java @@ -62,7 +62,7 @@ Optional login(String email, char[] password) { throw new IllegalStateException( "Should have found matching row for userid " + userid); } else { - return Optional.of(userHelper.get(userRow)); + return Optional.of(userHelper.get(userRow, false)); } } else { return Optional.empty(); diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java index 6ec1c7b1aaf..cb02f70d046 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/mapper/killrvideo/video/CreateVideoQueryProvider.java @@ -95,7 +95,7 @@ private static PreparedStatement prepareInsert( private static BoundStatement bind( PreparedStatement preparedStatement, T entity, EntityHelper entityHelper) { BoundStatementBuilder boundStatement = preparedStatement.boundStatementBuilder(); - entityHelper.set(entity, boundStatement, NullSavingStrategy.DO_NOT_SET); + entityHelper.set(entity, boundStatement, NullSavingStrategy.DO_NOT_SET, false); return boundStatement.build(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java index 2ca29a688a9..643284fb225 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GetEntityIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import 
com.datastax.oss.driver.api.core.CqlSession; @@ -25,6 +26,8 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; @@ -38,6 +41,7 @@ import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; +import java.util.UUID; import java.util.stream.Stream; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -56,6 +60,8 @@ public class GetEntityIT extends InventoryITBase { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + private static final UUID PRODUCT_2D_ID = UUID.randomUUID(); + private static ProductDao dao; @BeforeClass @@ -67,6 +73,18 @@ public static void setup() { SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } + UserDefinedType dimensions2d = + session + .getKeyspace() + .flatMap(ks -> session.getMetadata().getKeyspace(ks)) + .flatMap(ks -> ks.getUserDefinedType("dimensions2d")) + .orElseThrow(AssertionError::new); + session.execute( + "INSERT INTO product2d (id, description, dimensions) VALUES (?, ?, ?)", + PRODUCT_2D_ID, + "2D product", + dimensions2d.newValue(12, 34)); + InventoryMapper inventoryMapper = new GetEntityIT_InventoryMapperBuilder(session).build(); dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); @@ -75,12 +93,13 @@ public static void setup() { } @Test - public void should_get_entity_from_row() { + public void should_get_entity_from_complete_row() { CqlSession session = 
SESSION_RULE.session(); ResultSet rs = session.execute( SimpleStatement.newInstance( - "SELECT * FROM product WHERE id = ?", FLAMETHROWER.getId())); + "SELECT id, description, dimensions, now() FROM product WHERE id = ?", + FLAMETHROWER.getId())); Row row = rs.one(); assertThat(row).isNotNull(); @@ -88,6 +107,84 @@ public void should_get_entity_from_row() { assertThat(product).isEqualTo(FLAMETHROWER); } + @Test + public void should_not_get_entity_from_partial_row_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT id, description, now() FROM product WHERE id = ?", FLAMETHROWER.getId())); + Row row = rs.one(); + assertThat(row).isNotNull(); + + Throwable error = catchThrowable(() -> dao.get(row)); + assertThat(error).hasMessage("dimensions is not a column in this row"); + } + + @Test + public void should_get_entity_from_partial_row_when_lenient() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT id, dimensions FROM product2d WHERE id = ?", PRODUCT_2D_ID)); + Row row = rs.one(); + assertThat(row).isNotNull(); + + Product product = dao.getLenient(row); + assertThat(product.getId()).isEqualTo(PRODUCT_2D_ID); + assertThat(product.getDescription()).isNull(); + assertThat(product.getDimensions()).isNotNull(); + assertThat(product.getDimensions().getWidth()).isEqualTo(12); + assertThat(product.getDimensions().getHeight()).isEqualTo(34); + assertThat(product.getDimensions().getLength()).isZero(); + } + + @Test + public void should_get_entity_from_complete_udt_value() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT dimensions FROM product WHERE id = ?", FLAMETHROWER.getId())); + Row row = rs.one(); + assertThat(row).isNotNull(); + + Dimensions dimensions = dao.get(row.getUdtValue(0)); + 
assertThat(dimensions).isEqualTo(FLAMETHROWER.getDimensions()); + } + + @Test + public void should_not_get_entity_from_partial_udt_value_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT dimensions FROM product2d WHERE id = ?", PRODUCT_2D_ID)); + Row row = rs.one(); + assertThat(row).isNotNull(); + + Throwable error = catchThrowable(() -> dao.get(row.getUdtValue(0))); + assertThat(error).hasMessage("length is not a field in this UDT"); + } + + @Test + public void should_get_entity_from_partial_udt_value_when_lenient() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT dimensions FROM product2d WHERE id = ?", PRODUCT_2D_ID)); + Row row = rs.one(); + assertThat(row).isNotNull(); + + Dimensions dimensions = dao.getLenient(row.getUdtValue(0)); + assertThat(dimensions).isNotNull(); + assertThat(dimensions.getWidth()).isEqualTo(12); + assertThat(dimensions.getHeight()).isEqualTo(34); + assertThat(dimensions.getLength()).isZero(); + } + @Test public void should_get_entity_from_first_row_of_result_set() { CqlSession session = SESSION_RULE.session(); @@ -144,9 +241,19 @@ public interface InventoryMapper { @Dao @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface ProductDao { + @GetEntity Product get(Row row); + @GetEntity(lenient = true) + Product getLenient(Row row); + + @GetEntity + Dimensions get(UdtValue row); + + @GetEntity(lenient = true) + Dimensions getLenient(UdtValue row); + @GetEntity PagingIterable get(ResultSet resultSet); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java index d1a44428aba..bebb2adeaa1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/GuavaFutureProducerService.java @@ -106,7 +106,7 @@ protected Object convert( @NonNull AsyncResultSet resultSet, @Nullable EntityHelper entityHelper) { assert entityHelper != null; Row row = resultSet.one(); - return (row == null) ? null : entityHelper.get(row); + return (row == null) ? null : entityHelper.get(row, false); } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java index 9ed1666f848..cfbb5b67e67 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java @@ -20,7 +20,10 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.mapper.MapperBuilder; import com.datastax.oss.driver.api.mapper.annotations.Computed; import com.datastax.oss.driver.api.mapper.annotations.CqlName; @@ -29,6 +32,7 @@ import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; import com.datastax.oss.driver.api.mapper.annotations.DefaultNullSavingStrategy; import com.datastax.oss.driver.api.mapper.annotations.Entity; +import com.datastax.oss.driver.api.mapper.annotations.GetEntity; import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; @@ -56,6 +60,8 @@ public class ImmutableEntityIT extends InventoryITBase { @ClassRule public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + private static final UUID PRODUCT_2D_ID = UUID.randomUUID(); + private static ImmutableProductDao dao; @BeforeClass @@ -67,6 +73,18 @@ public static void setup() { SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } + UserDefinedType dimensions2d = + session + .getKeyspace() + .flatMap(ks -> session.getMetadata().getKeyspace(ks)) + .flatMap(ks -> ks.getUserDefinedType("dimensions2d")) + .orElseThrow(AssertionError::new); + session.execute( + "INSERT INTO product2d (id, description, dimensions) VALUES (?, ?, ?)", + PRODUCT_2D_ID, + "2D product", + dimensions2d.newValue(12, 34)); + InventoryMapper mapper = InventoryMapper.builder(session).build(); dao = mapper.immutableProductDao(SESSION_RULE.keyspace()); } @@ -74,25 +92,90 @@ public static void setup() { @Test public void should_insert_and_retrieve_immutable_entities() { ImmutableProduct originalProduct = - new ImmutableProduct(UUID.randomUUID(), "mock description", new Dimensions(1, 2, 3), -1); + new ImmutableProduct( + UUID.randomUUID(), "mock description", new ImmutableDimensions(1, 2, 3), -1); dao.save(originalProduct); ImmutableProduct retrievedProduct = dao.findById(originalProduct.id()); assertThat(retrievedProduct).isEqualTo(originalProduct); } + @Test + public void should_map_immutable_entity_from_complete_row() { + ImmutableProduct originalProduct = + new ImmutableProduct( + UUID.randomUUID(), "mock description", new ImmutableDimensions(1, 2, 3), -1); + dao.save(originalProduct); + Row row = + SESSION_RULE + .session() + .execute( + "SELECT id, description, dimensions, writetime(description) AS writetime, now() " + + "FROM product WHERE id = ?", + originalProduct.id()) + .one(); + ImmutableProduct retrievedProduct = dao.mapStrict(row); + assertThat(retrievedProduct.id()).isEqualTo(originalProduct.id()); + assertThat(retrievedProduct.description()).isEqualTo(originalProduct.description()); + 
assertThat(retrievedProduct.dimensions()).isEqualTo(originalProduct.dimensions()); + assertThat(retrievedProduct.writetime()).isGreaterThan(0); + } + + @Test + public void should_map_immutable_entity_from_partial_row_when_lenient() { + Row row = + SESSION_RULE + .session() + .execute("SELECT id, dimensions FROM product2d WHERE id = ?", PRODUCT_2D_ID) + .one(); + ImmutableProduct retrievedProduct = dao.mapLenient(row); + assertThat(retrievedProduct.id()).isEqualTo(PRODUCT_2D_ID); + assertThat(retrievedProduct.dimensions()).isEqualTo(new ImmutableDimensions(0, 12, 34)); + assertThat(retrievedProduct.description()).isNull(); + assertThat(retrievedProduct.writetime()).isZero(); + } + + @Test + public void should_map_immutable_entity_from_complete_udt() { + ImmutableProduct originalProduct = + new ImmutableProduct( + UUID.randomUUID(), "mock description", new ImmutableDimensions(1, 2, 3), -1); + dao.save(originalProduct); + Row row = + SESSION_RULE + .session() + .execute("SELECT dimensions FROM product WHERE id = ?", originalProduct.id()) + .one(); + assertThat(row).isNotNull(); + ImmutableDimensions retrievedDimensions = dao.mapStrict(row.getUdtValue(0)); + assertThat(retrievedDimensions).isEqualTo(originalProduct.dimensions()); + } + + @Test + public void should_map_immutable_entity_from_partial_udt_when_lenient() { + Row row = + SESSION_RULE + .session() + .execute("SELECT dimensions FROM product2d WHERE id = ?", PRODUCT_2D_ID) + .one(); + assertThat(row).isNotNull(); + ImmutableDimensions retrievedDimensions = dao.mapLenient(row.getUdtValue(0)); + assertThat(retrievedDimensions).isEqualTo(new ImmutableDimensions(0, 12, 34)); + } + @Entity @CqlName("product") @PropertyStrategy(getterStyle = FLUENT, mutable = false) public static class ImmutableProduct { @PartitionKey private final UUID id; private final String description; - private final Dimensions dimensions; + private final ImmutableDimensions dimensions; @Computed("writetime(description)") private final long 
writetime; - public ImmutableProduct(UUID id, String description, Dimensions dimensions, long writetime) { + public ImmutableProduct( + UUID id, String description, ImmutableDimensions dimensions, long writetime) { this.id = id; this.description = description; this.dimensions = dimensions; @@ -107,7 +190,7 @@ public String description() { return description; } - public Dimensions dimensions() { + public ImmutableDimensions dimensions() { return dimensions; } @@ -135,6 +218,55 @@ public int hashCode() { } } + @Entity + @PropertyStrategy(mutable = false) + public static class ImmutableDimensions { + + private final int length; + private final int width; + private final int height; + + public ImmutableDimensions(int length, int width, int height) { + this.length = length; + this.width = width; + this.height = height; + } + + public int getLength() { + return length; + } + + public int getWidth() { + return width; + } + + public int getHeight() { + return height; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } else if (other instanceof ImmutableDimensions) { + ImmutableDimensions that = (ImmutableDimensions) other; + return this.length == that.length && this.width == that.width && this.height == that.height; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(length, width, height); + } + + @Override + public String toString() { + return "Dimensions{length=" + length + ", width=" + width + ", height=" + height + '}'; + } + } + @Mapper public interface InventoryMapper { static MapperBuilder builder(CqlSession session) { @@ -153,5 +285,17 @@ public interface ImmutableProductDao { @Insert void save(ImmutableProduct product); + + @GetEntity + ImmutableProduct mapStrict(Row row); + + @GetEntity(lenient = true) + ImmutableProduct mapLenient(Row row); + + @GetEntity + ImmutableDimensions mapStrict(UdtValue udt); + + @GetEntity(lenient = true) + ImmutableDimensions 
mapLenient(UdtValue udt); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java index 2b094c4dbe8..3a0435c5da1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java @@ -62,6 +62,8 @@ protected static List createStatements(CcmRule ccmRule) { .add( "CREATE TYPE dimensions(length int, width int, height int)", "CREATE TABLE product(id uuid PRIMARY KEY, description text, dimensions frozen)", + "CREATE TYPE dimensions2d(width int, height int)", + "CREATE TABLE product2d(id uuid PRIMARY KEY, description text, dimensions frozen)", "CREATE TABLE product_without_id(id uuid, clustering int, description text, " + "PRIMARY KEY((id), clustering))", "CREATE TABLE product_sale(id uuid, day text, ts uuid, customer_id int, price " diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java index 6bcbde6ffff..b7b8742e53c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java @@ -16,11 +16,17 @@ package com.datastax.oss.driver.mapper; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import 
com.datastax.oss.driver.api.core.data.GettableByName; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; @@ -30,6 +36,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Select; +import com.datastax.oss.driver.api.mapper.annotations.SetEntity; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; @@ -43,6 +50,7 @@ import java.util.Objects; import java.util.Set; import java.util.UUID; +import org.assertj.core.util.Lists; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -63,21 +71,24 @@ public class NestedUdtIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); - private static UUID CONTAINER_ID = UUID.randomUUID(); + private static final UUID CONTAINER_ID = UUID.randomUUID(); + private static final Container SAMPLE_CONTAINER = new Container( CONTAINER_ID, - ImmutableList.of(new Type1("a"), new Type1("b")), + ImmutableList.of(new Type1("a1", "a2"), new Type1("b1", "b2")), ImmutableMap.of( "cd", - ImmutableList.of(new Type1("c"), new Type1("d")), + ImmutableList.of(new Type1("c1", "c2"), new Type1("d1", "d2")), "ef", - ImmutableList.of(new Type1("e"), new Type1("f"))), + ImmutableList.of(new Type1("e1", "e2"), new Type1("f1", "f2"))), ImmutableMap.of( - new Type1("12"), - ImmutableSet.of(ImmutableList.of(new Type2(1)), ImmutableList.of(new Type2(2)))), + new Type1("12", "34"), + ImmutableSet.of( + ImmutableList.of(new Type2(1, 2)), 
ImmutableList.of(new Type2(3, 4)))), ImmutableMap.of( - new Type1("12"), ImmutableMap.of("12", ImmutableSet.of(new Type2(1), new Type2(2))))); + new Type1("12", "34"), + ImmutableMap.of("12", ImmutableSet.of(new Type2(1, 2), new Type2(3, 4))))); private static final Container SAMPLE_CONTAINER_NULL_LIST = new Container( @@ -85,14 +96,16 @@ public class NestedUdtIT { null, ImmutableMap.of( "cd", - ImmutableList.of(new Type1("c"), new Type1("d")), + ImmutableList.of(new Type1("c1", "c2"), new Type1("d1", "d2")), "ef", - ImmutableList.of(new Type1("e"), new Type1("f"))), + ImmutableList.of(new Type1("e1", "e2"), new Type1("f1", "f2"))), ImmutableMap.of( - new Type1("12"), - ImmutableSet.of(ImmutableList.of(new Type2(1)), ImmutableList.of(new Type2(2)))), + new Type1("12", "34"), + ImmutableSet.of( + ImmutableList.of(new Type2(1, 2)), ImmutableList.of(new Type2(3, 4)))), ImmutableMap.of( - new Type1("12"), ImmutableMap.of("12", ImmutableSet.of(new Type2(1), new Type2(2))))); + new Type1("12", "34"), + ImmutableMap.of("12", ImmutableSet.of(new Type2(1, 2), new Type2(3, 4))))); private static ContainerDao containerDao; @@ -102,18 +115,39 @@ public static void setup() { for (String query : ImmutableList.of( - "CREATE TYPE type1(s text)", - "CREATE TYPE type2(i int)", + "CREATE TYPE type1(s1 text, s2 text)", + "CREATE TYPE type2(i1 int, i2 int)", + "CREATE TYPE type1_partial(s1 text)", + "CREATE TYPE type2_partial(i1 int)", "CREATE TABLE container(id uuid PRIMARY KEY, " + "list frozen>, " + "map1 frozen>>, " + "map2 frozen>>>," + "map3 frozen>>>" + + ")", + "CREATE TABLE container_partial(id uuid PRIMARY KEY, " + + "list frozen>, " + + "map1 frozen>>, " + + "map2 frozen>>>," + + "map3 frozen>>>" + ")")) { session.execute( SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } + UserDefinedType type1Partial = + session + .getKeyspace() + .flatMap(ks -> session.getMetadata().getKeyspace(ks)) + .flatMap(ks -> 
ks.getUserDefinedType("type1_partial")) + .orElseThrow(AssertionError::new); + + session.execute( + SimpleStatement.newInstance( + "INSERT INTO container_partial (id, list) VALUES (?, ?)", + SAMPLE_CONTAINER.getId(), + Lists.newArrayList(type1Partial.newValue("a"), type1Partial.newValue("b")))); + UdtsMapper udtsMapper = new NestedUdtIT_UdtsMapperBuilder(session).build(); containerDao = udtsMapper.containerDao(SESSION_RULE.keyspace()); } @@ -165,6 +199,71 @@ public void should_insert_set_to_null_udts() { assertThat(retrievedEntitySecond.list).isEmpty(); } + @Test + public void should_get_entity_from_complete_row() { + CqlSession session = SESSION_RULE.session(); + containerDao.save(SAMPLE_CONTAINER); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT * FROM container WHERE id = ?", SAMPLE_CONTAINER.getId())); + Row row = rs.one(); + assertThat(row).isNotNull(); + Container actual = containerDao.get(row); + assertThat(actual).isEqualTo(SAMPLE_CONTAINER); + } + + @Test + public void should_not_get_entity_from_partial_row_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + containerDao.save(SAMPLE_CONTAINER); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT id FROM container WHERE id = ?", SAMPLE_CONTAINER.getId())); + Row row = rs.one(); + assertThat(row).isNotNull(); + Throwable error = catchThrowable(() -> containerDao.get(row)); + assertThat(error).hasMessage("list is not a column in this row"); + } + + @Test + public void should_get_entity_from_partial_row_when_lenient() { + CqlSession session = SESSION_RULE.session(); + ResultSet rs = + session.execute( + SimpleStatement.newInstance( + "SELECT id, list FROM container_partial WHERE id = ?", SAMPLE_CONTAINER.getId())); + Row row = rs.one(); + assertThat(row).isNotNull(); + Container actual = containerDao.getLenient(row); + assertThat(actual.getId()).isEqualTo(SAMPLE_CONTAINER.getId()); + assertThat(actual.getList()).containsExactly(new 
Type1("a", null), new Type1("b", null)); + assertThat(actual.getMap1()).isNull(); + assertThat(actual.getMap2()).isNull(); + assertThat(actual.getMap3()).isNull(); + } + + @Test + public void should_set_entity_on_partial_statement_builder_when_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = + session.prepare("INSERT INTO container_partial (id, list) VALUES (?, ?)"); + BoundStatementBuilder builder = ps.boundStatementBuilder(); + containerDao.setLenient(SAMPLE_CONTAINER, builder); + assertThat(builder.getUuid(0)).isEqualTo(SAMPLE_CONTAINER.getId()); + assertThat(builder.getList(1, UdtValue.class)).hasSize(2); + } + + @Test + public void should_not_set_entity_on_partial_statement_builder_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO container (id, list) VALUES (?, ?)"); + Throwable error = + catchThrowable(() -> containerDao.set(SAMPLE_CONTAINER, ps.boundStatementBuilder())); + assertThat(error).hasMessage("map1 is not a variable in this bound statement"); + } + @Mapper public interface UdtsMapper { @DaoFactory @@ -187,7 +286,16 @@ public interface ContainerDao { void saveSetToNull(Container container); @GetEntity - Container get(GettableByName source); + Container get(Row source); + + @GetEntity(lenient = true) + Container getLenient(Row source); + + @SetEntity + void set(Container container, BoundStatementBuilder target); + + @SetEntity(lenient = true) + void setLenient(Container container, BoundStatementBuilder target); } @Entity @@ -278,73 +386,93 @@ public int hashCode() { @Entity public static class Type1 { - private String s; + private String s1; + private String s2; public Type1() {} - public Type1(String s) { - this.s = s; + public Type1(String s1, String s2) { + this.s1 = s1; + this.s2 = s2; } - public String getS() { - return s; + public String getS1() { + return s1; } - public void setS(String s) { - this.s = s; + public void setS1(String s1) { 
+ this.s1 = s1; + } + + public String getS2() { + return s2; + } + + public void setS2(String s2) { + this.s2 = s2; } @Override - public boolean equals(Object other) { - if (other == this) { + public boolean equals(Object o) { + if (this == o) { return true; - } else if (other instanceof Type1) { - Type1 that = (Type1) other; - return Objects.equals(this.s, that.s); - } else { + } + if (!(o instanceof Type1)) { return false; } + Type1 type1 = (Type1) o; + return Objects.equals(s1, type1.s1) && Objects.equals(s2, type1.s2); } @Override public int hashCode() { - return s == null ? 0 : s.hashCode(); + return Objects.hash(s1, s2); } } @Entity public static class Type2 { - private int i; + private int i1; + private int i2; public Type2() {} - public Type2(int i) { - this.i = i; + public Type2(int i1, int i2) { + this.i1 = i1; + this.i2 = i2; } - public int getI() { - return i; + public int getI1() { + return i1; } - public void setI(int i) { - this.i = i; + public void setI1(int i1) { + this.i1 = i1; + } + + public int getI2() { + return i2; + } + + public void setI2(int i2) { + this.i2 = i2; } @Override - public boolean equals(Object other) { - if (other == this) { + public boolean equals(Object o) { + if (this == o) { return true; - } else if (other instanceof Type2) { - Type2 that = (Type2) other; - return this.i == that.i; - } else { + } + if (!(o instanceof Type2)) { return false; } + Type2 type2 = (Type2) o; + return i1 == type2.i1 && i2 == type2.i2; } @Override public int hashCode() { - return i; + return Objects.hash(i1, i2); } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java index 1fb92637b22..3fc54d2826d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryProviderIT.java @@ -168,7 +168,9 @@ public 
PagingIterable findSlice(int id, Integer month, Integer da boundStatementBuilder = boundStatementBuilder.setInt("day", day); } } - return session.execute(boundStatementBuilder.build()).map(sensorReadingHelper::get); + return session + .execute(boundStatementBuilder.build()) + .map(row -> sensorReadingHelper.get(row, false)); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java index 32be286325f..f6fe6c6e25d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SetEntityIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.catchThrowable; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; @@ -52,8 +53,7 @@ public class SetEntityIT extends InventoryITBase { public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); private static ProductDao dao; - - private static InventoryMapper inventoryMapper; + private static UserDefinedType dimensions2d; @BeforeClass public static void setup() { @@ -64,8 +64,14 @@ public static void setup() { SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); } - inventoryMapper = new SetEntityIT_InventoryMapperBuilder(session).build(); + InventoryMapper inventoryMapper = new SetEntityIT_InventoryMapperBuilder(session).build(); dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); + dimensions2d = + session + .getKeyspace() + .flatMap(ks -> session.getMetadata().getKeyspace(ks)) + .flatMap(ks -> ks.getUserDefinedType("dimensions2d")) + .orElseThrow(AssertionError::new); } @Test @@ -144,10 +150,74 @@ public void should_set_entity_on_udt_value() { 
assertThat(udtValue.getInt("height")).isEqualTo(dimensions.getHeight()); } + @Test + public void should_set_entity_on_partial_statement_when_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product (id, description) VALUES (?, ?)"); + BoundStatement bound = dao.setLenient(FLAMETHROWER, ps.bind()); + assertThat(bound.getUuid(0)).isEqualTo(FLAMETHROWER.getId()); + assertThat(bound.getString(1)).isEqualTo(FLAMETHROWER.getDescription()); + } + + @Test + public void should_set_entity_on_partial_statement_builder_when_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product (id, description) VALUES (?, ?)"); + BoundStatementBuilder builder = ps.boundStatementBuilder(); + dao.setLenient(FLAMETHROWER, builder); + assertThat(builder.getUuid(0)).isEqualTo(FLAMETHROWER.getId()); + assertThat(builder.getString(1)).isEqualTo(FLAMETHROWER.getDescription()); + } + + @Test + @SuppressWarnings("ResultOfMethodCallIgnored") + public void should_set_entity_on_partial_udt_when_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product2d (id, dimensions) VALUES (?, ?)"); + BoundStatementBuilder builder = ps.boundStatementBuilder(); + builder.setUuid(0, FLAMETHROWER.getId()); + UdtValue dimensionsUdt = dimensions2d.newValue(); + Dimensions dimensions = new Dimensions(12, 34, 56); + dao.setLenient(dimensions, dimensionsUdt); + builder.setUdtValue(1, dimensionsUdt); + assertThat(dimensionsUdt.getInt("width")).isEqualTo(34); + assertThat(dimensionsUdt.getInt("height")).isEqualTo(56); + } + + @Test + public void should_not_set_entity_on_partial_statement_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product (id, description) VALUES (?, ?)"); + Throwable error = catchThrowable(() -> dao.set(FLAMETHROWER, ps.bind())); + 
assertThat(error).hasMessage("dimensions is not a variable in this bound statement"); + } + + @Test + public void should_not_set_entity_on_partial_statement_builder_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product (id, description) VALUES (?, ?)"); + Throwable error = catchThrowable(() -> dao.set(ps.boundStatementBuilder(), FLAMETHROWER)); + assertThat(error).hasMessage("dimensions is not a variable in this bound statement"); + } + + @Test + @SuppressWarnings("ResultOfMethodCallIgnored") + public void should_not_set_entity_on_partial_udt_when_not_lenient() { + CqlSession session = SESSION_RULE.session(); + PreparedStatement ps = session.prepare("INSERT INTO product2d (id, dimensions) VALUES (?, ?)"); + BoundStatementBuilder builder = ps.boundStatementBuilder(); + builder.setUuid(0, FLAMETHROWER.getId()); + UdtValue dimensionsUdt = dimensions2d.newValue(); + Dimensions dimensions = new Dimensions(12, 34, 56); + Throwable error = catchThrowable(() -> dao.set(dimensions, dimensionsUdt)); + assertThat(error).hasMessage("length is not a field in this UDT"); + } + private static void assertMatches(GettableByName data, Product entity) { assertThat(data.getUuid("id")).isEqualTo(entity.getId()); assertThat(data.getString("description")).isEqualTo(entity.getDescription()); UdtValue udtValue = data.getUdtValue("dimensions"); + assertThat(udtValue).isNotNull(); assertThat(udtValue.getType().getName().asInternal()).isEqualTo("dimensions"); assertThat(udtValue.getInt("length")).isEqualTo(entity.getDimensions().getLength()); assertThat(udtValue.getInt("width")).isEqualTo(entity.getDimensions().getWidth()); @@ -177,5 +247,14 @@ public interface ProductDao { @SetEntity void set(Dimensions dimensions, UdtValue udtValue); + + @SetEntity(lenient = true) + BoundStatement setLenient(Product product, BoundStatement boundStatement); + + @SetEntity(lenient = true) + void setLenient(Product product, 
BoundStatementBuilder builder); + + @SetEntity(lenient = true) + void setLenient(Dimensions dimensions, UdtValue udtValue); } } diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 207f84e136d..e495f964557 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -23,6 +23,54 @@ product.setDescription(row.get("description", String.class)); It does not perform a query. Instead, those methods are intended for cases where you already have a query result, and just need the conversion logic. +### Lenient mode + +By default, the mapper operates in "strict" mode: the source row must contain a matching column for +every property in the entity definition, *including computed ones*. If such a column is not found, +an error will be thrown. + +Starting with driver 4.12.0, the `@GetEntity` annotation has a new `lenient` attribute. If this +attribute is explicitly set to `true`, the mapper will operate in "lenient" mode: all entity +properties that have a matching column in the source row will be set. However, *unmatched properties +will be left untouched*. + +As an example to illustrate how lenient mode works, assume that we have the following entity and +DAO: + +```java +@Entity class Product { + + @PartitionKey int id; + String description; + float price; + // other members omitted +} + +interface ProductDao { + + @GetEntity(lenient = true) + Product getLenient(Row row); + +} +``` + +Then the following code would be possible: + +```java +// row does not contain the price column +Row row = session.execute("SELECT id, description FROM product").one(); +Product product = productDao.getLenient(row); +assert product.price == 0.0; +``` + +Since no `price` column was found in the source row, `product.price` wasn't set and was left to its +default value (0.0). Without lenient mode, the code above would throw an error instead. 
+ +Lenient mode allows to achieve the equivalent of driver 3.x [manual mapping +feature](https://docs.datastax.com/en/developer/java-driver/3.10/manual/object_mapper/using/#manual-mapping). + +**Beware that lenient mode may result in incomplete entities being produced.** + ### Parameters The method must have a single parameter. The following types are allowed: diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 54925fac574..3d887b34a91 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -21,6 +21,55 @@ boundStatement = boundStatement.set("description", product.getDescription(), Str It does not perform a query. Instead, those methods are intended for cases where you will execute the query yourself, and just need the conversion logic. +### Lenient mode + +By default, the mapper operates in "strict" mode: the target statement must contain a matching +column for every property in the entity definition, *except computed ones*. If such a column is not +found, an error will be thrown. + +Starting with driver 4.12.0, the `@SetEntity` annotation has a new `lenient` attribute. If this +attribute is explicitly set to `true`, the mapper will operate in "lenient" mode: all entity +properties that have a matching column in the target statement will be set. However, *unmatched +properties will be left untouched*. 
+ +As an example to illustrate how lenient mode works, assume that we have the following entity and +DAO: + +```java +@Entity class Product { + + @PartitionKey int id; + String description; + float price; + // other members omitted +} + +interface ProductDao { + + @SetEntity(lenient = true) + BoundStatement setLenient(Product product, BoundStatement stmt); + +} +``` + +Then the following code would be possible: + +```java +Product product = new Product(1, "scented candle", 12.99); +// stmt does not contain the price column +BoundStatement stmt = session.prepare("INSERT INTO product (id, description) VALUES (?, ?)").bind(); +stmt = productDao.setLenient(product, stmt); +``` + +Since no `price` column was found in the target statement, `product.price` wasn't read (if the +statement is executed, the resulting row in the database will have a price of zero). Without lenient +mode, the code above would throw an error instead. + +Lenient mode allows to achieve the equivalent of driver 3.x [manual mapping +feature](https://docs.datastax.com/en/developer/java-driver/3.10/manual/object_mapper/using/#manual-mapping). 
+ +**Beware that lenient mode may result in incomplete rows being inserted in the database.** + ### Parameters The method must have two parameters: one is the entity instance, the other must be a subtype of diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java index 1d4d52fe940..04205891f61 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoGetEntityMethodGenerator.java @@ -54,6 +54,8 @@ private enum Transformation { STREAM, } + private final boolean lenient; + public DaoGetEntityMethodGenerator( ExecutableElement methodElement, Map typeParameters, @@ -61,6 +63,7 @@ public DaoGetEntityMethodGenerator( DaoImplementationSharedCode enclosingClass, ProcessorContext context) { super(methodElement, typeParameters, processedType, enclosingClass, context); + lenient = methodElement.getAnnotation(GetEntity.class).lenient(); } @Override @@ -170,23 +173,26 @@ public Optional generate() { GeneratedCodePatterns.override(methodElement, typeParameters); switch (transformation) { case NONE: - overridingMethodBuilder.addStatement("return $L.get($L)", helperFieldName, parameterName); + overridingMethodBuilder.addStatement( + "return $L.get($L, $L)", helperFieldName, parameterName, lenient); break; case ONE: overridingMethodBuilder .addStatement("$T row = $L.one()", Row.class, parameterName) - .addStatement("return (row == null) ? null : $L.get(row)", helperFieldName); + .addStatement( + "return (row == null) ? 
null : $L.get(row, $L)", helperFieldName, lenient); break; case MAP: overridingMethodBuilder.addStatement( - "return $L.map($L::get)", parameterName, helperFieldName); + "return $L.map(row -> $L.get(row, $L))", parameterName, helperFieldName, lenient); break; case STREAM: overridingMethodBuilder.addStatement( - "return $T.stream($L.map($L::get).spliterator(), false)", + "return $T.stream($L.map(row -> $L.get(row, $L)).spliterator(), false)", StreamSupport.class, parameterName, - helperFieldName); + helperFieldName, + lenient); break; } return Optional.of(overridingMethodBuilder.build()); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 945cfeda370..8359c3ce505 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -159,7 +159,7 @@ public Optional generate() { Insert.class, Insert::nullSavingStrategy, methodElement, enclosingClass); createStatementBlock.addStatement( - "$1L.set($2L, boundStatementBuilder, $3T.$4L)", + "$1L.set($2L, boundStatementBuilder, $3T.$4L, false)", helperFieldName, entityParameterName, NullSavingStrategy.class, diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java index 71b40976a90..d3e4a69aaa6 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSetEntityMethodGenerator.java @@ -35,6 
+35,7 @@ public class DaoSetEntityMethodGenerator extends DaoMethodGenerator { private final NullSavingStrategyValidation nullSavingStrategyValidation; + private final boolean lenient; public DaoSetEntityMethodGenerator( ExecutableElement methodElement, @@ -44,6 +45,7 @@ public DaoSetEntityMethodGenerator( ProcessorContext context) { super(methodElement, typeParameters, processedType, enclosingClass, context); nullSavingStrategyValidation = new NullSavingStrategyValidation(context); + lenient = methodElement.getAnnotation(SetEntity.class).lenient(); } @Override @@ -130,13 +132,14 @@ public Optional generate() { return Optional.of( GeneratedCodePatterns.override(methodElement, typeParameters) .addStatement( - "$1L$2L.set($3L, $4L, $5T.$6L)", + "$1L$2L.set($3L, $4L, $5T.$6L, $7L)", isVoid ? "" : "return ", helperFieldName, entityParameterName, targetParameterName, NullSavingStrategy.class, - nullSavingStrategy) + nullSavingStrategy, + lenient) .build()); } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 288778ee9c8..be9c53a7021 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -152,7 +152,7 @@ public Optional generate() { // We generated an update by primary key (see maybeAddWhereClause), all entity properties are // present as placeholders. 
createStatementBlock.addStatement( - "$1L.set($2L, boundStatementBuilder, $3T.$4L)", + "$1L.set($2L, boundStatementBuilder, $3T.$4L, false)", helperFieldName, entityParameterName, NullSavingStrategy.class, @@ -171,7 +171,8 @@ public Optional generate() { "boundStatementBuilder", createStatementBlock, enclosingClass, - true); + true, + false); } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java index f0a84517c63..adcf8dd2634 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java @@ -55,6 +55,7 @@ public Optional generate() { .addModifiers(Modifier.PUBLIC) .addParameter( ParameterSpec.builder(ClassName.get(GettableByName.class), "source").build()) + .addParameter(ParameterSpec.builder(TypeName.BOOLEAN, "lenient").build()) .returns(entityDefinition.getClassName()); TypeName returnType = entityDefinition.getClassName(); @@ -75,20 +76,57 @@ public Optional generate() { String setterName = property.getSetterName(); String propertyValueName = enclosingClass.getNameIndex().uniqueField("propertyValue"); propertyValueNames.add(propertyValueName); - getBuilder.addCode("\n"); + if (type instanceof PropertyType.Simple) { TypeName typeName = ((PropertyType.Simple) type).typeName; String primitiveAccessor = GeneratedCodePatterns.PRIMITIVE_ACCESSORS.get(typeName); if (primitiveAccessor != null) { // Primitive type: use dedicated getter, since it is optimized to avoid boxing // int propertyValue1 = source.getInt("length"); - getBuilder.addStatement( - "$T $L = source.get$L($L)", typeName, propertyValueName, primitiveAccessor, cqlName); + if (mutable) { + 
getBuilder + .beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName) + .addStatement( + "$T $L = source.get$L($L)", + typeName, + propertyValueName, + primitiveAccessor, + cqlName) + .addStatement("$L.$L($L)", resultName, setterName, propertyValueName) + .endControlFlow(); + } else { + getBuilder.addStatement( + "$T $L = !lenient || hasProperty(source, $L) ? source.get$L($L) : $L", + typeName, + propertyValueName, + cqlName, + primitiveAccessor, + cqlName, + typeName.equals(TypeName.BOOLEAN) ? false : 0); + } } else if (typeName instanceof ClassName) { // Unparameterized class: use the generic, class-based getter: // UUID propertyValue1 = source.get("id", UUID.class); - getBuilder.addStatement( - "$T $L = source.get($L, $T.class)", typeName, propertyValueName, cqlName, typeName); + if (mutable) { + getBuilder + .beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName) + .addStatement( + "$T $L = source.get($L, $T.class)", + typeName, + propertyValueName, + cqlName, + typeName) + .addStatement("$L.$L($L)", resultName, setterName, propertyValueName) + .endControlFlow(); + } else { + getBuilder.addStatement( + "$T $L = !lenient || hasProperty(source, $L) ? source.get($L, $T.class) : null", + typeName, + propertyValueName, + cqlName, + cqlName, + typeName); + } } else { // Parameterized type: create a constant and use the GenericType-based getter: // private static final GenericType> GENERIC_TYPE = @@ -97,37 +135,56 @@ public Optional generate() { // Note that lists, sets and maps of unparameterized classes also fall under that // category. Their getter creates a GenericType under the hood, so there's no performance // advantage in calling them instead of the generic get(). 
- getBuilder.addStatement( - "$T $L = source.get($L, $L)", - typeName, - propertyValueName, - cqlName, - enclosingClass.addGenericTypeConstant(typeName)); + if (mutable) { + getBuilder + .beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName) + .addStatement( + "$T $L = source.get($L, $L)", + typeName, + propertyValueName, + cqlName, + enclosingClass.addGenericTypeConstant(typeName)) + .addStatement("$L.$L($L)", resultName, setterName, propertyValueName) + .endControlFlow(); + } else { + getBuilder.addStatement( + "$T $L = !lenient || hasProperty(source, $L) ? source.get($L, $L) : null", + typeName, + propertyValueName, + cqlName, + cqlName, + enclosingClass.addGenericTypeConstant(typeName)); + } } } else if (type instanceof PropertyType.SingleEntity) { ClassName entityClass = ((PropertyType.SingleEntity) type).entityName; // Other entity class: the CQL column is a mapped UDT: // Dimensions propertyValue1; // UdtValue udtValue1 = source.getUdtValue("dimensions"); - // if (udtValue1 == null) { - // propertyValue1 = null; - // } else { - // propertyValue1 = dimensionsHelper.get(udtValue1); - // } - getBuilder.addStatement("$T $L", entityClass, propertyValueName); - + // propertyValue1 = udtValue1 == null ? 
null : dimensionsHelper.get(udtValue1); String udtValueName = enclosingClass.getNameIndex().uniqueField("udtValue"); + if (mutable) { + getBuilder.beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName); + getBuilder.addStatement("$T $L", entityClass, propertyValueName); + } else { + getBuilder.addStatement("$T $L = null", entityClass, propertyValueName); + getBuilder.beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName); + } getBuilder.addStatement( "$T $L = source.getUdtValue($L)", UdtValue.class, udtValueName, cqlName); - getBuilder - .beginControlFlow("if ($L == null)", udtValueName) - .addStatement("$L = null", propertyValueName) - .nextControlFlow("else"); - // Get underlying udt object and set it on return type String childHelper = enclosingClass.addEntityHelperField(entityClass); - getBuilder.addStatement("$L = $L.get($L)", propertyValueName, childHelper, udtValueName); + getBuilder.addStatement( + "$L = $L == null ? null : $L.get($L, lenient)", + propertyValueName, + udtValueName, + childHelper, + udtValueName); + + if (mutable) { + getBuilder.addStatement("$L.$L($L)", resultName, setterName, propertyValueName); + } getBuilder.endControlFlow(); } else { // Collection of other entity class(es): the CQL column is a collection of mapped UDTs @@ -140,7 +197,13 @@ public Optional generate() { // traverse rawCollection1 and convert all UdtValue into entity classes, recursing // into nested collections if necessary // } - getBuilder.addStatement("$T $L", type.asTypeName(), propertyValueName); + if (mutable) { + getBuilder.beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName); + getBuilder.addStatement("$T $L", type.asTypeName(), propertyValueName); + } else { + getBuilder.addStatement("$T $L = null", type.asTypeName(), propertyValueName); + getBuilder.beginControlFlow("if (!lenient || hasProperty(source, $L))", cqlName); + } String rawCollectionName = enclosingClass.getNameIndex().uniqueField("rawCollection"); 
TypeName rawCollectionType = type.asRawTypeName(); @@ -157,10 +220,11 @@ public Optional generate() { .nextControlFlow("else"); convertUdtsIntoEntities(rawCollectionName, propertyValueName, type, getBuilder); getBuilder.endControlFlow(); - } - if (mutable) { - getBuilder.addStatement("$L.$L($L)", resultName, setterName, propertyValueName); + if (mutable) { + getBuilder.addStatement("$L.$L($L)", resultName, setterName, propertyValueName); + } + getBuilder.endControlFlow(); } } @@ -197,7 +261,8 @@ private void convertUdtsIntoEntities( if (type instanceof PropertyType.SingleEntity) { ClassName entityClass = ((PropertyType.SingleEntity) type).entityName; String entityHelperName = enclosingClass.addEntityHelperField(entityClass); - getBuilder.addStatement("$L = $L.get($L)", mappedObjectName, entityHelperName, rawObjectName); + getBuilder.addStatement( + "$L = $L.get($L, lenient)", mappedObjectName, entityHelperName, rawObjectName); } else if (type instanceof PropertyType.EntityList) { getBuilder.addStatement( "$L = $T.newArrayListWithExpectedSize($L.size())", diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java index 5e3042c10ac..482b4b5a8e5 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java @@ -25,6 +25,7 @@ import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterSpec; import com.squareup.javapoet.ParameterizedTypeName; +import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeVariableName; import java.util.Optional; import javax.lang.model.element.Modifier; @@ -43,8 +44,6 @@ public EntityHelperSetMethodGenerator( 
@Override public Optional generate() { - // TODO add an ignore mechanism? this fails if a property is missing on the target. - // The method's type variable: > TypeVariableName settableT = TypeVariableName.get("SettableT"); settableT = @@ -60,10 +59,15 @@ public Optional generate() { .addParameter(ParameterSpec.builder(settableT, "target").build()) .addParameter( ParameterSpec.builder(NullSavingStrategy.class, "nullSavingStrategy").build()) + .addParameter(ParameterSpec.builder(TypeName.BOOLEAN, "lenient").build()) .returns(settableT); CodeBlock.Builder injectBodyBuilder = CodeBlock.builder(); for (PropertyDefinition property : entityDefinition.getAllColumns()) { + + injectBodyBuilder.beginControlFlow( + "if (!lenient || hasProperty(target, $L))", property.getCqlName()); + GeneratedCodePatterns.setValue( property.getCqlName(), property.getType(), @@ -71,7 +75,10 @@ public Optional generate() { "target", injectBodyBuilder, enclosingClass, + true, true); + + injectBodyBuilder.endControlFlow(); } injectBodyBuilder.add("\n").addStatement("return target"); return Optional.of(injectBuilder.addCode(injectBodyBuilder.build()).build()); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java index 48574a48721..2cbc7027b60 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java @@ -186,7 +186,8 @@ public static void bindParameters( "boundStatementBuilder", methodBuilder, enclosingClass, - useNullSavingStrategy); + useNullSavingStrategy, + false); } } @@ -214,7 +215,8 @@ public static void setValue( String targetName, CodeBlock.Builder methodBuilder, 
BindableHandlingSharedCode enclosingClass) { - setValue(cqlName, type, valueExtractor, targetName, methodBuilder, enclosingClass, false); + setValue( + cqlName, type, valueExtractor, targetName, methodBuilder, enclosingClass, false, false); } public static void setValue( @@ -224,8 +226,8 @@ public static void setValue( String targetName, CodeBlock.Builder methodBuilder, BindableHandlingSharedCode enclosingClass, - boolean useNullSavingStrategy) { - methodBuilder.add("\n"); + boolean useNullSavingStrategy, + boolean useLeniency) { if (type instanceof PropertyType.Simple) { TypeName typeName = ((PropertyType.Simple) type).typeName; @@ -293,12 +295,13 @@ public static void setValue( // driver doesn't have the ability to send partial UDT, unset values values will be // serialized to null - set NullSavingStrategy.DO_NOT_SET explicitly .addStatement( - "$L.set($L, $L, $T.$L)", + "$L.set($L, $L, $T.$L, $L)", childHelper, valueName, udtValueName, NullSavingStrategy.class, - NullSavingStrategy.DO_NOT_SET) + NullSavingStrategy.DO_NOT_SET, + useLeniency ? "lenient" : false) .addStatement("$1L = $1L.setUdtValue($2L, $3L)", targetName, cqlName, udtValueName); if (useNullSavingStrategy) { methodBuilder.nextControlFlow( @@ -331,7 +334,8 @@ public static void setValue( currentCqlType, udtTypesBuilder, conversionCodeBuilder, - enclosingClass); + enclosingClass, + useLeniency); methodBuilder .add(udtTypesBuilder.build()) @@ -439,7 +443,8 @@ public static void setValue( targetName, methodBuilder, enclosingClass, - useNullSavingStrategy); + useNullSavingStrategy, + false); } /** @@ -455,6 +460,7 @@ public static void setValue( * variables that extract the required {@link UserDefinedType} instances from the target * container. * @param conversionBuilder the code block to generate the conversion code into. + * @param useLeniency whether the 'lenient' boolean variable is in scope. 
*/ private static void convertEntitiesIntoUdts( String mappedObjectName, @@ -463,7 +469,8 @@ private static void convertEntitiesIntoUdts( CodeBlock currentCqlType, CodeBlock.Builder udtTypesBuilder, CodeBlock.Builder conversionBuilder, - BindableHandlingSharedCode enclosingClass) { + BindableHandlingSharedCode enclosingClass, + boolean useLeniency) { if (type instanceof PropertyType.SingleEntity) { ClassName entityClass = ((PropertyType.SingleEntity) type).entityName; @@ -480,12 +487,13 @@ private static void convertEntitiesIntoUdts( // driver doesn't have the ability to send partial UDT, unset values values will be // serialized to null - set NullSavingStrategy.DO_NOT_SET explicitly .addStatement( - "$L.set($L, $L, $T.$L)", + "$L.set($L, $L, $T.$L, $L)", entityHelperName, mappedObjectName, rawObjectName, NullSavingStrategy.class, - NullSavingStrategy.DO_NOT_SET); + NullSavingStrategy.DO_NOT_SET, + useLeniency ? "lenient" : false); } else if (type instanceof PropertyType.EntityList) { TypeName rawCollectionType = type.asRawTypeName(); conversionBuilder.addStatement( @@ -506,7 +514,8 @@ private static void convertEntitiesIntoUdts( CodeBlock.of("(($T) $L).getElementType()", ListType.class, currentCqlType), udtTypesBuilder, conversionBuilder, - enclosingClass); + enclosingClass, + useLeniency); conversionBuilder.addStatement("$L.add($L)", rawObjectName, rawElementName).endControlFlow(); } else if (type instanceof PropertyType.EntitySet) { TypeName rawCollectionType = type.asRawTypeName(); @@ -528,7 +537,8 @@ private static void convertEntitiesIntoUdts( CodeBlock.of("(($T) $L).getElementType()", SetType.class, currentCqlType), udtTypesBuilder, conversionBuilder, - enclosingClass); + enclosingClass, + useLeniency); conversionBuilder.addStatement("$L.add($L)", rawObjectName, rawElementName).endControlFlow(); } else if (type instanceof PropertyType.EntityMap) { TypeName rawCollectionType = type.asRawTypeName(); @@ -562,7 +572,8 @@ private static void 
convertEntitiesIntoUdts( CodeBlock.of("(($T) $L).getKeyType()", MapType.class, currentCqlType), udtTypesBuilder, conversionBuilder, - enclosingClass); + enclosingClass, + useLeniency); } String mappedValueName = CodeBlock.of("$L.getValue()", mappedEntryName).toString(); String rawValueName; @@ -577,7 +588,8 @@ private static void convertEntitiesIntoUdts( CodeBlock.of("(($T) $L).getValueType()", MapType.class, currentCqlType), udtTypesBuilder, conversionBuilder, - enclosingClass); + enclosingClass, + useLeniency); } conversionBuilder .addStatement("$L.put($L, $L)", rawObjectName, rawKeyName, rawValueName) diff --git a/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java index 23c21c6f5f7..f5979cd6fb3 100644 --- a/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/dse/driver/internal/mapper/reactive/ReactiveDaoBase.java @@ -35,6 +35,6 @@ protected ReactiveResultSet executeReactive(Statement statement) { protected MappedReactiveResultSet executeReactiveAndMap( Statement statement, EntityHelper entityHelper) { ReactiveResultSet source = executeReactive(statement); - return new DefaultMappedReactiveResultSet<>(source, entityHelper::get); + return new DefaultMappedReactiveResultSet<>(source, row -> entityHelper.get(row, false)); } } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java index a8046f33adc..f358e961846 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/Entity.java @@ -47,8 +47,9 @@ * getDescription}) and has no parameters. 
The name of the property is obtained by removing * the "get" prefix and decapitalizing ({@code description}), and the type of the property is * the return type of the getter. - *
        4. there must be a matching setter method ({@code setDescription}), with a single - * parameter that has the same type as the property (the return type does not matter). + *
        5. unless the entity is {@linkplain PropertyStrategy#mutable() immutable}, there must + * be a matching setter method ({@code setDescription}), with a single parameter that has the + * same type as the property (the return type does not matter). * * * There may also be a matching field ({@code description}) that has the same type as the diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java index ff4d5a8805c..0d01b8f373f 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/GetEntity.java @@ -102,4 +102,18 @@ */ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) -public @interface GetEntity {} +public @interface GetEntity { + + /** + * Whether to tolerate missing columns in the source data structure. + * + *

          If {@code false} (the default), then the source must contain a matching column for every + * property in the entity definition, including computed ones. If such a column is not + * found, an {@link IllegalArgumentException} will be thrown. + * + *

          If {@code true}, the mapper will operate on a best-effort basis and attempt to read all + * entity properties that have a matching column in the source, leaving unmatched properties + * untouched. Beware that this may result in a partially-populated entity instance. + */ + boolean lenient() default false; +} diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java index 834c549c3b7..818b1272be5 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/annotations/SetEntity.java @@ -98,4 +98,17 @@ * or {@link NullSavingStrategy#DO_NOT_SET}. */ NullSavingStrategy nullSavingStrategy() default NullSavingStrategy.DO_NOT_SET; + + /** + * Whether to tolerate missing columns in the target data structure. + * + *

          If {@code false} (the default), then the target must contain a matching column for every + * property in the entity definition, except computed ones. If such a column is not + * found, an {@link IllegalArgumentException} will be thrown. + * + *

          If {@code true}, the mapper will operate on a best-effort basis and attempt to write all + * entity properties that have a matching column in the target, leaving unmatched properties + * untouched. Beware that this may result in a partially-populated target. + */ + boolean lenient() default false; } diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/EntityHelper.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/EntityHelper.java index 81f3144d529..c80242b0904 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/EntityHelper.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/api/mapper/entity/EntityHelper.java @@ -49,7 +49,20 @@ public interface EntityHelper { /** * Sets the properties of an entity instance into a target data structure. * - *

          For example: + * @deprecated Use {@link #set(Object, SettableByName, NullSavingStrategy, boolean)} instead. + */ + @NonNull + @Deprecated + > SettableT set( + @NonNull EntityT entity, + @NonNull SettableT target, + @NonNull NullSavingStrategy nullSavingStrategy); + + /** + * Sets the properties of an entity instance into a target data structure. + * + *

          The generated code will attempt to write all entity properties in the target data structure. + * For example: * *

          {@code
              * target = target.set("id", entity.getId(), UUID.class);
          @@ -59,25 +72,50 @@ public interface EntityHelper {
              *
              * The column names are inferred from the naming strategy for this entity.
              *
          +   * 

          The target will typically be one of the built-in driver subtypes: {@link BoundStatement}, + * {@link BoundStatementBuilder} or {@link UdtValue}. Note that the default {@link BoundStatement} + * implementation is immutable, therefore this argument won't be modified in-place: you need to + * use the return value to get the resulting structure. + * + *

          If {@code lenient} is {@code true}, the mapper will operate on a best-effort basis and + * attempt to write all entity properties that have a matching column in the target, leaving + * unmatched properties untouched. Beware that this may result in a partially-populated target. + * + *

          If {@code lenient} is {@code false}, then the target must contain a matching column for + * every property in the entity definition, except computed ones. If such a column is not + * found, an {@link IllegalArgumentException} will be thrown. + * * @param entity the entity that the values will be read from. - * @param target the data structure to fill. This will typically be one of the built-in driver - * subtypes: {@link BoundStatement}, {@link BoundStatementBuilder} or {@link UdtValue}. Note - * that the default {@link BoundStatement} implementation is immutable, therefore this - * argument won't be modified in-place: you need to use the return value to get the resulting - * structure. + * @param target the data structure to fill. + * @param lenient whether to tolerate incomplete targets. * @return the data structure resulting from the assignments. This is useful for immutable target * implementations (see above), otherwise it will be the same as {@code target}. + * @throws IllegalArgumentException if lenient is false and the target does not contain matching + * columns for every entity property. */ @NonNull - > SettableT set( + default > SettableT set( @NonNull EntityT entity, @NonNull SettableT target, - @NonNull NullSavingStrategy nullSavingStrategy); + @NonNull NullSavingStrategy nullSavingStrategy, + boolean lenient) { + return set(entity, target, nullSavingStrategy); + } /** * Gets values from a data structure to fill an entity instance. * - *

          For example: + * @deprecated Use {@link #get(GettableByName, boolean)} instead. + */ + @NonNull + @Deprecated + EntityT get(@NonNull GettableByName source); + + /** + * Gets values from a data structure to fill an entity instance. + * + *

          The generated code will attempt to read all entity properties from the source data + * structure. For example: * *

          {@code
              * User returnValue = new User();
          @@ -88,14 +126,29 @@ > SettableT set(
              *
              * The column names are inferred from the naming strategy for this entity.
              *
          -   * @param source the data structure to read from. This will typically be one of the built-in
          -   *     driver subtypes: {@link Row} or {@link UdtValue} ({@link BoundStatement} and {@link
          -   *     BoundStatementBuilder} are also possible, although it's less likely that data would be read
          -   *     back from them in this manner).
          +   * 

          The source will typically be one of the built-in driver subtypes: {@link Row} or {@link + * UdtValue} ({@link BoundStatement} and {@link BoundStatementBuilder} are also possible, although + * it's less likely that data would be read back from them in this manner). + * + *

          If {@code lenient} is {@code true}, the mapper will operate on a best-effort basis and + * attempt to read all entity properties that have a matching column in the source, leaving + * unmatched properties untouched. Beware that this may result in a partially-populated entity + * instance. + * + *

          If {@code lenient} is {@code false}, then the source must contain a matching column for + * every property in the entity definition, including computed ones. If such a column is + * not found, an {@link IllegalArgumentException} will be thrown. + * + * @param source the data structure to read from. + * @param lenient whether to tolerate incomplete sources. * @return the resulting entity. + * @throws IllegalArgumentException if lenient is false and the source does not contain matching + * columns for every entity property. */ @NonNull - EntityT get(@NonNull GettableByName source); + default EntityT get(@NonNull GettableByName source, boolean lenient) { + return get(source); + } /** * Builds an insert query for this entity. diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index 4af39a59b84..a9e5721c398 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -224,12 +224,12 @@ protected EntityT executeAndMapToSingleEntity( private EntityT asEntity(Row row, EntityHelper entityHelper) { return (row == null - // Special case for INSERT IF NOT EXISTS. If the row did not exists, the query returns + // Special case for INSERT IF NOT EXISTS. If the row did not exist, the query returns // only [applied], we want to return null to indicate there was no previous entity || (row.getColumnDefinitions().size() == 1 && row.getColumnDefinitions().get(0).getName().equals(APPLIED))) ? 
null - : entityHelper.get(row); + : entityHelper.get(row, false); } protected Optional executeAndMapToOptionalEntity( @@ -239,12 +239,13 @@ protected Optional executeAndMapToOptionalEntity( protected PagingIterable executeAndMapToEntityIterable( Statement statement, EntityHelper entityHelper) { - return execute(statement).map(entityHelper::get); + return execute(statement).map(row -> entityHelper.get(row, false)); } protected Stream executeAndMapToEntityStream( Statement statement, EntityHelper entityHelper) { - return StreamSupport.stream(execute(statement).map(entityHelper::get).spliterator(), false); + return StreamSupport.stream( + execute(statement).map(row -> entityHelper.get(row, false)).spliterator(), false); } protected CompletableFuture executeAsync(Statement statement) { @@ -287,7 +288,7 @@ protected CompletableFuture> executeAsyncAndMapToOpt protected CompletableFuture> executeAsyncAndMapToEntityIterable( Statement statement, EntityHelper entityHelper) { - return executeAsync(statement).thenApply(rs -> rs.map(entityHelper::get)); + return executeAsync(statement).thenApply(rs -> rs.map(row -> entityHelper.get(row, false))); } protected static void throwIfProtocolVersionV3(MapperContext context) { diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java index d1198cff5d4..8be10fc15d8 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/entity/EntityHelperBase.java @@ -16,6 +16,13 @@ package com.datastax.oss.driver.internal.mapper.entity; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder; +import com.datastax.oss.driver.api.core.cql.Row; 
+import com.datastax.oss.driver.api.core.data.AccessibleByName; +import com.datastax.oss.driver.api.core.data.GettableByName; +import com.datastax.oss.driver.api.core.data.SettableByName; +import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata; import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.type.DataType; @@ -30,6 +37,7 @@ import com.datastax.oss.driver.api.mapper.annotations.DaoKeyspace; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.entity.EntityHelper; +import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.internal.core.util.CollectionsUtils; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -76,6 +84,23 @@ public CqlIdentifier getTableId() { return tableId; } + @NonNull + @Override + @Deprecated + public > SettableT set( + @NonNull EntityT entity, + @NonNull SettableT target, + @NonNull NullSavingStrategy nullSavingStrategy) { + return set(entity, target, nullSavingStrategy, false); + } + + @NonNull + @Override + @Deprecated + public EntityT get(@NonNull GettableByName source) { + return get(source, false); + } + public void throwIfKeyspaceMissing() { if (this.getKeyspaceId() == null && !context.getSession().getKeyspace().isPresent()) { throw new MapperException( @@ -201,6 +226,26 @@ public void throwMissingTypesIfNotEmpty( public boolean keyspaceNamePresent( Map keyspaces, CqlIdentifier keyspaceId) { - return keyspaces.keySet().contains(keyspaceId); + return keyspaces.containsKey(keyspaceId); + } + + public boolean hasProperty(AccessibleByName source, String name) { + if (source instanceof Row) { + return ((Row) source).getColumnDefinitions().contains(name); + } else if (source instanceof UdtValue) { + return ((UdtValue) source).getType().contains(name); + } else if 
(source instanceof BoundStatement) { + return ((BoundStatement) source) + .getPreparedStatement() + .getVariableDefinitions() + .contains(name); + } else if (source instanceof BoundStatementBuilder) { + return ((BoundStatementBuilder) source) + .getPreparedStatement() + .getVariableDefinitions() + .contains(name); + } + // other implementations: assume the property is present + return true; } } diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 1b92cb972d6..c0f6fee32e5 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -15,6 +15,26 @@ involves backwards-incompatible binary changes, users of this library and of the * If your application is using MicroProfile Metrics >= 3.0, then you must upgrade to driver 4.12 or higher, as previous versions of `java-driver-metrics-microprofile` will not work. +#### Mapper `@GetEntity` and `@SetEntity` methods can now be lenient + +Thanks to [JAVA-2935](https://datastax-oss.atlassian.net/browse/JAVA-2935), `@GetEntity` and +`@SetEntity` methods now have a new `lenient` attribute. + +If the attribute is `false` (the default value), then the source row or the target statement must +contain a matching column for every property in the entity definition, *including computed ones*. If +such a column is not found, an error will be thrown. This corresponds to the mapper's current +behavior prior to the introduction of the new attribute. + +If the new attribute is explicitly set to `true` however, the mapper will operate on a best-effort +basis and attempt to read or write all entity properties that have a matching column in the source +row or in the target statement, *leaving unmatched properties untouched*. + +This new, lenient behavior allows to achieve the equivalent of driver 3.x +[lenient mapping](https://docs.datastax.com/en/developer/java-driver/3.10/manual/object_mapper/using/#manual-mapping). 
+ +Read the manual pages on [@GetEntity](../manual/mapper/daos/getentity) methods and +[@SetEntity](../manual/mapper/daos/setentity) methods for more details and examples of lenient mode. + ### 4.11.0 #### Native protocol V5 is now production-ready From c6cab7303f88ffd285ede10330effeb6bc7a5f74 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 12:02:58 +0200 Subject: [PATCH 730/979] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors (#1557) --- changelog/README.md | 1 + .../config/typesafe/DefaultDriverConfigLoader.java | 11 +++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 52d34c9b02a..8fbb4385f16 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.2 (in progress) +- [bug] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors - [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method - [bug] JAVA-2947: Release buffer after decoding multi-slice frame - [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java index 9f87960adc6..2c679e3f520 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/DefaultDriverConfigLoader.java @@ -39,6 +39,7 @@ import java.time.Duration; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import net.jcip.annotations.ThreadSafe; @@ -235,8 +236,14 @@ public Supplier 
getConfigSupplier() { @Override public void close() { SingleThreaded singleThreaded = this.singleThreaded; - if (singleThreaded != null) { - RunOrSchedule.on(singleThreaded.adminExecutor, singleThreaded::close); + if (singleThreaded != null && !singleThreaded.adminExecutor.terminationFuture().isDone()) { + try { + RunOrSchedule.on(singleThreaded.adminExecutor, singleThreaded::close); + } catch (RejectedExecutionException e) { + // Checking the future is racy, there is still a tiny window that could get us here. + // We can safely ignore this error because, if the execution is rejected, the periodic + // reload task, if any, has been already cancelled. + } } } From 262cc4f99dcf127ff24bf347131f19e5a173a62d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 12:00:57 +0200 Subject: [PATCH 731/979] Fix raw usage of generic class --- .../core/util/concurrent/ScheduledTaskCapturingEventLoop.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/ScheduledTaskCapturingEventLoop.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/ScheduledTaskCapturingEventLoop.java index 79f56fb3215..540ee86a8a8 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/ScheduledTaskCapturingEventLoop.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/ScheduledTaskCapturingEventLoop.java @@ -47,7 +47,7 @@ @SuppressWarnings("FunctionalInterfaceClash") // does not matter for test code public class ScheduledTaskCapturingEventLoop extends DefaultEventLoop { - private final BlockingQueue capturedTasks = new ArrayBlockingQueue<>(100); + private final BlockingQueue> capturedTasks = new ArrayBlockingQueue<>(100); public ScheduledTaskCapturingEventLoop(EventLoopGroup parent) { super(parent); From 29d11d9cf9f61cafed508ad0e8e0904f35b23121 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 
12:01:41 +0200 Subject: [PATCH 732/979] Replace mention of "cluster" by "session" in DriverConfigLoader.close() --- .../datastax/oss/driver/api/core/config/DriverConfigLoader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java index fcc7ea41689..6bbd8d2c96e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DriverConfigLoader.java @@ -376,7 +376,7 @@ static DriverConfigLoader compose( boolean supportsReloading(); /** - * Called when the cluster closes. This is a good time to release any external resource, for + * Called when the session closes. This is a good time to release any external resource, for * example cancel a scheduled reloading task. */ @Override From 8b77ec2745f7d6297c3e1a32a1a6d5614e5dcc48 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Sat, 12 Jun 2021 19:03:13 +0200 Subject: [PATCH 733/979] Wait until index is built --- .../mapper/SelectCustomWhereClauseIT.java | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java index e86ff9f2d5b..3afcc03e451 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.mapper; import static com.datastax.oss.driver.assertions.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import static org.junit.Assume.assumeFalse; import com.datastax.oss.driver.api.core.CqlIdentifier; @@ -36,8 +37,8 
@@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import java.time.Duration; import java.util.concurrent.CompletionStage; -import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -73,29 +74,35 @@ public static void setup() { InventoryMapper inventoryMapper = new SelectCustomWhereClauseIT_InventoryMapperBuilder(session).build(); dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); - } - - @Before - public void insertData() { dao.save(FLAMETHROWER); dao.save(MP3_DOWNLOAD); } @Test public void should_select_with_custom_clause() { - PagingIterable products = dao.findByDescription("%mp3%"); - assertThat(products.one()).isEqualTo(MP3_DOWNLOAD); - assertThat(products.iterator()).isExhausted(); + await() + .atMost(Duration.ofMinutes(1)) + .untilAsserted( + () -> { + PagingIterable products = dao.findByDescription("%mp3%"); + assertThat(products.one()).isEqualTo(MP3_DOWNLOAD); + assertThat(products.iterator()).isExhausted(); + }); } @Test public void should_select_with_custom_clause_asynchronously() { - MappedAsyncPagingIterable iterable = - CompletableFutures.getUninterruptibly( - dao.findByDescriptionAsync("%mp3%").toCompletableFuture()); - assertThat(iterable.one()).isEqualTo(MP3_DOWNLOAD); - assertThat(iterable.currentPage().iterator()).isExhausted(); - assertThat(iterable.hasMorePages()).isFalse(); + await() + .atMost(Duration.ofMinutes(1)) + .untilAsserted( + () -> { + MappedAsyncPagingIterable iterable = + CompletableFutures.getUninterruptibly( + dao.findByDescriptionAsync("%mp3%").toCompletableFuture()); + assertThat(iterable.one()).isEqualTo(MP3_DOWNLOAD); + assertThat(iterable.currentPage().iterator()).isExhausted(); + assertThat(iterable.hasMorePages()).isFalse(); + }); } @Mapper From 3fde31eb8cea42ab4e342eb1ab555fe56c413ac8 Mon Sep 17 00:00:00 
2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 14:23:43 +0200 Subject: [PATCH 734/979] Prepare changelog for 4.11.2 release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 8fbb4385f16..48c5d442249 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.11.2 (in progress) +### 4.11.2 - [bug] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors - [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method From 130ab655b4a45bae3dd6683bdb5427c88de48850 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 14:28:10 +0200 Subject: [PATCH 735/979] [maven-release-plugin] prepare release 4.11.2 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 970e0cbcd26..5bcee4affef 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-core-shaded - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-mapper-processor - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-mapper-runtime - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-query-builder - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-test-infra - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-metrics-micrometer - 4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss java-driver-metrics-microprofile - 
4.11.2-SNAPSHOT + 4.11.2 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 7e204b6726c..d35d412a916 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 44aeda012e4..d8edccc65fc 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 73891ec302b..71e5a37e8f5 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 08ee8f717e0..2a0f99f9105 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.2-SNAPSHOT + 4.11.2 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8097819fde0..ea602b1b840 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 99f35eb1986..a2851350807 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f8e8bcd6c06..7de0fe21945 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index d5072187884..944500c52cd 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 169d04622e7..bab6a2a2a3e 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 58820c4ba3d..195b4308551 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index a306b90a655..656861cb5d8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.11.2 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8eac5a3ffbd..c3b65acf3ab 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 5cf30a2048b..b5a3d724789 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2-SNAPSHOT + 4.11.2 java-driver-test-infra bundle From 0d97dfd8d9e43721b41939e3bdbcd0a696d381c4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 14:28:20 +0200 Subject: [PATCH 736/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 5bcee4affef..f107d8bb9b5 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.11.2 + 
4.11.3-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.11.2 + 4.11.3-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index d35d412a916..2dc72176cce 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index d8edccc65fc..9fcd5642208 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 71e5a37e8f5..b7e5bd41edb 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 2a0f99f9105..c2b3a76164f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.2 + 4.11.3-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ea602b1b840..d306d17b365 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a2851350807..a8d29a6154f 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 7de0fe21945..dbfda81840e 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 944500c52cd..0c4c444bf17 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index bab6a2a2a3e..e5946089c8f 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 195b4308551..bb094c7c158 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 656861cb5d8..6f40a34b9a8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.11.2 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c3b65acf3ab..601a40ee9d8 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index b5a3d724789..2b23d248516 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.2 + 4.11.3-SNAPSHOT java-driver-test-infra bundle From 7070f075cc3a1c245f3ad891aa164977925b1e33 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 15:21:11 +0200 Subject: [PATCH 737/979] Update version in docs --- README.md | 4 +- changelog/README.md | 13 +++- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++-- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 7 +- manual/core/load_balancing/README.md | 12 +-- 
manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/non_blocking/README.md | 44 +++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 36 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 18 ++--- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 24 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 28 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- 
manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- 84 files changed, 421 insertions(+), 409 deletions(-) diff --git a/README.md b/README.md index 5a137ac947a..19b09697684 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.11.0](https://github.com/datastax/java-driver/tree/4.11.0).* +[4.12.0](https://github.com/datastax/java-driver/tree/4.12.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.11 +[API docs]: https://docs.datastax.com/en/drivers/java/4.12 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/changelog/README.md b/changelog/README.md index 49a04dffa06..c301cfe4a54 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,11 +2,22 @@ -### 4.12.0 (in progress) +### 4.12.0 - [improvement] JAVA-2935: Make GetEntity and SetEntity methods resilient to incomplete data - [improvement] JAVA-2944: Upgrade MicroProfile Metrics to 3.0 +Merged from 4.11.x: + +- [bug] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors +- [bug] JAVA-2945: Reinstate InternalDriverContext.getNodeFilter method +- [bug] JAVA-2947: Release buffer after decoding multi-slice frame +- [bug] JAVA-2946: Make MapperResultProducerService instances be located with user-provided class loader +- [bug] JAVA-2942: GraphStatement.setConsistencyLevel() is not effective +- [bug] JAVA-2941: Cannot add a single static column with the alter table API +- [bug] JAVA-2943: Prevent session leak with wrong keyspace name +- [bug] JAVA-2938: OverloadedException message is misleading + ### 4.11.2 - [bug] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 4138ae42d89..7e85cd2d091 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 6a316245c50..4648e53f4bd 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 61954f38c43..2407f589f78 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index aeaaa53e5ce..50005ffb7a8 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 8b3a2a61099..9a11ad0a8e0 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -215,12 +215,12 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[DseGssApiAuthProviderBase]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 690ed1ce3cc..357bd6c58b7 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.11.0 + 4.12.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.11.0 + 4.12.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 55e7bf6d91c..656115b18a3 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index a1d9a345cee..2cb88323cd1 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index b90ea31ad10..3413f9d5934 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC +[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- 
+[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 737dd1b41c9..927cb3e372d 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. 
-[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 
f6329a2fd5a..ad081b246c4 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index d18b1959c93..aa8409db760 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 090ccc248fb..15784ba833a 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 533a257d218..bef059fe467 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 7c9356b19b6..1d3891b0994 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index f26ab58bdf4..8bdf9799b63 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 1bb97d92c64..f10bcae0721 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 6fab5af9003..6a41fa4751d 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -543,6 +543,7 @@ Here are the recommended TinkerPop versions for each driver version: + @@ -641,6 +642,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index fdac96702fe..d74546d6e99 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -426,12 +426,12 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- -[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 8b5557dd994..117c802c966 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. 
-[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 715b26df732..fb5780a30ff 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -112,17 +112,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file 
+[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff 
--git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 4fb70ac326c..43a3ec1880a 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -307,16 +307,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html 
-[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git 
a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index e3dc6d903c3..db830c64bae 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index e2ed651de6d..4d43687f792 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- +[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index efb068912b2..61a6618f757 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block. 
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 288127679a0..7c33aa5b43a 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 6f6cfb9f2fb..46bd567aa0f 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's 
internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index e6ba907b95f..c57808c692a 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index c98d1030392..6851fdd7e57 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index c8485c982bd..d70a9bd4094 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a 
demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 36b2d55b832..3ec3c8ef3cc 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index e959c09dfd7..4aa9fff0404 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -117,5 +117,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index df8ac5b9003..eb24ead3ad1 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -231,21 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/retry/RetryVerdict.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index f5ba057d431..6e0e86f8606 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index bac3786c624..d21bf0e7839 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 2e1dd1aa75f..b8a0621bc1d 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index df917e33af5..1cb479ce2bc 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 
+61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 152816071d2..cd5295805f4 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index afb24ac38ed..e7e7d4007d4 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index e614885039a..a07fa66126b 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index e30f8fca947..cf5273b4e0f 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 3b5c025032f..eb69b160a99 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 07d4416efba..7864ac147a1 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 1662aa3beb7..cfecdf3f8f8 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 8fdd72050a2..90f7bff59ff 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index 2ab2ab7db35..c0697d439c8 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index 58e104513de..e016cfec768 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index d296439870f..520eeb773b7 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example.
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index f5d2e120637..6d2ed670fc8 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 6bbd1b9f35a..9a15815acb0 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 6b8a6707b48..7d39e8b7038 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 1526079f611..290180038e1 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index e495f964557..63a82e96e14 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -130,15 +130,15 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html 
-[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html [Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index 834bdb752f1..c8680e9b6ab 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
f7bf250b304..79337a093a9 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index bf754441703..80be5c17652 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperException.html 
+[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index b95b118c48f..bde13b37a41 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -113,18 +113,18 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[Row]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index e1e71bcc608..61e2212079e 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 3cb6cc168c5..e9403d913ec 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -155,20 +155,20 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)).
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 3d887b34a91..5fc441b9bf8 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -112,8 +112,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 1793c8ac806..4ca17cfbb96 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 44fcdec02c6..8ffa06a1b4c 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
35b457f9bb9..41f8a2a2a4f 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Query.html 
+[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index bebb2a62133..032381e39bf 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index be8430c31e7..670625960cb 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index f4f1c12db73..6f2e6e2afd6 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 8a58139085e..3d8682c32cf 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 36f0400c43a..0fbcf27ea77 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 3442ed55bab..2d2c66fd526 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index ba55a2c38b8..6e6cb03829e 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index 38fc701d798..b285a68acef 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 78de7419719..a2aea551f47 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 5cf564f99eb..f9dfc5d0af8 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` 
-[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 183f4e35fec..8e1ed2d8125 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 643a1354ac7..b772b9b0d6d 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 20617636769..54354907a5e 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index b7f923d90cc..12b2577e21c 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index f08f3305bcc..fe0791c970d 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 9ed26281765..b0c13bc438f 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index ca650d5d196..a0995a4b516 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index b513b6a52ab..88ffcf47de8 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 2069bb54541..4771641e40b 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.11/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From a2b797b420726a183b58e900b4a83bdfe193fd8d Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 15:34:18 +0200 Subject: [PATCH 738/979] [maven-release-plugin] prepare release 4.12.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f50076bb957..e81b7e2c7e7 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-core-shaded - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-mapper-processor - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-mapper-runtime - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-query-builder - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-test-infra - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-metrics-micrometer - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss java-driver-metrics-microprofile - 4.12.0-SNAPSHOT + 4.12.0 com.datastax.oss diff --git a/core-shaded/pom.xml 
b/core-shaded/pom.xml index 414d305fb2a..4d223d4bbbe 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 5da7d297d88..b7c60b23507 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index cfd4596ddb4..be6ad61af87 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index cf67d0aa406..851eab03e3c 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.12.0-SNAPSHOT + 4.12.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 0c13ec6f589..676b2508547 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 53f2c874759..1aa3e8f46ae 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 1890231c202..aaa0b4194e4 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 3f54eeb3959..7eef3bd5912 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 48782b475c9..39a5d63d129 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index c88c033d4d6..7e0917ca279 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 72e4194db9a..f913367f87f 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.12.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 7fc68474e2c..0475d2c16e2 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index f1000d40618..ac5343f7850 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0-SNAPSHOT + 4.12.0 java-driver-test-infra bundle From e9edbcd1d32504be866229544ee5d139380d5c8b Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 15:34:29 +0200 Subject: [PATCH 739/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index e81b7e2c7e7..e47c97e29e3 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.12.0 + 
4.13.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.12.0 + 4.13.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4d223d4bbbe..076dbfa6918 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index b7c60b23507..34d1b7478b6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index be6ad61af87..2a579b0440a 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 851eab03e3c..b95827307ad 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.12.0 + 4.13.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 676b2508547..c5b6cc77594 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 1aa3e8f46ae..cc14f3d2e8b 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index aaa0b4194e4..68d76446bf1 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 7eef3bd5912..8c2755c7c62 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 39a5d63d129..78c04909c55 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 7e0917ca279..12e8b9641dc 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index f913367f87f..5049d6af22e 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.12.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 0475d2c16e2..3ae527917a1 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index ac5343f7850..d29046b4934 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.0 + 4.13.0-SNAPSHOT java-driver-test-infra bundle From 5f3026bbd370152985532042865f42029ff8a871 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Mon, 14 Jun 2021 15:51:25 +0200 Subject: [PATCH 740/979] Minor correction regarding GetEntity/SetEntity leniency --- upgrade_guide/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index c0f6fee32e5..4e0de35703c 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -21,9 +21,9 @@ Thanks to [JAVA-2935](https://datastax-oss.atlassian.net/browse/JAVA-2935), `@Ge `@SetEntity` methods now have a new `lenient` attribute. If the attribute is `false` (the default value), then the source row or the target statement must -contain a matching column for every property in the entity definition, *including computed ones*. If -such a column is not found, an error will be thrown. This corresponds to the mapper's current -behavior prior to the introduction of the new attribute. +contain a matching column for every property in the entity definition. If such a column is not +found, an error will be thrown. 
This corresponds to the mapper's current behavior prior to the +introduction of the new attribute. If the new attribute is explicitly set to `true` however, the mapper will operate on a best-effort basis and attempt to read or write all entity properties that have a matching column in the source From a92bc7dcf81243ae39cb21e1f2b5b78dac271e24 Mon Sep 17 00:00:00 2001 From: David Garcia Date: Fri, 9 Jul 2021 10:36:38 +0100 Subject: [PATCH 741/979] Fixed CHANGELOG indentation (#1558) --- changelog/README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index c301cfe4a54..a517b58557d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -568,28 +568,28 @@ changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements -## 3.10.2 +### 3.10.2 - [bug] JAVA-2860: Avoid NPE if channel initialization crashes. -## 3.10.1 +### 3.10.1 - [bug] JAVA-2857: Fix NPE when built statements without parameters are logged at TRACE level. - [bug] JAVA-2843: Successfully parse DSE table schema in OSS driver. -## 3.10.0 +### 3.10.0 - [improvement] JAVA-2676: Don't reschedule flusher after empty runs - [new feature] JAVA-2772: Support new protocol v5 message format -## 3.9.0 +### 3.9.0 - [bug] JAVA-2627: Avoid logging error message including stack trace in request handler. - [new feature] JAVA-2706: Add now_in_seconds to protocol v5 query messages. - [improvement] JAVA-2730: Add support for Cassandra® 4.0 table options - [improvement] JAVA-2702: Transient Replication Support for Cassandra® 4.0 -## 3.8.0 +### 3.8.0 - [new feature] JAVA-2356: Support for DataStax Cloud API. - [improvement] JAVA-2483: Allow to provide secure bundle via URL. 
From edbf967ff5dadd44f4e9b46c7d72293f7a1774a4 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 9 Jul 2021 12:18:41 +0200 Subject: [PATCH 742/979] Increase timeout --- .../driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java index a312d6162bf..b0707f0356f 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/retry/ConsistencyDowngradingRetryPolicyIT.java @@ -284,7 +284,7 @@ public void should_retry_on_same_on_read_timeout_when_enough_responses_but_data_ oneCounter.assertTotalCount(0); // expect 2 messages: RETRY_SAME, then RETHROW - verify(appender, timeout(500).times(2)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(2000).times(2)).doAppend(loggingEventCaptor.capture()); List loggedEvents = loggingEventCaptor.getAllValues(); assertThat(loggedEvents).hasSize(2); assertThat(loggedEvents.get(0).getFormattedMessage()) From 4af33a009d8c7376af53eb0f3f825ed6b83bdb75 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 9 Jul 2021 12:19:11 +0200 Subject: [PATCH 743/979] Use slow profile for DDL queries --- .../api/core/cql/continuous/ContinuousPagingIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index 3ba00e4095b..a0a3aaf3cf5 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -240,7 +240,9 @@ public void simple_statement_paging_should_be_resilient_to_schema_change() { public void prepared_statement_paging_should_be_resilient_to_schema_change() { CqlSession session = sessionRule.session(); // Create table and prepare select * query against it. - session.execute("CREATE TABLE test_prep (k text PRIMARY KEY, v int)"); + session.execute( + SimpleStatement.newInstance("CREATE TABLE test_prep (k text PRIMARY KEY, v int)") + .setExecutionProfile(SessionUtils.slowProfile(session))); for (int i = 0; i < 100; i++) { session.execute(String.format("INSERT INTO test_prep (k, v) VALUES ('foo', %d)", i)); } @@ -267,7 +269,9 @@ public void prepared_statement_paging_should_be_resilient_to_schema_change() { CqlSession schemaChangeSession = SessionUtils.newSession( ccmRule, session.getKeyspace().orElseThrow(IllegalStateException::new)); - schemaChangeSession.execute("ALTER TABLE test_prep DROP v;"); + schemaChangeSession.execute( + SimpleStatement.newInstance("ALTER TABLE test_prep DROP v;") + .setExecutionProfile(SessionUtils.slowProfile(schemaChangeSession))); while (it.hasNext()) { // Each row should have a value for k, v should still be present, but null since column was // dropped. 
From 6d537c6c4670bf006ca9c7580f714b8e45375306 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 9 Jul 2021 12:19:35 +0200 Subject: [PATCH 744/979] Test nullity of fields in tearDown method --- .../datastax/oss/driver/core/PeersV2NodeRefreshIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java index 089c4d4fa53..52a5b6eef53 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/PeersV2NodeRefreshIT.java @@ -45,8 +45,12 @@ public static void setup() { @AfterClass public static void tearDown() { - cluster.stop(); - peersV2Server.close(); + if (cluster != null) { + cluster.stop(); + } + if (peersV2Server != null) { + peersV2Server.close(); + } } @Test From c4c561f7d154b50fec50be9da3b4ca9c4c34f4e2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 9 Jul 2021 16:08:15 +0200 Subject: [PATCH 745/979] Remove spurious line breaks from generated code --- .../mapper/processor/dao/DaoDeleteMethodGenerator.java | 5 ++--- .../mapper/processor/dao/DaoIncrementMethodGenerator.java | 5 ++--- .../mapper/processor/dao/DaoInsertMethodGenerator.java | 5 ++--- .../mapper/processor/dao/DaoQueryMethodGenerator.java | 5 ++--- .../mapper/processor/dao/DaoSelectMethodGenerator.java | 5 ++--- .../mapper/processor/dao/DaoUpdateMethodGenerator.java | 5 ++--- .../processor/entity/EntityHelperGetMethodGenerator.java | 1 - .../processor/entity/EntityHelperSetMethodGenerator.java | 2 +- .../processor/util/generation/GeneratedCodePatterns.java | 1 - 9 files changed, 13 insertions(+), 21 deletions(-) diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java index e80ff89eadf..3f141e31e8c 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoDeleteMethodGenerator.java @@ -279,9 +279,8 @@ public Optional generate() { } } - createStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + createStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(createStatementBlock, returnType, helperFieldName); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java index 2b064490f0a..0d0c279b809 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoIncrementMethodGenerator.java @@ -217,9 +217,8 @@ public Optional generate() { context, false); - updateStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + updateStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(updateStatementBlock, returnType, helperFieldName); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java index 945cfeda370..9ac0fe9260f 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoInsertMethodGenerator.java @@ -176,9 +176,8 @@ public Optional generate() { } } - createStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + createStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(createStatementBlock, returnType, helperFieldName); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java index 6d6ec0fc8a7..d9bb9547c4d 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGenerator.java @@ -125,9 +125,8 @@ public Optional generate() { GeneratedCodePatterns.bindParameters( parameters, createStatementBlock, enclosingClass, context, true); - createStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + createStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(createStatementBlock, returnType, helperFieldName); } else { diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 8c8878b133b..4d0d7457da6 100644 --- 
a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -202,9 +202,8 @@ public Optional generate() { } } - createStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + createStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(createStatementBlock, returnType, helperFieldName); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java index 288778ee9c8..50f99264c40 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoUpdateMethodGenerator.java @@ -187,9 +187,8 @@ public Optional generate() { } } - createStatementBlock - .add("\n") - .addStatement("$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); + createStatementBlock.addStatement( + "$T boundStatement = boundStatementBuilder.build()", BoundStatement.class); return crudMethod(createStatementBlock, returnType, helperFieldName); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java index f0a84517c63..5e98f1a20b3 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperGetMethodGenerator.java @@ -75,7 +75,6 @@ public Optional generate() { String setterName = property.getSetterName(); String propertyValueName = enclosingClass.getNameIndex().uniqueField("propertyValue"); propertyValueNames.add(propertyValueName); - getBuilder.addCode("\n"); if (type instanceof PropertyType.Simple) { TypeName typeName = ((PropertyType.Simple) type).typeName; String primitiveAccessor = GeneratedCodePatterns.PRIMITIVE_ACCESSORS.get(typeName); diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java index 5e3042c10ac..c102c231698 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/entity/EntityHelperSetMethodGenerator.java @@ -73,7 +73,7 @@ public Optional generate() { enclosingClass, true); } - injectBodyBuilder.add("\n").addStatement("return target"); + injectBodyBuilder.addStatement("return target"); return Optional.of(injectBuilder.addCode(injectBodyBuilder.build()).build()); } } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java index 48574a48721..78d1a02aa03 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/util/generation/GeneratedCodePatterns.java @@ -225,7 +225,6 @@ public static void setValue( 
CodeBlock.Builder methodBuilder, BindableHandlingSharedCode enclosingClass, boolean useNullSavingStrategy) { - methodBuilder.add("\n"); if (type instanceof PropertyType.Simple) { TypeName typeName = ((PropertyType.Simple) type).typeName; From 18aa11249b7eabdb505f8d4fc4dd49d40e62da63 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 9 Jul 2021 09:52:22 -0500 Subject: [PATCH 746/979] JAVA-2950: Remove reference to Reflection class from DependencyCheck (#1561) Decoupled dependency definition from logic to check for presence of dependencies. This check logic has been moved to two discrete classes, one for regular lookups and one for Graal build-time lookups which has to be a bit more restrictive about what it can use. --- changelog/README.md | 4 + .../type/codec/DseTypeCodecsRegistrar.java | 6 +- .../DseTypeCodecsRegistrarSubstitutions.java | 6 +- .../core/context/DefaultDriverContext.java | 6 +- .../core/metrics/DefaultMetricsFactory.java | 6 +- .../DefaultMetricsFactorySubstitutions.java | 6 +- .../protocol/CompressorSubstitutions.java | 6 +- .../internal/core/protocol/Lz4Compressor.java | 6 +- .../core/protocol/SnappyCompressor.java | 5 +- .../session/BuiltInRequestProcessors.java | 12 ++- ...BuiltInRequestProcessorsSubstitutions.java | 15 ++-- .../core/util/DefaultDependencyChecker.java | 59 ++++++++++++++ .../driver/internal/core/util/Dependency.java | 60 ++++++++++++++ .../internal/core/util/DependencyCheck.java | 79 ------------------- .../core/util/GraalDependencyChecker.java | 60 ++++++++++++++ 15 files changed, 232 insertions(+), 104 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java create mode 100644 
core/src/main/java/com/datastax/oss/driver/internal/core/util/GraalDependencyChecker.java diff --git a/changelog/README.md b/changelog/README.md index 48c5d442249..778ef17730a 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.11.3 (in progress) + +- [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck + ### 4.11.2 - [bug] JAVA-2932: Make DefaultDriverConfigLoader.close() resilient to terminated executors diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java index 5075caa68b2..5035e7be095 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrar.java @@ -15,9 +15,11 @@ */ package com.datastax.dse.driver.internal.core.type.codec; +import static com.datastax.oss.driver.internal.core.util.Dependency.ESRI; + import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -27,7 +29,7 @@ public class DseTypeCodecsRegistrar { public static void registerDseCodecs(MutableCodecRegistry registry) { registry.register(DseTypeCodecs.DATE_RANGE); - if (DependencyCheck.ESRI.isPresent()) { + if (DefaultDependencyChecker.isPresent(ESRI)) { registry.register(DseTypeCodecs.LINE_STRING, DseTypeCodecs.POINT, DseTypeCodecs.POLYGON); } else { LOG.debug("ESRI was not found on the classpath: geo codecs will not be available"); diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java 
b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java index 51c4958824d..5464673c373 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/type/codec/DseTypeCodecsRegistrarSubstitutions.java @@ -15,9 +15,11 @@ */ package com.datastax.dse.driver.internal.core.type.codec; +import static com.datastax.oss.driver.internal.core.util.Dependency.ESRI; + import com.datastax.dse.driver.api.core.type.codec.DseTypeCodecs; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.GraalDependencyChecker; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; import java.util.function.BooleanSupplier; @@ -37,7 +39,7 @@ public static void registerDseCodecs(MutableCodecRegistry registry) { public static class EsriMissing implements BooleanSupplier { @Override public boolean getAsBoolean() { - return !DependencyCheck.ESRI.isPresent(); + return !GraalDependencyChecker.isPresent(ESRI); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index e09e5ee3b5c..fc73d47ca8d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -15,6 +15,8 @@ */ package com.datastax.oss.driver.internal.core.context; +import static com.datastax.oss.driver.internal.core.util.Dependency.JACKSON; + import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; import 
com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; @@ -82,7 +84,7 @@ import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import com.datastax.oss.driver.internal.core.util.Reflection; import com.datastax.oss.driver.internal.core.util.concurrent.CycleDetector; import com.datastax.oss.driver.internal.core.util.concurrent.LazyReference; @@ -655,7 +657,7 @@ protected Optional buildAuthProvider(AuthProvider authProviderFrom } protected List buildLifecycleListeners() { - if (DependencyCheck.JACKSON.isPresent()) { + if (DefaultDependencyChecker.isPresent(JACKSON)) { return Collections.singletonList(new InsightsClientLifecycleListener(this, initStackTrace)); } else { if (config.getDefaultProfile().getBoolean(DseDriverOption.MONITOR_REPORTING_ENABLED)) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java index e6d78d30dc1..2aba13a4e1c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactory.java @@ -15,10 +15,12 @@ */ package com.datastax.oss.driver.internal.core.metrics; +import static com.datastax.oss.driver.internal.core.util.Dependency.DROPWIZARD; + import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.Metrics; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import 
com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import java.util.Optional; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; @@ -33,7 +35,7 @@ public class DefaultMetricsFactory implements MetricsFactory { @SuppressWarnings("unused") public DefaultMetricsFactory(DriverContext context) { - if (DependencyCheck.DROPWIZARD.isPresent()) { + if (DefaultDependencyChecker.isPresent(DROPWIZARD)) { this.delegate = new DropwizardMetricsFactory(context); } else { this.delegate = new NoopMetricsFactory(context); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java index 3965efc8354..4fa3a49a7d0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DefaultMetricsFactorySubstitutions.java @@ -15,8 +15,10 @@ */ package com.datastax.oss.driver.internal.core.metrics; +import static com.datastax.oss.driver.internal.core.util.Dependency.DROPWIZARD; + import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.GraalDependencyChecker; import com.oracle.svm.core.annotate.Alias; import com.oracle.svm.core.annotate.Delete; import com.oracle.svm.core.annotate.Substitute; @@ -49,7 +51,7 @@ public static final class DeleteDropwizardMetricsFactory {} public static class DropwizardMissing implements BooleanSupplier { @Override public boolean getAsBoolean() { - return !DependencyCheck.DROPWIZARD.isPresent(); + return !GraalDependencyChecker.isPresent(DROPWIZARD); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java index c760344940c..889e4e1c137 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java @@ -15,9 +15,11 @@ */ package com.datastax.oss.driver.internal.core.protocol; +import static com.datastax.oss.driver.internal.core.util.Dependency.LZ4; + import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.GraalDependencyChecker; import com.datastax.oss.protocol.internal.Compressor; import com.oracle.svm.core.annotate.Delete; import com.oracle.svm.core.annotate.Substitute; @@ -91,7 +93,7 @@ public static final class DeleteSnappyCompressor {} public static class Lz4Present implements BooleanSupplier { @Override public boolean getAsBoolean() { - return DependencyCheck.LZ4.isPresent(); + return GraalDependencyChecker.isPresent(LZ4); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java index e3b2ce1a344..f3bfc2ed84d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/Lz4Compressor.java @@ -15,8 +15,10 @@ */ package com.datastax.oss.driver.internal.core.protocol; +import static com.datastax.oss.driver.internal.core.util.Dependency.LZ4; + import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import 
com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import io.netty.buffer.ByteBuf; import java.nio.ByteBuffer; @@ -41,7 +43,7 @@ public Lz4Compressor(DriverContext context) { @VisibleForTesting Lz4Compressor(String sessionName) { - if (DependencyCheck.LZ4.isPresent()) { + if (DefaultDependencyChecker.isPresent(LZ4)) { LZ4Factory lz4Factory = LZ4Factory.fastestInstance(); LOG.info("[{}] Using {}", sessionName, lz4Factory.toString()); this.compressor = lz4Factory.fastCompressor(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java index fbfd3eff9b2..229a044f1a0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/SnappyCompressor.java @@ -16,7 +16,8 @@ package com.datastax.oss.driver.internal.core.protocol; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; +import com.datastax.oss.driver.internal.core.util.Dependency; import io.netty.buffer.ByteBuf; import java.io.IOException; import java.nio.ByteBuffer; @@ -34,7 +35,7 @@ public class SnappyCompressor extends ByteBufCompressor { public SnappyCompressor(@SuppressWarnings("unused") DriverContext context) { - if (!DependencyCheck.SNAPPY.isPresent()) { + if (!DefaultDependencyChecker.isPresent(Dependency.SNAPPY)) { throw new IllegalStateException( "Could not find the Snappy library on the classpath " + "(the driver declares it as an optional dependency, " diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java index a4690847838..e502714f1b2 
100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java @@ -15,6 +15,9 @@ */ package com.datastax.oss.driver.internal.core.session; +import static com.datastax.oss.driver.internal.core.util.Dependency.REACTIVE_STREAMS; +import static com.datastax.oss.driver.internal.core.util.Dependency.TINKERPOP; + import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestAsyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.ContinuousCqlRequestSyncProcessor; import com.datastax.dse.driver.internal.core.cql.continuous.reactive.ContinuousCqlRequestReactiveProcessor; @@ -28,7 +31,7 @@ import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; @@ -41,18 +44,19 @@ public class BuiltInRequestProcessors { public static List> createDefaultProcessors(DefaultDriverContext context) { List> processors = new ArrayList<>(); addBasicProcessors(processors); - if (DependencyCheck.TINKERPOP.isPresent()) { + if (DefaultDependencyChecker.isPresent(TINKERPOP)) { addGraphProcessors(context, processors); } else { LOG.debug("Tinkerpop was not found on the classpath: graph extensions will not be available"); } - if (DependencyCheck.REACTIVE_STREAMS.isPresent()) { + if (DefaultDependencyChecker.isPresent(REACTIVE_STREAMS)) { addReactiveProcessors(processors); } else { LOG.debug( "Reactive Streams was not found on the classpath: reactive extensions will not be available"); } - if (DependencyCheck.REACTIVE_STREAMS.isPresent() && 
DependencyCheck.TINKERPOP.isPresent()) { + if (DefaultDependencyChecker.isPresent(REACTIVE_STREAMS) + && DefaultDependencyChecker.isPresent(TINKERPOP)) { addGraphReactiveProcessors(context, processors); } return processors; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java index e0afbb06892..4485caed33d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java @@ -15,8 +15,11 @@ */ package com.datastax.oss.driver.internal.core.session; +import static com.datastax.oss.driver.internal.core.util.Dependency.REACTIVE_STREAMS; +import static com.datastax.oss.driver.internal.core.util.Dependency.TINKERPOP; + import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.util.DependencyCheck; +import com.datastax.oss.driver.internal.core.util.GraalDependencyChecker; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; import java.util.ArrayList; @@ -67,22 +70,24 @@ public static final class BuiltInRequestProcessorsGraphPresentReactiveMissing { public static class GraphMissingReactiveMissing implements BooleanSupplier { @Override public boolean getAsBoolean() { - return !DependencyCheck.TINKERPOP.isPresent() - && !DependencyCheck.REACTIVE_STREAMS.isPresent(); + return !GraalDependencyChecker.isPresent(TINKERPOP) + && !GraalDependencyChecker.isPresent(REACTIVE_STREAMS); } } public static class GraphMissingReactivePresent implements BooleanSupplier { @Override public boolean getAsBoolean() { - return !DependencyCheck.TINKERPOP.isPresent() && DependencyCheck.REACTIVE_STREAMS.isPresent(); + return 
!GraalDependencyChecker.isPresent(TINKERPOP) + && GraalDependencyChecker.isPresent(REACTIVE_STREAMS); } } public static class GraphPresentReactiveMissing implements BooleanSupplier { @Override public boolean getAsBoolean() { - return DependencyCheck.TINKERPOP.isPresent() && !DependencyCheck.REACTIVE_STREAMS.isPresent(); + return GraalDependencyChecker.isPresent(TINKERPOP) + && !GraalDependencyChecker.isPresent(REACTIVE_STREAMS); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java new file mode 100644 index 00000000000..8dfabb66105 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java @@ -0,0 +1,59 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import java.util.concurrent.ConcurrentHashMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A checker for the presence of various {@link Dependency} instances at runtime. Predicate tests + * for Graal substitutions should NOT use this class; see {@link GraalDependencyChecker} for more + * information. 
+ */ +public class DefaultDependencyChecker { + + private static final Logger LOG = LoggerFactory.getLogger(DefaultDependencyChecker.class); + + private static ConcurrentHashMap CACHE = new ConcurrentHashMap<>(); + + /** + * Return true iff we can find all classes for the dependency on the classpath, false otherwise + * + * @param dependency the dependency to search for + * @return true if the dependency is available, false otherwise + */ + public static boolean isPresent(Dependency dependency) { + try { + return CACHE.computeIfAbsent( + dependency, + (dep) -> { + for (String classNameToTest : dependency.classes()) { + // Always use the driver class loader, assuming that the driver classes and + // the dependency classes are either being loaded by the same class loader, + // or – as in OSGi deployments – by two distinct, but compatible class loaders. + if (Reflection.loadClass(null, classNameToTest) == null) { + return false; + } + } + return true; + }); + } catch (Exception e) { + LOG.warn("Unexpected exception when checking for dependency " + dependency, e); + return false; + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java new file mode 100644 index 00000000000..bbefe698d55 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.util; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; + +/** + * A set of driver optional dependencies and a common mechanism to test the presence of such + * dependencies on the application's classpath. + * + *

          We use the given fully-qualified names of classes to test the presence of the whole dependency + * on the classpath, including its transitive dependencies if applicable. This assumes that if these + * classes are present, then the entire library is present and functional, and vice versa. + * + *

          Note: some of the libraries declared here may be shaded; in these cases the shade plugin will + * replace the package names listed above with names starting with {@code + * com.datastax.oss.driver.shaded.*}, but the presence check would still work as expected. + */ +public enum Dependency { + SNAPPY("org.xerial.snappy.Snappy"), + LZ4("net.jpountz.lz4.LZ4Compressor"), + ESRI("com.esri.core.geometry.ogc.OGCGeometry"), + TINKERPOP( + // gremlin-core + "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", + // tinkergraph-gremlin + "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0"), + REACTIVE_STREAMS("org.reactivestreams.Publisher"), + JACKSON( + // jackson-core + "com.fasterxml.jackson.core.JsonParser", + // jackson-databind + "com.fasterxml.jackson.databind.ObjectMapper"), + DROPWIZARD("com.codahale.metrics.MetricRegistry"), + ; + + @SuppressWarnings("ImmutableEnumChecker") + private final ImmutableList clzs; + + Dependency(String... classNames) { + clzs = ImmutableList.copyOf(classNames); + } + + public Iterable classes() { + return this.clzs; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java deleted file mode 100644 index 0accb5388a0..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DependencyCheck.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.util; - -import com.datastax.oss.driver.shaded.guava.common.base.Supplier; -import com.datastax.oss.driver.shaded.guava.common.base.Suppliers; - -/** - * A set of driver optional dependencies and a common mechanism to test the presence of such - * dependencies on the application's classpath. - */ -public enum DependencyCheck { - SNAPPY("org.xerial.snappy.Snappy"), - LZ4("net.jpountz.lz4.LZ4Compressor"), - ESRI("com.esri.core.geometry.ogc.OGCGeometry"), - TINKERPOP( - // gremlin-core - "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal", - // tinkergraph-gremlin - "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0"), - REACTIVE_STREAMS("org.reactivestreams.Publisher"), - JACKSON( - // jackson-core - "com.fasterxml.jackson.core.JsonParser", - // jackson-databind - "com.fasterxml.jackson.databind.ObjectMapper"), - DROPWIZARD("com.codahale.metrics.MetricRegistry"), - ; - - @SuppressWarnings("ImmutableEnumChecker") - private final Supplier present; - - /** - * We use the given fully-qualified names of classes to test the presence of the whole dependency - * on the classpath, including its transitive dependencies if applicable. This assumes that if - * these classes are present, then the entire library is present and functional, and vice versa. - * - *

          Note: some of the libraries declared here may be shaded; in these cases the shade plugin - * will replace the package names listed above with names starting with {@code - * com.datastax.oss.driver.shaded.*}, but the presence check would still work as expected. - */ - DependencyCheck(String... classNamesToTest) { - this.present = - Suppliers.memoize( - () -> { - for (String classNameToTest : classNamesToTest) { - // Always use the driver class loader, assuming that the driver classes and - // the dependency classes are either being loaded by the same class loader, - // or – as in OSGi deployments – by two distinct, but compatible class loaders. - if (Reflection.loadClass(null, classNameToTest) == null) { - return false; - } - } - return true; - }); - } - - /** - * Checks if the dependency is present on the application's classpath and is loadable. - * - * @return true if the dependency is present and loadable, false otherwise. - */ - public boolean isPresent() { - return present.get(); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/GraalDependencyChecker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/GraalDependencyChecker.java new file mode 100644 index 00000000000..b91e5716b0d --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/GraalDependencyChecker.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util; + +import java.util.concurrent.ConcurrentHashMap; + +/** + * A dependency checker implementation which should be safe to use for build-time checks when + * building Graal native images. This class is similar to {@link DefaultDependencyChecker} but + * doesn't introduce any external dependencies which might complicate the native image build + * process. Expectation is that this will be most prominently used in the various predicate classes + * which determine whether or not Graal substitutions should be used. + */ +public class GraalDependencyChecker { + + private static final ConcurrentHashMap CACHE = new ConcurrentHashMap<>(); + + /** + * Return true iff we can find all classes for the dependency on the classpath, false otherwise + * + * @param dependency the dependency to search for + * @return true if the dependency is available, false otherwise + */ + public static boolean isPresent(Dependency dependency) { + try { + return CACHE.computeIfAbsent( + dependency, + (dep) -> { + for (String classNameToTest : dependency.classes()) { + // Note that this lands in a pretty similar spot to + // Reflection.loadClass() with a null class loader + // arg. Major difference here is that we avoid the + // more complex exception handling/logging ops in + // that code. 
+ try { + Class.forName(classNameToTest); + } catch (LinkageError | Exception e) { + return false; + } + } + return true; + }); + } catch (Exception e) { + return false; + } + } +} From 5b39de639d00d7be3e41ab99a328511a32593a5f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 13:26:43 +0200 Subject: [PATCH 747/979] Fix links to RetryPolicy methods in javadocs of driver error classes --- .../driver/api/core/connection/HeartbeatException.java | 4 ++-- .../api/core/servererrors/ReadFailureException.java | 2 +- .../api/core/servererrors/ReadTimeoutException.java | 8 ++++---- .../oss/driver/api/core/servererrors/ServerError.java | 2 +- .../driver/api/core/servererrors/TruncateException.java | 2 +- .../api/core/servererrors/UnavailableException.java | 8 ++++---- .../api/core/servererrors/WriteFailureException.java | 2 +- .../api/core/servererrors/WriteTimeoutException.java | 8 ++++---- 8 files changed, 18 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/HeartbeatException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/HeartbeatException.java index 183f7c5366e..e74446d2583 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/connection/HeartbeatException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/HeartbeatException.java @@ -28,8 +28,8 @@ * *

          Heartbeat queries are sent automatically on idle connections, to ensure that they are still * alive. If a heartbeat query fails, the connection is closed, and all pending queries are aborted. - * The exception will be passed to {@link RetryPolicy#onRequestAborted(Request, Throwable, int)}, - * which decides what to do next (the default policy retries the query on the next node). + * The exception will be passed to {@link RetryPolicy#onRequestAbortedVerdict(Request, Throwable, + * int)}, which decides what to do next (the default policy retries the query on the next node). */ public class HeartbeatException extends DriverException { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.java index adecf20ccbe..494102b120e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.java @@ -33,7 +33,7 @@ *

          This happens when some of the replicas that were contacted by the coordinator replied with an * error. * - *

          This exception is processed by {@link RetryPolicy#onErrorResponse(Request, + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the * request should be retried. If all other tried nodes also fail, this exception will appear in the * {@link AllNodesFailedException} thrown to the client. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java index 1d199a695eb..cac44b4983d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java @@ -27,10 +27,10 @@ /** * A server-side timeout during a read query. * - *

          This exception is processed by {@link RetryPolicy#onReadTimeout(Request, ConsistencyLevel, - * int, int, boolean, int)}, which will decide if it is rethrown directly to the client or if the - * request should be retried. If all other tried nodes also fail, this exception will appear in the - * {@link AllNodesFailedException} thrown to the client. + *

          This exception is processed by {@link RetryPolicy#onReadTimeoutVerdict(Request, + * ConsistencyLevel, int, int, boolean, int)}, which will decide if it is rethrown directly to the + * client or if the request should be retried. If all other tried nodes also fail, this exception + * will appear in the {@link AllNodesFailedException} thrown to the client. */ public class ReadTimeoutException extends QueryConsistencyException { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ServerError.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ServerError.java index 9afe5ea45b3..6cc0c48f984 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ServerError.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ServerError.java @@ -29,7 +29,7 @@ * *

          This should be considered as a server bug and reported as such. * - *

          This exception is processed by {@link RetryPolicy#onErrorResponse(Request, + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the * request should be retried. If all other tried nodes also fail, this exception will appear in the * {@link AllNodesFailedException} thrown to the client. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/TruncateException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/TruncateException.java index 12f265e135d..39e79c53182 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/TruncateException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/TruncateException.java @@ -27,7 +27,7 @@ /** * An error during a truncation operation. * - *

          This exception is processed by {@link RetryPolicy#onErrorResponse(Request, + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the * request should be retried. If all other tried nodes also fail, this exception will appear in the * {@link AllNodesFailedException} thrown to the client. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/UnavailableException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/UnavailableException.java index 98e119791d8..39df831251a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/UnavailableException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/UnavailableException.java @@ -28,10 +28,10 @@ * Thrown when the coordinator knows there is not enough replicas alive to perform a query with the * requested consistency level. * - *

          This exception is processed by {@link RetryPolicy#onUnavailable(Request, ConsistencyLevel, - * int, int, int)}, which will decide if it is rethrown directly to the client or if the request - * should be retried. If all other tried nodes also fail, this exception will appear in the {@link - * AllNodesFailedException} thrown to the client. + *

          This exception is processed by {@link RetryPolicy#onUnavailableVerdict(Request, + * ConsistencyLevel, int, int, int)}, which will decide if it is rethrown directly to the client or + * if the request should be retried. If all other tried nodes also fail, this exception will appear + * in the {@link AllNodesFailedException} thrown to the client. */ public class UnavailableException extends QueryExecutionException { private final ConsistencyLevel consistencyLevel; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.java index f2589ff1b65..aa3ca7431fc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.java @@ -33,7 +33,7 @@ *

          This happens when some of the replicas that were contacted by the coordinator replied with an * error. * - *

          This exception is processed by {@link RetryPolicy#onErrorResponse(Request, + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the * request should be retried. If all other tried nodes also fail, this exception will appear in the * {@link AllNodesFailedException} thrown to the client. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.java index 600b5e36895..4cf922ce5fe 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.java @@ -28,10 +28,10 @@ /** * A server-side timeout during a write query. * - *

          This exception is processed by {@link RetryPolicy#onWriteTimeout(Request, ConsistencyLevel, - * WriteType, int, int, int)}, which will decide if it is rethrown directly to the client or if the - * request should be retried. If all other tried nodes also fail, this exception will appear in the - * {@link AllNodesFailedException} thrown to the client. + *

          This exception is processed by {@link RetryPolicy#onWriteTimeoutVerdict(Request, + * ConsistencyLevel, WriteType, int, int, int)}, which will decide if it is rethrown directly to the + * client or if the request should be retried. If all other tried nodes also fail, this exception + * will appear in the {@link AllNodesFailedException} thrown to the client. */ public class WriteTimeoutException extends QueryConsistencyException { From 456f173012e557d4536664586b3682adb706dd1f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 14:22:15 +0200 Subject: [PATCH 748/979] Fix compiler warnings in SessionUtils --- .../api/testinfra/session/SessionUtils.java | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java index 34f5554ccb2..3b9824698fe 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; +import com.datastax.oss.driver.internal.core.loadbalancing.helper.NodeFilterToDistanceEvaluatorAdapter; import java.lang.reflect.Method; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; @@ -130,34 +131,34 @@ public static SessionT newSession( return newSession(cassandraResourceRule, keyspace, null, null, null, loader); } - private static SessionBuilder builder( + private static SessionBuilder builder( CassandraResourceRule cassandraResource, CqlIdentifier keyspace, NodeStateListener nodeStateListener, SchemaChangeListener schemaChangeListener, Predicate nodeFilter) 
{ - SessionBuilder builder = - baseBuilder() - .addContactEndPoints(cassandraResource.getContactPoints()) - .withKeyspace(keyspace) - .withNodeStateListener(nodeStateListener) - .withSchemaChangeListener(schemaChangeListener); + SessionBuilder builder = baseBuilder(); + builder + .addContactEndPoints(cassandraResource.getContactPoints()) + .withKeyspace(keyspace) + .withNodeStateListener(nodeStateListener) + .withSchemaChangeListener(schemaChangeListener); if (nodeFilter != null) { - builder = builder.withNodeFilter(nodeFilter); + builder.withNodeDistanceEvaluator(new NodeFilterToDistanceEvaluatorAdapter(nodeFilter)); } return builder; } - @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) + @SuppressWarnings({"TypeParameterUnusedInFormals"}) public static SessionT newSession( CassandraResourceRule cassandraResource, CqlIdentifier keyspace, NodeStateListener nodeStateListener, SchemaChangeListener schemaChangeListener, Predicate nodeFilter) { - SessionBuilder builder = + SessionBuilder builder = builder(cassandraResource, keyspace, nodeStateListener, schemaChangeListener, nodeFilter); - return (SessionT) builder.build(); + return builder.build(); } @SuppressWarnings({"unchecked", "TypeParameterUnusedInFormals"}) @@ -168,7 +169,7 @@ public static SessionT newSession( SchemaChangeListener schemaChangeListener, Predicate nodeFilter, DriverConfigLoader loader) { - SessionBuilder builder = + SessionBuilder builder = builder(cassandraResource, keyspace, nodeStateListener, schemaChangeListener, nodeFilter); return (SessionT) builder.withConfigLoader(loader).build(); } From 6368e2e9effee4f118b75b217789dfe8d1d6dc50 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 14:22:29 +0200 Subject: [PATCH 749/979] Move test-infra module down --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 6f40a34b9a8..2abaa6ce103 100644 --- a/pom.xml +++ b/pom.xml @@ -32,9 +32,9 @@ query-builder mapper-runtime 
mapper-processor - test-infra metrics/micrometer metrics/microprofile + test-infra integration-tests osgi-tests distribution From 4ee475e4454b5ec0f408dc7313d3b934b40166e5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 17:06:37 +0200 Subject: [PATCH 750/979] Make field final --- .../oss/driver/internal/core/time/AtomicTimestampGenerator.java | 2 +- .../oss/driver/internal/core/util/DefaultDependencyChecker.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/time/AtomicTimestampGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/time/AtomicTimestampGenerator.java index 28bd5fdf2e4..c6e13716c54 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/time/AtomicTimestampGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/time/AtomicTimestampGenerator.java @@ -46,7 +46,7 @@ @ThreadSafe public class AtomicTimestampGenerator extends MonotonicTimestampGenerator { - private AtomicLong lastRef = new AtomicLong(0); + private final AtomicLong lastRef = new AtomicLong(0); public AtomicTimestampGenerator(DriverContext context) { super(context); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java index 8dfabb66105..755c4a1252a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/DefaultDependencyChecker.java @@ -28,7 +28,7 @@ public class DefaultDependencyChecker { private static final Logger LOG = LoggerFactory.getLogger(DefaultDependencyChecker.class); - private static ConcurrentHashMap CACHE = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap CACHE = new ConcurrentHashMap<>(); /** * Return true iff we can find all classes for the 
dependency on the classpath, false otherwise From 0606b495c3088dddeec4d213e6603214f7b184c5 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 18:15:10 +0200 Subject: [PATCH 751/979] JAVA-2949: Provide mapper support for CompletionStage> (#1563) --- changelog/README.md | 1 + .../oss/driver/mapper/QueryReturnTypesIT.java | 11 ++++++++++ .../datastax/oss/driver/mapper/SelectIT.java | 17 ++++++++++++++++ manual/mapper/daos/select/README.md | 5 +++++ .../dao/DaoSelectMethodGenerator.java | 2 ++ .../dao/DefaultDaoReturnTypeKind.java | 20 +++++++++++++++++++ .../dao/DefaultDaoReturnTypeParser.java | 1 + .../dao/DaoQueryMethodGeneratorTest.java | 8 +++++--- .../dao/DaoSelectMethodGeneratorTest.java | 4 ++-- .../oss/driver/internal/mapper/DaoBase.java | 8 ++++++++ 10 files changed, 72 insertions(+), 5 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 778ef17730a..8f994136575 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.11.3 (in progress) +- [bug] JAVA-2949: Provide mapper support for CompletionStage> - [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck ### 4.11.2 diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java index 8002bf19f6a..990b38ac2e1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/QueryReturnTypesIT.java @@ -47,6 +47,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.ExecutionException; import java.util.stream.Stream; import org.junit.Before; import org.junit.BeforeClass; @@ -239,6 +240,13 @@ public void should_execute_async_query_and_map_to_iterable() { assertThat(iterable.hasMorePages()).isFalse(); } + 
@Test + public void should_execute_query_and_map_to_stream_async() + throws ExecutionException, InterruptedException { + CompletableFuture> stream = dao.findByIdAsStreamAsync(1); + assertThat(stream.get()).hasSize(10); + } + @Dao @DefaultNullSavingStrategy(NullSavingStrategy.SET_TO_NULL) public interface TestDao { @@ -300,6 +308,9 @@ public interface TestDao { @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") CompletableFuture> findByIdAsync(int id); + + @Query("SELECT * FROM ${qualifiedTableId} WHERE id = :id") + CompletableFuture> findByIdAsStreamAsync(int id); } @Entity diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java index 2dda4b7e63a..ccd98fad15a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectIT.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.MappedAsyncPagingIterable; import com.datastax.oss.driver.api.core.PagingIterable; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.annotations.Dao; @@ -100,11 +101,21 @@ public void should_select_all() { assertThat(dao.all().all()).hasSize(2); } + @Test + public void should_select_all_async() { + assertThat(CompletableFutures.getUninterruptibly(dao.allAsync()).currentPage()).hasSize(2); + } + @Test public void should_select_all_stream() { assertThat(dao.stream()).hasSize(2); } + @Test + public void should_select_all_stream_async() { + assertThat(CompletableFutures.getUninterruptibly(dao.streamAsync())).hasSize(2); + } + @Test public void should_select_by_primary_key_asynchronously() { assertThat(CompletableFutures.getUninterruptibly(dao.findByIdAsync(FLAMETHROWER.getId()))) @@ -211,9 +222,15 @@ public 
interface ProductDao { @Select PagingIterable all(); + @Select + CompletionStage> allAsync(); + @Select Stream stream(); + @Select + CompletionStage> streamAsync(); + @Select Optional findOptionalById(UUID productId); diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 3cb6cc168c5..4900c67387c 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -135,6 +135,11 @@ In all cases, the method can return: @Select(customWhereClause = "description LIKE :searchString") CompletionStage> findByDescriptionAsync(String searchString); ``` + + For streams, even if the initial query is executed asynchronously, traversing the returned + stream may block the traversing thread. Blocking calls can indeed be required as more results + are fetched from the server in the background. For this reason, _the usage of + `CompletionStage>` cannot be considered as a fully asynchronous execution method_. * a [MappedReactiveResultSet] of the entity class. 
diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java index 4d0d7457da6..70b24add090 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGenerator.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_PAGING_ITERABLE; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_OPTIONAL_ENTITY; +import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.FUTURE_OF_STREAM; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.MAPPED_REACTIVE_RESULT_SET; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.OPTIONAL_ENTITY; import static com.datastax.oss.driver.internal.mapper.processor.dao.DefaultDaoReturnTypeKind.PAGING_ITERABLE; @@ -71,6 +72,7 @@ protected Set getSupportedReturnTypes() { PAGING_ITERABLE, STREAM, FUTURE_OF_ASYNC_PAGING_ITERABLE, + FUTURE_OF_STREAM, MAPPED_REACTIVE_RESULT_SET, CUSTOM); } diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java index 41c841cd2e7..a4f111bd9ce 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java +++ 
b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeKind.java @@ -475,6 +475,26 @@ public CodeBlock wrapWithErrorHandling( return innerBlock; } }, + + FUTURE_OF_STREAM { + @Override + public void addExecuteStatement( + CodeBlock.Builder methodBuilder, + String helperFieldName, + ExecutableElement methodElement, + Map typeParameters) { + methodBuilder.addStatement( + "return executeAsyncAndMapToEntityStream(boundStatement, $L)", helperFieldName); + } + + @Override + public CodeBlock wrapWithErrorHandling( + CodeBlock innerBlock, + ExecutableElement methodElement, + Map typeParameters) { + return wrapWithErrorHandling(innerBlock, FAILED_FUTURE); + } + }, ; @Override diff --git a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java index 5d7c18c63cf..786d7d4b830 100644 --- a/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java +++ b/mapper-processor/src/main/java/com/datastax/oss/driver/internal/mapper/processor/dao/DefaultDaoReturnTypeParser.java @@ -101,6 +101,7 @@ public class DefaultDaoReturnTypeParser implements DaoReturnTypeParser { .put( MappedAsyncPagingIterable.class, DefaultDaoReturnTypeKind.FUTURE_OF_ASYNC_PAGING_ITERABLE) + .put(Stream.class, DefaultDaoReturnTypeKind.FUTURE_OF_STREAM) .build(); protected final ProcessorContext context; diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java index ea0f28badce..bda92a40b48 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java +++ 
b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoQueryMethodGeneratorTest.java @@ -43,9 +43,11 @@ public static Object[][] invalidSignatures() { { "Invalid return type: Query methods must return one of [VOID, BOOLEAN, LONG, ROW, " + "ENTITY, OPTIONAL_ENTITY, RESULT_SET, BOUND_STATEMENT, PAGING_ITERABLE, FUTURE_OF_VOID, " - + "FUTURE_OF_BOOLEAN, FUTURE_OF_LONG, FUTURE_OF_ROW, FUTURE_OF_ENTITY, " - + "FUTURE_OF_OPTIONAL_ENTITY, FUTURE_OF_ASYNC_RESULT_SET, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE, REACTIVE_RESULT_SET, MAPPED_REACTIVE_RESULT_SET, STREAM]", + + "FUTURE_OF_BOOLEAN, FUTURE_OF_LONG, FUTURE_OF_ROW, " + + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, " + + "FUTURE_OF_ASYNC_RESULT_SET, FUTURE_OF_ASYNC_PAGING_ITERABLE, " + + "REACTIVE_RESULT_SET, MAPPED_REACTIVE_RESULT_SET, " + + "STREAM, FUTURE_OF_STREAM]", MethodSpec.methodBuilder("select") .addAnnotation( AnnotationSpec.builder(Query.class) diff --git a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java index c133d19e41a..4f9307f121c 100644 --- a/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java +++ b/mapper-processor/src/test/java/com/datastax/oss/driver/internal/mapper/processor/dao/DaoSelectMethodGeneratorTest.java @@ -42,7 +42,7 @@ public static Object[][] invalidSignatures() { { "Invalid return type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, STREAM, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, FUTURE_OF_STREAM, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) @@ -52,7 
+52,7 @@ public static Object[][] invalidSignatures() { { "Invalid return type: Select methods must return one of [ENTITY, OPTIONAL_ENTITY, " + "FUTURE_OF_ENTITY, FUTURE_OF_OPTIONAL_ENTITY, PAGING_ITERABLE, STREAM, " - + "FUTURE_OF_ASYNC_PAGING_ITERABLE, MAPPED_REACTIVE_RESULT_SET]", + + "FUTURE_OF_ASYNC_PAGING_ITERABLE, FUTURE_OF_STREAM, MAPPED_REACTIVE_RESULT_SET]", MethodSpec.methodBuilder("select") .addAnnotation(Select.class) .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) diff --git a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java index 4af39a59b84..862d31b0b3d 100644 --- a/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java +++ b/mapper-runtime/src/main/java/com/datastax/oss/driver/internal/mapper/DaoBase.java @@ -35,6 +35,7 @@ import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import com.datastax.oss.driver.internal.core.cql.ResultSets; import com.datastax.oss.protocol.internal.ProtocolConstants; import java.time.Duration; import java.util.Optional; @@ -290,6 +291,13 @@ CompletableFuture> executeAsyncAndMapToEntity return executeAsync(statement).thenApply(rs -> rs.map(entityHelper::get)); } + protected CompletableFuture> executeAsyncAndMapToEntityStream( + Statement statement, EntityHelper entityHelper) { + return executeAsync(statement) + .thenApply(ResultSets::newInstance) + .thenApply(rs -> StreamSupport.stream(rs.map(entityHelper::get).spliterator(), false)); + } + protected static void throwIfProtocolVersionV3(MapperContext context) { if (isProtocolVersionV3(context)) { throw new MapperException( From 4da0ccb1b8f715e4392bce78b00aab0bdcf82420 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 18:16:46 +0200 Subject: 
[PATCH 752/979] Switch tests to Cassandra 4.0 by default (#1566) * Switch CCM to Cassandra 4.0 by default * Add a few startup tweaks for C* 3+ and 4+ --- .../driver/core/cql/PreparedStatementIT.java | 4 +- .../oss/driver/core/metadata/SchemaIT.java | 18 +++++- .../src/test/resources/logback-test.xml | 2 +- .../driver/api/testinfra/ccm/CcmBridge.java | 62 +++++++++---------- .../DefaultCcmBridgeBuilderCustomizer.java | 8 +++ 5 files changed, 58 insertions(+), 36 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 1e7e91084ba..1b07edb53af 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -279,7 +279,9 @@ public void should_fail_to_reprepare_if_query_becomes_invalid() { Throwable t = catchThrowable(() -> session.execute(ps.bind())); // Then - assertThat(t).isInstanceOf(InvalidQueryException.class).hasMessage("Undefined column name d"); + assertThat(t) + .isInstanceOf(InvalidQueryException.class) + .hasMessageContaining("Undefined column name d"); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index dc915e25c77..1e2803c7ef4 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -226,7 +226,8 @@ public void should_get_virtual_metadata() { assertThat(tm).isNotNull(); assertThat(tm.getName().toString()).isEqualTo("sstable_tasks"); assertThat(tm.isVirtual()).isTrue(); - assertThat(tm.getColumns().size()).isEqualTo(7); + // DSE 6.8+ reports 7 columns, Cassandra 4+ reports 8 columns + 
assertThat(tm.getColumns().size()).isGreaterThanOrEqualTo(7); assertThat(tm.getIndexes().size()).isEqualTo(0); assertThat(tm.getPartitionKey().size()).isEqualTo(1); assertThat(tm.getPartitionKey().get(0).getName().toString()).isEqualTo("keyspace_name"); @@ -235,11 +236,24 @@ public void should_get_virtual_metadata() { assertThat(tm.getOptions().size()).isEqualTo(0); assertThat(tm.getKeyspace()).isEqualTo(kmd.getName()); assertThat(tm.describe(true)) - .isEqualTo( + .isIn( + // DSE 6.8+ + "/* VIRTUAL TABLE system_views.sstable_tasks (\n" + + " keyspace_name text,\n" + + " table_name text,\n" + + " task_id uuid,\n" + + " kind text,\n" + + " progress bigint,\n" + + " total bigint,\n" + + " unit text,\n" + + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + + "); */", + // Cassandra 4.0 "/* VIRTUAL TABLE system_views.sstable_tasks (\n" + " keyspace_name text,\n" + " table_name text,\n" + " task_id uuid,\n" + + " completion_ratio double,\n" + " kind text,\n" + " progress bigint,\n" + " total bigint,\n" diff --git a/integration-tests/src/test/resources/logback-test.xml b/integration-tests/src/test/resources/logback-test.xml index 36dd79c1040..b3668ff68b0 100644 --- a/integration-tests/src/test/resources/logback-test.xml +++ b/integration-tests/src/test/resources/logback-test.xml @@ -29,7 +29,7 @@ - + diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index c41aa0b278f..cef9e13c4b6 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -49,28 +49,10 @@ public class CcmBridge implements AutoCloseable { - private static final Logger logger = LoggerFactory.getLogger(CcmBridge.class); - - private final int[] nodes; - - private final Path configDirectory; - - private final AtomicBoolean started = new AtomicBoolean(); 
- - private final AtomicBoolean created = new AtomicBoolean(); - - private final String ipPrefix; - - private final Map cassandraConfiguration; - private final Map dseConfiguration; - private final List rawDseYaml; - private final List createOptions; - private final List dseWorkloads; - - private final String jvmArgs; + private static final Logger LOG = LoggerFactory.getLogger(CcmBridge.class); public static final Version VERSION = - Objects.requireNonNull(Version.parse(System.getProperty("ccm.version", "3.11.0"))); + Objects.requireNonNull(Version.parse(System.getProperty("ccm.version", "4.0.0"))); public static final String INSTALL_DIRECTORY = System.getProperty("ccm.directory"); @@ -126,6 +108,26 @@ public class CcmBridge implements AutoCloseable { private static final Version V3_0_15 = Version.parse("3.0.15"); private static final Version V2_1_19 = Version.parse("2.1.19"); + static { + if (DSE_ENABLEMENT) { + LOG.info("CCM Bridge configured with DSE version {}", VERSION); + } else { + LOG.info("CCM Bridge configured with Apache Cassandra version {}", VERSION); + } + } + + private final int[] nodes; + private final Path configDirectory; + private final AtomicBoolean started = new AtomicBoolean(); + private final AtomicBoolean created = new AtomicBoolean(); + private final String ipPrefix; + private final Map cassandraConfiguration; + private final Map dseConfiguration; + private final List rawDseYaml; + private final List createOptions; + private final List dseWorkloads; + private final String jvmArgs; + private CcmBridge( Path configDirectory, int[] nodes, @@ -141,10 +143,7 @@ private CcmBridge( // Hack to ensure that the default DC is always called 'dc1': pass a list ('-nX:0') even if // there is only one DC (with '-nX', CCM configures `SimpleSnitch`, which hard-codes the name // to 'datacenter1') - int[] tmp = new int[2]; - tmp[0] = nodes[0]; - tmp[1] = 0; - this.nodes = tmp; + this.nodes = new int[] {nodes[0], 0}; } else { this.nodes = nodes; } @@ -351,9 
+350,9 @@ private void executeCheckLogError() { private void execute(CommandLine cli, boolean forceErrorLogging) { if (forceErrorLogging) { - logger.error("Executing: " + cli); + LOG.error("Executing: " + cli); } else { - logger.debug("Executing: " + cli); + LOG.debug("Executing: " + cli); } ExecuteWatchdog watchDog = new ExecuteWatchdog(TimeUnit.MINUTES.toMillis(10)); try (LogOutputStream outStream = @@ -361,9 +360,9 @@ private void execute(CommandLine cli, boolean forceErrorLogging) { @Override protected void processLine(String line, int logLevel) { if (forceErrorLogging) { - logger.error("ccmout> {}", line); + LOG.error("ccmout> {}", line); } else { - logger.debug("ccmout> {}", line); + LOG.debug("ccmout> {}", line); } } }; @@ -371,7 +370,7 @@ protected void processLine(String line, int logLevel) { new LogOutputStream() { @Override protected void processLine(String line, int logLevel) { - logger.error("ccmerr> {}", line); + LOG.error("ccmerr> {}", line); } }) { Executor executor = new DefaultExecutor(); @@ -381,8 +380,7 @@ protected void processLine(String line, int logLevel) { int retValue = executor.execute(cli); if (retValue != 0) { - logger.error( - "Non-zero exit code ({}) returned from executing ccm command: {}", retValue, cli); + LOG.error("Non-zero exit code ({}) returned from executing ccm command: {}", retValue, cli); } } catch (IOException ex) { if (watchDog.killedProcess()) { @@ -413,7 +411,7 @@ private static File createTempStore(String storePath) { f.deleteOnExit(); Resources.copy(CcmBridge.class.getResource(storePath), os); } catch (IOException e) { - logger.warn("Failure to write keystore, SSL-enabled servers may fail to start.", e); + LOG.warn("Failure to write keystore, SSL-enabled servers may fail to start.", e); } return f; } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java 
b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java index 8dfe6e99b6e..96a0ac5fdce 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java @@ -17,6 +17,8 @@ import com.datastax.oss.driver.api.core.Version; +/** @see CcmRule */ +@SuppressWarnings("unused") public class DefaultCcmBridgeBuilderCustomizer { public static CcmBridge.Builder configureBuilder(CcmBridge.Builder builder) { @@ -25,6 +27,12 @@ public static CcmBridge.Builder configureBuilder(CcmBridge.Builder builder) { builder.withCassandraConfiguration("enable_materialized_views", true); builder.withCassandraConfiguration("enable_sasi_indexes", true); } + if (CcmBridge.VERSION.nextStable().compareTo(Version.V3_0_0) >= 0) { + builder.withJvmArgs("-Dcassandra.superuser_setup_delay_ms=0"); + builder.withJvmArgs("-Dcassandra.skip_wait_for_gossip_to_settle=0"); + builder.withCassandraConfiguration("num_tokens", "1"); + builder.withCassandraConfiguration("initial_token", "0"); + } return builder; } } From 5642d4df822e56386cfbe7579284d8db1168d002 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 18:29:19 +0200 Subject: [PATCH 753/979] Update changelog with 4.13.0 --- changelog/README.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 18e34c8591c..dba51c891ab 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,13 @@ +### 4.13.0 (in progress) + +Merged from 4.12.x: + +- [bug] JAVA-2949: Provide mapper support for CompletionStage> +- [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck + ### 4.12.1 (in progress) Merged from 4.11.x: From f08db2ef5fcc70b3486bd0b9ad74e9356a1be7bc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 18:49:33 +0200 Subject: [PATCH 
754/979] JAVA-2951: Accept multiple node state listeners, schema change listeners and request trackers (#1565) --- changelog/README.md | 2 + .../api/core/config/DefaultDriverOption.java | 30 ++ .../api/core/config/OngoingConfigOptions.java | 10 + .../driver/api/core/config/OptionsMap.java | 3 - .../api/core/config/TypedDriverOption.java | 43 +- .../loadbalancing/LoadBalancingPolicy.java | 26 +- .../api/core/metadata/NodeStateListener.java | 8 +- .../metadata/schema/SchemaChangeListener.java | 5 +- .../core/session/ProgrammaticArguments.java | 63 +++ .../api/core/session/SessionBuilder.java | 76 ++- .../api/core/tracker/RequestTracker.java | 6 +- .../core/context/DefaultDriverContext.java | 162 +++++-- .../DefaultLoadBalancingPolicy.java | 9 +- .../MultiplexingNodeStateListener.java | 125 +++++ .../core/metadata/NoopNodeStateListener.java | 21 +- .../MultiplexingSchemaChangeListener.java | 209 ++++++++ .../schema/NoopSchemaChangeListener.java | 21 +- .../tracker/MultiplexingRequestTracker.java | 122 +++-- .../core/tracker/NoopRequestTracker.java | 20 +- .../internal/core/tracker/RequestLogger.java | 4 +- .../driver/internal/core/util/Reflection.java | 85 +++- core/src/main/resources/reference.conf | 66 +-- ...ringLoadBalancingPolicyDcFailoverTest.java | 10 - ...erringLoadBalancingPolicyDistanceTest.java | 10 - ...nferringLoadBalancingPolicyEventsTest.java | 10 - ...cInferringLoadBalancingPolicyInitTest.java | 10 - ...aultLoadBalancingPolicyDcFailoverTest.java | 10 - ...efaultLoadBalancingPolicyDistanceTest.java | 10 - .../DefaultLoadBalancingPolicyEventsTest.java | 10 - .../DefaultLoadBalancingPolicyInitTest.java | 10 - ...faultLoadBalancingPolicyQueryPlanTest.java | 2 - ...LoadBalancingPolicyRequestTrackerTest.java | 2 - .../MultiplexingNodeStateListenerTest.java | 194 ++++++++ .../MultiplexingSchemaChangeListenerTest.java | 450 ++++++++++++++++++ .../MultiplexingRequestTrackerTest.java | 211 ++++++++ .../core/config/DriverConfigValidationIT.java | 43 +- 
.../oss/driver/core/session/ListenersIT.java | 151 +++++- .../driver/core/tracker/RequestLoggerIT.java | 19 +- manual/core/metadata/node/README.md | 25 +- manual/core/metadata/schema/README.md | 22 +- manual/core/request_tracker/README.md | 30 +- upgrade_guide/README.md | 63 +++ 42 files changed, 2053 insertions(+), 355 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListener.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListener.java rename core/src/main/java/com/datastax/{dse => oss}/driver/internal/core/tracker/MultiplexingRequestTracker.java (53%) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListenerTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListenerTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTrackerTest.java diff --git a/changelog/README.md b/changelog/README.md index dba51c891ab..ed90fea7c97 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.13.0 (in progress) +- [improvement] JAVA-2951: Accept multiple node state listeners, schema change listeners and request trackers + Merged from 4.12.x: - [bug] JAVA-2949: Provide mapper support for CompletionStage> diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 3d2fde238e9..b2fba21d6a3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -296,7 +296,10 @@ public enum DefaultDriverOption implements DriverOption { * The class of a session-wide component that tracks the 
outcome of requests. * *

          Value-type: {@link String} + * + * @deprecated Use {@link #REQUEST_TRACKER_CLASSES} instead. */ + @Deprecated REQUEST_TRACKER_CLASS("advanced.request-tracker.class"), /** * Whether to log successful requests. @@ -388,14 +391,20 @@ public enum DefaultDriverOption implements DriverOption { * The class of a session-wide component that listens for node state changes. * *

          Value-type: {@link String} + * + * @deprecated Use {@link #METADATA_NODE_STATE_LISTENER_CLASSES} instead. */ + @Deprecated METADATA_NODE_STATE_LISTENER_CLASS("advanced.node-state-listener.class"), /** * The class of a session-wide component that listens for schema changes. * *

          Value-type: {@link String} + * + * @deprecated Use {@link #METADATA_SCHEMA_CHANGE_LISTENER_CLASSES} instead. */ + @Deprecated METADATA_SCHEMA_CHANGE_LISTENER_CLASS("advanced.schema-change-listener.class"), /** @@ -909,6 +918,27 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: boolean */ PREPARED_CACHE_WEAK_VALUES("advanced.prepared-statements.prepared-cache.weak-values"), + + /** + * The classes of session-wide components that track the outcome of requests. + * + *

          Value-type: List of {@link String} + */ + REQUEST_TRACKER_CLASSES("advanced.request-tracker.classes"), + + /** + * The classes of session-wide components that listen for node state changes. + * + *

          Value-type: List of {@link String} + */ + METADATA_NODE_STATE_LISTENER_CLASSES("advanced.node-state-listener.classes"), + + /** + * The classes of session-wide components that listen for schema changes. + * + *

          Value-type: List of {@link String} + */ + METADATA_SCHEMA_CHANGE_LISTENER_CLASSES("advanced.schema-change-listener.classes"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OngoingConfigOptions.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OngoingConfigOptions.java index 0345150d770..62b76a9d39e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OngoingConfigOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OngoingConfigOptions.java @@ -19,6 +19,7 @@ import java.time.Duration; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; /** An object where config options can be set programmatically. */ public interface OngoingConfigOptions> { @@ -59,6 +60,15 @@ default SelfT withClass(@NonNull DriverOption option, @NonNull Class value) { return withString(option, value.getName()); } + /** + * Note that this is just a shortcut to call {@link #withStringList(DriverOption, List)} with + * class names obtained from {@link Class#getName()}. 
+ */ + @NonNull + default SelfT withClassList(@NonNull DriverOption option, @NonNull List> values) { + return withStringList(option, values.stream().map(Class::getName).collect(Collectors.toList())); + } + @NonNull SelfT withStringList(@NonNull DriverOption option, @NonNull List value); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 90e66126dd9..8f5aa01592e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -280,10 +280,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_THRESHOLD, Duration.ofSeconds(1)); map.put(TypedDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL, Duration.ofSeconds(10)); map.put(TypedDriverOption.TIMESTAMP_GENERATOR_FORCE_JAVA_CLOCK, false); - map.put(TypedDriverOption.REQUEST_TRACKER_CLASS, "NoopRequestTracker"); map.put(TypedDriverOption.REQUEST_THROTTLER_CLASS, "PassThroughRequestThrottler"); - map.put(TypedDriverOption.METADATA_NODE_STATE_LISTENER_CLASS, "NoopNodeStateListener"); - map.put(TypedDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS, "NoopSchemaChangeListener"); map.put(TypedDriverOption.ADDRESS_TRANSLATOR_CLASS, "PassThroughAddressTranslator"); map.put(TypedDriverOption.RESOLVE_CONTACT_POINTS, true); map.put(TypedDriverOption.PROTOCOL_MAX_FRAME_LENGTH, 256L * 1024 * 1024); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 044a7b71de6..bce8f923c77 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -254,9 +254,21 @@ public String toString() { 
public static final TypedDriverOption TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL = new TypedDriverOption<>( DefaultDriverOption.TIMESTAMP_GENERATOR_DRIFT_WARNING_INTERVAL, GenericType.DURATION); - /** The class of a session-wide component that tracks the outcome of requests. */ + + /** + * The class of a session-wide component that tracks the outcome of requests. + * + * @deprecated Use {@link #REQUEST_TRACKER_CLASSES} instead. + */ + @Deprecated public static final TypedDriverOption REQUEST_TRACKER_CLASS = new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACKER_CLASS, GenericType.STRING); + + /** The classes of session-wide components that track the outcome of requests. */ + public static final TypedDriverOption> REQUEST_TRACKER_CLASSES = + new TypedDriverOption<>( + DefaultDriverOption.REQUEST_TRACKER_CLASSES, GenericType.listOf(String.class)); + /** Whether to log successful requests. */ public static final TypedDriverOption REQUEST_LOGGER_SUCCESS_ENABLED = new TypedDriverOption<>( @@ -312,14 +324,39 @@ public String toString() { public static final TypedDriverOption REQUEST_THROTTLER_DRAIN_INTERVAL = new TypedDriverOption<>( DefaultDriverOption.REQUEST_THROTTLER_DRAIN_INTERVAL, GenericType.DURATION); - /** The class of a session-wide component that listens for node state changes. */ + + /** + * The class of a session-wide component that listens for node state changes. + * + * @deprecated Use {@link #METADATA_NODE_STATE_LISTENER_CLASSES} instead. + */ + @Deprecated public static final TypedDriverOption METADATA_NODE_STATE_LISTENER_CLASS = new TypedDriverOption<>( DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS, GenericType.STRING); - /** The class of a session-wide component that listens for schema changes. */ + + /** + * The class of a session-wide component that listens for schema changes. + * + * @deprecated Use {@link #METADATA_SCHEMA_CHANGE_LISTENER_CLASSES} instead. 
+ */ + @Deprecated public static final TypedDriverOption METADATA_SCHEMA_CHANGE_LISTENER_CLASS = new TypedDriverOption<>( DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS, GenericType.STRING); + + /** The classes of session-wide components that listen for node state changes. */ + public static final TypedDriverOption> METADATA_NODE_STATE_LISTENER_CLASSES = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASSES, + GenericType.listOf(String.class)); + + /** The classes of session-wide components that listen for schema changes. */ + public static final TypedDriverOption> METADATA_SCHEMA_CHANGE_LISTENER_CLASSES = + new TypedDriverOption<>( + DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASSES, + GenericType.listOf(String.class)); + /** * The class of the address translator to use to convert the addresses sent by Cassandra nodes * into ones that the driver uses to connect. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java index 425e11c0c5a..d5663cce42c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java @@ -19,22 +19,40 @@ import com.datastax.oss.driver.api.core.metadata.NodeState; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; +import java.util.Optional; import java.util.Queue; import java.util.UUID; /** Decides which Cassandra nodes to contact for each query. 
*/ public interface LoadBalancingPolicy extends AutoCloseable { + /** + * Returns an optional {@link RequestTracker} to be registered with the session. Registering a + * request tracker allows load-balancing policies to track node latencies in order to pick the + * fastest ones. + * + *

          This method is invoked only once during session configuration, and before any other methods + * in this interface. Note that at this point, the driver hasn't connected to any node yet. + * + * @since 4.13.0 + */ + @NonNull + default Optional getRequestTracker() { + return Optional.empty(); + } + /** * Initializes this policy with the nodes discovered during driver initialization. * *

          This method is guaranteed to be called exactly once per instance, and before any other - * method in this class. At this point, the driver has successfully connected to one of the - * contact points, and performed a first refresh of topology information (by default, the contents - * of {@code system.peers}), to discover other nodes in the cluster. + * method in this interface except {@link #getRequestTracker()}. At this point, the driver has + * successfully connected to one of the contact points, and performed a first refresh of topology + * information (by default, the contents of {@code system.peers}), to discover other nodes in the + * cluster. * *

          This method must call {@link DistanceReporter#setDistance(Node, NodeDistance) * distanceReporter.setDistance} for each provided node (otherwise that node will stay at distance @@ -50,7 +68,7 @@ public interface LoadBalancingPolicy extends AutoCloseable { * @param nodes all the nodes that are known to exist in the cluster (regardless of their state) * at the time of invocation. * @param distanceReporter an object that will be used by the policy to signal distance changes. - * Implementations will typically store a this in a field, since new nodes may get {@link + * Implementations will typically store this in a field, since new nodes may get {@link * #onAdd(Node) added} later and will need to have their distance set (or the policy might * change distances dynamically over time). */ diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeStateListener.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeStateListener.java index 3d665a3dc3d..66c5654ddd4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeStateListener.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/NodeStateListener.java @@ -23,8 +23,9 @@ /** * A listener that gets notified when nodes states change. * - *

          An implementation of this interface can be registered in the configuration, or with {@link - * SessionBuilder#withNodeStateListener(NodeStateListener)}. + *

          Implementations of this interface can be registered either via the configuration (see {@code + * reference.conf} in the manual or core driver JAR), or programmatically via {@link + * SessionBuilder#addNodeStateListener(NodeStateListener)}. * *

          Note that the methods defined by this interface will be executed by internal driver threads, * and are therefore expected to have short execution times. If you need to perform long @@ -33,6 +34,9 @@ * *

          If you implement this interface but don't need to implement all the methods, extend {@link * NodeStateListenerBase}. + * + *

          If your implementation of this interface requires access to a fully-initialized session, + * consider wrapping it in a {@link SafeInitNodeStateListener}. */ public interface NodeStateListener extends AutoCloseable { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.java b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.java index bfcd53b8a16..4ea47713df3 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.java @@ -23,8 +23,9 @@ /** * Tracks schema changes. * - *

          An implementation of this interface can be registered in the configuration, or with {@link - * SessionBuilder#withSchemaChangeListener(SchemaChangeListener)}. + *

          Implementations of this interface can be registered either via the configuration (see {@code + * reference.conf} in the manual or core driver JAR), or programmatically via {@link + * SessionBuilder#addSchemaChangeListener(SchemaChangeListener)}. * *

          Note that the methods defined by this interface will be executed by internal driver threads, * and are therefore expected to have short execution times. If you need to perform long diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 9e4f034ef00..b8b2bd8b723 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -25,6 +25,9 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.internal.core.loadbalancing.helper.NodeFilterToDistanceEvaluatorAdapter; +import com.datastax.oss.driver.internal.core.metadata.MultiplexingNodeStateListener; +import com.datastax.oss.driver.internal.core.metadata.schema.MultiplexingSchemaChangeListener; +import com.datastax.oss.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; @@ -33,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.UUID; import java.util.function.Predicate; @@ -217,18 +221,77 @@ public Builder withNodeStateListener(@Nullable NodeStateListener nodeStateListen return this; } + @NonNull + public Builder addNodeStateListener(@NonNull NodeStateListener nodeStateListener) { + Objects.requireNonNull(nodeStateListener, "nodeStateListener cannot be null"); + if (this.nodeStateListener == null) { + this.nodeStateListener = nodeStateListener; + } else { + NodeStateListener previousListener = this.nodeStateListener; + if (previousListener instanceof 
MultiplexingNodeStateListener) { + ((MultiplexingNodeStateListener) previousListener).register(nodeStateListener); + } else { + MultiplexingNodeStateListener multiplexingNodeStateListener = + new MultiplexingNodeStateListener(); + multiplexingNodeStateListener.register(previousListener); + multiplexingNodeStateListener.register(nodeStateListener); + this.nodeStateListener = multiplexingNodeStateListener; + } + } + return this; + } + @NonNull public Builder withSchemaChangeListener(@Nullable SchemaChangeListener schemaChangeListener) { this.schemaChangeListener = schemaChangeListener; return this; } + @NonNull + public Builder addSchemaChangeListener(@NonNull SchemaChangeListener schemaChangeListener) { + Objects.requireNonNull(schemaChangeListener, "schemaChangeListener cannot be null"); + if (this.schemaChangeListener == null) { + this.schemaChangeListener = schemaChangeListener; + } else { + SchemaChangeListener previousListener = this.schemaChangeListener; + if (previousListener instanceof MultiplexingSchemaChangeListener) { + ((MultiplexingSchemaChangeListener) previousListener).register(schemaChangeListener); + } else { + MultiplexingSchemaChangeListener multiplexingSchemaChangeListener = + new MultiplexingSchemaChangeListener(); + multiplexingSchemaChangeListener.register(previousListener); + multiplexingSchemaChangeListener.register(schemaChangeListener); + this.schemaChangeListener = multiplexingSchemaChangeListener; + } + } + return this; + } + @NonNull public Builder withRequestTracker(@Nullable RequestTracker requestTracker) { this.requestTracker = requestTracker; return this; } + @NonNull + public Builder addRequestTracker(@NonNull RequestTracker requestTracker) { + Objects.requireNonNull(requestTracker, "requestTracker cannot be null"); + if (this.requestTracker == null) { + this.requestTracker = requestTracker; + } else { + RequestTracker previousTracker = this.requestTracker; + if (previousTracker instanceof MultiplexingRequestTracker) { + 
((MultiplexingRequestTracker) previousTracker).register(requestTracker); + } else { + MultiplexingRequestTracker multiplexingRequestTracker = new MultiplexingRequestTracker(); + multiplexingRequestTracker.register(previousTracker); + multiplexingRequestTracker.register(requestTracker); + this.requestTracker = multiplexingRequestTracker; + } + } + return this; + } + @NonNull public Builder withLocalDatacenter( @NonNull String profileName, @NonNull String localDatacenter) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 990044b66c9..02070063d3b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -220,8 +220,11 @@ public SelfT addTypeCodecs(@NonNull TypeCodec... typeCodecs) { /** * Registers a node state listener to use with the session. * - *

          If the listener is specified programmatically with this method, it overrides the - * configuration (that is, the {@code metadata.node-state-listener.class} option will be ignored). + *

          Listeners can be registered in two ways: either programmatically with this method, or via + * the configuration using the {@code advanced.metadata.node-state-listener.classes} option. + * + *

          This method unregisters any previously-registered listener. If you intend to register more + * than one listener, use {@link #addNodeStateListener(NodeStateListener)} instead. */ @NonNull public SelfT withNodeStateListener(@Nullable NodeStateListener nodeStateListener) { @@ -229,12 +232,32 @@ public SelfT withNodeStateListener(@Nullable NodeStateListener nodeStateListener return self; } + /** + * Registers a node state listener to use with the session, without removing previously-registered + * listeners. + * + *

          Listeners can be registered in two ways: either programmatically with this method, or via + * the configuration using the {@code advanced.metadata.node-state-listener.classes} option. + * + *

          Unlike {@link #withNodeStateListener(NodeStateListener)}, this method adds the new listener + * to the list of already-registered listeners, thus allowing applications to register multiple + * listeners. When multiple listeners are registered, they are notified in sequence every time a + * new listener event is triggered. + */ + @NonNull + public SelfT addNodeStateListener(@NonNull NodeStateListener nodeStateListener) { + programmaticArgumentsBuilder.addNodeStateListener(nodeStateListener); + return self; + } + /** * Registers a schema change listener to use with the session. * - *

          If the listener is specified programmatically with this method, it overrides the - * configuration (that is, the {@code metadata.schema-change-listener.class} option will be - * ignored). + *

          Listeners can be registered in two ways: either programmatically with this method, or via + * the configuration using the {@code advanced.metadata.schema-change-listener.classes} option. + * + *

          This method unregisters any previously-registered listener. If you intend to register more + * than one listener, use {@link #addSchemaChangeListener(SchemaChangeListener)} instead. */ @NonNull public SelfT withSchemaChangeListener(@Nullable SchemaChangeListener schemaChangeListener) { @@ -242,11 +265,32 @@ public SelfT withSchemaChangeListener(@Nullable SchemaChangeListener schemaChang return self; } + /** + * Registers a schema change listener to use with the session, without removing + * previously-registered listeners. + * + *

          Listeners can be registered in two ways: either programmatically with this method, or via + * the configuration using the {@code advanced.metadata.schema-change-listener.classes} option. + * + *

          Unlike {@link #withSchemaChangeListener(SchemaChangeListener)}, this method adds the new + * listener to the list of already-registered listeners, thus allowing applications to register + * multiple listeners. When multiple listeners are registered, they are notified in sequence every + * time a new listener event is triggered. + */ + @NonNull + public SelfT addSchemaChangeListener(@NonNull SchemaChangeListener schemaChangeListener) { + programmaticArgumentsBuilder.addSchemaChangeListener(schemaChangeListener); + return self; + } + /** * Registers a request tracker to use with the session. * - *

          If the tracker is specified programmatically with this method, it overrides the - * configuration (that is, the {@code request.tracker.class} option will be ignored). + *

          Trackers can be registered in two ways: either programmatically with this method, or via the + * configuration using the {@code advanced.request-tracker.classes} option. + * + *

          This method unregisters any previously-registered tracker. If you intend to register more + * than one tracker, use {@link #addRequestTracker(RequestTracker)} instead. */ @NonNull public SelfT withRequestTracker(@Nullable RequestTracker requestTracker) { @@ -254,6 +298,24 @@ public SelfT withRequestTracker(@Nullable RequestTracker requestTracker) { return self; } + /** + * Registers a request tracker to use with the session, without removing previously-registered + * trackers. + * + *

          Trackers can be registered in two ways: either programmatically with this method, or via the + * configuration using the {@code advanced.request-tracker.classes} option. + * + *

          Unlike {@link #withRequestTracker(RequestTracker)}, this method adds the new tracker to the + * list of already-registered trackers, thus allowing applications to register multiple trackers. + * When multiple trackers are registered, they are notified in sequence every time a new tracker + * event is triggered. + */ + @NonNull + public SelfT addRequestTracker(@NonNull RequestTracker requestTracker) { + programmaticArgumentsBuilder.addRequestTracker(requestTracker); + return self; + } + /** * Registers an authentication provider to use with the session. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java index 4d8f08e25a3..c88a61b037d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java @@ -27,9 +27,9 @@ /** * Tracks request execution for a session. * - *

          There is exactly one tracker per {@link Session}. It can be provided either via the - * configuration (see {@code reference.conf} in the manual or core driver JAR), or programmatically - * via {@link SessionBuilder#withRequestTracker(RequestTracker)}. + *

          Implementations of this interface can be registered either via the configuration (see {@code + * reference.conf} in the manual or core driver JAR), or programmatically via {@link + * SessionBuilder#addRequestTracker(RequestTracker)}. */ public interface RequestTracker extends AutoCloseable { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index fc73d47ca8d..2dc0e45d7b8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -19,7 +19,6 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.InsightsClientLifecycleListener; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.dse.driver.internal.core.type.codec.DseTypeCodecsRegistrar; import com.datastax.dse.protocol.internal.DseProtocolV1ClientCodecs; import com.datastax.dse.protocol.internal.DseProtocolV2ClientCodecs; @@ -59,7 +58,11 @@ import com.datastax.oss.driver.internal.core.metadata.DefaultTopologyMonitor; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; +import com.datastax.oss.driver.internal.core.metadata.MultiplexingNodeStateListener; +import com.datastax.oss.driver.internal.core.metadata.NoopNodeStateListener; import com.datastax.oss.driver.internal.core.metadata.TopologyMonitor; +import com.datastax.oss.driver.internal.core.metadata.schema.MultiplexingSchemaChangeListener; +import com.datastax.oss.driver.internal.core.metadata.schema.NoopSchemaChangeListener; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DefaultSchemaParserFactory; import 
com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; import com.datastax.oss.driver.internal.core.metadata.schema.queries.DefaultSchemaQueriesFactory; @@ -81,6 +84,7 @@ import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; import com.datastax.oss.driver.internal.core.ssl.JdkSslHandlerFactory; import com.datastax.oss.driver.internal.core.ssl.SslHandlerFactory; +import com.datastax.oss.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker; import com.datastax.oss.driver.internal.core.tracker.RequestLogFormatter; import com.datastax.oss.driver.internal.core.type.codec.registry.DefaultCodecRegistry; @@ -99,6 +103,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; import io.netty.buffer.ByteBuf; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -585,63 +590,120 @@ protected RequestThrottler buildRequestThrottler() { protected NodeStateListener buildNodeStateListener( NodeStateListener nodeStateListenerFromBuilder) { - return (nodeStateListenerFromBuilder != null) - ? 
nodeStateListenerFromBuilder - : Reflection.buildFromConfig( - this, - DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS, - NodeStateListener.class, - "com.datastax.oss.driver.internal.core.metadata") - .orElseThrow( - () -> - new IllegalArgumentException( - String.format( - "Missing node state listener, check your configuration (%s)", - DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS))); + List listeners = new ArrayList<>(); + if (nodeStateListenerFromBuilder != null) { + listeners.add(nodeStateListenerFromBuilder); + } + DefaultDriverOption newOption = DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASSES; + @SuppressWarnings("deprecation") + DefaultDriverOption legacyOption = DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS; + DriverExecutionProfile profile = config.getDefaultProfile(); + if (profile.isDefined(newOption)) { + listeners.addAll( + Reflection.buildFromConfigList( + this, + newOption, + NodeStateListener.class, + "com.datastax.oss.driver.internal.core.metadata")); + } + if (profile.isDefined(legacyOption)) { + LOG.warn( + "Option {} has been deprecated and will be removed in a future release; please use option {} instead.", + legacyOption, + newOption); + Reflection.buildFromConfig( + this, + legacyOption, + NodeStateListener.class, + "com.datastax.oss.driver.internal.core.metadata") + .ifPresent(listeners::add); + } + if (listeners.isEmpty()) { + return new NoopNodeStateListener(this); + } else if (listeners.size() == 1) { + return listeners.get(0); + } else { + return new MultiplexingNodeStateListener(listeners); + } } protected SchemaChangeListener buildSchemaChangeListener( SchemaChangeListener schemaChangeListenerFromBuilder) { - return (schemaChangeListenerFromBuilder != null) - ? 
schemaChangeListenerFromBuilder - : Reflection.buildFromConfig( - this, - DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS, - SchemaChangeListener.class, - "com.datastax.oss.driver.internal.core.metadata.schema") - .orElseThrow( - () -> - new IllegalArgumentException( - String.format( - "Missing schema change listener, check your configuration (%s)", - DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS))); + List listeners = new ArrayList<>(); + if (schemaChangeListenerFromBuilder != null) { + listeners.add(schemaChangeListenerFromBuilder); + } + DefaultDriverOption newOption = DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASSES; + @SuppressWarnings("deprecation") + DefaultDriverOption legacyOption = DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS; + DriverExecutionProfile profile = config.getDefaultProfile(); + if (profile.isDefined(newOption)) { + listeners.addAll( + Reflection.buildFromConfigList( + this, + newOption, + SchemaChangeListener.class, + "com.datastax.oss.driver.internal.core.metadata.schema")); + } + if (profile.isDefined(legacyOption)) { + LOG.warn( + "Option {} has been deprecated and will be removed in a future release; please use option {} instead.", + legacyOption, + newOption); + Reflection.buildFromConfig( + this, + legacyOption, + SchemaChangeListener.class, + "com.datastax.oss.driver.internal.core.metadata.schema") + .ifPresent(listeners::add); + } + if (listeners.isEmpty()) { + return new NoopSchemaChangeListener(this); + } else if (listeners.size() == 1) { + return listeners.get(0); + } else { + return new MultiplexingSchemaChangeListener(listeners); + } } protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBuilder) { - RequestTracker requestTrackerFromConfig = - (requestTrackerFromBuilder != null) - ? 
requestTrackerFromBuilder - : Reflection.buildFromConfig( - this, - DefaultDriverOption.REQUEST_TRACKER_CLASS, - RequestTracker.class, - "com.datastax.oss.driver.internal.core.tracker") - .orElseThrow( - () -> - new IllegalArgumentException( - String.format( - "Missing request tracker, check your configuration (%s)", - DefaultDriverOption.REQUEST_TRACKER_CLASS))); - - // The default LBP needs to add its own tracker - if (requestTrackerFromConfig instanceof MultiplexingRequestTracker) { - return requestTrackerFromConfig; + List trackers = new ArrayList<>(); + if (requestTrackerFromBuilder != null) { + trackers.add(requestTrackerFromBuilder); + } + for (LoadBalancingPolicy lbp : this.getLoadBalancingPolicies().values()) { + lbp.getRequestTracker().ifPresent(trackers::add); + } + DefaultDriverOption newOption = DefaultDriverOption.REQUEST_TRACKER_CLASSES; + @SuppressWarnings("deprecation") + DefaultDriverOption legacyOption = DefaultDriverOption.REQUEST_TRACKER_CLASS; + DriverExecutionProfile profile = config.getDefaultProfile(); + if (profile.isDefined(newOption)) { + trackers.addAll( + Reflection.buildFromConfigList( + this, + newOption, + RequestTracker.class, + "com.datastax.oss.driver.internal.core.tracker")); + } + if (profile.isDefined(legacyOption)) { + LOG.warn( + "Option {} has been deprecated and will be removed in a future release; please use option {} instead.", + legacyOption, + newOption); + Reflection.buildFromConfig( + this, + legacyOption, + RequestTracker.class, + "com.datastax.oss.driver.internal.core.tracker") + .ifPresent(trackers::add); + } + if (trackers.isEmpty()) { + return new NoopRequestTracker(this); + } else if (trackers.size() == 1) { + return trackers.get(0); } else { - MultiplexingRequestTracker multiplexingRequestTracker = new MultiplexingRequestTracker(); - if (!(requestTrackerFromConfig instanceof NoopRequestTracker)) { - multiplexingRequestTracker.register(requestTrackerFromConfig); - } - return multiplexingRequestTracker; + return 
new MultiplexingRequestTracker(trackers); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 175f9556eaf..f79fa55b520 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -18,7 +18,6 @@ import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.context.DriverContext; @@ -105,11 +104,13 @@ public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull Strin profile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true); } + @NonNull @Override - public void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter) { - super.init(nodes, distanceReporter); + public Optional getRequestTracker() { if (avoidSlowReplicas) { - ((MultiplexingRequestTracker) context.getRequestTracker()).register(this); + return Optional.of(this); + } else { + return Optional.empty(); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListener.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListener.java new file mode 100644 index 00000000000..b57e05c152e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListener.java @@ -0,0 +1,125 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.internal.core.util.Loggers; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Combines multiple node state listeners into a single one. + * + *

          Any exception thrown by a child listener is caught and logged. + */ +@ThreadSafe +public class MultiplexingNodeStateListener implements NodeStateListener { + + private static final Logger LOG = LoggerFactory.getLogger(MultiplexingNodeStateListener.class); + + private final List listeners = new CopyOnWriteArrayList<>(); + + public MultiplexingNodeStateListener() {} + + public MultiplexingNodeStateListener(NodeStateListener... listeners) { + this(Arrays.asList(listeners)); + } + + public MultiplexingNodeStateListener(Collection listeners) { + addListeners(listeners); + } + + private void addListeners(Collection source) { + for (NodeStateListener listener : source) { + addListener(listener); + } + } + + private void addListener(NodeStateListener toAdd) { + Objects.requireNonNull(toAdd, "listener cannot be null"); + if (toAdd instanceof MultiplexingNodeStateListener) { + addListeners(((MultiplexingNodeStateListener) toAdd).listeners); + } else { + listeners.add(toAdd); + } + } + + public void register(@NonNull NodeStateListener listener) { + addListener(listener); + } + + @Override + public void onAdd(@NonNull Node node) { + invokeListeners(listener -> listener.onAdd(node), "onAdd"); + } + + @Override + public void onUp(@NonNull Node node) { + invokeListeners(listener -> listener.onUp(node), "onUp"); + } + + @Override + public void onDown(@NonNull Node node) { + invokeListeners(listener -> listener.onDown(node), "onDown"); + } + + @Override + public void onRemove(@NonNull Node node) { + invokeListeners(listener -> listener.onRemove(node), "onRemove"); + } + + @Override + public void onSessionReady(@NonNull Session session) { + invokeListeners(listener -> listener.onSessionReady(session), "onSessionReady"); + } + + @Override + public void close() throws Exception { + for (NodeStateListener listener : listeners) { + try { + listener.close(); + } catch (Exception e) { + Loggers.warnWithException( + LOG, "Unexpected error while closing node state listener {}.", 
listener, e); + } + } + } + + private void invokeListeners(@NonNull Consumer action, String event) { + for (NodeStateListener listener : listeners) { + try { + action.accept(listener); + } catch (Exception e) { + Loggers.warnWithException( + LOG, + "Unexpected error while notifying node state listener {} of an {} event.", + listener, + event, + e); + } + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NoopNodeStateListener.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NoopNodeStateListener.java index 2e70d8efb6a..8c9c97fd915 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NoopNodeStateListener.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NoopNodeStateListener.java @@ -16,29 +16,12 @@ package com.datastax.oss.driver.internal.core.metadata; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.NodeStateListenerBase; -import com.datastax.oss.driver.api.core.session.SessionBuilder; import net.jcip.annotations.ThreadSafe; /** - * Default node state listener implementation with empty methods. - * - *

          To activate this listener, modify the {@code advanced.node-state-listener} section in the - * driver configuration, for example: - * - *

          - * datastax-java-driver {
          - *   advanced.node-state-listener {
          - *     class = NoopNodeStateListener
          - *   }
          - * }
          - * 
          - * - * See {@code reference.conf} (in the manual or core driver JAR) for more details. - * - *

          Note that if a listener is specified programmatically with {@link - * SessionBuilder#withNodeStateListener(NodeStateListener)}, the configuration is ignored. + * Default node state listener implementation with empty methods. This implementation is used when + * no listeners were registered, neither programmatically nor through the configuration. */ @ThreadSafe public class NoopNodeStateListener extends NodeStateListenerBase { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListener.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListener.java new file mode 100644 index 00000000000..e7c277a29e0 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListener.java @@ -0,0 +1,209 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.schema; + +import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.internal.core.util.Loggers; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; +import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Combines multiple schema change listeners into a single one. + * + *

          Any exception thrown by a child listener is caught and logged. + */ +@ThreadSafe +public class MultiplexingSchemaChangeListener implements SchemaChangeListener { + + private static final Logger LOG = LoggerFactory.getLogger(MultiplexingSchemaChangeListener.class); + + private final List listeners = new CopyOnWriteArrayList<>(); + + public MultiplexingSchemaChangeListener() {} + + public MultiplexingSchemaChangeListener(SchemaChangeListener... listeners) { + this(Arrays.asList(listeners)); + } + + public MultiplexingSchemaChangeListener(Collection listeners) { + addListeners(listeners); + } + + private void addListeners(Collection source) { + for (SchemaChangeListener listener : source) { + addListener(listener); + } + } + + private void addListener(SchemaChangeListener toAdd) { + Objects.requireNonNull(toAdd, "listener cannot be null"); + if (toAdd instanceof MultiplexingSchemaChangeListener) { + addListeners(((MultiplexingSchemaChangeListener) toAdd).listeners); + } else { + listeners.add(toAdd); + } + } + + public void register(@NonNull SchemaChangeListener listener) { + addListener(listener); + } + + @Override + public void onKeyspaceCreated(@NonNull KeyspaceMetadata keyspace) { + invokeListeners(listener -> listener.onKeyspaceCreated(keyspace), "onKeyspaceCreated"); + } + + @Override + public void onKeyspaceDropped(@NonNull KeyspaceMetadata keyspace) { + invokeListeners(listener -> listener.onKeyspaceDropped(keyspace), "onKeyspaceDropped"); + } + + @Override + public void onKeyspaceUpdated( + @NonNull KeyspaceMetadata current, @NonNull KeyspaceMetadata previous) { + invokeListeners(listener -> listener.onKeyspaceUpdated(current, previous), "onKeyspaceUpdated"); + } + + @Override + public void onTableCreated(@NonNull TableMetadata table) { + invokeListeners(listener -> listener.onTableCreated(table), "onTableCreated"); + } + + @Override + public void onTableDropped(@NonNull TableMetadata table) { + invokeListeners(listener -> 
listener.onTableDropped(table), "onTableDropped"); + } + + @Override + public void onTableUpdated(@NonNull TableMetadata current, @NonNull TableMetadata previous) { + invokeListeners(listener -> listener.onTableUpdated(current, previous), "onTableUpdated"); + } + + @Override + public void onUserDefinedTypeCreated(@NonNull UserDefinedType type) { + invokeListeners( + listener -> listener.onUserDefinedTypeCreated(type), "onUserDefinedTypeCreated"); + } + + @Override + public void onUserDefinedTypeDropped(@NonNull UserDefinedType type) { + invokeListeners( + listener -> listener.onUserDefinedTypeDropped(type), "onUserDefinedTypeDropped"); + } + + @Override + public void onUserDefinedTypeUpdated( + @NonNull UserDefinedType current, @NonNull UserDefinedType previous) { + invokeListeners( + listener -> listener.onUserDefinedTypeUpdated(current, previous), + "onUserDefinedTypeUpdated"); + } + + @Override + public void onFunctionCreated(@NonNull FunctionMetadata function) { + invokeListeners(listener -> listener.onFunctionCreated(function), "onFunctionCreated"); + } + + @Override + public void onFunctionDropped(@NonNull FunctionMetadata function) { + invokeListeners(listener -> listener.onFunctionDropped(function), "onFunctionDropped"); + } + + @Override + public void onFunctionUpdated( + @NonNull FunctionMetadata current, @NonNull FunctionMetadata previous) { + invokeListeners(listener -> listener.onFunctionUpdated(current, previous), "onFunctionUpdated"); + } + + @Override + public void onAggregateCreated(@NonNull AggregateMetadata aggregate) { + invokeListeners(listener -> listener.onAggregateCreated(aggregate), "onAggregateCreated"); + } + + @Override + public void onAggregateDropped(@NonNull AggregateMetadata aggregate) { + invokeListeners(listener -> listener.onAggregateDropped(aggregate), "onAggregateDropped"); + } + + @Override + public void onAggregateUpdated( + @NonNull AggregateMetadata current, @NonNull AggregateMetadata previous) { + invokeListeners( + 
listener -> listener.onAggregateUpdated(current, previous), "onAggregateUpdated"); + } + + @Override + public void onViewCreated(@NonNull ViewMetadata view) { + invokeListeners(listener -> listener.onViewCreated(view), "onViewCreated"); + } + + @Override + public void onViewDropped(@NonNull ViewMetadata view) { + invokeListeners(listener -> listener.onViewDropped(view), "onViewDropped"); + } + + @Override + public void onViewUpdated(@NonNull ViewMetadata current, @NonNull ViewMetadata previous) { + invokeListeners(listener -> listener.onViewUpdated(current, previous), "onViewUpdated"); + } + + @Override + public void onSessionReady(@NonNull Session session) { + invokeListeners(listener -> listener.onSessionReady(session), "onSessionReady"); + } + + @Override + public void close() throws Exception { + for (SchemaChangeListener listener : listeners) { + try { + listener.close(); + } catch (Exception e) { + Loggers.warnWithException( + LOG, "Unexpected error while closing schema change listener {}.", listener, e); + } + } + } + + private void invokeListeners(@NonNull Consumer action, String event) { + for (SchemaChangeListener listener : listeners) { + try { + action.accept(listener); + } catch (Exception e) { + Loggers.warnWithException( + LOG, + "Unexpected error while notifying schema change listener {} of an {} event.", + listener, + event, + e); + } + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/NoopSchemaChangeListener.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/NoopSchemaChangeListener.java index 2df3935a80f..46d9226a7f7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/NoopSchemaChangeListener.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/NoopSchemaChangeListener.java @@ -16,29 +16,12 @@ package com.datastax.oss.driver.internal.core.metadata.schema; import 
com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListenerBase; -import com.datastax.oss.driver.api.core.session.SessionBuilder; import net.jcip.annotations.ThreadSafe; /** - * Default schema change listener implementation with empty methods. - * - *

          To activate this listener, modify the {@code advanced.schema-change-listener} section in the - * driver configuration, for example: - * - *

          - * datastax-java-driver {
          - *   advanced.schema-change-listener {
          - *     class = NoopSchemaChangeListener
          - *   }
          - * }
          - * 
          - * - * See {@code reference.conf} (in the manual or core driver JAR) for more details. - * - *

          Note that if a listener is specified programmatically with {@link - * SessionBuilder#withSchemaChangeListener(SchemaChangeListener)}, the configuration is ignored. + * Default schema change listener implementation with empty methods. This implementation is used + * when no listeners were registered, neither programmatically nor through the configuration. */ @ThreadSafe public class NoopSchemaChangeListener extends SchemaChangeListenerBase { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java similarity index 53% rename from core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java index 694748f9965..8bce9840e2f 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/tracker/MultiplexingRequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java @@ -13,36 +13,65 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.dse.driver.internal.core.tracker; +package com.datastax.oss.driver.internal.core.tracker; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.util.Loggers; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Arrays; +import java.util.Collection; import java.util.List; +import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; +import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Combines multiple request trackers into a single one. * - *

          The default context always wraps any user-provided tracker into this, in case other internal - * components need to add their own trackers later (see InternalDriverContext.buildRequestTracker). - * - *

          We also use it to catch and log any unexpected exception thrown by a tracker. + *

          Any exception thrown by a child tracker is caught and logged. */ +@ThreadSafe public class MultiplexingRequestTracker implements RequestTracker { private static final Logger LOG = LoggerFactory.getLogger(MultiplexingRequestTracker.class); private final List trackers = new CopyOnWriteArrayList<>(); - public void register(RequestTracker tracker) { - trackers.add(tracker); + public MultiplexingRequestTracker() {} + + public MultiplexingRequestTracker(RequestTracker... trackers) { + this(Arrays.asList(trackers)); + } + + public MultiplexingRequestTracker(Collection trackers) { + addTrackers(trackers); + } + + private void addTrackers(Collection source) { + for (RequestTracker tracker : source) { + addTracker(tracker); + } + } + + private void addTracker(RequestTracker toAdd) { + Objects.requireNonNull(toAdd, "tracker cannot be null"); + if (toAdd instanceof MultiplexingRequestTracker) { + addTrackers(((MultiplexingRequestTracker) toAdd).trackers); + } else { + trackers.add(toAdd); + } + } + + public void register(@NonNull RequestTracker tracker) { + addTracker(tracker); } @Override @@ -52,13 +81,10 @@ public void onSuccess( @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, @NonNull String logPrefix) { - for (RequestTracker tracker : trackers) { - try { - tracker.onSuccess(request, latencyNanos, executionProfile, node, logPrefix); - } catch (Throwable t) { - LOG.error("[{}] Unexpected error while invoking request tracker", logPrefix, t); - } - } + invokeTrackers( + tracker -> tracker.onSuccess(request, latencyNanos, executionProfile, node, logPrefix), + logPrefix, + "onSuccess"); } @Override @@ -69,13 +95,10 @@ public void onError( @NonNull DriverExecutionProfile executionProfile, @Nullable Node node, @NonNull String logPrefix) { - for (RequestTracker tracker : trackers) { - try { - tracker.onError(request, error, latencyNanos, executionProfile, node, logPrefix); - } catch (Throwable t) { - LOG.error("[{}] Unexpected error while invoking 
request tracker", logPrefix, t); - } - } + invokeTrackers( + tracker -> tracker.onError(request, error, latencyNanos, executionProfile, node, logPrefix), + logPrefix, + "onError"); } @Override @@ -85,13 +108,10 @@ public void onNodeSuccess( @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, @NonNull String logPrefix) { - for (RequestTracker tracker : trackers) { - try { - tracker.onNodeSuccess(request, latencyNanos, executionProfile, node, logPrefix); - } catch (Throwable t) { - LOG.error("[{}] Unexpected error while invoking request tracker", logPrefix, t); - } - } + invokeTrackers( + tracker -> tracker.onNodeSuccess(request, latencyNanos, executionProfile, node, logPrefix), + logPrefix, + "onNodeSuccess"); } @Override @@ -102,42 +122,44 @@ public void onNodeError( @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, @NonNull String logPrefix) { - for (RequestTracker tracker : trackers) { - try { - tracker.onNodeError(request, error, latencyNanos, executionProfile, node, logPrefix); - } catch (Throwable t) { - LOG.error("[{}] Unexpected error while invoking request tracker", logPrefix, t); - } - } + invokeTrackers( + tracker -> + tracker.onNodeError(request, error, latencyNanos, executionProfile, node, logPrefix), + logPrefix, + "onNodeError"); } @Override public void onSessionReady(@NonNull Session session) { - for (RequestTracker tracker : trackers) { - try { - tracker.onSessionReady(session); - } catch (Throwable t) { - LOG.error("[{}] Unexpected error while invoking request tracker", session.getName(), t); - } - } + invokeTrackers(tracker -> tracker.onSessionReady(session), session.getName(), "onSessionReady"); } @Override public void close() throws Exception { - Exception toThrow = null; for (RequestTracker tracker : trackers) { try { tracker.close(); } catch (Exception e) { - if (toThrow == null) { - toThrow = e; - } else { - toThrow.addSuppressed(e); - } + Loggers.warnWithException( + LOG, "Unexpected error while closing 
request tracker {}.", tracker, e); } } - if (toThrow != null) { - throw toThrow; + } + + private void invokeTrackers( + @NonNull Consumer action, String logPrefix, String event) { + for (RequestTracker tracker : trackers) { + try { + action.accept(tracker); + } catch (Exception e) { + Loggers.warnWithException( + LOG, + "[{}] Unexpected error while notifying request tracker {} of an {} event.", + logPrefix, + tracker, + event, + e); + } } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java index 0cec5dd2691..6297f51e789 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java @@ -19,29 +19,13 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import edu.umd.cs.findbugs.annotations.NonNull; import net.jcip.annotations.ThreadSafe; /** - * A no-op request tracker. - * - *

          To activate this tracker, modify the {@code advanced.request-tracker} section in the driver - * configuration, for example: - * - *

          - * datastax-java-driver {
          - *   advanced.request-tracker {
          - *     class = NoopRequestTracker
          - *   }
          - * }
          - * 
          - * - * See {@code reference.conf} (in the manual or core driver JAR) for more details. - * - *

          Note that if a tracker is specified programmatically with {@link - * SessionBuilder#withRequestTracker(RequestTracker)}, the configuration is ignored. + * Default request tracker implementation with empty methods. This implementation is used when no + * trackers were registered, neither programmatically nor through the configuration. */ @ThreadSafe public class NoopRequestTracker implements RequestTracker { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java index bdd99fc5ab7..489c4e61cbe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java @@ -38,7 +38,7 @@ *

            * datastax-java-driver {
            *   advanced.request-tracker {
          - *     class = RequestLogger
          + *     classes = [RequestLogger]
            *     logs {
            *       success { enabled = true }
            *       slow { enabled = true, threshold = 1 second }
          @@ -56,7 +56,7 @@
            * See {@code reference.conf} (in the manual or core driver JAR) for more details.
            *
            * 

          Note that if a tracker is specified programmatically with {@link - * SessionBuilder#withRequestTracker(RequestTracker)}, the configuration is ignored. + * SessionBuilder#addRequestTracker(RequestTracker)}, the configuration is ignored. */ @ThreadSafe public class RequestLogger implements RequestTracker { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java index 9526a144536..ea371f6ddb8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Reflection.java @@ -21,6 +21,7 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.MultimapBuilder; @@ -29,6 +30,7 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.Collection; +import java.util.List; import java.util.Map; import java.util.Optional; import org.slf4j.Logger; @@ -108,6 +110,36 @@ public static Optional buildFromConfig( return buildFromConfig(context, null, classNameOption, expectedSuperType, defaultPackages); } + /** + * Tries to create a list of instances, given an option defined in the driver configuration. + * + *

          For example: + * + *

          +   * my-policy.classes = [my.package.MyPolicyImpl1,my.package.MyPolicyImpl2]
          +   * 
          + * + * Each class will be instantiated via reflection, and must have a constructor that takes a {@link + * DriverContext} argument. + * + * @param context the driver context. + * @param classNamesOption the option that indicates the class list. It will be looked up in the + * default profile of the configuration stored in the context. + * @param expectedSuperType a super-type that the classes are expected to implement/extend. + * @param defaultPackages the default packages to prepend to the class names if they are not + * qualified. They will be tried in order, the first one that matches an existing class will + * be used. + * @return the list of new instances, or an empty list if {@code classNamesOption} is not defined + * in the configuration. + */ + public static ImmutableList buildFromConfigList( + InternalDriverContext context, + DriverOption classNamesOption, + Class expectedSuperType, + String... defaultPackages) { + return buildFromConfigList(context, null, classNamesOption, expectedSuperType, defaultPackages); + } + /** * Tries to create multiple instances of a class, given options defined in the driver * configuration and possibly overridden in profiles. @@ -199,6 +231,57 @@ public static Optional buildFromConfig( } String className = config.getString(classNameOption); + return Optional.of( + resolveClass( + context, profileName, expectedSuperType, configPath, className, defaultPackages)); + } + + /** + * @param profileName if null, this is a global policy, use the default profile and look for a + * one-arg constructor. If not null, this is a per-profile policy, look for a two-arg + * constructor. + */ + public static ImmutableList buildFromConfigList( + InternalDriverContext context, + String profileName, + DriverOption classNamesOption, + Class expectedSuperType, + String... defaultPackages) { + + DriverExecutionProfile config = + (profileName == null) + ? 
context.getConfig().getDefaultProfile() + : context.getConfig().getProfile(profileName); + + String configPath = classNamesOption.getPath(); + LOG.debug( + "Creating a list of {} from config option {}", + expectedSuperType.getSimpleName(), + configPath); + + if (!config.isDefined(classNamesOption)) { + LOG.debug("Option is not defined, skipping"); + return ImmutableList.of(); + } + + List classNames = config.getStringList(classNamesOption); + ImmutableList.Builder components = ImmutableList.builder(); + for (String className : classNames) { + components.add( + resolveClass( + context, profileName, expectedSuperType, configPath, className, defaultPackages)); + } + return components.build(); + } + + @NonNull + private static ComponentT resolveClass( + InternalDriverContext context, + String profileName, + Class expectedSuperType, + String configPath, + String className, + String[] defaultPackages) { Class clazz = null; if (className.contains(".")) { LOG.debug("Building from fully-qualified name {}", className); @@ -245,7 +328,7 @@ public static Optional buildFromConfig( (profileName == null) ? constructor.newInstance(context) : constructor.newInstance(context, profileName); - return Optional.of(instance); + return instance; } catch (Exception e) { // ITE just wraps an exception thrown by the constructor, get rid of it: Throwable cause = (e instanceof InvocationTargetException) ? e.getCause() : e; diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index ab5a33a028d..44e454fe42d 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -818,23 +818,29 @@ datastax-java-driver { force-java-clock = false } - # A session-wide component that tracks the outcome of requests. - # - # Required: yes - # Modifiable at runtime: no (but custom implementations may elect to watch configuration changes - # and allow child options to be changed at runtime). 
- # Overridable in a profile: no + # Request trackers are session-wide components that get notified of the outcome of requests. advanced.request-tracker { - # The class of the tracker. If it is not qualified, the driver assumes that it resides in the - # package com.datastax.oss.driver.internal.core.tracker. + # The list of trackers to register. # - # The driver provides the following implementations out of the box: - # - NoopRequestTracker: does nothing. + # This must be a list of class names, either fully-qualified or non-qualified; if the latter, + # the driver assumes that the class resides in the package + # com.datastax.oss.driver.internal.core.tracker. + # + # All classes specified here must implement + # com.datastax.oss.driver.api.core.tracker.RequestTracker and have a public constructor with a + # DriverContext argument. + # + # The driver provides the following implementation out of the box: # - RequestLogger: logs requests (see the parameters below). # - # You can also specify a custom class that implements RequestTracker and has a public - # constructor with a DriverContext argument. - class = NoopRequestTracker + # You can also pass instances of your trackers programmatically with + # CqlSession.builder().addRequestTracker(). + # + # Required: no + # Modifiable at runtime: no (but custom implementations may elect to watch configuration changes + # and allow child options to be changed at runtime). + # Overridable in a profile: no + #classes = [RequestLogger,com.example.app.MyTracker] # Parameters for RequestLogger. All of them can be overridden in a profile, and changed at # runtime (the new values will be taken into account for requests logged after the change). @@ -937,39 +943,37 @@ datastax-java-driver { // drain-interval = 10 milliseconds } - # A session-wide component that listens for node state changes. If it is not qualified, the driver - # assumes that it resides in the package com.datastax.oss.driver.internal.core.metadata. 
+ # The list of node state listeners to register. Node state listeners are session-wide + # components that listen for node state changes (e.g., when nodes go down or back up). # - # The driver provides a single no-op implementation out of the box: NoopNodeStateListener. - # - # You can also specify a custom class that implements NodeStateListener and has a public + # This must be a list of fully-qualified class names; classes specified here must implement + # com.datastax.oss.driver.api.core.metadata.NodeStateListener and have a public # constructor with a DriverContext argument. # - # Alternatively, you can pass an instance of your listener programmatically with - # CqlSession.builder().withNodeStateListener(). In that case, this option will be ignored. + # You can also pass instances of your listeners programmatically with + # CqlSession.builder().addNodeStateListener(). # - # Required: unless a listener has been provided programmatically + # Required: no # Modifiable at runtime: no (but custom implementations may elect to watch configuration changes # and allow child options to be changed at runtime). # Overridable in a profile: no - advanced.node-state-listener.class = NoopNodeStateListener + #advanced.node-state-listener.classes = [com.example.app.MyListener1,com.example.app.MyListener2] - # A session-wide component that listens for node state changes. If it is not qualified, the driver - # assumes that it resides in the package com.datastax.oss.driver.internal.core.metadata.schema. + # The list of schema change listeners to register. Schema change listeners are session-wide + # components that listen for schema changes (e.g., when tables are created or dropped). # - # The driver provides a single no-op implementation out of the box: NoopSchemaChangeListener. 
- # - # You can also specify a custom class that implements SchemaChangeListener and has a public + # This must be a list of fully-qualified class names; classes specified here must implement + # com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener and have a public # constructor with a DriverContext argument. # - # Alternatively, you can pass an instance of your listener programmatically with - # CqlSession.builder().withSchemaChangeListener(). In that case, this option will be ignored. + # You can also pass instances of your listeners programmatically with + # CqlSession.builder().addSchemaChangeListener(). # - # Required: unless a listener has been provided programmatically + # Required: no # Modifiable at runtime: no (but custom implementations may elect to watch configuration changes # and allow child options to be changed at runtime). # Overridable in a profile: no - advanced.schema-change-listener.class = NoopSchemaChangeListener + #advanced.schema-change-listener.classes = [com.example.app.MyListener1,com.example.app.MyListener2] # The address translator to use to convert the addresses sent by Cassandra nodes into ones that # the driver uses to connect. 
diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java index 38faf7d1beb..25a4e4f1686 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDcFailoverTest.java @@ -17,17 +17,14 @@ import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.Map; import java.util.UUID; -import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; @@ -36,13 +33,6 @@ public class DcInferringLoadBalancingPolicyDcFailoverTest extends BasicLoadBalancingPolicyDcFailoverTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override protected DcInferringLoadBalancingPolicy createAndInitPolicy() { when(node4.getDatacenter()).thenReturn("dc2"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java index 6d644edcf2a..7207d3a057d 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyDistanceTest.java @@ -17,12 +17,9 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowable; -import static org.mockito.BDDMockito.given; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; @@ -31,13 +28,6 @@ public class DcInferringLoadBalancingPolicyDistanceTest extends BasicLoadBalancingPolicyDistanceTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override public void should_report_LOCAL_when_dc_agnostic() { // This policy cannot operate when contact points are from different DCs diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java index aa01ff08acf..f87a1524301 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyEventsTest.java @@ -17,24 +17,14 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.reset; -import 
com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import org.junit.Before; public class DcInferringLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override @NonNull protected BasicLoadBalancingPolicy createAndInitPolicy() { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java index e58d0e8b6bd..4cf355d59ee 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DcInferringLoadBalancingPolicyInitTest.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowable; import static org.assertj.core.api.Assertions.filter; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -26,7 +25,6 @@ import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; @@ -35,18 +33,10 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; 
import java.util.UUID; -import org.junit.Before; import org.junit.Test; public class DcInferringLoadBalancingPolicyInitTest extends LoadBalancingPolicyTestBase { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Test public void should_use_local_dc_if_provided_via_config() { // Given diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java index 34302a196e7..d002b8e475f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDcFailoverTest.java @@ -17,17 +17,14 @@ import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.Map; import java.util.UUID; -import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; @@ -36,13 +33,6 @@ public class DefaultLoadBalancingPolicyDcFailoverTest extends BasicLoadBalancingPolicyDcFailoverTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override protected DefaultLoadBalancingPolicy createAndInitPolicy() { 
when(node4.getDatacenter()).thenReturn("dc2"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java index 8db9d0d1019..a859c7b1f09 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyDistanceTest.java @@ -17,12 +17,9 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.catchThrowable; -import static org.mockito.BDDMockito.given; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import edu.umd.cs.findbugs.annotations.NonNull; -import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; @@ -30,13 +27,6 @@ @RunWith(MockitoJUnitRunner.Silent.class) public class DefaultLoadBalancingPolicyDistanceTest extends BasicLoadBalancingPolicyDistanceTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override public void should_report_LOCAL_when_dc_agnostic() { // This policy cannot operate in dc-agnostic mode diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java index 22f80b1f36d..b4b48da25ba 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyEventsTest.java @@ -17,24 +17,14 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.reset; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import org.junit.Before; public class DefaultLoadBalancingPolicyEventsTest extends BasicLoadBalancingPolicyEventsTest { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Override @NonNull protected DefaultLoadBalancingPolicy createAndInitPolicy() { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java index 6efe9661d89..e9fd5c68944 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyInitTest.java @@ -18,7 +18,6 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.filter; -import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -26,7 +25,6 @@ import ch.qos.logback.classic.Level; import ch.qos.logback.classic.spi.ILoggingEvent; -import 
com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; @@ -35,18 +33,10 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.UUID; -import org.junit.Before; import org.junit.Test; public class DefaultLoadBalancingPolicyInitTest extends LoadBalancingPolicyTestBase { - @Override - @Before - public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); - super.setup(); - } - @Test public void should_use_local_dc_if_provided_via_config() { // Given diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java index 207a5b409b7..22cfc3a76d9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java @@ -25,7 +25,6 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.pool.ChannelPool; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -59,7 +58,6 @@ public class DefaultLoadBalancingPolicyQueryPlanTest extends BasicLoadBalancingP @Before @Override public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); nanoTime = T1; diceRoll = 4; 
given(node4.getDatacenter()).willReturn("dc1"); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java index f87f5b38f43..6dfad480708 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java @@ -19,7 +19,6 @@ import static com.datastax.oss.driver.api.core.config.DriverExecutionProfile.DEFAULT_NAME; import static org.mockito.BDDMockito.given; -import com.datastax.dse.driver.internal.core.tracker.MultiplexingRequestTracker; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -41,7 +40,6 @@ public class DefaultLoadBalancingPolicyRequestTrackerTest extends LoadBalancingP @Before @Override public void setup() { - given(context.getRequestTracker()).willReturn(new MultiplexingRequestTracker()); super.setup(); given(metadataManager.getContactPoints()).willReturn(ImmutableSet.of(node1)); policy = diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListenerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListenerTest.java new file mode 100644 index 00000000000..4d868e4afce --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MultiplexingNodeStateListenerTest.java @@ -0,0 +1,194 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.session.Session; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.LoggerFactory; + +@RunWith(MockitoJUnitRunner.Strict.class) +public class MultiplexingNodeStateListenerTest { + + @Mock private NodeStateListener child1; + @Mock private NodeStateListener child2; + @Mock private Node node; + @Mock private Session session; + + @Mock private Appender appender; + @Captor private ArgumentCaptor loggingEventCaptor; + + private Logger logger; + private Level initialLogLevel; + + @Before + public void addAppenders() { + logger = (Logger) LoggerFactory.getLogger(MultiplexingNodeStateListener.class); + initialLogLevel = logger.getLevel(); + logger.setLevel(Level.WARN); + logger.addAppender(appender); + } + + @After + public void removeAppenders() { + logger.detachAppender(appender); + 
logger.setLevel(initialLogLevel); + } + + @Test + public void should_register() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(); + // when + listener.register(child1); + listener.register(child2); + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_listener_via_constructor() { + // given + MultiplexingNodeStateListener listener = + new MultiplexingNodeStateListener(new MultiplexingNodeStateListener(child1, child2)); + // when + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_listener_via_register() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(); + // when + listener.register(new MultiplexingNodeStateListener(child1, child2)); + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_notify_onUp() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onUp(node); + // when + listener.onUp(node); + // then + verify(child1).onUp(node); + verify(child2).onUp(node); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying node state listener child1 of an onUp event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onDown() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onDown(node); + // when + listener.onDown(node); + // then + verify(child1).onDown(node); + verify(child2).onDown(node); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying node state listener child1 of an onDown event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onAdd() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onAdd(node); + // when + listener.onAdd(node); + // then + verify(child1).onAdd(node); + verify(child2).onAdd(node); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying node state listener child1 of an onAdd event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onRemove() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onRemove(node); + // when + listener.onRemove(node); + // then + verify(child1).onRemove(node); + verify(child2).onRemove(node); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying node state listener child1 of an onRemove event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onSessionReady() { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onSessionReady(session); + // when + listener.onSessionReady(session); + // then + verify(child1).onSessionReady(session); + verify(child2).onSessionReady(session); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying node state listener child1 of an onSessionReady event. (NullPointerException: null)"); + } + + @Test + public void should_notify_close() throws Exception { + // given + MultiplexingNodeStateListener listener = new MultiplexingNodeStateListener(child1, child2); + Exception child1Error = new NullPointerException(); + willThrow(child1Error).given(child1).close(); + // when + listener.close(); + // then + verify(child1).close(); + verify(child2).close(); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while closing node state listener child1. (NullPointerException: null)"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListenerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListenerTest.java new file mode 100644 index 00000000000..440c7035394 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/MultiplexingSchemaChangeListenerTest.java @@ -0,0 +1,450 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.metadata.schema; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.api.core.metadata.schema.ViewMetadata; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.LoggerFactory; + +@RunWith(MockitoJUnitRunner.Strict.class) +public class MultiplexingSchemaChangeListenerTest { + + @Mock private SchemaChangeListener child1; + @Mock private SchemaChangeListener child2; + @Mock private Session session; + @Mock private KeyspaceMetadata keyspace1, keyspace2; + @Mock private TableMetadata table1, table2; + @Mock 
private UserDefinedType userDefinedType1, userDefinedType2; + @Mock private FunctionMetadata function1, function2; + @Mock private AggregateMetadata aggregate1, aggregate2; + @Mock private ViewMetadata view1, view2; + + @Mock private Appender appender; + @Captor private ArgumentCaptor loggingEventCaptor; + + private Logger logger; + private Level initialLogLevel; + + @Before + public void addAppenders() { + logger = (Logger) LoggerFactory.getLogger(MultiplexingSchemaChangeListener.class); + initialLogLevel = logger.getLevel(); + logger.setLevel(Level.WARN); + logger.addAppender(appender); + } + + @After + public void removeAppenders() { + logger.detachAppender(appender); + logger.setLevel(initialLogLevel); + } + + @Test + public void should_register() { + // given + MultiplexingSchemaChangeListener listener = new MultiplexingSchemaChangeListener(); + // when + listener.register(child1); + listener.register(child2); + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_listener_via_constructor() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(new MultiplexingSchemaChangeListener(child1, child2)); + // when + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_listener_via_register() { + // given + MultiplexingSchemaChangeListener listener = new MultiplexingSchemaChangeListener(); + // when + listener.register(new MultiplexingSchemaChangeListener(child1, child2)); + // then + assertThat(listener).extracting("listeners").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_notify_onKeyspaceCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new 
NullPointerException()).given(child1).onKeyspaceCreated(keyspace1); + // when + listener.onKeyspaceCreated(keyspace1); + // then + verify(child1).onKeyspaceCreated(keyspace1); + verify(child2).onKeyspaceCreated(keyspace1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onKeyspaceCreated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onKeyspaceDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onKeyspaceDropped(keyspace1); + // when + listener.onKeyspaceDropped(keyspace1); + // then + verify(child1).onKeyspaceDropped(keyspace1); + verify(child2).onKeyspaceDropped(keyspace1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onKeyspaceDropped event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onKeyspaceUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onKeyspaceUpdated(keyspace1, keyspace2); + // when + listener.onKeyspaceUpdated(keyspace1, keyspace2); + // then + verify(child1).onKeyspaceUpdated(keyspace1, keyspace2); + verify(child2).onKeyspaceUpdated(keyspace1, keyspace2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onKeyspaceUpdated event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onTableCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onTableCreated(table1); + // when + listener.onTableCreated(table1); + // then + verify(child1).onTableCreated(table1); + verify(child2).onTableCreated(table1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onTableCreated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onTableDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onTableDropped(table1); + // when + listener.onTableDropped(table1); + // then + verify(child1).onTableDropped(table1); + verify(child2).onTableDropped(table1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onTableDropped event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onTableUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onTableUpdated(table1, table2); + // when + listener.onTableUpdated(table1, table2); + // then + verify(child1).onTableUpdated(table1, table2); + verify(child2).onTableUpdated(table1, table2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onTableUpdated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onUserDefinedTypeCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onUserDefinedTypeCreated(userDefinedType1); + // when + listener.onUserDefinedTypeCreated(userDefinedType1); + // then + verify(child1).onUserDefinedTypeCreated(userDefinedType1); + verify(child2).onUserDefinedTypeCreated(userDefinedType1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onUserDefinedTypeCreated event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onUserDefinedTypeDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onUserDefinedTypeDropped(userDefinedType1); + // when + listener.onUserDefinedTypeDropped(userDefinedType1); + // then + verify(child1).onUserDefinedTypeDropped(userDefinedType1); + verify(child2).onUserDefinedTypeDropped(userDefinedType1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onUserDefinedTypeDropped event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onUserDefinedTypeUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()) + .given(child1) + .onUserDefinedTypeUpdated(userDefinedType1, userDefinedType2); + // when + listener.onUserDefinedTypeUpdated(userDefinedType1, userDefinedType2); + // then + verify(child1).onUserDefinedTypeUpdated(userDefinedType1, userDefinedType2); + verify(child2).onUserDefinedTypeUpdated(userDefinedType1, userDefinedType2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onUserDefinedTypeUpdated event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onFunctionCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onFunctionCreated(function1); + // when + listener.onFunctionCreated(function1); + // then + verify(child1).onFunctionCreated(function1); + verify(child2).onFunctionCreated(function1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onFunctionCreated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onFunctionDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onFunctionDropped(function1); + // when + listener.onFunctionDropped(function1); + // then + verify(child1).onFunctionDropped(function1); + verify(child2).onFunctionDropped(function1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onFunctionDropped event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onFunctionUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onFunctionUpdated(function1, function2); + // when + listener.onFunctionUpdated(function1, function2); + // then + verify(child1).onFunctionUpdated(function1, function2); + verify(child2).onFunctionUpdated(function1, function2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onFunctionUpdated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onAggregateCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onAggregateCreated(aggregate1); + // when + listener.onAggregateCreated(aggregate1); + // then + verify(child1).onAggregateCreated(aggregate1); + verify(child2).onAggregateCreated(aggregate1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onAggregateCreated event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onAggregateDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onAggregateDropped(aggregate1); + // when + listener.onAggregateDropped(aggregate1); + // then + verify(child1).onAggregateDropped(aggregate1); + verify(child2).onAggregateDropped(aggregate1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onAggregateDropped event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onAggregateUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onAggregateUpdated(aggregate1, aggregate2); + // when + listener.onAggregateUpdated(aggregate1, aggregate2); + // then + verify(child1).onAggregateUpdated(aggregate1, aggregate2); + verify(child2).onAggregateUpdated(aggregate1, aggregate2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onAggregateUpdated event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onViewCreated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onViewCreated(view1); + // when + listener.onViewCreated(view1); + // then + verify(child1).onViewCreated(view1); + verify(child2).onViewCreated(view1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onViewCreated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onViewDropped() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onViewDropped(view1); + // when + listener.onViewDropped(view1); + // then + verify(child1).onViewDropped(view1); + verify(child2).onViewDropped(view1); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onViewDropped event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onViewUpdated() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onViewUpdated(view1, view2); + // when + listener.onViewUpdated(view1, view2); + // then + verify(child1).onViewUpdated(view1, view2); + verify(child2).onViewUpdated(view1, view2); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onViewUpdated event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onSessionReady() { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + willThrow(new NullPointerException()).given(child1).onSessionReady(session); + // when + listener.onSessionReady(session); + // then + verify(child1).onSessionReady(session); + verify(child2).onSessionReady(session); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while notifying schema change listener child1 of an onSessionReady event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_close() throws Exception { + // given + MultiplexingSchemaChangeListener listener = + new MultiplexingSchemaChangeListener(child1, child2); + Exception child1Error = new NullPointerException(); + willThrow(child1Error).given(child1).close(); + // when + listener.close(); + // then + verify(child1).close(); + verify(child2).close(); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while closing schema change listener child1. (NullPointerException: null)"); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTrackerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTrackerTest.java new file mode 100644 index 00000000000..6e65aeafb5f --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTrackerTest.java @@ -0,0 +1,211 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.tracker; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.BDDMockito.given; +import static org.mockito.BDDMockito.willThrow; +import static org.mockito.Mockito.verify; + +import ch.qos.logback.classic.Level; +import ch.qos.logback.classic.Logger; +import ch.qos.logback.classic.spi.ILoggingEvent; +import ch.qos.logback.core.Appender; +import com.datastax.oss.driver.api.core.DriverExecutionException; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.slf4j.LoggerFactory; + +@RunWith(MockitoJUnitRunner.Strict.class) +public class MultiplexingRequestTrackerTest { + + @Mock private RequestTracker child1; + @Mock private RequestTracker child2; + @Mock private Request request; + @Mock private DriverExecutionProfile profile; + @Mock private Node node; + @Mock private Session session; + + @Mock private Appender appender; + @Captor private ArgumentCaptor loggingEventCaptor; + + private Logger logger; + private Level initialLogLevel; + + private final Exception error = new DriverExecutionException(new NullPointerException()); + + @Before + public void addAppenders() { + logger = (Logger) LoggerFactory.getLogger(MultiplexingRequestTracker.class); + initialLogLevel = logger.getLevel(); + logger.setLevel(Level.WARN); + logger.addAppender(appender); + } + + @After + public void removeAppenders() { + logger.detachAppender(appender); + logger.setLevel(initialLogLevel); + } + + @Test + 
public void should_register() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(); + // when + tracker.register(child1); + tracker.register(child2); + // then + assertThat(tracker).extracting("trackers").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_tracker_via_constructor() { + // given + MultiplexingRequestTracker tracker = + new MultiplexingRequestTracker(new MultiplexingRequestTracker(child1, child2)); + // when + // then + assertThat(tracker).extracting("trackers").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_flatten_child_multiplexing_tracker_via_register() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(); + // when + tracker.register(new MultiplexingRequestTracker(child1, child2)); + // then + assertThat(tracker).extracting("trackers").asList().hasSize(2).contains(child1, child2); + } + + @Test + public void should_notify_onSuccess() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + willThrow(new NullPointerException()) + .given(child1) + .onSuccess(request, 123456L, profile, node, "test"); + // when + tracker.onSuccess(request, 123456L, profile, node, "test"); + // then + verify(child1).onSuccess(request, 123456L, profile, node, "test"); + verify(child2).onSuccess(request, 123456L, profile, node, "test"); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "[test] Unexpected error while notifying request tracker child1 of an onSuccess event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onError() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + willThrow(new NullPointerException()) + .given(child1) + .onError(request, error, 123456L, profile, node, "test"); + // when + tracker.onError(request, error, 123456L, profile, node, "test"); + // then + verify(child1).onError(request, error, 123456L, profile, node, "test"); + verify(child2).onError(request, error, 123456L, profile, node, "test"); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "[test] Unexpected error while notifying request tracker child1 of an onError event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onNodeSuccess() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + willThrow(new NullPointerException()) + .given(child1) + .onNodeSuccess(request, 123456L, profile, node, "test"); + // when + tracker.onNodeSuccess(request, 123456L, profile, node, "test"); + // then + verify(child1).onNodeSuccess(request, 123456L, profile, node, "test"); + verify(child2).onNodeSuccess(request, 123456L, profile, node, "test"); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "[test] Unexpected error while notifying request tracker child1 of an onNodeSuccess event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_onNodeError() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + willThrow(new NullPointerException()) + .given(child1) + .onNodeError(request, error, 123456L, profile, node, "test"); + // when + tracker.onNodeError(request, error, 123456L, profile, node, "test"); + // then + verify(child1).onNodeError(request, error, 123456L, profile, node, "test"); + verify(child2).onNodeError(request, error, 123456L, profile, node, "test"); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "[test] Unexpected error while notifying request tracker child1 of an onNodeError event. (NullPointerException: null)"); + } + + @Test + public void should_notify_onSessionReady() { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + willThrow(new NullPointerException()).given(child1).onSessionReady(session); + given(session.getName()).willReturn("test"); + // when + tracker.onSessionReady(session); + // then + verify(child1).onSessionReady(session); + verify(child2).onSessionReady(session); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "[test] Unexpected error while notifying request tracker child1 of an onSessionReady event. 
(NullPointerException: null)"); + } + + @Test + public void should_notify_close() throws Exception { + // given + MultiplexingRequestTracker tracker = new MultiplexingRequestTracker(child1, child2); + Exception child1Error = new NullPointerException(); + willThrow(child1Error).given(child1).close(); + // when + tracker.close(); + // then + verify(child1).close(); + verify(child2).close(); + verify(appender).doAppend(loggingEventCaptor.capture()); + assertThat(loggingEventCaptor.getAllValues().stream().map(ILoggingEvent::getFormattedMessage)) + .contains( + "Unexpected error while closing request tracker child1. (NullPointerException: null)"); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java index db3c56bceac..fcafd262a91 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/config/DriverConfigValidationIT.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import java.util.Collections; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -44,27 +45,43 @@ public void should_fail_to_init_with_invalid_policy() { should_fail_to_init_with_invalid_policy(DefaultDriverOption.AUTH_PROVIDER_CLASS); should_fail_to_init_with_invalid_policy(DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS); should_fail_to_init_with_invalid_policy(DefaultDriverOption.TIMESTAMP_GENERATOR_CLASS); - should_fail_to_init_with_invalid_policy(DefaultDriverOption.REQUEST_TRACKER_CLASS); should_fail_to_init_with_invalid_policy(DefaultDriverOption.REQUEST_THROTTLER_CLASS); - 
should_fail_to_init_with_invalid_policy(DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASS); - should_fail_to_init_with_invalid_policy( - DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASS); should_fail_to_init_with_invalid_policy(DefaultDriverOption.ADDRESS_TRANSLATOR_CLASS); } + @Test + public void should_fail_to_init_with_invalid_components() { + should_fail_to_init_with_invalid_components(DefaultDriverOption.REQUEST_TRACKER_CLASSES); + should_fail_to_init_with_invalid_components( + DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASSES); + should_fail_to_init_with_invalid_components( + DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASSES); + } + private void should_fail_to_init_with_invalid_policy(DefaultDriverOption option) { DriverConfigLoader loader = SessionUtils.configLoaderBuilder().withString(option, "AClassThatDoesNotExist").build(); + assertConfigError(option, loader); + } + + private void should_fail_to_init_with_invalid_components(DefaultDriverOption option) { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withStringList(option, Collections.singletonList("AClassThatDoesNotExist")) + .build(); + assertConfigError(option, loader); + } + + private void assertConfigError(DefaultDriverOption option, DriverConfigLoader loader) { assertThatThrownBy(() -> SessionUtils.newSession(SIMULACRON_RULE, loader)) .satisfies( - error -> { - assertThat(error) - .isInstanceOf(IllegalArgumentException.class) - .hasMessageContaining( - "Can't find class AClassThatDoesNotExist " - + "(specified by " - + option.getPath() - + ")"); - }); + error -> + assertThat(error) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining( + "Can't find class AClassThatDoesNotExist " + + "(specified by " + + option.getPath() + + ")")); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java index 
0c6bbe3e061..690c00a0e9b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java @@ -20,15 +20,22 @@ import static org.mockito.Mockito.verify; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; +import com.datastax.oss.driver.api.core.metadata.NodeStateListenerBase; import com.datastax.oss.driver.api.core.metadata.SafeInitNodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListenerBase; +import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Collections; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -47,30 +54,146 @@ public class ListenersIT { public static final SimulacronRule SIMULACRON_RULE = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - @Mock private NodeStateListener nodeListener; - @Mock private SchemaChangeListener schemaListener; - @Mock private RequestTracker requestTracker; - @Captor private ArgumentCaptor nodeCaptor; + @Mock private NodeStateListener nodeListener1; + @Mock private NodeStateListener nodeListener2; + @Mock private SchemaChangeListener schemaListener1; + @Mock private 
SchemaChangeListener schemaListener2; + @Mock private RequestTracker requestTracker1; + @Mock private RequestTracker requestTracker2; + + @Captor private ArgumentCaptor nodeCaptor1; + @Captor private ArgumentCaptor nodeCaptor2; @Test - public void should_inject_session_in_listeners() { + public void should_inject_session_in_listeners() throws Exception { try (CqlSession session = (CqlSession) SessionUtils.baseBuilder() .addContactEndPoints(SIMULACRON_RULE.getContactPoints()) - .withNodeStateListener(new SafeInitNodeStateListener(nodeListener, true)) - .withSchemaChangeListener(schemaListener) - .withRequestTracker(requestTracker) + .addNodeStateListener(new SafeInitNodeStateListener(nodeListener1, true)) + .addNodeStateListener(new SafeInitNodeStateListener(nodeListener2, true)) + .addSchemaChangeListener(schemaListener1) + .addSchemaChangeListener(schemaListener2) + .addRequestTracker(requestTracker1) + .addRequestTracker(requestTracker2) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withClassList( + DefaultDriverOption.METADATA_NODE_STATE_LISTENER_CLASSES, + Collections.singletonList(MyNodeStateListener.class)) + .withClassList( + DefaultDriverOption.METADATA_SCHEMA_CHANGE_LISTENER_CLASSES, + Collections.singletonList(MySchemaChangeListener.class)) + .withClassList( + DefaultDriverOption.REQUEST_TRACKER_CLASSES, + Collections.singletonList(MyRequestTracker.class)) + .build()) .build()) { - InOrder inOrder = inOrder(nodeListener); - inOrder.verify(nodeListener).onSessionReady(session); - inOrder.verify(nodeListener).onUp(nodeCaptor.capture()); - assertThat(nodeCaptor.getValue().getEndPoint()) + InOrder inOrder1 = inOrder(nodeListener1); + inOrder1.verify(nodeListener1).onSessionReady(session); + inOrder1.verify(nodeListener1).onUp(nodeCaptor1.capture()); + + InOrder inOrder2 = inOrder(nodeListener2); + inOrder2.verify(nodeListener2).onSessionReady(session); + inOrder2.verify(nodeListener2).onUp(nodeCaptor2.capture()); + + 
assertThat(nodeCaptor1.getValue().getEndPoint()) + .isEqualTo(SIMULACRON_RULE.getContactPoints().iterator().next()); + + assertThat(nodeCaptor2.getValue().getEndPoint()) .isEqualTo(SIMULACRON_RULE.getContactPoints().iterator().next()); - verify(schemaListener).onSessionReady(session); - verify(requestTracker).onSessionReady(session); + verify(schemaListener1).onSessionReady(session); + verify(schemaListener2).onSessionReady(session); + + verify(requestTracker1).onSessionReady(session); + verify(requestTracker2).onSessionReady(session); + + assertThat(MyNodeStateListener.onSessionReadyCalled).isTrue(); + assertThat(MyNodeStateListener.onUpCalled).isTrue(); + + assertThat(MySchemaChangeListener.onSessionReadyCalled).isTrue(); + + assertThat(MyRequestTracker.onSessionReadyCalled).isTrue(); + } + + verify(nodeListener1).close(); + verify(nodeListener2).close(); + + verify(schemaListener1).close(); + verify(schemaListener2).close(); + + verify(requestTracker1).close(); + verify(requestTracker2).close(); + + assertThat(MyNodeStateListener.closeCalled).isTrue(); + assertThat(MySchemaChangeListener.closeCalled).isTrue(); + assertThat(MyRequestTracker.closeCalled).isTrue(); + } + + public static class MyNodeStateListener extends SafeInitNodeStateListener { + + private static volatile boolean onSessionReadyCalled = false; + private static volatile boolean onUpCalled = false; + private static volatile boolean closeCalled = false; + + public MyNodeStateListener(@SuppressWarnings("unused") DriverContext ignored) { + super( + new NodeStateListenerBase() { + + @Override + public void onSessionReady(@NonNull Session session) { + onSessionReadyCalled = true; + } + + @Override + public void onUp(@NonNull Node node) { + onUpCalled = true; + } + + @Override + public void close() { + closeCalled = true; + } + }, + true); + } + } + + public static class MySchemaChangeListener extends SchemaChangeListenerBase { + + private static volatile boolean onSessionReadyCalled = false; + private 
static volatile boolean closeCalled = false; + + public MySchemaChangeListener(@SuppressWarnings("unused") DriverContext ignored) {} + + @Override + public void onSessionReady(@NonNull Session session) { + onSessionReadyCalled = true; + } + + @Override + public void close() throws Exception { + closeCalled = true; + } + } + + public static class MyRequestTracker implements RequestTracker { + + private static volatile boolean onSessionReadyCalled = false; + private static volatile boolean closeCalled = false; + + public MyRequestTracker(@SuppressWarnings("unused") DriverContext ignored) {} + + @Override + public void onSessionReady(@NonNull Session session) { + onSessionReadyCalled = true; + } + + @Override + public void close() throws Exception { + closeCalled = true; } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java index e67d0fdc462..41a462ae1c8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java @@ -45,6 +45,7 @@ import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.codec.ConsistencyLevel; import java.time.Duration; +import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; @@ -88,7 +89,9 @@ public class RequestLoggerIT { private final DriverConfigLoader requestLoader = SessionUtils.configLoaderBuilder() - .withClass(DefaultDriverOption.REQUEST_TRACKER_CLASS, RequestLogger.class) + .withClassList( + DefaultDriverOption.REQUEST_TRACKER_CLASSES, + Collections.singletonList(RequestLogger.class)) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED, true) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_SLOW_ENABLED, true) 
.withBoolean(DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED, true) @@ -115,12 +118,14 @@ public class RequestLoggerIT { .withBoolean(DefaultDriverOption.REQUEST_LOGGER_STACK_TRACES, false) .build(); - private SessionRule sessionRuleRequest = + private final SessionRule sessionRuleRequest = SessionRule.builder(simulacronRule).withConfigLoader(requestLoader).build(); private final DriverConfigLoader nodeLoader = SessionUtils.configLoaderBuilder() - .withClass(DefaultDriverOption.REQUEST_TRACKER_CLASS, RequestNodeLoggerExample.class) + .withClassList( + DefaultDriverOption.REQUEST_TRACKER_CLASSES, + Collections.singletonList(RequestNodeLoggerExample.class)) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED, true) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_SLOW_ENABLED, true) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED, true) @@ -150,14 +155,16 @@ public class RequestLoggerIT { DefaultDriverOption.LOAD_BALANCING_POLICY_CLASS, SortingLoadBalancingPolicy.class) .build(); - private SessionRule sessionRuleNode = + private final SessionRule sessionRuleNode = SessionRule.builder(simulacronRule).withConfigLoader(nodeLoader).build(); - private SessionRule sessionRuleDefaults = + private final SessionRule sessionRuleDefaults = SessionRule.builder(simulacronRule) .withConfigLoader( SessionUtils.configLoaderBuilder() - .withClass(DefaultDriverOption.REQUEST_TRACKER_CLASS, RequestLogger.class) + .withClassList( + DefaultDriverOption.REQUEST_TRACKER_CLASSES, + Collections.singletonList(RequestLogger.class)) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED, true) .withBoolean(DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED, true) .startProfile("low-threshold") diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index fb5780a30ff..455313758c6 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -7,7 +7,7 @@ actively connected). 
* [Node] instances are mutable, the fields will update in real time. * getting notifications: - [CqlSession.builder().withNodeStateListener][SessionBuilder.withNodeStateListener]. + [CqlSession.builder().addNodeStateListener][SessionBuilder.addNodeStateListener]. ----- @@ -59,8 +59,10 @@ Object rawDseVersion = node.getExtras().get(DseNodeProperties.DSE_VERSION); Version dseVersion = (rawDseVersion == null) ? null : (Version) rawDseVersion; ``` +### Notifications + If you need to follow node state changes, you don't need to poll the metadata manually; instead, -you can register a listener to get notified when changes occur: +you can register one or more listeners to get notified when changes occur: ```java NodeStateListener listener = @@ -71,13 +73,28 @@ NodeStateListener listener = } }; CqlSession session = CqlSession.builder() - .withNodeStateListener(listener) + .addNodeStateListener(listener) .build(); ``` See [NodeStateListener] for the list of available methods. [NodeStateListenerBase] is a convenience implementation with empty methods, for when you only need to override a few of them. +It is also possible to register one or more listeners via the configuration: + +```hocon +datastax-java-driver { + advanced { + node-state-listener.classes = [com.example.app.MyNodeStateListener1,com.example.app.MyNodeStateListener2] + } +} +``` + +Listeners registered via configuration will be instantiated with reflection; they must have a public +constructor taking a `DriverContext` argument. + +The two registration methods (programmatic and via the configuration) can be used simultaneously. + ### Advanced topics #### Forcing a node down @@ -124,5 +141,5 @@ the source code. 
[NodeState]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeState.html [NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html [NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.withNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- [DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 43a3ec1880a..f9e1f762a23 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -6,7 +6,7 @@ * immutable (must invoke again to observe changes). * getting notifications: - [CqlSession.builder().withSchemaChangeListener][SessionBuilder#withSchemaChangeListener]. + [CqlSession.builder().addSchemaChangeListener][SessionBuilder#addSchemaChangeListener]. * enabling/disabling: `advanced.metadata.schema.enabled` in the configuration, or [session.setSchemaMetadataEnabled()][Session#setSchemaMetadataEnabled]. * filtering: `advanced.metadata.schema.refreshed-keyspaces` in the configuration. @@ -70,7 +70,7 @@ All other types (keyspaces, tables, etc.) 
are identical to their OSS counterpart ### Notifications If you need to follow schema changes, you don't need to poll the metadata manually; instead, -you can register a listener to get notified when changes occur: +you can register one or more listeners to get notified when changes occur: ```java SchemaChangeListener listener = @@ -81,7 +81,7 @@ SchemaChangeListener listener = } }; CqlSession session = CqlSession.builder() - .withSchemaChangeListener(listener) + .addSchemaChangeListener(listener) .build(); session.execute("CREATE TABLE test.foo (k int PRIMARY KEY)"); @@ -90,6 +90,20 @@ session.execute("CREATE TABLE test.foo (k int PRIMARY KEY)"); See [SchemaChangeListener] for the list of available methods. [SchemaChangeListenerBase] is a convenience implementation with empty methods, for when you only need to override a few of them. +It is also possible to register one or more listeners via the configuration: + +```hocon +datastax-java-driver { + advanced { + schema-change-listener.classes = [com.example.app.MySchemaChangeListener1,com.example.app.MySchemaChangeListener2] + } +} +``` + +Listeners registered via configuration will be instantiated with reflection; they must have a public +constructor taking a `DriverContext` argument. + +The two registration methods (programmatic and via the configuration) can be used simultaneously. 
### Configuration @@ -312,7 +326,7 @@ take a look at the [Performance](../../performance/#schema-updates) page for a f [SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html [Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- [Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#withSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- [ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- [com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html [DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 4aa9fff0404..e1c9c1d6345 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -7,7 +7,7 @@ Callback that gets invoked for every request: success or error, globally and for * `advanced.request-tracker` in the configuration; defaults to none, also available: request 
logger, or write your own. * or programmatically: - [CqlSession.builder().withRequestTracker()][SessionBuilder.withRequestTracker]. + [CqlSession.builder().addRequestTracker()][SessionBuilder.addRequestTracker]. ----- @@ -16,28 +16,34 @@ every application request. The driver comes with an optional implementation that ### Configuration -The tracker is enabled in the [configuration](../configuration/). The default implementation does -nothing: +Request trackers can be declared in the [configuration](../configuration/) as follows: ``` datastax-java-driver.advanced.request-tracker { - class = NoopRequestTracker + classes = [com.example.app.MyTracker1,com.example.app.MyTracker2] } ``` -To use a different tracker, specify the name of a class that implements [RequestTracker]. One such -class is the built-in request logger (see the next section), you can also create your own -implementation. +By default, no tracker is registered. To register your own trackers, specify the name of a class +that implements [RequestTracker]. One such class is the built-in request logger (see the next +section), but you can also create your own implementation. + +Also, trackers registered via configuration will be instantiated with reflection; they must have a +public constructor taking a `DriverContext` argument. Sometimes you have a tracker instance already in your code, and need to pass it programmatically instead of referencing a class. The session builder has a method for that: ```java -RequestTracker myTracker = ...; -CqlSession session = CqlSession.builder().withRequestTracker(myTracker).build(); +RequestTracker myTracker1 = ...; +RequestTracker myTracker2 = ...; +CqlSession session = CqlSession.builder() + .addRequestTracker(myTracker1) + .addRequestTracker(myTracker2) + .build(); ``` -When you provide the tracker in this manner, the configuration will be ignored. +The two registration methods (programmatic and via the configuration) can be used simultaneously. 
### Request logger @@ -46,7 +52,7 @@ requests as "slow" above a given threshold, limit the line size for large querie ``` datastax-java-driver.advanced.request-tracker { - class = RequestLogger + classes = [RequestLogger] logs { # Whether to log successful requests. @@ -118,4 +124,4 @@ com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined c ``` [RequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.withRequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 4e0de35703c..c2fc9c13981 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,68 @@ ## Upgrade guide +### 4.13.0 + +#### Registration of multiple listeners and trackers + +[JAVA-2951](https://datastax-oss.atlassian.net/browse/JAVA-2951) introduced the ability to register +more than one instance of the following interfaces: + +* [RequestTracker](https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/tracker/RequestTracker.html) +* [NodeStateListener](https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html) +* [SchemaChangeListener](https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html) + +Multiple components can now be registered both programmatically and through the configuration. 
_If +both approaches are used, components will add up and will all be registered_ (whereas previously, +the programmatic approach would take precedence over the configuration one). + +When using the programmatic approach to register multiple components, you should use the new +`SessionBuilder` methods `addRequestTracker`, `addNodeStateListener` and `addSchemaChangeListener`: + +```java +CqlSessionBuilder builder = CqlSession.builder(); +builder + .addRequestTracker(tracker1) + .addRequestTracker(tracker2); +builder + .addNodeStateListener(nodeStateListener1) + .addNodeStateListener(nodeStateListener2); +builder + .addSchemaChangeListener(schemaChangeListener1) + .addSchemaChangeListener(schemaChangeListener2); +``` + +To support registration of multiple components through the configuration, the following +configuration options were deprecated because they only allow one component to be declared: + +* `advanced.request-tracker.class` +* `advanced.node-state-listener.class` +* `advanced.schema-change-listener.class` + +They are still honored, but the driver will log a warning if they are used. They should now be +replaced with the following ones, that accept a list of classes to instantiate, instead of just +one: + +* `advanced.request-tracker.classes` +* `advanced.node-state-listener.classes` +* `advanced.schema-change-listener.classes` + +Example: + +```hocon +datastax-java-driver { + advanced { + # RequestLogger is a driver built-in tracker + request-tracker.classes = [RequestLogger,com.example.app.MyRequestTracker] + node-state-listener.classes = [com.example.app.MyNodeStateListener1,com.example.app.MyNodeStateListener2] + schema-change-listener.classes = [com.example.app.MySchemaChangeListener] + } +} +``` + +When more than one component of the same type is registered, the driver will distribute received +signals to all components in sequence, by order of their registration, starting with the +programmatically-provided ones. 
If a component throws an error, the error is intercepted and logged. + ### 4.12.0 #### MicroProfile Metrics upgraded to 3.0 From 7e74450ebe9d70136b164a5679ba8e40b8cc0b9e Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 29 Jul 2021 18:51:28 +0200 Subject: [PATCH 755/979] JAVA-2953: Promote ProgrammaticPlainTextAuthProvider to the public API and add credentials hot-reload (#1564) --- changelog/README.md | 2 + .../DseProgrammaticPlainTextAuthProvider.java | 32 ----- .../core/auth/PlainTextAuthProviderBase.java | 6 +- .../ProgrammaticPlainTextAuthProvider.java | 134 ++++++++++++++++++ .../api/core/session/SessionBuilder.java | 2 +- .../core/auth/PlainTextAuthProvider.java | 6 +- .../ProgrammaticPlainTextAuthProvider.java | 66 --------- .../driver/internal/core/util/Strings.java | 16 +++ ...ProgrammaticPlainTextAuthProviderTest.java | 99 +++++++++++++ .../core/auth/PlainTextAuthProviderIT.java | 52 ++++++- manual/core/authentication/README.md | 23 ++- 11 files changed, 330 insertions(+), 108 deletions(-) delete mode 100644 core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java delete mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProviderTest.java diff --git a/changelog/README.md b/changelog/README.md index ed90fea7c97..014d4f97835 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,8 @@ ### 4.13.0 (in progress) +- [improvement] JAVA-2953: Promote ProgrammaticPlainTextAuthProvider to the public API and add + credentials hot-reload - [improvement] JAVA-2951: Accept multiple node state listeners, schema change listeners and request trackers Merged from 4.12.x: diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java deleted file mode 100644 index a8feb1cd2ba..00000000000 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/auth/DseProgrammaticPlainTextAuthProvider.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.dse.driver.internal.core.auth; - -import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; - -/** - * @deprecated The driver's default plain text providers now support both Apache Cassandra and DSE. - * This type was preserved for backward compatibility, but {@link - * ProgrammaticPlainTextAuthProvider} should be used instead. 
- */ -@Deprecated -public class DseProgrammaticPlainTextAuthProvider extends ProgrammaticPlainTextAuthProvider { - - public DseProgrammaticPlainTextAuthProvider( - String username, String password, String authorizationId) { - super(username, password, authorizationId); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index e3869ba8319..c9241577d6b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -36,7 +36,8 @@ * Common infrastructure for plain text auth providers. * *

          This can be reused to write an implementation that retrieves the credentials from another - * source than the configuration. + * source than the configuration. The driver offers one built-in implementation: {@link + * ProgrammaticPlainTextAuthProvider}. */ @ThreadSafe public abstract class PlainTextAuthProviderBase implements AuthProvider { @@ -58,6 +59,9 @@ protected PlainTextAuthProviderBase(@NonNull String logPrefix) { * Retrieves the credentials from the underlying source. * *

          This is invoked every time the driver opens a new connection. + * + * @param endPoint The endpoint being contacted. + * @param serverAuthenticator The authenticator class sent by the endpoint. */ @NonNull protected abstract Credentials getCredentials( diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java new file mode 100644 index 00000000000..7166e72b1f5 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -0,0 +1,134 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.auth; + +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.internal.core.util.Strings; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; +import net.jcip.annotations.ThreadSafe; + +/** + * A simple plaintext {@link AuthProvider} that receives the credentials programmatically instead of + * pulling them from the configuration. + * + *

          To use this class, create an instance with the appropriate credentials to use and pass it to + * your session builder: + * + *

          + * AuthProvider authProvider = new ProgrammaticPlainTextAuthProvider("...", "...");
          + * CqlSession session =
          + *     CqlSession.builder()
          + *         .addContactEndPoints(...)
          + *         .withAuthProvider(authProvider)
          + *         .build();
          + * 
          + * + *

          It also offers the possibility of changing the credentials at runtime. The new credentials + * will be used for all connections initiated after the change. + * + *

          Implementation Note: this implementation is not particularly suited for highly-sensitive + * applications: it stores the credentials to use as private fields, and even if the fields are char + * arrays rather than strings to make it difficult to dump their contents, they are never cleared + * until the provider itself is garbage-collected, which typically only happens when the session is + * closed. + * + * @see SessionBuilder#withAuthProvider(AuthProvider) + * @see SessionBuilder#withAuthCredentials(String, String) + * @see SessionBuilder#withAuthCredentials(String, String, String) + */ +@ThreadSafe +public class ProgrammaticPlainTextAuthProvider extends PlainTextAuthProviderBase { + + private volatile char[] username; + private volatile char[] password; + private volatile char[] authorizationId; + + /** Builds an instance for simple username/password authentication. */ + public ProgrammaticPlainTextAuthProvider(@NonNull String username, @NonNull String password) { + this(username, password, ""); + } + + /** + * Builds an instance for username/password authentication, and proxy authentication with the + * given authorizationId. + * + *

          This feature is only available with Datastax Enterprise. If the target server is Apache + * Cassandra, use {@link #ProgrammaticPlainTextAuthProvider(String, String)} instead, or set the + * authorizationId to an empty string. + */ + public ProgrammaticPlainTextAuthProvider( + @NonNull String username, @NonNull String password, @NonNull String authorizationId) { + // This will typically be built before the session so we don't know the log prefix yet. Pass an + // empty string, it's only used in one log message. + super(""); + this.username = Strings.requireNotEmpty(username, "username").toCharArray(); + this.password = Strings.requireNotEmpty(password, "password").toCharArray(); + this.authorizationId = + Objects.requireNonNull(authorizationId, "authorizationId cannot be null").toCharArray(); + } + + /** + * Changes the username. + * + *

          The new credentials will be used for all connections initiated after this method was called. + * + * @param username the new name. + */ + public void setUsername(@NonNull String username) { + this.username = Strings.requireNotEmpty(username, "username").toCharArray(); + } + + /** + * Changes the password. + * + *

          The new credentials will be used for all connections initiated after this method was called. + * + * @param password the new password. + */ + public void setPassword(@NonNull String password) { + this.password = Strings.requireNotEmpty(password, "password").toCharArray(); + } + + /** + * Changes the authorization id. + * + *

          The new credentials will be used for all connections initiated after this method was called. + * + *

          This feature is only available with Datastax Enterprise. If the target server is Apache + * Cassandra, this method should not be used. + * + * @param authorizationId the new authorization id. + */ + public void setAuthorizationId(@NonNull String authorizationId) { + this.authorizationId = + Objects.requireNonNull(authorizationId, "authorizationId cannot be null").toCharArray(); + } + + /** + * {@inheritDoc} + * + *

          This implementation disregards the endpoint being connected to as well as the authenticator + * class sent by the server, and always returns the same credentials. + */ + @NonNull + @Override + protected Credentials getCredentials( + @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { + return new Credentials(username.clone(), password.clone(), authorizationId.clone()); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 02070063d3b..966372fa20d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.auth.AuthProvider; import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; +import com.datastax.oss.driver.api.core.auth.ProgrammaticPlainTextAuthProvider; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; @@ -37,7 +38,6 @@ import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.uuid.Uuids; import com.datastax.oss.driver.internal.core.ContactPoints; -import com.datastax.oss.driver.internal.core.auth.ProgrammaticPlainTextAuthProvider; import com.datastax.oss.driver.internal.core.config.cloud.CloudConfig; import com.datastax.oss.driver.internal.core.config.cloud.CloudConfigFactory; import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java 
index de951b881f1..73f320bbcdf 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java @@ -46,7 +46,11 @@ * } *

          * - * See {@code reference.conf} (in the manual or core driver JAR) for more details. + * The authentication provider cannot be changed at runtime; however, the credentials can be changed + * at runtime: the new ones will be used for new connection attempts once the configuration gets + * {@linkplain com.datastax.oss.driver.api.core.config.DriverConfigLoader#reload() reloaded}. + * + *

          See {@code reference.conf} (in the manual or core driver JAR) for more details. */ @ThreadSafe public class PlainTextAuthProvider extends PlainTextAuthProviderBase { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java deleted file mode 100644 index ba0bc4b41db..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/ProgrammaticPlainTextAuthProvider.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.datastax.oss.driver.internal.core.auth; - -import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase; -import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.api.core.session.SessionBuilder; -import edu.umd.cs.findbugs.annotations.NonNull; -import net.jcip.annotations.ThreadSafe; - -/** - * Alternative plaintext auth provider that receives the credentials programmatically instead of - * pulling them from the configuration. 
- * - * @see SessionBuilder#withAuthCredentials(String, String) - * @see SessionBuilder#withAuthCredentials(String, String, String) - */ -@ThreadSafe -public class ProgrammaticPlainTextAuthProvider extends PlainTextAuthProviderBase { - private final String username; - private final String password; - private final String authorizationId; - - /** Builds an instance for simple username/password authentication. */ - public ProgrammaticPlainTextAuthProvider(String username, String password) { - this(username, password, ""); - } - - /** - * Builds an instance for username/password authentication, and proxy authentication with the - * given authorizationId. - * - *

          This feature is only available with Datastax Enterprise. If the target server is Apache - * Cassandra, the authorizationId will be ignored. - */ - public ProgrammaticPlainTextAuthProvider( - String username, String password, String authorizationId) { - // This will typically be built before the session so we don't know the log prefix yet. Pass an - // empty string, it's only used in one log message. - super(""); - this.username = username; - this.password = password; - this.authorizationId = authorizationId; - } - - @NonNull - @Override - protected Credentials getCredentials( - @NonNull EndPoint endPoint, @NonNull String serverAuthenticator) { - return new Credentials( - username.toCharArray(), password.toCharArray(), authorizationId.toCharArray()); - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java index b0aa9128c76..7d7aa454971 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Strings.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import java.util.Locale; +import java.util.Objects; public class Strings { @@ -255,6 +256,21 @@ public static boolean isLongLiteral(String str) { return true; } + /** + * Checks whether the given text is not null and not empty. + * + * @param text The text to check. + * @param name The name of the argument. + * @return The text (for method chaining). 
+ */ + public static String requireNotEmpty(String text, String name) { + Objects.requireNonNull(text, name + " cannot be null"); + if (text.isEmpty()) { + throw new IllegalArgumentException(name + " cannot be empty"); + } + return text; + } + private Strings() {} private static final ImmutableSet RESERVED_KEYWORDS = diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProviderTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProviderTest.java new file mode 100644 index 00000000000..d12b2fe1b3a --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProviderTest.java @@ -0,0 +1,99 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.auth; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.auth.PlainTextAuthProviderBase.Credentials; +import com.datastax.oss.driver.api.core.metadata.EndPoint; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.Strict.class) +public class ProgrammaticPlainTextAuthProviderTest { + + @Mock private EndPoint endpoint; + + @Test + public void should_return_correct_credentials_without_authorization_id() { + // given + ProgrammaticPlainTextAuthProvider provider = + new ProgrammaticPlainTextAuthProvider("user", "pass"); + // when + Credentials credentials = provider.getCredentials(endpoint, "irrelevant"); + // then + assertThat(credentials.getUsername()).isEqualTo("user".toCharArray()); + assertThat(credentials.getPassword()).isEqualTo("pass".toCharArray()); + assertThat(credentials.getAuthorizationId()).isEqualTo(new char[0]); + } + + @Test + public void should_return_correct_credentials_with_authorization_id() { + // given + ProgrammaticPlainTextAuthProvider provider = + new ProgrammaticPlainTextAuthProvider("user", "pass", "proxy"); + // when + Credentials credentials = provider.getCredentials(endpoint, "irrelevant"); + // then + assertThat(credentials.getUsername()).isEqualTo("user".toCharArray()); + assertThat(credentials.getPassword()).isEqualTo("pass".toCharArray()); + assertThat(credentials.getAuthorizationId()).isEqualTo("proxy".toCharArray()); + } + + @Test + public void should_change_username() { + // given + ProgrammaticPlainTextAuthProvider provider = + new ProgrammaticPlainTextAuthProvider("user", "pass"); + // when + provider.setUsername("user2"); + Credentials credentials = provider.getCredentials(endpoint, "irrelevant"); + // then + assertThat(credentials.getUsername()).isEqualTo("user2".toCharArray()); + 
assertThat(credentials.getPassword()).isEqualTo("pass".toCharArray()); + assertThat(credentials.getAuthorizationId()).isEqualTo(new char[0]); + } + + @Test + public void should_change_password() { + // given + ProgrammaticPlainTextAuthProvider provider = + new ProgrammaticPlainTextAuthProvider("user", "pass"); + // when + provider.setPassword("pass2"); + Credentials credentials = provider.getCredentials(endpoint, "irrelevant"); + // then + assertThat(credentials.getUsername()).isEqualTo("user".toCharArray()); + assertThat(credentials.getPassword()).isEqualTo("pass2".toCharArray()); + assertThat(credentials.getAuthorizationId()).isEqualTo(new char[0]); + } + + @Test + public void should_change_authorization_id() { + // given + ProgrammaticPlainTextAuthProvider provider = + new ProgrammaticPlainTextAuthProvider("user", "pass", "proxy"); + // when + provider.setAuthorizationId("proxy2"); + Credentials credentials = provider.getCredentials(endpoint, "irrelevant"); + // then + assertThat(credentials.getUsername()).isEqualTo("user".toCharArray()); + assertThat(credentials.getPassword()).isEqualTo("pass".toCharArray()); + assertThat(credentials.getAuthorizationId()).isEqualTo("proxy2".toCharArray()); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java index 8558f1d650c..4ff36a44755 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/auth/PlainTextAuthProviderIT.java @@ -18,13 +18,15 @@ import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.auth.AuthProvider; +import com.datastax.oss.driver.api.core.auth.ProgrammaticPlainTextAuthProvider; import 
com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; -import com.google.common.util.concurrent.Uninterruptibles; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import java.util.concurrent.TimeUnit; import org.junit.BeforeClass; import org.junit.ClassRule; @@ -63,7 +65,7 @@ public void should_connect_with_credentials() { @Test public void should_connect_with_programmatic_credentials() { - SessionBuilder builder = + SessionBuilder builder = SessionUtils.baseBuilder() .addContactEndPoints(CCM_RULE.getContactPoints()) .withAuthCredentials("cassandra", "cassandra"); @@ -73,6 +75,26 @@ public void should_connect_with_programmatic_credentials() { } } + @Test + public void should_connect_with_programmatic_provider() { + + AuthProvider authProvider = new ProgrammaticPlainTextAuthProvider("cassandra", "cassandra"); + SessionBuilder builder = + SessionUtils.baseBuilder() + .addContactEndPoints(CCM_RULE.getContactPoints()) + // Open more than one connection in order to validate that the provider is creating + // valid Credentials for every invocation of PlainTextAuthProviderBase.getCredentials. 
+ .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withInt(DefaultDriverOption.CONNECTION_POOL_LOCAL_SIZE, 4) + .build()) + .withAuthProvider(authProvider); + + try (CqlSession session = (CqlSession) builder.build()) { + session.execute("select * from system.local"); + } + } + @Test(expected = AllNodesFailedException.class) public void should_not_connect_with_invalid_credentials() { DriverConfigLoader loader = @@ -86,6 +108,32 @@ public void should_not_connect_with_invalid_credentials() { } } + @Test(expected = AllNodesFailedException.class) + public void should_not_connect_with_invalid_programmatic_credentials() { + SessionBuilder builder = + SessionUtils.baseBuilder() + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withAuthCredentials("baduser", "badpass"); + + try (CqlSession session = (CqlSession) builder.build()) { + session.execute("select * from system.local"); + } + } + + @Test(expected = AllNodesFailedException.class) + public void should_not_connect_with_invalid_programmatic_provider() { + + AuthProvider authProvider = new ProgrammaticPlainTextAuthProvider("baduser", "badpass"); + SessionBuilder builder = + SessionUtils.baseBuilder() + .addContactEndPoints(CCM_RULE.getContactPoints()) + .withAuthProvider(authProvider); + + try (CqlSession session = (CqlSession) builder.build()) { + session.execute("select * from system.local"); + } + } + @Test(expected = AllNodesFailedException.class) public void should_not_connect_without_credentials() { try (CqlSession session = SessionUtils.newSession(CCM_RULE)) { diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 9a11ad0a8e0..2a5ee302e09 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -108,8 +108,20 @@ CqlSession session = .build(); ``` -For convenience, there are shortcuts that take the credentials directly. 
This is equivalent to -using `PlainTextAuthProvider` in the configuration: +The driver also offers a simple, built-in plain text authentication provider: +[ProgrammaticPlainTextAuthProvider]. The following is equivalent to using `PlainTextAuthProvider` in +the configuration: + +```java +AuthProvider authProvider = new ProgrammaticPlainTextAuthProvider("user", "pass"); + +CqlSession session = + CqlSession.builder() + .withAuthProvider(authProvider) + .build(); +``` + +For convenience, there are shortcuts that take the credentials directly: ```java CqlSession session = @@ -124,9 +136,9 @@ CqlSession session = .build(); ``` -One downside of `withAuthCredentials` is that the credentials are stored in clear text in memory; -this means they are vulnerable to an attacker who is able to perform memory dumps. If this is not -acceptable for you, consider writing your own [AuthProvider] implementation; +One downside of the driver's built-in authentication providers is that the credentials are stored in +clear text in memory; this means they are vulnerable to an attacker who is able to perform memory +dumps. If this is not acceptable for you, consider writing your own [AuthProvider] implementation; [PlainTextAuthProviderBase] is a good starting point. 
Similarly, [ProgrammaticDseGssApiAuthProvider] lets you configure GSSAPI programmatically: @@ -218,6 +230,7 @@ session.execute(statement); [AuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/AuthProvider.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html [PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[ProgrammaticPlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html [DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html [ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html [ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- From 7aaa0a2065bad86cb32537dc58f08af1854fc9f0 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:12:57 +0200 Subject: [PATCH 756/979] Update changelog for 4.11.3 release --- changelog/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 8f994136575..d08774824c1 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.11.3 (in progress) +### 4.11.3 - [bug] JAVA-2949: Provide mapper support for CompletionStage> - [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck From dd4a6b6628202ea3f4381073fbb7d6a35493ab9f Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:16:43 +0200 Subject: [PATCH 757/979] [maven-release-plugin] prepare release 4.11.3 --- 
bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f107d8bb9b5..6b4f118624a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-core-shaded - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-mapper-processor - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-mapper-runtime - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-query-builder - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-test-infra - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-metrics-micrometer - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss java-driver-metrics-microprofile - 4.11.3-SNAPSHOT + 4.11.3 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 2dc72176cce..c6ac9e401e2 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 9fcd5642208..d3187f82856 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index b7e5bd41edb..0dc408aa030 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss 
java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index c2b3a76164f..6587a5d050e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.3-SNAPSHOT + 4.11.3 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index d306d17b365..3c7d2f957ec 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index a8d29a6154f..6cd2206e545 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index dbfda81840e..fc7063ecffb 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 0c4c444bf17..49c35d751b3 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index e5946089c8f..1a7b087473f 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index bb094c7c158..1013e668376 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml 
@@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 2abaa6ce103..c16cc4c10f9 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.11.3 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 601a40ee9d8..405a18d5b59 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 2b23d248516..d0a0c1bd054 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3-SNAPSHOT + 4.11.3 java-driver-test-infra bundle From 508726b9af2d0d7a60ff6ffa5fb7a48c53932e46 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:16:53 +0200 Subject: [PATCH 758/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 
6b4f118624a..5a6b74123dd 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.11.3 + 4.11.4-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index c6ac9e401e2..e54646d2481 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index d3187f82856..ba63f6d5d86 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 0dc408aa030..fc94a248875 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 6587a5d050e..8838521df56 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.11.3 + 4.11.4-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 3c7d2f957ec..ed859be608d 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 6cd2206e545..327f4ae114b 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index fc7063ecffb..e76da145a0f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 49c35d751b3..3228a2f962f 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 1a7b087473f..e0e6c6bbde9 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1013e668376..cb620c33c30 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c16cc4c10f9..b2406e29c33 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.11.3 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 405a18d5b59..67bc6192b07 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index d0a0c1bd054..12faa3ac8e6 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.11.3 + 4.11.4-SNAPSHOT java-driver-test-infra bundle From 59e7647b98501ae5a88f97d8d4404c89cecd1371 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:26:41 +0200 Subject: [PATCH 759/979] Update changelog for 4.12.1 release --- changelog/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 18e34c8591c..c74d6293a3b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,7 +2,7 @@ -### 4.12.1 (in progress) +### 4.12.1 Merged from 4.11.x: @@ -25,7 +25,7 @@ Merged from 4.11.x: - [bug] JAVA-2943: Prevent session leak with wrong keyspace name - [bug] JAVA-2938: OverloadedException message is misleading -### 4.11.3 (in progress) +### 4.11.3 - [bug] JAVA-2949: Provide mapper support for CompletionStage> - [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck From 57d7abd0707011f794cd0e91acd1c0901d922297 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:30:29 +0200 Subject: [PATCH 760/979] [maven-release-plugin] prepare release 4.12.1 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 
+- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index e47c97e29e3..78b6ec22679 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-core-shaded - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-mapper-processor - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-mapper-runtime - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-query-builder - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-test-infra - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-metrics-micrometer - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss java-driver-metrics-microprofile - 4.13.0-SNAPSHOT + 4.12.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 076dbfa6918..a1eab3ebb40 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 34d1b7478b6..fc7abebf8bc 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 2a579b0440a..c5c6e450caf 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-distribution 
diff --git a/examples/pom.xml b/examples/pom.xml index b95827307ad..cad5b2d0df9 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.13.0-SNAPSHOT + 4.12.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index c5b6cc77594..ef0cb58b58f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index cc14f3d2e8b..351e591ac4b 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 68d76446bf1..dd55163b126 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8c2755c7c62..78bfef51110 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 78c04909c55..987eeb739c6 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 12e8b9641dc..75cbab65ce2 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 
4.13.0-SNAPSHOT + 4.12.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index aa72a4c7c0f..c0c20c8cd08 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.12.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 3ae527917a1..a1d378b47ed 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index d29046b4934..0e70ff32d6d 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.12.1 java-driver-test-infra bundle From f9e0d4b08ef0c882c6a6f2ef606303c3ed3912b3 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 15:30:39 +0200 Subject: [PATCH 761/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 78b6ec22679..8d0c87a8760 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 
+21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.12.1 + 4.12.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index a1eab3ebb40..2fd0c88b588 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index fc7abebf8bc..9dac054760c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index c5c6e450caf..8c3b8789f59 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index cad5b2d0df9..8cd1d67153e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.12.1 + 4.12.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index ef0cb58b58f..2858b60ab69 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 351e591ac4b..ccfaed89f67 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index dd55163b126..7a73250d3a4 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 78bfef51110..fe0be368d73 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 987eeb739c6..dc51e51c5b4 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 75cbab65ce2..e037bab5b9e 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c0c20c8cd08..a5d2e268b8a 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -954,7 +954,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.12.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index a1d378b47ed..eb4ac4c809f 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0e70ff32d6d..76b95bef301 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.12.1 + 4.12.2-SNAPSHOT java-driver-test-infra bundle From 4270f93277249abb513bc2abf2ff7a7c481b1d0d Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 30 Jul 2021 09:40:43 -0500 Subject: [PATCH 762/979] JAVA-2940: Add GraalVM native image build configurations (#1560) --- changelog/README.md | 1 + core-shaded/pom.xml | 33 +- .../internal/core/channel/ChannelFactory.java | 197 ++++++----- .../java-driver-core/native-image.properties | 7 + .../java-driver-core/proxy.json | 3 + .../java-driver-core/reflection.json | 154 +++++++++ manual/core/compression/README.md | 3 +- manual/core/graalvm/README.md | 315 ++++++++++++++++++ manual/core/integration/README.md | 2 + .../native-image.properties | 1 + .../reflection.json | 6 + .../native-image.properties | 1 + .../reflection.json | 6 + pom.xml | 1 + 14 files changed, 641 insertions(+), 89 deletions(-) create mode 100644 core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties create mode 100644 core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/proxy.json create mode 100644 
core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json create mode 100644 manual/core/graalvm/README.md create mode 100644 metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties create mode 100644 metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/reflection.json create mode 100644 metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties create mode 100644 metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/reflection.json diff --git a/changelog/README.md b/changelog/README.md index 014d4f97835..7bae766a602 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -4,6 +4,7 @@ ### 4.13.0 (in progress) +- [improvement] JAVA-2940: Add GraalVM native image build configurations - [improvement] JAVA-2953: Promote ProgrammaticPlainTextAuthProvider to the public API and add credentials hot-reload - [improvement] JAVA-2951: Accept multiple node state listeners, schema change listeners and request trackers diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 076dbfa6918..ad368ee9259 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -148,6 +148,10 @@ + io.netty com.datastax.oss.driver.shaded.netty @@ -174,7 +178,9 @@ com.datastax.oss:* - META-INF/** + + META-INF/MANIFEST.MF + META-INF/maven/** @@ -253,6 +259,31 @@ + + + com.google.code.maven-replacer-plugin + replacer + 1.5.3 + + + shade-graalvm-files + package + + replace + + + + + false + ${project.build.directory}/classes/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json,${project.build.directory}/shaded-sources/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json + + + io.netty + com.datastax.oss.driver.shaded.netty + + + + maven-javadoc-plugin diff 
--git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java index 4e2defdce49..f2363cbd29c 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/ChannelFactory.java @@ -272,98 +272,121 @@ ChannelInitializer initializer( DriverChannelOptions options, NodeMetricUpdater nodeMetricUpdater, CompletableFuture resultFuture) { - return new ChannelInitializer() { - @Override - protected void initChannel(Channel channel) { - try { - DriverExecutionProfile defaultConfig = context.getConfig().getDefaultProfile(); - - long setKeyspaceTimeoutMillis = - defaultConfig - .getDuration(DefaultDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT) - .toMillis(); - int maxFrameLength = - (int) defaultConfig.getBytes(DefaultDriverOption.PROTOCOL_MAX_FRAME_LENGTH); - int maxRequestsPerConnection = - defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_REQUESTS); - int maxOrphanRequests = - defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS); - if (maxOrphanRequests >= maxRequestsPerConnection) { - if (LOGGED_ORPHAN_WARNING.compareAndSet(false, true)) { - LOG.warn( - "[{}] Invalid value for {}: {}. It must be lower than {}. 
" - + "Defaulting to {} (1/4 of max-requests) instead.", - logPrefix, - DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS.getPath(), - maxOrphanRequests, - DefaultDriverOption.CONNECTION_MAX_REQUESTS.getPath(), - maxRequestsPerConnection / 4); - } - maxOrphanRequests = maxRequestsPerConnection / 4; - } + return new ChannelFactoryInitializer( + endPoint, protocolVersion, options, nodeMetricUpdater, resultFuture); + }; + + class ChannelFactoryInitializer extends ChannelInitializer { + + private final EndPoint endPoint; + private final ProtocolVersion protocolVersion; + private final DriverChannelOptions options; + private final NodeMetricUpdater nodeMetricUpdater; + private final CompletableFuture resultFuture; + + ChannelFactoryInitializer( + EndPoint endPoint, + ProtocolVersion protocolVersion, + DriverChannelOptions options, + NodeMetricUpdater nodeMetricUpdater, + CompletableFuture resultFuture) { + + this.endPoint = endPoint; + this.protocolVersion = protocolVersion; + this.options = options; + this.nodeMetricUpdater = nodeMetricUpdater; + this.resultFuture = resultFuture; + } - InFlightHandler inFlightHandler = - new InFlightHandler( - protocolVersion, - new StreamIdGenerator(maxRequestsPerConnection), - maxOrphanRequests, - setKeyspaceTimeoutMillis, - channel.newPromise(), - options.eventCallback, - options.ownerLogPrefix); - HeartbeatHandler heartbeatHandler = new HeartbeatHandler(defaultConfig); - ProtocolInitHandler initHandler = - new ProtocolInitHandler( - context, - protocolVersion, - clusterName, - endPoint, - options, - heartbeatHandler, - productType == null); - - ChannelPipeline pipeline = channel.pipeline(); - context - .getSslHandlerFactory() - .map(f -> f.newSslHandler(channel, endPoint)) - .map(h -> pipeline.addLast(SSL_HANDLER_NAME, h)); - - // Only add meter handlers on the pipeline if metrics are enabled. 
- SessionMetricUpdater sessionMetricUpdater = - context.getMetricsFactory().getSessionUpdater(); - if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_RECEIVED, null) - || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_RECEIVED, null)) { - pipeline.addLast( - INBOUND_TRAFFIC_METER_NAME, - new InboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); + @Override + protected void initChannel(Channel channel) { + try { + DriverExecutionProfile defaultConfig = context.getConfig().getDefaultProfile(); + + long setKeyspaceTimeoutMillis = + defaultConfig + .getDuration(DefaultDriverOption.CONNECTION_SET_KEYSPACE_TIMEOUT) + .toMillis(); + int maxFrameLength = + (int) defaultConfig.getBytes(DefaultDriverOption.PROTOCOL_MAX_FRAME_LENGTH); + int maxRequestsPerConnection = + defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_REQUESTS); + int maxOrphanRequests = + defaultConfig.getInt(DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS); + if (maxOrphanRequests >= maxRequestsPerConnection) { + if (LOGGED_ORPHAN_WARNING.compareAndSet(false, true)) { + LOG.warn( + "[{}] Invalid value for {}: {}. It must be lower than {}. 
" + + "Defaulting to {} (1/4 of max-requests) instead.", + logPrefix, + DefaultDriverOption.CONNECTION_MAX_ORPHAN_REQUESTS.getPath(), + maxOrphanRequests, + DefaultDriverOption.CONNECTION_MAX_REQUESTS.getPath(), + maxRequestsPerConnection / 4); } + maxOrphanRequests = maxRequestsPerConnection / 4; + } - if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_SENT, null) - || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_SENT, null)) { - pipeline.addLast( - OUTBOUND_TRAFFIC_METER_NAME, - new OutboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); - } + InFlightHandler inFlightHandler = + new InFlightHandler( + protocolVersion, + new StreamIdGenerator(maxRequestsPerConnection), + maxOrphanRequests, + setKeyspaceTimeoutMillis, + channel.newPromise(), + options.eventCallback, + options.ownerLogPrefix); + HeartbeatHandler heartbeatHandler = new HeartbeatHandler(defaultConfig); + ProtocolInitHandler initHandler = + new ProtocolInitHandler( + context, + protocolVersion, + clusterName, + endPoint, + options, + heartbeatHandler, + productType == null); + + ChannelPipeline pipeline = channel.pipeline(); + context + .getSslHandlerFactory() + .map(f -> f.newSslHandler(channel, endPoint)) + .map(h -> pipeline.addLast(SSL_HANDLER_NAME, h)); + + // Only add meter handlers on the pipeline if metrics are enabled. 
+ SessionMetricUpdater sessionMetricUpdater = context.getMetricsFactory().getSessionUpdater(); + if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_RECEIVED, null) + || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_RECEIVED, null)) { + pipeline.addLast( + INBOUND_TRAFFIC_METER_NAME, + new InboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); + } - pipeline - .addLast( - FRAME_TO_BYTES_ENCODER_NAME, - new FrameEncoder(context.getFrameCodec(), maxFrameLength)) - .addLast( - BYTES_TO_FRAME_DECODER_NAME, - new FrameDecoder(context.getFrameCodec(), maxFrameLength)) - // Note: HeartbeatHandler is inserted here once init completes - .addLast(INFLIGHT_HANDLER_NAME, inFlightHandler) - .addLast(INIT_HANDLER_NAME, initHandler); - - context.getNettyOptions().afterChannelInitialized(channel); - } catch (Throwable t) { - // If the init handler throws an exception, Netty swallows it and closes the channel. We - // want to propagate it instead, so fail the outer future (the result of connect()). - resultFuture.completeExceptionally(t); - throw t; + if (nodeMetricUpdater.isEnabled(DefaultNodeMetric.BYTES_SENT, null) + || sessionMetricUpdater.isEnabled(DefaultSessionMetric.BYTES_SENT, null)) { + pipeline.addLast( + OUTBOUND_TRAFFIC_METER_NAME, + new OutboundTrafficMeter(nodeMetricUpdater, sessionMetricUpdater)); } + + pipeline + .addLast( + FRAME_TO_BYTES_ENCODER_NAME, + new FrameEncoder(context.getFrameCodec(), maxFrameLength)) + .addLast( + BYTES_TO_FRAME_DECODER_NAME, + new FrameDecoder(context.getFrameCodec(), maxFrameLength)) + // Note: HeartbeatHandler is inserted here once init completes + .addLast(INFLIGHT_HANDLER_NAME, inFlightHandler) + .addLast(INIT_HANDLER_NAME, initHandler); + + context.getNettyOptions().afterChannelInitialized(channel); + } catch (Throwable t) { + // If the init handler throws an exception, Netty swallows it and closes the channel. We + // want to propagate it instead, so fail the outer future (the result of connect()). 
+ resultFuture.completeExceptionally(t); + throw t; } - }; + } } } diff --git a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties new file mode 100644 index 00000000000..b2fb10d32c8 --- /dev/null +++ b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties @@ -0,0 +1,7 @@ +Args=-H:IncludeResources=reference\\.conf \ + -H:IncludeResources=application\\.conf \ + -H:IncludeResources=application\\.json \ + -H:IncludeResources=application\\.properties \ + -H:IncludeResources=.*Driver\\.properties \ + -H:DynamicProxyConfigurationResources=${.}/proxy.json \ + -H:ReflectionConfigurationResources=${.}/reflection.json diff --git a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/proxy.json b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/proxy.json new file mode 100644 index 00000000000..37cf6fcf805 --- /dev/null +++ b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/proxy.json @@ -0,0 +1,3 @@ +[ + ["java.lang.reflect.TypeVariable"] +] diff --git a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json new file mode 100644 index 00000000000..6082b853611 --- /dev/null +++ b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/reflection.json @@ -0,0 +1,154 @@ +[ + { + "name": "com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.loadbalancing.DcInferringLoadBalancingPolicy", + "methods": [ { "name": 
"", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.loadbalancing.BasicLoadBalancingPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.connection.ExponentialReconnectionPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.connection.ConstantReconnectionPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.retry.DefaultRetryPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.retry.ConsistencyDowngradingRetryPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.specex.NoSpeculativeExecutionPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.specex.ConstantSpeculativeExecutionPolicy", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext", "java.lang.String" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.time.AtomicTimestampGenerator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.time.ThreadLocalTimestampGenerator", + "methods": [ { "name": "", 
"parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.time.ServerSideTimestampGenerator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.tracker.NoopRequestTracker", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.tracker.RequestLogger", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.session.throttling.PassThroughRequestThrottler", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.session.throttling.RateLimitingRequestThrottler", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metadata.NoopNodeStateListener", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metadata.schema.NoopSchemaChangeListener", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.addresstranslation.PassThroughAddressTranslator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + 
"name": "com.datastax.oss.driver.internal.core.addresstranslation.Ec2MultiRegionAddressTranslator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.ssl.DefaultSslEngineFactory", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metrics.DefaultMetricsFactory", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metrics.NoopMetricsFactory", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metrics.DropwizardMetricsFactory", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metrics.DefaultMetricIdGenerator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "com.datastax.oss.driver.internal.core.metrics.TaggingMetricIdGenerator", + "methods": [ { "name": "", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + }, + { + "name": "io.netty.channel.socket.nio.NioSocketChannel", + "methods": [ { "name": "", "parameterTypes": [] } ] + }, + { + "name": 
"io.netty.buffer.AbstractByteBufAllocator", + "methods": [ { "name": "toLeakAwareBuffer", "parameterTypes": ["io.netty.buffer.ByteBuf" ] } ] + }, + { + "name": "io.netty.util.ReferenceCountUtil", + "methods": [ { "name": "touch", "parameterTypes": ["java.lang.Object", "java.lang.Object" ] } ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField", + "fields": [ {"name": "producerIndex", "allowUnsafeAccess": true} ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField", + "fields": [ {"name": "producerLimit", "allowUnsafeAccess": true} ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField", + "fields": [ {"name": "consumerIndex", "allowUnsafeAccess": true} ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields", + "fields": [ {"name": "producerIndex", "allowUnsafeAccess": true} ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields", + "fields": [ {"name": "producerLimit", "allowUnsafeAccess": true} ] + }, + { + "name" : "io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields", + "fields": [ {"name": "consumerIndex", "allowUnsafeAccess": true} ] + } +] diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index 32d18a8ac2f..0697ea1737b 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -27,7 +27,6 @@ datastax-java-driver { Compression must be set before opening a session, it cannot be changed at runtime. - Two algorithms are supported out of the box: [LZ4](https://github.com/jpountz/lz4-java) and [Snappy](http://google.github.io/snappy/). 
The LZ4 implementation is a good first choice; it offers fallback implementations in case native libraries fail to load and @@ -79,6 +78,8 @@ Dependency: ``` +**Important: Snappy is not supported when building a [GraalVM native image](../graalvm).** + Always double-check the exact Snappy version needed; you can find it in the driver's [parent POM]. [parent POM]: https://search.maven.org/search?q=g:com.datastax.oss%20AND%20a:java-driver-parent&core=gav \ No newline at end of file diff --git a/manual/core/graalvm/README.md b/manual/core/graalvm/README.md new file mode 100644 index 00000000000..04983f46ecc --- /dev/null +++ b/manual/core/graalvm/README.md @@ -0,0 +1,315 @@ +## Using the driver in GraalVM native images + +### Quick overview + +* [GraalVM Native images](https://www.graalvm.org/reference-manual/native-image/) using the driver + can be built with no additional configuration starting with driver 4.13.0. +* But extra configurations are required in a few cases: + * When using [reactive programming](../reactive); + * When using [Jackson](../integration#Jackson); + * When using LZ4 [compression](../compression/); + * Depending on the [logging backend](../logging) in use. +* DSE-specific features: + * [Geospatial types](../dse/geotypes) are supported. + * [DSE Graph](../dse/graph) is not officially supported, although it may work. +* The [shaded jar](../shaded_jar) is not officially supported, although it may work. + +----- + +### Concepts + +Starting with version 4.13.0, the driver ships with [embedded GraalVM configuration files] that +allow GraalVM native images including the driver to be built without hassle, barring a few +exceptions and caveats listed below. + +[embedded GraalVM configuration files]:https://www.graalvm.org/reference-manual/native-image/BuildConfiguration/#embedding-a-configuration-file + +### Classes instantiated by reflection + +The driver instantiates its components by reflection. 
The actual classes that will be instantiated +in this way need to be registered for reflection. All built-in implementations of various driver +components, such as `LoadBalancingPolicy` or `TimestampGenerator`, are automatically registered for +reflection, along with a few other internal components tha are also instantiated by reflection. +_You don't need to manually register any of these built-in implementations_. + +But if you intend to use a custom implementation in lieu of a driver built-in class, then it is your +responsibility to register that custom implementation for reflection. + +For example, assuming that you have the following load balancing policy implementation: + +```java + +package com.example.app; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; + +public class CustomLoadBalancingPolicy extends DefaultLoadBalancingPolicy { + + public CustomLoadBalancingPolicy(DriverContext context, String profileName) { + super(context, profileName); + } + // rest of class omitted for brevity +} +``` + +And assuming that you declared the above class in your application.conf file as follows: + +```hocon +datastax-java-driver.basic{ + load-balancing-policy.class = com.example.app.CustomLoadBalancingPolicy +} +``` + +Then you will have to register that class for reflection: + +1. Create the following reflection.json file, or add the entry to an existing file: + +```json +[ + { "name": "com.example.app.CustomLoadBalancingPolicy", "allPublicConstructors": true } +] +``` + +2. When invoking the native image builder, add a `-H:ReflectionConfigurationFiles=reflection.json` + flag and point it to the file created above. + +Note: some frameworks allow you to simplify the registration process. 
For example, Quarkus offers +the `io.quarkus.runtime.annotations.RegisterForReflection` annotation that you can use to annotate +your class: + +```java +@RegisterForReflection +public class CustomLoadBalancingPolicy extends DefaultLoadBalancingPolicy { + //... +} +``` + +In this case, no other manual configuration is required for the above class to be correctly +registered for reflection. + +### Configuration resources + +The default driver [configuration](../configuration) mechanism is based on the TypeSafe Config +library. TypeSafe Config looks for a few classpath resources when initializing the configuration: +`reference.conf`, `application.conf`, `application.json`, `application.properties`. _These classpath +resources are all automatically included in the native image: you should not need to do it +manually_. See [Accessing Resources in Native Images] for more information on how classpath +resources are handled in native images. + +[Accessing Resources in Native Images]: https://www.graalvm.org/reference-manual/native-image/Resources/ + +### Configuring the logging backend + +When configuring [logging](../logging), the choice of a backend must be considered carefully, as +most logging backends resort to reflection during their configuration phase. + +By default, GraalVM native images provide support for the java.util.logging (JUL) backend. See +[this page](https://www.graalvm.org/reference-manual/native-image/Logging/) for more information. + +For other logging backends, please refer to the logging library documentation to find out if GraalVM +native images are supported. + +### Using reactive-style programming + +The [reactive execution model](../reactive) is compatible with GraalVM native images, but the +following configurations must be added: + +1. Create the following reflection.json file, or add the entry to an existing file: + +```json +[ + { "name": "org.reactivestreams.Publisher" } +] +``` + +2. 
When invoking the native image builder, add a `-H:ReflectionConfigurationFiles=reflection.json` + flag and point it to the file created above. + +### Using the Jackson JSON library + +[Jackson](https://github.com/FasterXML/jackson) is used in [a few places](../integration#Jackson) in +the driver, but is an optional dependency; if you intend to use Jackson, the following +configurations must be added: + +1. Create the following reflection.json file, or add these entries to an existing file: + +```json +[ + { "name": "com.fasterxml.jackson.core.JsonParser" }, + { "name": "com.fasterxml.jackson.databind.ObjectMapper" } +] +``` + +**Important**: when using the shaded jar – which is not officially supported on GraalVM native +images, see below for more details – replace the above entries with the below ones: + +```json +[ + { "name": "com.datastax.oss.driver.shaded.fasterxml.jackson.core.JsonParser" }, + { "name": "com.datastax.oss.driver.shaded.fasterxml.jackson.databind.ObjectMapper" } +] +``` +2. When invoking the native image builder, add a `-H:ReflectionConfigurationFiles=reflection.json` + flag and point it to the file created above. + +### Enabling compression + +When using [compression](../compression/), only LZ4 can be enabled in native images. **Snappy +compression is not supported.** + +In order for LZ4 compression to work in a native image, the following additional GraalVM +configuration is required: + +1. 
Create the following reflection.json file, or add these entries to an existing file: + +```json +[ + { "name" : "net.jpountz.lz4.LZ4Compressor" }, + { + "name" : "net.jpountz.lz4.LZ4JNICompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaSafeCompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaUnsafeCompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4HCJavaSafeCompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4HCJavaUnsafeCompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaSafeSafeDecompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaSafeFastDecompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaUnsafeSafeDecompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + }, + { + "name" : "net.jpountz.lz4.LZ4JavaUnsafeFastDecompressor", + "allDeclaredConstructors": true, + "allPublicFields": true + } +] +``` + +2. When invoking the native image builder, add a `-H:ReflectionConfigurationFiles=reflection.json` + flag and point it to the file created above. + +### Native calls + +The driver performs a few [native calls](../integration#Native-libraries) using +[JNR](https://github.com/jnr). + +Starting with driver 4.7.0, native calls are also possible in a GraalVM native image, without any +extra configuration. + +### Using DataStax Enterprise (DSE) features + +#### DSE Geospatial types + +DSE [Geospatial types](../dse/geotypes) are supported on GraalVM native images; the following +configurations must be added: + +1. 
Create the following reflection.json file, or add the entry to an existing file: + +```json +[ + { "name": "com.esri.core.geometry.ogc.OGCGeometry" } +] +``` + +**Important**: when using the shaded jar – which is not officially supported on GraalVM native +images, as stated above – replace the above entry with the below one: + +```json +[ + { "name": "com.datastax.oss.driver.shaded.esri.core.geometry.ogc.OGCGeometry" } +] +``` + +2. When invoking the native image builder, add a `-H:ReflectionConfigurationFiles=reflection.json` + flag and point it to the file created above. + +#### DSE Graph + +**[DSE Graph](../dse/graph) is not officially supported on GraalVM native images.** + +The following configuration can be used as a starting point for users wishing to build a native +image for a DSE Graph application. DataStax does not guarantee however that the below configuration +will work in all cases. If the native image build fails, a good option is to use GraalVM's +[Tracing Agent](https://www.graalvm.org/reference-manual/native-image/Agent/) to understand why. + +1. 
Create the following reflection.json file, or add these entries to an existing file: + +```json +[ + { "name": "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0" }, + { "name": "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal" }, + { "name": "org.apache.tinkerpop.gremlin.structure.Graph", + "allDeclaredConstructors": true, + "allPublicConstructors": true, + "allDeclaredMethods": true, + "allPublicMethods": true + }, + { "name": "org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph", + "allDeclaredConstructors": true, + "allPublicConstructors": true, + "allDeclaredMethods": true, + "allPublicMethods": true + }, + { "name": " org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph", + "allDeclaredConstructors": true, + "allPublicConstructors": true, + "allDeclaredMethods": true, + "allPublicMethods": true + }, + { "name": "org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource", + "allDeclaredConstructors": true, + "allPublicConstructors": true, + "allDeclaredMethods": true, + "allPublicMethods": true + } +] +``` + +2. When invoking the native image builder, add the following flags: + +``` +-H:ReflectionConfigurationFiles=reflection.json +--initialize-at-build-time=org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerIoRegistryV3d0 +--initialize-at-build-time=org.apache.tinkerpop.shaded.jackson.databind.deser.std.StdDeserializer +``` + +### Using the shaded jar + +**The [shaded jar](../shaded_jar) is not officially supported in a GraalVM native image.** + +However, it has been reported that the shaded jar can be included in a GraalVM native image as a +drop-in replacement for the regular driver jar for simple applications, without any extra GraalVM +configuration. 
diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 6a41fa4751d..6faf87cf4ad 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -6,6 +6,8 @@ * explanations about [driver dependencies](#driver-dependencies) and when they can be manually excluded. +Note: guidelines to build a GraalVM native image can be found [here](../graalvm). + ----- ### Which artifact(s) should I use? diff --git a/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties new file mode 100644 index 00000000000..4971c6cb7ee --- /dev/null +++ b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties @@ -0,0 +1 @@ +Args = -H:ReflectionConfigurationResources=${.}/reflection.json diff --git a/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/reflection.json b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/reflection.json new file mode 100644 index 00000000000..638cac60af1 --- /dev/null +++ b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/reflection.json @@ -0,0 +1,6 @@ +[ + { + "name": "com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory", + "methods": [ { "name": "<init>", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + } +] diff --git a/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties new file 
mode 100644 index 00000000000..4971c6cb7ee --- /dev/null +++ b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties @@ -0,0 +1 @@ +Args = -H:ReflectionConfigurationResources=${.}/reflection.json diff --git a/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/reflection.json b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/reflection.json new file mode 100644 index 00000000000..6d408897551 --- /dev/null +++ b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/reflection.json @@ -0,0 +1,6 @@ +[ + { + "name": "com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory", + "methods": [ { "name": "<init>", "parameterTypes": [ "com.datastax.oss.driver.api.core.context.DriverContext" ] } ] + } +] diff --git a/pom.xml b/pom.xml index aa72a4c7c0f..ac1097f76b3 100644 --- a/pom.xml +++ b/pom.xml @@ -664,6 +664,7 @@ limitations under the License.]]> **/pom.xml + src/**/native-image.properties **/src/main/config/ide/** From 5009dc8607d5eef843c21a845e96af863f76527a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 16:41:51 +0200 Subject: [PATCH 763/979] Update changelog for 4.13.0 release --- changelog/README.md | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 7bae766a602..48a16bc6f28 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,19 +2,20 @@ -### 4.13.0 (in progress) +### 4.13.0 - [improvement] JAVA-2940: Add GraalVM native image build configurations - [improvement] JAVA-2953: Promote ProgrammaticPlainTextAuthProvider to the public API and add credentials hot-reload -- 
[improvement] JAVA-2951: Accept multiple node state listeners, schema change listeners and request + trackers Merged from 4.12.x: - [bug] JAVA-2949: Provide mapper support for CompletionStage<Void> - [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck -### 4.12.1 (in progress) +### 4.12.1 Merged from 4.11.x: @@ -37,7 +38,7 @@ Merged from 4.11.x: - [bug] JAVA-2943: Prevent session leak with wrong keyspace name - [bug] JAVA-2938: OverloadedException message is misleading -### 4.11.3 (in progress) +### 4.11.3 - [bug] JAVA-2949: Provide mapper support for CompletionStage<Void> - [bug] JAVA-2950: Remove reference to Reflection class from DependencyCheck From 706e03c5b46d5c1de801c0228e409ff6727bc045 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 16:48:00 +0200 Subject: [PATCH 764/979] Update manual for 4.13.0 release --- README.md | 4 +- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 16 ++-- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 7 +- manual/core/load_balancing/README.md | 12 +-- manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/non_blocking/README.md | 44 
+++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 36 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 18 ++--- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 24 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 28 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md 
| 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- 83 files changed, 409 insertions(+), 408 deletions(-) diff --git a/README.md b/README.md index 19b09697684..b61f2464900 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.12.0](https://github.com/datastax/java-driver/tree/4.12.0).* +[4.13.0](https://github.com/datastax/java-driver/tree/4.13.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. 
* [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.12 +[API docs]: https://docs.datastax.com/en/drivers/java/4.13 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 7e85cd2d091..947502cd5b0 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md 
b/manual/core/README.md index 4648e53f4bd..f6418b2739f 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 2407f589f78..11194b92e6b 100644 --- 
a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). -[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 50005ffb7a8..e2bac78d755 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 2a5ee302e09..7843ae2d1f8 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -227,13 +227,13 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html [ProgrammaticPlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html -[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html 
-[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 357bd6c58b7..c31df62a3d6 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.12.0 + 4.13.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 
4.12.0 + 4.13.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 656115b18a3..ef1c6e801b2 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfig.html 
+[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 2cb88323cd1..3b33639059e 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. 
-[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 3413f9d5934..ca3c20d7538 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC 
+[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- +[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 927cb3e372d..4a0cc9044dc 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even 
then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol 
specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index ad081b246c4..7116ff79886 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index aa8409db760..19d5312b6df 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 15784ba833a..443841b09ba 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index bef059fe467..3157f46d01a 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 1d3891b0994..6be3bed590b 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 8bdf9799b63..58173506349 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index f10bcae0721..0c24efda673 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 6faf87cf4ad..23380063cdf 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -545,6 +545,7 @@ Here are the recommended TinkerPop versions for each driver version:

          Driver versionTinkerPop version
          4.12.03.4.10
          4.11.03.4.10
          4.10.03.4.9
          4.9.03.4.8
          + @@ -644,6 +645,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index d74546d6e99..2997d427106 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -426,12 +426,12 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- -[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 117c802c966..f160575729a 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 455313758c6..4548f95aeee 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -129,17 +129,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file 
+[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index f9e1f762a23..52711d11bd1 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -321,16 +321,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git a/manual/core/metadata/token/README.md 
b/manual/core/metadata/token/README.md index db830c64bae..ea9f4a99832 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 4d43687f792..08d551765dc 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- 
+[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 61a6618f757..dbffcff1fdb 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block.
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 7c33aa5b43a..9f753a77181 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 46bd567aa0f..a26067dbed5 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's 
internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index c57808c692a..84dee0ec80e 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index 6851fdd7e57..bde4b0722e9 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index d70a9bd4094..bd32969f3c0 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a 
demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index 3ec3c8ef3cc..b5b6b3e10b4 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index e1c9c1d6345..af66851c748 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -123,5 +123,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index eb24ead3ad1..ec07dc2cea2 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -231,21 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/retry/RetryVerdict.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 6e0e86f8606..fe044e93df7 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index d21bf0e7839..91bf0fc1878 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index b8a0621bc1d..156933f6649 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 1cb479ce2bc..917482cae3e 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 
+61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index cd5295805f4..03c2584e2c9 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index e7e7d4007d4..b795e5f138f 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index a07fa66126b..3b87d7521f3 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index cf5273b4e0f..5b35bfd1750 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index eb69b160a99..ced51bca1a5 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 7864ac147a1..f15dc0c69d9 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index cfecdf3f8f8..b6222f65439 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 90f7bff59ff..06c6006a903 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index c0697d439c8..981c1e7292b 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index e016cfec768..a583718a16e 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index 520eeb773b7..89363b45bf0 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example. 
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index 6d2ed670fc8..6174c274fb7 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 9a15815acb0..6ee6dfc0aab 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 7d39e8b7038..179f61a0b22 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 290180038e1..c067d049106 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 63a82e96e14..9995e073ef8 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -130,15 +130,15 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html 
-[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html [Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index c8680e9b6ab..2445490b8a1 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
79337a093a9..624cf20f311 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index 80be5c17652..e94fed266c1 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperException.html 
+[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index bde13b37a41..1ccc0f8e7b7 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -113,18 +113,18 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[Row]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 61e2212079e..acf9de0fea4 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index f5a3e8d33f4..f42b41e4632 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -160,20 +160,20 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 5fc441b9bf8..0929daaf847 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -112,8 +112,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 4ca17cfbb96..57d73956e64 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 8ffa06a1b4c..8920e8f75f7 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
41f8a2a2a4f..2d584bed282 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Query.html 
+[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 032381e39bf..3c723aacb73 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 670625960cb..d0ee00538ad 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 6f2e6e2afd6..fe508fca68b 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 3d8682c32cf..1517b5a106a 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 0fbcf27ea77..c39d97869cb 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 2d2c66fd526..74f87c5b204 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 6e6cb03829e..9f0193825d5 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index b285a68acef..ea012439f11 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index a2aea551f47..99d7b893b22 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index f9dfc5d0af8..508ff737eac 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` 
-[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 8e1ed2d8125..ecbd9c4559c 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index b772b9b0d6d..ccdbcea3d1f 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 54354907a5e..89c0a687801 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 12b2577e21c..da5ea6ff015 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index fe0791c970d..734bcf65d8e 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index b0c13bc438f..1ca33a1c2f5 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index a0995a4b516..28169a31d9f 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 88ffcf47de8..3a6c7609b5d 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 4771641e40b..0d6c8a40644 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.12/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From ab58fae478442cc3b0bb9cf814e456c106355103 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 16:51:34 +0200 Subject: [PATCH 765/979] Update upgrade guide for 4.13.0 release --- upgrade_guide/README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index c2fc9c13981..0be2af16a1e 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -2,6 +2,17 @@ ### 4.13.0 +#### Enhanced support for GraalVM native images + +[JAVA-2940](https://datastax-oss.atlassian.net/browse/JAVA-2940) introduced an enhanced support for +building GraalVM native images. + +If you were building a native image for your application, please verify your native image builder +configuration. Most of the extra configuration required until now is likely to not be necessary +anymore. + +Refer to this [manual page](../manual/core/graalvm) for details. 
+ #### Registration of multiple listeners and trackers [JAVA-2951](https://datastax-oss.atlassian.net/browse/JAVA-2951) introduced the ability to register From abd011c795a99a9401b7c15380f371aaa541b94c Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 16:55:46 +0200 Subject: [PATCH 766/979] [maven-release-plugin] prepare release 4.13.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index e47c97e29e3..2f0b8dca529 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-core-shaded - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-mapper-processor - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-mapper-runtime - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-query-builder - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-test-infra - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-metrics-micrometer - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss java-driver-metrics-microprofile - 4.13.0-SNAPSHOT + 4.13.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index ad368ee9259..cfe61ce8327 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 
34d1b7478b6..7be614c5be2 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 2a579b0440a..45f971071ed 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index b95827307ad..95ffacf5e38 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.13.0-SNAPSHOT + 4.13.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index c5b6cc77594..bf9cae4d6ec 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index cc14f3d2e8b..064faf59375 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 68d76446bf1..5038565cad2 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8c2755c7c62..02b556a4770 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 
78c04909c55..30e8c909b4c 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 12e8b9641dc..719bba02ef8 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index ac1097f76b3..18c7eb6d79a 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.13.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 3ae527917a1..c44abaeaec8 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index d29046b4934..21cb1c93a4c 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0-SNAPSHOT + 4.13.0 java-driver-test-infra bundle From 522df5ab020b285206f50231aac1511181d078bd Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 30 Jul 2021 16:55:57 +0200 Subject: [PATCH 767/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- 
integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 2f0b8dca529..a228074bf7a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.13.0 + 4.13.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index cfe61ce8327..b84dd855234 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 7be614c5be2..13dfaca11fa 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 45f971071ed..0da803804e4 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 
95ffacf5e38..130204c6918 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.13.0 + 4.13.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index bf9cae4d6ec..20fd13ea85d 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 064faf59375..84b0e39d825 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 5038565cad2..bf9be82718c 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 02b556a4770..82b113dd1a8 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 30e8c909b4c..af31fdc5fc2 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 719bba02ef8..7cf7adf20bc 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-osgi-tests jar diff --git 
a/pom.xml b/pom.xml index 18c7eb6d79a..9de5ceb0e30 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.13.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index c44abaeaec8..069af2819e9 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 21cb1c93a4c..6578516f10e 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.0 + 4.13.1-SNAPSHOT java-driver-test-infra bundle From 2e1250635f0713d523f14bb50e8cd6f21bebc9fc Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 10 Aug 2021 11:57:56 +0200 Subject: [PATCH 768/979] Minor fixes to doc pages --- manual/core/graalvm/README.md | 10 +++++----- manual/core/non_blocking/README.md | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/manual/core/graalvm/README.md b/manual/core/graalvm/README.md index 04983f46ecc..6ee713a2b30 100644 --- a/manual/core/graalvm/README.md +++ b/manual/core/graalvm/README.md @@ -1,9 +1,9 @@ -## Using the driver in GraalVM native images +## GraalVM native images ### Quick overview -* [GraalVM Native images](https://www.graalvm.org/reference-manual/native-image/) using the driver - can be built with no additional configuration starting with driver 4.13.0. 
+* [GraalVM native images](https://www.graalvm.org/reference-manual/native-image/) can be built with + no additional configuration starting with driver 4.13.0. * But extra configurations are required in a few cases: * When using [reactive programming](../reactive); * When using [Jackson](../integration#Jackson); @@ -132,7 +132,7 @@ following configurations must be added: ### Using the Jackson JSON library -[Jackson](https://github.com/FasterXML/jackson) is used in [a few places](../integration#Jackson) in +[Jackson](https://github.com/FasterXML/jackson) is used in [a few places](../integration#jackson) in the driver, but is an optional dependency; if you intend to use Jackson, the following configurations must be added: @@ -223,7 +223,7 @@ configuration is required: ### Native calls -The driver performs a few [native calls](../integration#Native-libraries) using +The driver performs a few [native calls](../integration#native-libraries) using [JNR](https://github.com/jnr). Starting with driver 4.7.0, native calls are also possible in a GraalVM native image, without any diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index dbffcff1fdb..8876022f2f0 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -29,7 +29,7 @@ primitives such as atomic variables and CAS (compare-and-swap) instructions. A further distinction is generally established between "lock-free" and "wait-free" algorithms: the former ones allow progress of the overall system, while the latter ones allow each thread to make -progress at any time. This distinction is however rather theoretical and is outside of the scope of +progress at any time. This distinction is however rather theoretical and is outside the scope of this document. 
[lock-free]: https://www.baeldung.com/lock-free-programming From 85b527faffa3fafca16a05d763a7be6de9e0a58a Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 10 Aug 2021 11:58:44 +0200 Subject: [PATCH 769/979] Wait until metrics are asserted --- .../continuous/ContinuousPagingITBase.java | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java index c2bc7de9791..58a76fe1c61 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingITBase.java @@ -16,6 +16,7 @@ package com.datastax.dse.driver.api.core.cql.continuous; import static org.assertj.core.api.Assertions.assertThat; +import static org.awaitility.Awaitility.await; import com.codahale.metrics.Timer; import com.datastax.dse.driver.api.core.config.DseDriverOption; @@ -30,6 +31,7 @@ import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; import com.datastax.oss.driver.api.core.metrics.Metrics; import com.tngtech.java.junit.dataprovider.DataProvider; +import java.time.Duration; import java.util.UUID; public abstract class ContinuousPagingITBase { @@ -111,15 +113,33 @@ protected void validateMetrics(CqlSession session) { .as("assert metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES) present") .isPresent(); Timer messages = (Timer) metrics.getNodeMetric(node, DefaultNodeMetric.CQL_MESSAGES).get(); - assertThat(messages.getCount()).as("assert messages.getCount() >= 0").isGreaterThan(0); - assertThat(messages.getMeanRate()).as("assert messages.getMeanRate() >= 0").isGreaterThan(0); + await() + .atMost(Duration.ofSeconds(5)) + .untilAsserted( + () -> { + assertThat(messages.getCount()) + 
.as("assert messages.getCount() >= 0") + .isGreaterThan(0); + assertThat(messages.getMeanRate()) + .as("assert messages.getMeanRate() >= 0") + .isGreaterThan(0); + }); assertThat(metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS)) .as("assert metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS) present") .isPresent(); Timer requests = (Timer) metrics.getSessionMetric(DseSessionMetric.CONTINUOUS_CQL_REQUESTS).get(); - assertThat(requests.getCount()).as("assert requests.getCount() >= 0").isGreaterThan(0); - assertThat(requests.getMeanRate()).as("assert requests.getMeanRate() >= 0").isGreaterThan(0); + await() + .atMost(Duration.ofSeconds(5)) + .untilAsserted( + () -> { + assertThat(requests.getCount()) + .as("assert requests.getCount() >= 0") + .isGreaterThan(0); + assertThat(requests.getMeanRate()) + .as("assert requests.getMeanRate() >= 0") + .isGreaterThan(0); + }); } public static class Options { From ec387044d1a6b77c6b00ac28d90cdff9ad44bb93 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 10 Aug 2021 14:03:48 +0200 Subject: [PATCH 770/979] Raise test timeouts --- .../oss/driver/core/tracker/RequestLoggerIT.java | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java index 41a462ae1c8..252c22c5f7b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestLoggerIT.java @@ -214,7 +214,7 @@ public void should_log_successful_request() { sessionRuleRequest.session().execute(QUERY); // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); assertThat(loggingEventCaptor.getValue().getFormattedMessage()) 
.contains("Success", "[0 values]", QUERY) .matches(WITH_PER_REQUEST_PREFIX); @@ -229,7 +229,7 @@ public void should_log_successful_request_with_defaults() { sessionRuleDefaults.session().execute(QUERY); // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); assertThat(loggingEventCaptor.getValue().getFormattedMessage()) .contains("Success", "[0 values]", QUERY) .matches(WITH_PER_REQUEST_PREFIX); @@ -249,7 +249,7 @@ public void should_log_failed_request_with_stack_trace() { } // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); ILoggingEvent log = loggingEventCaptor.getValue(); assertThat(log.getFormattedMessage()) .contains("Error", "[0 values]", QUERY) @@ -272,7 +272,7 @@ public void should_log_failed_request_with_stack_trace_with_defaults() { } // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); ILoggingEvent log = loggingEventCaptor.getValue(); assertThat(log.getFormattedMessage()) .contains("Error", "[0 values]", QUERY, ServerError.class.getName()) @@ -295,7 +295,7 @@ public void should_log_failed_request_without_stack_trace() { } // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); ILoggingEvent log = loggingEventCaptor.getValue(); assertThat(log.getFormattedMessage()) .contains("Error", "[0 values]", QUERY, ServerError.class.getName()) @@ -314,7 +314,7 @@ public void should_log_slow_request() { .execute(SimpleStatement.builder(QUERY).setExecutionProfileName("low-threshold").build()); // Then - verify(appender, timeout(500)).doAppend(loggingEventCaptor.capture()); + verify(appender, timeout(5000)).doAppend(loggingEventCaptor.capture()); 
assertThat(loggingEventCaptor.getValue().getFormattedMessage()) .contains("Slow", "[0 values]", QUERY) .matches(WITH_PER_REQUEST_PREFIX); @@ -359,7 +359,7 @@ public void should_log_failed_nodes_on_successful_request() { .execute(SimpleStatement.builder(QUERY).setExecutionProfileName("sorting-lbp").build()); // Then - verify(appender, new Timeout(500, VerificationModeFactory.times(3))) + verify(appender, new Timeout(5000, VerificationModeFactory.times(3))) .doAppend(loggingEventCaptor.capture()); List events = loggingEventCaptor.getAllValues(); assertThat(events.get(0).getFormattedMessage()) @@ -392,7 +392,7 @@ public void should_log_successful_nodes_on_successful_request() { sessionRuleNode.session().execute(QUERY); // Then - verify(appender, new Timeout(500, VerificationModeFactory.times(2))) + verify(appender, new Timeout(5000, VerificationModeFactory.times(2))) .doAppend(loggingEventCaptor.capture()); List events = loggingEventCaptor.getAllValues(); assertThat(events.get(0).getFormattedMessage()) From 2a20841f160aa7fa4d429881bc359b538fc94488 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 10 Aug 2021 14:37:44 +0200 Subject: [PATCH 771/979] Raise server-side timeouts for TokenIT tests --- .../core/metadata/ByteOrderedTokenIT.java | 9 ++++++++- .../core/metadata/ByteOrderedTokenVnodesIT.java | 4 ++++ .../driver/core/metadata/Murmur3TokenIT.java | 9 ++++++++- .../core/metadata/Murmur3TokenVnodesIT.java | 9 ++++++++- .../oss/driver/core/metadata/RandomTokenIT.java | 9 ++++++++- .../core/metadata/RandomTokenVnodesIT.java | 4 ++++ .../oss/driver/core/metadata/TokenITBase.java | 17 ++++++++++++++--- 7 files changed, 54 insertions(+), 7 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java index 63473704c8a..28795b6c4c4 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java @@ -36,7 +36,14 @@ public class ByteOrderedTokenIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withNodes(3).withCreateOption("-p ByteOrderedPartitioner").build(); + CustomCcmRule.builder() + .withNodes(3) + .withCreateOption("-p ByteOrderedPartitioner") + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) + .build(); private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java index 62fd20719dd..1009013c734 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java @@ -40,6 +40,10 @@ public class ByteOrderedTokenVnodesIT extends TokenITBase { .withNodes(3) .withCreateOption("-p ByteOrderedPartitioner") .withCreateOption("--vnodes") + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) .build(); private static final SessionRule SESSION_RULE = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java index e3a6faaaa44..9b3e4d77447 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenIT.java @@ -29,7 +29,14 @@ public class Murmur3TokenIT extends TokenITBase { - private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withNodes(3).build(); + private static final CustomCcmRule CCM_RULE = + CustomCcmRule.builder() + .withNodes(3) + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) + .build(); private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java index 28c219e8a91..3dcf8f88b17 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java @@ -35,7 +35,14 @@ public class Murmur3TokenVnodesIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withNodes(3).withCreateOption("--vnodes").build(); + CustomCcmRule.builder() + .withNodes(3) + .withCreateOption("--vnodes") + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) + .build(); private static final SessionRule SESSION_RULE = 
SessionRule.builder(CCM_RULE) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java index 4134fd8c8a2..075679ea1d1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenIT.java @@ -30,7 +30,14 @@ public class RandomTokenIT extends TokenITBase { private static final CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withNodes(3).withCreateOption("-p RandomPartitioner").build(); + CustomCcmRule.builder() + .withNodes(3) + .withCreateOption("-p RandomPartitioner") + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) + .build(); private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java index 08b226bdc51..1545bd46104 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java @@ -39,6 +39,10 @@ public class RandomTokenVnodesIT extends TokenITBase { .withNodes(3) .withCreateOption("-p RandomPartitioner") .withCreateOption("--vnodes") + .withCassandraConfiguration("range_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("read_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("write_request_timeout_in_ms", 45_000) + .withCassandraConfiguration("request_timeout_in_ms", 45_000) .build(); private static 
final SessionRule SESSION_RULE = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java index 4ebe1bc4e7b..4ca72c94be3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/TokenITBase.java @@ -102,6 +102,7 @@ public void should_be_consistent_with_range_queries() { int key = 1; ProtocolVersion protocolVersion = session().getContext().getProtocolVersion(); ByteBuffer serializedKey = TypeCodecs.INT.encodePrimitive(key, protocolVersion); + assertThat(serializedKey).isNotNull(); Set replicas = tokenMap.getReplicas(KS1, serializedKey); assertThat(replicas).hasSize(1); Node replica = replicas.iterator().next(); @@ -133,7 +134,7 @@ public void should_be_consistent_with_range_queries() { private List rangeQuery(PreparedStatement rangeStatement, TokenRange range) { List rows = Lists.newArrayList(); for (TokenRange subRange : range.unwrap()) { - Statement statement = rangeStatement.bind(subRange.getStart(), subRange.getEnd()); + Statement statement = rangeStatement.bind(subRange.getStart(), subRange.getEnd()); session().execute(statement).forEach(rows::add); } return rows; @@ -156,10 +157,11 @@ private List rangeQuery(PreparedStatement rangeStatement, TokenRange range) public void should_get_token_from_row_and_set_token_in_query() { ResultSet rs = session().execute("SELECT token(i) FROM foo WHERE i = 1"); Row row = rs.one(); + assertThat(row).isNotNull(); // Get by index: Token token = row.getToken(0); - assertThat(token).isInstanceOf(expectedTokenType); + assertThat(token).isNotNull().isInstanceOf(expectedTokenType); // Get by name: the generated column name depends on the Cassandra version. 
String tokenColumnName = @@ -173,10 +175,12 @@ public void should_get_token_from_row_and_set_token_in_query() { // Bind with setToken by index row = session().execute(pst.bind().setToken(0, token)).one(); + assertThat(row).isNotNull(); assertThat(row.getInt(0)).isEqualTo(1); // Bind with setToken by name row = session().execute(pst.bind().setToken("partition key token", token)).one(); + assertThat(row).isNotNull(); assertThat(row.getInt(0)).isEqualTo(1); } @@ -190,17 +194,20 @@ public void should_get_token_from_row_and_set_token_in_query() { @Test public void should_get_token_from_row_and_set_token_in_query_with_binding_and_aliasing() { Row row = session().execute("SELECT token(i) AS t FROM foo WHERE i = 1").one(); + assertThat(row).isNotNull(); Token token = row.getToken("t"); - assertThat(token).isInstanceOf(expectedTokenType); + assertThat(token).isNotNull().isInstanceOf(expectedTokenType); PreparedStatement pst = session().prepare("SELECT * FROM foo WHERE token(i) = :myToken"); row = session().execute(pst.bind().setToken("myToken", token)).one(); + assertThat(row).isNotNull(); assertThat(row.getInt(0)).isEqualTo(1); row = session() .execute(SimpleStatement.newInstance("SELECT * FROM foo WHERE token(i) = ?", token)) .one(); + assertThat(row).isNotNull(); assertThat(row.getInt(0)).isEqualTo(1); } @@ -216,6 +223,7 @@ public void should_get_token_from_row_and_set_token_in_query_with_binding_and_al @Test(expected = IllegalArgumentException.class) public void should_raise_exception_when_getting_token_on_non_token_column() { Row row = session().execute("SELECT i FROM foo WHERE i = 1").one(); + assertThat(row).isNotNull(); row.getToken(0); } @@ -237,11 +245,13 @@ public void should_expose_consistent_ranges() { } private void checkRanges(Session session) { + assertThat(session.getMetadata().getTokenMap()).isPresent(); TokenMap tokenMap = session.getMetadata().getTokenMap().get(); checkRanges(tokenMap.getTokenRanges()); } private void checkRanges(Session session, 
CqlIdentifier keyspace, int replicationFactor) { + assertThat(session.getMetadata().getTokenMap()).isPresent(); TokenMap tokenMap = session.getMetadata().getTokenMap().get(); List allRangesWithDuplicates = Lists.newArrayList(); @@ -347,6 +357,7 @@ public void should_create_token_from_partition_key() { TokenMap tokenMap = getTokenMap(); Row row = session().execute("SELECT token(i) FROM foo WHERE i = 1").one(); + assertThat(row).isNotNull(); Token expected = row.getToken(0); ProtocolVersion protocolVersion = session().getContext().getProtocolVersion(); From ebe7a5e0c2c22c64d429909a7d4f7ca45af7a456 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 10 Aug 2021 14:40:39 +0200 Subject: [PATCH 772/979] Reduce weeknights schedule with JDK 8 --- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index cc8e0afc7da..0f9a28265d3 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -356,8 +356,8 @@ pipeline { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron.matcher(env.BRANCH_NAME).matches() ? 
""" # Every weeknight (Monday - Friday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0, DSE 6.7 and DSE 6.8 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7 dse-6.8;CI_SCHEDULE_JABBA_VERSION=1.8 + ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 ### JDK11 tests against 3.11, 4.0 and DSE 6.8 H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 # Every weekend (Sunday) around 12:00 PM noon From 99f441583854d1a9b7be9b8899170da5f7d1813f Mon Sep 17 00:00:00 2001 From: Madhavan Date: Thu, 12 Aug 2021 07:26:15 -0400 Subject: [PATCH 773/979] Update AstraDB links (#1569) --- manual/cloud/README.md | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/manual/cloud/README.md b/manual/cloud/README.md index 161a8308c73..f483d9ae918 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -7,14 +7,13 @@ driver is configured in an application and that you will need to obtain a *secur ### Prerequisites 1. [Download][Download Maven] and [install][Install Maven] Maven. -2. Create an Astra database on [GCP][Create an Astra database - GCP] or - [AWS][Create an Astra database - AWS]; alternatively, have a team member provide access to their - Astra database (instructions for [GCP][Access an Astra database - GCP] and - [AWS][Access an Astra database - AWS]) to obtain database connection details. +2. Create an Astra database on [AWS/Azure/GCP][Create an Astra database - AWS/Azure/GCP]; + alternatively, have a team member provide access to their + Astra database (instructions for [AWS/Azure/GCP][Access an Astra database - AWS/Azure/GCP]) to + obtain database connection details. 3. 
Download the secure connect bundle (instructions for - [GCP][Download the secure connect bundle - GCP] and - [AWS][Download the secure connect bundle - AWS]), that contains connection information such as - contact points and certificates. + [AWS/Azure/GCP][Download the secure connect bundle - AWS/Azure/GCP]), that contains connection + information such as contact points and certificates. ### Procedure @@ -125,11 +124,8 @@ public class Main { [Download Maven]: https://maven.apache.org/download.cgi [Install Maven]: https://maven.apache.org/install.html -[Create an Astra database - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudGettingStarted.html#dscloudCreateCluster -[Create an Astra database - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudGettingStarted.html#dscloudCreateCluster -[Access an Astra database - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudShareClusterDetails.html -[Access an Astra database - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudShareClusterDetails.html -[Download the secure connect bundle - GCP]: https://docs.datastax.com/en/astra/gcp/doc/dscloud/astra/dscloudObtainingCredentials.html -[Download the secure connect bundle - AWS]: https://docs.datastax.com/en/astra/aws/doc/dscloud/astra/dscloudObtainingCredentials.html +[Create an Astra database - AWS/Azure/GCP]: https://docs.datastax.com/en/astra/docs/creating-your-astra-database.html +[Access an Astra database - AWS/Azure/GCP]: https://docs.datastax.com/en/astra/docs/obtaining-database-credentials.html#_sharing_your_secure_connect_bundle +[Download the secure connect bundle - AWS/Azure/GCP]: https://docs.datastax.com/en/astra/docs/obtaining-database-credentials.html [minimal project structure]: ../core/integration/#minimal-project-structure [driver documentation]: ../core/configuration/ From 364d9e8b59723201516c3c221085242f0b5d8695 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 13 Aug 2021
14:07:18 +0200 Subject: [PATCH 774/979] Minor javadoc fixes --- .../oss/driver/api/core/connection/BusyConnectionException.java | 2 +- .../driver/internal/core/adminrequest/AdminRequestHandler.java | 2 +- .../com/datastax/oss/driver/internal/core/pool/ChannelPool.java | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java b/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java index bbe513351ba..a215bc3347a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/connection/BusyConnectionException.java @@ -25,7 +25,7 @@ * requests. * *

          This might happen under heavy load. The driver will automatically try the next node in the - * query plan. Therefore the only way that the client can observe this exception is as part of a + * query plan. Therefore, the only way that the client can observe this exception is as part of a * {@link AllNodesFailedException}. */ public class BusyConnectionException extends DriverException { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java index 60e033c344d..148c1099a04 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/adminrequest/AdminRequestHandler.java @@ -47,7 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** Handles the lifecyle of an admin request (such as a node refresh or schema refresh query). */ +/** Handles the lifecycle of an admin request (such as a node refresh or schema refresh query). */ @ThreadSafe public class AdminRequestHandler implements ResponseCallback { private static final Logger LOG = LoggerFactory.getLogger(AdminRequestHandler.class); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelPool.java b/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelPool.java index 24891972763..42b7c9e90c8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelPool.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/pool/ChannelPool.java @@ -143,7 +143,6 @@ public boolean isInvalidKeyspace() { * request path, and we want to avoid complex check-then-act semantics; therefore this might * race and return a channel that is already closed, or {@code null}. In those cases, it is up * to the caller to fail fast and move to the next node. - *

          There is no need to return the channel. */ public DriverChannel next() { return channels.next(); From 4dc7de39a1a88fff52d647a3757238fc672f4ae2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 13 Aug 2021 14:08:13 +0200 Subject: [PATCH 775/979] Make field final in StreamIdGenerator --- .../oss/driver/internal/core/channel/StreamIdGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java index 934eeefc061..451bb7cff6e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/StreamIdGenerator.java @@ -34,7 +34,7 @@ class StreamIdGenerator { private final int maxAvailableIds; // unset = available, set = borrowed (note that this is the opposite of the 3.x implementation) private final BitSet ids; - private AtomicInteger availableIds; + private final AtomicInteger availableIds; StreamIdGenerator(int maxAvailableIds) { this.maxAvailableIds = maxAvailableIds; From 387f75714da6e23f0f68cea2815a5e9088a3ae74 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Tue, 17 Aug 2021 11:55:59 +0200 Subject: [PATCH 776/979] Raise test timeouts in ShutdownIT --- .../java/com/datastax/oss/driver/core/session/ShutdownIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java index 9f84c9144de..0e5dd780cec 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java @@ -120,7 +120,7 @@ public void should_fail_requests_when_session_is_closed() throws 
Exception { } TimeUnit.MILLISECONDS.sleep(100); session.forceCloseAsync(); - assertThat(gotSessionClosedError.await(1, TimeUnit.SECONDS)) + assertThat(gotSessionClosedError.await(10, TimeUnit.SECONDS)) .as("Expected to get the 'Session is closed' error shortly after shutting down") .isTrue(); requestExecutor.shutdownNow(); From b0b640e4fe9b73df96407f501718be4dba7d441e Mon Sep 17 00:00:00 2001 From: Ankit Barsainya <71305148+AnkitBarsainyaPSL@users.noreply.github.com> Date: Wed, 18 Aug 2021 15:14:33 +0530 Subject: [PATCH 777/979] Update log message to inform about incorrect CL for read repair (#1571) --- .../oss/driver/api/core/servererrors/ReadTimeoutException.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java index cac44b4983d..21afa245e13 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.java @@ -45,7 +45,8 @@ public ReadTimeoutException( this( coordinator, String.format( - "Cassandra timeout during read query at consistency %s (%s)", + "Cassandra timeout during read query at consistency %s (%s). 
" + + "In case this was generated during read repair, the consistency level is not representative of the actual consistency.", consistencyLevel, formatDetails(received, blockFor, dataPresent)), consistencyLevel, received, From 6da12e31e60d132b1f50a3004d45397c4db912ae Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 19 Aug 2021 11:14:53 +0200 Subject: [PATCH 778/979] Raise verify timeout in ChannelPoolTestBase --- .../oss/driver/internal/core/pool/ChannelPoolResizeTest.java | 2 +- .../oss/driver/internal/core/pool/ChannelPoolTestBase.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java index da1c5e3d2a2..acd7be61d3e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolResizeTest.java @@ -125,7 +125,7 @@ public void should_shrink_during_reconnection() throws Exception { factoryHelper.waitForCalls(node, 2); - // Pool should have shrinked back to 2. We keep the most recent channels so 1 and 2 get closed. + // Pool should have shrunk back to 2. We keep the most recent channels so 1 and 2 get closed. 
inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelOpened(node)); inOrder.verify(eventBus, VERIFY_TIMEOUT.times(2)).fire(ChannelEvent.channelClosed(node)); inOrder.verify(eventBus, VERIFY_TIMEOUT).fire(ChannelEvent.reconnectionStopped(node)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java index cc18e7d2842..7e9b1aa5b5e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/pool/ChannelPoolTestBase.java @@ -50,7 +50,7 @@ abstract class ChannelPoolTestBase { /** How long we wait when verifying mocks for async invocations */ - protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(500); + protected static final VerificationWithTimeout VERIFY_TIMEOUT = timeout(2000); @Mock protected InternalDriverContext context; @Mock private DriverConfig config; From 2a1e37a21cc6da8d9f78c2001a1c1c9edd9c79a7 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Thu, 19 Aug 2021 11:16:45 +0200 Subject: [PATCH 779/979] Raise sleep time in ShutdownIT --- .../java/com/datastax/oss/driver/core/session/ShutdownIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java index 0e5dd780cec..6a1bac5cc9b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ShutdownIT.java @@ -118,7 +118,7 @@ public void should_fail_requests_when_session_is_closed() throws Exception { } }); } - TimeUnit.MILLISECONDS.sleep(100); + TimeUnit.MILLISECONDS.sleep(1000); session.forceCloseAsync(); 
assertThat(gotSessionClosedError.await(10, TimeUnit.SECONDS)) .as("Expected to get the 'Session is closed' error shortly after shutting down") From 0a7b1c86d077b74ceb203bd356b214727ab416e1 Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Tue, 24 Aug 2021 01:13:50 -0700 Subject: [PATCH 780/979] JAVA-2959: Don't throw NoNodeAvailableException when all connections busy (#1570) For cases in which there are no connections available to send requests to a Node in the query plan, collect the error rather than silently skipping over the node. The error will be thrown as part of an AllNodesFailedException if all nodes fail. This can happen when we've saturated the max in-flight requests across all nodes or when the request is directed to a particular node and it has no connections available (or all its connections are saturated). Note that in the latter case we used to throw a NoNodeAvailableException but we now throw AllNodesFailedException. --- changelog/README.md | 4 ++ .../ContinuousRequestHandlerBase.java | 11 ++-- .../core/graph/GraphRequestHandler.java | 3 + .../api/core/NodeUnavailableException.java | 60 +++++++++++++++++++ .../internal/core/cql/CqlPrepareHandler.java | 3 + .../internal/core/cql/CqlRequestHandler.java | 3 + ...ousCqlRequestHandlerNodeTargetingTest.java | 13 +++- .../core/cql/CqlPrepareHandlerTest.java | 36 +++++++++++ .../core/cql/CqlRequestHandlerTest.java | 42 +++++++++++++ upgrade_guide/README.md | 9 +++ 10 files changed, 178 insertions(+), 6 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/NodeUnavailableException.java diff --git a/changelog/README.md b/changelog/README.md index 48a16bc6f28..9b3c2f4a609 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,10 @@ +### 4.14.0 (in progress) + +- [improvement] JAVA-2959: Don't throw NoNodeAvailableException when all connections busy + ### 4.13.0 - [improvement] JAVA-2940: Add GraalVM native image build configurations diff --git 
a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index f97bc684e37..91fc1ef2f3a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -348,6 +349,8 @@ private void sendRequest( channel = session.getChannel(node, logPrefix); if (channel != null) { break; + } else { + recordError(node, new NodeUnavailableException(node)); } } } @@ -455,6 +458,10 @@ CompletableFuture getPendingResult() { } } + private void recordError(@NonNull Node node, @NonNull Throwable error) { + errors.add(new AbstractMap.SimpleEntry<>(node, error)); + } + /** * Handles the interaction with a single node in the query plan. 
* @@ -1433,10 +1440,6 @@ private void reenableAutoReadIfNeeded() { // ERROR HANDLING - private void recordError(@NonNull Node node, @NonNull Throwable error) { - errors.add(new AbstractMap.SimpleEntry<>(node, error)); - } - private void trackNodeError(@NonNull Node node, @NonNull Throwable error) { if (nodeErrorReported.compareAndSet(false, true)) { long latencyNanos = System.nanoTime() - this.messageStartTimeNanos; diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index ca84f1c634a..a710f447512 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -253,6 +254,8 @@ private void sendRequest( channel = session.getChannel(node, logPrefix); if (channel != null) { break; + } else { + recordError(node, new NodeUnavailableException(node)); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/NodeUnavailableException.java b/core/src/main/java/com/datastax/oss/driver/api/core/NodeUnavailableException.java new file mode 100644 index 00000000000..69c3dca0691 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/NodeUnavailableException.java @@ -0,0 +1,60 @@ +/* + * Copyright DataStax, Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core; + +import com.datastax.oss.driver.api.core.metadata.Node; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +/** + * Indicates that a {@link Node} was selected in a query plan, but it had no connection available. + * + *

          A common reason to encounter this error is when the configured number of connections per node + * and requests per connection is not high enough to absorb the overall request rate. This can be + * mitigated by tuning the following options: + * + *

            + *
          • {@code advanced.connection.pool.local.size}; + *
          • {@code advanced.connection.pool.remote.size}; + *
          • {@code advanced.connection.max-requests-per-connection}. + *
          + * + * See {@code reference.conf} for more details. + * + *

          Another possibility is when you are trying to direct a request {@linkplain + * com.datastax.oss.driver.api.core.cql.Statement#setNode(Node) to a particular node}, but that node + * has no connections available. + */ +public class NodeUnavailableException extends DriverException { + + private final Node node; + + public NodeUnavailableException(Node node) { + super("No connection was available to " + node, null, null, true); + this.node = Objects.requireNonNull(node); + } + + @NonNull + public Node getNode() { + return node; + } + + @Override + @NonNull + public DriverException copy() { + return new NodeUnavailableException(node); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index d60a6c65260..68fa386b12b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -18,6 +18,7 @@ import com.datastax.oss.driver.api.core.AllNodesFailedException; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -188,6 +189,8 @@ private void sendRequest(PrepareRequest request, Node node, int retryCount) { channel = session.getChannel(node, logPrefix); if (channel != null) { break; + } else { + recordError(node, new NodeUnavailableException(node)); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 743d11c9ad4..dba2dc38460 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DriverException; import com.datastax.oss.driver.api.core.DriverTimeoutException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; @@ -251,6 +252,8 @@ private void sendRequest( channel = session.getChannel(node, logPrefix); if (channel != null) { break; + } else { + recordError(node, new NodeUnavailableException(node)); } } } diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java index 3d560d964b0..f245d8599ae 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousCqlRequestHandlerNodeTargetingTest.java @@ -25,13 +25,17 @@ import com.datastax.dse.driver.DseTestFixtures; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.cql.continuous.ContinuousAsyncResultSet; -import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.session.Request; import 
com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.internal.core.cql.RequestHandlerTestHarness; import com.datastax.oss.driver.internal.core.metadata.LoadBalancingPolicyWrapper; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.util.List; +import java.util.Map; import java.util.concurrent.CompletionStage; import org.junit.Test; import org.mockito.InOrder; @@ -67,7 +71,12 @@ public void should_fail_if_targeted_node_not_available(DseProtocolVersion versio assertThatStage(resultSetFuture) .isFailed( error -> { - assertThat(error).isInstanceOf(NoNodeAvailableException.class); + assertThat(error).isInstanceOf(AllNodesFailedException.class); + Map> errors = + ((AllNodesFailedException) error).getAllErrors(); + assertThat(errors).hasSize(1); + List nodeErrors = errors.values().iterator().next(); + assertThat(nodeErrors).singleElement().isInstanceOf(NodeUnavailableException.class); invocations .verify(loadBalancingPolicy, never()) .newQueryPlan(any(Request.class), anyString(), any(Session.class)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java index 0bafdb41305..f0bf319e581 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandlerTest.java @@ -25,6 +25,8 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ColumnDefinitions; @@ -47,6 +49,7 @@ import com.datastax.oss.protocol.internal.response.result.RowsMetadata; 
import com.datastax.oss.protocol.internal.util.Bytes; import java.nio.ByteBuffer; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletionStage; import org.junit.Before; @@ -228,6 +231,39 @@ public void should_not_retry_initial_prepare_if_unrecoverable_error() { } } + @Test + public void should_fail_if_nodes_unavailable() { + RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder(); + try (RequestHandlerTestHarness harness = + harnessBuilder.withEmptyPool(node1).withEmptyPool(node2).build()) { + CompletionStage prepareFuture = + new CqlPrepareHandler(PREPARE_REQUEST, harness.getSession(), harness.getContext(), "test") + .handle(); + assertThatStage(prepareFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(AllNodesFailedException.class); + Map> allErrors = + ((AllNodesFailedException) error).getAllErrors(); + assertThat(allErrors).hasSize(2); + assertThat(allErrors) + .hasEntrySatisfying( + node1, + nodeErrors -> + assertThat(nodeErrors) + .singleElement() + .isInstanceOf(NodeUnavailableException.class)); + assertThat(allErrors) + .hasEntrySatisfying( + node2, + nodeErrors -> + assertThat(nodeErrors) + .singleElement() + .isInstanceOf(NodeUnavailableException.class)); + }); + } + } + @Test public void should_fail_if_retry_policy_ignores_error() { RequestHandlerTestHarness.Builder harnessBuilder = diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java index 5f41fc42f62..42cd492aad9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerTest.java @@ -21,9 +21,11 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.datastax.oss.driver.api.core.AllNodesFailedException; import 
com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.NoNodeAvailableException; +import com.datastax.oss.driver.api.core.NodeUnavailableException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import com.datastax.oss.driver.api.core.cql.BoundStatement; @@ -32,6 +34,7 @@ import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.session.RepreparePayload; import com.datastax.oss.driver.internal.core.util.concurrent.CapturingTimer.CapturedTimeout; import com.datastax.oss.protocol.internal.request.Prepare; @@ -43,6 +46,8 @@ import java.time.Duration; import java.util.Collections; import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.CompletionStage; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -105,6 +110,43 @@ public void should_fail_if_no_node_available() { } } + @Test + public void should_fail_if_nodes_unavailable() { + RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder(); + try (RequestHandlerTestHarness harness = + harnessBuilder.withEmptyPool(node1).withEmptyPool(node2).build()) { + CompletionStage resultSetFuture = + new CqlRequestHandler( + UNDEFINED_IDEMPOTENCE_STATEMENT, + harness.getSession(), + harness.getContext(), + "test") + .handle(); + assertThatStage(resultSetFuture) + .isFailed( + error -> { + assertThat(error).isInstanceOf(AllNodesFailedException.class); + Map> allErrors = + ((AllNodesFailedException) error).getAllErrors(); + assertThat(allErrors).hasSize(2); + assertThat(allErrors) + .hasEntrySatisfying( + node1, + nodeErrors -> + 
assertThat(nodeErrors) + .singleElement() + .isInstanceOf(NodeUnavailableException.class)); + assertThat(allErrors) + .hasEntrySatisfying( + node2, + nodeErrors -> + assertThat(nodeErrors) + .singleElement() + .isInstanceOf(NodeUnavailableException.class)); + }); + } + } + @Test public void should_time_out_if_first_node_takes_too_long_to_respond() throws Exception { RequestHandlerTestHarness.Builder harnessBuilder = RequestHandlerTestHarness.builder(); diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 0be2af16a1e..e48a75ceb4e 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,14 @@ ## Upgrade guide +### 4.14.0 + +#### AllNodesFailedException instead of NoNodeAvailableException in certain cases + +[JAVA-2959](https://datastax-oss.atlassian.net/browse/JAVA-2959) changed the behavior for when a +request cannot be executed because all nodes tried were busy. Previously you would get back a +`NoNodeAvailableException` but you will now get back an `AllNodesFailedException` where the +`getAllErrors` map contains a `NodeUnavailableException` for that node. 
+ ### 4.13.0 #### Enhanced support for GraalVM native images From d954af9d59c6493c78cbbcdfbada89c1e341087a Mon Sep 17 00:00:00 2001 From: Jeff DiNoto Date: Wed, 5 Jan 2022 14:07:14 -0500 Subject: [PATCH 781/979] Fossa scanning workflow (4.x) (#1578) --- .github/workflows/dep-lic-scan.yaml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/dep-lic-scan.yaml diff --git a/.github/workflows/dep-lic-scan.yaml b/.github/workflows/dep-lic-scan.yaml new file mode 100644 index 00000000000..afb197bf137 --- /dev/null +++ b/.github/workflows/dep-lic-scan.yaml @@ -0,0 +1,23 @@ +name: Dependency and License Scan +on: + push: + branches: + - '4.x' + - '3.x' + paths-ignore: + - 'manual/**' + - 'faq/**' + - 'upgrade_guide/**' + - 'changelog/**' +jobs: + scan-repo: + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v2 + - name: Install Fossa CLI + run: | + curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash -s -- -b . 
+ - name: Scan for dependencies and licenses + run: | + FOSSA_API_KEY=${{ secrets.FOSSA_PUSH_ONLY_API_KEY }} ./fossa analyze From cbb8194c2101cac4521e52fb007af8bfc0601c39 Mon Sep 17 00:00:00 2001 From: Tore Trettenes Date: Fri, 7 Jan 2022 22:25:02 +0100 Subject: [PATCH 782/979] Switch Esri Geometry API to an optional dependency (#1575) Co-authored-by: Tore Trettenes --- core-shaded/pom.xml | 5 +++++ core/pom.xml | 1 + osgi-tests/pom.xml | 4 ++++ 3 files changed, 10 insertions(+) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index b84dd855234..bf49e92d36d 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -88,6 +88,11 @@ org.hdrhistogram HdrHistogram + + com.esri.geometry + esri-geometry-api + true + org.apache.tinkerpop gremlin-core diff --git a/core/pom.xml b/core/pom.xml index 13dfaca11fa..dce03062123 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -90,6 +90,7 @@ com.esri.geometry esri-geometry-api + true org.apache.tinkerpop diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 7cf7adf20bc..281c9ff67b3 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -78,6 +78,10 @@ org.reactivestreams reactive-streams + + com.esri.geometry + esri-geometry-api + org.apache.tinkerpop gremlin-core From 415f789f4e1db7c003ee110153965fc91b70fdf5 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 25 Jan 2022 16:23:51 -0600 Subject: [PATCH 783/979] JAVA-2982: Follow-up to recent PR making ESRI an optional dependency (#1580) --- core-shaded/pom.xml | 42 ++++++------------------------- integration-tests/pom.xml | 5 ++++ manual/core/integration/README.md | 21 ++++++++++++++-- upgrade_guide/README.md | 20 +++++++++++++++ 4 files changed, 51 insertions(+), 37 deletions(-) diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index bf49e92d36d..a88b6987392 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -146,9 +146,6 @@ --> com.datastax.oss:java-driver-core io.netty:* - com.esri.geometry:* - org.json:* - org.codehaus.jackson:* 
com.fasterxml.jackson.core:* @@ -161,18 +158,6 @@ io.netty com.datastax.oss.driver.shaded.netty - - com.esri - com.datastax.oss.driver.shaded.esri - - - org.json - com.datastax.oss.driver.shaded.json - - - org.codehaus.jackson - com.datastax.oss.driver.shaded.codehaus.jackson - com.fasterxml.jackson com.datastax.oss.driver.shaded.fasterxml.jackson @@ -194,24 +179,6 @@ META-INF/** - - com.esri.geometry:* - - META-INF/** - - - - org.json:* - - META-INF/** - - - - org.codehaus.jackson:* - - META-INF/** - - com.fasterxml.jackson.core:* @@ -311,6 +278,11 @@ jctools-core 2.1.2 + + com.esri.geometry + esri-geometry-api + 1.2.1 + @@ -340,7 +312,7 @@ !com.datastax.oss.driver.shaded.netty.*, !com.datastax.oss.driver.shaded.esri.*, !com.datastax.oss.driver.shaded.json.*, !com.datastax.oss.driver.shaded.codehaus.jackson.*, !com.datastax.oss.driver.shaded.fasterxml.jackson.*, + -->!com.datastax.oss.driver.shaded.netty.*, !com.datastax.oss.driver.shaded.fasterxml.jackson.*, - com.datastax.oss.driver.api.core.*, com.datastax.oss.driver.internal.core.*, com.datastax.dse.driver.api.core.*, com.datastax.dse.driver.internal.core.*, com.datastax.oss.driver.shaded.netty.*, com.datastax.oss.driver.shaded.esri.*, com.datastax.oss.driver.shaded.json.*, com.datastax.oss.driver.shaded.codehaus.jackson.*, com.datastax.oss.driver.shaded.fasterxml.jackson.*, + com.datastax.oss.driver.api.core.*, com.datastax.oss.driver.internal.core.*, com.datastax.dse.driver.api.core.*, com.datastax.dse.driver.internal.core.*, com.datastax.oss.driver.shaded.netty.*, com.datastax.oss.driver.shaded.fasterxml.jackson.*, true diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 20fd13ea85d..96e682bd087 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -211,6 +211,11 @@ blockhound-junit-platform test + + com.esri.geometry + esri-geometry-api + test + diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 23380063cdf..7202dac2d94 100644 
--- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -479,8 +479,9 @@ don't use any of the above features, you can safely exclude the dependency: Our [geospatial types](../dse/geotypes/) implementation is based on the [Esri Geometry API](https://github.com/Esri/geometry-api-java). -Esri is declared as a required dependency, but the driver can operate normally without it. If you -don't use geospatial types anywhere in your application, you can exclude the dependency: +For driver versions >= 4.4.0 and < 4.14.0 Esri is declared as a required dependency, +although the driver can operate normally without it. If you don't use geospatial types +anywhere in your application you can exclude the dependency: ```xml @@ -496,6 +497,22 @@ don't use geospatial types anywhere in your application, you can exclude the dep ``` +Starting with driver 4.14.0 Esri has been changed to an optional dependency. You no longer have to +explicitly exclude the dependency if it's not used, but if you do wish to make use of the Esri +library you must now explicitly specify it as a dependency : + +```xml + + com.esri.geometry + esri-geometry-api + ${esri.version} + +``` + +In the dependency specification above you should use any 1.2.x version of Esri (we recommend +1.2.1). These versions are older than the current 2.x versions of the library but they are +guaranteed to be fully compatible with DSE. + #### TinkerPop [Apache TinkerPop™](http://tinkerpop.apache.org/) is used in our [graph API](../dse/graph/), diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index e48a75ceb4e..297c4ca7fda 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -9,6 +9,26 @@ request cannot be executed because all nodes tried were busy. Previously you wou `NoNodeAvailableException` but you will now get back an `AllNodesFailedException` where the `getAllErrors` map contains a `NodeUnavailableException` for that node. 
+#### Esri Geometry dependency now optional + +Previous versions of the Java driver defined a mandatory dependency on the Esri geometry library. +This library offered support for primitive geometric types supported by DSE. As of driver 4.14.0 +this dependency is now optional. + +If you do not use DSE (or if you do but do not use the support for geometric types within DSE) you +should experience no disruption. If you are using geometric types with DSE you'll now need to +explicitly declare a dependency on the Esri library: + +```xml + + com.esri.geometry + esri-geometry-api + ${esri.version} + +``` + +See the [integration](../manual/core/integration/#esri) section in the manual for more details. + ### 4.13.0 #### Enhanced support for GraalVM native images From 85efee414916aea235ae8578fb3343d7733b747f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0tefan=20Miklo=C5=A1ovi=C4=8D?= Date: Fri, 11 Feb 2022 17:07:46 +0100 Subject: [PATCH 784/979] JAVA-2992 include options into DefaultTableMetadata equals and hash methods (#1588) --- .../internal/core/metadata/schema/DefaultTableMetadata.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java index e877e322993..34cb8ac9966 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/DefaultTableMetadata.java @@ -142,7 +142,8 @@ public boolean equals(Object other) { && Objects.equals(this.partitionKey, that.getPartitionKey()) && Objects.equals(this.clusteringColumns, that.getClusteringColumns()) && Objects.equals(this.columns, that.getColumns()) - && Objects.equals(this.indexes, that.getIndexes()); + && Objects.equals(this.indexes, that.getIndexes()) + && 
Objects.equals(this.options, that.getOptions()); } else { return false; } @@ -159,7 +160,8 @@ public int hashCode() { partitionKey, clusteringColumns, columns, - indexes); + indexes, + options); } @Override From 73f9141c44bb69f4c38f6a70649dbb3a4d6b2797 Mon Sep 17 00:00:00 2001 From: Schnikonos Date: Mon, 14 Feb 2022 07:01:40 +0100 Subject: [PATCH 785/979] JAVA-2987 BasicLoadBalancingPolicy remote compute nodes check all liveNodes (#1576) * BasicLoadBalancingPolicy remote compute nodes -> don't presume local dc nodes to be up and among the liveNodes that where found (can happen if the local dc wasn't up when the application started) * BasicLoadBalancingPolicy: follow suggestion from @absurdfarce for remote computeNodes to make the code cleaner and more efficient * BasicLoadBalancingPolicy: fix formatting --- .../BasicLoadBalancingPolicy.java | 30 +++++++------------ 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index 395412272ce..ba1c3e39f30 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -42,6 +42,7 @@ import com.datastax.oss.driver.internal.core.util.collection.LazyQueryPlan; import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; +import com.datastax.oss.driver.shaded.guava.common.base.Predicates; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; @@ -322,30 +323,19 @@ protected Queue maybeAddDcFailover(@Nullable Request request, @NonNull Que @Override protected Object[] computeNodes() { - Object[] dcs = 
liveNodes.dcs().toArray(); - if (dcs.length <= 1) { - return EMPTY_NODES; - } - Object[] remoteNodes = new Object[(dcs.length - 1) * maxNodesPerRemoteDc]; - int remoteNodesLength = 0; - for (Object dc : dcs) { - if (!dc.equals(localDc)) { - Object[] remoteNodesInDc = liveNodes.dc((String) dc).toArray(); - for (int i = 0; i < maxNodesPerRemoteDc && i < remoteNodesInDc.length; i++) { - remoteNodes[remoteNodesLength++] = remoteNodesInDc[i]; - } - } - } + Set dcs = liveNodes.dcs(); + Object[] remoteNodes = + dcs.stream() + .filter(Predicates.not(Predicates.equalTo(localDc))) + .flatMap(dc -> liveNodes.dc(dc).stream().limit(maxNodesPerRemoteDc)) + .toArray(); + + int remoteNodesLength = remoteNodes.length; if (remoteNodesLength == 0) { return EMPTY_NODES; } shuffleHead(remoteNodes, remoteNodesLength); - if (remoteNodes.length == remoteNodesLength) { - return remoteNodes; - } - Object[] trimmedRemoteNodes = new Object[remoteNodesLength]; - System.arraycopy(remoteNodes, 0, trimmedRemoteNodes, 0, remoteNodesLength); - return trimmedRemoteNodes; + return remoteNodes; } }; From 71e57575d2f569101d5fae6062e195165a1e65a6 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 14 Feb 2022 14:53:09 -0600 Subject: [PATCH 786/979] Minor optimization to previous commit --- .../internal/core/loadbalancing/BasicLoadBalancingPolicy.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index ba1c3e39f30..9e8184879ea 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -323,9 +323,8 @@ protected Queue maybeAddDcFailover(@Nullable Request request, @NonNull Que @Override protected Object[] computeNodes() 
{ - Set dcs = liveNodes.dcs(); Object[] remoteNodes = - dcs.stream() + liveNodes.dcs().stream() .filter(Predicates.not(Predicates.equalTo(localDc))) .flatMap(dc -> liveNodes.dc(dc).stream().limit(maxNodesPerRemoteDc)) .toArray(); From c1e3a634077999f42a8dd78ec63b952e70fb144b Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 16 Feb 2022 10:34:05 -0600 Subject: [PATCH 787/979] JAVA-2976: Protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE not supported (4.x) (#1586) --- bom/pom.xml | 2 +- .../CASWriteUnknownException.java | 84 +++++++++++++++++++ .../CDCWriteFailureException.java | 58 +++++++++++++ .../driver/internal/core/cql/Conversions.java | 12 +++ .../internal/core/util/ProtocolUtils.java | 4 + 5 files changed, 159 insertions(+), 1 deletion(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CASWriteUnknownException.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CDCWriteFailureException.java diff --git a/bom/pom.xml b/bom/pom.xml index a228074bf7a..7eb3b983ec4 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -71,7 +71,7 @@ com.datastax.oss native-protocol - 1.5.0 + 1.5.1 com.datastax.oss diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CASWriteUnknownException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CASWriteUnknownException.java new file mode 100644 index 00000000000..856f8951e7e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CASWriteUnknownException.java @@ -0,0 +1,84 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.servererrors; + +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.ConsistencyLevel; +import com.datastax.oss.driver.api.core.DriverException; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.session.Request; +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * The result of a CAS operation is in an unknown state. + * + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, + * CoordinatorException, int)} , which will decide if it is rethrown directly to the client or if + * the request should be retried. If all other tried nodes also fail, this exception will appear in + * the {@link AllNodesFailedException} thrown to the client. + */ +public class CASWriteUnknownException extends QueryConsistencyException { + + public CASWriteUnknownException( + @NonNull Node coordinator, + @NonNull ConsistencyLevel consistencyLevel, + int received, + int blockFor) { + this( + coordinator, + String.format( + "CAS operation result is unknown - proposal was not accepted by a quorum. (%d / %d)", + received, blockFor), + consistencyLevel, + received, + blockFor, + null, + false); + } + + private CASWriteUnknownException( + @NonNull Node coordinator, + @NonNull String message, + @NonNull ConsistencyLevel consistencyLevel, + int received, + int blockFor, + ExecutionInfo executionInfo, + boolean writableStackTrace) { + super( + coordinator, + message, + consistencyLevel, + received, + blockFor, + executionInfo, + writableStackTrace); + } + + @NonNull + @Override + public DriverException copy() { + return new CASWriteUnknownException( + getCoordinator(), + getMessage(), + getConsistencyLevel(), + getReceived(), + getBlockFor(), + getExecutionInfo(), + true); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CDCWriteFailureException.java b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CDCWriteFailureException.java new file mode 100644 index 00000000000..f6f6552e9b4 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/servererrors/CDCWriteFailureException.java @@ -0,0 +1,58 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.servererrors; + +import com.datastax.oss.driver.api.core.AllNodesFailedException; +import com.datastax.oss.driver.api.core.DriverException; +import com.datastax.oss.driver.api.core.cql.ExecutionInfo; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.api.core.retry.RetryPolicy; +import com.datastax.oss.driver.api.core.session.Request; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; + +/** + * An attempt was made to write to a commitlog segment which doesn't support CDC mutations. + * + *

          This exception is processed by {@link RetryPolicy#onErrorResponseVerdict(Request, + * CoordinatorException, int)}, which will decide if it is rethrown directly to the client or if the + * request should be retried. If all other tried nodes also fail, this exception will appear in the + * {@link AllNodesFailedException} thrown to the client. + */ +public class CDCWriteFailureException extends QueryExecutionException { + + public CDCWriteFailureException(@NonNull Node coordinator) { + super(coordinator, "Commitlog does not support CDC mutations", null, false); + } + + public CDCWriteFailureException(@NonNull Node coordinator, @NonNull String message) { + super(coordinator, "Commitlog does not support CDC mutations", null, false); + } + + private CDCWriteFailureException( + @NonNull Node coordinator, + @NonNull String message, + @Nullable ExecutionInfo executionInfo, + boolean writableStackTrace) { + super(coordinator, message, executionInfo, writableStackTrace); + } + + @NonNull + @Override + public DriverException copy() { + return new CDCWriteFailureException(getCoordinator(), getMessage(), getExecutionInfo(), true); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 1031ca01bd2..6a01f364000 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -40,6 +40,8 @@ import com.datastax.oss.driver.api.core.retry.RetryPolicy; import com.datastax.oss.driver.api.core.servererrors.AlreadyExistsException; import com.datastax.oss.driver.api.core.servererrors.BootstrappingException; +import com.datastax.oss.driver.api.core.servererrors.CASWriteUnknownException; +import com.datastax.oss.driver.api.core.servererrors.CDCWriteFailureException; import com.datastax.oss.driver.api.core.servererrors.CoordinatorException; 
import com.datastax.oss.driver.api.core.servererrors.FunctionFailureException; import com.datastax.oss.driver.api.core.servererrors.InvalidConfigurationInQueryException; @@ -75,6 +77,7 @@ import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.Result; import com.datastax.oss.protocol.internal.response.error.AlreadyExists; +import com.datastax.oss.protocol.internal.response.error.CASWriteUnknown; import com.datastax.oss.protocol.internal.response.error.ReadFailure; import com.datastax.oss.protocol.internal.response.error.ReadTimeout; import com.datastax.oss.protocol.internal.response.error.Unavailable; @@ -505,6 +508,15 @@ public static CoordinatorException toThrowable( context.getWriteTypeRegistry().fromName(writeFailure.writeType), writeFailure.numFailures, writeFailure.reasonMap); + case ProtocolConstants.ErrorCode.CDC_WRITE_FAILURE: + return new CDCWriteFailureException(node, errorMessage.message); + case ProtocolConstants.ErrorCode.CAS_WRITE_UNKNOWN: + CASWriteUnknown casFailure = (CASWriteUnknown) errorMessage; + return new CASWriteUnknownException( + node, + context.getConsistencyLevelRegistry().codeToLevel(casFailure.consistencyLevel), + casFailure.received, + casFailure.blockFor); case ProtocolConstants.ErrorCode.SYNTAX_ERROR: return new SyntaxError(node, errorMessage.message); case ProtocolConstants.ErrorCode.UNAUTHORIZED: diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/ProtocolUtils.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/ProtocolUtils.java index 06b47479eee..386fd3be525 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/ProtocolUtils.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/ProtocolUtils.java @@ -95,6 +95,10 @@ public static String errorCodeString(int errorCode) { return "FUNCTION_FAILURE"; case ProtocolConstants.ErrorCode.WRITE_FAILURE: return "WRITE_FAILURE"; + case 
ProtocolConstants.ErrorCode.CDC_WRITE_FAILURE: + return "CDC_WRITE_FAILURE"; + case ProtocolConstants.ErrorCode.CAS_WRITE_UNKNOWN: + return "CAS_WRITE_UNKNOWN"; case ProtocolConstants.ErrorCode.SYNTAX_ERROR: return "SYNTAX_ERROR"; case ProtocolConstants.ErrorCode.UNAUTHORIZED: From 42cb6583f3b87b92ab07b02ba05c07c029d07994 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 16 Feb 2022 11:56:06 -0600 Subject: [PATCH 788/979] Changelog updates --- changelog/README.md | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/changelog/README.md b/changelog/README.md index 9b3c2f4a609..fd6dccdb3a0 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,12 @@ -### 4.14.0 (in progress) +### 4.14.0 +- [bug] JAVA-2976: Support missing protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE +- [bug] JAVA-2987: BasicLoadBalancingPolicy remote computation assumes local DC is up and live +- [bug] JAVA-2992: Include options into DefaultTableMetadata equals and hash methods +- [improvement] JAVA-2982: Switch Esri geometry lib to an optional dependency - [improvement] JAVA-2959: Don't throw NoNodeAvailableException when all connections busy ### 4.13.0 @@ -597,6 +601,17 @@ changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements +## 3.11.1 +- [bug] JAVA-2967: Support native transport peer information for DSE 6.8. +- [bug] JAVA-2976: Support missing protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE. + +## 3.11.0 + +- [improvement] JAVA-2705: Remove protocol v5 beta status, add v6-beta. +- [bug] JAVA-2923: Detect and use Guava's new HostAndPort.getHost method. +- [bug] JAVA-2922: Switch to modern framing format inside a channel handler. +- [bug] JAVA-2924: Consider protocol version unsupported when server requires USE_BETA flag for it. 
+ ### 3.10.2 - [bug] JAVA-2860: Avoid NPE if channel initialization crashes. From ce2cd11ab692375f5c28b2c6548c98c3339e336e Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 17 Feb 2022 10:35:03 -0600 Subject: [PATCH 789/979] Update manual for 4.14.0 release --- README.md | 4 +- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 26 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 18 ++--- manual/core/bom/README.md | 6 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 74 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 7 +- manual/core/load_balancing/README.md | 12 +-- manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 +- manual/core/non_blocking/README.md | 44 +++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 36 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- 
manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 18 ++--- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 24 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 28 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 +- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- 
manual/query_builder/update/README.md | 4 +- 83 files changed, 411 insertions(+), 410 deletions(-) diff --git a/README.md b/README.md index b61f2464900..daffaff95c8 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. -[4.13.0](https://github.com/datastax/java-driver/tree/4.13.0).* +[4.14.0](https://github.com/datastax/java-driver/tree/4.14.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -82,7 +82,7 @@ See the [upgrade guide](upgrade_guide/) for details. * [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.13 +[API docs]: https://docs.datastax.com/en/drivers/java/4.14 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 947502cd5b0..865354f41df 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index f6418b2739f..8cc6b670b99 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -314,18 +314,18 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 11194b92e6b..fe8c967a62a 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). 
-[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index e2bac78d755..1daecfd61ee 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 7843ae2d1f8..45742c3aac2 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -227,13 +227,13 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[ProgrammaticPlainTextAuthProvider]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html -[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- -[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[ProgrammaticPlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index c31df62a3d6..f1741a43372 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.13.0 + 4.14.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.13.0 + 4.14.0 @@ -104,4 +104,4 @@ good idea to extract a property to keep it in sync with the BOM: ``` -[MCOMPILER-391]: https://issues.apache.org/jira/browse/MCOMPILER-391 \ No newline at end of file +[MCOMPILER-391]: https://issues.apache.org/jira/browse/MCOMPILER-391 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index ef1c6e801b2..7dc9fd73afc 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html 
+[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 3b33639059e..d8a9cddc718 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. 
-[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index ca3c20d7538..92a0274577b 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -660,13 +660,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -680,36 +680,36 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY +[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC +[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC 
+[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- +[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index 4a0cc9044dc..a0f38d11f74 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even 
then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol 
specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 7116ff79886..79470ec946b 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 19d5312b6df..bc9669634ee 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. 
[Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 443841b09ba..b7027490b33 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index 3157f46d01a..b7741a0de2b 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. 
See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index 6be3bed590b..fa98525c756 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 58173506349..b191cc7db7c 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. 
-[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 0c24efda673..8eb9135488a 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 7202dac2d94..75695763158 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -562,6 +562,7 @@ Here are the recommended TinkerPop versions for each driver version:

          Driver versionTinkerPop version
          4.13.03.4.10
          4.12.03.4.10
          4.11.03.4.10
          4.10.03.4.9
          + @@ -662,6 +663,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 2997d427106..abc950fe378 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -426,12 +426,12 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- -[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index f160575729a..34358bfdf5e 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 4548f95aeee..1555c2ad1a5 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -129,17 +129,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file 
+[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file diff --git 
a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index 52711d11bd1..c42e56e5735 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -321,16 +321,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git a/manual/core/metadata/token/README.md 
b/manual/core/metadata/token/README.md index ea9f4a99832..475274fd4e4 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 08d551765dc..d64aaccda85 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- 
+[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 8876022f2f0..0960379936b 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block. 
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 9f753a77181..d08d92e8f36 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index a26067dbed5..90b379c59d6 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's 
internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index 84dee0ec80e..d0d2de7d128 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index bde4b0722e9..bc01ce41d4d 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index bd32969f3c0..6073ac4bf98 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a 
demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index b5b6b3e10b4..c383b887fcc 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index af66851c748..4186139c0ba 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -123,5 +123,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index ec07dc2cea2..6e150eb77e4 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -231,21 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/retry/RetryVerdict.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index fe044e93df7..cf6675e9dbf 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 91bf0fc1878..2e293d7e346 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 156933f6649..08646b77609 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 917482cae3e..051a3a35df9 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 
+61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 03c2584e2c9..4100e864660 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index b795e5f138f..29ad525fc42 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index 3b87d7521f3..d4f62e0a207 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 5b35bfd1750..2bb0573ce90 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index ced51bca1a5..f1496cbf176 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index f15dc0c69d9..858b089ffbe 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index b6222f65439..e5bda3947a6 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 06c6006a903..99c34f234c4 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index 981c1e7292b..36db9562032 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index a583718a16e..50809f8a7f2 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index 89363b45bf0..8d6f9621b47 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example.
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index 6174c274fb7..e0d1970c209 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index 6ee6dfc0aab..c55a10cc3ba 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 179f61a0b22..ed5fd69a535 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index c067d049106..6d4ad9854cc 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 9995e073ef8..6c37bb1169b 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -130,15 +130,15 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html 
-[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html [Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index 2445490b8a1..1c2c1f24d3c 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
624cf20f311..2d4a08fb694 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index e94fed266c1..aca631062db 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperException.html 
+[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 1ccc0f8e7b7..d45063eb74c 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -113,18 +113,18 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[Row]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index acf9de0fea4..e4958928c59 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index f42b41e4632..857e176552d 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -160,20 +160,20 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index 0929daaf847..de6701ada50 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -112,8 +112,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 57d73956e64..141b619dd7f 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 8920e8f75f7..610bc9fb4d7 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
2d584bed282..72edc82ea66 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Query.html 
+[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 3c723aacb73..0a81816f9a3 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index d0ee00538ad..966d89c12a6 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index fe508fca68b..ab4369f2016 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 1517b5a106a..01897774c85 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index c39d97869cb..2aff86a6825 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 74f87c5b204..269afba7437 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 9f0193825d5..c4d4990affa 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index ea012439f11..de6d4bacd6c 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 99d7b893b22..fbdf36147e3 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 508ff737eac..c77f6431b73 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` 
-[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index ecbd9c4559c..9c590c1e79e 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index ccdbcea3d1f..a07af3479f4 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 89c0a687801..55c9cc41c07 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index da5ea6ff015..e0ed365375b 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index 734bcf65d8e..c5302843f7d 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 1ca33a1c2f5..42fd0410bcd 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 28169a31d9f..7fb94e0f31f 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 3a6c7609b5d..d877433dd2e 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 0d6c8a40644..1161d093bc4 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.13/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/update/Assignment.html From 02def26cacb7657e9a32266d2a5a92e1d444cbb2 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Feb 2022 09:37:09 -0300 Subject: [PATCH 790/979] [maven-release-plugin] prepare release 4.14.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 7eb3b983ec4..f1d0a71ecf1 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-core-shaded - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-mapper-processor - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-mapper-runtime - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-query-builder - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-test-infra - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-metrics-micrometer - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss java-driver-metrics-microprofile - 4.13.1-SNAPSHOT + 4.14.0 com.datastax.oss diff --git a/core-shaded/pom.xml 
b/core-shaded/pom.xml index a88b6987392..ad58d447bf2 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index dce03062123..2278a520c21 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 0da803804e4..0894e2b9601 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 130204c6918..5abc68f5b4c 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.13.1-SNAPSHOT + 4.14.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 96e682bd087..630f34d9926 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 84b0e39d825..0c4e9a713d0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index bf9be82718c..1a53602bf44 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 82b113dd1a8..427b9ff0ba6 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index af31fdc5fc2..a7161cbb0ea 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 281c9ff67b3..4a0166a3bd6 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 9de5ceb0e30..c7d73108a70 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.14.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 069af2819e9..b9dd3675ddd 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 6578516f10e..03d506407a9 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.13.1-SNAPSHOT + 4.14.0 java-driver-test-infra bundle From 5f46d3ed022fbbec1980344ebe69f8d5d4e1c7f9 Mon Sep 17 00:00:00 2001 From: Alexandre Dutra Date: Fri, 18 Feb 2022 09:37:16 -0300 Subject: [PATCH 791/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f1d0a71ecf1..a3baefbdb71 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.14.0 + 
4.14.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.14.0 + 4.14.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index ad58d447bf2..1b2eddc6a3b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 2278a520c21..c191e688b51 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 0894e2b9601..63267f03d8a 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 5abc68f5b4c..ee9c3a8af63 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.14.0 + 4.14.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 630f34d9926..24d337c8aab 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 0c4e9a713d0..486358cd1c0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 1a53602bf44..e7e3408f39f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 427b9ff0ba6..03f4524837b 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index a7161cbb0ea..3df7a6f929b 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 4a0166a3bd6..2cf8856baee 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c7d73108a70..7caa9668bc2 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.14.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index b9dd3675ddd..867f9249ce9 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 03d506407a9..bd510000d8d 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.0 + 4.14.1-SNAPSHOT java-driver-test-infra bundle From 55e6e40afcd7df2be931285b323379f9ea8c6c82 Mon Sep 17 00:00:00 2001 From: Om Sharma Date: Wed, 6 Apr 2022 20:58:47 +0100 Subject: [PATCH 792/979] JAVA-3003: CVE-2014-4043 Update com.github.jnr:jnr-posix to 3.1.15 to remove the vulnerability (#1589) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 7caa9668bc2..5c3f1c60b3b 100644 --- a/pom.xml +++ b/pom.xml @@ -132,7 +132,7 @@ com.github.jnr jnr-posix - 3.1.5 + 3.1.15 io.dropwizard.metrics From 6304cb725a858e107ac826edfa6c8a6983504d78 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 15 Apr 2022 11:54:43 -0500 Subject: [PATCH 793/979] JAVA-2977: Upgrade Netty to 4.1.75 (4.x) (#1592) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 5c3f1c60b3b..42abf4d5c53 100644 --- a/pom.xml +++ b/pom.xml @@ -47,7 +47,7 @@ 1.4.1 2.1.12 4.1.18 - 4.1.60.Final + 4.1.75.Final 1.2.1 !com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, 
!org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, * + -->!com.google.protobuf.*, !com.jcraft.jzlib.*, !com.ning.compress.*, !lzma.sdk.*, !net.jpountz.xxhash.*, !org.bouncycastle.*, !org.conscrypt.*, !org.apache.commons.logging.*, !org.apache.log4j.*, !org.apache.logging.log4j.*, !org.eclipse.jetty.*, !org.jboss.marshalling.*, !sun.misc.*, !sun.security.*, !com.barchart.udt.*, !com.fasterxml.aalto.*, !com.sun.nio.sctp.*, !gnu.io.*, !org.xml.sax.*, !org.w3c.dom.*, !com.aayushatharva.brotli4j.*, !com.github.luben.zstd.*, * - 3.4.10 + 3.5.3 1.7.26 1.0.3 20210307 - 2.12.2 - 2.12.2 + 2.13.2 + 2.13.2.2 1.9.12 1.1.7.3 @@ -81,7 +81,7 @@ 4.0.3 2.0.0-M19 2.22.2 - 21.0.0.2 + 22.0.0.2 false ${skipTests} From 5d038049cbbf989ff28c73f4e193199e46650d51 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 27 Apr 2022 14:48:31 -0500 Subject: [PATCH 795/979] Update version in docs to 4.14.1 --- README.md | 2 +- changelog/README.md | 6 ++++++ manual/core/bom/README.md | 4 ++-- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index daffaff95c8..dbfcf5c308e 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.14.0](https://github.com/datastax/java-driver/tree/4.14.0).* +[4.14.1](https://github.com/datastax/java-driver/tree/4.14.1).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index fd6dccdb3a0..eb17219a060 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,12 @@ +### 4.14.1 + +- [improvement] JAVA-3013: Upgrade dependencies to address CVEs and other security issues, 4.14.1 edition +- [improvement] JAVA-3003: Update jnr-posix to address CVE-2014-4043 +- [improvement] JAVA-2977: Update Netty to resolve higher-priority CVEs + ### 4.14.0 - [bug] JAVA-2976: Support missing protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index f1741a43372..df16bda4492 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.14.0 + 4.14.1 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.14.0 + 4.14.1 From 33154cfb33d041dc7c6a4cd70bfeb0692ac07145 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 27 Apr 2022 15:02:21 -0500 Subject: [PATCH 796/979] [maven-release-plugin] prepare release 4.14.1 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 
a3baefbdb71..fb0aa7c1ace 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-core-shaded - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-mapper-processor - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-mapper-runtime - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-query-builder - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-test-infra - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-metrics-micrometer - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss java-driver-metrics-microprofile - 4.14.1-SNAPSHOT + 4.14.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 0ff78b2d319..7fb092040cb 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c191e688b51..26bcf0673c0 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 63267f03d8a..475b8a6926e 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index ee9c3a8af63..b9e22507d2d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.14.1-SNAPSHOT + 4.14.1 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 24d337c8aab..29b5945f60a 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 486358cd1c0..9d78d920de6 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index e7e3408f39f..a2efa79426f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 03f4524837b..2683fea3a48 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 3df7a6f929b..e934c102593 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 2cf8856baee..484ab5b0883 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 34ecf8dba71..6137eb7d5d5 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.14.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 867f9249ce9..8d6890db008 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index bd510000d8d..b2ddad07bd6 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1-SNAPSHOT + 4.14.1 java-driver-test-infra bundle From 82d61d8ed68271023b822afb00134b48b1624456 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 27 Apr 2022 15:02:25 -0500 Subject: [PATCH 797/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index fb0aa7c1ace..01fada01ad6 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.14.1 + 
4.14.2-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.14.1 + 4.14.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 7fb092040cb..000e39af026 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 26bcf0673c0..4bdc5f1234c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 475b8a6926e..d65c82bca5d 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index b9e22507d2d..1089928dfc7 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.14.1 + 4.14.2-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 29b5945f60a..9392ae28c66 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 9d78d920de6..3c2c7052247 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index a2efa79426f..062b3381687 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 2683fea3a48..62fcd1a477a 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index e934c102593..fc562771206 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 484ab5b0883..1720a69000d 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 6137eb7d5d5..8120beed458 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.14.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8d6890db008..043b72cee93 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index b2ddad07bd6..0b8354c1bd3 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.1 + 4.14.2-SNAPSHOT java-driver-test-infra bundle From b1cf8a898b57d38cce2c6317a9c45af9cda698e8 Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Tue, 24 May 2022 09:00:05 -0700 Subject: [PATCH 798/979] Update dangling Cluster references in reference.conf to Session (#1596) --- core/src/main/resources/reference.conf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 44e454fe42d..d3c3c4f737f 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1041,7 +1041,7 @@ datastax-java-driver { # an incompatible node joins the cluster later, connection will fail and the driver will force # it down (i.e. never try to connect to it again). # - # You can check the actual version at runtime with Cluster.getContext().getProtocolVersion(). + # You can check the actual version at runtime with Session.getContext().getProtocolVersion(). 
# # Required: no # Modifiable at runtime: no @@ -1925,7 +1925,7 @@ datastax-java-driver { max-events = 20 } - # Options relating to schema metadata (Cluster.getMetadata.getKeyspaces). + # Options relating to schema metadata (Session.getMetadata.getKeyspaces). # This metadata is exposed by the driver for informational purposes, and is also necessary for # token-aware routing. schema { @@ -1934,7 +1934,7 @@ datastax-java-driver { # # Required: yes # Modifiable at runtime: yes, the new value will be used for refreshes issued after the - # change. It can also be overridden programmatically via Cluster.setSchemaMetadataEnabled. + # change. It can also be overridden programmatically via Session.setSchemaMetadataEnabled. # Overridable in a profile: no enabled = true @@ -2013,7 +2013,7 @@ datastax-java-driver { } } - # Whether token metadata (Cluster.getMetadata.getTokenMap) is enabled. + # Whether token metadata (Session.getMetadata.getTokenMap) is enabled. # This metadata is exposed by the driver for informational purposes, and is also necessary for # token-aware routing. # If this is false, it will remain empty, or to the last known value. 
Note that its computation From decabed60caf61412499ef320982b7b74ff45a63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0tefan=20Miklo=C5=A1ovi=C4=8D?= Date: Thu, 26 May 2022 17:46:48 +0200 Subject: [PATCH 799/979] JAVA-2995: CodecNotFoundException doesn't extend DriverException (#1598) Co-authored-by: Alexandre Dutra --- core/revapi.json | 7 ++++ .../type/codec/CodecNotFoundException.java | 11 ++++- upgrade_guide/README.md | 40 +++++++++++++++++++ 3 files changed, 56 insertions(+), 2 deletions(-) diff --git a/core/revapi.json b/core/revapi.json index fe066e27b55..af719e9987e 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -6817,6 +6817,13 @@ "new": "method void com.fasterxml.jackson.databind.type.TypeBase::serialize(com.fasterxml.jackson.core.JsonGenerator, com.fasterxml.jackson.databind.SerializerProvider) throws java.io.IOException", "exception": "com.fasterxml.jackson.core.JsonProcessingException", "justification": "Upgrade deps around JAVA-2977 to address outstanding CVEs" + }, + { + "code": "java.class.nonFinalClassInheritsFromNewClass", + "old": "class com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException", + "new": "class com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException", + "superClass": "com.datastax.oss.driver.api.core.DriverException", + "justification": "Make CodecNotFoundException to extend DriverException as all other driver exceptions do" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/CodecNotFoundException.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/CodecNotFoundException.java index 4d46f253915..9396c2547d9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/CodecNotFoundException.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/CodecNotFoundException.java @@ -15,6 +15,7 @@ */ package com.datastax.oss.driver.api.core.type.codec; +import com.datastax.oss.driver.api.core.DriverException; import 
com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; @@ -22,7 +23,7 @@ import edu.umd.cs.findbugs.annotations.Nullable; /** Thrown when a suitable {@link TypeCodec} cannot be found by the {@link CodecRegistry}. */ -public class CodecNotFoundException extends RuntimeException { +public class CodecNotFoundException extends DriverException { private final DataType cqlType; @@ -48,7 +49,7 @@ public CodecNotFoundException( private CodecNotFoundException( String msg, Throwable cause, DataType cqlType, GenericType javaType) { - super(msg, cause); + super(msg, null, cause, true); this.cqlType = cqlType; this.javaType = javaType; } @@ -62,4 +63,10 @@ public DataType getCqlType() { public GenericType getJavaType() { return javaType; } + + @NonNull + @Override + public DriverException copy() { + return new CodecNotFoundException(getMessage(), getCause(), getCqlType(), getJavaType()); + } } diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 297c4ca7fda..68d03fbfd6e 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,45 @@ ## Upgrade guide +### 4.15.0 + +#### CodecNotFoundException now extends DriverException + +Before [JAVA-2995](https://datastax-oss.atlassian.net/browse/JAVA-2959), `CodecNotFoundException` +was extending `RuntimeException`. This is a discrepancy as all other exceptions extend +`DriverException`, which in turn extends `RuntimeException`. + +This was causing integrators to do workarounds in order to react on all exceptions correctly. + +The change introduced by JAVA-2995 shouldn't be a problem for most users. 
But if your code was using +a logic such as below, it won't compile anymore: + +```java +try { + doSomethingWithDriver(); +} catch(DriverException e) { +} catch(CodecNotFoundException e) { +} +``` + +You need to either reverse the catch order and catch `CodecNotFoundException` first: + +```java +try { + doSomethingWithDriver(); +} catch(CodecNotFoundException e) { +} catch(DriverException e) { +} +``` + +Or catch only `DriverException`: + +```java +try { + doSomethingWithDriver(); +} catch(DriverException e) { +} +``` + ### 4.14.0 #### AllNodesFailedException instead of NoNodeAvailableException in certain cases From 8511588f6efcf5b7d57e3a5876152ce7623b0499 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 26 May 2022 10:52:28 -0500 Subject: [PATCH 800/979] Minor fix to upgrade guide after last commit --- upgrade_guide/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 68d03fbfd6e..125c04db034 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -4,7 +4,7 @@ #### CodecNotFoundException now extends DriverException -Before [JAVA-2995](https://datastax-oss.atlassian.net/browse/JAVA-2959), `CodecNotFoundException` +Before [JAVA-2995](https://datastax-oss.atlassian.net/browse/JAVA-2995), `CodecNotFoundException` was extending `RuntimeException`. This is a discrepancy as all other exceptions extend `DriverException`, which in turn extends `RuntimeException`. 
From a76d38e4ec86c18850381ed4c01afd21e3bd1e6e Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 21 Jun 2022 17:04:39 -0500 Subject: [PATCH 801/979] JAVA-3023: Upgrade Netty to 4.1.77 (4.x) (#1600) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8120beed458..87e983b088d 100644 --- a/pom.xml +++ b/pom.xml @@ -47,7 +47,7 @@ 1.4.1 2.1.12 4.1.18 - 4.1.75.Final + 4.1.77.Final 1.2.1 +### 4.14.2 (in progress) + +- [bug] JAVA-3002 JAVA-3005: Refresh entire node list when a new node is added + ### 4.14.1 - [improvement] JAVA-3013: Upgrade dependencies to address CVEs and other security issues, 4.14.1 edition diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java index 1412168d4f8..298b6b89d94 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManager.java @@ -185,10 +185,10 @@ private void onDebouncedTopologyEvent(TopologyEvent event) { } } else { LOG.debug( - "[{}] Received UP event for unknown node {}, adding it", + "[{}] Received UP event for unknown node {}, refreshing node list", logPrefix, event.broadcastRpcAddress); - metadataManager.addNode(event.broadcastRpcAddress); + metadataManager.refreshNodes(); } break; case SUGGEST_DOWN: diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java index d1b2f47d8dc..2680fa36893 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/NodeStateManagerTest.java @@ -147,7 +147,7 @@ public void should_apply_up_event_if_node_is_unknown_or_down() { } @Test - 
public void should_add_node_if_up_event_and_not_in_metadata() { + public void should_refresh_node_list_if_up_event_and_not_in_metadata() { // Given new NodeStateManager(context); @@ -157,7 +157,7 @@ public void should_add_node_if_up_event_and_not_in_metadata() { // Then verify(eventBus, never()).fire(any(NodeStateEvent.class)); - verify(metadataManager).addNode(NEW_ADDRESS); + verify(metadataManager).refreshNodes(); } @Test From fb24e9767f5c6301574ed7265f30c098f40eed9a Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 13 Jul 2022 14:24:52 -0500 Subject: [PATCH 803/979] JAVA-3021: Update docs to replace withPrimaryKey with withPartitionKey (#1599) --- manual/query_builder/schema/table/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index e0ed365375b..a3000ee70db 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -31,12 +31,12 @@ CreateTable create = createTable("cycling", "cyclist_name").withPartitionKey("id A table with only one column is not so typical however. 
At this point you may provide partition, clustering, regular and static columns using any of the following API methods: -* `withPrimaryKey(name, dataType)` +* `withPartitionKey(name, dataType)` * `withClusteringColumn(name, dataType)` * `withColumn(name, dataType)` * `withStaticColumn(name, dataType)` -Primary key precedence is driven by the order of `withPrimaryKey` and `withClusteringKey` +Primary key precedence is driven by the order of `withPartitionKey` and `withClusteringKey` invocations, for example: From df74d101f7fa1afd9b7ff31e73d81c3e6e226e47 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 13 Jul 2022 12:37:58 -0500 Subject: [PATCH 804/979] Some minor changelog updates --- changelog/README.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/changelog/README.md b/changelog/README.md index 9c18d471ca9..e102cd00767 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,15 +2,19 @@ -### 4.14.2 (in progress) +### 4.15.0 (in progress) -- [bug] JAVA-3002 JAVA-3005: Refresh entire node list when a new node is added +- [bug] JAVA-3021: Update table SchemaBuilder page to replace withPrimaryKey with withPartitionKey +- [bug] JAVA-3005: Node list refresh behavior in 4.x is different from 3.x +- [bug] JAVA-3002: spring-boot app keeps connecting to IP of replaced node +- [improvement] JAVA-3023 Upgrade Netty to 4.1.77 +- [improvement] JAVA-2995: CodecNotFoundException doesn't extend DriverException ### 4.14.1 - [improvement] JAVA-3013: Upgrade dependencies to address CVEs and other security issues, 4.14.1 edition -- [improvement] JAVA-3003: Update jnr-posix to address CVE-2014-4043 - [improvement] JAVA-2977: Update Netty to resolve higher-priority CVEs +- [improvement] JAVA-3003: Update jnr-posix to address CVE-2014-4043 ### 4.14.0 From 43a6ac5b6f0552c40bbeafb1a810108b63432044 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 13 Jul 2022 12:43:37 -0500 Subject: [PATCH 805/979] Upgrade version to 4.15.0-SNAPSHOT --- 
bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 22 insertions(+), 22 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 01fada01ad6..73a9cebdf7e 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 000e39af026..742dc8e2d67 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 4bdc5f1234c..a53d476df0f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index d65c82bca5d..d5f8ad1f64b 100644 --- 
a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 1089928dfc7..35ff3b29856 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 9392ae28c66..f857df7c3d2 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 3c2c7052247..6d5b31a7a00 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 062b3381687..385e0050153 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 62fcd1a477a..9e08a931cfc 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index fc562771206..7569c81ef81 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT ../../ 
java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1720a69000d..2c6b90094ae 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 87e983b088d..3f9cb1227a8 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 043b72cee93..f958bf9f092 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0b8354c1bd3..035436526a6 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.14.2-SNAPSHOT + 4.15.0-SNAPSHOT java-driver-test-infra bundle From 65d2c19c401175dcc6c370560dd5f783d05b05b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0tefan=20Miklo=C5=A1ovi=C4=8D?= Date: Thu, 14 Jul 2022 23:06:02 +0200 Subject: [PATCH 806/979] JAVA-3022: Implementation of address translator for fixed hostname (#1597) --- .../FixedHostNameAddressTranslator.java | 72 +++++++++++++++++++ core/src/main/resources/reference.conf | 3 + .../FixedHostNameAddressTranslatorTest.java | 46 ++++++++++++ 3 files changed, 121 insertions(+) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java new file mode 100644 index 00000000000..80009de8c3c --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java @@ -0,0 +1,72 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; +import com.datastax.oss.driver.api.core.config.DriverOption; +import com.datastax.oss.driver.api.core.context.DriverContext; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.net.InetSocketAddress; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This translator always returns same hostname, no matter what IP address a node has but still + * using its native transport port. + * + *

          The translator can be used for scenarios when all nodes are behind some kind of proxy, and it + * is not tailored for one concrete use case. One can use this, for example, for AWS PrivateLink as + * all nodes would be exposed to consumer - behind one hostname pointing to AWS Endpoint. + */ +public class FixedHostNameAddressTranslator implements AddressTranslator { + + private static final Logger LOG = LoggerFactory.getLogger(FixedHostNameAddressTranslator.class); + + public static final String ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME = + "advanced.address-translator.advertised-hostname"; + + public static DriverOption ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION = + new DriverOption() { + @NonNull + @Override + public String getPath() { + return ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME; + } + }; + + private final String advertisedHostname; + private final String logPrefix; + + public FixedHostNameAddressTranslator(@NonNull DriverContext context) { + logPrefix = context.getSessionName(); + advertisedHostname = + context + .getConfig() + .getDefaultProfile() + .getString(ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION); + } + + @NonNull + @Override + public InetSocketAddress translate(@NonNull InetSocketAddress address) { + final int port = address.getPort(); + LOG.debug("[{}] Resolved {}:{} to {}:{}", logPrefix, address, port, advertisedHostname, port); + return new InetSocketAddress(advertisedHostname, port); + } + + @Override + public void close() {} +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index d3c3c4f737f..f7e2cd76ad1 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -990,6 +990,7 @@ datastax-java-driver { # # The driver provides the following implementations out of the box: # - PassThroughAddressTranslator: returns all addresses unchanged + # - FixedHostNameAddressTranslator: translates all addresses to a specific hostname. 
# - Ec2MultiRegionAddressTranslator: suitable for an Amazon multi-region EC2 deployment where # clients are also deployed in EC2. It optimizes network costs by favoring private IPs over # public ones whenever possible. @@ -997,6 +998,8 @@ datastax-java-driver { # You can also specify a custom class that implements AddressTranslator and has a public # constructor with a DriverContext argument. class = PassThroughAddressTranslator + # This property has to be set only in case you use FixedHostNameAddressTranslator. + # advertised-hostname = mycustomhostname } # Whether to resolve the addresses passed to `basic.contact-points`. diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java new file mode 100644 index 00000000000..d7ee0e2d880 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java @@ -0,0 +1,46 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.context.MockedDriverContextFactory; +import java.net.InetSocketAddress; +import java.util.Optional; +import org.junit.Test; + +public class FixedHostNameAddressTranslatorTest { + + @Test + public void should_translate_address() { + DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); + when(defaultProfile.getString( + FixedHostNameAddressTranslator.ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION)) + .thenReturn("myaddress"); + DefaultDriverContext defaultDriverContext = + MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); + + FixedHostNameAddressTranslator translator = + new FixedHostNameAddressTranslator(defaultDriverContext); + InetSocketAddress address = new InetSocketAddress("192.0.2.5", 6061); + + assertThat(translator.translate(address)).isEqualTo(new InetSocketAddress("myaddress", 6061)); + } +} From 03961c9510ff9a111e07b57c4950a8e8966dfff4 Mon Sep 17 00:00:00 2001 From: Sergey Matvienko <79898499+smatvienko-tb@users.noreply.github.com> Date: Wed, 7 Sep 2022 19:21:37 +0300 Subject: [PATCH 807/979] JAVA-3041: Update Guava session sample code to use ProgrammaticArguments (#1606) Enabling support for Astra in the Guava ListenableFuture examples requires use of the new ProgrammaticArguments constructor/method --- .../guava/api/GuavaSessionBuilder.java | 28 ++----------------- .../guava/internal/GuavaDriverContext.java | 28 ++----------------- 2 files changed, 6 insertions(+), 50 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSessionBuilder.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSessionBuilder.java index 1fe041fcfe7..6356b632f3e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSessionBuilder.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/api/GuavaSessionBuilder.java @@ -18,40 +18,18 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.session.SessionBuilder; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.example.guava.internal.DefaultGuavaSession; import com.datastax.oss.driver.example.guava.internal.GuavaDriverContext; import edu.umd.cs.findbugs.annotations.NonNull; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; public class GuavaSessionBuilder extends SessionBuilder { @Override protected DriverContext buildContext( - DriverConfigLoader configLoader, - List> typeCodecs, - NodeStateListener nodeStateListener, - SchemaChangeListener schemaChangeListener, - RequestTracker requestTracker, - Map localDatacenters, - Map> nodeFilters, - ClassLoader classLoader) { - return new GuavaDriverContext( - configLoader, - typeCodecs, - nodeStateListener, - schemaChangeListener, - requestTracker, - localDatacenters, - nodeFilters, - classLoader); + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new GuavaDriverContext(configLoader, programmaticArguments); } @Override diff 
--git a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java index 3ecf6a1b128..5a8b44be739 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java @@ -18,11 +18,7 @@ import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.PrepareRequest; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.core.metadata.Node; -import com.datastax.oss.driver.api.core.metadata.NodeStateListener; -import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; -import com.datastax.oss.driver.api.core.tracker.RequestTracker; -import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.example.guava.api.GuavaSession; import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; @@ -30,9 +26,6 @@ import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; /** * A Custom {@link DefaultDriverContext} that overrides {@link #getRequestProcessorRegistry()} to @@ -41,23 +34,8 @@ public class GuavaDriverContext extends DefaultDriverContext { public GuavaDriverContext( - DriverConfigLoader configLoader, - List> typeCodecs, - NodeStateListener nodeStateListener, - SchemaChangeListener schemaChangeListener, - RequestTracker 
requestTracker, - Map localDatacenters, - Map> nodeFilters, - ClassLoader classLoader) { - super( - configLoader, - typeCodecs, - nodeStateListener, - schemaChangeListener, - requestTracker, - localDatacenters, - nodeFilters, - classLoader); + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + super(configLoader, programmaticArguments); } @Override From 8439108eb0dc54787819a2c5b81eccedce607fe1 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 14 Sep 2022 22:03:01 +0000 Subject: [PATCH 808/979] Docs + changelog updates --- README.md | 2 +- changelog/README.md | 4 +++- manual/core/bom/README.md | 4 ++-- manual/core/integration/README.md | 1 + 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index dbfcf5c308e..d2930ba6e66 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.14.1](https://github.com/datastax/java-driver/tree/4.14.1).* +[4.15.0](https://github.com/datastax/java-driver/tree/4.15.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index e102cd00767..a0ec32f7a8b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,8 +2,10 @@ -### 4.15.0 (in progress) +### 4.15.0 +- [improvement] JAVA-3041: Update Guava session sample code to use ProgrammaticArguments +- [improvement] JAVA-3022: Implement AddressTranslator for AWS PrivateLink - [bug] JAVA-3021: Update table SchemaBuilder page to replace withPrimaryKey with withPartitionKey - [bug] JAVA-3005: Node list refresh behavior in 4.x is different from 3.x - [bug] JAVA-3002: spring-boot app keeps connecting to IP of replaced node diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index df16bda4492..dc8f12eb599 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.14.1 + 4.15.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.14.1 + 4.15.0 diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 2935e84a485..9900d3b3f90 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -562,6 +562,7 @@ Here are the recommended TinkerPop versions for each driver version:

          Driver versionTinkerPop version
          4.14.03.4.10
          4.13.03.4.10
          4.12.03.4.10
          4.11.03.4.10
          + From f5b4c2586961884d2c0ddaf6189ad4d5836bf293 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 19 Sep 2022 18:51:06 +0000 Subject: [PATCH 809/979] [maven-release-plugin] prepare release 4.15.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 73a9cebdf7e..0893a09571d 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-core-shaded - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-mapper-processor - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-mapper-runtime - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-query-builder - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-test-infra - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-metrics-micrometer - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss java-driver-metrics-microprofile - 4.15.0-SNAPSHOT + 4.15.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 742dc8e2d67..03a5f245e70 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index a53d476df0f..b65c9786b2d 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 
java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index d5f8ad1f64b..e4f9d490a73 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 35ff3b29856..5ebef2b974e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.15.0-SNAPSHOT + 4.15.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index f857df7c3d2..2d8c04f5150 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 6d5b31a7a00..b2e375723c5 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 385e0050153..39f4b187289 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 9e08a931cfc..9407e7d34d5 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 7569c81ef81..b49a633a699 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent 
- 4.15.0-SNAPSHOT + 4.15.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 2c6b90094ae..9727ac3e93a 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 3f9cb1227a8..46d3d197362 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.15.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index f958bf9f092..58f21ea1c9b 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 035436526a6..db568c12334 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0-SNAPSHOT + 4.15.0 java-driver-test-infra bundle From f6cc4d787b1118e4240d584241a97bbbea1c6607 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 19 Sep 2022 18:51:11 +0000 Subject: [PATCH 810/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- 
osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 0893a09571d..837fcde0f89 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.15.0 + 4.15.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 03a5f245e70..0cf5700b493 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index b65c9786b2d..288b0df925a 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index e4f9d490a73..7a077be1553 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 5ebef2b974e..21417669a5f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.15.0 + 4.15.1-SNAPSHOT java-driver-examples 
DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 2d8c04f5150..17e4ff393ef 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index b2e375723c5..81c1a78c95a 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 39f4b187289..70121a8b9ad 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 9407e7d34d5..f1536f493d1 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index b49a633a699..b16f8b17c41 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 9727ac3e93a..1e78ce04975 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 46d3d197362..2ac1715fee9 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT pom DataStax 
Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.15.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 58f21ea1c9b..96559c8588e 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index db568c12334..873de05d8c4 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.0 + 4.15.1-SNAPSHOT java-driver-test-infra bundle From 0a61884b7fcb62a839d15af94686eb1d9d4b440e Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Fri, 7 Oct 2022 12:43:48 -0700 Subject: [PATCH 811/979] Standardize on DataStax vs Datastax since that appears to be more common (#1608) Updates to documentation only --- README.md | 2 +- .../driver/api/core/cql/continuous/ContinuousSession.java | 8 ++++---- .../continuous/reactive/ContinuousReactiveSession.java | 4 ++-- .../datastax/dse/driver/api/core/graph/GraphSession.java | 4 ++-- .../java/com/datastax/oss/driver/api/core/CqlSession.java | 2 +- .../driver/api/core/auth/PlainTextAuthProviderBase.java | 2 +- .../api/core/auth/ProgrammaticPlainTextAuthProvider.java | 4 ++-- .../oss/driver/api/core/config/DefaultDriverOption.java | 2 +- .../oss/driver/api/core/config/TypedDriverOption.java | 2 +- .../oss/driver/api/core/session/SessionBuilder.java | 2 +- .../driver/internal/core/auth/PlainTextAuthProvider.java | 2 +- core/src/main/resources/reference.conf | 4 ++-- manual/core/dse/README.md | 4 ++-- manual/core/integration/README.md | 2 
+- manual/core/metadata/node/README.md | 4 ++-- manual/query_builder/README.md | 4 ++-- upgrade_guide/README.md | 6 +++--- 17 files changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index d2930ba6e66..0b5a61520f5 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Datastax Java Driver for Apache Cassandra® +# DataStax Java Driver for Apache Cassandra® [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java index f98b0a5d1fa..bf05fce92b1 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.java @@ -30,8 +30,8 @@ /** * A session that has the ability to execute continuous paging queries. * - *

          Continuous paging is a new method of streaming bulk amounts of records from Datastax - * Enterprise (DSE) to the Datastax Java Driver, available since DSE 5.1. It is mainly intended to + *

          Continuous paging is a new method of streaming bulk amounts of records from DataStax + * Enterprise (DSE) to the DataStax Java Driver, available since DSE 5.1. It is mainly intended to * be leveraged by DSE * Analytics and Apache Spark™, or by any similar analytics tool that needs to read large @@ -76,7 +76,7 @@ public interface ContinuousSession extends Session { * *

          See {@link ContinuousSession} for more explanations about continuous paging. * - *

          This feature is only available with Datastax Enterprise. Executing continuous queries + *

          This feature is only available with DataStax Enterprise. Executing continuous queries * against an Apache Cassandra© cluster will result in a runtime error. * * @param statement the query to execute. @@ -99,7 +99,7 @@ default ContinuousResultSet executeContinuously(@NonNull Statement statement) * *

          See {@link ContinuousSession} for more explanations about continuous paging. * - *

          This feature is only available with Datastax Enterprise. Executing continuous queries + *

          This feature is only available with DataStax Enterprise. Executing continuous queries * against an Apache Cassandra© cluster will result in a runtime error. * * @param statement the query to execute. diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java index 9661f9bf5a1..20392fb81ec 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.java @@ -51,7 +51,7 @@ public interface ContinuousReactiveSession extends Session { * *

          See {@link ContinuousSession} for more explanations about continuous paging. * - *

          This feature is only available with Datastax Enterprise. Executing continuous queries + *

          This feature is only available with DataStax Enterprise. Executing continuous queries * against an Apache Cassandra® cluster will result in a runtime error. * * @param query the query to execute. @@ -68,7 +68,7 @@ default ContinuousReactiveResultSet executeContinuouslyReactive(@NonNull String * *

          See {@link ContinuousSession} for more explanations about continuous paging. * - *

          This feature is only available with Datastax Enterprise. Executing continuous queries + *

          This feature is only available with DataStax Enterprise. Executing continuous queries * against an Apache Cassandra® cluster will result in a runtime error. * * @param statement the statement to execute. diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java index 2c022ff4d49..6cd447e9c2e 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/graph/GraphSession.java @@ -49,7 +49,7 @@ public interface GraphSession extends Session { * configuration and schema. * * - *

          This feature is only available with Datastax Enterprise. Executing graph queries against an + *

          This feature is only available with DataStax Enterprise. Executing graph queries against an * Apache Cassandra® cluster will result in a runtime error. * * @see GraphResultSet @@ -67,7 +67,7 @@ default GraphResultSet execute(@NonNull GraphStatement graphStatement) { * Executes a graph statement asynchronously (the call returns as soon as the statement was sent, * generally before the result is available). * - *

          This feature is only available with Datastax Enterprise. Executing graph queries against an + *

          This feature is only available with DataStax Enterprise. Executing graph queries against an * Apache Cassandra® cluster will result in a runtime error. * * @see #execute(GraphStatement) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java index 2392182bf67..86eb88e2f0b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/CqlSession.java @@ -32,7 +32,7 @@ * *

            *
          • CQL requests: synchronous, asynchronous or reactive mode; - *
          • requests specific to Datastax Enterprise: graph and continuous paging. + *
          • requests specific to DataStax Enterprise: graph and continuous paging. *
          * * Client applications can use this interface even if they don't need all the features. In diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java index c9241577d6b..9624724b226 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.java @@ -100,7 +100,7 @@ public static class Credentials { * Builds an instance for username/password authentication, and proxy authentication with the * given authorizationId. * - *

          This feature is only available with Datastax Enterprise. If the target server is Apache + *

          This feature is only available with DataStax Enterprise. If the target server is Apache * Cassandra, the authorizationId will be ignored. */ public Credentials( diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java index 7166e72b1f5..50de327884d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.java @@ -67,7 +67,7 @@ public ProgrammaticPlainTextAuthProvider(@NonNull String username, @NonNull Stri * Builds an instance for username/password authentication, and proxy authentication with the * given authorizationId. * - *

          This feature is only available with Datastax Enterprise. If the target server is Apache + *

          This feature is only available with DataStax Enterprise. If the target server is Apache * Cassandra, use {@link #ProgrammaticPlainTextAuthProvider(String, String)} instead, or set the * authorizationId to an empty string. */ @@ -109,7 +109,7 @@ public void setPassword(@NonNull String password) { * *

          The new credentials will be used for all connections initiated after this method was called. * - *

          This feature is only available with Datastax Enterprise. If the target server is Apache + *

          This feature is only available with DataStax Enterprise. If the target server is Apache * Cassandra, this method should not be used. * * @param authorizationId the new authorization id. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index b2fba21d6a3..e7e75d952fa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -793,7 +793,7 @@ public enum DefaultDriverOption implements DriverOption { NETTY_DAEMON("advanced.netty.daemon"), /** - * The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + * The location of the cloud secure bundle used to connect to DataStax Apache Cassandra as a * service. * *

          Value-type: {@link String} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index bce8f923c77..2428be064ce 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -617,7 +617,7 @@ public String toString() { public static final TypedDriverOption NETTY_DAEMON = new TypedDriverOption<>(DefaultDriverOption.NETTY_DAEMON, GenericType.BOOLEAN); /** - * The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + * The location of the cloud secure bundle used to connect to DataStax Apache Cassandra as a * service. */ public static final TypedDriverOption CLOUD_SECURE_CONNECT_BUNDLE = diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index 966372fa20d..2cfd7bf55a0 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -351,7 +351,7 @@ public SelfT withAuthCredentials(@NonNull String username, @NonNull String passw * Configures the session to use DSE plaintext authentication with the given username and * password, and perform proxy authentication with the given authorization id. * - *

          This feature is only available in Datastax Enterprise. If connecting to Apache Cassandra, + *

          This feature is only available in DataStax Enterprise. If connecting to Apache Cassandra, * the authorization id will be ignored; it is recommended to use {@link * #withAuthCredentials(String, String)} instead. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java index 73f320bbcdf..857ef456136 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/auth/PlainTextAuthProvider.java @@ -39,7 +39,7 @@ * username = cassandra * password = cassandra * - * // If connecting to Datastax Enterprise, this additional option allows proxy authentication + * // If connecting to DataStax Enterprise, this additional option allows proxy authentication * // (login as another user or role) * authorization-id = userOrRole * } diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index f7e2cd76ad1..ee83280032e 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -241,7 +241,7 @@ datastax-java-driver { slow-replica-avoidance = true } basic.cloud { - # The location of the cloud secure bundle used to connect to Datastax Apache Cassandra as a + # The location of the cloud secure bundle used to connect to DataStax Apache Cassandra as a # service. # This setting must be a valid URL. # If the protocol is not specified, it is implicitly assumed to be the `file://` protocol, @@ -683,7 +683,7 @@ datastax-java-driver { # # The driver provides two implementations: # - PlainTextAuthProvider: uses plain-text credentials. It requires the `username` and - # `password` options below. When connecting to Datastax Enterprise, an optional + # `password` options below. When connecting to DataStax Enterprise, an optional # `authorization-id` can also be specified. 
# For backward compatibility with previous driver versions, you can also use the class name # "DsePlainTextAuthProvider" for this provider. diff --git a/manual/core/dse/README.md b/manual/core/dse/README.md index e0d41ef38c7..8df3568e1ff 100644 --- a/manual/core/dse/README.md +++ b/manual/core/dse/README.md @@ -1,6 +1,6 @@ ## DSE-specific features -Some driver features only work with Datastax Enterprise: +Some driver features only work with DataStax Enterprise: * [Graph](graph/); * [Geospatial types](geotypes/); @@ -8,4 +8,4 @@ Some driver features only work with Datastax Enterprise: Note that, if you don't use these features, you might be able to exclude certain dependencies in order to limit the number of JARs in your classpath. See the -[Integration](../integration/#driver-dependencies) page. \ No newline at end of file +[Integration](../integration/#driver-dependencies) page. diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 9900d3b3f90..3f42b1dabfe 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -453,7 +453,7 @@ dependency: [Jackson](https://github.com/FasterXML/jackson) is used: -* when connecting to [Datastax Astra](../../cloud/); +* when connecting to [DataStax Astra](../../cloud/); * when Insights monitoring is enabled; * when [Json codecs](../custom_codecs) are being used. diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 1555c2ad1a5..ae66a468fd3 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -51,7 +51,7 @@ but in general it represents the proximity to the client, and `LOCAL` nodes will coordinators. They also influence pooling options. [Node#getExtras()] contains additional free-form properties. This is intended for future evolution -or custom driver extensions. In particular, if the driver is connected to Datastax Enterprise, the +or custom driver extensions. 
In particular, if the driver is connected to DataStax Enterprise, the map will contain additional information under the keys defined in [DseNodeProperties]: ```java @@ -142,4 +142,4 @@ the source code. [NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html [NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html [SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html \ No newline at end of file +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index ab4369f2016..4677fb84145 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -70,13 +70,13 @@ SimpleStatement statement = select.build(); SimpleStatementBuilder builder = select.builder(); ``` -#### Datastax Enterprise +#### DataStax Enterprise The driver provides two additional entry points for DSE-specific queries: [DseQueryBuilder] and [DseSchemaBuilder]. They extend their respective non-DSE counterparts, so anything that is available on the default query builder can also be done with the DSE query builder. -We recommend that you use those classes if you are targeting Datastax Enterprise; they will be +We recommend that you use those classes if you are targeting DataStax Enterprise; they will be enriched in the future if DSE adds custom CQL syntax. 
Currently, the only difference is the support for the `DETERMINISTIC` and `MONOTONIC` keywords when diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 125c04db034..785f51290da 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -372,7 +372,7 @@ least 4.6.1. ### 4.4.0 -Datastax Enterprise support is now available directly in the main driver. There is no longer a +DataStax Enterprise support is now available directly in the main driver. There is no longer a separate DSE driver. #### For Apache Cassandra® users @@ -388,7 +388,7 @@ Apart from that, the only visible change is that DSE-specific features are now e lean, you can exclude some dependencies when you don't use the corresponding DSE features; see the [Integration>Driver dependencies](../manual/core/integration/#driver-dependencies) section. -#### For Datastax Enterprise users +#### For DataStax Enterprise users Adjust your Maven coordinates to use the unified artifact: @@ -514,7 +514,7 @@ We have dropped support for legacy protocol versions v1 and v2. As a result, the compatible with: * **Apache Cassandra®: 2.1 and above**; -* **Datastax Enterprise: 4.7 and above**. +* **DataStax Enterprise: 4.7 and above**. #### Packages From 0351c4fa0297332054114dc730c7ab460650fa55 Mon Sep 17 00:00:00 2001 From: Benoit TELLIER Date: Wed, 16 Nov 2022 02:54:54 +0700 Subject: [PATCH 812/979] Improve IdentifierIndex firstIndexOf performance (#1615) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Improve IdentifierIndex firstIndexOf performance Avoids doing two hasmap lookups and instead does a single one, thus improving performances (CPU) of rougthly 33%. In applications reading lots of rows and not managing manually indexes (which is tricky, boiler plate and error prone), IdentifierIndex::firstIndexOf is typically a CPU hotspots. In reactive applications it might even run on the driver event loop. As such, performances gains here are welcome. 
## Before ``` Benchmark Mode Cnt Score Error Units IdentifierIndexBench.complexGetFirst avgt 5 0.046 ± 0.005 us/op IdentifierIndexBench.simpleGetFirst avgt 5 0.046 ± 0.002 us/op ``` ## After ``` Benchmark Mode Cnt Score Error Units IdentifierIndexBench.complexGetFirst avgt 5 0.028 ± 0.002 us/op IdentifierIndexBench.simpleGetFirst avgt 5 0.030 ± 0.002 us/op ``` * Use ImmutableListMultimap within IdentifierIndex This unlocks massive performance gains upon reads, for a minor slow down at instanciation time, which won't be felt by applications relying on prepared statements. ## Before ``` Benchmark Mode Cnt Score Error Units IdentifierIndexBench.complexAllIndices avgt 5 0.007 ± 0.001 us/op IdentifierIndexBench.complexGetFirst avgt 5 0.026 ± 0.002 us/op IdentifierIndexBench.createComplex avgt 5 0.759 ± 0.059 us/op IdentifierIndexBench.createSimple avgt 5 0.427 ± 0.048 us/op IdentifierIndexBench.simpleAllIndices avgt 5 0.007 ± 0.001 us/op IdentifierIndexBench.simpleGetFirst avgt 5 0.027 ± 0.002 us/op ``` ## After ``` Benchmark Mode Cnt Score Error Units IdentifierIndexBench.complexAllIndices avgt 5 0.004 ± 0.001 us/op IdentifierIndexBench.complexGetFirst avgt 5 0.005 ± 0.001 us/op IdentifierIndexBench.createComplex avgt 5 0.680 ± 0.020 us/op IdentifierIndexBench.createSimple avgt 5 0.538 ± 0.096 us/op IdentifierIndexBench.simpleAllIndices avgt 5 0.004 ± 0.001 us/op IdentifierIndexBench.simpleGetFirst avgt 5 0.005 ± 0.001 us/op ``` --- .../internal/core/data/IdentifierIndex.java | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 29396f08440..74c98ed7f4e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -20,8 +20,11 @@ import 
com.datastax.oss.driver.api.core.data.GettableById; import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.internal.core.util.Strings; +import com.datastax.oss.driver.shaded.guava.common.collect.ArrayListMultimap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.LinkedListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; +import java.util.Iterator; import java.util.List; import java.util.Locale; import net.jcip.annotations.Immutable; @@ -40,9 +43,9 @@ public class IdentifierIndex { private final ListMultimap byCaseInsensitiveName; public IdentifierIndex(List ids) { - this.byId = LinkedListMultimap.create(ids.size()); - this.byCaseSensitiveName = LinkedListMultimap.create(ids.size()); - this.byCaseInsensitiveName = LinkedListMultimap.create(ids.size()); + ImmutableListMultimap.Builder byId = ImmutableListMultimap.builder(); + ImmutableListMultimap.Builder byCaseSensitiveName = ImmutableListMultimap.builder(); + ImmutableListMultimap.Builder byCaseInsensitiveName = ImmutableListMultimap.builder(); int i = 0; for (CqlIdentifier id : ids) { @@ -51,6 +54,10 @@ public IdentifierIndex(List ids) { byCaseInsensitiveName.put(id.asInternal().toLowerCase(Locale.ROOT), i); i += 1; } + + this.byId = byId.build(); + this.byCaseSensitiveName = byCaseSensitiveName.build(); + this.byCaseInsensitiveName = byCaseInsensitiveName.build(); } /** @@ -68,8 +75,8 @@ public List allIndicesOf(String name) { * AccessibleByName}, or -1 if it's not in the list. */ public int firstIndexOf(String name) { - List indices = allIndicesOf(name); - return indices.isEmpty() ? -1 : indices.get(0); + Iterator indices = allIndicesOf(name).iterator(); + return indices.hasNext() ? -1 : indices.next(); } /** Returns all occurrences of a given identifier. 
*/ @@ -79,7 +86,7 @@ public List allIndicesOf(CqlIdentifier id) { /** Returns the first occurrence of a given identifier, or -1 if it's not in the list. */ public int firstIndexOf(CqlIdentifier id) { - List indices = allIndicesOf(id); - return indices.isEmpty() ? -1 : indices.get(0); + Iterator indices = allIndicesOf(id).iterator(); + return indices.hasNext() ? -1 : indices.next(); } } From f9ced726c20dcedd11401e2e01f9f01321857329 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 15 Nov 2022 14:04:15 -0600 Subject: [PATCH 813/979] Maven formatting fixes after last commit --- .../oss/driver/internal/core/data/IdentifierIndex.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 74c98ed7f4e..6b1d8fbb3a6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -20,9 +20,7 @@ import com.datastax.oss.driver.api.core.data.GettableById; import com.datastax.oss.driver.api.core.data.GettableByName; import com.datastax.oss.driver.internal.core.util.Strings; -import com.datastax.oss.driver.shaded.guava.common.collect.ArrayListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableListMultimap; -import com.datastax.oss.driver.shaded.guava.common.collect.LinkedListMultimap; import com.datastax.oss.driver.shaded.guava.common.collect.ListMultimap; import java.util.Iterator; import java.util.List; @@ -44,8 +42,10 @@ public class IdentifierIndex { public IdentifierIndex(List ids) { ImmutableListMultimap.Builder byId = ImmutableListMultimap.builder(); - ImmutableListMultimap.Builder byCaseSensitiveName = ImmutableListMultimap.builder(); - ImmutableListMultimap.Builder byCaseInsensitiveName = ImmutableListMultimap.builder(); + 
ImmutableListMultimap.Builder byCaseSensitiveName = + ImmutableListMultimap.builder(); + ImmutableListMultimap.Builder byCaseInsensitiveName = + ImmutableListMultimap.builder(); int i = 0; for (CqlIdentifier id : ids) { From a40bbc2e451f03c6328ccfcbe0ac6560fd0492cf Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 15 Nov 2022 14:11:34 -0600 Subject: [PATCH 814/979] JAVA-3045: Fix GraalVM native image support for GraalVM 22.2 (#1612) --- .../internal/core/protocol/CompressorSubstitutions.java | 9 --------- .../oss/driver/internal/core/util/Dependency.java | 8 +++++--- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java index 889e4e1c137..f4578720f23 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/CompressorSubstitutions.java @@ -21,7 +21,6 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.internal.core.util.GraalDependencyChecker; import com.datastax.oss.protocol.internal.Compressor; -import com.oracle.svm.core.annotate.Delete; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; import io.netty.buffer.ByteBuf; @@ -82,14 +81,6 @@ public static Compressor newInstance(String name, DriverContext context } } - @TargetClass(value = Lz4Compressor.class, onlyWith = Lz4Missing.class) - @Delete - public static final class DeleteLz4Compressor {} - - @TargetClass(value = SnappyCompressor.class) - @Delete - public static final class DeleteSnappyCompressor {} - public static class Lz4Present implements BooleanSupplier { @Override public boolean getAsBoolean() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java index bbefe698d55..72db22e5c5b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/Dependency.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.internal.core.util; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; /** * A set of driver optional dependencies and a common mechanism to test the presence of such @@ -48,10 +50,10 @@ public enum Dependency { ; @SuppressWarnings("ImmutableEnumChecker") - private final ImmutableList clzs; + private final List clzs; Dependency(String... classNames) { - clzs = ImmutableList.copyOf(classNames); + clzs = Collections.unmodifiableList(Arrays.asList(classNames)); } public Iterable classes() { From 97d9c4c7083a12d177be91ffc81b690e33483292 Mon Sep 17 00:00:00 2001 From: Benoit TELLIER Date: Sat, 19 Nov 2022 05:39:48 +0700 Subject: [PATCH 815/979] Optimizations for TypeSafeDriverConfig (#1616) * TypesafeDriverConfig: getProfile can avoid calling containsKey On a typical applicative workload, 0.53% of CPU is spend resolving configuration profiles. By getting the profile first and failing if it is null we can easily cut that in half. * TypesafeDriverConfig: optimize getDefaultProfile We could easily get a dedicated field for the default profile thus avoiding recurring maps lookups. * fixup! 
TypesafeDriverConfig: optimize getDefaultProfile --- .../config/typesafe/TypesafeDriverConfig.java | 19 ++++++++++++++----- .../typesafe/TypesafeDriverConfigTest.java | 8 ++++++++ 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java index cc3f841436b..ca5d919c604 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java @@ -32,6 +32,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import java.net.URL; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; @@ -50,6 +51,8 @@ public class TypesafeDriverConfig implements DriverConfig { private final Map defaultOverrides = new ConcurrentHashMap<>(); + private final TypesafeDriverExecutionProfile.Base defaultProfile; + public TypesafeDriverConfig(Config config) { this.lastLoadedConfig = config; Map profileConfigs = extractProfiles(config); @@ -62,6 +65,7 @@ public TypesafeDriverConfig(Config config) { new TypesafeDriverExecutionProfile.Base(entry.getKey(), entry.getValue())); } this.profiles = builder.build(); + this.defaultProfile = profiles.get(DriverExecutionProfile.DEFAULT_NAME); } /** @return whether the configuration changed */ @@ -136,14 +140,19 @@ private Map extractProfiles(Config sourceConfig) { return result.build(); } + @Override + public DriverExecutionProfile getDefaultProfile() { + return defaultProfile; + } + @NonNull @Override public DriverExecutionProfile getProfile(@NonNull String profileName) { - Preconditions.checkArgument( - profiles.containsKey(profileName), - "Unknown profile '%s'. 
Check your configuration.", - profileName); - return profiles.get(profileName); + if (profileName.equals(DriverExecutionProfile.DEFAULT_NAME)) { + return defaultProfile; + } + return Optional.ofNullable(profiles.get(profileName)) + .orElseThrow(() -> new IllegalArgumentException(String.format("Unknown profile '%s'. Check your configuration.", profileName))); } @NonNull diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java index 019d50ab2a0..b39479647e9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java @@ -171,6 +171,14 @@ public void should_enumerate_options() { entry("int1", 45)); } + @Test + public void should_update_default_profile_on_reload() { + TypesafeDriverConfig config = parse("int1 = 42\n profiles { profile1 { int1 = 43 } }"); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT1)).isEqualTo(42); + config.reload(ConfigFactory.parseString("int1 = 44\n profiles { profile1 { int1 = 45 } }")); + assertThat(config.getDefaultProfile().getInt(MockOptions.INT1)).isEqualTo(44); + } + private TypesafeDriverConfig parse(String configString) { Config config = ConfigFactory.parseString(configString); return new TypesafeDriverConfig(config); From a650ee43712cc0448ce6d263db30f15eb2f55c2c Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 17 Nov 2022 22:12:00 -0600 Subject: [PATCH 816/979] Formatting fix after recent TypeSafe config changes. 
Also fixed a logic bug in previous commit re: IdentifierIndex --- .../internal/core/config/typesafe/TypesafeDriverConfig.java | 6 ++++-- .../oss/driver/internal/core/data/IdentifierIndex.java | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java index ca5d919c604..c65e9035e0a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfig.java @@ -21,7 +21,6 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.internal.core.util.Loggers; -import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.typesafe.config.Config; import com.typesafe.config.ConfigObject; @@ -152,7 +151,10 @@ public DriverExecutionProfile getProfile(@NonNull String profileName) { return defaultProfile; } return Optional.ofNullable(profiles.get(profileName)) - .orElseThrow(() -> new IllegalArgumentException(String.format("Unknown profile '%s'. Check your configuration.", profileName))); + .orElseThrow( + () -> + new IllegalArgumentException( + String.format("Unknown profile '%s'. 
Check your configuration.", profileName))); } @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java index 6b1d8fbb3a6..868617e0c1e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/data/IdentifierIndex.java @@ -76,7 +76,7 @@ public List allIndicesOf(String name) { */ public int firstIndexOf(String name) { Iterator indices = allIndicesOf(name).iterator(); - return indices.hasNext() ? -1 : indices.next(); + return indices.hasNext() ? indices.next() : -1; } /** Returns all occurrences of a given identifier. */ @@ -87,6 +87,6 @@ public List allIndicesOf(CqlIdentifier id) { /** Returns the first occurrence of a given identifier, or -1 if it's not in the list. */ public int firstIndexOf(CqlIdentifier id) { Iterator indices = allIndicesOf(id).iterator(); - return indices.hasNext() ? -1 : indices.next(); + return indices.hasNext() ? indices.next() : -1; } } From f92384469f7d9be863dd88d34a8f7896497c72e0 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 12 Dec 2022 12:38:04 -0600 Subject: [PATCH 817/979] Trying to work around JENKINS-37984 --- Jenkinsfile | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 0f9a28265d3..51e2400982f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -175,7 +175,9 @@ def describeAdhocAndScheduledTestingStage() { // branch pattern for cron // should match 3.x, 4.x, 4.5.x, etc -def branchPatternCron = ~"((\\d+(\\.[\\dx]+)+))" +def branchPatternCron() = { + ~"((\\d+(\\.[\\dx]+)+))" +} pipeline { agent none @@ -354,7 +356,7 @@ pipeline { triggers { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) - parameterizedCron(branchPatternCron.matcher(env.BRANCH_NAME).matches() ? 
""" + parameterizedCron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? """ # Every weeknight (Monday - Friday) around 2:00 AM ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 From d4a52aa2c91e2abd99ca5583b09ea6c01f4db5e6 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 12 Dec 2022 12:40:41 -0600 Subject: [PATCH 818/979] Hey, let's try not to mangle the Groovy --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 51e2400982f..c5e8ac69b7f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -175,7 +175,7 @@ def describeAdhocAndScheduledTestingStage() { // branch pattern for cron // should match 3.x, 4.x, 4.5.x, etc -def branchPatternCron() = { +def branchPatternCron() { ~"((\\d+(\\.[\\dx]+)+))" } From 8ce9497bdef9c5a6ff7e24187e042dfbff9844c2 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 15 Dec 2022 17:12:46 -0600 Subject: [PATCH 819/979] Removing submission of CI metrics temporarily after AWS conversion --- Jenkinsfile | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c5e8ac69b7f..bf90ebbacda 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -133,25 +133,6 @@ ${status} after ${currentBuild.durationString - ' and counting'}""" } } -def submitCIMetrics(buildType) { - long durationMs = currentBuild.duration - long durationSec = durationMs / 1000 - long nowSec = (currentBuild.startTimeInMillis + durationMs) / 1000 - def branchNameNoPeriods = env.BRANCH_NAME.replaceAll('\\.', '_') - def durationMetric = "okr.ci.java.${env.DRIVER_METRIC_TYPE}.${buildType}.${branchNameNoPeriods} ${durationSec} ${nowSec}" - - timeout(time: 1, unit: 'MINUTES') { - withCredentials([string(credentialsId: 'lab-grafana-address', variable: 'LAB_GRAFANA_ADDRESS'), - string(credentialsId: 
'lab-grafana-port', variable: 'LAB_GRAFANA_PORT')]) { - withEnv(["DURATION_METRIC=${durationMetric}"]) { - sh label: 'Send runtime metrics to labgrafana', script: '''#!/bin/bash -le - echo "${DURATION_METRIC}" | nc -q 5 ${LAB_GRAFANA_ADDRESS} ${LAB_GRAFANA_PORT} - ''' - } - } - } -} - def describePerCommitStage() { script { currentBuild.displayName = "Per-Commit build" @@ -462,11 +443,6 @@ pipeline { } } post { - always { - node('master') { - submitCIMetrics('commit') - } - } aborted { notifySlack('aborted') } From 1b455ff5fc204edaff8897d2f1ea73b7e6e88574 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 5 Jan 2023 01:24:14 -0600 Subject: [PATCH 820/979] Forgot to copy over a few 3.x releases into the changelog in this branch --- changelog/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index a0ec32f7a8b..17025eb417b 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -617,7 +617,17 @@ changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements +## 3.11.3 + +- [improvement] JAVA-3023: Upgrade Netty to 4.1.77, 3.x edition + +## 3.11.2 + +- [improvement] JAVA-3008: Upgrade Netty to 4.1.75, 3.x edition +- [improvement] JAVA-2984: Upgrade Jackson to resolve high-priority CVEs + ## 3.11.1 + - [bug] JAVA-2967: Support native transport peer information for DSE 6.8. - [bug] JAVA-2976: Support missing protocol v5 error codes CAS_WRITE_UNKNOWN, CDC_WRITE_FAILURE. 
From 355844408325df5045fc5b10d99737f24003042d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Reis?= Date: Fri, 24 Feb 2023 17:15:24 +0000 Subject: [PATCH 821/979] fix dse builds on 4.x (#1625) --- Jenkinsfile | 61 +++++++++++++++++++++++++++++++++-------------------- 1 file changed, 38 insertions(+), 23 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index bf90ebbacda..67ed4fcd769 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,6 +31,21 @@ def initializeEnvironment() { . ${CCM_ENVIRONMENT_SHELL} ${SERVER_VERSION} ''' + if (env.SERVER_VERSION.split('-')[0] == 'dse') { + env.DSE_FIXED_VERSION = env.SERVER_VERSION.split('-')[1] + sh label: 'Update environment for DataStax Enterprise', script: '''#!/bin/bash -le + cat >> ${HOME}/environment.txt << ENVIRONMENT_EOF +CCM_CASSANDRA_VERSION=${DSE_FIXED_VERSION} # maintain for backwards compatibility +CCM_VERSION=${DSE_FIXED_VERSION} +CCM_SERVER_TYPE=dse +DSE_VERSION=${DSE_FIXED_VERSION} +CCM_IS_DSE=true +CCM_BRANCH=${DSE_FIXED_VERSION} +DSE_BRANCH=${DSE_FIXED_VERSION} +ENVIRONMENT_EOF + ''' + } + sh label: 'Display Java and environment information',script: '''#!/bin/bash -le # Load CCM environment variables set -o allexport @@ -198,12 +213,12 @@ pipeline { '3.0', // Previous Apache CassandraⓇ '3.11', // Current Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'dse-4.8', // Previous EOSL DataStax Enterprise - 'dse-5.0', // Long Term Support DataStax Enterprise - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7', // Previous DataStax Enterprise - 'dse-6.8', // Current DataStax Enterprise + 'dse-4.8.16', // Previous EOSL DataStax Enterprise + 'dse-5.0.15', // Long Term Support DataStax Enterprise + 'dse-5.1.35', // Legacy DataStax Enterprise + 'dse-6.0.18', // Previous DataStax Enterprise + 'dse-6.7.17', // Previous DataStax Enterprise + 'dse-6.8.30', // Current DataStax Enterprise 'ALL'], description: '''Apache Cassandra® and DataStax Enterprise server 
version to use for adhoc BUILD-AND-EXECUTE-TESTS builds

          Driver versionTinkerPop version
          4.15.03.5.3
          4.14.13.5.3
          4.14.03.4.10
          4.13.03.4.10
          @@ -234,27 +249,27 @@ pipeline { - + - + - + - + - + - +
          Apache Cassandra® v4.x (CURRENTLY UNDER DEVELOPMENT)
          dse-4.8dse-4.8.16 DataStax Enterprise v4.8.x (END OF SERVICE LIFE)
          dse-5.0dse-5.0.15 DataStax Enterprise v5.0.x (Long Term Support)
          dse-5.1dse-5.1.35 DataStax Enterprise v5.1.x
          dse-6.0dse-6.0.18 DataStax Enterprise v6.0.x
          dse-6.7dse-6.7.17 DataStax Enterprise v6.7.x
          dse-6.8dse-6.8.30 DataStax Enterprise v6.8.x
          ''') @@ -339,13 +354,13 @@ pipeline { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? """ # Every weeknight (Monday - Friday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, DSE-6.0 and DSE 6.7 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8 dse-5.0 dse-5.1 dse-6.0 dse-6.7;CI_SCHEDULE_JABBA_VERSION=1.8 + ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, dse-6.0.18 and DSE 6.7 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 ### JDK11 tests against 3.11, 4.0 and DSE 6.8 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 # Every weekend (Sunday) around 12:00 PM noon ### JDK14 tests against 3.11, 4.0 and DSE 6.8 - H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 + H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 """ : "") } @@ -381,7 +396,7 @@ pipeline { name 'SERVER_VERSION' values '3.11', // Latest stable Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'dse-6.8' // Current DataStax Enterprise + 'dse-6.8.30' // Current DataStax Enterprise } } @@ -489,12 +504,12 @@ pipeline { '3.0', // Previous Apache CassandraⓇ '3.11', // Current Apache CassandraⓇ '4.0', // Development Apache CassandraⓇ - 'dse-4.8', // Previous EOSL DataStax Enterprise - 'dse-5.0', // Last EOSL DataStax Enterprise - 'dse-5.1', // Legacy DataStax Enterprise - 'dse-6.0', // Previous DataStax Enterprise - 'dse-6.7', // Previous DataStax Enterprise - 'dse-6.8' // Current 
DataStax Enterprise + 'dse-4.8.16', // Previous EOSL DataStax Enterprise + 'dse-5.0.15', // Last EOSL DataStax Enterprise + 'dse-5.1.35', // Legacy DataStax Enterprise + 'dse-6.0.18', // Previous DataStax Enterprise + 'dse-6.7.17', // Previous DataStax Enterprise + 'dse-6.8.30' // Current DataStax Enterprise } } when { From 741df6f98a1aad172d7dce2a760fac6c280a208c Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 4 Apr 2023 13:48:27 -0500 Subject: [PATCH 822/979] JAVA-3042: Add support for ad-hoc Java 17 builds (#1629) --- Jenkinsfile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 67ed4fcd769..1859ceb7689 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -281,7 +281,8 @@ pipeline { 'openjdk@1.11', // OpenJDK version 11 'openjdk@1.12', // OpenJDK version 12 'openjdk@1.13', // OpenJDK version 13 - 'openjdk@1.14'], // OpenJDK version 14 + 'openjdk@1.14', // OpenJDK version 14 + 'openjdk@1.17'], // OpenJDK version 17 description: '''JDK version to use for TESTING when running adhoc BUILD-AND-EXECUTE-TESTS builds. All builds will use JDK8 for building the driver @@ -318,6 +319,10 @@ pipeline { + + + +
          openjdk@1.14 OpenJDK version 14
          openjdk@1.17OpenJDK version 17
          ''') booleanParam( name: 'SKIP_SERIAL_ITS', From f91979f99dd7271c036a3dae8d35b71e59fe396d Mon Sep 17 00:00:00 2001 From: Emelia <105240296+emeliawilkinson24@users.noreply.github.com> Date: Tue, 9 May 2023 10:39:22 -0400 Subject: [PATCH 823/979] DOC-2813 (#1632) Added error handling guidance linking to a helpful blog post plus Jamie's addition. --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 0b5a61520f5..c9003f606f1 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,10 @@ remain unchanged, and the new API will look very familiar to 2.x and 3.x users. See the [upgrade guide](upgrade_guide/) for details. +## Error Handling + +See the [Cassandra error handling done right blog](https://www.datastax.com/blog/cassandra-error-handling-done-right) for error handling with the DataStax Java Driver for Apache Cassandra™. + ## Useful links * [Manual](manual/) From cfeb55f8ba9c35bd3b19c9442b429a564b565440 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 5 Jun 2023 11:42:14 -0500 Subject: [PATCH 824/979] JAVA-3060: Add vector type, codec + support for parsing CQL type (#1639) --- .../oss/driver/api/core/data/CqlVector.java | 91 ++++++++++++ .../driver/api/core/data/GettableById.java | 18 +++ .../driver/api/core/data/GettableByIndex.java | 12 ++ .../driver/api/core/data/GettableByName.java | 18 +++ .../driver/api/core/data/SettableById.java | 21 +++ .../driver/api/core/data/SettableByIndex.java | 13 ++ .../driver/api/core/data/SettableByName.java | 21 +++ .../driver/api/core/type/CqlVectorType.java | 86 +++++++++++ .../oss/driver/api/core/type/DataTypes.java | 28 +++- .../api/core/type/codec/TypeCodecs.java | 9 ++ .../api/core/type/reflect/GenericType.java | 8 ++ .../schema/parsing/DataTypeCqlNameParser.java | 11 ++ .../core/type/codec/CqlVectorCodec.java | 135 ++++++++++++++++++ .../time/PersistentZonedTimestampCodec.java | 2 +- .../codec/registry/CachingCodecRegistry.java | 7 + 
.../core/type/codec/CqlVectorCodecTest.java | 115 +++++++++++++++ .../examples/datatypes/TuplesMapped.java | 4 +- 17 files changed, 593 insertions(+), 6 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java new file mode 100644 index 00000000000..c34cc38a10a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java @@ -0,0 +1,91 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.data; + +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import java.util.Arrays; + +/** An n-dimensional vector defined in CQL */ +public class CqlVector { + + private final ImmutableList values; + + private CqlVector(ImmutableList values) { + this.values = values; + } + + public static Builder builder() { + return new Builder(); + } + + public Iterable getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (o instanceof CqlVector) { + CqlVector that = (CqlVector) o; + return this.values.equals(that.values); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Arrays.hashCode(values.toArray()); + } + + @Override + public String toString() { + + String contents = Joiner.on(", ").join(this.values); + return "CqlVector{" + contents + '}'; + } + + public static class Builder { + + private ImmutableList.Builder listBuilder; + + private Builder() { + listBuilder = new ImmutableList.Builder(); + } + + public Builder add(T element) { + listBuilder.add(element); + return this; + } + + public Builder add(T... 
elements) { + listBuilder.addAll(Iterators.forArray(elements)); + return this; + } + + public Builder addAll(Iterable iter) { + listBuilder.addAll(iter); + return this; + } + + public CqlVector build() { + return new CqlVector(listBuilder.build()); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java index a6c46e4abe8..7d2ea3a7f8b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java @@ -515,6 +515,24 @@ default CqlDuration getCqlDuration(@NonNull CqlIdentifier id) { return getCqlDuration(firstIndexOf(id)); } + /** + * Returns the value for the first occurrence of {@code id} as a vector. + * + *

          By default, this works with CQL type {@code vector}. + * + *

          If an identifier appears multiple times, this can only be used to access the first value. + * For the other ones, use positional getters. + * + *

          If you want to avoid the overhead of building a {@code CqlIdentifier}, use the variant of + * this method that takes a string argument. + * + * @throws IllegalArgumentException if the id is invalid. + */ + @Nullable + default CqlVector getCqlVector(@NonNull CqlIdentifier id) { + return getCqlVector(firstIndexOf(id)); + } + /** * Returns the value for the first occurrence of {@code id} as a token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java index 9e3502732c9..0da60bef285 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java @@ -436,6 +436,18 @@ default CqlDuration getCqlDuration(int i) { return get(i, CqlDuration.class); } + /** + * Returns the {@code i}th value as a vector. + * + *

          By default, this works with CQL type {@code vector}. + * + * @throws IndexOutOfBoundsException if the index is invalid. + */ + @Nullable + default CqlVector getCqlVector(int i) { + return get(i, CqlVector.class); + } + /** * Returns the {@code i}th value as a token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java index abbb16aeb75..40e5532f85a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java @@ -511,6 +511,24 @@ default CqlDuration getCqlDuration(@NonNull String name) { return getCqlDuration(firstIndexOf(name)); } + /** + * Returns the value for the first occurrence of {@code name} as a vector. + * + *

          By default, this works with CQL type {@code vector}. + * + *

          If an identifier appears multiple times, this can only be used to access the first value. + * For the other ones, use positional getters. + * + *

          This method deals with case sensitivity in the way explained in the documentation of {@link + * AccessibleByName}. + * + * @throws IllegalArgumentException if the name is invalid. + */ + @Nullable + default CqlVector getCqlVector(@NonNull String name) { + return getCqlVector(firstIndexOf(name)); + } + /** * Returns the value for the first occurrence of {@code name} as a token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index 0d3cba5601d..58eb7098028 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -559,6 +559,27 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) return result; } + /** + * Sets the value for all occurrences of {@code id} to the provided duration. + * + *

          By default, this works with CQL type {@code vector}. + * + *

          If you want to avoid the overhead of building a {@code CqlIdentifier}, use the variant of + * this method that takes a string argument. + * + * @throws IllegalArgumentException if the id is invalid. + */ + @NonNull + @CheckReturnValue + default SelfT setCqlVector(@NonNull CqlIdentifier id, @Nullable CqlVector v) { + SelfT result = null; + for (Integer i : allIndicesOf(id)) { + result = (result == null ? this : result).setCqlVector(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; + } + /** * Sets the value for all occurrences of {@code id} to the provided token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java index 7cd0b5671ff..bdee1de7b6f 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java @@ -414,6 +414,19 @@ default SelfT setCqlDuration(int i, @Nullable CqlDuration v) { return set(i, v, CqlDuration.class); } + /** + * Sets the {@code i}th value to the provided duration. + * + *

          By default, this works with CQL type {@code vector}. + * + * @throws IndexOutOfBoundsException if the index is invalid. + */ + @NonNull + @CheckReturnValue + default SelfT setCqlVector(int i, @Nullable CqlVector v) { + return set(i, v, CqlVector.class); + } + /** * Sets the {@code i}th value to the provided token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index d93f4ebf5b2..26e5340bdce 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -558,6 +558,27 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { return result; } + /** + * Sets the value for all occurrences of {@code name} to the provided duration. + * + *

          By default, this works with CQL type {@code vector}. + * + *

          This method deals with case sensitivity in the way explained in the documentation of {@link + * AccessibleByName}. + * + * @throws IllegalArgumentException if the name is invalid. + */ + @NonNull + @CheckReturnValue + default SelfT setCqlVector(@NonNull String name, @Nullable CqlVector v) { + SelfT result = null; + for (Integer i : allIndicesOf(name)) { + result = (result == null ? this : result).setCqlVector(i, v); + } + assert result != null; // allIndices throws if there are no results + return result; + } + /** * Sets the value for all occurrences of {@code name} to the provided token. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java new file mode 100644 index 00000000000..528a688451a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java @@ -0,0 +1,86 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.core.type; + +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Objects; + +public class CqlVectorType implements CustomType { + + public static final String CQLVECTOR_CLASS_NAME = "org.apache.cassandra.db.marshal.VectorType"; + + private final DataType subtype; + private final int dimensions; + + public CqlVectorType(DataType subtype, int dimensions) { + + this.dimensions = dimensions; + this.subtype = subtype; + } + + public int getDimensions() { + return this.dimensions; + } + + public DataType getSubtype() { + return this.subtype; + } + + @NonNull + @Override + public String getClassName() { + return CQLVECTOR_CLASS_NAME; + } + + @NonNull + @Override + public String asCql(boolean includeFrozen, boolean pretty) { + return String.format("'%s(%d)'", getClassName(), getDimensions()); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (o instanceof CqlVectorType) { + CqlVectorType that = (CqlVectorType) o; + return that.subtype.equals(this.subtype) && that.dimensions == this.dimensions; + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), subtype, dimensions); + } + + @Override + public String toString() { + return String.format("CqlVector(%s, %d)", getSubtype(), getDimensions()); + } + + @Override + public boolean isDetached() { + return false; + } + + @Override + public void attach(@NonNull AttachmentPoint attachmentPoint) { + // nothing to do + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java index 0a61314ca71..f8cc7042516 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java @@ -15,17 +15,21 @@ */ package 
com.datastax.oss.driver.api.core.type; +import com.datastax.oss.driver.api.core.detach.AttachmentPoint; import com.datastax.oss.driver.api.core.detach.Detachable; +import com.datastax.oss.driver.internal.core.metadata.schema.parsing.DataTypeClassNameParser; import com.datastax.oss.driver.internal.core.type.DefaultCustomType; import com.datastax.oss.driver.internal.core.type.DefaultListType; import com.datastax.oss.driver.internal.core.type.DefaultMapType; import com.datastax.oss.driver.internal.core.type.DefaultSetType; import com.datastax.oss.driver.internal.core.type.DefaultTupleType; import com.datastax.oss.driver.internal.core.type.PrimitiveType; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Arrays; +import java.util.List; /** Constants and factory methods to obtain data type instances. 
*/ public class DataTypes { @@ -51,14 +55,26 @@ public class DataTypes { public static final DataType TINYINT = new PrimitiveType(ProtocolConstants.DataType.TINYINT); public static final DataType DURATION = new PrimitiveType(ProtocolConstants.DataType.DURATION); + private static final DataTypeClassNameParser classNameParser = new DataTypeClassNameParser(); + private static final Splitter paramSplitter = Splitter.on(',').trimResults(); + @NonNull public static DataType custom(@NonNull String className) { + // In protocol v4, duration is implemented as a custom type - if ("org.apache.cassandra.db.marshal.DurationType".equals(className)) { - return DURATION; - } else { - return new DefaultCustomType(className); + if (className.equals("org.apache.cassandra.db.marshal.DurationType")) return DURATION; + + /* Vector support is currently implemented as a custom type but is also parameterized */ + if (className.startsWith(CqlVectorType.CQLVECTOR_CLASS_NAME)) { + List params = + paramSplitter.splitToList( + className.substring( + CqlVectorType.CQLVECTOR_CLASS_NAME.length() + 1, className.length() - 1)); + DataType subType = classNameParser.parse(params.get(0), AttachmentPoint.NONE); + int dimensions = Integer.parseInt(params.get(1)); + return new CqlVectorType(subType, dimensions); } + return new DefaultCustomType(className); } @NonNull @@ -118,4 +134,8 @@ public static MapType frozenMapOf(@NonNull DataType keyType, @NonNull DataType v public static TupleType tupleOf(@NonNull DataType... 
componentTypes) { return new DefaultTupleType(ImmutableList.copyOf(Arrays.asList(componentTypes))); } + + public static CqlVectorType vectorOf(DataType subtype, int dimensions) { + return new CqlVectorType(subtype, dimensions); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java index 6cd4b68a042..07fbf309988 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java @@ -16,8 +16,10 @@ package com.datastax.oss.driver.api.core.type.codec; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.CustomType; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -28,6 +30,7 @@ import com.datastax.oss.driver.internal.core.type.codec.BooleanCodec; import com.datastax.oss.driver.internal.core.type.codec.CounterCodec; import com.datastax.oss.driver.internal.core.type.codec.CqlDurationCodec; +import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec; import com.datastax.oss.driver.internal.core.type.codec.CustomCodec; import com.datastax.oss.driver.internal.core.type.codec.DateCodec; import com.datastax.oss.driver.internal.core.type.codec.DecimalCodec; @@ -205,6 +208,12 @@ public static TypeCodec tupleOf(@NonNull TupleType cqlType) { return new TupleCodec(cqlType); } + public static TypeCodec> vectorOf( + @NonNull CqlVectorType type, @NonNull TypeCodec subtypeCodec) { + return new CqlVectorCodec( + DataTypes.vectorOf(subtypeCodec.getCqlType(), type.getDimensions()), subtypeCodec); + } + /** 
* Builds a new codec that maps a CQL user defined type to the driver's {@link UdtValue}, for the * given type definition. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index a1977e39f23..db573b3451b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.core.type.reflect; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.GettableByIndex; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; @@ -165,6 +166,13 @@ public static GenericType> mapOf( return new GenericType<>(token); } + @NonNull + public static GenericType> vectorOf(@NonNull GenericType elementType) { + TypeToken> token = + new TypeToken>() {}.where(new TypeParameter() {}, elementType.token); + return new GenericType<>(token); + } + @NonNull public static GenericType arrayOf(@NonNull Class componentType) { TypeToken token = diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java index 3523aa5c459..76a2301522d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.type.CqlVectorType; import 
com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.UserDefinedType; @@ -137,6 +138,16 @@ private DataType parse( return new DefaultTupleType(componentTypesBuilder.build(), context); } + if (type.equalsIgnoreCase("vector")) { + if (parameters.size() != 2) { + throw new IllegalArgumentException( + String.format("Expecting two parameters for vector custom type, got %s", parameters)); + } + DataType subType = parse(parameters.get(0), keyspaceId, false, userTypes, context); + int dimensions = Integer.parseInt(parameters.get(1)); + return new CqlVectorType(subType, dimensions); + } + throw new IllegalArgumentException("Could not parse type name " + toParse); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java new file mode 100644 index 00000000000..4b739862d33 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java @@ -0,0 +1,135 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.data.CqlVector; +import com.datastax.oss.driver.api.core.type.CqlVectorType; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; +import com.datastax.oss.driver.shaded.guava.common.collect.Streams; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.Iterator; + +public class CqlVectorCodec implements TypeCodec> { + + private final CqlVectorType cqlType; + private final GenericType> javaType; + private final TypeCodec subtypeCodec; + + public CqlVectorCodec(CqlVectorType cqlType, TypeCodec subtypeCodec) { + this.cqlType = cqlType; + this.subtypeCodec = subtypeCodec; + this.javaType = GenericType.vectorOf(subtypeCodec.getJavaType()); + } + + @NonNull + @Override + public GenericType> getJavaType() { + return this.javaType; + } + + @NonNull + @Override + public DataType getCqlType() { + return this.cqlType; + } + + @Nullable + @Override + public ByteBuffer encode( + @Nullable CqlVector value, @NonNull ProtocolVersion protocolVersion) { + if (value == null || cqlType.getDimensions() <= 0) { + return null; + } + ByteBuffer[] valueBuffs = new ByteBuffer[cqlType.getDimensions()]; + Iterator values = value.getValues().iterator(); + int allValueBuffsSize = 0; + for (int i = 0; i < cqlType.getDimensions(); ++i) { + ByteBuffer valueBuff = this.subtypeCodec.encode(values.next(), protocolVersion); + allValueBuffsSize += valueBuff.limit(); + valueBuff.rewind(); + valueBuffs[i] = 
valueBuff; + } + /* Since we already did an early return for <= 0 dimensions above */ + assert valueBuffs.length > 0; + ByteBuffer rv = ByteBuffer.allocate(allValueBuffsSize); + for (int i = 0; i < cqlType.getDimensions(); ++i) { + rv.put(valueBuffs[i]); + } + rv.flip(); + return rv; + } + + @Nullable + @Override + public CqlVector decode( + @Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + if (bytes == null || bytes.remaining() == 0) { + return null; + } + + /* Determine element size by dividing count of remaining bytes by number of elements. This should have a remainder + of zero if we assume all elements are of uniform size (which is really a terrible assumption). + + TODO: We should probably tweak serialization format for vectors if we're going to allow them for arbitrary subtypes. + Elements should at least precede themselves with their size (along the lines of what lists do). */ + int elementSize = Math.floorDiv(bytes.remaining(), cqlType.getDimensions()); + if (!(bytes.remaining() % cqlType.getDimensions() == 0)) { + throw new IllegalArgumentException( + String.format( + "Expected elements of uniform size, observed %d elements with total bytes %d", + cqlType.getDimensions(), bytes.remaining())); + } + + ImmutableList.Builder builder = ImmutableList.builder(); + for (int i = 0; i < cqlType.getDimensions(); ++i) { + ByteBuffer slice = bytes.slice(); + slice.limit(elementSize); + builder.add(this.subtypeCodec.decode(slice, protocolVersion)); + bytes.position(bytes.position() + elementSize); + } + + /* Restore the input ByteBuffer to its original state */ + bytes.rewind(); + + return CqlVector.builder().addAll(builder.build()).build(); + } + + @NonNull + @Override + public String format(@Nullable CqlVector value) { + return value == null ? 
"NULL" : Iterables.toString(value.getValues()); + } + + @Nullable + @Override + public CqlVector parse(@Nullable String value) { + if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) return null; + + ImmutableList values = + Streams.stream(Splitter.on(", ").split(value.substring(1, value.length() - 1))) + .map(subtypeCodec::parse) + .collect(ImmutableList.toImmutableList()); + return CqlVector.builder().addAll(values).build(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java index 0cb1681d344..ac819d0044e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/PersistentZonedTimestampCodec.java @@ -98,7 +98,7 @@ protected TupleValue outerToInner(@Nullable ZonedDateTime value) { } else { Instant instant = value.toInstant(); String zoneId = value.getZone().toString(); - return getCqlType().newValue(instant, zoneId); + return this.getCqlType().newValue(instant, zoneId); } } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index 35ea21f38c7..18b1f55e106 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -16,8 +16,10 @@ package com.datastax.oss.driver.internal.core.type.codec.registry; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import 
com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.CustomType; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -602,6 +604,11 @@ protected TypeCodec createCodec( } else if (cqlType instanceof UserDefinedType && UdtValue.class.isAssignableFrom(token.getRawType())) { return TypeCodecs.udtOf((UserDefinedType) cqlType); + } else if (cqlType instanceof CqlVectorType + && CqlVector.class.isAssignableFrom(token.getRawType())) { + CqlVectorType vectorType = (CqlVectorType) cqlType; + TypeCodec subtypeCodec = codecFor(vectorType.getSubtype()); + return TypeCodecs.vectorOf((CqlVectorType) cqlType, subtypeCodec); } else if (cqlType instanceof CustomType && ByteBuffer.class.isAssignableFrom(token.getRawType())) { return TypeCodecs.custom(cqlType); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java new file mode 100644 index 00000000000..eac142f6ebe --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java @@ -0,0 +1,115 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.data.CqlVector; +import com.datastax.oss.driver.api.core.type.CqlVectorType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import org.junit.Test; + +public class CqlVectorCodecTest extends CodecTestBase> { + + private static final CqlVector VECTOR = CqlVector.builder().add(1.0f, 2.5f).build(); + + private static final String VECTOR_HEX_STRING = "0x" + "3f800000" + "40200000"; + + private static final String FORMATTED_VECTOR = "[1.0, 2.5]"; + + public CqlVectorCodecTest() { + CqlVectorType vectorType = DataTypes.vectorOf(DataTypes.FLOAT, 2); + this.codec = TypeCodecs.vectorOf(vectorType, TypeCodecs.FLOAT); + } + + @Test + public void should_encode() { + assertThat(encode(VECTOR)).isEqualTo(VECTOR_HEX_STRING); + assertThat(encode(null)).isNull(); + } + + @Test + public void should_decode() { + assertThat(decode(VECTOR_HEX_STRING)).isEqualTo(VECTOR); + assertThat(decode("0x")).isNull(); + assertThat(decode(null)).isNull(); + } + + @Test + public void decode_throws_if_too_few_bytes() { + // Dropping 4 bytes would knock off exactly 1 float, anything less than that would be something + // we couldn't parse a float out of + for (int i = 1; i <= 3; ++i) { + // 2 chars of hex encoded string = 1 byte + int lastIndex = VECTOR_HEX_STRING.length() - (2 * i); + assertThatThrownBy(() -> decode(VECTOR_HEX_STRING.substring(0, lastIndex))) + .isInstanceOf(IllegalArgumentException.class); + } + } + + @Test + public void should_format() { + assertThat(format(VECTOR)).isEqualTo(FORMATTED_VECTOR); + assertThat(format(null)).isEqualTo("NULL"); + } + + @Test + public void should_parse() { + 
assertThat(parse(FORMATTED_VECTOR)).isEqualTo(VECTOR); + assertThat(parse("NULL")).isNull(); + assertThat(parse("null")).isNull(); + assertThat(parse("")).isNull(); + assertThat(parse(null)).isNull(); + } + + @Test + public void should_accept_data_type() { + assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 2))).isTrue(); + assertThat(codec.accepts(DataTypes.INT)).isFalse(); + } + + @Test + public void should_accept_vector_type_correct_dimension_only() { + assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 0))).isFalse(); + assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 1))).isFalse(); + assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 2))).isTrue(); + for (int i = 3; i < 1000; ++i) { + assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, i))).isFalse(); + } + } + + @Test + public void should_accept_generic_type() { + assertThat(codec.accepts(GenericType.vectorOf(GenericType.FLOAT))).isTrue(); + assertThat(codec.accepts(GenericType.vectorOf(GenericType.INTEGER))).isFalse(); + assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); + } + + @Test + public void should_accept_raw_type() { + assertThat(codec.accepts(CqlVector.class)).isTrue(); + assertThat(codec.accepts(Integer.class)).isFalse(); + } + + @Test + public void should_accept_object() { + assertThat(codec.accepts(VECTOR)).isTrue(); + assertThat(codec.accepts(Integer.MIN_VALUE)).isFalse(); + } +} diff --git a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java index 817736df1cd..14afaaaee27 100644 --- a/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java +++ b/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/TuplesMapped.java @@ -119,7 +119,9 @@ protected Coordinates innerToOuter(@Nullable TupleValue value) { @Nullable @Override protected TupleValue outerToInner(@Nullable Coordinates 
value) { - return value == null ? null : getCqlType().newValue().setInt(0, value.x).setInt(1, value.y); + return value == null + ? null + : this.getCqlType().newValue().setInt(0, value.x).setInt(1, value.y); } } From 52a919b85a40093aba7b13963fac8fa39407f7d4 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 5 Jun 2023 15:42:14 -0500 Subject: [PATCH 825/979] JAVA-3058: Clear prepared statement cache on UDT type change event (#1638) --- .../core/cql/CqlPrepareAsyncProcessor.java | 85 ++++++++++- .../session/BuiltInRequestProcessors.java | 9 +- ...BuiltInRequestProcessorsSubstitutions.java | 6 +- .../driver/core/cql/PreparedStatementIT.java | 142 ++++++++++++++++++ .../guava/internal/GuavaDriverContext.java | 4 +- 5 files changed, 237 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java index 4e0b51fe482..ff935aefb0a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java @@ -18,31 +18,112 @@ import com.datastax.oss.driver.api.core.cql.PrepareRequest; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.ListType; +import com.datastax.oss.driver.api.core.type.MapType; +import com.datastax.oss.driver.api.core.type.SetType; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import 
com.datastax.oss.driver.internal.core.metadata.schema.events.TypeChangeEvent; import com.datastax.oss.driver.internal.core.session.DefaultSession; import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; import com.datastax.oss.driver.shaded.guava.common.cache.Cache; import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; +import com.datastax.oss.protocol.internal.ProtocolConstants; +import edu.umd.cs.findbugs.annotations.NonNull; +import io.netty.util.concurrent.EventExecutor; +import java.util.Map; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @ThreadSafe public class CqlPrepareAsyncProcessor implements RequestProcessor> { + private static final Logger LOG = LoggerFactory.getLogger(CqlPrepareAsyncProcessor.class); + protected final Cache> cache; public CqlPrepareAsyncProcessor() { - this(CacheBuilder.newBuilder().weakValues().build()); + this(Optional.empty()); + } + + public CqlPrepareAsyncProcessor(@NonNull Optional context) { + this(CacheBuilder.newBuilder().weakValues().build(), context); } protected CqlPrepareAsyncProcessor( - Cache> cache) { + Cache> cache, + Optional context) { + this.cache = cache; + context.ifPresent( + (ctx) -> { + LOG.info("Adding handler to invalidate cached prepared statements on type changes"); + EventExecutor adminExecutor = ctx.getNettyOptions().adminEventExecutorGroup().next(); + ctx.getEventBus() + .register( + TypeChangeEvent.class, RunOrSchedule.on(adminExecutor, this::onTypeChanged)); + }); + } + + private static boolean typeMatches(UserDefinedType 
oldType, DataType typeToCheck) { + + switch (typeToCheck.getProtocolCode()) { + case ProtocolConstants.DataType.UDT: + UserDefinedType udtType = (UserDefinedType) typeToCheck; + return udtType.equals(oldType) + ? true + : Iterables.any(udtType.getFieldTypes(), (testType) -> typeMatches(oldType, testType)); + case ProtocolConstants.DataType.LIST: + ListType listType = (ListType) typeToCheck; + return typeMatches(oldType, listType.getElementType()); + case ProtocolConstants.DataType.SET: + SetType setType = (SetType) typeToCheck; + return typeMatches(oldType, setType.getElementType()); + case ProtocolConstants.DataType.MAP: + MapType mapType = (MapType) typeToCheck; + return typeMatches(oldType, mapType.getKeyType()) + || typeMatches(oldType, mapType.getValueType()); + case ProtocolConstants.DataType.TUPLE: + TupleType tupleType = (TupleType) typeToCheck; + return Iterables.any( + tupleType.getComponentTypes(), (testType) -> typeMatches(oldType, testType)); + default: + return false; + } + } + + private void onTypeChanged(TypeChangeEvent event) { + for (Map.Entry> entry : + this.cache.asMap().entrySet()) { + + try { + PreparedStatement stmt = entry.getValue().get(); + if (Iterables.any( + stmt.getResultSetDefinitions(), (def) -> typeMatches(event.oldType, def.getType())) + || Iterables.any( + stmt.getVariableDefinitions(), + (def) -> typeMatches(event.oldType, def.getType()))) { + + this.cache.invalidate(entry.getKey()); + this.cache.cleanUp(); + } + } catch (Exception e) { + LOG.info("Exception while invalidating prepared statement cache due to UDT change", e); + } + } } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java index e502714f1b2..c2f24d56e6d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessors.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.internal.core.util.DefaultDependencyChecker; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,7 +44,7 @@ public class BuiltInRequestProcessors { public static List> createDefaultProcessors(DefaultDriverContext context) { List> processors = new ArrayList<>(); - addBasicProcessors(processors); + addBasicProcessors(processors, context); if (DefaultDependencyChecker.isPresent(TINKERPOP)) { addGraphProcessors(context, processors); } else { @@ -62,7 +63,8 @@ public class BuiltInRequestProcessors { return processors; } - public static void addBasicProcessors(List> processors) { + public static void addBasicProcessors( + List> processors, DefaultDriverContext context) { // regular requests (sync and async) CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); CqlRequestSyncProcessor cqlRequestSyncProcessor = @@ -71,7 +73,8 @@ public static void addBasicProcessors(List> processors) { processors.add(cqlRequestSyncProcessor); // prepare requests (sync and async) - CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); + CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = + new CqlPrepareAsyncProcessor(Optional.of(context)); CqlPrepareSyncProcessor cqlPrepareSyncProcessor = new CqlPrepareSyncProcessor(cqlPrepareAsyncProcessor); processors.add(cqlPrepareAsyncProcessor); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java index 4485caed33d..868ab35b177 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/session/BuiltInRequestProcessorsSubstitutions.java @@ -36,7 +36,7 @@ public static final class BuiltInRequestProcessorsGraphMissingReactiveMissing { public static List> createDefaultProcessors( DefaultDriverContext context) { List> processors = new ArrayList<>(); - BuiltInRequestProcessors.addBasicProcessors(processors); + BuiltInRequestProcessors.addBasicProcessors(processors, context); return processors; } } @@ -48,7 +48,7 @@ public static final class BuiltInRequestProcessorsGraphMissingReactivePresent { public static List> createDefaultProcessors( DefaultDriverContext context) { List> processors = new ArrayList<>(); - BuiltInRequestProcessors.addBasicProcessors(processors); + BuiltInRequestProcessors.addBasicProcessors(processors, context); BuiltInRequestProcessors.addReactiveProcessors(processors); return processors; } @@ -61,7 +61,7 @@ public static final class BuiltInRequestProcessorsGraphPresentReactiveMissing { public static List> createDefaultProcessors( DefaultDriverContext context) { List> processors = new ArrayList<>(); - BuiltInRequestProcessors.addBasicProcessors(processors); + BuiltInRequestProcessors.addBasicProcessors(processors, context); BuiltInRequestProcessors.addGraphProcessors(context, processors); return processors; } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 1b07edb53af..fe2df250a79 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -39,14 +39,20 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; +import 
com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.metadata.schema.events.TypeChangeEvent; import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenMap; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import com.datastax.oss.protocol.internal.util.Bytes; import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; import java.time.Duration; import java.util.concurrent.CompletionStage; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import junit.framework.TestCase; import org.junit.Before; import org.junit.Rule; @@ -444,6 +450,142 @@ public void should_fail_fast_if_id_changes_on_reprepare() { } } + private void invalidationResultSetTest(Consumer createFn) { + + try (CqlSession session = sessionWithCacheSizeMetric()) { + + assertThat(getPreparedCacheSize(session)).isEqualTo(0); + createFn.accept(session); + + session.prepare("select f from test_table_1 where e = ?"); + session.prepare("select h from test_table_2 where g = ?"); + assertThat(getPreparedCacheSize(session)).isEqualTo(2); + + CountDownLatch latch = new CountDownLatch(1); + DefaultDriverContext ctx = (DefaultDriverContext) session.getContext(); + ctx.getEventBus() + .register( + TypeChangeEvent.class, + (e) -> { + assertThat(e.oldType.getName().toString()).isEqualTo("test_type_2"); + latch.countDown(); + }); + + session.execute("ALTER TYPE test_type_2 add i blob"); + Uninterruptibles.awaitUninterruptibly(latch, 2, TimeUnit.SECONDS); + + assertThat(getPreparedCacheSize(session)).isEqualTo(1); + } + } + + private void invalidationVariableDefsTest(Consumer createFn, boolean isCollection) { + + try (CqlSession session = sessionWithCacheSizeMetric()) { + + 
assertThat(getPreparedCacheSize(session)).isEqualTo(0); + createFn.accept(session); + + String fStr = isCollection ? "f contains ?" : "f = ?"; + session.prepare(String.format("select e from test_table_1 where %s allow filtering", fStr)); + String hStr = isCollection ? "h contains ?" : "h = ?"; + session.prepare(String.format("select g from test_table_2 where %s allow filtering", hStr)); + assertThat(getPreparedCacheSize(session)).isEqualTo(2); + + CountDownLatch latch = new CountDownLatch(1); + DefaultDriverContext ctx = (DefaultDriverContext) session.getContext(); + ctx.getEventBus() + .register( + TypeChangeEvent.class, + (e) -> { + assertThat(e.oldType.getName().toString()).isEqualTo("test_type_2"); + latch.countDown(); + }); + + session.execute("ALTER TYPE test_type_2 add i blob"); + Uninterruptibles.awaitUninterruptibly(latch, 2, TimeUnit.SECONDS); + + assertThat(getPreparedCacheSize(session)).isEqualTo(1); + } + } + + Consumer setupCacheEntryTestBasic = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); + session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); + }; + + @Test + public void should_invalidate_cache_entry_on_basic_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestBasic); + } + + @Test + public void should_invalidate_cache_entry_on_basic_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestBasic, false); + } + + Consumer setupCacheEntryTestCollection = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute( + "CREATE TABLE test_table_1 (e int primary key, f list>)"); + session.execute( + "CREATE TABLE test_table_2 (g int primary key, h list>)"); + }; + + @Test + public void 
should_invalidate_cache_entry_on_collection_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestCollection); + } + + @Test + public void should_invalidate_cache_entry_on_collection_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestCollection, true); + } + + Consumer setupCacheEntryTestTuple = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute( + "CREATE TABLE test_table_1 (e int primary key, f tuple)"); + session.execute( + "CREATE TABLE test_table_2 (g int primary key, h tuple)"); + }; + + @Test + public void should_invalidate_cache_entry_on_tuple_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestTuple); + } + + @Test + public void should_invalidate_cache_entry_on_tuple_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestTuple, false); + } + + Consumer setupCacheEntryTestNested = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute("CREATE TYPE test_type_3 (e frozen, f int)"); + session.execute("CREATE TYPE test_type_4 (g int, h frozen)"); + session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); + session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); + }; + + @Test + public void should_invalidate_cache_entry_on_nested_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestNested); + } + + @Test + public void should_invalidate_cache_entry_on_nested_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestNested, false); + } + @Test public void should_infer_routing_information_when_partition_key_is_bound() { should_infer_routing_information_when_partition_key_is_bound( diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java index 5a8b44be739..7eaa90eefed 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/example/guava/internal/GuavaDriverContext.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.internal.core.cql.CqlRequestAsyncProcessor; import com.datastax.oss.driver.internal.core.cql.CqlRequestSyncProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; +import java.util.Optional; /** * A Custom {@link DefaultDriverContext} that overrides {@link #getRequestProcessorRegistry()} to @@ -44,7 +45,8 @@ public RequestProcessorRegistry buildRequestProcessorRegistry() { // use GuavaRequestAsyncProcessor to return ListenableFutures in async methods. 
CqlRequestAsyncProcessor cqlRequestAsyncProcessor = new CqlRequestAsyncProcessor(); - CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = new CqlPrepareAsyncProcessor(); + CqlPrepareAsyncProcessor cqlPrepareAsyncProcessor = + new CqlPrepareAsyncProcessor(Optional.of(this)); CqlRequestSyncProcessor cqlRequestSyncProcessor = new CqlRequestSyncProcessor(cqlRequestAsyncProcessor); From 985d498d51d10adcb741cb692a156a0e6457bb54 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 5 Jun 2023 16:00:15 -0500 Subject: [PATCH 826/979] Docs + changelog updates, 4.16.0 version --- README.md | 2 +- changelog/README.md | 7 +++++++ manual/core/bom/README.md | 4 ++-- manual/core/integration/README.md | 1 + 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index c9003f606f1..739baedced3 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.15.0](https://github.com/datastax/java-driver/tree/4.15.0).* +[4.16.0](https://github.com/datastax/java-driver/tree/4.16.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol diff --git a/changelog/README.md b/changelog/README.md index 17025eb417b..30bb1194674 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,13 @@ +### 4.16.0 + +- [improvement] JAVA-3058: Clear prepared statement cache on UDT type change event +- [improvement] JAVA-3060: Add vector type, codec + support for parsing CQL type +- [improvement] DOC-2813: Add error handling guidance linking to a helpful blog post +- [improvement] JAVA-3045: Fix GraalVM native image support for GraalVM 22.2 + ### 4.15.0 - [improvement] JAVA-3041: Update Guava session sample code to use ProgrammaticArguments diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index dc8f12eb599..935489beb7e 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your application's own com.datastax.oss java-driver-bom - 4.15.0 + 4.16.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.15.0 + 4.16.0 diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 3f42b1dabfe..02d0d97027b 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -562,6 +562,7 @@ Here are the recommended TinkerPop versions for each driver version: + From 1d87ffe4abde2b3e87f7a92ad4a5b06f226af1cc Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 5 Jun 2023 16:08:54 -0500 Subject: [PATCH 827/979] [maven-release-plugin] prepare release 4.16.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 
+- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 837fcde0f89..c4362554165 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-core-shaded - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-mapper-processor - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-mapper-runtime - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-query-builder - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-test-infra - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-metrics-micrometer - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss java-driver-metrics-microprofile - 4.15.1-SNAPSHOT + 4.16.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 0cf5700b493..ebe689d0781 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 288b0df925a..7f0fc893a53 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 7a077be1553..389301e9f28 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 
21417669a5f..6e70b274528 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.15.1-SNAPSHOT + 4.16.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 17e4ff393ef..a62d52cbd78 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 81c1a78c95a..7c838bf0c06 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 70121a8b9ad..42b64edbd63 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index f1536f493d1..2b6b4eab14c 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index b16f8b17c41..5bab7401cc3 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1e78ce04975..3e9e9134d35 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-osgi-tests jar diff --git 
a/pom.xml b/pom.xml index 2ac1715fee9..b8c7e75369c 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.16.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 96559c8588e..d7967d3dbac 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 873de05d8c4..5a0113cd9af 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.15.1-SNAPSHOT + 4.16.0 java-driver-test-infra bundle From 5d3968b40cea5874fff500849c44b018f36e23d6 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 5 Jun 2023 16:08:57 -0500 Subject: [PATCH 828/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index c4362554165..79ea3485cda 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 
4.16.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.16.0 + 4.16.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index ebe689d0781..fc89c2e7338 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 7f0fc893a53..d78ff6a3f02 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 389301e9f28..bd7d0c811ef 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 6e70b274528..85db139e7f4 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.16.0 + 4.16.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index a62d52cbd78..112ac7e73bc 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 7c838bf0c06..60d66d06584 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 42b64edbd63..2e5f74f83b7 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 2b6b4eab14c..ef4662d38ca 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 5bab7401cc3..c038a1567c5 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 3e9e9134d35..c1aebc6718b 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index b8c7e75369c..dbfc01b886e 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -955,7 +955,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.16.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d7967d3dbac..41395bda871 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 5a0113cd9af..4c67bc35a1b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.0 + 4.16.1-SNAPSHOT java-driver-test-infra bundle From a3a8a4fbefc842f785bfc2942e54229ac942f834 Mon Sep 17 00:00:00 2001 From: Chris Lin <99268912+chrislin22@users.noreply.github.com> Date: Thu, 22 Jun 2023 03:46:51 -0400 Subject: [PATCH 829/979] added Snyk monitor and snyk clean up after PR closed/merged (#1646) * added snyk monitor and clean up * used jdk 11 and add options * Update snyk-cli-scan.yml added explicit mvn package prepare for snyk * Update snyk-cli-scan.yml * use jdk 8 * added .snyk.ignore.example and .snyk * triggered by branch 4.x * address a few high CVEs identified by snyk scan * ignore graal-sdk CVE for now until we can move off java8 * clean up snyk yaml files * JAVA-3050: Upgrade Netty 4.1.94 to address recent CVEs osgi-tests/BundleOptions.java - since netty-4.1.78, netty-handler additionally depends on netty-transport-native-unix-common so we need to pull that in when configuring pax exam --------- Co-authored-by: weideng1 Co-authored-by: Henry Hughes --- .github/workflows/snyk-cli-scan.yml | 47 +++++++++++++++++++ .github/workflows/snyk-pr-cleanup.yml | 16 +++++++ .snyk | 19 ++++++++ .snyk.ignore.example | 9 
++++ .../internal/osgi/support/BundleOptions.java | 1 + pom.xml | 10 ++-- 6 files changed, 97 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/snyk-cli-scan.yml create mode 100644 .github/workflows/snyk-pr-cleanup.yml create mode 100644 .snyk create mode 100644 .snyk.ignore.example diff --git a/.github/workflows/snyk-cli-scan.yml b/.github/workflows/snyk-cli-scan.yml new file mode 100644 index 00000000000..50d303a128b --- /dev/null +++ b/.github/workflows/snyk-cli-scan.yml @@ -0,0 +1,47 @@ +name: 🔬 Snyk cli SCA + +on: + push: + branches: [ 4.x ] + pull_request: + branches: [ 4.x ] + workflow_dispatch: + +env: + SNYK_SEVERITY_THRESHOLD_LEVEL: high + +jobs: + snyk-cli-scan: + runs-on: ubuntu-latest + steps: + - name: Git checkout + uses: actions/checkout@v3 + + - name: prepare for snyk scan + uses: datastax/shared-github-actions/actions/snyk-prepare@main + + - name: Set up JDK 8 + uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: '8' + cache: maven + + - name: run maven install prepare for snyk + run: | + mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true + + - name: snyk scan java + uses: datastax/shared-github-actions/actions/snyk-scan-java@main + with: + directories: . 
+ SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + SNYK_ORG_ID: ${{ secrets.SNYK_ORG_ID }} + extra-snyk-options: "-DskipTests -Dmaven.javadoc.skip=true" + + - name: Snyk scan result + uses: datastax/shared-github-actions/actions/snyk-process-scan-results@main + with: + gh_repo_token: ${{ secrets.GITHUB_TOKEN }} + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + SNYK_ORG_ID: ${{ secrets.SNYK_ORG_ID }} diff --git a/.github/workflows/snyk-pr-cleanup.yml b/.github/workflows/snyk-pr-cleanup.yml new file mode 100644 index 00000000000..7cf018a59fc --- /dev/null +++ b/.github/workflows/snyk-pr-cleanup.yml @@ -0,0 +1,16 @@ +name: 🗑️ Snyk PR cleanup - merged/closed + +on: + pull_request: + types: + - closed + branches: + - snyk-monitor + workflow_dispatch: + +jobs: + snyk_project_cleanup_when_pr_closed: + uses: datastax/shared-github-actions/.github/workflows/snyk-pr-cleanup.yml@main + secrets: + snyk_token: ${{ secrets.SNYK_TOKEN }} + snyk_org_id: ${{ secrets.SNYK_ORG_ID }} diff --git a/.snyk b/.snyk new file mode 100644 index 00000000000..3c6284addca --- /dev/null +++ b/.snyk @@ -0,0 +1,19 @@ +# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. 
+version: v1.22.2 +# ignores vulnerabilities until expiry date; change duration by modifying expiry date +ignore: + SNYK-JAVA-ORGGRAALVMSDK-2767964: + - '*': + reason: cannot upgrade to graal-sdk 22.1.0+ until we move off Java8, which is slated for later this year + expires: 2024-01-10T00:00:00.000Z + created: 2023-06-21T00:00:00.000Z + SNYK-JAVA-ORGGRAALVMSDK-2769618: + - '*': + reason: cannot upgrade to graal-sdk 22.1.0+ until we move off Java8, which is slated for later this year + expires: 2024-01-10T00:00:00.000Z + created: 2023-06-21T00:00:00.000Z + SNYK-JAVA-ORGGRAALVMSDK-5457933: + - '*': + reason: cannot upgrade to graal-sdk 22.1.0+ until we move off Java8, which is slated for later this year + expires: 2024-01-10T00:00:00.000Z + created: 2023-06-21T00:00:00.000Z diff --git a/.snyk.ignore.example b/.snyk.ignore.example new file mode 100644 index 00000000000..a4690b27223 --- /dev/null +++ b/.snyk.ignore.example @@ -0,0 +1,9 @@ +# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. 
+version: v1.22.2 +# ignores vulnerabilities until expiry date; change duration by modifying expiry date +ignore: + SNYK-PYTHON-URLLIB3-1533435: + - '*': + reason: state your ignore reason here + expires: 2030-01-01T00:00:00.000Z + created: 2022-03-21T00:00:00.000Z \ No newline at end of file diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index c4416768b4a..b7dac833f3d 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -91,6 +91,7 @@ public static CompositeOption nettyBundles() { mavenBundle("io.netty", "netty-codec").versionAsInProject(), mavenBundle("io.netty", "netty-common").versionAsInProject(), mavenBundle("io.netty", "netty-transport").versionAsInProject(), + mavenBundle("io.netty", "netty-transport-native-unix-common").versionAsInProject(), mavenBundle("io.netty", "netty-resolver").versionAsInProject()); } diff --git a/pom.xml b/pom.xml index dbfc01b886e..b56f22d6454 100644 --- a/pom.xml +++ b/pom.xml @@ -47,21 +47,21 @@ 1.4.1 2.1.12 4.1.18 - 4.1.77.Final + 4.1.94.Final 1.2.1 - 3.5.3 + 3.5.6 1.7.26 1.0.3 - 20210307 + 20230227 2.13.2 2.13.2.2 1.9.12 - 1.1.7.3 + 1.1.10.1 1.7.1 3.19.0 @@ -73,7 +73,7 @@ 4.13.4 0.11.0 1.1.4 - 2.28 + 2.31 2.5.0 2.1.1 1.1.4 From fba2cf6e91c7c7a666c217df070493b83d201990 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Thu, 22 Jun 2023 11:43:16 -0700 Subject: [PATCH 830/979] JAVA-3071: OsgiGraphIT.test_graph fails with dse-6.8.30 Test fails because tinkerpop classes cannot be loaded which gates enabling GraphRequest* processors Add missing dependencies required by TinkerIoRegistryV3d0 to the osgi testing bundle - com.sun.mail:mailapi:1.6.4 - org.apache.commons:commons-text:1.8 - org.apache.commons:commons-configuration2:2.7 --- 
.../oss/driver/internal/osgi/support/BundleOptions.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index b7dac833f3d..6e7d82787f4 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -152,7 +152,9 @@ public static CompositeOption tinkerpopBundles() { .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), // Note: the versions below are hard-coded because they shouldn't change very often, // but if the tests fail because of them, we should consider parameterizing them - mavenBundle("commons-configuration", "commons-configuration", "1.10"), + mavenBundle("com.sun.mail", "mailapi", "1.6.4"), + mavenBundle("org.apache.commons", "commons-text", "1.8"), + mavenBundle("org.apache.commons", "commons-configuration2", "2.7"), CoreOptions.wrappedBundle(mavenBundle("commons-logging", "commons-logging", "1.1.1")) .exports("org.apache.commons.logging.*") .bundleVersion("1.1.1") From 9c44c72450b6d62a8363426c7a13d61b83746a0d Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Thu, 15 Jun 2023 19:01:44 -0700 Subject: [PATCH 831/979] JAVA-3065: PreparedStatementIT#should_fail_fast_if_id_changes_on_reprepare fails with recent C*/DSE versions PreparedStatementIT.java - add multiple backend requirements to should_fail_fast_if_id_changes_on_reprepare covering versions impacted by CASSANDRA-15252 - add handle_id_changes_on_reprepare to test CASSANDRA-15252 for versions which include the fix BackendRequirement.java - new repeatable annotation for specifying multiple ranges of backend requirements for tests VersionRequirementTest.java - tests for multiple ranges of backend requirements refactor BaseCcmRule.java annotation logic 
and move to VersionRequirements.java remove duplicated annotation code from CcmPaxExam.java and EmbeddedAdsRule --- .../driver/api/core/auth/EmbeddedAdsRule.java | 59 ++---- .../driver/core/cql/PreparedStatementIT.java | 47 ++++- .../internal/osgi/support/CcmPaxExam.java | 84 ++------ .../driver/api/testinfra/ccm/BaseCcmRule.java | 87 ++------- .../requirement/BackendRequirement.java | 36 ++++ .../requirement/BackendRequirements.java | 25 +++ .../testinfra/requirement/BackendType.java | 32 +++ .../requirement/VersionRequirement.java | 166 ++++++++++++++++ .../requirement/VersionRequirementTest.java | 184 ++++++++++++++++++ 9 files changed, 534 insertions(+), 186 deletions(-) create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirement.java create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirements.java create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java create mode 100644 test-infra/src/test/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirementTest.java diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java index cbec842a2cf..0903eb9b298 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java @@ -20,12 +20,14 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import 
com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.io.File; +import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.junit.AssumptionViolatedException; @@ -151,50 +153,25 @@ protected void before() { } } - private Statement buildErrorStatement( - Version requirement, Version actual, String description, boolean lessThan) { - return new Statement() { - - @Override - public void evaluate() { - throw new AssumptionViolatedException( - String.format( - "Test requires %s %s %s but %s is configured. Description: %s", - lessThan ? "less than" : "at least", "DSE", requirement, actual, description)); - } - }; - } - @Override public Statement apply(Statement base, Description description) { - DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); - if (dseRequirement != null) { - if (!CcmBridge.DSE_ENABLEMENT) { - return new Statement() { - @Override - public void evaluate() { - throw new AssumptionViolatedException("Test Requires DSE but C* is configured."); - } - }; - } else { - Version dseVersion = CcmBridge.VERSION; - if (!dseRequirement.min().isEmpty()) { - Version minVersion = Version.parse(dseRequirement.min()); - if (minVersion.compareTo(dseVersion) > 0) { - return buildErrorStatement(dseVersion, dseVersion, dseRequirement.description(), false); - } + BackendType backend = CcmBridge.DSE_ENABLEMENT ? 
BackendType.DSE : BackendType.CASSANDRA; + Version version = CcmBridge.VERSION; + + Collection requirements = VersionRequirement.fromAnnotations(description); + + if (VersionRequirement.meetsAny(requirements, backend, version)) { + return super.apply(base, description); + } else { + // requirements not met, throw reasoning assumption to skip test + return new Statement() { + @Override + public void evaluate() { + throw new AssumptionViolatedException( + VersionRequirement.buildReasonString(requirements, backend, version)); } - - if (!dseRequirement.max().isEmpty()) { - Version maxVersion = Version.parse(dseRequirement.max()); - - if (maxVersion.compareTo(dseVersion) <= 0) { - return buildErrorStatement(dseVersion, dseVersion, dseRequirement.description(), true); - } - } - } + }; } - return super.apply(base, description); } @Override diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index fe2df250a79..c3494a6ee96 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -16,7 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.catchThrowable; import com.codahale.metrics.Gauge; @@ -36,6 +36,8 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import 
com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -54,6 +56,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import junit.framework.TestCase; +import org.assertj.core.api.AbstractThrowableAssert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -425,13 +428,11 @@ public void should_create_separate_instances_for_different_statement_parameters( } /** - * This test relies on CASSANDRA-15252 to reproduce the error condition. If the bug gets fixed in - * Cassandra, we'll need to add a version restriction. + * This method reproduces CASSANDRA-15252 which is fixed in 3.0.26/3.11.12/4.0.2. * * @see CASSANDRA-15252 */ - @Test - public void should_fail_fast_if_id_changes_on_reprepare() { + private AbstractThrowableAssert assertableReprepareAfterIdChange() { try (CqlSession session = SessionUtils.newSession(ccmRule)) { PreparedStatement preparedStatement = session.prepare( @@ -444,12 +445,42 @@ public void should_fail_fast_if_id_changes_on_reprepare() { executeDdl("DROP TABLE prepared_statement_test"); executeDdl("CREATE TABLE prepared_statement_test (a int PRIMARY KEY, b int, c int)"); - assertThatThrownBy(() -> session.execute(preparedStatement.bind(1))) - .isInstanceOf(IllegalStateException.class) - .hasMessageContaining("ID mismatch while trying to reprepare"); + return assertThatCode(() -> session.execute(preparedStatement.bind(1))); } } + // Add version bounds to the DSE requirement if there is a version containing fix for + // CASSANDRA-15252 + @BackendRequirement( + type = BackendType.DSE, + description = "No DSE version contains fix for CASSANDRA-15252") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "3.0.0", maxExclusive = "3.0.26") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.11.0", + maxExclusive = "3.11.12") + 
@BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0.0", maxExclusive = "4.0.2") + @Test + public void should_fail_fast_if_id_changes_on_reprepare() { + assertableReprepareAfterIdChange() + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("ID mismatch while trying to reprepare"); + } + + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.0.26", + maxExclusive = "3.11.0") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.11.12", + maxExclusive = "4.0.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0.2") + @Test + public void handle_id_changes_on_reprepare() { + assertableReprepareAfterIdChange().doesNotThrowAnyException(); + } + private void invalidationResultSetTest(Consumer createFn) { try (CqlSession session = sessionWithCacheSizeMetric()) { diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java index 8697a0d790d..4a1700639b4 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java @@ -18,10 +18,9 @@ import static com.datastax.oss.driver.internal.osgi.support.CcmStagedReactor.CCM_BRIDGE; import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; -import com.datastax.oss.driver.api.testinfra.DseRequirement; -import java.util.Objects; -import java.util.Optional; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; +import java.util.Collection; import org.junit.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -38,69 +37,20 @@ public CcmPaxExam(Class klass) throws 
InitializationError { @Override public void run(RunNotifier notifier) { Description description = getDescription(); - CassandraRequirement cassandraRequirement = - description.getAnnotation(CassandraRequirement.class); - if (cassandraRequirement != null) { - if (!cassandraRequirement.min().isEmpty()) { - Version minVersion = Objects.requireNonNull(Version.parse(cassandraRequirement.min())); - if (minVersion.compareTo(CCM_BRIDGE.getCassandraVersion()) > 0) { - fireRequirementsNotMet(notifier, description, cassandraRequirement.min(), false, false); - return; - } - } - if (!cassandraRequirement.max().isEmpty()) { - Version maxVersion = Objects.requireNonNull(Version.parse(cassandraRequirement.max())); - if (maxVersion.compareTo(CCM_BRIDGE.getCassandraVersion()) <= 0) { - fireRequirementsNotMet(notifier, description, cassandraRequirement.max(), true, false); - return; - } - } - } - DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); - if (dseRequirement != null) { - Optional dseVersionOption = CCM_BRIDGE.getDseVersion(); - if (!dseVersionOption.isPresent()) { - notifier.fireTestAssumptionFailed( - new Failure( - description, - new AssumptionViolatedException("Test Requires DSE but C* is configured."))); - return; - } else { - Version dseVersion = dseVersionOption.get(); - if (!dseRequirement.min().isEmpty()) { - Version minVersion = Objects.requireNonNull(Version.parse(dseRequirement.min())); - if (minVersion.compareTo(dseVersion) > 0) { - fireRequirementsNotMet(notifier, description, dseRequirement.min(), false, true); - return; - } - } - if (!dseRequirement.max().isEmpty()) { - Version maxVersion = Objects.requireNonNull(Version.parse(dseRequirement.max())); - if (maxVersion.compareTo(dseVersion) <= 0) { - fireRequirementsNotMet(notifier, description, dseRequirement.min(), true, true); - return; - } - } - } - } - super.run(notifier); - } + BackendType backend = + CCM_BRIDGE.getDseVersion().isPresent() ? 
BackendType.DSE : BackendType.CASSANDRA; + Version version = CCM_BRIDGE.getDseVersion().orElseGet(CCM_BRIDGE::getCassandraVersion); - private void fireRequirementsNotMet( - RunNotifier notifier, - Description description, - String requirement, - boolean lessThan, - boolean dse) { - AssumptionViolatedException e = - new AssumptionViolatedException( - String.format( - "Test requires %s %s %s but %s is configured. Description: %s", - lessThan ? "less than" : "at least", - dse ? "DSE" : "C*", - requirement, - dse ? CCM_BRIDGE.getDseVersion().orElse(null) : CCM_BRIDGE.getCassandraVersion(), - description)); - notifier.fireTestAssumptionFailed(new Failure(description, e)); + Collection requirements = + VersionRequirement.fromAnnotations(getDescription()); + if (VersionRequirement.meetsAny(requirements, backend, version)) { + super.run(notifier); + } else { + // requirements not met, throw reasoning assumption to skip test + AssumptionViolatedException e = + new AssumptionViolatedException( + VersionRequirement.buildReasonString(requirements, backend, version)); + notifier.fireTestAssumptionFailed(new Failure(description, e)); + } } } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java index c902434aac2..d4830dd249e 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java @@ -18,9 +18,10 @@ import com.datastax.oss.driver.api.core.DefaultProtocolVersion; import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; +import java.util.Collection; import java.util.Optional; import org.junit.AssumptionViolatedException; import org.junit.runner.Description; @@ -55,80 +56,26 @@ protected void after() { ccmBridge.remove(); } - private Statement buildErrorStatement( - Version requirement, String description, boolean lessThan, boolean dse) { - return new Statement() { - - @Override - public void evaluate() { - throw new AssumptionViolatedException( - String.format( - "Test requires %s %s %s but %s is configured. Description: %s", - lessThan ? "less than" : "at least", - dse ? "DSE" : "C*", - requirement, - dse ? ccmBridge.getDseVersion().orElse(null) : ccmBridge.getCassandraVersion(), - description)); - } - }; - } - @Override public Statement apply(Statement base, Description description) { - // If test is annotated with CassandraRequirement or DseRequirement, ensure configured CCM - // cluster meets those requirements. - CassandraRequirement cassandraRequirement = - description.getAnnotation(CassandraRequirement.class); + BackendType backend = + ccmBridge.getDseVersion().isPresent() ? BackendType.DSE : BackendType.CASSANDRA; + Version version = ccmBridge.getDseVersion().orElseGet(ccmBridge::getCassandraVersion); - if (cassandraRequirement != null) { - // if the configured cassandra cassandraRequirement exceeds the one being used skip this test. - if (!cassandraRequirement.min().isEmpty()) { - Version minVersion = Version.parse(cassandraRequirement.min()); - if (minVersion.compareTo(ccmBridge.getCassandraVersion()) > 0) { - return buildErrorStatement(minVersion, cassandraRequirement.description(), false, false); - } - } + Collection requirements = VersionRequirement.fromAnnotations(description); - if (!cassandraRequirement.max().isEmpty()) { - // if the test version exceeds the maximum configured one, fail out. 
- Version maxVersion = Version.parse(cassandraRequirement.max()); - - if (maxVersion.compareTo(ccmBridge.getCassandraVersion()) <= 0) { - return buildErrorStatement(maxVersion, cassandraRequirement.description(), true, false); - } - } - } - - DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); - if (dseRequirement != null) { - Optional dseVersionOption = ccmBridge.getDseVersion(); - if (!dseVersionOption.isPresent()) { - return new Statement() { - - @Override - public void evaluate() { - throw new AssumptionViolatedException("Test Requires DSE but C* is configured."); - } - }; - } else { - Version dseVersion = dseVersionOption.get(); - if (!dseRequirement.min().isEmpty()) { - Version minVersion = Version.parse(dseRequirement.min()); - if (minVersion.compareTo(dseVersion) > 0) { - return buildErrorStatement(minVersion, dseRequirement.description(), false, true); - } - } - - if (!dseRequirement.max().isEmpty()) { - Version maxVersion = Version.parse(dseRequirement.max()); - - if (maxVersion.compareTo(dseVersion) <= 0) { - return buildErrorStatement(maxVersion, dseRequirement.description(), true, true); - } + if (VersionRequirement.meetsAny(requirements, backend, version)) { + return super.apply(base, description); + } else { + // requirements not met, throw reasoning assumption to skip test + return new Statement() { + @Override + public void evaluate() { + throw new AssumptionViolatedException( + VersionRequirement.buildReasonString(requirements, backend, version)); } - } + }; } - return super.apply(base, description); } public Version getCassandraVersion() { diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirement.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirement.java new file mode 100644 index 00000000000..ec034dbbac2 --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirement.java 
@@ -0,0 +1,36 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.testinfra.requirement; + +import java.lang.annotation.Repeatable; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * Annotation for a Class or Method that defines a database backend Version requirement. If the + * type/version in use does not meet the requirement, the test is skipped. + */ +@Repeatable(BackendRequirements.class) +@Retention(RetentionPolicy.RUNTIME) +public @interface BackendRequirement { + BackendType type(); + + String minInclusive() default ""; + + String maxExclusive() default ""; + + String description() default ""; +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirements.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirements.java new file mode 100644 index 00000000000..09786c1215b --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirements.java @@ -0,0 +1,25 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.testinfra.requirement; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** Annotation to allow @BackendRequirement to be repeatable. */ +@Retention(RetentionPolicy.RUNTIME) +public @interface BackendRequirements { + BackendRequirement[] value(); +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java new file mode 100644 index 00000000000..eae7067c161 --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java @@ -0,0 +1,32 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.testinfra.requirement; + +public enum BackendType { + CASSANDRA("C*"), + DSE("Dse"), + ; + + final String friendlyName; + + BackendType(String friendlyName) { + this.friendlyName = friendlyName; + } + + public String getFriendlyName() { + return friendlyName; + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java new file mode 100644 index 00000000000..28a72bc92ad --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java @@ -0,0 +1,166 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.testinfra.requirement; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.DseRequirement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.runner.Description; + +/** + * Used to unify the requirements specified by + * annotations @CassandraRequirement, @DseRequirement, @BackendRequirement + */ +public class VersionRequirement { + final BackendType backendType; + final Optional minInclusive; + final Optional maxExclusive; + final String description; + + public VersionRequirement( + BackendType backendType, String minInclusive, String maxExclusive, String description) { + this.backendType = backendType; + this.minInclusive = + minInclusive.isEmpty() ? Optional.empty() : Optional.of(Version.parse(minInclusive)); + this.maxExclusive = + maxExclusive.isEmpty() ? 
Optional.empty() : Optional.of(Version.parse(maxExclusive)); + this.description = description; + } + + public BackendType getBackendType() { + return backendType; + } + + public Optional getMinInclusive() { + return minInclusive; + } + + public Optional getMaxExclusive() { + return maxExclusive; + } + + public String readableString() { + final String versionRange; + if (minInclusive.isPresent() && maxExclusive.isPresent()) { + versionRange = + String.format("%s or greater, but less than %s", minInclusive.get(), maxExclusive.get()); + } else if (minInclusive.isPresent()) { + versionRange = String.format("%s or greater", minInclusive.get()); + } else if (maxExclusive.isPresent()) { + versionRange = String.format("less than %s", maxExclusive.get()); + } else { + versionRange = "any version"; + } + + if (!description.isEmpty()) { + return String.format("%s %s [%s]", backendType.getFriendlyName(), versionRange, description); + } else { + return String.format("%s %s", backendType.getFriendlyName(), versionRange); + } + } + + public static VersionRequirement fromBackendRequirement(BackendRequirement requirement) { + return new VersionRequirement( + requirement.type(), + requirement.minInclusive(), + requirement.maxExclusive(), + requirement.description()); + } + + public static VersionRequirement fromCassandraRequirement(CassandraRequirement requirement) { + return new VersionRequirement( + BackendType.CASSANDRA, requirement.min(), requirement.max(), requirement.description()); + } + + public static VersionRequirement fromDseRequirement(DseRequirement requirement) { + return new VersionRequirement( + BackendType.DSE, requirement.min(), requirement.max(), requirement.description()); + } + + public static Collection fromAnnotations(Description description) { + // collect all requirement annotation types + CassandraRequirement cassandraRequirement = + description.getAnnotation(CassandraRequirement.class); + DseRequirement dseRequirement = 
description.getAnnotation(DseRequirement.class); + BackendRequirements backendRequirements = description.getAnnotation(BackendRequirements.class); + + // build list of required versions + Collection requirements = new ArrayList<>(); + if (cassandraRequirement != null) { + requirements.add(VersionRequirement.fromCassandraRequirement(cassandraRequirement)); + } + if (dseRequirement != null) { + requirements.add(VersionRequirement.fromDseRequirement(dseRequirement)); + } + if (backendRequirements != null) { + Arrays.stream(backendRequirements.value()) + .forEach(r -> requirements.add(VersionRequirement.fromBackendRequirement(r))); + } + return requirements; + } + + public static boolean meetsAny( + Collection requirements, + BackendType configuredBackend, + Version configuredVersion) { + // special case: if there are no requirements then any backend/version is sufficient + if (requirements.isEmpty()) { + return true; + } + + return requirements.stream() + .anyMatch( + requirement -> { + // requirement is different db type + if (requirement.getBackendType() != configuredBackend) { + return false; + } + + // configured version is less than requirement min + if (requirement.getMinInclusive().isPresent()) { + if (requirement.getMinInclusive().get().compareTo(configuredVersion) > 0) { + return false; + } + } + + // configured version is greater than or equal to requirement max + if (requirement.getMaxExclusive().isPresent()) { + if (requirement.getMaxExclusive().get().compareTo(configuredVersion) <= 0) { + return false; + } + } + + // backend type and version range match + return true; + }); + } + + public static String buildReasonString( + Collection requirements, BackendType backend, Version version) { + return String.format( + "Test requires one of:\n%s\nbut configuration is %s %s.", + requirements.stream() + .map(req -> String.format(" - %s", req.readableString())) + .collect(Collectors.joining("\n")), + backend.getFriendlyName(), + version); + } +} diff --git 
a/test-infra/src/test/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirementTest.java b/test-infra/src/test/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirementTest.java new file mode 100644 index 00000000000..51a362d2ce5 --- /dev/null +++ b/test-infra/src/test/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirementTest.java @@ -0,0 +1,184 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.testinfra.requirement; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import java.util.Collections; +import java.util.List; +import org.junit.Test; + +public class VersionRequirementTest { + // backend aliases + private static BackendType CASSANDRA = BackendType.CASSANDRA; + private static BackendType DSE = BackendType.DSE; + + // version numbers + private static Version V_0_0_0 = Version.parse("0.0.0"); + private static Version V_0_1_0 = Version.parse("0.1.0"); + private static Version V_1_0_0 = Version.parse("1.0.0"); + private static Version V_1_0_1 = Version.parse("1.0.1"); + private static Version V_1_1_0 = Version.parse("1.1.0"); + private static Version V_2_0_0 = Version.parse("2.0.0"); + private static Version V_2_0_1 = Version.parse("2.0.1"); + private static Version V_3_0_0 = Version.parse("3.0.0"); + private static Version V_3_1_0 = Version.parse("3.1.0"); + private static Version V_4_0_0 = Version.parse("4.0.0"); + + // requirements + private static VersionRequirement CASSANDRA_ANY = new VersionRequirement(CASSANDRA, "", "", ""); + private static VersionRequirement CASSANDRA_FROM_1_0_0 = + new VersionRequirement(CASSANDRA, "1.0.0", "", ""); + private static VersionRequirement CASSANDRA_TO_1_0_0 = + new VersionRequirement(CASSANDRA, "", "1.0.0", ""); + private static VersionRequirement CASSANDRA_FROM_1_0_0_TO_2_0_0 = + new VersionRequirement(CASSANDRA, "1.0.0", "2.0.0", ""); + private static VersionRequirement CASSANDRA_FROM_1_1_0 = + new VersionRequirement(CASSANDRA, "1.1.0", "", ""); + private static VersionRequirement CASSANDRA_FROM_3_0_0_TO_3_1_0 = + new VersionRequirement(CASSANDRA, "3.0.0", "3.1.0", ""); + private static VersionRequirement DSE_ANY = new VersionRequirement(DSE, "", "", ""); + + @Test + public void empty_requirements() { + List req = 
Collections.emptyList(); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, DSE, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isTrue(); + } + + @Test + public void single_requirement_any_version() { + List anyCassandra = Collections.singletonList(CASSANDRA_ANY); + List anyDse = Collections.singletonList(DSE_ANY); + + assertThat(VersionRequirement.meetsAny(anyCassandra, CASSANDRA, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(anyCassandra, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(anyDse, DSE, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(anyDse, DSE, V_1_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(anyDse, CASSANDRA, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(anyDse, CASSANDRA, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(anyCassandra, DSE, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(anyCassandra, DSE, V_1_0_0)).isFalse(); + } + + @Test + public void single_requirement_min_only() { + List req = Collections.singletonList(CASSANDRA_FROM_1_0_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_1)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_1_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_1_0)).isFalse(); + } + + @Test + public void single_requirement_max_only() { + List req = Collections.singletonList(CASSANDRA_TO_1_0_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, 
V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_1_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, DSE, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_1)).isFalse(); + } + + @Test + public void single_requirement_min_and_max() { + List req = Collections.singletonList(CASSANDRA_FROM_1_0_0_TO_2_0_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_1)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_1_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_1_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_1)).isFalse(); + } + + @Test + public void multi_requirement_any_version() { + List req = ImmutableList.of(CASSANDRA_ANY, DSE_ANY); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isTrue(); + } + + @Test + public void multi_db_requirement_min_one_any_other() { + List req = ImmutableList.of(CASSANDRA_FROM_1_0_0, DSE_ANY); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, DSE, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isFalse(); + } + + @Test + public void multi_requirement_two_ranges() { + List req = + ImmutableList.of(CASSANDRA_FROM_1_0_0_TO_2_0_0, 
CASSANDRA_FROM_3_0_0_TO_3_1_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_1_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_3_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_3_1_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_4_0_0)).isFalse(); + } + + @Test + public void multi_requirement_overlapping() { + List req = + ImmutableList.of(CASSANDRA_FROM_1_0_0_TO_2_0_0, CASSANDRA_FROM_1_1_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_1_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, DSE, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isFalse(); + } + + @Test + public void multi_requirement_not_range() { + List req = ImmutableList.of(CASSANDRA_TO_1_0_0, CASSANDRA_FROM_1_1_0); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_0_0_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_1_0)).isTrue(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_2_0_0)).isTrue(); + + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_0)).isFalse(); + assertThat(VersionRequirement.meetsAny(req, CASSANDRA, V_1_0_1)).isFalse(); + } +} From 6cf075fc68eccb06e38e3e26deee75f491fa0fe2 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 27 Jun 2023 10:46:46 -0500 Subject: [PATCH 832/979] JAVA-3061 Remove CqlVector, represent CQL vector types as Lists (#1656) --- core/revapi.json | 65 ++++++++++++- 
.../oss/driver/api/core/data/CqlVector.java | 91 ------------------- .../driver/api/core/data/GettableById.java | 5 +- .../driver/api/core/data/GettableByIndex.java | 4 +- .../driver/api/core/data/GettableByName.java | 5 +- .../driver/api/core/data/SettableById.java | 9 +- .../driver/api/core/data/SettableByIndex.java | 7 +- .../driver/api/core/data/SettableByName.java | 7 +- .../driver/api/core/type/ContainerType.java | 28 ++++++ .../oss/driver/api/core/type/DataTypes.java | 16 +++- .../oss/driver/api/core/type/ListType.java | 5 +- .../oss/driver/api/core/type/SetType.java | 5 +- .../oss/driver/api/core/type/VectorType.java | 25 +++++ .../api/core/type/codec/TypeCodecs.java | 11 +-- .../api/core/type/reflect/GenericType.java | 8 -- .../schema/parsing/DataTypeCqlNameParser.java | 4 +- .../core/type/DefaultVectorType.java} | 34 ++++--- .../{CqlVectorCodec.java => VectorCodec.java} | 76 ++++++++++------ .../codec/registry/CachingCodecRegistry.java | 56 +++++------- ...torCodecTest.java => VectorCodecTest.java} | 52 ++++++++--- 20 files changed, 290 insertions(+), 223 deletions(-) delete mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/ContainerType.java create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/type/VectorType.java rename core/src/main/java/com/datastax/oss/driver/{api/core/type/CqlVectorType.java => internal/core/type/DefaultVectorType.java} (64%) rename core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/{CqlVectorCodec.java => VectorCodec.java} (62%) rename core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/{CqlVectorCodecTest.java => VectorCodecTest.java} (62%) diff --git a/core/revapi.json b/core/revapi.json index af719e9987e..f844479cd29 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -6824,7 +6824,70 @@ "new": "class 
com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException", "superClass": "com.datastax.oss.driver.api.core.DriverException", "justification": "Make CodecNotFoundException to extend DriverException as all other driver exceptions do" - } + }, + { + "code": "java.class.removed", + "old": "class com.datastax.oss.driver.api.core.data.CqlVector", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getCqlVector(com.datastax.oss.driver.api.core.CqlIdentifier)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getCqlVector(int)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getCqlVector(java.lang.String)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableById>::setCqlVector(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setCqlVector(int, com.datastax.oss.driver.api.core.data.CqlVector)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setCqlVector(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.class.removed", + "old": "class 
com.datastax.oss.driver.api.core.type.CqlVectorType", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method com.datastax.oss.driver.api.core.type.CqlVectorType com.datastax.oss.driver.api.core.type.DataTypes::vectorOf(com.datastax.oss.driver.api.core.type.DataType, int)", + "new": "method com.datastax.oss.driver.api.core.type.VectorType com.datastax.oss.driver.api.core.type.DataTypes::vectorOf(com.datastax.oss.driver.api.core.type.DataType, int)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.parameterTypeChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(===com.datastax.oss.driver.api.core.type.CqlVectorType===, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(===com.datastax.oss.driver.api.core.type.VectorType===, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.CqlVectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "Refactoring in JAVA-3061" + }, + { + "code": "java.method.removed", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> 
com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "justification": "Refactoring in JAVA-3061" + } ] } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java deleted file mode 100644 index c34cc38a10a..00000000000 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright DataStax, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package com.datastax.oss.driver.api.core.data; - -import com.datastax.oss.driver.shaded.guava.common.base.Joiner; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; -import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; -import java.util.Arrays; - -/** An n-dimensional vector defined in CQL */ -public class CqlVector { - - private final ImmutableList values; - - private CqlVector(ImmutableList values) { - this.values = values; - } - - public static Builder builder() { - return new Builder(); - } - - public Iterable getValues() { - return values; - } - - @Override - public boolean equals(Object o) { - if (o == this) { - return true; - } else if (o instanceof CqlVector) { - CqlVector that = (CqlVector) o; - return this.values.equals(that.values); - } else { - return false; - } - } - - @Override - public int hashCode() { - return Arrays.hashCode(values.toArray()); - } - - @Override - public String toString() { - - String contents = Joiner.on(", ").join(this.values); - return "CqlVector{" + contents + '}'; - } - - public static class Builder { - - private ImmutableList.Builder listBuilder; - - private Builder() { - listBuilder = new ImmutableList.Builder(); - } - - public Builder add(T element) { - listBuilder.add(element); - return this; - } - - public Builder add(T... 
elements) { - listBuilder.addAll(Iterators.forArray(elements)); - return this; - } - - public Builder addAll(Iterable iter) { - listBuilder.addAll(iter); - return this; - } - - public CqlVector build() { - return new CqlVector(listBuilder.build()); - } - } -} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java index 7d2ea3a7f8b..6c6cf95a568 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java @@ -529,8 +529,9 @@ default CqlDuration getCqlDuration(@NonNull CqlIdentifier id) { * @throws IllegalArgumentException if the id is invalid. */ @Nullable - default CqlVector getCqlVector(@NonNull CqlIdentifier id) { - return getCqlVector(firstIndexOf(id)); + default List getVector( + @NonNull CqlIdentifier id, @NonNull Class elementsClass) { + return getVector(firstIndexOf(id), elementsClass); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java index 0da60bef285..a805342defc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java @@ -444,8 +444,8 @@ default CqlDuration getCqlDuration(int i) { * @throws IndexOutOfBoundsException if the index is invalid. 
*/ @Nullable - default CqlVector getCqlVector(int i) { - return get(i, CqlVector.class); + default List getVector(int i, @NonNull Class elementsClass) { + return get(i, GenericType.listOf(elementsClass)); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java index 40e5532f85a..3214994c04a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java @@ -525,8 +525,9 @@ default CqlDuration getCqlDuration(@NonNull String name) { * @throws IllegalArgumentException if the name is invalid. */ @Nullable - default CqlVector getCqlVector(@NonNull String name) { - return getCqlVector(firstIndexOf(name)); + default List getVector( + @NonNull String name, @NonNull Class elementsClass) { + return getList(firstIndexOf(name), elementsClass); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index 58eb7098028..3c17f0cb6f1 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -560,7 +560,7 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) } /** - * Sets the value for all occurrences of {@code id} to the provided duration. + * Sets the value for all occurrences of {@code id} to the provided {@code vector}. * *

          By default, this works with CQL type {@code vector}. * @@ -571,10 +571,13 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) */ @NonNull @CheckReturnValue - default SelfT setCqlVector(@NonNull CqlIdentifier id, @Nullable CqlVector v) { + default SelfT setVector( + @NonNull CqlIdentifier id, + @Nullable List v, + @NonNull Class elementsClass) { SelfT result = null; for (Integer i : allIndicesOf(id)) { - result = (result == null ? this : result).setCqlVector(i, v); + result = (result == null ? this : result).setVector(i, v, elementsClass); } assert result != null; // allIndices throws if there are no results return result; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java index bdee1de7b6f..52bc92d4c09 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java @@ -415,7 +415,7 @@ default SelfT setCqlDuration(int i, @Nullable CqlDuration v) { } /** - * Sets the {@code i}th value to the provided duration. + * Sets the {@code i}th value to the provided vector. * *

          By default, this works with CQL type {@code vector}. * @@ -423,8 +423,9 @@ default SelfT setCqlDuration(int i, @Nullable CqlDuration v) { */ @NonNull @CheckReturnValue - default SelfT setCqlVector(int i, @Nullable CqlVector v) { - return set(i, v, CqlVector.class); + default SelfT setVector( + int i, @Nullable List v, @NonNull Class elementsClass) { + return set(i, v, GenericType.listOf(elementsClass)); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index 26e5340bdce..559ad40cbff 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -559,7 +559,7 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { } /** - * Sets the value for all occurrences of {@code name} to the provided duration. + * Sets the value for all occurrences of {@code name} to the provided vector. * *

          By default, this works with CQL type {@code vector}. * @@ -570,10 +570,11 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { */ @NonNull @CheckReturnValue - default SelfT setCqlVector(@NonNull String name, @Nullable CqlVector v) { + default SelfT setVector( + @NonNull String name, @Nullable List v, @NonNull Class elementsClass) { SelfT result = null; for (Integer i : allIndicesOf(name)) { - result = (result == null ? this : result).setCqlVector(i, v); + result = (result == null ? this : result).setVector(i, v, elementsClass); } assert result != null; // allIndices throws if there are no results return result; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/ContainerType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/ContainerType.java new file mode 100644 index 00000000000..79c16b25e05 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/ContainerType.java @@ -0,0 +1,28 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.type; + +import edu.umd.cs.findbugs.annotations.NonNull; + +/** + * Representation of a type which "contains" some other type. This might be a collection type or it + * could be some other kind of container; the term is deliberately left somewhat vague. 
+ */ +public interface ContainerType { + + @NonNull + DataType getElementType(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java index f8cc7042516..6f28ceda45a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.internal.core.type.DefaultMapType; import com.datastax.oss.driver.internal.core.type.DefaultSetType; import com.datastax.oss.driver.internal.core.type.DefaultTupleType; +import com.datastax.oss.driver.internal.core.type.DefaultVectorType; import com.datastax.oss.driver.internal.core.type.PrimitiveType; import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -65,14 +66,19 @@ public static DataType custom(@NonNull String className) { if (className.equals("org.apache.cassandra.db.marshal.DurationType")) return DURATION; /* Vector support is currently implemented as a custom type but is also parameterized */ - if (className.startsWith(CqlVectorType.CQLVECTOR_CLASS_NAME)) { + if (className.startsWith(DefaultVectorType.VECTOR_CLASS_NAME)) { List params = paramSplitter.splitToList( className.substring( - CqlVectorType.CQLVECTOR_CLASS_NAME.length() + 1, className.length() - 1)); + DefaultVectorType.VECTOR_CLASS_NAME.length() + 1, className.length() - 1)); DataType subType = classNameParser.parse(params.get(0), AttachmentPoint.NONE); int dimensions = Integer.parseInt(params.get(1)); - return new CqlVectorType(subType, dimensions); + if (dimensions <= 0) { + throw new IllegalArgumentException( + String.format( + "Request to create vector of size %d, size must be positive", dimensions)); + } + return new DefaultVectorType(subType, dimensions); } return new DefaultCustomType(className); } @@ -135,7 +141,7 @@ public 
static TupleType tupleOf(@NonNull DataType... componentTypes) { return new DefaultTupleType(ImmutableList.copyOf(Arrays.asList(componentTypes))); } - public static CqlVectorType vectorOf(DataType subtype, int dimensions) { - return new CqlVectorType(subtype, dimensions); + public static VectorType vectorOf(DataType subtype, int dimensions) { + return new DefaultVectorType(subtype, dimensions); } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/ListType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/ListType.java index 1bafb1693d7..3ea946387fb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/ListType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/ListType.java @@ -18,10 +18,7 @@ import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; -public interface ListType extends DataType { - - @NonNull - DataType getElementType(); +public interface ListType extends DataType, ContainerType { boolean isFrozen(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/SetType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/SetType.java index eadd0a702e3..c4bb43c1a37 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/SetType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/SetType.java @@ -18,10 +18,7 @@ import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; -public interface SetType extends DataType { - - @NonNull - DataType getElementType(); +public interface SetType extends DataType, ContainerType { boolean isFrozen(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/VectorType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/VectorType.java new file mode 100644 index 00000000000..657ea9d690f --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/VectorType.java @@ 
-0,0 +1,25 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.type; + +/** + * Type representing a Cassandra vector type as described in CEP-30. At the moment this is + * implemented as a custom type so we include the CustomType interface as well. + */ +public interface VectorType extends CustomType, ContainerType { + + int getDimensions(); +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java index 07fbf309988..e824e7f41fc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java @@ -16,21 +16,19 @@ package com.datastax.oss.driver.api.core.type.codec; import com.datastax.oss.driver.api.core.data.CqlDuration; -import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.CustomType; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.TupleType; import com.datastax.oss.driver.api.core.type.UserDefinedType; +import 
com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.internal.core.type.codec.BigIntCodec; import com.datastax.oss.driver.internal.core.type.codec.BlobCodec; import com.datastax.oss.driver.internal.core.type.codec.BooleanCodec; import com.datastax.oss.driver.internal.core.type.codec.CounterCodec; import com.datastax.oss.driver.internal.core.type.codec.CqlDurationCodec; -import com.datastax.oss.driver.internal.core.type.codec.CqlVectorCodec; import com.datastax.oss.driver.internal.core.type.codec.CustomCodec; import com.datastax.oss.driver.internal.core.type.codec.DateCodec; import com.datastax.oss.driver.internal.core.type.codec.DecimalCodec; @@ -51,6 +49,7 @@ import com.datastax.oss.driver.internal.core.type.codec.UdtCodec; import com.datastax.oss.driver.internal.core.type.codec.UuidCodec; import com.datastax.oss.driver.internal.core.type.codec.VarIntCodec; +import com.datastax.oss.driver.internal.core.type.codec.VectorCodec; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import edu.umd.cs.findbugs.annotations.NonNull; @@ -208,9 +207,9 @@ public static TypeCodec tupleOf(@NonNull TupleType cqlType) { return new TupleCodec(cqlType); } - public static TypeCodec> vectorOf( - @NonNull CqlVectorType type, @NonNull TypeCodec subtypeCodec) { - return new CqlVectorCodec( + public static TypeCodec> vectorOf( + @NonNull VectorType type, @NonNull TypeCodec subtypeCodec) { + return new VectorCodec( DataTypes.vectorOf(subtypeCodec.getCqlType(), type.getDimensions()), subtypeCodec); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index db573b3451b..a1977e39f23 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -16,7 +16,6 @@ package com.datastax.oss.driver.api.core.type.reflect; import com.datastax.oss.driver.api.core.data.CqlDuration; -import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.GettableByIndex; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; @@ -166,13 +165,6 @@ public static GenericType> mapOf( return new GenericType<>(token); } - @NonNull - public static GenericType> vectorOf(@NonNull GenericType elementType) { - TypeToken> token = - new TypeToken>() {}.where(new TypeParameter() {}, elementType.token); - return new GenericType<>(token); - } - @NonNull public static GenericType arrayOf(@NonNull Class componentType) { TypeToken token = diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java index 76a2301522d..b5f36c89111 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeCqlNameParser.java @@ -16,13 +16,13 @@ package com.datastax.oss.driver.internal.core.metadata.schema.parsing; import com.datastax.oss.driver.api.core.CqlIdentifier; -import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.UserDefinedType; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.schema.ShallowUserDefinedType; import com.datastax.oss.driver.internal.core.type.DefaultTupleType; +import 
com.datastax.oss.driver.internal.core.type.DefaultVectorType; import com.datastax.oss.driver.internal.core.type.codec.ParseUtils; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -145,7 +145,7 @@ private DataType parse( } DataType subType = parse(parameters.get(0), keyspaceId, false, userTypes, context); int dimensions = Integer.parseInt(parameters.get(1)); - return new CqlVectorType(subType, dimensions); + return new DefaultVectorType(subType, dimensions); } throw new IllegalArgumentException("Could not parse type name " + toParse); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java similarity index 64% rename from core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java index 528a688451a..2517e7db30e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/CqlVectorType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java @@ -13,37 +13,46 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package com.datastax.oss.driver.api.core.type; +package com.datastax.oss.driver.internal.core.type; import com.datastax.oss.driver.api.core.detach.AttachmentPoint; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.VectorType; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Objects; +import net.jcip.annotations.Immutable; -public class CqlVectorType implements CustomType { +@Immutable +public class DefaultVectorType implements VectorType { - public static final String CQLVECTOR_CLASS_NAME = "org.apache.cassandra.db.marshal.VectorType"; + public static final String VECTOR_CLASS_NAME = "org.apache.cassandra.db.marshal.VectorType"; private final DataType subtype; private final int dimensions; - public CqlVectorType(DataType subtype, int dimensions) { + public DefaultVectorType(DataType subtype, int dimensions) { this.dimensions = dimensions; this.subtype = subtype; } - public int getDimensions() { - return this.dimensions; + /* ============== ContainerType interface ============== */ + @Override + public DataType getElementType() { + return this.subtype; } - public DataType getSubtype() { - return this.subtype; + /* ============== VectorType interface ============== */ + @Override + public int getDimensions() { + return this.dimensions; } + /* ============== CustomType interface ============== */ @NonNull @Override public String getClassName() { - return CQLVECTOR_CLASS_NAME; + return VECTOR_CLASS_NAME; } @NonNull @@ -52,12 +61,13 @@ public String asCql(boolean includeFrozen, boolean pretty) { return String.format("'%s(%d)'", getClassName(), getDimensions()); } + /* ============== General class implementation ============== */ @Override public boolean equals(Object o) { if (o == this) { return true; - } else if (o instanceof CqlVectorType) { - CqlVectorType that = (CqlVectorType) o; + } else if (o instanceof DefaultVectorType) { + DefaultVectorType that = (DefaultVectorType) o; return 
that.subtype.equals(this.subtype) && that.dimensions == this.dimensions; } else { return false; @@ -71,7 +81,7 @@ public int hashCode() { @Override public String toString() { - return String.format("CqlVector(%s, %d)", getSubtype(), getDimensions()); + return String.format("Vector(%s, %d)", getElementType(), getDimensions()); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java similarity index 62% rename from core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java rename to core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java index 4b739862d33..75b3e46ddfd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java @@ -16,35 +16,37 @@ package com.datastax.oss.driver.internal.core.type.codec; import com.datastax.oss.driver.api.core.ProtocolVersion; -import com.datastax.oss.driver.api.core.data.CqlVector; -import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.shaded.guava.common.base.Splitter; -import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import com.datastax.oss.driver.shaded.guava.common.collect.Streams; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Iterator; +import java.util.List; +import java.util.NoSuchElementException; +import 
java.util.stream.Collectors; -public class CqlVectorCodec implements TypeCodec> { +public class VectorCodec implements TypeCodec> { - private final CqlVectorType cqlType; - private final GenericType> javaType; + private final VectorType cqlType; + private final GenericType> javaType; private final TypeCodec subtypeCodec; - public CqlVectorCodec(CqlVectorType cqlType, TypeCodec subtypeCodec) { + public VectorCodec(VectorType cqlType, TypeCodec subtypeCodec) { this.cqlType = cqlType; this.subtypeCodec = subtypeCodec; - this.javaType = GenericType.vectorOf(subtypeCodec.getJavaType()); + this.javaType = GenericType.listOf(subtypeCodec.getJavaType()); } @NonNull @Override - public GenericType> getJavaType() { + public GenericType> getJavaType() { return this.javaType; } @@ -57,15 +59,34 @@ public DataType getCqlType() { @Nullable @Override public ByteBuffer encode( - @Nullable CqlVector value, @NonNull ProtocolVersion protocolVersion) { + @Nullable List value, @NonNull ProtocolVersion protocolVersion) { if (value == null || cqlType.getDimensions() <= 0) { return null; } ByteBuffer[] valueBuffs = new ByteBuffer[cqlType.getDimensions()]; - Iterator values = value.getValues().iterator(); + Iterator values = value.iterator(); int allValueBuffsSize = 0; for (int i = 0; i < cqlType.getDimensions(); ++i) { - ByteBuffer valueBuff = this.subtypeCodec.encode(values.next(), protocolVersion); + ByteBuffer valueBuff; + SubtypeT valueObj; + + try { + valueObj = values.next(); + } catch (NoSuchElementException nsee) { + throw new IllegalArgumentException( + String.format( + "Not enough elements; must provide elements for %d dimensions", + cqlType.getDimensions())); + } + + try { + valueBuff = this.subtypeCodec.encode(valueObj, protocolVersion); + } catch (ClassCastException e) { + throw new IllegalArgumentException("Invalid type for element: " + valueObj.getClass()); + } + if (valueBuff == null) { + throw new NullPointerException("Vector elements cannot encode to CQL NULL"); + } 
allValueBuffsSize += valueBuff.limit(); valueBuff.rewind(); valueBuffs[i] = valueBuff; @@ -82,7 +103,7 @@ public ByteBuffer encode( @Nullable @Override - public CqlVector decode( + public List decode( @Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { if (bytes == null || bytes.remaining() == 0) { return null; @@ -101,35 +122,38 @@ Elements should at least precede themselves with their size (along the lines of cqlType.getDimensions(), bytes.remaining())); } - ImmutableList.Builder builder = ImmutableList.builder(); + List rv = new ArrayList(cqlType.getDimensions()); for (int i = 0; i < cqlType.getDimensions(); ++i) { ByteBuffer slice = bytes.slice(); slice.limit(elementSize); - builder.add(this.subtypeCodec.decode(slice, protocolVersion)); + rv.add(this.subtypeCodec.decode(slice, protocolVersion)); bytes.position(bytes.position() + elementSize); } /* Restore the input ByteBuffer to its original state */ bytes.rewind(); - return CqlVector.builder().addAll(builder.build()).build(); + return rv; } @NonNull @Override - public String format(@Nullable CqlVector value) { - return value == null ? "NULL" : Iterables.toString(value.getValues()); + public String format(@Nullable List value) { + return value == null ? "NULL" : Iterables.toString(value); } @Nullable @Override - public CqlVector parse(@Nullable String value) { - if (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) return null; - - ImmutableList values = - Streams.stream(Splitter.on(", ").split(value.substring(1, value.length() - 1))) - .map(subtypeCodec::parse) - .collect(ImmutableList.toImmutableList()); - return CqlVector.builder().addAll(values).build(); + public List parse(@Nullable String value) { + return (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) + ? 
null + : this.from(value); + } + + private List from(@Nullable String value) { + + return Streams.stream(Splitter.on(", ").split(value.substring(1, value.length() - 1))) + .map(subtypeCodec::parse) + .collect(Collectors.toCollection(ArrayList::new)); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index 18b1f55e106..ca282c3e355 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -16,18 +16,9 @@ package com.datastax.oss.driver.internal.core.type.codec.registry; import com.datastax.oss.driver.api.core.data.CqlDuration; -import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.core.type.CqlVectorType; -import com.datastax.oss.driver.api.core.type.CustomType; -import com.datastax.oss.driver.api.core.type.DataType; -import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.ListType; -import com.datastax.oss.driver.api.core.type.MapType; -import com.datastax.oss.driver.api.core.type.SetType; -import com.datastax.oss.driver.api.core.type.TupleType; -import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.*; import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; @@ -342,7 +333,7 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c } GenericType elementType = inspectType( - firstElement, cqlType == null ? 
null : ((ListType) cqlType).getElementType()); + firstElement, cqlType == null ? null : ((ContainerType) cqlType).getElementType()); return GenericType.listOf(elementType); } } else if (value instanceof Set) { @@ -547,6 +538,18 @@ protected DataType inferCqlTypeFromValue(@NonNull Object value) { return null; } + private TypeCodec getElementCodec( + ContainerType cqlType, TypeToken token, boolean isJavaCovariant) { + + DataType elementCqlType = cqlType.getElementType(); + if (token.getType() instanceof ParameterizedType) { + Type[] typeArguments = ((ParameterizedType) token.getType()).getActualTypeArguments(); + GenericType elementJavaType = GenericType.of(typeArguments[0]); + return uncheckedCast(codecFor(elementCqlType, elementJavaType, isJavaCovariant)); + } + return codecFor(elementCqlType); + } + // Try to create a codec when we haven't found it in the cache @NonNull protected TypeCodec createCodec( @@ -561,26 +564,12 @@ protected TypeCodec createCodec( } else { // Both non-null TypeToken token = javaType.__getToken(); if (cqlType instanceof ListType && List.class.isAssignableFrom(token.getRawType())) { - DataType elementCqlType = ((ListType) cqlType).getElementType(); - TypeCodec elementCodec; - if (token.getType() instanceof ParameterizedType) { - Type[] typeArguments = ((ParameterizedType) token.getType()).getActualTypeArguments(); - GenericType elementJavaType = GenericType.of(typeArguments[0]); - elementCodec = uncheckedCast(codecFor(elementCqlType, elementJavaType, isJavaCovariant)); - } else { - elementCodec = codecFor(elementCqlType); - } + TypeCodec elementCodec = + getElementCodec((ContainerType) cqlType, token, isJavaCovariant); return TypeCodecs.listOf(elementCodec); } else if (cqlType instanceof SetType && Set.class.isAssignableFrom(token.getRawType())) { - DataType elementCqlType = ((SetType) cqlType).getElementType(); - TypeCodec elementCodec; - if (token.getType() instanceof ParameterizedType) { - Type[] typeArguments = ((ParameterizedType) 
token.getType()).getActualTypeArguments(); - GenericType elementJavaType = GenericType.of(typeArguments[0]); - elementCodec = uncheckedCast(codecFor(elementCqlType, elementJavaType, isJavaCovariant)); - } else { - elementCodec = codecFor(elementCqlType); - } + TypeCodec elementCodec = + getElementCodec((ContainerType) cqlType, token, isJavaCovariant); return TypeCodecs.setOf(elementCodec); } else if (cqlType instanceof MapType && Map.class.isAssignableFrom(token.getRawType())) { DataType keyCqlType = ((MapType) cqlType).getKeyType(); @@ -604,11 +593,10 @@ protected TypeCodec createCodec( } else if (cqlType instanceof UserDefinedType && UdtValue.class.isAssignableFrom(token.getRawType())) { return TypeCodecs.udtOf((UserDefinedType) cqlType); - } else if (cqlType instanceof CqlVectorType - && CqlVector.class.isAssignableFrom(token.getRawType())) { - CqlVectorType vectorType = (CqlVectorType) cqlType; - TypeCodec subtypeCodec = codecFor(vectorType.getSubtype()); - return TypeCodecs.vectorOf((CqlVectorType) cqlType, subtypeCodec); + } else if (cqlType instanceof VectorType && List.class.isAssignableFrom(token.getRawType())) { + VectorType vectorType = (VectorType) cqlType; + TypeCodec elementCodec = getElementCodec(vectorType, token, isJavaCovariant); + return TypeCodecs.vectorOf(vectorType, elementCodec); } else if (cqlType instanceof CustomType && ByteBuffer.class.isAssignableFrom(token.getRawType())) { return TypeCodecs.custom(cqlType); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java similarity index 62% rename from core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java rename to core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java index eac142f6ebe..9b463dcb53e 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/CqlVectorCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java @@ -18,23 +18,25 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import com.datastax.oss.driver.api.core.data.CqlVector; -import com.datastax.oss.driver.api.core.type.CqlVectorType; import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.DefaultVectorType; +import com.google.common.collect.Lists; +import java.util.List; import org.junit.Test; -public class CqlVectorCodecTest extends CodecTestBase> { +public class VectorCodecTest extends CodecTestBase> { - private static final CqlVector VECTOR = CqlVector.builder().add(1.0f, 2.5f).build(); + private static final List VECTOR = Lists.newArrayList(1.0f, 2.5f); private static final String VECTOR_HEX_STRING = "0x" + "3f800000" + "40200000"; private static final String FORMATTED_VECTOR = "[1.0, 2.5]"; - public CqlVectorCodecTest() { - CqlVectorType vectorType = DataTypes.vectorOf(DataTypes.FLOAT, 2); + public VectorCodecTest() { + VectorType vectorType = DataTypes.vectorOf(DataTypes.FLOAT, 2); this.codec = TypeCodecs.vectorOf(vectorType, TypeCodecs.FLOAT); } @@ -44,6 +46,26 @@ public void should_encode() { assertThat(encode(null)).isNull(); } + /** Too few eleements will cause an exception, extra elements will be silently ignored */ + @Test + public void should_throw_on_encode_with_too_few_elements() { + assertThatThrownBy(() -> encode(VECTOR.subList(0, 1))) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_throw_on_encode_with_empty_list() { + assertThatThrownBy(() -> 
encode(Lists.newArrayList())) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_encode_with_too_many_elements() { + List doubleVector = Lists.newArrayList(VECTOR); + doubleVector.addAll(VECTOR); + assertThat(encode(doubleVector)).isEqualTo(VECTOR_HEX_STRING); + } + @Test public void should_decode() { assertThat(decode(VECTOR_HEX_STRING)).isEqualTo(VECTOR); @@ -52,7 +74,7 @@ public void should_decode() { } @Test - public void decode_throws_if_too_few_bytes() { + public void should_throw_on_decode_if_too_few_bytes() { // Dropping 4 bytes would knock off exactly 1 float, anything less than that would be something // we couldn't parse a float out of for (int i = 1; i <= 3; ++i) { @@ -80,30 +102,30 @@ public void should_parse() { @Test public void should_accept_data_type() { - assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 2))).isTrue(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 2))).isTrue(); assertThat(codec.accepts(DataTypes.INT)).isFalse(); } @Test public void should_accept_vector_type_correct_dimension_only() { - assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 0))).isFalse(); - assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 1))).isFalse(); - assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, 2))).isTrue(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 0))).isFalse(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 1))).isFalse(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 2))).isTrue(); for (int i = 3; i < 1000; ++i) { - assertThat(codec.accepts(new CqlVectorType(DataTypes.FLOAT, i))).isFalse(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, i))).isFalse(); } } @Test public void should_accept_generic_type() { - assertThat(codec.accepts(GenericType.vectorOf(GenericType.FLOAT))).isTrue(); - assertThat(codec.accepts(GenericType.vectorOf(GenericType.INTEGER))).isFalse(); + 
assertThat(codec.accepts(GenericType.listOf(GenericType.FLOAT))).isTrue(); + assertThat(codec.accepts(GenericType.listOf(GenericType.INTEGER))).isFalse(); assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); } @Test public void should_accept_raw_type() { - assertThat(codec.accepts(CqlVector.class)).isTrue(); + assertThat(codec.accepts(List.class)).isTrue(); assertThat(codec.accepts(Integer.class)).isFalse(); } From 7d01a5b424ac8898f2e3ba1c598cbaf505b9b4c9 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Fri, 16 Jun 2023 17:32:40 -0700 Subject: [PATCH 833/979] JAVA-3068: Use fully qualified table CQL in should_not_allow_unset_value_when_protocol_less_than_v4 to work around intermittent server error --- .../oss/driver/core/cql/BatchStatementIT.java | 15 +++++++++++---- .../oss/driver/core/cql/BoundStatementCcmIT.java | 15 +++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java index 358ed2b0f02..cc960b6c27c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; @@ -338,16 +339,20 @@ public void should_fail_counter_batch_with_non_counter_increment() { sessionRule.session().execute(batchStatement); } - @Test(expected = IllegalStateException.class) + @Test public void should_not_allow_unset_value_when_protocol_less_than_v4() { // CREATE TABLE test (k0 text, k1 int, v int, PRIMARY KEY (k0, k1)) DriverConfigLoader loader = 
SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") .build(); - try (CqlSession v3Session = SessionUtils.newSession(ccmRule, sessionRule.keyspace(), loader)) { + try (CqlSession v3Session = SessionUtils.newSession(ccmRule, loader)) { + // Intentionally use fully qualified table here to avoid warnings as these are not supported + // by v3 protocol version, see JAVA-3068 PreparedStatement prepared = - v3Session.prepare("INSERT INTO test (k0, k1, v) values (?, ?, ?)"); + v3Session.prepare( + String.format( + "INSERT INTO %s.test (k0, k1, v) values (?, ?, ?)", sessionRule.keyspace())); BatchStatementBuilder builder = BatchStatement.builder(DefaultBatchType.LOGGED); builder.addStatements( @@ -361,7 +366,9 @@ public void should_not_allow_unset_value_when_protocol_less_than_v4() { .unset(2) .build()); - v3Session.execute(builder.build()); + assertThatThrownBy(() -> v3Session.execute(builder.build())) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Unset value at index"); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 75748d37a6c..106b2823dc1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.core.cql; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assumptions.assumeThat; import com.datastax.oss.driver.api.core.ConsistencyLevel; @@ -126,19 +127,25 @@ public void setupSchema() { .build()); } - @Test(expected = IllegalStateException.class) + @Test public void should_not_allow_unset_value_when_protocol_less_than_v4() { DriverConfigLoader loader = 
SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") .build(); - try (CqlSession v3Session = SessionUtils.newSession(ccmRule, sessionRule.keyspace(), loader)) { - PreparedStatement prepared = v3Session.prepare("INSERT INTO test2 (k, v0) values (?, ?)"); + try (CqlSession v3Session = SessionUtils.newSession(ccmRule, loader)) { + // Intentionally use fully qualified table here to avoid warnings as these are not supported + // by v3 protocol version, see JAVA-3068 + PreparedStatement prepared = + v3Session.prepare( + String.format("INSERT INTO %s.test2 (k, v0) values (?, ?)", sessionRule.keyspace())); BoundStatement boundStatement = prepared.boundStatementBuilder().setString(0, name.getMethodName()).unset(1).build(); - v3Session.execute(boundStatement); + assertThatThrownBy(() -> v3Session.execute(boundStatement)) + .isInstanceOf(IllegalStateException.class) + .hasMessageContaining("Unset value at index"); } } From dec8ac9f8d8f2b19d9675310dbda1edf47358874 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Thu, 15 Jun 2023 00:10:24 -0500 Subject: [PATCH 834/979] JAVA-3062: Figure out a better solution for PreparedStatementIT tests around JAVA-3058 PreparedStatementIT.java: - Make tests resistant to JVM GC clearing items from prepared statement cache mid-test PreparedStatementCachingIT.java: - Prepared statement tests related to caching - Uses custom session builder and driver context to use strong statement cache - Move to IsolatedTests category because it uses system properties - Consolidate to single invalidationResultSetTest method - Verify exact set of types change events seen - Best-effort check no duplicated type-change/cache-removal events were fired SessionUtils.java - SESSION_BUILDER_CLASS_PROPERTY property should be read dynamically --- .../core/cql/PreparedStatementCachingIT.java | 403 ++++++++++++++++++ .../driver/core/cql/PreparedStatementIT.java | 180 ++------ .../api/testinfra/session/SessionUtils.java | 21 +- 3 files 
changed, 446 insertions(+), 158 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java new file mode 100644 index 00000000000..879d4264734 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java @@ -0,0 +1,403 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.codahale.metrics.Gauge; +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.cql.PrepareRequest; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; +import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; +import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareSyncProcessor; +import com.datastax.oss.driver.internal.core.metadata.schema.events.TypeChangeEvent; +import com.datastax.oss.driver.internal.core.session.BuiltInRequestProcessors; +import com.datastax.oss.driver.internal.core.session.RequestProcessor; +import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; +import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; +import com.datastax.oss.driver.shaded.guava.common.cache.RemovalListener; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.util.List; 
+import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +// These tests must be isolated because setup modifies SessionUtils.SESSION_BUILDER_CLASS_PROPERTY +@Category(IsolatedTests.class) +public class PreparedStatementCachingIT { + + private CustomCcmRule ccmRule = CustomCcmRule.builder().build(); + + private SessionRule sessionRule = + SessionRule.builder(ccmRule) + .withConfigLoader( + SessionUtils.configLoaderBuilder() + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 2) + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .build()) + .build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + private static class PreparedStatementRemovalEvent { + + private final ByteBuffer queryId; + + public PreparedStatementRemovalEvent(ByteBuffer queryId) { + this.queryId = queryId; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || !(o instanceof PreparedStatementRemovalEvent)) return false; + PreparedStatementRemovalEvent that = (PreparedStatementRemovalEvent) o; + return Objects.equals(queryId, that.queryId); + } + + @Override + public int hashCode() { + return Objects.hash(queryId); + } + + @Override + public String toString() { + return "PreparedStatementRemovalEvent{" + "queryId=" + queryId + '}'; + } + } + + private static class TestCqlPrepareAsyncProcessor 
extends CqlPrepareAsyncProcessor { + + private static final Logger LOG = + LoggerFactory.getLogger(PreparedStatementCachingIT.TestCqlPrepareAsyncProcessor.class); + + private static RemovalListener> + buildCacheRemoveCallback(@NonNull Optional context) { + return (evt) -> { + try { + CompletableFuture future = evt.getValue(); + ByteBuffer queryId = Uninterruptibles.getUninterruptibly(future).getId(); + context.ifPresent( + ctx -> ctx.getEventBus().fire(new PreparedStatementRemovalEvent(queryId))); + } catch (Exception e) { + LOG.error("Unable to register removal handler", e); + } + }; + } + + public TestCqlPrepareAsyncProcessor(@NonNull Optional context) { + // Default CqlPrepareAsyncProcessor uses weak values here as well. We avoid doing so + // to prevent cache entries from unexpectedly disappearing mid-test. + super( + CacheBuilder.newBuilder().removalListener(buildCacheRemoveCallback(context)).build(), + context); + } + } + + private static class TestDefaultDriverContext extends DefaultDriverContext { + public TestDefaultDriverContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + super(configLoader, programmaticArguments); + } + + @Override + protected RequestProcessorRegistry buildRequestProcessorRegistry() { + // Re-create the processor cache to insert the TestCqlPrepareAsyncProcessor with it's strong + // prepared statement cache, see JAVA-3062 + List> processors = + BuiltInRequestProcessors.createDefaultProcessors(this); + processors.removeIf((processor) -> processor instanceof CqlPrepareAsyncProcessor); + processors.removeIf((processor) -> processor instanceof CqlPrepareSyncProcessor); + CqlPrepareAsyncProcessor asyncProcessor = new TestCqlPrepareAsyncProcessor(Optional.of(this)); + processors.add(2, asyncProcessor); + processors.add(3, new CqlPrepareSyncProcessor(asyncProcessor)); + return new RequestProcessorRegistry( + getSessionName(), processors.toArray(new RequestProcessor[0])); + } + } + + private static class 
TestSessionBuilder extends SessionBuilder { + + @Override + protected Object wrap(@NonNull CqlSession defaultSession) { + return defaultSession; + } + + @Override + protected DriverContext buildContext( + DriverConfigLoader configLoader, ProgrammaticArguments programmaticArguments) { + return new TestDefaultDriverContext(configLoader, programmaticArguments); + } + } + + @BeforeClass + public static void setup() { + System.setProperty( + SessionUtils.SESSION_BUILDER_CLASS_PROPERTY, PreparedStatementCachingIT.class.getName()); + } + + @AfterClass + public static void teardown() { + System.clearProperty(SessionUtils.SESSION_BUILDER_CLASS_PROPERTY); + } + + public static SessionBuilder builder() { + return new TestSessionBuilder(); + } + + private void invalidationResultSetTest( + Consumer setupTestSchema, Set expectedChangedTypes) { + invalidationTestInner( + setupTestSchema, + "select f from test_table_1 where e = ?", + "select h from test_table_2 where g = ?", + expectedChangedTypes); + } + + private void invalidationVariableDefsTest( + Consumer setupTestSchema, + boolean isCollection, + Set expectedChangedTypes) { + String condition = isCollection ? "contains ?" 
: "= ?"; + invalidationTestInner( + setupTestSchema, + String.format("select e from test_table_1 where f %s allow filtering", condition), + String.format("select g from test_table_2 where h %s allow filtering", condition), + expectedChangedTypes); + } + + private void invalidationTestInner( + Consumer setupTestSchema, + String preparedStmtQueryType1, + String preparedStmtQueryType2, + Set expectedChangedTypes) { + + try (CqlSession session = sessionWithCacheSizeMetric()) { + + assertThat(getPreparedCacheSize(session)).isEqualTo(0); + setupTestSchema.accept(session); + + session.prepare(preparedStmtQueryType1); + ByteBuffer queryId2 = session.prepare(preparedStmtQueryType2).getId(); + assertThat(getPreparedCacheSize(session)).isEqualTo(2); + + CountDownLatch preparedStmtCacheRemoveLatch = new CountDownLatch(1); + CountDownLatch typeChangeEventLatch = new CountDownLatch(expectedChangedTypes.size()); + + DefaultDriverContext ctx = (DefaultDriverContext) session.getContext(); + Map changedTypes = new ConcurrentHashMap<>(); + AtomicReference> removedQueryIds = + new AtomicReference<>(Optional.empty()); + AtomicReference> typeChangeEventError = + new AtomicReference<>(Optional.empty()); + AtomicReference> removedQueryEventError = + new AtomicReference<>(Optional.empty()); + ctx.getEventBus() + .register( + TypeChangeEvent.class, + (e) -> { + // expect one event per type changed and for every parent type that nests it + if (Boolean.TRUE.equals( + changedTypes.putIfAbsent(e.oldType.getName().toString(), true))) { + // store an error if we see duplicate change event + // any non-empty error will fail the test so it's OK to do this multiple times + typeChangeEventError.set(Optional.of("Duplicate type change event " + e)); + } + typeChangeEventLatch.countDown(); + }); + ctx.getEventBus() + .register( + PreparedStatementRemovalEvent.class, + (e) -> { + if (!removedQueryIds.compareAndSet(Optional.empty(), Optional.of(e.queryId))) { + // store an error if we see multiple cache 
invalidation events + // any non-empty error will fail the test so it's OK to do this multiple times + removedQueryEventError.set( + Optional.of("Unable to set reference for PS removal event")); + } + preparedStmtCacheRemoveLatch.countDown(); + }); + + // alter test_type_2 to trigger cache invalidation and above events + session.execute("ALTER TYPE test_type_2 add i blob"); + + // wait for latches and fail if they don't reach zero before timeout + assertThat( + Uninterruptibles.awaitUninterruptibly( + preparedStmtCacheRemoveLatch, 10, TimeUnit.SECONDS)) + .withFailMessage("preparedStmtCacheRemoveLatch did not trigger before timeout") + .isTrue(); + assertThat(Uninterruptibles.awaitUninterruptibly(typeChangeEventLatch, 10, TimeUnit.SECONDS)) + .withFailMessage("typeChangeEventLatch did not trigger before timeout") + .isTrue(); + + /* Okay, the latch triggered so cache processing should now be done. Let's validate :allthethings: */ + assertThat(changedTypes.keySet()).isEqualTo(expectedChangedTypes); + assertThat(removedQueryIds.get()).isNotEmpty().get().isEqualTo(queryId2); + assertThat(getPreparedCacheSize(session)).isEqualTo(1); + + // check no errors were seen in callback (and report those as fail msgs) + // if something is broken these may still succeed due to timing + // but shouldn't intermittently fail if the code is working properly + assertThat(typeChangeEventError.get()) + .withFailMessage(() -> typeChangeEventError.get().get()) + .isEmpty(); + assertThat(removedQueryEventError.get()) + .withFailMessage(() -> removedQueryEventError.get().get()) + .isEmpty(); + } + } + + Consumer setupCacheEntryTestBasic = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); + session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); + }; + + @Test + public void 
should_invalidate_cache_entry_on_basic_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestBasic, ImmutableSet.of("test_type_2")); + } + + @Test + public void should_invalidate_cache_entry_on_basic_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestBasic, false, ImmutableSet.of("test_type_2")); + } + + Consumer setupCacheEntryTestCollection = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute( + "CREATE TABLE test_table_1 (e int primary key, f list>)"); + session.execute( + "CREATE TABLE test_table_2 (g int primary key, h list>)"); + }; + + @Test + public void should_invalidate_cache_entry_on_collection_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestCollection, ImmutableSet.of("test_type_2")); + } + + @Test + public void should_invalidate_cache_entry_on_collection_udt_change_variable_defs() { + invalidationVariableDefsTest( + setupCacheEntryTestCollection, true, ImmutableSet.of("test_type_2")); + } + + Consumer setupCacheEntryTestTuple = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + session.execute( + "CREATE TABLE test_table_1 (e int primary key, f tuple)"); + session.execute( + "CREATE TABLE test_table_2 (g int primary key, h tuple)"); + }; + + @Test + public void should_invalidate_cache_entry_on_tuple_udt_change_result_set() { + invalidationResultSetTest(setupCacheEntryTestTuple, ImmutableSet.of("test_type_2")); + } + + @Test + public void should_invalidate_cache_entry_on_tuple_udt_change_variable_defs() { + invalidationVariableDefsTest(setupCacheEntryTestTuple, false, ImmutableSet.of("test_type_2")); + } + + Consumer setupCacheEntryTestNested = + (session) -> { + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TYPE test_type_2 (c int, d text)"); + 
session.execute("CREATE TYPE test_type_3 (e frozen, f int)"); + session.execute("CREATE TYPE test_type_4 (g int, h frozen)"); + session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); + session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); + }; + + @Test + public void should_invalidate_cache_entry_on_nested_udt_change_result_set() { + invalidationResultSetTest( + setupCacheEntryTestNested, ImmutableSet.of("test_type_2", "test_type_4")); + } + + @Test + public void should_invalidate_cache_entry_on_nested_udt_change_variable_defs() { + invalidationVariableDefsTest( + setupCacheEntryTestNested, false, ImmutableSet.of("test_type_2", "test_type_4")); + } + + /* ========================= Infrastructure copied from PreparedStatementIT ========================= */ + private CqlSession sessionWithCacheSizeMetric() { + return SessionUtils.newSession( + ccmRule, + sessionRule.keyspace(), + SessionUtils.configLoaderBuilder() + .withInt(DefaultDriverOption.REQUEST_PAGE_SIZE, 2) + .withDuration(DefaultDriverOption.REQUEST_TIMEOUT, Duration.ofSeconds(30)) + .withStringList( + DefaultDriverOption.METRICS_SESSION_ENABLED, + ImmutableList.of(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath())) + .build()); + } + + @SuppressWarnings("unchecked") + private static long getPreparedCacheSize(CqlSession session) { + return session + .getMetrics() + .flatMap(metrics -> metrics.getSessionMetric(DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE)) + .map(metric -> ((Gauge) metric).getValue()) + .orElseThrow( + () -> + new AssertionError( + "Could not access metric " + + DefaultSessionMetric.CQL_PREPARED_CACHE_SIZE.getPath())); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index c3494a6ee96..490158980fb 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -41,20 +41,14 @@ import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; -import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; -import com.datastax.oss.driver.internal.core.metadata.schema.events.TypeChangeEvent; import com.datastax.oss.driver.internal.core.metadata.token.DefaultTokenMap; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; -import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import com.datastax.oss.protocol.internal.util.Bytes; import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; import java.time.Duration; import java.util.concurrent.CompletionStage; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.function.Consumer; import junit.framework.TestCase; import org.assertj.core.api.AbstractThrowableAssert; import org.junit.Before; @@ -377,9 +371,15 @@ public void should_return_same_instance_when_repreparing_query() { assertThat(getPreparedCacheSize(session)).isEqualTo(0); String query = "SELECT * FROM prepared_statement_test WHERE a = ?"; - // When - PreparedStatement preparedStatement1 = session.prepare(query); - PreparedStatement preparedStatement2 = session.prepare(query); + // Send prepare requests, keep hold of CompletionStage objects to prevent them being removed + // from CqlPrepareAsyncProcessor#cache, see JAVA-3062 + CompletionStage preparedStatement1Future = session.prepareAsync(query); + CompletionStage preparedStatement2Future = session.prepareAsync(query); + + PreparedStatement 
preparedStatement1 = + CompletableFutures.getUninterruptibly(preparedStatement1Future); + PreparedStatement preparedStatement2 = + CompletableFutures.getUninterruptibly(preparedStatement2Future); // Then assertThat(preparedStatement1).isSameAs(preparedStatement2); @@ -394,11 +394,17 @@ public void should_create_separate_instances_for_differently_formatted_queries() // Given assertThat(getPreparedCacheSize(session)).isEqualTo(0); - // When + // Send prepare requests, keep hold of CompletionStage objects to prevent them being removed + // from CqlPrepareAsyncProcessor#cache, see JAVA-3062 + CompletionStage preparedStatement1Future = + session.prepareAsync("SELECT * FROM prepared_statement_test WHERE a = ?"); + CompletionStage preparedStatement2Future = + session.prepareAsync("select * from prepared_statement_test where a = ?"); + PreparedStatement preparedStatement1 = - session.prepare("SELECT * FROM prepared_statement_test WHERE a = ?"); + CompletableFutures.getUninterruptibly(preparedStatement1Future); PreparedStatement preparedStatement2 = - session.prepare("select * from prepared_statement_test where a = ?"); + CompletableFutures.getUninterruptibly(preparedStatement2Future); // Then assertThat(preparedStatement1).isNotSameAs(preparedStatement2); @@ -414,9 +420,17 @@ public void should_create_separate_instances_for_different_statement_parameters( SimpleStatement statement = SimpleStatement.newInstance("SELECT * FROM prepared_statement_test"); - // When - PreparedStatement preparedStatement1 = session.prepare(statement.setPageSize(1)); - PreparedStatement preparedStatement2 = session.prepare(statement.setPageSize(4)); + // Send prepare requests, keep hold of CompletionStage objects to prevent them being removed + // from CqlPrepareAsyncProcessor#cache, see JAVA-3062 + CompletionStage preparedStatement1Future = + session.prepareAsync(statement.setPageSize(1)); + CompletionStage preparedStatement2Future = + session.prepareAsync(statement.setPageSize(4)); + + 
PreparedStatement preparedStatement1 = + CompletableFutures.getUninterruptibly(preparedStatement1Future); + PreparedStatement preparedStatement2 = + CompletableFutures.getUninterruptibly(preparedStatement2Future); // Then assertThat(preparedStatement1).isNotSameAs(preparedStatement2); @@ -481,142 +495,6 @@ public void handle_id_changes_on_reprepare() { assertableReprepareAfterIdChange().doesNotThrowAnyException(); } - private void invalidationResultSetTest(Consumer createFn) { - - try (CqlSession session = sessionWithCacheSizeMetric()) { - - assertThat(getPreparedCacheSize(session)).isEqualTo(0); - createFn.accept(session); - - session.prepare("select f from test_table_1 where e = ?"); - session.prepare("select h from test_table_2 where g = ?"); - assertThat(getPreparedCacheSize(session)).isEqualTo(2); - - CountDownLatch latch = new CountDownLatch(1); - DefaultDriverContext ctx = (DefaultDriverContext) session.getContext(); - ctx.getEventBus() - .register( - TypeChangeEvent.class, - (e) -> { - assertThat(e.oldType.getName().toString()).isEqualTo("test_type_2"); - latch.countDown(); - }); - - session.execute("ALTER TYPE test_type_2 add i blob"); - Uninterruptibles.awaitUninterruptibly(latch, 2, TimeUnit.SECONDS); - - assertThat(getPreparedCacheSize(session)).isEqualTo(1); - } - } - - private void invalidationVariableDefsTest(Consumer createFn, boolean isCollection) { - - try (CqlSession session = sessionWithCacheSizeMetric()) { - - assertThat(getPreparedCacheSize(session)).isEqualTo(0); - createFn.accept(session); - - String fStr = isCollection ? "f contains ?" : "f = ?"; - session.prepare(String.format("select e from test_table_1 where %s allow filtering", fStr)); - String hStr = isCollection ? "h contains ?" 
: "h = ?"; - session.prepare(String.format("select g from test_table_2 where %s allow filtering", hStr)); - assertThat(getPreparedCacheSize(session)).isEqualTo(2); - - CountDownLatch latch = new CountDownLatch(1); - DefaultDriverContext ctx = (DefaultDriverContext) session.getContext(); - ctx.getEventBus() - .register( - TypeChangeEvent.class, - (e) -> { - assertThat(e.oldType.getName().toString()).isEqualTo("test_type_2"); - latch.countDown(); - }); - - session.execute("ALTER TYPE test_type_2 add i blob"); - Uninterruptibles.awaitUninterruptibly(latch, 2, TimeUnit.SECONDS); - - assertThat(getPreparedCacheSize(session)).isEqualTo(1); - } - } - - Consumer setupCacheEntryTestBasic = - (session) -> { - session.execute("CREATE TYPE test_type_1 (a text, b int)"); - session.execute("CREATE TYPE test_type_2 (c int, d text)"); - session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); - session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); - }; - - @Test - public void should_invalidate_cache_entry_on_basic_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestBasic); - } - - @Test - public void should_invalidate_cache_entry_on_basic_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestBasic, false); - } - - Consumer setupCacheEntryTestCollection = - (session) -> { - session.execute("CREATE TYPE test_type_1 (a text, b int)"); - session.execute("CREATE TYPE test_type_2 (c int, d text)"); - session.execute( - "CREATE TABLE test_table_1 (e int primary key, f list>)"); - session.execute( - "CREATE TABLE test_table_2 (g int primary key, h list>)"); - }; - - @Test - public void should_invalidate_cache_entry_on_collection_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestCollection); - } - - @Test - public void should_invalidate_cache_entry_on_collection_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestCollection, true); - } - - Consumer 
setupCacheEntryTestTuple = - (session) -> { - session.execute("CREATE TYPE test_type_1 (a text, b int)"); - session.execute("CREATE TYPE test_type_2 (c int, d text)"); - session.execute( - "CREATE TABLE test_table_1 (e int primary key, f tuple)"); - session.execute( - "CREATE TABLE test_table_2 (g int primary key, h tuple)"); - }; - - @Test - public void should_invalidate_cache_entry_on_tuple_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestTuple); - } - - @Test - public void should_invalidate_cache_entry_on_tuple_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestTuple, false); - } - - Consumer setupCacheEntryTestNested = - (session) -> { - session.execute("CREATE TYPE test_type_1 (a text, b int)"); - session.execute("CREATE TYPE test_type_2 (c int, d text)"); - session.execute("CREATE TYPE test_type_3 (e frozen, f int)"); - session.execute("CREATE TYPE test_type_4 (g int, h frozen)"); - session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); - session.execute("CREATE TABLE test_table_2 (g int primary key, h frozen)"); - }; - - @Test - public void should_invalidate_cache_entry_on_nested_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestNested); - } - - @Test - public void should_invalidate_cache_entry_on_nested_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestNested, false); - } - @Test public void should_infer_routing_information_when_partition_key_is_bound() { should_infer_routing_information_when_partition_key_is_bound( diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java index 3b9824698fe..bc4aa0dbb7c 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionUtils.java @@ 
-61,39 +61,46 @@ * SessionRule} provides a simpler alternative. */ public class SessionUtils { + + public static final String SESSION_BUILDER_CLASS_PROPERTY = "session.builder"; + private static final Logger LOG = LoggerFactory.getLogger(SessionUtils.class); private static final AtomicInteger keyspaceId = new AtomicInteger(); private static final String DEFAULT_SESSION_CLASS_NAME = CqlSession.class.getName(); - private static final String SESSION_BUILDER_CLASS = - System.getProperty("session.builder", DEFAULT_SESSION_CLASS_NAME); + + private static String getSessionBuilderClass() { + return System.getProperty(SESSION_BUILDER_CLASS_PROPERTY, DEFAULT_SESSION_CLASS_NAME); + } @SuppressWarnings("unchecked") public static SessionBuilder baseBuilder() { + String sessionBuilderClass = getSessionBuilderClass(); try { - Class clazz = Class.forName(SESSION_BUILDER_CLASS); + Class clazz = Class.forName(sessionBuilderClass); Method m = clazz.getMethod("builder"); return (SessionBuilder) m.invoke(null); } catch (Exception e) { LOG.warn( "Could not construct SessionBuilder from {} using builder(), using default " + "implementation.", - SESSION_BUILDER_CLASS, + sessionBuilderClass, e); return (SessionBuilder) CqlSession.builder(); } } public static ProgrammaticDriverConfigLoaderBuilder configLoaderBuilder() { + String sessionBuilderClass = getSessionBuilderClass(); try { - Class clazz = Class.forName(SESSION_BUILDER_CLASS); + Class clazz = Class.forName(sessionBuilderClass); Method m = clazz.getMethod("configLoaderBuilder"); return (ProgrammaticDriverConfigLoaderBuilder) m.invoke(null); } catch (Exception e) { - if (!SESSION_BUILDER_CLASS.equals(DEFAULT_SESSION_CLASS_NAME)) { + if (!sessionBuilderClass.equals(DEFAULT_SESSION_CLASS_NAME)) { LOG.warn( "Could not construct ProgrammaticDriverConfigLoaderBuilder from {} using " + "configLoaderBuilder(), using default implementation.", - SESSION_BUILDER_CLASS, + sessionBuilderClass, e); } return 
DriverConfigLoader.programmaticBuilder(); From 56a480620c3af81a10742b55a73483349876bae6 Mon Sep 17 00:00:00 2001 From: Chris Lin <99268912+chrislin22@users.noreply.github.com> Date: Thu, 29 Jun 2023 16:11:34 -0400 Subject: [PATCH 835/979] fxied typo on branch closing --- .github/workflows/snyk-pr-cleanup.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/snyk-pr-cleanup.yml b/.github/workflows/snyk-pr-cleanup.yml index 7cf018a59fc..e6866195dcd 100644 --- a/.github/workflows/snyk-pr-cleanup.yml +++ b/.github/workflows/snyk-pr-cleanup.yml @@ -5,7 +5,7 @@ on: types: - closed branches: - - snyk-monitor + - 4.x workflow_dispatch: jobs: From c20e2aee4afdeb20bc6fdeafaae42b67478306b4 Mon Sep 17 00:00:00 2001 From: Chris Lin <99268912+chrislin22@users.noreply.github.com> Date: Thu, 29 Jun 2023 16:36:35 -0400 Subject: [PATCH 836/979] Update .github/workflows/snyk-pr-cleanup.yml Co-authored-by: Madhavan --- .github/workflows/snyk-pr-cleanup.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/snyk-pr-cleanup.yml b/.github/workflows/snyk-pr-cleanup.yml index e6866195dcd..9c3136bef82 100644 --- a/.github/workflows/snyk-pr-cleanup.yml +++ b/.github/workflows/snyk-pr-cleanup.yml @@ -5,7 +5,7 @@ on: types: - closed branches: - - 4.x + - 4.x workflow_dispatch: jobs: From 1bb721dfb397b81c8e3fc33b738df1358de94ada Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Tue, 6 Jun 2023 18:20:51 -0700 Subject: [PATCH 837/979] JAVA-3042: Support testing against Java17 Add property 'testJavaHome' to specify a different JDK for surefire/failsafe to run tests to facilitate testing with different JDKs. 
pom.xml: - Add '--add-opens java.base/jdk.internal.util.random=ALL-UNNAMED' as maven-surefire-plugin argLine to support deep reflection for mockito, only loaded for JDK17 Dependency updates: - jacoco-maven-plugin -> 0.8.10, resolves "Error while instrumenting path/to/class" with JDK17 - maven-bundle-plugin -> 5.1.1, resolves java.util.ConcurrentModificationException [FELIX-6259] with JDK17 - blockhound-junit-platform -> 1.0.8.RELEASE, earlier version did not pick up -XX:+AllowRedefinitionToAddDeleteMethods properly Jenkinsfile: - Add matrix axis for JABBER_VERSION for each of JDK8, JDK11, JDK17 - Always run maven with JDK8, use testJavaHome to set JDK version for testing --- Jenkinsfile | 46 ++++++++++++++++++++++++------ core/pom.xml | 2 ++ integration-tests/pom.xml | 15 ++-------- mapper-runtime/pom.xml | 1 + osgi-tests/pom.xml | 2 ++ pom.xml | 59 ++++++++++++++++++++++++++++++++++++--- 6 files changed, 100 insertions(+), 25 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 1859ceb7689..3ecb70e0d30 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -18,6 +18,24 @@ def initializeEnvironment() { env.MAVEN_HOME = "${env.HOME}/.mvn/apache-maven-3.3.9" env.PATH = "${env.MAVEN_HOME}/bin:${env.PATH}" + + /* + * As of JAVA-3042 JAVA_HOME is always set to JDK8 and this is currently necessary for mvn compile and DSE Search/Graph. + * To facilitate testing with JDK11/17 we feed the appropriate JAVA_HOME into the maven build via commandline. + * + * Maven command-line flags: + * - -DtestJavaHome=/path/to/java/home: overrides JAVA_HOME for surefire/failsafe tests, defaults to environment JAVA_HOME. + * - -Ptest-jdk-N: enables profile for running tests with a specific JDK version (substitute N for 8/11/17). + * + * Note test-jdk-N is also automatically loaded based off JAVA_HOME SDK version so testing with an older SDK is not supported. + * + * Environment variables: + * - JAVA_HOME: Path to JDK used for mvn (all steps except surefire/failsafe), Cassandra, DSE. 
+ * - JAVA8_HOME: Path to JDK8 used for Cassandra/DSE if ccm determines JAVA_HOME is not compatible with the chosen backend. + * - TEST_JAVA_HOME: PATH to JDK used for surefire/failsafe testing. + * - TEST_JAVA_VERSION: TEST_JAVA_HOME SDK version number [8/11/17], used to configure test-jdk-N profile in maven (see above) + */ + env.JAVA_HOME = sh(label: 'Get JAVA_HOME',script: '''#!/bin/bash -le . ${JABBA_SHELL} jabba which ${JABBA_VERSION}''', returnStdout: true).trim() @@ -25,9 +43,15 @@ def initializeEnvironment() { . ${JABBA_SHELL} jabba which 1.8''', returnStdout: true).trim() + env.TEST_JAVA_HOME = sh(label: 'Get TEST_JAVA_HOME',script: '''#!/bin/bash -le + . ${JABBA_SHELL} + jabba which ${JABBA_VERSION}''', returnStdout: true).trim() + env.TEST_JAVA_VERSION = sh(label: 'Get TEST_JAVA_VERSION',script: '''#!/bin/bash -le + echo "${JABBA_VERSION##*.}"''', returnStdout: true).trim() + sh label: 'Download Apache CassandraⓇ or DataStax Enterprise',script: '''#!/bin/bash -le . ${JABBA_SHELL} - jabba use ${JABBA_VERSION} + jabba use 1.8 . ${CCM_ENVIRONMENT_SHELL} ${SERVER_VERSION} ''' @@ -53,7 +77,7 @@ ENVIRONMENT_EOF set +o allexport . ${JABBA_SHELL} - jabba use ${JABBA_VERSION} + jabba use 1.8 java -version mvn -v @@ -80,7 +104,7 @@ def executeTests() { set +o allexport . 
${JABBA_SHELL} - jabba use ${JABBA_VERSION} + jabba use 1.8 if [ "${JABBA_VERSION}" != "1.8" ]; then SKIP_JAVADOCS=true @@ -94,7 +118,9 @@ def executeTests() { fi printenv | sort - mvn -B -V ${INTEGRATION_TESTS_FILTER_ARGUMENT} verify \ + mvn -B -V ${INTEGRATION_TESTS_FILTER_ARGUMENT} -T 1 verify \ + -Ptest-jdk-${TEST_JAVA_VERSION} \ + -DtestJavaHome=${TEST_JAVA_HOME} \ -DfailIfNoTests=false \ -Dmaven.test.failure.ignore=true \ -Dmaven.javadoc.skip=${SKIP_JAVADOCS} \ @@ -403,15 +429,17 @@ pipeline { '4.0', // Development Apache CassandraⓇ 'dse-6.8.30' // Current DataStax Enterprise } + axis { + name 'JABBA_VERSION' + values '1.8', // jdk8 + 'openjdk@1.11', // jdk11 + 'openjdk@1.17' // jdk17 + } } agent { label "${OS_VERSION}" } - environment { - // Per-commit builds are only going to run against JDK8 - JABBA_VERSION = '1.8' - } stages { stage('Initialize-Environment') { @@ -431,7 +459,7 @@ pipeline { } stage('Build-Driver') { steps { - buildDriver(env.JABBA_VERSION) + buildDriver('default') } } stage('Execute-Tests') { diff --git a/core/pom.xml b/core/pom.xml index d78ff6a3f02..db2af9ed156 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -248,6 +248,8 @@ maven-surefire-plugin + ${testing.jvm}/bin/java + ${mockitoopens.argline} 1 diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 112ac7e73bc..16b900a0fb2 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -229,6 +229,7 @@ integration-test + ${testing.jvm}/bin/java com.datastax.oss.driver.categories.ParallelizableTests classes 8 @@ -245,6 +246,7 @@ com.datastax.oss.driver.categories.ParallelizableTests, com.datastax.oss.driver.categories.IsolatedTests ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml ${skipSerialITs} + ${testing.jvm}/bin/java @@ -260,6 +262,7 @@ ${project.build.directory}/failsafe-reports/failsafe-summary-isolated.xml ${skipIsolatedITs} ${blockhound.argline} + ${testing.jvm}/bin/java @@ -322,16 +325,4 @@ - - - jdk 13+ - - [13,) - - - - 
-XX:+AllowRedefinitionToAddDeleteMethods - - - diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 2e5f74f83b7..b98fd5028c1 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -122,6 +122,7 @@ maven-surefire-plugin + ${testing.jvm}/bin/java 1 diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index c1aebc6718b..cfa28c25dba 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -220,6 +220,7 @@ maven-surefire-plugin + ${testing.jvm}/bin/java ${project.basedir}/src/test/resources/logback-test.xml @@ -237,6 +238,7 @@ + ${testing.jvm}/bin/java ${project.basedir}/src/test/resources/logback-test.xml diff --git a/pom.xml b/pom.xml index b56f22d6454..eae8a54ac55 100644 --- a/pom.xml +++ b/pom.xml @@ -433,12 +433,12 @@ io.projectreactor.tools blockhound - 1.0.4.RELEASE + 1.0.8.RELEASE io.projectreactor.tools blockhound-junit-platform - 1.0.4.RELEASE + 1.0.8.RELEASE @@ -533,12 +533,12 @@ org.jacoco jacoco-maven-plugin - 0.8.5 + 0.8.10 org.apache.felix maven-bundle-plugin - 4.2.1 + 5.1.1 org.revapi @@ -936,6 +936,57 @@ height="0" width="0" style="display:none;visibility:hidden"> true + + + test-jdk-environment + + + !testJavaHome + + + + ${env.JAVA_HOME} + + + + + test-jdk-specified + + + testJavaHome + + + + ${testJavaHome} + + + + + test-jdk-8 + + [8,) + + + + + test-jdk-11 + + [11,) + + + + + test-jdk-17 + + [17,) + + + + -XX:+AllowRedefinitionToAddDeleteMethods + + --add-opens java.base/jdk.internal.util.random=ALL-UNNAMED + + From a572d5f6ac0adeb07d0b4b0fae90cbcb3b22a9b4 Mon Sep 17 00:00:00 2001 From: Dom Del Nano Date: Fri, 7 Jul 2023 15:32:52 -0700 Subject: [PATCH 838/979] Update native protocol documentation to use a valid (capitalized) protocol version (#1637) --- manual/core/native_protocol/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index d64aaccda85..b28c72f6300 100644 --- a/manual/core/native_protocol/README.md +++ 
b/manual/core/native_protocol/README.md @@ -62,7 +62,8 @@ the [configuration](../configuration/): ``` datastax-java-driver { advanced.protocol { - version = v3 + # The V in the version parameter must be capitalized + version = V3 } } ``` From fc79bb7ae57a3c1fbd9500418402a4bbec0d6665 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 7 Jul 2023 17:40:04 -0500 Subject: [PATCH 839/979] Follow-up to previous commit --- .../core/DefaultProtocolVersionRegistryTest.java | 10 ++++++++++ manual/core/native_protocol/README.md | 4 +++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java index 9d81a3bdd3d..0bf571da20c 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java @@ -24,6 +24,7 @@ import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.DATE_TYPE; import static com.datastax.oss.driver.internal.core.DefaultProtocolFeature.SMALLINT_AND_TINYINT_TYPES; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.dse.driver.api.core.DseProtocolVersion; import com.datastax.dse.driver.api.core.metadata.DseNodeProperties; @@ -51,6 +52,15 @@ public void should_find_version_by_name() { assertThat(registry.fromName("DSE_V1")).isEqualTo(DseProtocolVersion.DSE_V1); } + + @Test + public void should_fail_to_find_version_by_name_different_case() { + assertThatThrownBy(() -> registry.fromName("v4")).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> registry.fromName("dse_v1")).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> 
registry.fromName("dDSE_v1")).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> registry.fromName("dse_v1")).isInstanceOf(IllegalArgumentException.class); + } + @Test public void should_downgrade_if_lower_version_available() { Optional downgraded = registry.downgrade(V4); diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index b28c72f6300..2bc075be3be 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -62,12 +62,14 @@ the [configuration](../configuration/): ``` datastax-java-driver { advanced.protocol { - # The V in the version parameter must be capitalized version = V3 } } ``` +Note that the protocol version you specify above is case sensitive so make sure to only use uppercase letters. +"V3" is correct, "v3" is not. + If you force a version that is too high for the server, you'll get an error: ``` From e2fb42d82949dfb161809ef3c2d5d563419cee24 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 7 Jul 2023 23:17:05 -0500 Subject: [PATCH 840/979] JAVA-3061 Re-introduce an improved CqlVector, add support for accessing vectors directly as float arrays (#1666) --- core/revapi.json | 65 +++++- .../oss/driver/api/core/data/CqlVector.java | 193 +++++++++++++++++ .../driver/api/core/data/GettableById.java | 2 +- .../driver/api/core/data/GettableByIndex.java | 5 +- .../driver/api/core/data/GettableByName.java | 4 +- .../driver/api/core/data/SettableById.java | 4 +- .../driver/api/core/data/SettableByIndex.java | 6 +- .../driver/api/core/data/SettableByName.java | 6 +- .../api/core/type/codec/ExtraTypeCodecs.java | 8 + .../api/core/type/codec/TypeCodecs.java | 8 +- .../api/core/type/reflect/GenericType.java | 18 ++ .../internal/core/type/codec/VectorCodec.java | 38 ++-- .../vector/AbstractVectorToArrayCodec.java | 140 +++++++++++++ .../vector/FloatVectorToArrayCodec.java | 105 ++++++++++ .../codec/registry/CachingCodecRegistry.java | 83 +++++++- 
.../driver/api/core/data/CqlVectorTest.java | 198 ++++++++++++++++++ .../core/type/codec/VectorCodecTest.java | 26 +-- ...CachingCodecRegistryTestDataProviders.java | 50 +++++ 18 files changed, 901 insertions(+), 58 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/AbstractVectorToArrayCodec.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/FloatVectorToArrayCodec.java create mode 100644 core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java diff --git a/core/revapi.json b/core/revapi.json index f844479cd29..63e2cef5a1e 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -6887,7 +6887,70 @@ "code": "java.method.removed", "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", "justification": "Refactoring in JAVA-3061" - } + }, + { + "code": "java.class.removed", + "old": "class com.datastax.oss.driver.api.core.data.CqlVector.Builder", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.removed", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector.Builder com.datastax.oss.driver.api.core.data.CqlVector::builder()", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.removed", + "old": "method java.lang.Iterable com.datastax.oss.driver.api.core.data.CqlVector::getValues()", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "class com.datastax.oss.driver.api.core.data.CqlVector", + "new": "class com.datastax.oss.driver.api.core.data.CqlVector", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.parameterTypeChanged", + "old": "parameter 
com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(===com.datastax.oss.driver.api.core.type.CqlVectorType===, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(===com.datastax.oss.driver.api.core.type.VectorType===, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.CqlVectorType, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "new": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.CqlVectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> 
com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.CqlVectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===com.datastax.oss.driver.api.core.type.reflect.GenericType===)", + "new": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===com.datastax.oss.driver.api.core.type.reflect.GenericType===)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "justification": "Refactorings in PR 1666" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + 
"justification": "Refactorings in PR 1666" + } ] } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java new file mode 100644 index 00000000000..152d0f40823 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java @@ -0,0 +1,193 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.data; + +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.base.Predicates; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; +import com.datastax.oss.driver.shaded.guava.common.collect.Streams; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Representation of a vector as defined in CQL. + * + *

          A CQL vector is a fixed-length array of non-null numeric values. These properties don't map + * cleanly to an existing class in the standard JDK Collections hierarchy so we provide this value + * object instead. Like other value object collections returned by the driver instances of this + * class are not immutable; think of these value objects as a representation of a vector stored in + * the database as an initial step in some additional computation. + * + *

          While we don't implement any Collection APIs we do implement Iterable. We also attempt to play + * nice with the Streams API in order to better facilitate integration with data pipelines. Finally, + * where possible we've tried to make the API of this class similar to the equivalent methods on + * {@link List}. + */ +public class CqlVector implements Iterable { + + /** + * Create a new CqlVector containing the specified values. + * + * @param vals the collection of values to wrap. + * @return a CqlVector wrapping those values + */ + public static CqlVector newInstance(V... vals) { + + // Note that Array.asList() guarantees the return of an array which implements RandomAccess + return new CqlVector(Arrays.asList(vals)); + } + + /** + * Create a new CqlVector that "wraps" an existing ArrayList. Modifications to the passed + * ArrayList will also be reflected in the returned CqlVector. + * + * @param list the collection of values to wrap. + * @return a CqlVector wrapping those values + */ + public static CqlVector newInstance(List list) { + Preconditions.checkArgument(list != null, "Input list should not be null"); + return new CqlVector(list); + } + + /** + * Create a new CqlVector instance from the specified string representation. Note that this method + * is intended to mirror {@link #toString()}; passing this method the output from a toString + * call on some CqlVector should return a CqlVector that is equal to the origin instance. 
+ * + * @param str a String representation of a CqlVector + * @param subtypeCodec + * @return a new CqlVector built from the String representation + */ + public static CqlVector from( + @NonNull String str, @NonNull TypeCodec subtypeCodec) { + Preconditions.checkArgument(str != null, "Cannot create CqlVector from null string"); + Preconditions.checkArgument(!str.isEmpty(), "Cannot create CqlVector from empty string"); + ArrayList vals = + Streams.stream(Splitter.on(", ").split(str.substring(1, str.length() - 1))) + .map(subtypeCodec::parse) + .collect(Collectors.toCollection(ArrayList::new)); + return new CqlVector(vals); + } + + private final List list; + + private CqlVector(@NonNull List list) { + + Preconditions.checkArgument( + Iterables.all(list, Predicates.notNull()), "CqlVectors cannot contain null values"); + this.list = list; + } + + /** + * Retrieve the value at the specified index. Modelled after {@link List#get(int)} + * + * @param idx the index to retrieve + * @return the value at the specified index + */ + public T get(int idx) { + return list.get(idx); + } + + /** + * Update the value at the specified index. Modelled after {@link List#set(int, Object)} + * + * @param idx the index to set + * @param val the new value for the specified index + * @return the old value for the specified index + */ + public T set(int idx, T val) { + return list.set(idx, val); + } + + /** + * Return the size of this vector. Modelled after {@link List#size()} + * + * @return the vector size + */ + public int size() { + return this.list.size(); + } + + /** + * Return a CqlVector consisting of the contents of a portion of this vector. 
Modelled after + * {@link List#subList(int, int)} + * + * @param from the index to start from (inclusive) + * @param to the index to end on (exclusive) + * @return a new CqlVector wrapping the sublist + */ + public CqlVector subVector(int from, int to) { + return new CqlVector(this.list.subList(from, to)); + } + + /** + * Return a boolean indicating whether the vector is empty. Modelled after {@link List#isEmpty()} + * + * @return true if the list is empty, false otherwise + */ + public boolean isEmpty() { + return this.list.isEmpty(); + } + + /** + * Create an {@link Iterator} for this vector + * + * @return the generated iterator + */ + @Override + public Iterator iterator() { + return this.list.iterator(); + } + + /** + * Create a {@link Stream} of the values in this vector + * + * @return the Stream instance + */ + public Stream stream() { + return this.list.stream(); + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (o instanceof CqlVector) { + CqlVector that = (CqlVector) o; + return this.list.equals(that.list); + } else { + return false; + } + } + + @Override + public int hashCode() { + return Objects.hash(list); + } + + @Override + public String toString() { + return Iterables.toString(this.list); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java index 6c6cf95a568..1b4197667e9 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java @@ -529,7 +529,7 @@ default CqlDuration getCqlDuration(@NonNull CqlIdentifier id) { * @throws IllegalArgumentException if the id is invalid. 
*/ @Nullable - default List getVector( + default CqlVector getVector( @NonNull CqlIdentifier id, @NonNull Class elementsClass) { return getVector(firstIndexOf(id), elementsClass); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java index a805342defc..0efb003ca24 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java @@ -444,8 +444,9 @@ default CqlDuration getCqlDuration(int i) { * @throws IndexOutOfBoundsException if the index is invalid. */ @Nullable - default List getVector(int i, @NonNull Class elementsClass) { - return get(i, GenericType.listOf(elementsClass)); + default CqlVector getVector( + int i, @NonNull Class elementsClass) { + return get(i, GenericType.vectorOf(elementsClass)); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java index 3214994c04a..377f8292002 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java @@ -525,9 +525,9 @@ default CqlDuration getCqlDuration(@NonNull String name) { * @throws IllegalArgumentException if the name is invalid. 
*/ @Nullable - default List getVector( + default CqlVector getVector( @NonNull String name, @NonNull Class elementsClass) { - return getList(firstIndexOf(name), elementsClass); + return getVector(firstIndexOf(name), elementsClass); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index 3c17f0cb6f1..84055b0e964 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -571,9 +571,9 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) */ @NonNull @CheckReturnValue - default SelfT setVector( + default SelfT setVector( @NonNull CqlIdentifier id, - @Nullable List v, + @Nullable CqlVector v, @NonNull Class elementsClass) { SelfT result = null; for (Integer i : allIndicesOf(id)) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java index 52bc92d4c09..01e6d5cdf58 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java @@ -423,9 +423,9 @@ default SelfT setCqlDuration(int i, @Nullable CqlDuration v) { */ @NonNull @CheckReturnValue - default SelfT setVector( - int i, @Nullable List v, @NonNull Class elementsClass) { - return set(i, v, GenericType.listOf(elementsClass)); + default SelfT setVector( + int i, @Nullable CqlVector v, @NonNull Class elementsClass) { + return set(i, v, GenericType.vectorOf(elementsClass)); } /** diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index 559ad40cbff..a78753789e3 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -570,8 +570,10 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { */ @NonNull @CheckReturnValue - default SelfT setVector( - @NonNull String name, @Nullable List v, @NonNull Class elementsClass) { + default SelfT setVector( + @NonNull String name, + @Nullable CqlVector v, + @NonNull Class elementsClass) { SelfT result = null; for (Integer i : allIndicesOf(name)) { result = (result == null ? this : result).setVector(i, v, elementsClass); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java index 6bf044ebf03..65571e01f75 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.java @@ -16,8 +16,10 @@ package com.datastax.oss.driver.api.core.type.codec; import com.datastax.oss.driver.api.core.session.SessionBuilder; +import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.DefaultVectorType; import com.datastax.oss.driver.internal.core.type.codec.SimpleBlobCodec; import com.datastax.oss.driver.internal.core.type.codec.TimestampCodec; import com.datastax.oss.driver.internal.core.type.codec.extras.OptionalCodec; @@ -36,6 +38,7 @@ import com.datastax.oss.driver.internal.core.type.codec.extras.time.PersistentZonedTimestampCodec; import com.datastax.oss.driver.internal.core.type.codec.extras.time.TimestampMillisCodec; import com.datastax.oss.driver.internal.core.type.codec.extras.time.ZonedTimestampCodec; +import 
com.datastax.oss.driver.internal.core.type.codec.extras.vector.FloatVectorToArrayCodec; import com.fasterxml.jackson.databind.ObjectMapper; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.ByteBuffer; @@ -479,4 +482,9 @@ public static TypeCodec json( @NonNull Class javaType, @NonNull ObjectMapper objectMapper) { return new JsonCodec<>(javaType, objectMapper); } + + /** Builds a new codec that maps CQL float vectors of the specified size to an array of floats. */ + public static TypeCodec floatVectorToArray(int dimensions) { + return new FloatVectorToArrayCodec(new DefaultVectorType(DataTypes.FLOAT, dimensions)); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java index e824e7f41fc..d4cf3ddb0c0 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.core.type.codec; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.CustomType; @@ -207,12 +208,17 @@ public static TypeCodec tupleOf(@NonNull TupleType cqlType) { return new TupleCodec(cqlType); } - public static TypeCodec> vectorOf( + public static TypeCodec> vectorOf( @NonNull VectorType type, @NonNull TypeCodec subtypeCodec) { return new VectorCodec( DataTypes.vectorOf(subtypeCodec.getCqlType(), type.getDimensions()), subtypeCodec); } + public static TypeCodec> vectorOf( + int dimensions, @NonNull TypeCodec subtypeCodec) { + return new VectorCodec(DataTypes.vectorOf(subtypeCodec.getCqlType(), dimensions), subtypeCodec); + } + /** * Builds a new codec that maps a CQL user defined type to 
the driver's {@link UdtValue}, for the * given type definition. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index a1977e39f23..350e869ae52 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.api.core.type.reflect; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.GettableByIndex; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; @@ -147,6 +148,23 @@ public static GenericType> setOf(@NonNull GenericType elementType) return new GenericType<>(token); } + @NonNull + public static GenericType> vectorOf( + @NonNull Class elementType) { + TypeToken> token = + new TypeToken>() {}.where( + new TypeParameter() {}, TypeToken.of(elementType)); + return new GenericType<>(token); + } + + @NonNull + public static GenericType> vectorOf( + @NonNull GenericType elementType) { + TypeToken> token = + new TypeToken>() {}.where(new TypeParameter() {}, elementType.token); + return new GenericType<>(token); + } + @NonNull public static GenericType> mapOf( @NonNull Class keyType, @NonNull Class valueType) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java index 75b3e46ddfd..a94ae728725 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java @@ -16,13 +16,13 @@ package com.datastax.oss.driver.internal.core.type.codec; import 
com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; -import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.datastax.oss.driver.internal.core.type.DefaultVectorType; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; -import com.datastax.oss.driver.shaded.guava.common.collect.Streams; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; @@ -30,23 +30,26 @@ import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; -import java.util.stream.Collectors; -public class VectorCodec implements TypeCodec> { +public class VectorCodec implements TypeCodec> { private final VectorType cqlType; - private final GenericType> javaType; + private final GenericType> javaType; private final TypeCodec subtypeCodec; - public VectorCodec(VectorType cqlType, TypeCodec subtypeCodec) { + public VectorCodec(@NonNull VectorType cqlType, @NonNull TypeCodec subtypeCodec) { this.cqlType = cqlType; this.subtypeCodec = subtypeCodec; - this.javaType = GenericType.listOf(subtypeCodec.getJavaType()); + this.javaType = GenericType.vectorOf(subtypeCodec.getJavaType()); + } + + public VectorCodec(int dimensions, @NonNull TypeCodec subtypeCodec) { + this(new DefaultVectorType(subtypeCodec.getCqlType(), dimensions), subtypeCodec); } @NonNull @Override - public GenericType> getJavaType() { + public GenericType> getJavaType() { return this.javaType; } @@ -59,7 +62,7 @@ public DataType getCqlType() { @Nullable @Override public ByteBuffer encode( - @Nullable List value, @NonNull ProtocolVersion protocolVersion) { + @Nullable CqlVector value, @NonNull ProtocolVersion 
protocolVersion) { if (value == null || cqlType.getDimensions() <= 0) { return null; } @@ -103,7 +106,7 @@ public ByteBuffer encode( @Nullable @Override - public List decode( + public CqlVector decode( @Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { if (bytes == null || bytes.remaining() == 0) { return null; @@ -133,27 +136,20 @@ Elements should at least precede themselves with their size (along the lines of /* Restore the input ByteBuffer to its original state */ bytes.rewind(); - return rv; + return CqlVector.newInstance(rv); } @NonNull @Override - public String format(@Nullable List value) { + public String format(@Nullable CqlVector value) { return value == null ? "NULL" : Iterables.toString(value); } @Nullable @Override - public List parse(@Nullable String value) { + public CqlVector parse(@Nullable String value) { return (value == null || value.isEmpty() || value.equalsIgnoreCase("NULL")) ? null - : this.from(value); - } - - private List from(@Nullable String value) { - - return Streams.stream(Splitter.on(", ").split(value.substring(1, value.length() - 1))) - .map(subtypeCodec::parse) - .collect(Collectors.toCollection(ArrayList::new)); + : CqlVector.from(value, this.subtypeCodec); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/AbstractVectorToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/AbstractVectorToArrayCodec.java new file mode 100644 index 00000000000..79db9f6bc8a --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/AbstractVectorToArrayCodec.java @@ -0,0 +1,140 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.vector; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.VectorType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.lang.reflect.Array; +import java.nio.ByteBuffer; +import java.util.Objects; + +/** Common super-class for all codecs which map a CQL vector type onto a primitive array */ +public abstract class AbstractVectorToArrayCodec implements TypeCodec { + + @NonNull protected final VectorType cqlType; + @NonNull protected final GenericType javaType; + + /** + * @param cqlType The CQL type. Must be a list type. + * @param arrayType The Java type. Must be an array class. 
+ */ + protected AbstractVectorToArrayCodec( + @NonNull VectorType cqlType, @NonNull GenericType arrayType) { + this.cqlType = Objects.requireNonNull(cqlType, "cqlType cannot be null"); + this.javaType = Objects.requireNonNull(arrayType, "arrayType cannot be null"); + if (!arrayType.isArray()) { + throw new IllegalArgumentException("Expecting Java array class, got " + arrayType); + } + } + + @NonNull + @Override + public GenericType getJavaType() { + return this.javaType; + } + + @NonNull + @Override + public DataType getCqlType() { + return this.cqlType; + } + + @Nullable + @Override + public ByteBuffer encode(@Nullable ArrayT array, @NonNull ProtocolVersion protocolVersion) { + if (array == null) { + return null; + } + int length = Array.getLength(array); + int totalSize = length * sizeOfComponentType(); + ByteBuffer output = ByteBuffer.allocate(totalSize); + for (int i = 0; i < length; i++) { + serializeElement(output, array, i, protocolVersion); + } + output.flip(); + return output; + } + + @Nullable + @Override + public ArrayT decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion protocolVersion) { + if (bytes == null || bytes.remaining() == 0) { + throw new IllegalArgumentException( + "Input ByteBuffer must not be null and must have non-zero remaining bytes"); + } + ByteBuffer input = bytes.duplicate(); + int length = this.cqlType.getDimensions(); + int elementSize = sizeOfComponentType(); + ArrayT array = newInstance(); + for (int i = 0; i < length; i++) { + // Null elements can happen on the decode path, but we cannot tolerate them + if (elementSize < 0) { + throw new NullPointerException("Primitive arrays cannot store null elements"); + } else { + deserializeElement(input, array, i, protocolVersion); + } + } + return array; + } + + /** + * Creates a new array instance with a size matching the specified vector. + * + * @return a new array instance with a size matching the specified vector. 
+ */ + @NonNull + protected abstract ArrayT newInstance(); + + /** + * Return the size in bytes of the array component type. + * + * @return the size in bytes of the array component type. + */ + protected abstract int sizeOfComponentType(); + + /** + * Write the {@code index}th element of {@code array} to {@code output}. + * + * @param output The ByteBuffer to write to. + * @param array The array to read from. + * @param index The element index. + * @param protocolVersion The protocol version to use. + */ + protected abstract void serializeElement( + @NonNull ByteBuffer output, + @NonNull ArrayT array, + int index, + @NonNull ProtocolVersion protocolVersion); + + /** + * Read the {@code index}th element of {@code array} from {@code input}. + * + * @param input The ByteBuffer to read from. + * @param array The array to write to. + * @param index The element index. + * @param protocolVersion The protocol version to use. + */ + protected abstract void deserializeElement( + @NonNull ByteBuffer input, + @NonNull ArrayT array, + int index, + @NonNull ProtocolVersion protocolVersion); +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/FloatVectorToArrayCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/FloatVectorToArrayCodec.java new file mode 100644 index 00000000000..80c035e96d3 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/vector/FloatVectorToArrayCodec.java @@ -0,0 +1,105 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec.extras.vector; + +import com.datastax.oss.driver.api.core.ProtocolVersion; +import com.datastax.oss.driver.api.core.type.VectorType; +import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.internal.core.type.codec.FloatCodec; +import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Objects; + +/** A codec that maps CQL vectors to the Java type {@code float[]}. 
*/ +public class FloatVectorToArrayCodec extends AbstractVectorToArrayCodec { + + public FloatVectorToArrayCodec(VectorType type) { + super(type, GenericType.of(float[].class)); + } + + @Override + public boolean accepts(@NonNull Class javaClass) { + Objects.requireNonNull(javaClass); + return float[].class.equals(javaClass); + } + + @Override + public boolean accepts(@NonNull Object value) { + Objects.requireNonNull(value); + return value instanceof float[]; + } + + @NonNull + @Override + protected float[] newInstance() { + return new float[cqlType.getDimensions()]; + } + + @Override + protected int sizeOfComponentType() { + return 4; + } + + @Override + protected void serializeElement( + @NonNull ByteBuffer output, + @NonNull float[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + output.putFloat(array[index]); + } + + @Override + protected void deserializeElement( + @NonNull ByteBuffer input, + @NonNull float[] array, + int index, + @NonNull ProtocolVersion protocolVersion) { + array[index] = input.getFloat(); + } + + @NonNull + @Override + public String format(@Nullable float[] value) { + return value == null ? 
"NULL" : Arrays.toString(value); + } + + @Nullable + @Override + public float[] parse(@Nullable String str) { + Preconditions.checkArgument(str != null, "Cannot create float array from null string"); + Preconditions.checkArgument(!str.isEmpty(), "Cannot create float array from empty string"); + + FloatCodec codec = new FloatCodec(); + float[] rv = this.newInstance(); + Iterator strIter = + Splitter.on(", ").trimResults().split(str.substring(1, str.length() - 1)).iterator(); + for (int i = 0; i < rv.length; ++i) { + String strVal = strIter.next(); + if (strVal == null) { + throw new IllegalArgumentException("Null element observed in float array string"); + } + Float f = codec.parse(strVal); + rv[i] = f.floatValue(); + } + return rv; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index ca282c3e355..cb5d45255e1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -16,6 +16,7 @@ package com.datastax.oss.driver.internal.core.type.codec.registry; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.*; @@ -371,6 +372,23 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c inspectType(firstValue, cqlType == null ? null : ((MapType) cqlType).getValueType()); return GenericType.mapOf(keyType, valueType); } + } else if (value instanceof CqlVector) { + CqlVector vector = (CqlVector) value; + if (vector.isEmpty()) { + return cqlType == null ? 
JAVA_TYPE_FOR_EMPTY_CQLVECTORS : inferJavaTypeFromCqlType(cqlType); + } else { + Object firstElement = vector.iterator().next(); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer vector codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } + GenericType elementType = + (GenericType) + inspectType( + firstElement, cqlType == null ? null : ((VectorType) cqlType).getElementType()); + return GenericType.vectorOf(elementType); + } } else { // There's not much more we can do return GenericType.of(value.getClass()); @@ -390,6 +408,11 @@ protected GenericType inferJavaTypeFromCqlType(@NonNull DataType cqlType) { DataType valueType = ((MapType) cqlType).getValueType(); return GenericType.mapOf( inferJavaTypeFromCqlType(keyType), inferJavaTypeFromCqlType(valueType)); + } else if (cqlType instanceof VectorType) { + DataType elementType = ((VectorType) cqlType).getElementType(); + GenericType numberType = + (GenericType) inferJavaTypeFromCqlType(elementType); + return GenericType.vectorOf(numberType); } switch (cqlType.getProtocolCode()) { case ProtocolConstants.DataType.CUSTOM: @@ -492,6 +515,22 @@ protected DataType inferCqlTypeFromValue(@NonNull Object value) { return null; } return DataTypes.mapOf(keyType, valueType); + } else if (value instanceof CqlVector) { + CqlVector vector = (CqlVector) value; + if (vector.isEmpty()) { + return CQL_TYPE_FOR_EMPTY_VECTORS; + } + Object firstElement = vector.iterator().next(); + if (firstElement == null) { + throw new IllegalArgumentException( + "Can't infer vector codec because the first element is null " + + "(note that CQL does not allow null values in collections)"); + } + DataType elementType = inferCqlTypeFromValue(firstElement); + if (elementType == null) { + return null; + } + return DataTypes.vectorOf(elementType, vector.size()); } Class javaClass = value.getClass(); if (ByteBuffer.class.isAssignableFrom(javaClass)) { @@ -538,7 +577,7 @@ 
protected DataType inferCqlTypeFromValue(@NonNull Object value) { return null; } - private TypeCodec getElementCodec( + private TypeCodec getElementCodecForCqlAndJavaType( ContainerType cqlType, TypeToken token, boolean isJavaCovariant) { DataType elementCqlType = cqlType.getElementType(); @@ -550,6 +589,14 @@ private TypeCodec getElementCodec( return codecFor(elementCqlType); } + private TypeCodec getElementCodecForJavaType( + ParameterizedType parameterizedType, boolean isJavaCovariant) { + + Type[] typeArguments = parameterizedType.getActualTypeArguments(); + GenericType elementType = GenericType.of(typeArguments[0]); + return codecFor(elementType, isJavaCovariant); + } + // Try to create a codec when we haven't found it in the cache @NonNull protected TypeCodec createCodec( @@ -565,11 +612,11 @@ protected TypeCodec createCodec( TypeToken token = javaType.__getToken(); if (cqlType instanceof ListType && List.class.isAssignableFrom(token.getRawType())) { TypeCodec elementCodec = - getElementCodec((ContainerType) cqlType, token, isJavaCovariant); + getElementCodecForCqlAndJavaType((ContainerType) cqlType, token, isJavaCovariant); return TypeCodecs.listOf(elementCodec); } else if (cqlType instanceof SetType && Set.class.isAssignableFrom(token.getRawType())) { TypeCodec elementCodec = - getElementCodec((ContainerType) cqlType, token, isJavaCovariant); + getElementCodecForCqlAndJavaType((ContainerType) cqlType, token, isJavaCovariant); return TypeCodecs.setOf(elementCodec); } else if (cqlType instanceof MapType && Map.class.isAssignableFrom(token.getRawType())) { DataType keyCqlType = ((MapType) cqlType).getKeyType(); @@ -593,9 +640,14 @@ protected TypeCodec createCodec( } else if (cqlType instanceof UserDefinedType && UdtValue.class.isAssignableFrom(token.getRawType())) { return TypeCodecs.udtOf((UserDefinedType) cqlType); - } else if (cqlType instanceof VectorType && List.class.isAssignableFrom(token.getRawType())) { + } else if (cqlType instanceof VectorType + && 
CqlVector.class.isAssignableFrom(token.getRawType())) { VectorType vectorType = (VectorType) cqlType; - TypeCodec elementCodec = getElementCodec(vectorType, token, isJavaCovariant); + /* For a vector type we'll always get back an instance of TypeCodec due to the + * type of CqlVector... but getElementCodecForCqlAndJavaType() is a generalized function that can't + * return this more precise type. Thus the cast here. */ + TypeCodec elementCodec = + uncheckedCast(getElementCodecForCqlAndJavaType(vectorType, token, isJavaCovariant)); return TypeCodecs.vectorOf(vectorType, elementCodec); } else if (cqlType instanceof CustomType && ByteBuffer.class.isAssignableFrom(token.getRawType())) { @@ -612,15 +664,13 @@ protected TypeCodec createCodec(@NonNull GenericType javaType, boolean isJ TypeToken token = javaType.__getToken(); if (List.class.isAssignableFrom(token.getRawType()) && token.getType() instanceof ParameterizedType) { - Type[] typeArguments = ((ParameterizedType) token.getType()).getActualTypeArguments(); - GenericType elementType = GenericType.of(typeArguments[0]); - TypeCodec elementCodec = codecFor(elementType, isJavaCovariant); + TypeCodec elementCodec = + getElementCodecForJavaType((ParameterizedType) token.getType(), isJavaCovariant); return TypeCodecs.listOf(elementCodec); } else if (Set.class.isAssignableFrom(token.getRawType()) && token.getType() instanceof ParameterizedType) { - Type[] typeArguments = ((ParameterizedType) token.getType()).getActualTypeArguments(); - GenericType elementType = GenericType.of(typeArguments[0]); - TypeCodec elementCodec = codecFor(elementType, isJavaCovariant); + TypeCodec elementCodec = + getElementCodecForJavaType((ParameterizedType) token.getType(), isJavaCovariant); return TypeCodecs.setOf(elementCodec); } else if (Map.class.isAssignableFrom(token.getRawType()) && token.getType() instanceof ParameterizedType) { @@ -631,6 +681,9 @@ protected TypeCodec createCodec(@NonNull GenericType javaType, boolean isJ TypeCodec 
valueCodec = codecFor(valueType, isJavaCovariant); return TypeCodecs.mapOf(keyCodec, valueCodec); } + /* Note that this method cannot generate TypeCodec instances for any CqlVector type. VectorCodec needs + * to know the dimensions of the vector it will be operating on and there's no way to determine that from + * the Java type alone. */ throw new CodecNotFoundException(null, javaType); } @@ -652,6 +705,11 @@ protected TypeCodec createCodec(@NonNull DataType cqlType) { TypeCodec keyCodec = codecFor(keyType); TypeCodec valueCodec = codecFor(valueType); return TypeCodecs.mapOf(keyCodec, valueCodec); + } else if (cqlType instanceof VectorType) { + VectorType vectorType = (VectorType) cqlType; + TypeCodec elementCodec = + uncheckedCast(codecFor(vectorType.getElementType())); + return TypeCodecs.vectorOf(vectorType, elementCodec); } else if (cqlType instanceof TupleType) { return TypeCodecs.tupleOf((TupleType) cqlType); } else if (cqlType instanceof UserDefinedType) { @@ -687,8 +745,11 @@ private static TypeCodec uncheckedCast( GenericType.setOf(Boolean.class); private static final GenericType> JAVA_TYPE_FOR_EMPTY_MAPS = GenericType.mapOf(Boolean.class, Boolean.class); + private static final GenericType> JAVA_TYPE_FOR_EMPTY_CQLVECTORS = + GenericType.vectorOf(Number.class); private static final DataType CQL_TYPE_FOR_EMPTY_LISTS = DataTypes.listOf(DataTypes.BOOLEAN); private static final DataType CQL_TYPE_FOR_EMPTY_SETS = DataTypes.setOf(DataTypes.BOOLEAN); private static final DataType CQL_TYPE_FOR_EMPTY_MAPS = DataTypes.mapOf(DataTypes.BOOLEAN, DataTypes.BOOLEAN); + private static final DataType CQL_TYPE_FOR_EMPTY_VECTORS = DataTypes.vectorOf(DataTypes.INT, 0); } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java new file mode 100644 index 00000000000..ecf8f1249d0 --- /dev/null +++ 
b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java @@ -0,0 +1,198 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.data; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import org.assertj.core.util.Lists; +import org.junit.Test; + +public class CqlVectorTest { + + private static final Float[] VECTOR_ARGS = {1.0f, 2.5f}; + + private void validate_built_vector(CqlVector vec) { + + assertThat(vec.size()).isEqualTo(2); + assertThat(vec.isEmpty()).isFalse(); + assertThat(vec.get(0)).isEqualTo(VECTOR_ARGS[0]); + assertThat(vec.get(1)).isEqualTo(VECTOR_ARGS[1]); + } + + @Test + public void should_build_vector_from_elements() { + + validate_built_vector(CqlVector.newInstance(VECTOR_ARGS)); + } + + @Test + public void should_build_vector_from_list() { + + validate_built_vector(CqlVector.newInstance(Lists.newArrayList(VECTOR_ARGS))); + } + + @Test + public void should_build_vector_from_tostring_output() { + + CqlVector vector1 = CqlVector.newInstance(VECTOR_ARGS); + CqlVector vector2 = CqlVector.from(vector1.toString(), 
TypeCodecs.FLOAT); + assertThat(vector2).isEqualTo(vector1); + } + + @Test + public void should_throw_from_null_string() { + + assertThatThrownBy( + () -> { + CqlVector.from(null, TypeCodecs.FLOAT); + }) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_throw_from_empty_string() { + + assertThatThrownBy( + () -> { + CqlVector.from("", TypeCodecs.FLOAT); + }) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_throw_when_building_with_nulls() { + + assertThatThrownBy( + () -> { + CqlVector.newInstance(1.1f, null, 2.2f); + }) + .isInstanceOf(IllegalArgumentException.class); + + Float[] theArray = new Float[] {1.1f, null, 2.2f}; + assertThatThrownBy( + () -> { + CqlVector.newInstance(theArray); + }) + .isInstanceOf(IllegalArgumentException.class); + + List theList = Lists.newArrayList(1.1f, null, 2.2f); + assertThatThrownBy( + () -> { + CqlVector.newInstance(theList); + }) + .isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void should_build_empty_vector() { + + CqlVector vector = CqlVector.newInstance(); + assertThat(vector.isEmpty()).isTrue(); + assertThat(vector.size()).isEqualTo(0); + } + + @Test + public void should_behave_mostly_like_a_list() { + + CqlVector vector = CqlVector.newInstance(VECTOR_ARGS); + assertThat(vector.get(0)).isEqualTo(VECTOR_ARGS[0]); + Float newVal = VECTOR_ARGS[0] * 2; + vector.set(0, newVal); + assertThat(vector.get(0)).isEqualTo(newVal); + assertThat(vector.isEmpty()).isFalse(); + assertThat(vector.size()).isEqualTo(2); + assertThat(Iterators.toArray(vector.iterator(), Float.class)).isEqualTo(VECTOR_ARGS); + } + + @Test + public void should_play_nicely_with_streams() { + + CqlVector vector = CqlVector.newInstance(VECTOR_ARGS); + List results = + vector.stream() + .map((f) -> f * 2) + .collect(Collectors.toCollection(() -> new ArrayList())); + for (int i = 0; i < vector.size(); ++i) { + assertThat(results.get(i)).isEqualTo(vector.get(i) * 2); + } + } + 
+ @Test + public void should_reflect_changes_to_mutable_list() { + + List theList = Lists.newArrayList(1.1f, 2.2f, 3.3f); + CqlVector vector = CqlVector.newInstance(theList); + assertThat(vector.size()).isEqualTo(3); + assertThat(vector.get(2)).isEqualTo(3.3f); + + float newVal1 = 4.4f; + theList.set(2, newVal1); + assertThat(vector.size()).isEqualTo(3); + assertThat(vector.get(2)).isEqualTo(newVal1); + + float newVal2 = 5.5f; + theList.add(newVal2); + assertThat(vector.size()).isEqualTo(4); + assertThat(vector.get(3)).isEqualTo(newVal2); + } + + @Test + public void should_reflect_changes_to_array() { + + Float[] theArray = new Float[] {1.1f, 2.2f, 3.3f}; + CqlVector vector = CqlVector.newInstance(theArray); + assertThat(vector.size()).isEqualTo(3); + assertThat(vector.get(2)).isEqualTo(3.3f); + + float newVal1 = 4.4f; + theArray[2] = newVal1; + assertThat(vector.size()).isEqualTo(3); + assertThat(vector.get(2)).isEqualTo(newVal1); + } + + @Test + public void should_correctly_compare_vectors() { + + Float[] args = VECTOR_ARGS.clone(); + CqlVector vector1 = CqlVector.newInstance(args); + CqlVector vector2 = CqlVector.newInstance(args); + CqlVector vector3 = CqlVector.newInstance(Lists.newArrayList(args)); + assertThat(vector1).isNotSameAs(vector2); + assertThat(vector1).isEqualTo(vector2); + assertThat(vector1).isNotSameAs(vector3); + assertThat(vector1).isEqualTo(vector3); + + Float[] differentArgs = args.clone(); + float newVal = differentArgs[0] * 2; + differentArgs[0] = newVal; + CqlVector vector4 = CqlVector.newInstance(differentArgs); + assertThat(vector1).isNotSameAs(vector4); + assertThat(vector1).isNotEqualTo(vector4); + + Float[] biggerArgs = Arrays.copyOf(args, args.length + 1); + biggerArgs[biggerArgs.length - 1] = newVal; + CqlVector vector5 = CqlVector.newInstance(biggerArgs); + assertThat(vector1).isNotSameAs(vector5); + assertThat(vector1).isNotEqualTo(vector5); + } +} diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java index 9b463dcb53e..82ec7b5ed67 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java @@ -18,18 +18,20 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.type.DefaultVectorType; -import com.google.common.collect.Lists; -import java.util.List; +import java.util.Arrays; import org.junit.Test; -public class VectorCodecTest extends CodecTestBase> { +public class VectorCodecTest extends CodecTestBase> { - private static final List VECTOR = Lists.newArrayList(1.0f, 2.5f); + private static final Float[] VECTOR_ARGS = {1.0f, 2.5f}; + + private static final CqlVector VECTOR = CqlVector.newInstance(VECTOR_ARGS); private static final String VECTOR_HEX_STRING = "0x" + "3f800000" + "40200000"; @@ -49,21 +51,21 @@ public void should_encode() { /** Too few eleements will cause an exception, extra elements will be silently ignored */ @Test public void should_throw_on_encode_with_too_few_elements() { - assertThatThrownBy(() -> encode(VECTOR.subList(0, 1))) + assertThatThrownBy(() -> encode(VECTOR.subVector(0, 1))) .isInstanceOf(IllegalArgumentException.class); } @Test public void should_throw_on_encode_with_empty_list() { - assertThatThrownBy(() -> encode(Lists.newArrayList())) + assertThatThrownBy(() -> encode(CqlVector.newInstance())) 
.isInstanceOf(IllegalArgumentException.class); } @Test public void should_encode_with_too_many_elements() { - List doubleVector = Lists.newArrayList(VECTOR); - doubleVector.addAll(VECTOR); - assertThat(encode(doubleVector)).isEqualTo(VECTOR_HEX_STRING); + Float[] doubledVectorContents = Arrays.copyOf(VECTOR_ARGS, VECTOR_ARGS.length * 2); + System.arraycopy(VECTOR_ARGS, 0, doubledVectorContents, VECTOR_ARGS.length, VECTOR_ARGS.length); + assertThat(encode(CqlVector.newInstance(doubledVectorContents))).isEqualTo(VECTOR_HEX_STRING); } @Test @@ -118,14 +120,14 @@ public void should_accept_vector_type_correct_dimension_only() { @Test public void should_accept_generic_type() { - assertThat(codec.accepts(GenericType.listOf(GenericType.FLOAT))).isTrue(); - assertThat(codec.accepts(GenericType.listOf(GenericType.INTEGER))).isFalse(); + assertThat(codec.accepts(GenericType.vectorOf(GenericType.FLOAT))).isTrue(); + assertThat(codec.accepts(GenericType.vectorOf(GenericType.INTEGER))).isFalse(); assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); } @Test public void should_accept_raw_type() { - assertThat(codec.accepts(List.class)).isTrue(); + assertThat(codec.accepts(CqlVector.class)).isTrue(); assertThat(codec.accepts(Integer.class)).isFalse(); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java index 64bbd800c92..a0d0b77ca87 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java @@ -17,6 +17,7 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.data.CqlDuration; +import 
com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.type.DataTypes; @@ -285,6 +286,55 @@ public static Object[][] collectionsWithCqlAndJavaTypes() ImmutableMap.of( ImmutableMap.of(udtValue, udtValue), ImmutableMap.of(tupleValue, tupleValue)) }, + // vectors + { + DataTypes.vectorOf(DataTypes.INT, 1), + GenericType.vectorOf(Integer.class), + GenericType.vectorOf(Integer.class), + CqlVector.newInstance(1) + }, + { + DataTypes.vectorOf(DataTypes.BIGINT, 1), + GenericType.vectorOf(Long.class), + GenericType.vectorOf(Long.class), + CqlVector.newInstance(1l) + }, + { + DataTypes.vectorOf(DataTypes.SMALLINT, 1), + GenericType.vectorOf(Short.class), + GenericType.vectorOf(Short.class), + CqlVector.newInstance((short) 1) + }, + { + DataTypes.vectorOf(DataTypes.TINYINT, 1), + GenericType.vectorOf(Byte.class), + GenericType.vectorOf(Byte.class), + CqlVector.newInstance((byte) 1) + }, + { + DataTypes.vectorOf(DataTypes.FLOAT, 1), + GenericType.vectorOf(Float.class), + GenericType.vectorOf(Float.class), + CqlVector.newInstance(1.0f) + }, + { + DataTypes.vectorOf(DataTypes.DOUBLE, 1), + GenericType.vectorOf(Double.class), + GenericType.vectorOf(Double.class), + CqlVector.newInstance(1.0d) + }, + { + DataTypes.vectorOf(DataTypes.DECIMAL, 1), + GenericType.vectorOf(BigDecimal.class), + GenericType.vectorOf(BigDecimal.class), + CqlVector.newInstance(BigDecimal.ONE) + }, + { + DataTypes.vectorOf(DataTypes.VARINT, 1), + GenericType.vectorOf(BigInteger.class), + GenericType.vectorOf(BigInteger.class), + CqlVector.newInstance(BigInteger.ONE) + }, }; } From ced78da9d7bade3859952de83509f8223a784020 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Fri, 7 Jul 2023 18:56:41 -0500 Subject: [PATCH 841/979] Formatting fix --- .../core/DefaultProtocolVersionRegistryTest.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff 
--git a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java index 0bf571da20c..05de9989572 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/DefaultProtocolVersionRegistryTest.java @@ -52,13 +52,15 @@ public void should_find_version_by_name() { assertThat(registry.fromName("DSE_V1")).isEqualTo(DseProtocolVersion.DSE_V1); } - @Test public void should_fail_to_find_version_by_name_different_case() { assertThatThrownBy(() -> registry.fromName("v4")).isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> registry.fromName("dse_v1")).isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> registry.fromName("dDSE_v1")).isInstanceOf(IllegalArgumentException.class); - assertThatThrownBy(() -> registry.fromName("dse_v1")).isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> registry.fromName("dse_v1")) + .isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> registry.fromName("dDSE_v1")) + .isInstanceOf(IllegalArgumentException.class); + assertThatThrownBy(() -> registry.fromName("dse_v1")) + .isInstanceOf(IllegalArgumentException.class); } @Test From f3ff47180f30d86fa2bff2d53126c4f07a86b8a4 Mon Sep 17 00:00:00 2001 From: hhughes Date: Mon, 10 Jul 2023 17:07:12 -0700 Subject: [PATCH 842/979] JAVA-3085: Tell graal to initialize com.datastax.oss.driver.internal.core.util.Dependency at build-time (#1674) --- .../com.datastax.oss/java-driver-core/native-image.properties | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties 
index b2fb10d32c8..7900d35f81a 100644 --- a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties +++ b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties @@ -4,4 +4,5 @@ Args=-H:IncludeResources=reference\\.conf \ -H:IncludeResources=application\\.properties \ -H:IncludeResources=.*Driver\\.properties \ -H:DynamicProxyConfigurationResources=${.}/proxy.json \ - -H:ReflectionConfigurationResources=${.}/reflection.json + -H:ReflectionConfigurationResources=${.}/reflection.json \ + --initialize-at-build-time=com.datastax.oss.driver.internal.core.util.Dependency From 63ed4ce61cc531592190a0764cdb0d0c3686efe5 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 11 Jul 2023 00:23:43 -0500 Subject: [PATCH 843/979] JAVA-3000 Native protocol docs still list C* 4.0 as unreleased, v5 in beta (#1673) --- manual/core/native_protocol/README.md | 21 +++++++++------------ manual/developer/native_protocol/README.md | 6 ++---- 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 2bc075be3be..23df7ed9eec 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -24,18 +24,15 @@ only available with specific protocol versions. Java driver 4 supports protocol versions 3 to 5. 
By default, the version is negotiated with the first node the driver connects to: -| Cassandra version | Negotiated protocol version with driver 4 ¹ | -|---------------------|-------------------------------------------------| -| 2.1.x | v3 | -| 2.2.x | v4 | -| 3.x | v4 | -| 4.x ² | v5 | +| Cassandra version | Negotiated protocol version with driver 4 ¹ | +|-------------------|-------------------------------------------------| +| 2.1.x | v3 | +| 2.2.x | v4 | +| 3.x | v4 | +| 4.x | v5 | *(1) for previous driver versions, see the [3.x documentation][driver3]* -*(2) at the time of writing, Cassandra 4 is not released yet. Protocol v5 support is still in beta, -and must be enabled explicitly (negotiation will yield v4).* - Since version 4.5.0, the driver can also use DSE protocols when all nodes are running a version of DSE. The table below shows the protocol matrix for these cases: @@ -43,10 +40,10 @@ DSE. The table below shows the protocol matrix for these cases: |---------------------|-------------------------------------------------| | 4.7/4.8 | v3 | | 5.0 | v4 | -| 5.1 | DSE_V1 ³ | -| 6.0/6.7/6.8 | DSE_V2 ³ | +| 5.1 | DSE_V1 ² | +| 6.0/6.7/6.8 | DSE_V2 ² | -*(3) DSE Protocols are chosen before other Cassandra native protocols.* +*(2) DSE Protocols are chosen before other Cassandra native protocols.* ### Controlling the protocol version diff --git a/manual/developer/native_protocol/README.md b/manual/developer/native_protocol/README.md index 11c936d272b..cbda8f794ff 100644 --- a/manual/developer/native_protocol/README.md +++ b/manual/developer/native_protocol/README.md @@ -9,10 +9,8 @@ This part of the code lives in its own project: The protocol specifications are available in [native-protocol/src/main/resources](https://github.com/datastax/native-protocol/tree/1.x/src/main/resources). -These files originally come from Cassandra, we copy them over for easy access. 
Note that, if the -latest version is a beta (this is the case for v5 at the time of writing -- September 2019), the -specification might not be up to date. Always compare with the latest revision in -[cassandra/doc](https://github.com/apache/cassandra/tree/trunk/doc). +These files originally come from Cassandra, we copy them over for easy access. Authoritative specifications can +always be found in [cassandra/doc](https://github.com/apache/cassandra/tree/trunk/doc). For a broad overview of how protocol types are used in the driver, let's step through an example: From dfc1164ef3fb76e212daf577a85b5381198890f6 Mon Sep 17 00:00:00 2001 From: hhughes Date: Wed, 12 Jul 2023 07:52:33 -0700 Subject: [PATCH 844/979] JAVA-3070: Make CqlVector and CqlDuration serializable (#1676) --- .../oss/driver/api/core/data/CqlDuration.java | 8 ++- .../oss/driver/api/core/data/CqlVector.java | 62 ++++++++++++++++++- .../driver/api/core/data/CqlDurationTest.java | 15 +++++ .../driver/api/core/data/CqlVectorTest.java | 36 +++++++++++ 4 files changed, 119 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java index 8ec509ea7f6..5bb07b92923 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlDuration.java @@ -20,6 +20,7 @@ import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.Serializable; import java.time.Duration; import java.time.Period; import java.time.temporal.ChronoUnit; @@ -42,7 +43,9 @@ * in time, regardless of the calendar). 
*/ @Immutable -public final class CqlDuration implements TemporalAmount { +public final class CqlDuration implements TemporalAmount, Serializable { + + private static final long serialVersionUID = 1L; @VisibleForTesting static final long NANOS_PER_MICRO = 1000L; @VisibleForTesting static final long NANOS_PER_MILLI = 1000 * NANOS_PER_MICRO; @@ -75,8 +78,11 @@ public final class CqlDuration implements TemporalAmount { private static final ImmutableList TEMPORAL_UNITS = ImmutableList.of(ChronoUnit.MONTHS, ChronoUnit.DAYS, ChronoUnit.NANOS); + /** @serial */ private final int months; + /** @serial */ private final int days; + /** @serial */ private final long nanoseconds; private CqlDuration(int months, int days, long nanoseconds) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java index 152d0f40823..2889ea5eb24 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java @@ -22,6 +22,12 @@ import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import com.datastax.oss.driver.shaded.guava.common.collect.Streams; import edu.umd.cs.findbugs.annotations.NonNull; +import java.io.IOException; +import java.io.InvalidObjectException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.ObjectStreamException; +import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; @@ -44,7 +50,7 @@ * where possible we've tried to make the API of this class similar to the equivalent methods on * {@link List}. */ -public class CqlVector implements Iterable { +public class CqlVector implements Iterable, Serializable { /** * Create a new CqlVector containing the specified values. 
@@ -190,4 +196,58 @@ public int hashCode() { public String toString() { return Iterables.toString(this.list); } + + /** + * Serialization proxy for CqlVector. Allows serialization regardless of implementation of list + * field. + * + * @param inner type of CqlVector, assume Number is always Serializable. + */ + private static class SerializationProxy implements Serializable { + + private static final long serialVersionUID = 1; + + private transient List list; + + SerializationProxy(CqlVector vector) { + this.list = vector.list; + } + + // Reconstruct CqlVector's list of elements. + private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { + stream.defaultReadObject(); + + int size = stream.readInt(); + list = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + list.add((T) stream.readObject()); + } + } + + // Return deserialized proxy object as CqlVector. + private Object readResolve() throws ObjectStreamException { + return new CqlVector(list); + } + + // Write size of CqlVector followed by items in order. + private void writeObject(ObjectOutputStream stream) throws IOException { + stream.defaultWriteObject(); + + stream.writeInt(list.size()); + for (T item : list) { + stream.writeObject(item); + } + } + } + + /** @serialData The number of elements in the vector, followed by each element in-order. 
*/ + private Object writeReplace() { + return new SerializationProxy(this); + } + + private void readObject(@SuppressWarnings("unused") ObjectInputStream stream) + throws InvalidObjectException { + // Should never be called since we serialized a proxy + throw new InvalidObjectException("Proxy required"); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java index 56c0b00b5e3..f5c263f0594 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlDurationTest.java @@ -20,6 +20,7 @@ import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.TestDataProviders; +import com.datastax.oss.driver.internal.SerializationHelper; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.time.ZonedDateTime; @@ -190,4 +191,18 @@ public void should_subtract_from_temporal() { assertThat(dateTime.minus(CqlDuration.from("1h15s15ns"))) .isEqualTo("2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]"); } + + @Test + public void should_serialize_and_deserialize() throws Exception { + CqlDuration initial = CqlDuration.from("3mo2d15s"); + CqlDuration deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } + + @Test + public void should_serialize_and_deserialize_negative() throws Exception { + CqlDuration initial = CqlDuration.from("-2d15m"); + CqlDuration deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java index ecf8f1249d0..75dfbc26e42 100644 --- 
a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java @@ -19,9 +19,12 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.internal.SerializationHelper; import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import org.assertj.core.util.Lists; @@ -195,4 +198,37 @@ public void should_correctly_compare_vectors() { assertThat(vector1).isNotSameAs(vector5); assertThat(vector1).isNotEqualTo(vector5); } + + @Test + public void should_serialize_and_deserialize() throws Exception { + CqlVector initial = CqlVector.newInstance(VECTOR_ARGS); + CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } + + @Test + public void should_serialize_and_deserialize_empty_vector() throws Exception { + CqlVector initial = CqlVector.newInstance(Collections.emptyList()); + CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } + + @Test + public void should_serialize_and_deserialize_unserializable_list() throws Exception { + CqlVector initial = + CqlVector.newInstance( + new AbstractList() { + @Override + public Float get(int index) { + return VECTOR_ARGS[index]; + } + + @Override + public int size() { + return VECTOR_ARGS.length; + } + }); + CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); + assertThat(deserialized).isEqualTo(initial); + } } From 41991c898dc479d413967faafc3ae9f7ab969aba Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 12 Jul 2023 13:38:32 -0500 Subject: [PATCH 845/979] JAVA-3083: Doc updates 
for new features in 4.17.0 (#1677) --- manual/core/README.md | 58 ++++++++-------- manual/core/custom_codecs/README.md | 12 ++++ upgrade_guide/README.md | 101 +++++++++++++++++++++------- 3 files changed, 120 insertions(+), 51 deletions(-) diff --git a/manual/core/README.md b/manual/core/README.md index 8cc6b670b99..349a810f3aa 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -231,34 +231,35 @@ See [AccessibleByName] for an explanation of the conversion rules. ##### CQL to Java type mapping -| CQL3 data type | Getter name | Java type | See also | -|---------------------|----------------|----------------------|-------------------------------------| -| ascii | getString | java.lang.String | | -| bigint | getLong | long | | -| blob | getByteBuffer | java.nio.ByteBuffer | | -| boolean | getBoolean | boolean | | -| counter | getLong | long | | -| date | getLocalDate | java.time.LocalDate | [Temporal types](temporal_types/) | -| decimal | getBigDecimal | java.math.BigDecimal | | -| double | getDouble | double | | -| duration | getCqlDuration | [CqlDuration] | [Temporal types](temporal_types/) | -| float | getFloat | float | | -| inet | getInetAddress | java.net.InetAddress | | -| int | getInt | int | | -| list | getList | java.util.List | | -| map | getMap | java.util.Map | | -| set | getSet | java.util.Set | | -| smallint | getShort | short | | -| text | getString | java.lang.String | | -| time | getLocalTime | java.time.LocalTime | [Temporal types](temporal_types/) | -| timestamp | getInstant | java.time.Instant | [Temporal types](temporal_types/) | -| timeuuid | getUuid | java.util.UUID | | -| tinyint | getByte | byte | | -| tuple | getTupleValue | [TupleValue] | [Tuples](tuples/) | -| user-defined types | getUDTValue | [UDTValue] | [User-defined types](udts/) | -| uuid | getUuid | java.util.UUID | | -| varchar | getString | java.lang.String | | -| varint | getBigInteger | java.math.BigInteger | | +| CQL3 data type | Getter name | Java type | See also 
| +|--------------------|----------------|----------------------|-----------------------------------| +| ascii | getString | java.lang.String | | +| bigint | getLong | long | | +| blob | getByteBuffer | java.nio.ByteBuffer | | +| boolean | getBoolean | boolean | | +| counter | getLong | long | | +| date | getLocalDate | java.time.LocalDate | [Temporal types](temporal_types/) | +| decimal | getBigDecimal | java.math.BigDecimal | | +| double | getDouble | double | | +| duration | getCqlDuration | [CqlDuration] | [Temporal types](temporal_types/) | +| float | getFloat | float | | +| inet | getInetAddress | java.net.InetAddress | | +| int | getInt | int | | +| list | getList | java.util.List | | +| map | getMap | java.util.Map | | +| set | getSet | java.util.Set | | +| smallint | getShort | short | | +| text | getString | java.lang.String | | +| time | getLocalTime | java.time.LocalTime | [Temporal types](temporal_types/) | +| timestamp | getInstant | java.time.Instant | [Temporal types](temporal_types/) | +| timeuuid | getUuid | java.util.UUID | | +| tinyint | getByte | byte | | +| tuple | getTupleValue | [TupleValue] | [Tuples](tuples/) | +| user-defined types | getUDTValue | [UDTValue] | [User-defined types](udts/) | +| uuid | getUuid | java.util.UUID | | +| varchar | getString | java.lang.String | | +| varint | getBigInteger | java.math.BigInteger | | +| vector | getVector | [CqlVector] | [Custom Codecs](custom_codecs/) | Sometimes the driver has to infer a CQL type from a Java type (for example when handling the values of [simple statements](statements/simple/)); for those that have multiple CQL equivalents, it makes @@ -322,6 +323,7 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { [AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html [GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html 
[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlDuration.html +[CqlVector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlVector.html [TupleValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/TupleValue.html [UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html [SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index 92a0274577b..ae873d0b60d 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -256,6 +256,17 @@ that maps instances of that class to Json strings, using a newly-allocated, defa It is also possible to pass a custom `ObjectMapper` instance using [ExtraTypeCodecs.json(Class, ObjectMapper)] instead. +#### Mapping CQL vectors to Java array + +By default, the driver maps CQL `vector` to the [CqlVector] value type. If you prefer to deal with +arrays, the driver offers the following codec: + +| Codec | CQL type | Java type | +|-------------------------------------------|-----------------|-----------| +| [ExtraTypeCodecs.floatVectorToArray(int)] | `vector` | `float[]` | + +This release only provides a codec for vectors containing float values. + ### Writing codecs If none of the driver built-in codecs above suits you, it is also possible to roll your own. 
@@ -707,6 +718,7 @@ private static String formatRow(Row row) { [ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- [ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- [ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- +[ExtraTypeCodecs.floatVectorToArray(int)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#floatVectorToArray-int- [TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB [TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 785f51290da..6310f220c3d 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,5 +1,60 @@ ## Upgrade guide +### 4.17.0 + +#### Beta support for Java17 + +With the completion of [JAVA-3042](https://datastax-oss.atlassian.net/browse/JAVA-3042) the driver now passes our automated test matrix for Java driver releases. +While all features function normally when run with Java 17 tests, we do not offer full support for this +platform until we've received feedback from other users in the ecosystem. + +If you discover an issue with the Java driver running on Java 17, please let us know. We will triage and address Java 17 issues. + +#### Updated API for vector search + +The 4.16.0 release introduced support for the CQL `vector` datatype. 
This release modifies the `CqlVector`
+value type used to represent a CQL vector to make it easier to use. `CqlVector` now implements the Iterable interface
+as well as several methods modelled on the JDK's List interface. For more, see
+[JAVA-3060](https://datastax-oss.atlassian.net/browse/JAVA-3060).
+
+The builder interface was replaced with factory methods that resemble similar methods on `CqlDuration`.
+For example, the following code will create a keyspace and table, populate that table with some data, and then execute
+a query that will return a `vector` type. This data is retrieved directly via `Row.getVector()` and the resulting
+`CqlVector` value object can be interrogated directly.
+
+```java
+try (CqlSession session = new CqlSessionBuilder().withLocalDatacenter("datacenter1").build()) {
+
+    session.execute("DROP KEYSPACE IF EXISTS test");
+    session.execute("CREATE KEYSPACE test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}");
+    session.execute("CREATE TABLE test.foo(i int primary key, j vector<float, 3>)");
+    session.execute("CREATE CUSTOM INDEX ann_index ON test.foo(j) USING 'StorageAttachedIndex'");
+    session.execute("INSERT INTO test.foo (i, j) VALUES (1, [8, 2.3, 58])");
+    session.execute("INSERT INTO test.foo (i, j) VALUES (2, [1.2, 3.4, 5.6])");
+    session.execute("INSERT INTO test.foo (i, j) VALUES (5, [23, 18, 3.9])");
+    ResultSet rs=session.execute("SELECT j FROM test.foo ORDER BY j ANN OF [3.4, 7.8, 9.1] LIMIT 1");
+    for (Row row : rs){
+        CqlVector<Float> v = row.getVector(0, Float.class);
+        System.out.println(v);
+        if (Iterables.size(v) != 3) {
+            throw new RuntimeException("Expected vector with three dimensions");
+        }
+    }
+}
+```
+
+You can also use the `CqlVector` type with prepared statements:
+
+```java
+PreparedStatement preparedInsert = session.prepare("INSERT INTO test.foo (i, j) VALUES (?,?)");
+CqlVector<Float> vector = CqlVector.newInstance(1.4f, 2.5f, 3.6f);
+session.execute(preparedInsert.bind(3, vector));
+```
+
+In some cases, it 
makes sense to access the vector directly as an array of some numerical type. This version +supports such use cases by providing a codec which translates a CQL vector to and from a primitive array. Only float arrays are supported. +You can find more information about this codec in the manual documentation on [custom codecs](../manual/core/custom_codecs/) + ### 4.15.0 #### CodecNotFoundException now extends DriverException @@ -15,7 +70,7 @@ a logic such as below, it won't compile anymore: ```java try { - doSomethingWithDriver(); + doSomethingWithDriver(); } catch(DriverException e) { } catch(CodecNotFoundException e) { } @@ -25,7 +80,7 @@ You need to either reverse the catch order and catch `CodecNotFoundException` fi ```java try { - doSomethingWithDriver(); + doSomethingWithDriver(); } catch(CodecNotFoundException e) { } catch(DriverException e) { } @@ -35,7 +90,7 @@ Or catch only `DriverException`: ```java try { - doSomethingWithDriver(); + doSomethingWithDriver(); } catch(DriverException e) { } ``` @@ -229,16 +284,16 @@ The above can also be achieved by an adapter class as shown below: ```java public class NodeFilterToDistanceEvaluatorAdapter implements NodeDistanceEvaluator { - private final Predicate nodeFilter; + private final Predicate nodeFilter; - public NodeFilterToDistanceEvaluatorAdapter(@NonNull Predicate nodeFilter) { - this.nodeFilter = nodeFilter; - } + public NodeFilterToDistanceEvaluatorAdapter(@NonNull Predicate nodeFilter) { + this.nodeFilter = nodeFilter; + } - @Nullable @Override - public NodeDistance evaluateDistance(@NonNull Node node, @Nullable String localDc) { - return nodeFilter.test(node) ? null : NodeDistance.IGNORED; - } + @Nullable @Override + public NodeDistance evaluateDistance(@NonNull Node node, @Nullable String localDc) { + return nodeFilter.test(node) ? 
null : NodeDistance.IGNORED; + } } ``` @@ -531,7 +586,7 @@ import com.datastax.driver.core.Row; import com.datastax.driver.core.SimpleStatement; SimpleStatement statement = - new SimpleStatement("SELECT release_version FROM system.local"); + new SimpleStatement("SELECT release_version FROM system.local"); ResultSet resultSet = session.execute(statement); Row row = resultSet.one(); System.out.println(row.getString("release_version")); @@ -543,7 +598,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; SimpleStatement statement = - SimpleStatement.newInstance("SELECT release_version FROM system.local"); + SimpleStatement.newInstance("SELECT release_version FROM system.local"); ResultSet resultSet = session.execute(statement); Row row = resultSet.one(); System.out.println(row.getString("release_version")); @@ -606,9 +661,9 @@ datastax-java-driver { // Application code: SimpleStatement statement1 = - SimpleStatement.newInstance("...").setExecutionProfileName("profile1"); + SimpleStatement.newInstance("...").setExecutionProfileName("profile1"); SimpleStatement statement2 = - SimpleStatement.newInstance("...").setExecutionProfileName("profile2"); + SimpleStatement.newInstance("...").setExecutionProfileName("profile2"); ``` The configuration can be reloaded periodically at runtime: @@ -727,13 +782,13 @@ propagating its own consistency level to its bound statements: ```java PreparedStatement ps1 = - session.prepare( - SimpleStatement.newInstance("SELECT * FROM product WHERE sku = ?") - .setConsistencyLevel(DefaultConsistencyLevel.ONE)); + session.prepare( + SimpleStatement.newInstance("SELECT * FROM product WHERE sku = ?") + .setConsistencyLevel(DefaultConsistencyLevel.ONE)); PreparedStatement ps2 = - session.prepare( - SimpleStatement.newInstance("SELECT * FROM product WHERE sku = ?") - .setConsistencyLevel(DefaultConsistencyLevel.TWO)); + session.prepare( + SimpleStatement.newInstance("SELECT * FROM product 
WHERE sku = ?") + .setConsistencyLevel(DefaultConsistencyLevel.TWO)); assert ps1 != ps2; @@ -834,8 +889,8 @@ Optional ks = metadata.getKeyspace("test"); assert !ks.isPresent(); session.execute( - "CREATE KEYSPACE IF NOT EXISTS test " - + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); + "CREATE KEYSPACE IF NOT EXISTS test " + + "WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); // This is still the same metadata from before the CREATE ks = metadata.getKeyspace("test"); From 95e3f2f7752d87a6c6ed91fd6f2fa2937b35f868 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 12 Jul 2023 17:19:41 -0500 Subject: [PATCH 846/979] 4.17.0 release documentation updates (#1681) --- README.md | 4 +- bom/pom.xml | 18 ++--- changelog/README.md | 8 ++ core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- manual/case_sensitivity/README.md | 10 +-- manual/core/README.md | 28 +++---- manual/core/address_resolution/README.md | 2 +- manual/core/async/README.md | 2 +- manual/core/authentication/README.md | 18 ++--- manual/core/bom/README.md | 4 +- manual/core/configuration/README.md | 20 ++--- manual/core/control_connection/README.md | 2 +- manual/core/custom_codecs/README.md | 76 +++++++++---------- manual/core/detachable_types/README.md | 14 ++-- manual/core/dse/geotypes/README.md | 6 +- manual/core/dse/graph/README.md | 4 +- manual/core/dse/graph/fluent/README.md | 4 +- .../core/dse/graph/fluent/explicit/README.md | 12 +-- manual/core/dse/graph/results/README.md | 6 +- manual/core/dse/graph/script/README.md | 6 +- manual/core/idempotence/README.md | 4 +- manual/core/integration/README.md | 7 +- manual/core/load_balancing/README.md | 12 +-- manual/core/metadata/README.md | 6 +- manual/core/metadata/node/README.md | 28 +++---- manual/core/metadata/schema/README.md | 20 ++--- manual/core/metadata/token/README.md | 4 +- manual/core/native_protocol/README.md | 6 
+- manual/core/non_blocking/README.md | 44 +++++------ manual/core/paging/README.md | 12 +-- manual/core/performance/README.md | 10 +-- manual/core/pooling/README.md | 2 +- manual/core/query_timestamps/README.md | 4 +- manual/core/reactive/README.md | 24 +++--- manual/core/reconnection/README.md | 8 +- manual/core/request_tracker/README.md | 4 +- manual/core/retries/README.md | 36 ++++----- manual/core/speculative_execution/README.md | 2 +- manual/core/ssl/README.md | 6 +- manual/core/statements/README.md | 8 +- manual/core/statements/batch/README.md | 6 +- .../statements/per_query_keyspace/README.md | 2 +- manual/core/statements/prepared/README.md | 8 +- manual/core/statements/simple/README.md | 6 +- manual/core/temporal_types/README.md | 8 +- manual/core/throttling/README.md | 6 +- manual/core/tracing/README.md | 12 +-- manual/core/tuples/README.md | 4 +- manual/core/udts/README.md | 4 +- manual/developer/common/concurrency/README.md | 4 +- manual/mapper/config/kotlin/README.md | 2 +- manual/mapper/config/record/README.md | 2 +- manual/mapper/config/scala/README.md | 2 +- manual/mapper/daos/README.md | 8 +- manual/mapper/daos/custom_types/README.md | 10 +-- manual/mapper/daos/delete/README.md | 18 ++--- manual/mapper/daos/getentity/README.md | 18 ++--- manual/mapper/daos/increment/README.md | 12 +-- manual/mapper/daos/insert/README.md | 14 ++-- manual/mapper/daos/null_saving/README.md | 10 +-- manual/mapper/daos/query/README.md | 24 +++--- manual/mapper/daos/queryprovider/README.md | 16 ++-- manual/mapper/daos/select/README.md | 28 +++---- manual/mapper/daos/setentity/README.md | 10 +-- .../daos/statement_attributes/README.md | 2 +- manual/mapper/daos/update/README.md | 12 +-- manual/mapper/entities/README.md | 36 ++++----- manual/mapper/mapper/README.md | 10 +-- manual/osgi/README.md | 6 +- manual/query_builder/README.md | 10 +-- manual/query_builder/condition/README.md | 2 +- manual/query_builder/delete/README.md | 4 +- manual/query_builder/insert/README.md | 2 
+- manual/query_builder/relation/README.md | 4 +- manual/query_builder/schema/README.md | 2 +- .../query_builder/schema/aggregate/README.md | 2 +- .../query_builder/schema/function/README.md | 2 +- manual/query_builder/schema/index/README.md | 2 +- .../query_builder/schema/keyspace/README.md | 2 +- .../schema/materialized_view/README.md | 4 +- manual/query_builder/schema/table/README.md | 6 +- manual/query_builder/schema/type/README.md | 2 +- manual/query_builder/select/README.md | 4 +- manual/query_builder/term/README.md | 4 +- manual/query_builder/truncate/README.md | 2 +- manual/query_builder/update/README.md | 4 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 2 +- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 98 files changed, 442 insertions(+), 433 deletions(-) diff --git a/README.md b/README.md index 739baedced3..e8a85027ab9 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the documentation for latest version through [DataStax Docs] or via the release tags, e.g. 
-[4.16.0](https://github.com/datastax/java-driver/tree/4.16.0).* +[4.17.0](https://github.com/datastax/java-driver/tree/4.17.0).* A modern, feature-rich and highly tunable Java client library for [Apache Cassandra®] \(2.1+) and [DataStax Enterprise] \(4.7+), and [DataStax Astra], using exclusively Cassandra's binary protocol @@ -86,7 +86,7 @@ See the [Cassandra error handling done right blog](https://www.datastax.com/blog * [Changelog] * [FAQ] -[API docs]: https://docs.datastax.com/en/drivers/java/4.14 +[API docs]: https://docs.datastax.com/en/drivers/java/4.17 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [@dsJavaDriver]: https://twitter.com/dsJavaDriver diff --git a/bom/pom.xml b/bom/pom.xml index 79ea3485cda..547658aea3b 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT com.datastax.oss diff --git a/changelog/README.md b/changelog/README.md index 30bb1194674..6c8c236a6a4 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -2,6 +2,14 @@ +### 4.17.0 + +- [improvement] JAVA-3070: Make CqlVector and CqlDuration serializable +- [improvement] JAVA-3085: Initialize c.d.o.d.i.core.util.Dependency at Graal native 
image build-time +- [improvement] JAVA-3061: CqlVector API improvements, add support for accessing vectors directly as float arrays +- [improvement] JAVA-3042: Enable automated testing for Java17 +- [improvement] JAVA-3050: Upgrade Netty to 4.1.94 + ### 4.16.0 - [improvement] JAVA-3058: Clear prepared statement cache on UDT type change event diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index fc89c2e7338..bfeb0234b14 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index db2af9ed156..e29d1e95325 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index bd7d0c811ef..c478f049dcf 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 85db139e7f4..8af4d4c9a6d 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 16b900a0fb2..45cc458a586 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-integration-tests jar diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 865354f41df..15a71eb76c5 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -106,11 +106,11 @@ For "consuming" methods, string overloads are also provided for convenience, for * in other cases, the string is always assumed to be in CQL form, and converted on the fly with `CqlIdentifier.fromCql`. -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/AccessibleByName.html ### Good practices diff --git a/manual/core/README.md b/manual/core/README.md index 349a810f3aa..a11c5e624be 100644 --- 
a/manual/core/README.md +++ b/manual/core/README.md @@ -315,19 +315,19 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { } ``` -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html -[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#builder-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlDuration.html -[CqlVector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlVector.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/TupleValue.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html -[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- -[SessionBuilder.withLocalDatacenter()]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession#builder()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html#builder-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/CqlDuration.html +[CqlVector]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/CqlVector.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/TupleValue.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html +[SessionBuilder.addContactPoint()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- +[SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- 
[CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index fe8c967a62a..433ffe58a75 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -124,7 +124,7 @@ Cassandra node: domain name of the target instance. Then it performs a forward DNS lookup of the domain name; the EC2 DNS does the private/public switch automatically based on location). -[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html +[AddressTranslator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/addresstranslation/AddressTranslator.html [cassandra.yaml]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html [rpc_address]: https://docs.datastax.com/en/cassandra/3.x/cassandra/configuration/configCassandra_yaml.html?scroll=configCassandra_yaml__rpc_address diff --git a/manual/core/async/README.md b/manual/core/async/README.md index 1daecfd61ee..d64ee2c9b85 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -207,4 +207,4 @@ documentation for more details and an example. 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index 45742c3aac2..ebb52bfc5a8 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -227,13 +227,13 @@ session.execute(statement); [SASL]: https://en.wikipedia.org/wiki/Simple_Authentication_and_Security_Layer -[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/AuthProvider.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html -[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html -[ProgrammaticPlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html -[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html -[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html -[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- -[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- 
-[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- +[AuthProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/auth/AuthProvider.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/context/DriverContext.html +[PlainTextAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/auth/PlainTextAuthProviderBase.html +[ProgrammaticPlainTextAuthProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/auth/ProgrammaticPlainTextAuthProvider.html +[DseGssApiAuthProviderBase]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderBase.html +[ProgrammaticDseGssApiAuthProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/auth/ProgrammaticDseGssApiAuthProvider.html +[ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- +[SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- +[SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- [reference.conf]: ../configuration/reference/ \ No newline at end of file diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index 935489beb7e..d0797264263 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -13,7 +13,7 @@ To import the driver's BOM, add the following section in your 
application's own com.datastax.oss java-driver-bom - 4.16.0 + 4.17.0 pom import @@ -65,7 +65,7 @@ good idea to extract a property to keep it in sync with the BOM: ```xml - 4.16.0 + 4.17.0 diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index 7dc9fd73afc..bccfb7d3fce 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -520,16 +520,16 @@ config.getDefaultProfile().getString(MyCustomOption.ADMIN_EMAIL); config.getDefaultProfile().getInt(MyCustomOption.AWESOMENESS_FACTOR); ``` -[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfig.html -[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html -[DriverOption]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverOption.html -[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html -[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- -[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- -[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- -[DriverConfigLoader.programmaticBuilder]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- +[DriverConfig]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfig.html +[DriverExecutionProfile]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverExecutionProfile.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/context/DriverContext.html +[DriverOption]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverOption.html +[DefaultDriverOption]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DefaultDriverOption.html +[DriverConfigLoader]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[DriverConfigLoader.fromClasspath]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromClasspath-java.lang.String- +[DriverConfigLoader.fromFile]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromFile-java.io.File- +[DriverConfigLoader.fromUrl]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromUrl-java.net.URL- +[DriverConfigLoader.programmaticBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-- [Typesafe Config]: https://github.com/typesafehub/config [config standard behavior]: https://github.com/typesafehub/config#standard-behavior diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index d8a9cddc718..570919fdc94 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -23,4 +23,4 @@ There are a 
few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index ae873d0b60d..e68e5d78029 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -671,13 +671,13 @@ private static String formatRow(Row row) { } ``` -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html -[format()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- -[parse()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- -[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html -[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- +[CodecRegistry]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[TypeCodec]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html +[format()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#format-JavaTypeT- +[parse()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodec.html#parse-java.lang.String- +[MappingCodec]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/MappingCodec.html +[SessionBuilder.addTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addTypeCodecs-com.datastax.oss.driver.api.core.type.codec.TypeCodec...- [Enums]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html [Enum.name()]: https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html#name-- @@ -691,37 +691,37 @@ private static String formatRow(Row row) { [java.time.LocalDateTime]: https://docs.oracle.com/javase/8/docs/api/java/time/LocalDateTime.html [java.time.ZoneId]: https://docs.oracle.com/javase/8/docs/api/java/time/ZoneId.html -[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html -[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY -[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY -[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY -[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY -[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY -[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY -[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY -[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY -[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC -[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- -[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM -[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC -[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC -[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM -[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC -[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- -[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED -[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- -[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- -[ExtraTypeCodecs.enumOrdinalsOf(Class)]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- -[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- -[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- -[ExtraTypeCodecs.floatVectorToArray(int)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#floatVectorToArray-int- - -[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB -[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP +[ExtraTypeCodecs]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html +[ExtraTypeCodecs.BLOB_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BLOB_TO_ARRAY +[ExtraTypeCodecs.BOOLEAN_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BOOLEAN_LIST_TO_ARRAY +[ExtraTypeCodecs.BYTE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#BYTE_LIST_TO_ARRAY +[ExtraTypeCodecs.SHORT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#SHORT_LIST_TO_ARRAY +[ExtraTypeCodecs.INT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#INT_LIST_TO_ARRAY 
+[ExtraTypeCodecs.LONG_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LONG_LIST_TO_ARRAY +[ExtraTypeCodecs.FLOAT_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#FLOAT_LIST_TO_ARRAY +[ExtraTypeCodecs.DOUBLE_LIST_TO_ARRAY]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#DOUBLE_LIST_TO_ARRAY +[ExtraTypeCodecs.listToArrayOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#listToArrayOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_UTC +[ExtraTypeCodecs.timestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampAt-java.time.ZoneId- +[ExtraTypeCodecs.TIMESTAMP_MILLIS_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_SYSTEM +[ExtraTypeCodecs.TIMESTAMP_MILLIS_UTC]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#TIMESTAMP_MILLIS_UTC +[ExtraTypeCodecs.timestampMillisAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#timestampMillisAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_UTC 
+[ExtraTypeCodecs.zonedTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#zonedTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.LOCAL_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_SYSTEM +[ExtraTypeCodecs.LOCAL_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#LOCAL_TIMESTAMP_UTC +[ExtraTypeCodecs.localTimestampAt(ZoneId)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#localTimestampAt-java.time.ZoneId- +[ExtraTypeCodecs.ZONED_TIMESTAMP_PERSISTED]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#ZONED_TIMESTAMP_PERSISTED +[ExtraTypeCodecs.optionalOf(TypeCodec)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#optionalOf-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[ExtraTypeCodecs.enumNamesOf(Class)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumNamesOf-java.lang.Class- +[ExtraTypeCodecs.enumOrdinalsOf(Class)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#enumOrdinalsOf-java.lang.Class- +[ExtraTypeCodecs.json(Class)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class- +[ExtraTypeCodecs.json(Class, ObjectMapper)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#json-java.lang.Class-com.fasterxml.jackson.databind.ObjectMapper- +[ExtraTypeCodecs.floatVectorToArray(int)]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/ExtraTypeCodecs.html#floatVectorToArray-int- + +[TypeCodecs.BLOB]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#BLOB +[TypeCodecs.TIMESTAMP]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#TIMESTAMP [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index a0f38d11f74..3218eecc5e6 100644 --- a/manual/core/detachable_types/README.md +++ b/manual/core/detachable_types/README.md @@ -137,13 +137,13 @@ Even then, the defaults used by detached objects might be good enough for you: Otherwise, just make sure you reattach objects any time you deserialize them or create them from scratch. -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html -[CodecRegistry#DEFAULT]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT -[DataType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/DataType.html -[Detachable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/detach/Detachable.html -[Session]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html -[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html +[CodecRegistry#DEFAULT]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html#DEFAULT +[DataType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/DataType.html +[Detachable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/detach/Detachable.html +[Session]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html +[ColumnDefinition]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ColumnDefinition.html +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html [Java serialization]: https://docs.oracle.com/javase/tutorial/jndi/objects/serial.html [protocol specifications]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 79470ec946b..79a4c034052 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -166,9 +166,9 @@ All geospatial types interoperate with three standard formats: [ESRI]: https://github.com/Esri/geometry-api-java -[LineString]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/LineString.html -[Point]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/Point.html -[Polygon]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/data/geometry/Polygon.html +[LineString]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/data/geometry/LineString.html +[Point]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/data/geometry/Point.html +[Polygon]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/data/geometry/Polygon.html [Well-known text]: 
https://en.wikipedia.org/wiki/Well-known_text [Well-known binary]: https://en.wikipedia.org/wiki/Well-known_text#Well-known_binary diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index bc9669634ee..9d6ef39f2f3 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -74,8 +74,8 @@ fluent API returns Apache TinkerPop™ types directly. [Apache TinkerPop™]: http://tinkerpop.apache.org/ -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html -[GraphSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html +[GraphSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/GraphSession.html [DSE developer guide]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/graphTOC.html [Gremlin]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/dseGraphAbout.html#dseGraphAbout__what-is-cql diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index b7027490b33..9201470b6a5 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -109,8 +109,8 @@ All the DSE predicates are available on the driver side: .values("name"); ``` -[Search]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/predicates/Search.html -[Geo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/predicates/Geo.html +[Search]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/predicates/Search.html +[Geo]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/predicates/Geo.html [Apache TinkerPop™]: http://tinkerpop.apache.org/ [TinkerPop DSL]: 
http://tinkerpop.apache.org/docs/current/reference/#dsl diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index b7741a0de2b..f3d8072dcb9 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -105,9 +105,9 @@ added in a future version. See also the [parent page](../) for topics common to all fluent traversals. -[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html -[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[FluentGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- -[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html -[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- -[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- +[FluentGraphStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html +[FluentGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#newInstance-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[FluentGraphStatement.builder]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/FluentGraphStatement.html#builder-org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal- +[BatchGraphStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html +[BatchGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#newInstance-- +[BatchGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/BatchGraphStatement.html#builder-- diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index fa98525c756..9e1a357a443 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -137,8 +137,8 @@ If a type doesn't have a corresponding `asXxx()` method, use the variant that ta UUID uuid = graphNode.as(UUID.class); ``` -[GraphNode]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphNode.html -[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/GraphResultSet.html -[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html +[GraphNode]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/GraphNode.html +[GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/GraphResultSet.html +[AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html [DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file diff --git a/manual/core/dse/graph/script/README.md 
b/manual/core/dse/graph/script/README.md index b191cc7db7c..2b98664ea16 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -101,6 +101,6 @@ Note however that some types of queries can only be performed through the script * configuration; * DSE graph schema queries. -[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html -[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html +[ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 8eb9135488a..3746825390a 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -60,5 +60,5 @@ assert bs.isIdempotent(); The query builder tries to infer idempotence automatically; refer to [its manual](../../query_builder/idempotence/) for more details. 
-[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- -[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- +[Statement.setIdempotent]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setIdempotent-java.lang.Boolean- +[StatementBuilder.setIdempotence]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setIdempotence-java.lang.Boolean- diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 02d0d97027b..32237df8818 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -562,6 +562,7 @@ Here are the recommended TinkerPop versions for each driver version:

          <tr><th>Driver version</th><th>TinkerPop version</th></tr>
          <tr><td>4.16.0</td><td>3.5.3</td></tr>
          <tr><td>4.15.0</td><td>3.5.3</td></tr>
          <tr><td>4.14.1</td><td>3.5.3</td></tr>
          <tr><td>4.14.0</td><td>3.4.10</td></tr>
          + @@ -666,6 +667,6 @@ The remaining core driver dependencies are the only ones that are truly mandator [guava]: https://github.com/google/guava/issues/2721 [annotation processing]: https://docs.oracle.com/javase/8/docs/technotes/tools/windows/javac.html#sthref65 -[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- -[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- -[Uuids]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html +[Session.getMetrics]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#getMetrics-- +[SessionBuilder.addContactPoint]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoint-java.net.InetSocketAddress- +[Uuids]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/uuid/Uuids.html diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index abc950fe378..2b60dcb1580 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -426,12 +426,12 @@ Then it uses the "closest" distance for any given node. For example: * policy1 changes its suggestion to IGNORED. node1 is set to REMOTE; * policy1 changes its suggestion to REMOTE. node1 stays at REMOTE. 
-[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html -[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/context/DriverContext.html +[LoadBalancingPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.html [BasicLoadBalancingPolicy]: https://github.com/datastax/java-driver/blob/4.x/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java -[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- -[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- -[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- -[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html +[getRoutingKeyspace()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKeyspace-- +[getRoutingToken()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getRoutingToken-- +[getRoutingKey()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[NodeDistanceEvaluator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/loadbalancing/NodeDistanceEvaluator.html [`nodetool status`]: https://docs.datastax.com/en/dse/6.7/dse-dev/datastax_enterprise/tools/nodetool/toolsStatus.html [cqlsh]: 
https://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/startCqlshStandalone.html diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 34358bfdf5e..1bb07483869 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -56,6 +56,6 @@ new keyspace in the schema metadata before the token metadata was updated. Schema and node state events are debounced. This allows you to control how often the metadata gets refreshed. See the [Performance](../performance/#debouncing) page for more details. -[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- -[Metadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- +[Metadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html +[Node]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index ae66a468fd3..0f0b6176f42 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -129,17 +129,17 @@ beyond the scope of this document; if you're interested, study the `TopologyMoni the source code. 
-[Metadata#getNodes]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- -[Node]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html -[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- -[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- -[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- -[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- -[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- -[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- -[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- -[NodeState]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeState.html -[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html -[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html -[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- -[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html +[Metadata#getNodes]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html#getNodes-- +[Node]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html +[Node#getState()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getState-- +[Node#getDatacenter()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getDatacenter-- +[Node#getRack()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getRack-- +[Node#getDistance()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getDistance-- +[Node#getExtras()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getExtras-- +[Node#getOpenConnections()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- +[Node#isReconnecting()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#isReconnecting-- +[NodeState]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/NodeState.html +[NodeStateListener]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/NodeStateListener.html +[NodeStateListenerBase]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/NodeStateListenerBase.html +[SessionBuilder.addNodeStateListener]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addNodeStateListener-com.datastax.oss.driver.api.core.metadata.NodeStateListener- +[DseNodeProperties]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/metadata/DseNodeProperties.html diff --git a/manual/core/metadata/schema/README.md 
b/manual/core/metadata/schema/README.md index c42e56e5735..ed2c4c70750 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -321,16 +321,16 @@ unavailable for the excluded keyspaces. If you issue schema-altering requests from the driver (e.g. `session.execute("CREATE TABLE ..")`), take a look at the [Performance](../../performance/#schema-updates) page for a few tips. -[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- -[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html -[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html -[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- -[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- -[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- -[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- -[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html -[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html -[DseAggregateMetadata]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html +[Metadata#getKeyspaces]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html#getKeyspaces-- +[SchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListener.html +[SchemaChangeListenerBase]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/schema/SchemaChangeListenerBase.html +[Session#setSchemaMetadataEnabled]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#setSchemaMetadataEnabled-java.lang.Boolean- +[Session#checkSchemaAgreementAsync]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#checkSchemaAgreementAsync-- +[SessionBuilder#addSchemaChangeListener]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addSchemaChangeListener-com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener- +[ExecutionInfo#isSchemaInAgreement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#isSchemaInAgreement-- +[com.datastax.dse.driver.api.core.metadata.schema]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/metadata/schema/package-frame.html +[DseFunctionMetadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadata.html +[DseAggregateMetadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadata.html [JAVA-750]: https://datastax-oss.atlassian.net/browse/JAVA-750 [java.util.regex.Pattern]: https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html diff --git a/manual/core/metadata/token/README.md 
b/manual/core/metadata/token/README.md index 475274fd4e4..1c6a9c08ae7 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -169,5 +169,5 @@ on [schema metadata](../schema/). If schema metadata is disabled or filtered, to also be unavailable for the excluded keyspaces. -[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 23df7ed9eec..73f5cad3f21 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -135,6 +135,6 @@ If you want to see the details of mixed cluster negotiation, enable `DEBUG` leve [protocol spec]: https://github.com/datastax/native-protocol/tree/1.x/src/main/resources [driver3]: https://docs.datastax.com/en/developer/java-driver/3.10/manual/native_protocol/ -[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- -[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- -[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- +[ExecutionInfo.getWarnings]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getWarnings-- 
+[Request.getCustomPayload]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getCustomPayload-- +[AttachmentPoint.getProtocolVersion]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/detach/AttachmentPoint.html#getProtocolVersion-- diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 0960379936b..a40779f8e11 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -49,22 +49,22 @@ For example, calling any synchronous method declared in [`SyncCqlSession`], such will block until the result is available. These methods should never be used in non-blocking applications. -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` -[`execute`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html +[`execute`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html#execute-com.datastax.oss.driver.api.core.cql.Statement- However, the asynchronous methods declared in [`AsyncCqlSession`], such as [`executeAsync`], are all safe for use in non-blocking applications; the statement execution and asynchronous result delivery is guaranteed to never block.
-[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html -[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`AsyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html +[`executeAsync`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncCqlSession.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- The same applies to the methods declared in [`ReactiveSession`] such as [`executeReactive`]: the returned publisher will never block when subscribed to, until the final results are delivered to the subscriber. -[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[`executeReactive`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html#executeReactive-com.datastax.oss.driver.api.core.cql.Statement- There is one exception though: continuous paging queries (a feature specific to DSE) have a special execution model which uses internal locks for coordination. Although such locks are only held for @@ -77,10 +77,10 @@ reactive APIs like [`executeContinuouslyAsync`] and [`executeContinuouslyReactiv though, continuous paging is extremely efficient and can safely be used in most non-blocking contexts, unless they require strict lock-freedom. 
-[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html -[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html -[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- -[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- +[`ContinuousSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html +[`ContinuousReactiveSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html +[`executeContinuouslyAsync`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/continuous/ContinuousSession.html#executeContinuouslyAsync-com.datastax.oss.driver.api.core.cql.Statement- +[`executeContinuouslyReactive`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousReactiveSession.html#executeContinuouslyReactive-com.datastax.oss.driver.api.core.cql.Statement- #### Driver lock-free guarantees per session lifecycle phases @@ -110,8 +110,8 @@ Similarly, a call to [`SessionBuilder.build()`] should be considered blocking as calling thread and wait until the method returns. For this reason, calls to `SessionBuilder.build()` should be avoided in non-blocking applications. 
-[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- -[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- +[`SessionBuilder.buildAsync()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#buildAsync-- +[`SessionBuilder.build()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#build-- Once the session is initialized, however, the driver is guaranteed to be non-blocking during the session's lifecycle, and under normal operation, unless otherwise noted elsewhere in this document. @@ -121,8 +121,8 @@ during that phase. Therefore, calls to any method declared in [`AsyncAutoCloseab asynchronous ones like [`closeAsync()`], should also be preferably deferred until the application is shut down and lock-freedom enforcement is disabled. -[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html -[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- +[`AsyncAutoCloseable`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html +[`closeAsync()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AsyncAutoCloseable.html#closeAsync-- #### Driver lock-free guarantees for specific components @@ -131,7 +131,7 @@ Certain driver components are not implemented in lock-free algorithms. For example, [`SafeInitNodeStateListener`] is implemented with internal locks for coordination. It should not be used if strict lock-freedom is enforced. 
-[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html +[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html The same is valid for both built-in [request throttlers]: @@ -143,7 +143,7 @@ use locks internally, and depending on how many requests are being executed in p contention on these locks can be high: in short, if your application enforces strict lock-freedom, then these components should not be used. -[request throttlers]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html +[request throttlers]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html Other components may be lock-free, *except* for their first invocation. This is the case of the following items: @@ -151,8 +151,8 @@ following items: * All built-in implementations of [`TimestampGenerator`], upon instantiation; * The utility method [`Uuids.timeBased()`]. -[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/time/TimestampGenerator.html -[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- +[`TimestampGenerator`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[`Uuids.timeBased()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/uuid/Uuids.html#timeBased-- Both components need to access native libraries when they get initialized and this may involve hitting the local filesystem, thus causing the initialization to become a blocking call. 
@@ -172,7 +172,7 @@ One component, the codec registry, can block when its [`register`] method is cal therefore advised that codecs should be registered during application startup exclusively. See the [custom codecs](../custom_codecs) section for more details about registering codecs. -[`register`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- +[`register`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/MutableCodecRegistry.html#register-com.datastax.oss.driver.api.core.type.codec.TypeCodec- Finally, a few internal components also use locks, but only during session initialization; once the session is ready, they are either discarded, or don't use locks anymore for the rest of the @@ -213,7 +213,7 @@ lock-freedom enforcement tools could report calls to that method, but it was imp these calls. Thanks to [JAVA-2449], released with driver 4.10.0, `Uuids.random()` became a non-blocking call and random UUIDs can now be safely generated in non-blocking applications. -[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- +[`Uuids.random()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/uuid/Uuids.html#random-- [JAVA-2449]: https://datastax-oss.atlassian.net/browse/JAVA-2449 #### Driver lock-free guarantees when reloading the configuration @@ -228,8 +228,8 @@ detectors. If that is the case, it is advised to disable hot-reloading by settin `datastax-java-driver.basic.config-reload-interval` option to 0. See the manual page on [configuration](../configuration) for more information. 
-[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html -[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- +[`DriverConfigLoader`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html +[hot-reloading]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#supportsReloading-- #### Driver lock-free guarantees when connecting to DSE diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index d08d92e8f36..761a6bfbc66 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -253,12 +253,12 @@ protocol page size and the logical page size to the same value. The [driver examples] include two complete web service implementations demonstrating forward-only and offset paging. 
-[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- -[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- -[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/paging/OffsetPager.html -[PagingState]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PagingState.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncPagingIterable.hasMorePages]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#hasMorePages-- +[AsyncPagingIterable.fetchNextPage]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AsyncPagingIterable.html#fetchNextPage-- +[OffsetPager]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/paging/OffsetPager.html +[PagingState]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/PagingState.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 90b379c59d6..aaaebdaa6c9 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -345,8 +345,8 @@ possible to reuse the same event loop group for I/O, admin tasks, and even your (the driver's 
internal code is fully asynchronous so it will never block any thread). The timer is the only one that will have to stay on a separate thread. -[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/AccessibleByName.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html -[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- -[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[AccessibleByName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/AccessibleByName.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html +[CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[GenericType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index d0d2de7d128..ad9e6f97a02 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -170,5 +170,5 @@ you experience the issue, here's what to look out for: Try adding more connections per node. 
Thanks to the driver's hot-reload mechanism, you can do that at runtime and see the effects immediately. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html [CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index bc01ce41d4d..c851e023e14 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -187,9 +187,9 @@ Here is the order of precedence of all the methods described so far: 3. otherwise, if the timestamp generator assigned a timestamp, use it; 4. otherwise, let the server assign the timestamp. -[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/time/TimestampGenerator.html +[TimestampGenerator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/time/TimestampGenerator.html [gettimeofday]: http://man7.org/linux/man-pages/man2/settimeofday.2.html [JNR]: https://github.com/jnr/jnr-posix [Lightweight transactions]: https://docs.datastax.com/en/dse/6.0/cql/cql/cql_using/useInsertLWT.html -[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- +[Statement.setQueryTimestamp()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setQueryTimestamp-long- diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index 6073ac4bf98..d0182c4fbc2 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -367,18 +367,18 @@ Note that the driver already has a [built-in retry mechanism] that can transpare queries; the above example should be seen as a 
demonstration of application-level retries, when a more fine-grained control of what should be retried, and how, is required. -[CqlSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html -[ReactiveSession]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html -[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- -[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- -[wasApplied]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- -[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- -[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- -[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- +[CqlSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html +[ReactiveSession]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveSession.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ReactiveRow]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html +[getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getColumnDefinitions-- +[getExecutionInfos]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#getExecutionInfos-- +[wasApplied]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html#wasApplied-- +[ReactiveRow.getColumnDefinitions]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getColumnDefinitions-- +[ReactiveRow.getExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#getExecutionInfo-- +[ReactiveRow.wasApplied]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveRow.html#wasApplied-- [built-in retry mechanism]: ../retries/ [request throttling]: ../throttling/ diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index c383b887fcc..b27dd19aa27 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -84,7 +84,7 @@ Note that the session is not accessible until it is fully ready: the `CqlSession call — or the future returned by `buildAsync()` — will not complete until the 
connection was established. -[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html -[DriverContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/context/DriverContext.html -[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html +[DriverContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/context/DriverContext.html +[ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 4186139c0ba..0862654e53f 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -123,5 +123,5 @@ all FROM users WHERE user_id=? 
[v0=42] com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined column name all ``` -[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[RequestTracker]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/tracker/RequestTracker.html +[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index 6e150eb77e4..cdd3a5740a2 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -231,21 +231,21 @@ configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html -[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/DriverTimeoutException.html -[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html -[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/HeartbeatException.html -[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html -[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html -[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html -[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html -[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html -[RetryDecision]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryDecision.html -[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryPolicy.html -[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/retry/RetryVerdict.html -[ServerError]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/ServerError.html -[TruncateException]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/TruncateException.html -[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html -[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html -[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[ClosedConnectionException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/ClosedConnectionException.html +[DriverTimeoutException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/DriverTimeoutException.html +[FunctionFailureException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/FunctionFailureException.html +[HeartbeatException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/HeartbeatException.html +[ProtocolError]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/ProtocolError.html +[OverloadedException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/OverloadedException.html +[QueryValidationException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/QueryValidationException.html +[ReadFailureException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/ReadFailureException.html +[ReadTimeoutException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/ReadTimeoutException.html +[RetryDecision]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/retry/RetryDecision.html +[RetryPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/retry/RetryPolicy.html +[RetryVerdict]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/retry/RetryVerdict.html +[ServerError]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/ServerError.html +[TruncateException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/TruncateException.html +[UnavailableException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/UnavailableException.html +[WriteFailureException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/WriteFailureException.html +[WriteTimeoutException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/servererrors/WriteTimeoutException.html diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index cf6675e9dbf..53913a6eda7 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -250,4 +250,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. 
-[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 2e293d7e346..37396c6d4c0 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -204,6 +204,6 @@ the box, but with a bit of custom development it is fairly easy to add. See [dsClientToNode]: https://docs.datastax.com/en/cassandra/3.0/cassandra/configuration/secureSSLClientToNode.html [pickle]: http://thelastpickle.com/blog/2015/09/30/hardening-cassandra-step-by-step-part-1-server-to-server.html [JSSE system properties]: http://docs.oracle.com/javase/6/docs/technotes/guides/security/jsse/JSSERefGuide.html#Customization -[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- -[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- -[ProgrammaticSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html +[SessionBuilder.withSslEngineFactory]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslEngineFactory-com.datastax.oss.driver.api.core.ssl.SslEngineFactory- +[SessionBuilder.withSslContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withSslContext-javax.net.ssl.SSLContext- +[ProgrammaticSslEngineFactory]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.html diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index 08646b77609..f02806fb940 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -59,7 +59,7 @@ the [configuration](../configuration/). Namely, these are: idempotent flag, quer consistency levels and page size. We recommended the configuration approach whenever possible (you can create execution profiles to capture common combinations of those options). -[Statement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html -[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html -[execute]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- -[executeAsync]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- +[Statement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html +[StatementBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/StatementBuilder.html +[execute]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#execute-com.datastax.oss.driver.api.core.cql.Statement- +[executeAsync]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#executeAsync-com.datastax.oss.driver.api.core.cql.Statement- diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 051a3a35df9..05e803770eb 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -61,8 
+61,8 @@ In addition, simple statements with named parameters are currently not supported due to a [protocol limitation][CASSANDRA-10246] that will be fixed in a future version). If you try to execute such a batch, an `IllegalArgumentException` is thrown. -[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html -[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- -[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BatchStatement.html +[BatchStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BatchStatement.html#newInstance-com.datastax.oss.driver.api.core.cql.BatchType- +[BatchStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BatchStatement.html#builder-com.datastax.oss.driver.api.core.cql.BatchType- [batch_dse]: http://docs.datastax.com/en/dse/6.7/cql/cql/cql_using/useBatch.html [CASSANDRA-10246]: https://issues.apache.org/jira/browse/CASSANDRA-10246 diff --git a/manual/core/statements/per_query_keyspace/README.md b/manual/core/statements/per_query_keyspace/README.md index 4100e864660..f9076b5b5b6 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -124,6 +124,6 @@ SimpleStatement statement = At some point in the future, when Cassandra 4 becomes prevalent and using a per-query keyspace is the norm, we'll probably deprecate `setRoutingKeyspace()`. 
-[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- +[token-aware routing]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- [CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index 29ad525fc42..d5a4739c11b 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -330,10 +330,10 @@ With Cassandra 4 and [native protocol](../../native_protocol/) v5, this issue is new version with the response; the driver updates its local cache transparently, and the client can observe the new columns in the result set. -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[Session.prepare]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [CASSANDRA-10786]: https://issues.apache.org/jira/browse/CASSANDRA-10786 [CASSANDRA-10813]: https://issues.apache.org/jira/browse/CASSANDRA-10813 [guava eviction]: https://github.com/google/guava/wiki/CachesExplained#reference-based-eviction -[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- -[PreparedStatement.boundStatementBuilder]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- +[PreparedStatement.bind]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#bind-java.lang.Object...- +[PreparedStatement.boundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/PreparedStatement.html#boundStatementBuilder-java.lang.Object...- diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index d4f62e0a207..df56698b4ee 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -182,6 +182,6 @@ session.execute( Or you could also use [prepared statements](../prepared/), which don't have this limitation since parameter types are known in advance. -[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html -[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- -[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- +[SimpleStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SimpleStatement.html +[SimpleStatement.newInstance()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#newInstance-java.lang.String- +[SimpleStatement.builder()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SimpleStatement.html#builder-java.lang.String- diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 2bb0573ce90..2128f822694 100644 --- a/manual/core/temporal_types/README.md +++ 
b/manual/core/temporal_types/README.md @@ -146,7 +146,7 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); // prints "2018-10-03T22:59:44.999999985-07:00[America/Los_Angeles]" ``` -[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/CqlDuration.html -[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM -[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[CqlDuration]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/CqlDuration.html +[TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM +[TypeCodecs.ZONED_TIMESTAMP_UTC]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index f1496cbf176..0e1605dafb5 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -145,6 +145,6 @@ datastax-java-driver { If you enable `throttling.delay`, make sure to also check the associated extra options to correctly size the underlying histograms (`metrics.session.throttling.delay.*`). 
-[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/RequestThrottlingException.html -[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/RequestThrottlingException.html +[AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AllNodesFailedException.html +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index 858b089ffbe..f3154600f9f 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -113,9 +113,9 @@ for (TraceEvent event : trace.getEvents()) { If you call `getQueryTrace()` for a statement that didn't have tracing enabled, an exception is thrown. 
-[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html -[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/QueryTrace.html -[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- -[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- -[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- -[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[ExecutionInfo]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html +[QueryTrace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/QueryTrace.html +[Statement.setTracing()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setTracing-boolean- +[StatementBuilder.setTracing()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/StatementBuilder.html#setTracing-- +[ExecutionInfo.getTracingId()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getTracingId-- +[ExecutionInfo.getQueryTrace()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index e5bda3947a6..69c2f24a46b 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -139,5 +139,5 @@ BoundStatement bs = [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/tupleType.html -[TupleType]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/TupleType.html -[TupleValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/TupleValue.html +[TupleType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/TupleType.html +[TupleValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/TupleValue.html diff --git a/manual/core/udts/README.md b/manual/core/udts/README.md index 99c34f234c4..f45cf658b89 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -135,5 +135,5 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index 36db9562032..a09d1c9fd63 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -101,8 +101,8 @@ public interface ExecutionInfo { When a public API method is blocking, this is generally clearly stated in its javadocs. 
-[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- -[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html` +[`ExecutionInfo.getQueryTrace()`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ExecutionInfo.html#getQueryTrace-- +[`SyncCqlSession`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/SyncCqlSession.html `BlockingOperation` is a utility to check that those methods aren't called on I/O threads, which could introduce deadlocks. diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index 50809f8a7f2..4ee234ffa14 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -106,4 +106,4 @@ before compilation: [build.gradle]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/build.gradle [UserDao.kt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/kotlin/src/main/kotlin/com/datastax/examples/mapper/killrvideo/user/UserDao.kt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index 8d6f9621b47..7466812fc9b 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -27,7 +27,7 @@ You need to build with Java 14, and pass the `--enable-preview` flag to both the runtime JVM. See [pom.xml] in the example.
-[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html [DataStax-Examples/object-mapper-jvm/record]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/record [pom.xml]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/record/pom.xml diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index e0d1970c209..b043bd784ad 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -54,4 +54,4 @@ mapper builder. [DataStax-Examples/object-mapper-jvm/scala]: https://github.com/DataStax-Examples/object-mapper-jvm/tree/master/scala [build.sbt]: https://github.com/DataStax-Examples/object-mapper-jvm/blob/master/scala/build.sbt -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index c55a10cc3ba..e76dde55314 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -148,8 +148,8 @@ In this case, any annotations declared in `Dao1` would be chosen over `Dao2`. To control how the hierarchy is scanned, annotate interfaces with [@HierarchyScanStrategy]. 
-[@Dao]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html [Entity Inheritance]: ../entities/#inheritance diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index ed5fd69a535..75e9733cb2f 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -236,8 +236,8 @@ flag: With this configuration, if a DAO method declares a non built-in return type, it will be surfaced as a compiler error. 
-[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html -[GenericType]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperContext.html -[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html -[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[GenericType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/reflect/GenericType.html +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/MapperContext.html +[MapperResultProducer]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/result/MapperResultProducer.html +[MapperResultProducerService]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/result/MapperResultProducerService.html diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 6d4ad9854cc..10f4ad249d2 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -151,15 +151,15 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@Delete]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Delete.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@Delete]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Delete.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[ResultSet]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index 6c37bb1169b..cea11e34d17 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -130,15 +130,15 @@ If the return type doesn't match the parameter type (for example [PagingIterable [AsyncResultSet]), the mapper processor will issue a compile-time error. -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[GettableByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/GettableByName.html -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html 
-[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[GettableByName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/GettableByName.html +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html [Stream]: https://docs.oracle.com/javase/8/docs/api/java/util/stream/Stream.html diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index 1c2c1f24d3c..c8e90a51627 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -75,12 +75,12 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[@Increment]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Increment.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Increment]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Increment.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/CqlName.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index 
2d4a08fb694..bfd95229e1b 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -108,13 +108,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)). -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Insert]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[ResultSet#getExecutionInfo()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Insert]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[ResultSet#getExecutionInfo()]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html#getExecutionInfo-- +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index aca631062db..e2858f43b4d 100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -93,10 +93,10 @@ public interface UserDao extends InventoryDao { } ``` -[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[MapperException]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperException.html -[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET -[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL +[@DefaultNullSavingStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DefaultNullSavingStrategy.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[MapperException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/MapperException.html 
+[DO_NOT_SET]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#DO_NOT_SET +[SET_TO_NULL]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/saving/NullSavingStrategy.html#SET_TO_NULL [CASSANDRA-7304]: https://issues.apache.org/jira/browse/CASSANDRA-7304 diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index d45063eb74c..0d4293b5f15 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -113,18 +113,18 @@ Then: query succeeds or not depends on whether the session that the mapper was built with has a [default keyspace]. -[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Query]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Query.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[Row]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/Row.html -[BoundStatement]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Query]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Query.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[ResultSet#wasApplied()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html#wasApplied-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[Row]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Row.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index e4958928c59..7c750bcce1f 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -137,11 +137,11 @@ Here is the full implementation: the desired [PagingIterable][PagingIterable]. -[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html -[providerClass]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- -[entityHelpers]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- -[providerMethod]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- -[MapperContext]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/MapperContext.html -[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/EntityHelper.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html +[@QueryProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html +[providerClass]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerClass-- +[entityHelpers]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#entityHelpers-- +[providerMethod]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/QueryProvider.html#providerMethod-- +[MapperContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/MapperContext.html +[EntityHelper]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/EntityHelper.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 857e176552d..9d5357ad546 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -160,20 +160,20 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the [naming strategy](../../entities/#naming-strategy)).
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html -[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- -[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- -[groupBy()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- -[limit()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- -[orderBy()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- -[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- -[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html -[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html -[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- -[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html +[default keyspace]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html +[allowFiltering()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#allowFiltering-- +[customWhereClause()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#customWhereClause-- +[groupBy()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#groupBy-- +[limit()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#limit-- +[orderBy()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#orderBy-- +[perPartitionLimit()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html#perPartitionLimit-- +[MappedAsyncPagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/MappedAsyncPagingIterable.html +[PagingIterable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html +[PagingIterable.spliterator]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/PagingIterable.html#spliterator-- +[MappedReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/mapper/reactive/MappedReactiveResultSet.html [CompletionStage]: 
https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index de6701ada50..cedb6e3dc45 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -112,8 +112,8 @@ BoundStatement bind(Product product, BoundStatement statement); If you use a void method with [BoundStatement], the mapper processor will issue a compile-time warning. -[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html -[SettableByName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/SettableByName.html -[UdtValue]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/data/UdtValue.html +[@SetEntity]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/SetEntity.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[BoundStatementBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatementBuilder.html +[SettableByName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/SettableByName.html +[UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index 141b619dd7f..aa11e065b4f 100644 --- 
a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -60,4 +60,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 610bc9fb4d7..6a14a4a6140 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -143,13 +143,13 @@ If a table was specified when creating the DAO, then the generated query targets Otherwise, it uses the default table name for the entity (which is determined by the name of the entity class and the naming convention). 
-[default keyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- -[@Update]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Update.html +[default keyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withKeyspace-com.datastax.oss.driver.api.core.CqlIdentifier- +[@Update]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Update.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html [Boolean]: https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Boolean.html [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html [CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html -[ResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/ResultSet.html -[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/cql/BoundStatement.html -[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html +[ResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/ResultSet.html +[BoundStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/BoundStatement.html +[ReactiveResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/cql/reactive/ReactiveResultSet.html diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index 
72edc82ea66..b857203ef32 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -555,22 +555,22 @@ the same level. To control how the class hierarchy is scanned, annotate classes with [@HierarchyScanStrategy]. -[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html -[@CqlName]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/CqlName.html -[@Dao]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Dao.html -[@Entity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Entity.html -[NameConverter]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html -[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html -[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html -[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html -[@Computed]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Computed.html -[@Select]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Select.html -[@Insert]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Insert.html -[@Update]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Update.html -[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html -[@Query]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Query.html 
+[@ClusteringColumn]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/ClusteringColumn.html +[@CqlName]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/CqlName.html +[@Dao]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Dao.html +[@Entity]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Entity.html +[NameConverter]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/naming/NameConverter.html +[NamingConvention]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/entity/naming/NamingConvention.html +[@NamingStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/NamingStrategy.html +[@PartitionKey]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PartitionKey.html +[@Computed]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Computed.html +[@Select]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Select.html +[@Insert]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Insert.html +[@Update]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Update.html +[@GetEntity]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/GetEntity.html +[@Query]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Query.html [aliases]: http://cassandra.apache.org/doc/latest/cql/dml.html?#aliases -[@Transient]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Transient.html -[@TransientProperties]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html -[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html -[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html +[@Transient]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Transient.html +[@TransientProperties]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/TransientProperties.html +[@HierarchyScanStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/HierarchyScanStrategy.html +[@PropertyStrategy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/PropertyStrategy.html diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 0a81816f9a3..18be59df1c4 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -230,8 +230,8 @@ InventoryMapper inventoryMapper = new InventoryMapperBuilder(session) You can also permanently disable validation of an individual entity by annotating it with `@SchemaHint(targetElement = NONE)`. 
-[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html -[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html -[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html -[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html -[@Mapper]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/mapper/annotations/Mapper.html +[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html +[@DaoFactory]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DaoFactory.html +[@DaoKeyspace]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DaoKeyspace.html +[@DaoTable]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/DaoTable.html +[@Mapper]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/Mapper.html diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 966d89c12a6..88254334f25 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -138,7 +138,7 @@ starting the driver: [driver configuration]: ../core/configuration [OSGi]:https://www.osgi.org [JNR]: https://github.com/jnr/jnr-posix -[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- +[withClassLoader()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withClassLoader-java.lang.ClassLoader- [JAVA-1127]:https://datastax-oss.atlassian.net/browse/JAVA-1127 -[DriverConfigLoader.fromDefaults(ClassLoader)]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- -[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- +[DriverConfigLoader.fromDefaults(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#fromDefaults-java.lang.ClassLoader- +[DriverConfigLoader.programmaticBuilder(ClassLoader)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/config/DriverConfigLoader.html#programmaticBuilder-java.lang.ClassLoader- diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index 4677fb84145..b9ea6a36205 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -212,8 +212,8 @@ For a complete tour of the API, browse the child pages in this manual: * [Terms](term/) * [Idempotence](idempotence/) -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/CqlIdentifier.html -[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html -[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CqlIdentifier]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html +[DseQueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/querybuilder/DseQueryBuilder.html +[DseSchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/querybuilder/DseSchemaBuilder.html diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 01897774c85..0530b33d5bc 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -132,4 +132,4 @@ It is mutually exclusive with column conditions: if you previously specified col the statement, they will be ignored; conversely, adding a column condition cancels a previous IF EXISTS clause. -[Condition]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/condition/Condition.html +[Condition]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/condition/Condition.html diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 2aff86a6825..031291c311f 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -141,5 +141,5 @@ deleteFrom("user") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index 269afba7437..ede99602af0 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -114,4 +114,4 @@ is executed. This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index c4d4990affa..3c72e28cbee 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -201,5 +201,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Relation]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/relation/Relation.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Relation]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/relation/Relation.html diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index de6d4bacd6c..e4021c3068f 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -44,4 +44,4 @@ element type: * [function](function/) * [aggregate](aggregate/) -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index fbdf36147e3..42f1952a105 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -76,4 +76,4 @@ dropAggregate("average").ifExists(); // DROP AGGREGATE IF EXISTS average ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index c77f6431b73..7d02f0f8349 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -92,4 +92,4 @@ dropFunction("log").ifExists(); // DROP FUNCTION IF EXISTS log ``` 
-[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 9c590c1e79e..8541831c1f2 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -99,4 +99,4 @@ dropIndex("my_idx").ifExists(); // DROP INDEX IF EXISTS my_idx ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index a07af3479f4..25e165f32c1 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -83,6 +83,6 @@ dropKeyspace("cycling").ifExists(); // DROP KEYSPACE IF EXISTS cycling ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 55c9cc41c07..7bcdda0bd3f 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -85,5 +85,5 @@ dropTable("cyclist_by_age").ifExists(); // DROP MATERIALIZED VIEW IF EXISTS cyclist_by_age ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[RelationStructure]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[RelationStructure]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/schema/RelationStructure.html diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index a3000ee70db..8a68d676851 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -107,6 +107,6 @@ dropTable("cyclist_name").ifExists(); // DROP TABLE IF EXISTS cyclist_name ``` -[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html -[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html -[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[CreateTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.html +[AlterTableWithOptions]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/schema/AlterTableWithOptions.html diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index c5302843f7d..e474dc29419 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -88,4 +88,4 @@ dropTable("address").ifExists(); // DROP TYPE IF EXISTS address ``` -[SchemaBuilder]: 
https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html +[SchemaBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/SchemaBuilder.html diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 42fd0410bcd..19f0085508a 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -391,5 +391,5 @@ selectFrom("user").all().allowFiltering(); // SELECT * FROM user ALLOW FILTERING ``` -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Selector]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/select/Selector.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Selector]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/select/Selector.html diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 7fb94e0f31f..214dedb3274 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -105,5 +105,5 @@ This should be used with caution, as it's possible to generate invalid CQL that execution time; on the other hand, it can be used as a workaround to handle new CQL features that are not yet covered by the query builder. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index d877433dd2e..9b37160c0c9 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -17,4 +17,4 @@ Truncate truncate2 = truncate(CqlIdentifier.fromCql("mytable")); Note that, at this stage, the query is ready to build. After creating a TRUNCATE query it does not take any values. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index 1161d093bc4..d85f71f11cc 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -251,5 +251,5 @@ update("foo") Conditions are a common feature used by UPDATE and DELETE, so they have a [dedicated page](../condition) in this manual. 
-[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[Assignment]: https://docs.datastax.com/en/drivers/java/4.14/com/datastax/oss/driver/api/querybuilder/update/Assignment.html +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html +[Assignment]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/update/Assignment.html diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 60d66d06584..bfa72c7fa33 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index b98fd5028c1..104eb50bcde 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index ef4662d38ca..53ff1746e05 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index c038a1567c5..0096fbc7cd9 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index cfa28c25dba..1bce8e362a5 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-osgi-tests 
jar diff --git a/pom.xml b/pom.xml index eae8a54ac55..83b9122c2c2 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 41395bda871..8a45cdf37c7 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 4c67bc35a1b..692285142ed 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.16.1-SNAPSHOT + 4.17.0-SNAPSHOT java-driver-test-infra bundle From c3f85ee7ba20f57a05ecd6d48a4aeada82055895 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 12 Jul 2023 17:34:30 -0500 Subject: [PATCH 847/979] [maven-release-plugin] prepare release 4.17.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 547658aea3b..5caea7d1b80 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-core-shaded - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss 
java-driver-mapper-processor - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-mapper-runtime - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-query-builder - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-test-infra - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-metrics-micrometer - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss java-driver-metrics-microprofile - 4.17.0-SNAPSHOT + 4.17.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index bfeb0234b14..148d9626340 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index e29d1e95325..e16c4cab79a 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index c478f049dcf..6287bbc2f2f 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 8af4d4c9a6d..de94f86bfd5 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.17.0-SNAPSHOT + 4.17.0 java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 45cc458a586..1b58648ac49 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index bfa72c7fa33..6c9b632608d 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 104eb50bcde..ac1b3c69c8c 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 53ff1746e05..d0e4f4c9916 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 0096fbc7cd9..939ab981358 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 1bce8e362a5..4020913cacb 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 83b9122c2c2..c7eb22a8dee 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -1006,7 +1006,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.17.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 8a45cdf37c7..3d999ac0def 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 692285142ed..6b517768527 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0-SNAPSHOT + 4.17.0 java-driver-test-infra bundle From 8101fbf9ec91d952e90022606090a4be9f24aace Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Wed, 12 Jul 2023 17:34:38 -0500 Subject: [PATCH 848/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 14 files changed, 23 insertions(+), 23 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 5caea7d1b80..a60b9903fdc 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-bom pom @@ -31,42 +31,42 @@ com.datastax.oss java-driver-core - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-core-shaded - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-mapper-processor - 4.17.0 + 
4.17.1-SNAPSHOT com.datastax.oss java-driver-mapper-runtime - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-query-builder - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-test-infra - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-metrics-micrometer - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss java-driver-metrics-microprofile - 4.17.0 + 4.17.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 148d9626340..5d0c902f31b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-core-shaded DataStax Java driver for Apache Cassandra(R) - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index e16c4cab79a..c085bd1bcf0 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-core bundle diff --git a/distribution/pom.xml b/distribution/pom.xml index 6287bbc2f2f..a1f857ac0f4 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index de94f86bfd5..00d07f29c16 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ java-driver-parent com.datastax.oss - 4.17.0 + 4.17.1-SNAPSHOT java-driver-examples DataStax Java driver for Apache Cassandra(R) - examples. 
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 1b58648ac49..30f3207d06c 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 6c9b632608d..70a319b7775 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-mapper-processor DataStax Java driver for Apache Cassandra(R) - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ac1b3c69c8c..e578cc4959f 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index d0e4f4c9916..e75c14e8c7e 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 939ab981358..af6f3b88616 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 4020913cacb..c80bca37e1d 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c7eb22a8dee..0c20405b68e 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 4.0.0 com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT pom DataStax Java driver for Apache Cassandra(R) A driver for Apache 
Cassandra(R) 2.1+ that works exclusively with the Cassandra Query Language version 3 (CQL3) and Cassandra's native protocol versions 3 and above. @@ -1006,7 +1006,7 @@ height="0" width="0" style="display:none;visibility:hidden"> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.17.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 3d999ac0def..d5b53451712 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 6b517768527..474a915ffa1 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ com.datastax.oss java-driver-parent - 4.17.0 + 4.17.1-SNAPSHOT java-driver-test-infra bundle From 9d891b120069fd2029c4743c14bc7df248e2e14b Mon Sep 17 00:00:00 2001 From: hhughes Date: Thu, 13 Jul 2023 17:33:57 -0700 Subject: [PATCH 849/979] JAVA-3089: Forbid wildcard imports (#1680) --- .../type/codec/registry/CachingCodecRegistry.java | 11 ++++++++++- .../oss/driver/internal/core/os/JnrLibcTest.java | 2 +- .../datastax/oss/driver/mapper/DefaultKeyspaceIT.java | 4 ++-- pom.xml | 1 + 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index cb5d45255e1..34c503bfce5 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -19,7 +19,16 @@ import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.TupleValue; import 
com.datastax.oss.driver.api.core.data.UdtValue; -import com.datastax.oss.driver.api.core.type.*; +import com.datastax.oss.driver.api.core.type.ContainerType; +import com.datastax.oss.driver.api.core.type.CustomType; +import com.datastax.oss.driver.api.core.type.DataType; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.core.type.ListType; +import com.datastax.oss.driver.api.core.type.MapType; +import com.datastax.oss.driver.api.core.type.SetType; +import com.datastax.oss.driver.api.core.type.TupleType; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.VectorType; import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java index e3bf9a876db..9fd58a2b163 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/os/JnrLibcTest.java @@ -15,7 +15,7 @@ */ package com.datastax.oss.driver.internal.core.os; -import static org.assertj.core.api.Assertions.*; +import static org.assertj.core.api.Assertions.assertThat; import java.time.Instant; import java.time.temporal.ChronoUnit; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java index 55f7aff9b62..8d9614bf97c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultKeyspaceIT.java @@ -15,7 +15,6 @@ */ package com.datastax.oss.driver.mapper; -import static 
com.datastax.oss.driver.api.mapper.MapperBuilder.*; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; @@ -25,6 +24,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.mapper.MapperBuilder; import com.datastax.oss.driver.api.mapper.MapperException; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; @@ -122,7 +122,7 @@ public void should_fail_to_insert_if_default_ks_and_dao_ks_not_provided() { () -> { InventoryMapperKsNotSet mapper = new DefaultKeyspaceIT_InventoryMapperKsNotSetBuilder(SESSION_RULE.session()) - .withCustomState(SCHEMA_VALIDATION_ENABLED_SETTING, false) + .withCustomState(MapperBuilder.SCHEMA_VALIDATION_ENABLED_SETTING, false) .build(); mapper.productDaoDefaultKsNotSet(); }) diff --git a/pom.xml b/pom.xml index 0c20405b68e..19adba12170 100644 --- a/pom.xml +++ b/pom.xml @@ -588,6 +588,7 @@ -Xep:FutureReturnValueIgnored:OFF -Xep:PreferJavaTimeOverload:OFF -Xep:AnnotateFormatMethod:OFF + -Xep:WildcardImport:WARN -XepExcludedPaths:.*/target/(?:generated-sources|generated-test-sources)/.* true From e8b25ab368a5cc8a59983ca8bd7b2020d07b19bd Mon Sep 17 00:00:00 2001 From: hhughes Date: Thu, 20 Jul 2023 12:16:13 -0700 Subject: [PATCH 850/979] JAVA-3077: ListenersIT intermittently failing with: Wanted but not invoked: schemaListener1.onSessionReady (#1670) ListenersIT.java: - Add 500ms wait on SchemaListener#onSessionReady call verification - Add latch to wait for MySchemaChangeListener#onSessionReady - Add some comments around which listeners/methods need synchronizations and why/not --- .../oss/driver/core/session/ListenersIT.java | 26 +++++++++++++++---- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java index 690c00a0e9b..a7b847ad1e8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/session/ListenersIT.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; import com.datastax.oss.driver.api.core.CqlSession; @@ -33,9 +34,12 @@ import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -90,6 +94,9 @@ public void should_inject_session_in_listeners() throws Exception { .build()) .build()) { + // These NodeStateListeners are wrapped with SafeInitNodeStateListener which delays #onUp + // callbacks until #onSessionReady is called, these will all happen during session + // initialization InOrder inOrder1 = inOrder(nodeListener1); inOrder1.verify(nodeListener1).onSessionReady(session); inOrder1.verify(nodeListener1).onUp(nodeCaptor1.capture()); @@ -104,20 +111,29 @@ public void should_inject_session_in_listeners() throws Exception { assertThat(nodeCaptor2.getValue().getEndPoint()) .isEqualTo(SIMULACRON_RULE.getContactPoints().iterator().next()); - 
verify(schemaListener1).onSessionReady(session); - verify(schemaListener2).onSessionReady(session); + // SchemaChangeListener#onSessionReady is called asynchronously from AdminExecutor so we may + // have to wait a little + verify(schemaListener1, timeout(500).times(1)).onSessionReady(session); + verify(schemaListener2, timeout(500).times(1)).onSessionReady(session); + // Request tracker #onSessionReady is called synchronously during session initialization verify(requestTracker1).onSessionReady(session); verify(requestTracker2).onSessionReady(session); assertThat(MyNodeStateListener.onSessionReadyCalled).isTrue(); assertThat(MyNodeStateListener.onUpCalled).isTrue(); - assertThat(MySchemaChangeListener.onSessionReadyCalled).isTrue(); + // SchemaChangeListener#onSessionReady is called asynchronously from AdminExecutor so we may + // have to wait a little + assertThat( + Uninterruptibles.awaitUninterruptibly( + MySchemaChangeListener.onSessionReadyLatch, 500, TimeUnit.MILLISECONDS)) + .isTrue(); assertThat(MyRequestTracker.onSessionReadyCalled).isTrue(); } + // CqlSession#close waits for all listener close methods to be called verify(nodeListener1).close(); verify(nodeListener2).close(); @@ -163,14 +179,14 @@ public void close() { public static class MySchemaChangeListener extends SchemaChangeListenerBase { - private static volatile boolean onSessionReadyCalled = false; + private static CountDownLatch onSessionReadyLatch = new CountDownLatch(1); private static volatile boolean closeCalled = false; public MySchemaChangeListener(@SuppressWarnings("unused") DriverContext ignored) {} @Override public void onSessionReady(@NonNull Session session) { - onSessionReadyCalled = true; + onSessionReadyLatch.countDown(); } @Override From ec93ef9cdbde1b5fc5a694ebf09b375f76bcb373 Mon Sep 17 00:00:00 2001 From: hhughes Date: Thu, 20 Jul 2023 13:46:13 -0700 Subject: [PATCH 851/979] JAVA-3084: Add integration test coverage for ExtraTypeCodecs (#1679) --- 
.../core/type/codec/ExtraTypeCodecsIT.java | 298 ++++++++++++++++++ 1 file changed, 298 insertions(+) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/ExtraTypeCodecsIT.java diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/ExtraTypeCodecsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/ExtraTypeCodecsIT.java new file mode 100644 index 00000000000..853f6993aec --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/ExtraTypeCodecsIT.java @@ -0,0 +1,298 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.core.cql.BoundStatement; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; +import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.time.Instant; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; +import org.junit.Assume; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class ExtraTypeCodecsIT { + + private static final CcmRule CCM_RULE = CcmRule.getInstance(); + + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + @ClassRule + public static final TestRule CHAIN = 
RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + + private enum TableField { + cql_text("text_value", "text"), + cql_int("integer_value", "int"), + cql_vector("vector_value", "vector"), + cql_list_of_text("list_of_text_value", "list"), + cql_timestamp("timestamp_value", "timestamp"), + cql_boolean("boolean_value", "boolean"), + ; + + final String name; + final String ty; + + TableField(String name, String ty) { + this.name = name; + this.ty = ty; + } + + private String definition() { + return String.format("%s %s", name, ty); + } + } + + @BeforeClass + public static void setupSchema() { + List fieldDefinitions = new ArrayList<>(); + fieldDefinitions.add("key uuid PRIMARY KEY"); + Stream.of(TableField.values()) + .forEach( + tf -> { + // TODO: Move this check to BackendRequirementRule once JAVA-3069 is resolved. + if (tf == TableField.cql_vector + && CCM_RULE.getCassandraVersion().compareTo(Version.parse("5.0")) < 0) { + // don't add vector type before cassandra version 5.0 + return; + } + fieldDefinitions.add(tf.definition()); + }); + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + String.format( + "CREATE TABLE IF NOT EXISTS extra_type_codecs_it (%s)", + String.join(", ", fieldDefinitions))) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + + private void insertAndRead(TableField field, T value, TypeCodec codec) { + CqlSession session = SESSION_RULE.session(); + // write value under new key using provided codec + UUID key = UUID.randomUUID(); + + PreparedStatement preparedInsert = + session.prepare( + SimpleStatement.builder( + String.format( + "INSERT INTO extra_type_codecs_it (key, %s) VALUES (?, ?)", field.name)) + .build()); + BoundStatement boundInsert = + preparedInsert + .boundStatementBuilder() + .setUuid("key", key) + .set(field.name, value, codec) + .build(); + session.execute(boundInsert); + + // read value using provided codec and assert result + PreparedStatement preparedSelect = + session.prepare( + 
SimpleStatement.builder( + String.format("SELECT %s FROM extra_type_codecs_it WHERE key = ?", field.name)) + .build()); + BoundStatement boundSelect = preparedSelect.boundStatementBuilder().setUuid("key", key).build(); + assertThat(session.execute(boundSelect).one().get(field.name, codec)).isEqualTo(value); + } + + @Test + public void enum_names_of() { + insertAndRead( + TableField.cql_text, TestEnum.value1, ExtraTypeCodecs.enumNamesOf(TestEnum.class)); + } + + @Test + public void enum_ordinals_of() { + insertAndRead( + TableField.cql_int, TestEnum.value1, ExtraTypeCodecs.enumOrdinalsOf(TestEnum.class)); + } + + // Also requires -Dccm.branch=vsearch and the ability to build that branch locally + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "5.0.0") + @Test + public void float_to_vector_array() { + // @BackRequirement on test methods that use @ClassRule to configure CcmRule require @Rule + // BackendRequirementRule included with fix JAVA-3069. Until then we will ignore this test with + // an assume. 
+ Assume.assumeTrue( + "Requires Cassandra 5.0 or greater", + CCM_RULE.getCassandraVersion().compareTo(Version.parse("5.0")) >= 0); + insertAndRead( + TableField.cql_vector, + new float[] {1.1f, 0f, Float.NaN}, + ExtraTypeCodecs.floatVectorToArray(3)); + } + + @Test + public void json_java_class() { + insertAndRead( + TableField.cql_text, + new TestJsonAnnotatedPojo("example", Arrays.asList(1, 2, 3)), + ExtraTypeCodecs.json(TestJsonAnnotatedPojo.class)); + } + + @Test + public void json_java_class_and_object_mapper() { + insertAndRead( + TableField.cql_text, + TestPojo.create(1, "abc", "def"), + ExtraTypeCodecs.json(TestPojo.class, new ObjectMapper())); + } + + @Test + public void list_to_array_of() { + insertAndRead( + TableField.cql_list_of_text, + new String[] {"hello", "kitty"}, + ExtraTypeCodecs.listToArrayOf(TypeCodecs.TEXT)); + } + + @Test + public void local_timestamp_at() { + ZoneId systemZoneId = ZoneId.systemDefault(); + insertAndRead( + TableField.cql_timestamp, + LocalDateTime.now(systemZoneId).truncatedTo(ChronoUnit.MILLIS), + ExtraTypeCodecs.localTimestampAt(systemZoneId)); + } + + @Test + public void optional_of() { + insertAndRead( + TableField.cql_boolean, Optional.empty(), ExtraTypeCodecs.optionalOf(TypeCodecs.BOOLEAN)); + insertAndRead( + TableField.cql_boolean, Optional.of(true), ExtraTypeCodecs.optionalOf(TypeCodecs.BOOLEAN)); + } + + @Test + public void timestamp_at() { + ZoneId systemZoneId = ZoneId.systemDefault(); + insertAndRead( + TableField.cql_timestamp, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + ExtraTypeCodecs.timestampAt(systemZoneId)); + } + + @Test + public void timestamp_millis_at() { + ZoneId systemZoneId = ZoneId.systemDefault(); + insertAndRead( + TableField.cql_timestamp, + Instant.now().toEpochMilli(), + ExtraTypeCodecs.timestampMillisAt(systemZoneId)); + } + + @Test + public void zoned_timestamp_at() { + ZoneId systemZoneId = ZoneId.systemDefault(); + insertAndRead( + TableField.cql_timestamp, + 
ZonedDateTime.now(systemZoneId).truncatedTo(ChronoUnit.MILLIS), + ExtraTypeCodecs.zonedTimestampAt(systemZoneId)); + } + + private enum TestEnum { + value1, + value2, + value3, + } + + // Public for JSON serialization + public static final class TestJsonAnnotatedPojo { + public final String info; + public final List values; + + @JsonCreator + public TestJsonAnnotatedPojo( + @JsonProperty("info") String info, @JsonProperty("values") List values) { + this.info = info; + this.values = values; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestJsonAnnotatedPojo testJsonAnnotatedPojo = (TestJsonAnnotatedPojo) o; + return Objects.equals(info, testJsonAnnotatedPojo.info) + && Objects.equals(values, testJsonAnnotatedPojo.values); + } + + @Override + public int hashCode() { + return Objects.hash(info, values); + } + } + + public static final class TestPojo { + public int id; + public String[] messages; + + public static TestPojo create(int id, String... 
messages) { + TestPojo obj = new TestPojo(); + obj.id = id; + obj.messages = messages; + return obj; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TestPojo testPojo = (TestPojo) o; + return id == testPojo.id && Arrays.equals(messages, testPojo.messages); + } + + @Override + public int hashCode() { + int result = Objects.hash(id); + result = 31 * result + Arrays.hashCode(messages); + return result; + } + } +} From 60c9cbc51f29c3ae262f9ce53dfd923600efaab6 Mon Sep 17 00:00:00 2001 From: hhughes Date: Thu, 10 Aug 2023 11:04:57 -0700 Subject: [PATCH 852/979] JAVA-3100: Update jackson-databind to 2.13.4.1 and (#1694) jackson-jaxrs-json-provider to 2.13.4 to address recent CVEs Additional: - Remove unused maven property legacy-jackson.version --- core/revapi.json | 6 ++++++ pom.xml | 5 ++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/core/revapi.json b/core/revapi.json index 63e2cef5a1e..318e29709ec 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -6950,6 +6950,12 @@ "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", "justification": "Refactorings in PR 1666" + }, + { + "code": "java.method.returnTypeChangedCovariantly", + "old": "method java.lang.Throwable java.lang.Throwable::fillInStackTrace() @ com.fasterxml.jackson.databind.deser.UnresolvedForwardReference", + "new": "method com.fasterxml.jackson.databind.deser.UnresolvedForwardReference com.fasterxml.jackson.databind.deser.UnresolvedForwardReference::fillInStackTrace()", + "justification": "Upgrade jackson-databind to 2.13.4.1 to address CVEs, 
API change cause: https://github.com/FasterXML/jackson-databind/issues/3419" } ] } diff --git a/pom.xml b/pom.xml index 19adba12170..8fa1bc52a34 100644 --- a/pom.xml +++ b/pom.xml @@ -57,9 +57,8 @@ 1.7.26 1.0.3 20230227 - 2.13.2 - 2.13.2.2 - 1.9.12 + 2.13.4 + 2.13.4.1 1.1.10.1 1.7.1 From ae99f7d83d4696411b4a21567f6febb489e964fe Mon Sep 17 00:00:00 2001 From: hhughes Date: Fri, 18 Aug 2023 15:55:11 -0700 Subject: [PATCH 853/979] JAVA-3095: Fix CREATE keyword in vector search example in upgrade guide. (#1693) --- upgrade_guide/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index 6310f220c3d..4f8288b96a1 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -28,7 +28,7 @@ try (CqlSession session = new CqlSessionBuilder().withLocalDatacenter("datacente session.execute("DROP KEYSPACE IF EXISTS test"); session.execute("CREATE KEYSPACE test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}"); session.execute("CREATE TABLE test.foo(i int primary key, j vector)"); - session.execute("CREAT CUSTOM INDEX ann_index ON test.foo(j) USING 'StorageAttachedIndex'"); + session.execute("CREATE CUSTOM INDEX ann_index ON test.foo(j) USING 'StorageAttachedIndex'"); session.execute("INSERT INTO test.foo (i, j) VALUES (1, [8, 2.3, 58])"); session.execute("INSERT INTO test.foo (i, j) VALUES (2, [1.2, 3.4, 5.6])"); session.execute("INSERT INTO test.foo (i, j) VALUES (5, [23, 18, 3.9])"); From f5605eabe58a092bbc9a11219c5007349e46ce75 Mon Sep 17 00:00:00 2001 From: hhughes Date: Fri, 18 Aug 2023 17:24:59 -0700 Subject: [PATCH 854/979] Update 4.x changelog for 3.11.4 release (#1691) --- changelog/README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 6c8c236a6a4..cb272907b66 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -632,6 +632,11 @@ 
changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements +## 3.11.4 +- [improvement] JAVA-3079: Upgrade Netty to 4.1.94, 3.x edition +- [improvement] JAVA-3082: Fix maven build for Apple-silicon +- [improvement] PR 1671: Fix LatencyAwarePolicy scale docstring + ## 3.11.3 - [improvement] JAVA-3023: Upgrade Netty to 4.1.77, 3.x edition From 4d6e2e793797325f8d2c6edcfb2593615cd39f62 Mon Sep 17 00:00:00 2001 From: Benoit TELLIER Date: Mon, 21 Aug 2023 12:25:26 +0700 Subject: [PATCH 855/979] Improve ByteBufPrimitiveCodec readBytes (#1617) --- .../core/protocol/ByteBufPrimitiveCodec.java | 21 +------ .../protocol/ByteBufPrimitiveCodecTest.java | 59 +++++++++++++++++++ 2 files changed, 62 insertions(+), 18 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java index b7fc6350636..44815e99229 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodec.java @@ -114,8 +114,9 @@ public int readUnsignedShort(ByteBuf source) { public ByteBuffer readBytes(ByteBuf source) { int length = readInt(source); if (length < 0) return null; - ByteBuf slice = source.readSlice(length); - return ByteBuffer.wrap(readRawBytes(slice)); + byte[] bytes = new byte[length]; + source.readBytes(bytes); + return ByteBuffer.wrap(bytes); } @Override @@ -220,22 +221,6 @@ public void writeShortBytes(byte[] bytes, ByteBuf dest) { dest.writeBytes(bytes); } - // Reads *all* readable bytes from a buffer and return them. - // If the buffer is backed by an array, this will return the underlying array directly, without - // copy. 
- private static byte[] readRawBytes(ByteBuf buffer) { - if (buffer.hasArray() && buffer.readableBytes() == buffer.array().length) { - // Move the readerIndex just so we consistently consume the input - buffer.readerIndex(buffer.writerIndex()); - return buffer.array(); - } - - // Otherwise, just read the bytes in a new array - byte[] bytes = new byte[buffer.readableBytes()]; - buffer.readBytes(bytes); - return bytes; - } - private static String readString(ByteBuf source, int length) { try { String str = source.toString(source.readerIndex(), length, CharsetUtil.UTF_8); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java index 18ebb79ea59..2690de71cb0 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java @@ -164,6 +164,65 @@ public void should_read_bytes() { assertThat(Bytes.toHexString(bytes)).isEqualTo("0xcafebabe"); } + @Test + public void should_read_bytes_when_extra_data() { + ByteBuf source = + ByteBufs.wrap( + // length (as an int) + 0x00, + 0x00, + 0x00, + 0x04, + // contents + 0xca, + 0xfe, + 0xba, + 0xbe, + 0xde, + 0xda, + 0xdd); + ByteBuffer bytes = codec.readBytes(source); + assertThat(Bytes.toHexString(bytes)).isEqualTo("0xcafebabe"); + } + + @Test + public void read_bytes_should_udpate_reader_index() { + ByteBuf source = + ByteBufs.wrap( + // length (as an int) + 0x00, + 0x00, + 0x00, + 0x04, + // contents + 0xca, + 0xfe, + 0xba, + 0xbe, + 0xde, + 0xda, + 0xdd); + codec.readBytes(source); + + assertThat(source.readerIndex()).isEqualTo(8); + } + + @Test + public void read_bytes_should_throw_when_not_enough_content() { + ByteBuf source = + ByteBufs.wrap( + // length (as an int) : 4 bytes + 0x00, + 0x00, + 0x00, + 0x04, + // contents : only 2 
bytes + 0xca, + 0xfe); + assertThatThrownBy(() -> codec.readBytes(source)) + .isInstanceOf(IndexOutOfBoundsException.class); + } + @Test public void should_read_null_bytes() { ByteBuf source = ByteBufs.wrap(0xFF, 0xFF, 0xFF, 0xFF); // -1 (as an int) From 511ac4ecca3a575e73a51e32a3db47a509eb9859 Mon Sep 17 00:00:00 2001 From: Chris Lin <99268912+chrislin22@users.noreply.github.com> Date: Mon, 21 Aug 2023 11:52:36 -0400 Subject: [PATCH 856/979] removed auto trigger snyk and clean on PR --- .github/workflows/snyk-cli-scan.yml | 4 ---- .github/workflows/snyk-pr-cleanup.yml | 5 ----- 2 files changed, 9 deletions(-) diff --git a/.github/workflows/snyk-cli-scan.yml b/.github/workflows/snyk-cli-scan.yml index 50d303a128b..f78bc163934 100644 --- a/.github/workflows/snyk-cli-scan.yml +++ b/.github/workflows/snyk-cli-scan.yml @@ -1,10 +1,6 @@ name: 🔬 Snyk cli SCA on: - push: - branches: [ 4.x ] - pull_request: - branches: [ 4.x ] workflow_dispatch: env: diff --git a/.github/workflows/snyk-pr-cleanup.yml b/.github/workflows/snyk-pr-cleanup.yml index 9c3136bef82..27208c8c0a8 100644 --- a/.github/workflows/snyk-pr-cleanup.yml +++ b/.github/workflows/snyk-pr-cleanup.yml @@ -1,11 +1,6 @@ name: 🗑️ Snyk PR cleanup - merged/closed on: - pull_request: - types: - - closed - branches: - - 4.x workflow_dispatch: jobs: From 1b19116d0d70dc95a2783b8ef53a269770060698 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 21 Aug 2023 12:02:35 -0500 Subject: [PATCH 857/979] Fixing formatting error from recent commit --- .../internal/core/protocol/ByteBufPrimitiveCodecTest.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java index 2690de71cb0..e2bfb43e891 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/protocol/ByteBufPrimitiveCodecTest.java @@ -219,8 +219,7 @@ public void read_bytes_should_throw_when_not_enough_content() { // contents : only 2 bytes 0xca, 0xfe); - assertThatThrownBy(() -> codec.readBytes(source)) - .isInstanceOf(IndexOutOfBoundsException.class); + assertThatThrownBy(() -> codec.readBytes(source)).isInstanceOf(IndexOutOfBoundsException.class); } @Test From 9982bc6328b8d8a0599ca86b2386a49e95b1411b Mon Sep 17 00:00:00 2001 From: Chris Lin <99268912+chrislin22@users.noreply.github.com> Date: Mon, 21 Aug 2023 14:13:25 -0400 Subject: [PATCH 858/979] removed all snyk related stuff --- .github/workflows/snyk-cli-scan.yml | 43 --------------------------- .github/workflows/snyk-pr-cleanup.yml | 11 ------- 2 files changed, 54 deletions(-) delete mode 100644 .github/workflows/snyk-cli-scan.yml delete mode 100644 .github/workflows/snyk-pr-cleanup.yml diff --git a/.github/workflows/snyk-cli-scan.yml b/.github/workflows/snyk-cli-scan.yml deleted file mode 100644 index f78bc163934..00000000000 --- a/.github/workflows/snyk-cli-scan.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: 🔬 Snyk cli SCA - -on: - workflow_dispatch: - -env: - SNYK_SEVERITY_THRESHOLD_LEVEL: high - -jobs: - snyk-cli-scan: - runs-on: ubuntu-latest - steps: - - name: Git checkout - uses: actions/checkout@v3 - - - name: prepare for snyk scan - uses: datastax/shared-github-actions/actions/snyk-prepare@main - - - name: Set up JDK 8 - uses: actions/setup-java@v3 - with: - distribution: 'temurin' - java-version: '8' - cache: maven - - - name: run maven install prepare for snyk - run: | - mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true - - - name: snyk scan java - uses: datastax/shared-github-actions/actions/snyk-scan-java@main - with: - directories: . 
- SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - SNYK_ORG_ID: ${{ secrets.SNYK_ORG_ID }} - extra-snyk-options: "-DskipTests -Dmaven.javadoc.skip=true" - - - name: Snyk scan result - uses: datastax/shared-github-actions/actions/snyk-process-scan-results@main - with: - gh_repo_token: ${{ secrets.GITHUB_TOKEN }} - SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - SNYK_ORG_ID: ${{ secrets.SNYK_ORG_ID }} diff --git a/.github/workflows/snyk-pr-cleanup.yml b/.github/workflows/snyk-pr-cleanup.yml deleted file mode 100644 index 27208c8c0a8..00000000000 --- a/.github/workflows/snyk-pr-cleanup.yml +++ /dev/null @@ -1,11 +0,0 @@ -name: 🗑️ Snyk PR cleanup - merged/closed - -on: - workflow_dispatch: - -jobs: - snyk_project_cleanup_when_pr_closed: - uses: datastax/shared-github-actions/.github/workflows/snyk-pr-cleanup.yml@main - secrets: - snyk_token: ${{ secrets.SNYK_TOKEN }} - snyk_org_id: ${{ secrets.SNYK_ORG_ID }} From d94a8f06252d100ed1a405ca3809e059fbf8a4e0 Mon Sep 17 00:00:00 2001 From: hhughes Date: Mon, 21 Aug 2023 13:40:55 -0700 Subject: [PATCH 859/979] JAVA-3111: upgrade jackson-databind to 2.13.4.2 to address gradle dependency issue (#1708) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8fa1bc52a34..b74cfeee652 100644 --- a/pom.xml +++ b/pom.xml @@ -58,7 +58,7 @@ 1.0.3 20230227 2.13.4 - 2.13.4.1 + 2.13.4.2 1.1.10.1 1.7.1 From fd446ce2870dafa3fe2654deb6e95506f62d2851 Mon Sep 17 00:00:00 2001 From: hhughes Date: Mon, 21 Aug 2023 13:47:23 -0700 Subject: [PATCH 860/979] JAVA-3069: Refactor duplicated tests with different requirements to use @BackendRequirement (#1667) Refactor @BackendRequirement test skipping logic into new rule, BackendRequirementRule. 
--- .../DseGssApiAuthProviderAlternateIT.java | 8 +- .../core/auth/DseGssApiAuthProviderIT.java | 8 +- .../core/auth/DsePlainTextAuthProviderIT.java | 8 +- .../core/auth/DseProxyAuthenticationIT.java | 8 +- .../driver/api/core/auth/EmbeddedAdsRule.java | 15 +- .../cql/continuous/ContinuousPagingIT.java | 8 +- .../reactive/ContinuousPagingReactiveIT.java | 8 +- .../api/core/data/geometry/LineStringIT.java | 5 +- .../api/core/data/geometry/PointIT.java | 5 +- .../api/core/data/geometry/PolygonIT.java | 5 +- .../api/core/data/time/DateRangeIT.java | 5 +- .../graph/ClassicGraphGeoSearchIndexIT.java | 8 +- .../graph/ClassicGraphTextSearchIndexIT.java | 8 +- .../core/graph/CoreGraphGeoSearchIndexIT.java | 8 +- .../graph/CoreGraphTextSearchIndexIT.java | 8 +- .../api/core/graph/CqlCollectionIT.java | 8 +- .../api/core/graph/GraphAuthenticationIT.java | 8 +- .../driver/api/core/graph/GraphPagingIT.java | 8 +- .../graph/GraphSpeculativeExecutionIT.java | 8 +- .../graph/GraphTextSearchIndexITBase.java | 9 +- .../api/core/graph/GraphTimeoutsIT.java | 8 +- .../DefaultReactiveGraphResultSetIT.java | 8 +- .../remote/ClassicGraphDataTypeRemoteIT.java | 8 +- .../remote/ClassicGraphTraversalRemoteIT.java | 8 +- .../remote/CoreGraphDataTypeRemoteIT.java | 8 +- .../remote/CoreGraphTraversalRemoteIT.java | 8 +- .../GraphTraversalMetaPropertiesRemoteIT.java | 8 +- ...GraphTraversalMultiPropertiesRemoteIT.java | 8 +- .../remote/GraphTraversalRemoteITBase.java | 5 +- .../ClassicGraphDataTypeFluentIT.java | 8 +- .../ClassicGraphDataTypeScriptIT.java | 8 +- .../ClassicGraphTraversalBatchIT.java | 8 +- .../statement/ClassicGraphTraversalIT.java | 8 +- .../statement/CoreGraphDataTypeFluentIT.java | 8 +- .../statement/CoreGraphDataTypeScriptIT.java | 8 +- .../statement/CoreGraphTraversalBatchIT.java | 8 +- .../graph/statement/CoreGraphTraversalIT.java | 8 +- .../GraphTraversalMetaPropertiesIT.java | 8 +- .../GraphTraversalMultiPropertiesIT.java | 8 +- 
.../api/core/insights/InsightsClientIT.java | 8 +- .../schema/DseAggregateMetadataIT.java | 8 +- .../schema/DseFunctionMetadataIT.java | 8 +- .../schema/KeyspaceGraphMetadataIT.java | 5 +- .../TableGraphMetadataCaseSensitiveIT.java | 5 +- .../metadata/schema/TableGraphMetadataIT.java | 5 +- .../ProtocolVersionInitialNegotiationIT.java | 240 +++++++----------- .../NettyResourceLeakDetectionIT.java | 21 +- .../oss/driver/core/cql/BatchStatementIT.java | 5 +- .../driver/core/cql/BoundStatementCcmIT.java | 5 +- .../core/cql/ExecutionInfoWarningsIT.java | 9 +- .../oss/driver/core/cql/NowInSecondsIT.java | 13 +- .../driver/core/cql/PerRequestKeyspaceIT.java | 19 +- .../driver/core/cql/PreparedStatementIT.java | 13 +- .../core/metadata/ByteOrderedTokenIT.java | 8 +- .../metadata/ByteOrderedTokenVnodesIT.java | 8 +- .../core/metadata/Murmur3TokenVnodesIT.java | 8 +- .../driver/core/metadata/NodeMetadataIT.java | 5 +- .../core/metadata/RandomTokenVnodesIT.java | 8 +- .../oss/driver/core/metadata/SchemaIT.java | 13 +- .../DriverBlockHoundIntegrationCcmIT.java | 8 +- .../mapper/DefaultNullSavingStrategyIT.java | 8 +- .../datastax/oss/driver/mapper/DeleteIT.java | 8 +- .../driver/mapper/IncrementWithNullsIT.java | 5 +- .../oss/driver/mapper/NestedUdtIT.java | 8 +- .../oss/driver/mapper/PrimitivesIT.java | 8 +- .../oss/driver/mapper/SchemaValidationIT.java | 8 +- .../mapper/SelectCustomWhereClauseIT.java | 8 +- .../driver/mapper/SelectOtherClausesIT.java | 8 +- .../driver/mapper/UpdateCustomIfClauseIT.java | 8 +- .../oss/driver/mapper/UpdateReactiveIT.java | 8 +- .../oss/driver/querybuilder/JsonInsertIT.java | 8 +- .../driver/internal/osgi/OsgiGeoTypesIT.java | 8 +- .../oss/driver/internal/osgi/OsgiGraphIT.java | 8 +- .../driver/internal/osgi/OsgiSnappyIT.java | 5 +- .../internal/osgi/support/CcmPaxExam.java | 18 +- .../api/testinfra/CassandraRequirement.java | 4 + .../driver/api/testinfra/DseRequirement.java | 4 + .../driver/api/testinfra/ccm/BaseCcmRule.java | 14 +- 
.../requirement/BackendRequirementRule.java | 59 +++++ .../requirement/VersionRequirement.java | 6 + 80 files changed, 577 insertions(+), 361 deletions(-) create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java index 3b56e3edf65..44ba5b08eed 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderAlternateIT.java @@ -22,7 +22,8 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.cql.Row; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.tngtech.java.junit.dataprovider.DataProvider; @@ -32,7 +33,10 @@ import org.junit.Test; import org.junit.runner.RunWith; -@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "Required for DseAuthenticator") @RunWith(DataProviderRunner.class) public class DseGssApiAuthProviderAlternateIT { @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(true); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java index b8884e68b27..d357c2c678d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseGssApiAuthProviderIT.java @@ -24,7 +24,8 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.cql.ResultSet; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.util.List; import java.util.Map; @@ -32,7 +33,10 @@ import org.junit.ClassRule; import org.junit.Test; -@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "Required for DseAuthenticator") public class DseGssApiAuthProviderIT { @ClassRule public static EmbeddedAdsRule ads = new EmbeddedAdsRule(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java index 08629a1f17e..c3c98c51d00 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DsePlainTextAuthProviderIT.java @@ -24,8 +24,9 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.auth.AuthenticationException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import 
com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; @@ -35,7 +36,10 @@ import org.junit.ClassRule; import org.junit.Test; -@DseRequirement(min = "5.0", description = "Required for DseAuthenticator") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "Required for DseAuthenticator") public class DsePlainTextAuthProviderIT { @ClassRule diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java index 7b4bf6be433..385b9206311 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -27,7 +27,8 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.servererrors.UnauthorizedException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import java.util.List; @@ -36,7 +37,10 @@ import org.junit.ClassRule; import org.junit.Test; -@DseRequirement(min = "5.1", description = "Required for DseAuthenticator with proxy") +@BackendRequirement( + type = 
BackendType.DSE, + minInclusive = "5.1", + description = "Required for DseAuthenticator with proxy") public class DseProxyAuthenticationIT { private static String bobPrincipal; private static String charliePrincipal; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java index 0903eb9b298..6590c056198 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/EmbeddedAdsRule.java @@ -18,16 +18,12 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.dse.driver.internal.core.auth.DseGssApiAuthProvider; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; -import com.datastax.oss.driver.api.testinfra.requirement.BackendType; -import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirementRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.io.File; -import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.junit.AssumptionViolatedException; @@ -155,12 +151,7 @@ protected void before() { @Override public Statement apply(Statement base, Description description) { - BackendType backend = CcmBridge.DSE_ENABLEMENT ? 
BackendType.DSE : BackendType.CASSANDRA; - Version version = CcmBridge.VERSION; - - Collection requirements = VersionRequirement.fromAnnotations(description); - - if (VersionRequirement.meetsAny(requirements, backend, version)) { + if (BackendRequirementRule.meetsDescriptionRequirements(description)) { return super.apply(base, description); } else { // requirements not met, throw reasoning assumption to skip test @@ -168,7 +159,7 @@ public Statement apply(Statement base, Description description) { @Override public void evaluate() { throw new AssumptionViolatedException( - VersionRequirement.buildReasonString(requirements, backend, version)); + BackendRequirementRule.buildReasonString(description)); } }; } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index a0a3aaf3cf5..42600609d50 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -30,8 +30,9 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -58,8 +59,9 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement( - min = "5.1.0", 
+@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1.0", description = "Continuous paging is only available from 5.1.0 onwards") @Category(ParallelizableTests.class) @RunWith(DataProviderRunner.class) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java index 927a3dfc286..658dfeba2da 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/reactive/ContinuousPagingReactiveIT.java @@ -27,8 +27,9 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metrics.DefaultNodeMetric; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -47,8 +48,9 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement( - min = "5.1.0", +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1.0", description = "Continuous paging is only available from 5.1.0 onwards") @Category(ParallelizableTests.class) @RunWith(DataProviderRunner.class) diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java index 
2261c3fae2d..6012ae4ba89 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/LineStringIT.java @@ -22,8 +22,9 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.uuid.Uuids; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.List; import java.util.UUID; @@ -34,7 +35,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0") +@BackendRequirement(type = BackendType.DSE, minInclusive = "5.0") public class LineStringIT extends GeometryIT { private static CcmRule ccm = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java index fcb3a3b5ae4..b6bbe1e9492 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PointIT.java @@ -16,8 +16,9 @@ package com.datastax.dse.driver.api.core.data.geometry; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import 
org.assertj.core.util.Lists; import org.junit.BeforeClass; @@ -25,7 +26,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0") +@BackendRequirement(type = BackendType.DSE, minInclusive = "5.0") public class PointIT extends GeometryIT { private static CcmRule ccm = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java index 9d7bfc3292f..2d5dccf8759 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/geometry/PolygonIT.java @@ -22,8 +22,9 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.uuid.Uuids; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.UUID; import org.assertj.core.util.Lists; @@ -33,7 +34,7 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0") +@BackendRequirement(type = BackendType.DSE, minInclusive = "5.0") public class PolygonIT extends GeometryIT { private static CcmRule ccm = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java index 958fac68ab9..5b2976c89d7 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/data/time/DateRangeIT.java @@ -25,8 +25,9 @@ import com.datastax.oss.driver.api.core.data.TupleValue; import com.datastax.oss.driver.api.core.data.UdtValue; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -44,7 +45,7 @@ import org.junit.rules.TestRule; @Category({ParallelizableTests.class}) -@DseRequirement(min = "5.1") +@BackendRequirement(type = BackendType.DSE, minInclusive = "5.1") public class DateRangeIT { private static CcmRule ccmRule = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java index df4c3385a79..eaf18f1a9ae 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphGeoSearchIndexIT.java @@ -16,8 +16,9 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import 
com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; @@ -30,7 +31,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1", + description = "DSE 5.1 required for graph geo indexing") public class ClassicGraphGeoSearchIndexIT extends GraphGeoSearchIndexITBase { private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java index 315c092f682..bb85c1e1223 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/ClassicGraphTextSearchIndexIT.java @@ -16,8 +16,9 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; @@ -30,7 +31,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.1", description = "DSE 5.1 required for graph geo indexing") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1", + description = "DSE 5.1 required for graph geo indexing") 
public class ClassicGraphTextSearchIndexIT extends GraphTextSearchIndexITBase { private static final CustomCcmRule CCM_RULE = CustomCcmRule.builder().withDseWorkloads("graph", "solr").build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java index 42b0e3378a5..279222c5bb1 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphGeoSearchIndexIT.java @@ -16,8 +16,9 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.Collection; @@ -28,7 +29,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") public class CoreGraphGeoSearchIndexIT extends GraphGeoSearchIndexITBase { private static final CustomCcmRule CCM_RULE = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java index 9617746e026..e1b784574e1 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CoreGraphTextSearchIndexIT.java @@ -16,8 +16,9 @@ package com.datastax.dse.driver.api.core.graph; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.Collection; @@ -28,7 +29,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") public class CoreGraphTextSearchIndexIT extends GraphTextSearchIndexITBase { private static final CustomCcmRule CCM_RULE = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java index ee8d4ac943d..74f441504f8 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/CqlCollectionIT.java @@ -22,8 +22,9 @@ import com.datastax.dse.driver.api.core.graph.predicates.CqlCollection; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.CqlSessionRuleBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -41,7 +42,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8", description = "DSE 6.8.0 required for collection predicates support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8", + description = "DSE 6.8.0 required for collection predicates support") public class CqlCollectionIT { private static final CustomCcmRule CCM_RULE = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java index aaaafc26248..6d70e994666 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphAuthenticationIT.java @@ -22,8 +22,9 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.auth.PlainTextAuthProvider; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; @@ -32,7 +33,10 @@ import org.junit.ClassRule; import org.junit.Test; -@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") 
+@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.0", + description = "DSE 5 required for Graph") public class GraphAuthenticationIT { @ClassRule diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java index 335aceb9b84..b2cefb1b4a3 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphPagingIT.java @@ -32,8 +32,9 @@ import com.datastax.oss.driver.api.core.cql.ExecutionInfo; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metrics.Metrics; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.util.CountingIterator; @@ -53,7 +54,10 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "Graph paging requires DSE 6.8+") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "Graph paging requires DSE 6.8+") @RunWith(DataProviderRunner.class) public class GraphPagingIT { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java index fcacddb787f..4d8aec69264 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphSpeculativeExecutionIT.java @@ -20,8 +20,9 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.specex.ConstantSpeculativeExecutionPolicy; import com.datastax.oss.driver.internal.core.specex.NoSpeculativeExecutionPolicy; @@ -33,7 +34,10 @@ import org.junit.Test; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8 required for graph paging") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8 required for graph paging") @RunWith(DataProviderRunner.class) public class GraphSpeculativeExecutionIT { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java index 9a8b3d2eedc..1330a433ff8 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTextSearchIndexITBase.java @@ -18,7 +18,8 @@ import static org.assertj.core.api.Assertions.assertThat; import com.datastax.dse.driver.api.core.graph.predicates.Search; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; @@ -102,7 +103,7 @@ public void search_by_regex(String indexType) { */ @UseDataProvider("indexTypes") @Test - @DseRequirement(min = "5.1.0") + @BackendRequirement(type = BackendType.DSE, minInclusive = "5.1.0") public void search_by_fuzzy(String indexType) { // Alias matches 'awrio' fuzzy GraphTraversal traversal = @@ -185,7 +186,7 @@ public void search_by_token_regex(String indexType) { */ @UseDataProvider("indexTypes") @Test - @DseRequirement(min = "5.1.0") + @BackendRequirement(type = BackendType.DSE, minInclusive = "5.1.0") public void search_by_token_fuzzy(String indexType) { // Description containing 'lives' fuzzy GraphTraversal traversal = @@ -210,7 +211,7 @@ public void search_by_token_fuzzy(String indexType) { */ @UseDataProvider("indexTypes") @Test - @DseRequirement(min = "5.1.0") + @BackendRequirement(type = BackendType.DSE, minInclusive = "5.1.0") public void search_by_phrase(String indexType) { // Full name contains phrase "Paul Joe" GraphTraversal traversal = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java index 4b8ec8d2d19..bc3fa00be9d 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/GraphTimeoutsIT.java @@ -24,8 +24,9 @@ import com.datastax.oss.driver.api.core.DriverTimeoutException; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import 
com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.time.Duration; import org.junit.ClassRule; @@ -33,7 +34,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.0", description = "DSE 5 required for Graph") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.0", + description = "DSE 5 required for Graph") public class GraphTimeoutsIT { public static CustomCcmRule ccmRule = CustomCcmRule.builder().withDseWorkloads("graph").build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/reactive/DefaultReactiveGraphResultSetIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/reactive/DefaultReactiveGraphResultSetIT.java index 9c46891b4ab..0e183efd4ea 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/reactive/DefaultReactiveGraphResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/reactive/DefaultReactiveGraphResultSetIT.java @@ -23,8 +23,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.ExecutionInfo; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; @@ -39,7 +40,10 @@ import org.junit.rules.TestRule; import 
org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "Graph paging requires DSE 6.8+") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "Graph paging requires DSE 6.8+") @RunWith(DataProviderRunner.class) public class DefaultReactiveGraphResultSetIT { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java index 5eb70d01604..6d5c8187718 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphDataTypeRemoteIT.java @@ -21,8 +21,9 @@ import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; @@ -32,7 +33,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + description = "DSE 5.0.3 required for remote TinkerPop support") public class ClassicGraphDataTypeRemoteIT extends ClassicGraphDataTypeITBase { private static final CustomCcmRule CCM_RULE = 
GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java index a855d38333a..c2c56468c5c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/ClassicGraphTraversalRemoteIT.java @@ -23,8 +23,9 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.junit.BeforeClass; @@ -32,8 +33,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement( - min = "5.0.9", +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.9", description = "DSE 5.0.9 required for inserting edges and vertices script.") public class ClassicGraphTraversalRemoteIT extends GraphTraversalRemoteITBase { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java index 40deb724757..21c8a48758f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphDataTypeRemoteIT.java @@ -21,8 +21,9 @@ import com.datastax.dse.driver.api.core.graph.DseGraph; import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; @@ -35,7 +36,10 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) public class CoreGraphDataTypeRemoteIT extends CoreGraphDataTypeITBase { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java index dfd45cdfb8e..e4afc6939eb 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/CoreGraphTraversalRemoteIT.java @@ -23,8 +23,9 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.junit.BeforeClass; @@ -32,7 +33,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8", description = "DSE 6.8 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8", + description = "DSE 6.8 required for Core graph support") public class CoreGraphTraversalRemoteIT extends GraphTraversalRemoteITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java index a40b7c6d397..b95d1c596e2 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMetaPropertiesRemoteIT.java @@ -23,8 +23,9 @@ import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; import 
org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; @@ -37,7 +38,10 @@ // INFO: meta props are going away in NGDG -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMetaPropertiesRemoteIT { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java index 6dcd6bda336..6267fc6719e 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalMultiPropertiesRemoteIT.java @@ -23,8 +23,9 @@ import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Iterator; import org.apache.tinkerpop.gremlin.process.traversal.AnonymousTraversalSource; @@ -37,7 +38,10 @@ import org.junit.rules.TestRule; // INFO: multi props are not supported in Core -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + 
description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMultiPropertiesRemoteIT { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java index 4177f5a1477..9d8ecdf7382 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java @@ -28,8 +28,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -583,7 +584,7 @@ public void should_handle_asynchronous_execution_graph_binary() { * @test_category dse:graph */ @Test - @DseRequirement(min = "5.1.0") + @BackendRequirement(type = BackendType.DSE, minInclusive = "5.1.0") public void should_fail_future_returned_from_promise_on_query_error() throws Exception { CompletableFuture future = graphTraversalSource().V("invalidid").peerPressure().promise(Traversal::next); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java index 35c05deb01f..5696d1e2e0c 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeFluentIT.java @@ -22,8 +22,9 @@ import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.junit.BeforeClass; @@ -31,7 +32,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for fluent API support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + description = "DSE 5.0.3 required for fluent API support") public class ClassicGraphDataTypeFluentIT extends ClassicGraphDataTypeITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java index 9a12e5ca54a..b462ab4ecde 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphDataTypeScriptIT.java @@ -20,8 +20,9 @@ import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import 
com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.structure.Vertex; import org.junit.BeforeClass; @@ -29,7 +30,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0.4", description = "DSE 5.0.4 required for script API with GraphSON 2") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.4", + description = "DSE 5.0.4 required for script API with GraphSON 2") public class ClassicGraphDataTypeScriptIT extends ClassicGraphDataTypeITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java index 780d8af6ed4..8f6a92e27ba 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalBatchIT.java @@ -19,8 +19,9 @@ import com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -29,7 +30,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.0", description = "DSE 6.0 required for BatchGraphStatement.") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.0", + description = "DSE 6.0 required for BatchGraphStatement.") public class ClassicGraphTraversalBatchIT extends GraphTraversalBatchITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java index f1dd053692b..c1da29e3519 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/ClassicGraphTraversalIT.java @@ -20,8 +20,9 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -30,8 +31,9 @@ import 
org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement( - min = "5.0.9", +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.9", description = "DSE 5.0.9 required for inserting edges and vertices script.") public class ClassicGraphTraversalIT extends GraphTraversalITBase { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java index 613aa006005..7b40239ada0 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeFluentIT.java @@ -22,8 +22,9 @@ import com.datastax.dse.driver.api.core.graph.FluentGraphStatement; import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; @@ -34,7 +35,10 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) public class CoreGraphDataTypeFluentIT extends CoreGraphDataTypeITBase { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java index 3bb73739f3e..e53f28937b1 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphDataTypeScriptIT.java @@ -20,8 +20,9 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatementBuilder; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import java.util.Map; @@ -30,7 +31,10 @@ import org.junit.rules.TestRule; import org.junit.runner.RunWith; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") @RunWith(DataProviderRunner.class) public class CoreGraphDataTypeScriptIT extends CoreGraphDataTypeITBase { diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java index 51f50fb25a6..9ec2b892a50 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalBatchIT.java @@ -19,8 +19,9 @@ import 
com.datastax.dse.driver.api.core.graph.SampleGraphScripts; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -29,7 +30,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") public class CoreGraphTraversalBatchIT extends GraphTraversalBatchITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java index 8bafe312916..a8ff90dc0ed 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/CoreGraphTraversalIT.java @@ -20,8 +20,9 @@ import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.dse.driver.api.core.graph.SocialTraversalSource; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource; import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph; @@ -30,7 +31,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.8.0", description = "DSE 6.8.0 required for Core graph support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8.0", + description = "DSE 6.8.0 required for Core graph support") public class CoreGraphTraversalIT extends GraphTraversalITBase { private static final CustomCcmRule CCM_RULE = GraphTestSupport.CCM_BUILDER_WITH_GRAPH.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java index ea3ee972c24..0c406c4534f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMetaPropertiesIT.java @@ -27,8 +27,9 @@ import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import org.apache.tinkerpop.gremlin.structure.Vertex; import 
org.apache.tinkerpop.gremlin.structure.VertexProperty; @@ -39,7 +40,10 @@ // INFO: meta props are going away in NGDG -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + description = "DSE 5.0.3 required for remote TinkerPop support") public class GraphTraversalMetaPropertiesIT { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java index 78bd336dc0a..8dc6532766c 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalMultiPropertiesIT.java @@ -25,8 +25,9 @@ import com.datastax.dse.driver.api.core.graph.GraphTestSupport; import com.datastax.dse.driver.api.core.graph.ScriptGraphStatement; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Iterator; import org.apache.tinkerpop.gremlin.structure.Vertex; @@ -37,7 +38,10 @@ import org.junit.rules.TestRule; // INFO: multi props are not supported in Core -@DseRequirement(min = "5.0.3", description = "DSE 5.0.3 required for remote TinkerPop support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0.3", + description = "DSE 5.0.3 required for remote TinkerPop support") 
public class GraphTraversalMultiPropertiesIT { private static final CustomCcmRule CCM_RULE = GraphTestSupport.GRAPH_CCM_RULE_BUILDER.build(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java index 00389706fcb..c8da3dcd2ca 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/insights/InsightsClientIT.java @@ -18,8 +18,9 @@ import com.datastax.dse.driver.internal.core.insights.InsightsClient; import com.datastax.dse.driver.internal.core.insights.configuration.InsightsConfiguration; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import io.netty.util.concurrent.DefaultEventExecutor; @@ -31,7 +32,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "6.7.0", description = "DSE 6.7.0 required for Insights support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.7.0", + description = "DSE 6.7.0 required for Insights support") public class InsightsClientIT { private static final StackTraceElement[] EMPTY_STACK_TRACE = {}; diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java index a7f1a4fd25a..ed96c74eac2 100644 --- 
a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java @@ -22,8 +22,9 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.schema.AggregateMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Objects; import java.util.Optional; @@ -32,7 +33,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0", description = "DSE 5.0+ required function/aggregate support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "DSE 5.0+ required function/aggregate support") public class DseAggregateMetadataIT extends AbstractMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java index 66ed45ce9e0..ecfadcfacfb 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java @@ -24,8 +24,9 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.metadata.schema.FunctionMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; -import 
com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import java.util.Objects; import java.util.Optional; @@ -34,7 +35,10 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@DseRequirement(min = "5.0", description = "DSE 5.0+ required function/aggregate support") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "DSE 5.0+ required function/aggregate support") public class DseFunctionMetadataIT extends AbstractMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java index 6feae130b8c..9abb7918183 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/KeyspaceGraphMetadataIT.java @@ -20,8 +20,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.CqlSessionRuleBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -32,7 +33,7 @@ import org.junit.rules.TestRule; 
@Category(ParallelizableTests.class) -@DseRequirement(min = "6.8") +@BackendRequirement(type = BackendType.DSE, minInclusive = "6.8") public class KeyspaceGraphMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java index 77bfeb13896..442f33a216f 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataCaseSensitiveIT.java @@ -20,8 +20,9 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.CqlSessionRuleBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -38,7 +39,7 @@ * case-sensitive column names in its tables. See JAVA-2492 for more information. 
*/ @Category(ParallelizableTests.class) -@DseRequirement(min = "6.8") +@BackendRequirement(type = BackendType.DSE, minInclusive = "6.8") public class TableGraphMetadataCaseSensitiveIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java index 933951dd7f8..7992e6ff6ba 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/TableGraphMetadataIT.java @@ -20,8 +20,9 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.metadata.Metadata; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.CqlSessionRuleBuilder; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -33,7 +34,7 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@DseRequirement(min = "6.8") +@BackendRequirement(type = BackendType.DSE, minInclusive = "6.8") public class TableGraphMetadataIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java index 8ba8986b35b..e640f25e50e 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ProtocolVersionInitialNegotiationIT.java @@ -26,12 +26,11 @@ import com.datastax.oss.driver.api.core.UnsupportedProtocolVersionException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; -import org.junit.Assume; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -42,68 +41,57 @@ public class ProtocolVersionInitialNegotiationIT { @Rule public CcmRule ccm = CcmRule.getInstance(); - @CassandraRequirement( - min = "2.1", - max = "2.2", + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.1", + maxExclusive = "2.2", description = "Only C* in [2.1,2.2[ has V3 as its highest version") + @BackendRequirement( + type = BackendType.DSE, + maxExclusive = "5.0", + description = "Only DSE in [*,5.0[ has V3 as its highest version") @Test - public void should_downgrade_to_v3_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + public void should_downgrade_to_v3() { try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); session.execute("select * from system.local"); } } - @DseRequirement(max = "5.0", description = "Only DSE in [*,5.0[ has V3 as its highest version") - @Test - 
public void should_downgrade_to_v3_dse() { - try (CqlSession session = SessionUtils.newSession(ccm)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); - session.execute("select * from system.local"); - } - } - - @CassandraRequirement( - min = "2.2", - max = "4.0-rc1", + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + maxExclusive = "4.0-rc1", description = "Only C* in [2.2,4.0-rc1[ has V4 as its highest version") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + maxExclusive = "5.1", + description = "Only DSE in [5.0,5.1[ has V4 as its highest version") @Test - public void should_downgrade_to_v4_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + public void should_downgrade_to_v4() { try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); session.execute("select * from system.local"); } } - @CassandraRequirement( - min = "4.0-rc1", + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "4.0-rc1", description = "Only C* in [4.0-rc1,*[ has V5 as its highest version") @Test public void should_downgrade_to_v5_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(5); session.execute("select * from system.local"); } } - @DseRequirement( - min = "5.0", - max = "5.1", - description = "Only DSE in [5.0,5.1[ has V4 as its highest version") - @Test - public void should_downgrade_to_v4_dse() { - try (CqlSession session = SessionUtils.newSession(ccm)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); - session.execute("select * from system.local"); - } - } - - @DseRequirement( - min = "5.1", - max = "6.0", + @BackendRequirement( + type = 
BackendType.DSE, + minInclusive = "5.1", + maxExclusive = "6.0", description = "Only DSE in [5.1,6.0[ has DSE_V1 as its highest version") @Test public void should_downgrade_to_dse_v1() { @@ -113,29 +101,16 @@ public void should_downgrade_to_dse_v1() { } } - @CassandraRequirement(max = "2.2", description = "Only C* in [*,2.2[ has V4 unsupported") - @Test - public void should_fail_if_provided_v4_is_not_supported_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") - .build(); - try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { - fail("Expected an AllNodesFailedException"); - } catch (AllNodesFailedException anfe) { - Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); - assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); - UnsupportedProtocolVersionException unsupportedException = - (UnsupportedProtocolVersionException) cause; - assertThat(unsupportedException.getAttemptedVersions()) - .containsOnly(DefaultProtocolVersion.V4); - } - } - - @DseRequirement(max = "5.0", description = "Only DSE in [*,5.0[ has V4 unsupported") + @BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "2.2", + description = "Only C* in [*,2.2[ has V4 unsupported") + @BackendRequirement( + type = BackendType.DSE, + maxExclusive = "5.0", + description = "Only DSE in [*,5.0[ has V4 unsupported") @Test - public void should_fail_if_provided_v4_is_not_supported_dse() { + public void should_fail_if_provided_v4_is_not_supported() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") @@ -152,34 +127,17 @@ public void should_fail_if_provided_v4_is_not_supported_dse() { } } - @CassandraRequirement( - min = "2.1", - max = "4.0-rc1", + @BackendRequirement( + type = BackendType.CASSANDRA, + 
minInclusive = "2.1", + maxExclusive = "4.0-rc1", description = "Only C* in [2.1,4.0-rc1[ has V5 unsupported or supported as beta") - @Test - public void should_fail_if_provided_v5_is_not_supported_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") - .build(); - try (CqlSession ignored = SessionUtils.newSession(ccm, loader)) { - fail("Expected an AllNodesFailedException"); - } catch (AllNodesFailedException anfe) { - Throwable cause = anfe.getAllErrors().values().iterator().next().get(0); - assertThat(cause).isInstanceOf(UnsupportedProtocolVersionException.class); - UnsupportedProtocolVersionException unsupportedException = - (UnsupportedProtocolVersionException) cause; - assertThat(unsupportedException.getAttemptedVersions()) - .containsOnly(DefaultProtocolVersion.V5); - } - } - - @DseRequirement( - max = "7.0", + @BackendRequirement( + type = BackendType.DSE, + maxExclusive = "7.0", description = "Only DSE in [*,7.0[ has V5 unsupported or supported as beta") @Test - public void should_fail_if_provided_v5_is_not_supported_dse() { + public void should_fail_if_provided_v5_is_not_supported() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") @@ -196,7 +154,10 @@ public void should_fail_if_provided_v5_is_not_supported_dse() { } } - @DseRequirement(max = "5.1", description = "Only DSE in [*,5.1[ has DSE_V1 unsupported") + @BackendRequirement( + type = BackendType.DSE, + maxExclusive = "5.1", + description = "Only DSE in [*,5.1[ has DSE_V1 unsupported") @Test public void should_fail_if_provided_dse_v1_is_not_supported() { DriverConfigLoader loader = @@ -215,7 +176,10 @@ public void should_fail_if_provided_dse_v1_is_not_supported() { } } - @DseRequirement(max = "6.0", description = "Only DSE in [*,6.0[ has DSE_V2 unsupported") + 
@BackendRequirement( + type = BackendType.DSE, + maxExclusive = "6.0", + description = "Only DSE in [*,6.0[ has DSE_V2 unsupported") @Test public void should_fail_if_provided_dse_v2_is_not_supported() { DriverConfigLoader loader = @@ -235,10 +199,12 @@ public void should_fail_if_provided_dse_v2_is_not_supported() { } /** Note that this test will need to be updated as new protocol versions are introduced. */ - @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "4.0", + description = "Only C* in [4.0,*[ has V5 supported") @Test - public void should_not_downgrade_if_server_supports_latest_version_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + public void should_not_downgrade_if_server_supports_latest_version() { try (CqlSession session = SessionUtils.newSession(ccm)) { assertThat(session.getContext().getProtocolVersion()).isEqualTo(ProtocolVersion.V5); session.execute("select * from system.local"); @@ -246,7 +212,10 @@ public void should_not_downgrade_if_server_supports_latest_version_oss() { } /** Note that this test will need to be updated as new protocol versions are introduced. 
*/ - @DseRequirement(min = "6.0", description = "Only DSE in [6.0,*[ has DSE_V2 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.0", + description = "Only DSE in [6.0,*[ has DSE_V2 supported") @Test public void should_not_downgrade_if_server_supports_latest_version_dse() { try (CqlSession session = SessionUtils.newSession(ccm)) { @@ -255,10 +224,16 @@ public void should_not_downgrade_if_server_supports_latest_version_dse() { } } - @CassandraRequirement(min = "2.1", description = "Only C* in [2.1,*[ has V3 supported") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.1", + description = "Only C* in [2.1,*[ has V3 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "4.8", + description = "Only DSE in [4.8,*[ has V3 supported") @Test - public void should_use_explicitly_provided_v3_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + public void should_use_explicitly_provided_v3() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") @@ -269,23 +244,16 @@ public void should_use_explicitly_provided_v3_oss() { } } - @DseRequirement(min = "4.8", description = "Only DSE in [4.8,*[ has V3 supported") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + description = "Only C* in [2.2,*[ has V4 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "Only DSE in [5.0,*[ has V4 supported") @Test - public void should_use_explicitly_provided_v3_dse() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") - .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(3); - session.execute("select * from system.local"); - } - } - - @CassandraRequirement(min 
= "2.2", description = "Only C* in [2.2,*[ has V4 supported") - @Test - public void should_use_explicitly_provided_v4_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); + public void should_use_explicitly_provided_v4() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") @@ -296,36 +264,16 @@ public void should_use_explicitly_provided_v4_oss() { } } - @DseRequirement(min = "5.0", description = "Only DSE in [5.0,*[ has V4 supported") - @Test - public void should_use_explicitly_provided_v4_dse() { - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V4") - .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(4); - session.execute("select * from system.local"); - } - } - - @CassandraRequirement(min = "4.0", description = "Only C* in [4.0,*[ has V5 supported") - @Test - public void should_use_explicitly_provided_v5_oss() { - Assume.assumeFalse("This test is only for OSS C*", ccm.getDseVersion().isPresent()); - DriverConfigLoader loader = - SessionUtils.configLoaderBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") - .build(); - try (CqlSession session = SessionUtils.newSession(ccm, loader)) { - assertThat(session.getContext().getProtocolVersion().getCode()).isEqualTo(5); - session.execute("select * from system.local"); - } - } - - @DseRequirement(min = "7.0", description = "Only DSE in [7.0,*[ has V5 supported") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "4.0", + description = "Only C* in [4.0,*[ has V5 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "7.0", + description = "Only DSE in [7.0,*[ has V5 supported") @Test - public void should_use_explicitly_provided_v5_dse() { + public void 
should_use_explicitly_provided_v5() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_VERSION, "V5") @@ -336,7 +284,10 @@ public void should_use_explicitly_provided_v5_dse() { } } - @DseRequirement(min = "5.1", description = "Only DSE in [5.1,*[ has DSE_V1 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1", + description = "Only DSE in [5.1,*[ has DSE_V1 supported") @Test public void should_use_explicitly_provided_dse_v1() { DriverConfigLoader loader = @@ -349,7 +300,10 @@ public void should_use_explicitly_provided_dse_v1() { } } - @DseRequirement(min = "6.0", description = "Only DSE in [6.0,*[ has DSE_V2 supported") + @BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.0", + description = "Only DSE in [6.0,*[ has DSE_V2 supported") @Test public void should_use_explicitly_provided_dse_v2() { DriverConfigLoader loader = diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java index ada5ae9a61b..c3334360a23 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/connection/NettyResourceLeakDetectionIT.java @@ -24,13 +24,15 @@ import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.Appender; import com.datastax.oss.driver.api.core.CqlSession; -import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import 
com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirementRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; @@ -42,10 +44,10 @@ import java.nio.ByteBuffer; import java.util.List; import org.junit.After; -import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; +import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; @@ -70,6 +72,10 @@ public class NettyResourceLeakDetectionIT { @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); + // Separately use BackendRequirementRule with @Rule so backend requirements are evaluated for each + // test method. 
+ @Rule public final BackendRequirementRule backendRequirementRule = new BackendRequirementRule(); + private static final ByteBuffer LARGE_PAYLOAD = Bytes.fromHexString("0x" + Strings.repeat("ab", Segment.MAX_PAYLOAD_LENGTH + 100)); @@ -118,12 +124,15 @@ public void should_not_leak_compressed_lz4() { } } + @BackendRequirement( + type = BackendType.DSE, + description = "Snappy is not supported in OSS C* 4.0+ with protocol v5") + @BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "4.0.0", + description = "Snappy is not supported in OSS C* 4.0+ with protocol v5") @Test public void should_not_leak_compressed_snappy() { - Assume.assumeTrue( - "Snappy is not supported in OSS C* 4.0+ with protocol v5", - CCM_RULE.getDseVersion().isPresent() - || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); DriverConfigLoader loader = SessionUtils.configLoaderBuilder() .withString(DefaultDriverOption.PROTOCOL_COMPRESSION, "snappy") diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java index cc960b6c27c..9a481aa1f85 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java @@ -31,8 +31,9 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import 
com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -121,7 +122,7 @@ public void should_execute_batch_of_bound_statements_with_variables() { } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void should_execute_batch_of_bound_statements_with_unset_values() { // Build a batch of batchCount statements with bound statements, each with their own positional // variables. diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 106b2823dc1..c8f3c2ea45e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -37,8 +37,9 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -353,7 +354,7 @@ public void should_propagate_attributes_when_preparing_a_simple_statement() { // Test for JAVA-2066 @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void should_compute_routing_key_when_indices_randomly_distributed() { try (CqlSession session = SessionUtils.newSession(ccmRule, 
sessionRule.keyspace())) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java index e3648c93424..702ff66db3e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java @@ -30,8 +30,9 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.cql.CqlRequestHandler; @@ -116,7 +117,7 @@ public void cleanupLogger() { } @Test - @CassandraRequirement(min = "3.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "3.0") public void should_execute_query_and_log_server_side_warnings() { final String query = "SELECT count(*) FROM test;"; Statement st = SimpleStatement.builder(query).build(); @@ -140,7 +141,7 @@ public void should_execute_query_and_log_server_side_warnings() { } @Test - @CassandraRequirement(min = "3.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "3.0") public void should_execute_query_and_not_log_server_side_warnings() { final String query = "SELECT count(*) FROM test;"; Statement st = @@ -158,7 +159,7 @@ public void should_execute_query_and_not_log_server_side_warnings() { } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = 
BackendType.CASSANDRA, minInclusive = "2.2") public void should_expose_warnings_on_execution_info() { // the default batch size warn threshold is 5 * 1024 bytes, but after CASSANDRA-10876 there must // be multiple mutations in a batch to trigger this warning so the batch includes 2 different diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java index 2b570329d51..16c3f43c990 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/NowInSecondsIT.java @@ -24,9 +24,9 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -39,10 +39,11 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "4.0") -@DseRequirement( - // Use next version -- not sure if it will be in by then, but as a reminder to check - min = "7.0", +@BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") +// Use next version -- not sure if it will be in by then, but as a reminder to check +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "7.0", description = "Feature not available in DSE yet") public class NowInSecondsIT { diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java index de6be0afe61..501ed5f5718 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java @@ -28,8 +28,9 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -74,14 +75,14 @@ public void setupSchema() { } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void should_reject_simple_statement_with_keyspace_in_protocol_v4() { should_reject_statement_with_keyspace_in_protocol_v4( SimpleStatement.newInstance("SELECT * FROM foo").setKeyspace(sessionRule.keyspace())); } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void should_reject_batch_statement_with_explicit_keyspace_in_protocol_v4() { SimpleStatement statementWithoutKeyspace = SimpleStatement.newInstance( @@ -94,7 +95,7 @@ public void should_reject_batch_statement_with_explicit_keyspace_in_protocol_v4( } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void 
should_reject_batch_statement_with_inferred_keyspace_in_protocol_v4() { SimpleStatement statementWithKeyspace = SimpleStatement.newInstance( @@ -120,7 +121,7 @@ private void should_reject_statement_with_keyspace_in_protocol_v4(Statement stat } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_execute_simple_statement_with_keyspace() { CqlSession session = sessionRule.session(); session.execute( @@ -138,7 +139,7 @@ public void should_execute_simple_statement_with_keyspace() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_execute_batch_with_explicit_keyspace() { CqlSession session = sessionRule.session(); session.execute( @@ -162,7 +163,7 @@ public void should_execute_batch_with_explicit_keyspace() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_execute_batch_with_inferred_keyspace() { CqlSession session = sessionRule.session(); session.execute( @@ -194,7 +195,7 @@ public void should_execute_batch_with_inferred_keyspace() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_prepare_statement_with_keyspace() { CqlSession session = sessionRule.session(); PreparedStatement prepared = @@ -214,7 +215,7 @@ public void should_prepare_statement_with_keyspace() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_reprepare_statement_with_keyspace_on_the_fly() { // Create a separate session because we don't want it to have a default keyspace try (CqlSession session = SessionUtils.newSession(ccmRule)) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index 490158980fb..964bc7fe34d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -34,7 +34,6 @@ import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; @@ -148,7 +147,7 @@ public void should_have_non_empty_variable_definitions_for_select_query_with_bou } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_update_metadata_when_schema_changed_across_executions() { // Given CqlSession session = sessionRule.session(); @@ -177,7 +176,7 @@ public void should_update_metadata_when_schema_changed_across_executions() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_update_metadata_when_schema_changed_across_pages() { // Given CqlSession session = sessionRule.session(); @@ -222,7 +221,7 @@ public void should_update_metadata_when_schema_changed_across_pages() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_update_metadata_when_schema_changed_across_sessions() { // Given CqlSession session1 = sessionRule.session(); @@ -269,7 +268,7 @@ public void should_update_metadata_when_schema_changed_across_sessions() { } @Test - @CassandraRequirement(min = "4.0") + 
@BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_fail_to_reprepare_if_query_becomes_invalid() { // Given CqlSession session = sessionRule.session(); @@ -288,13 +287,13 @@ public void should_fail_to_reprepare_if_query_becomes_invalid() { } @Test - @CassandraRequirement(min = "4.0") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_not_store_metadata_for_conditional_updates() { should_not_store_metadata_for_conditional_updates(sessionRule.session()); } @Test - @CassandraRequirement(min = "2.2") + @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public void should_not_store_metadata_for_conditional_updates_in_legacy_protocol() { DriverConfigLoader loader = SessionUtils.configLoaderBuilder() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java index 28795b6c4c4..c3eeae19a35 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenIT.java @@ -17,8 +17,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedToken; @@ -28,8 +29,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@CassandraRequirement( - max = 
"4.0-beta4", +@BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "4.0-beta4", description = "Token allocation is not compatible with this partitioner, " + "but is enabled by default in C* 4.0 (see CASSANDRA-7032 and CASSANDRA-13701)") diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java index 1009013c734..561a59a0847 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/ByteOrderedTokenVnodesIT.java @@ -17,8 +17,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.metadata.token.ByteOrderedToken; @@ -28,8 +29,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@CassandraRequirement( - max = "4.0-beta4", +@BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "4.0-beta4", description = "Token allocation is not compatible with this partitioner, " + "but is enabled by default in C* 4.0 (see CASSANDRA-7032 and CASSANDRA-13701)") diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java index 3dcf8f88b17..6f15a668e9e 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/Murmur3TokenVnodesIT.java @@ -17,8 +17,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.metadata.token.Murmur3Token; @@ -28,8 +29,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@CassandraRequirement( - max = "4.0-beta4", +@BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "4.0-beta4", // TODO Re-enable when CASSANDRA-16364 is fixed description = "TODO Re-enable when CASSANDRA-16364 is fixed") public class Murmur3TokenVnodesIT extends TokenITBase { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index 32e8c3929a5..ba35cf824b3 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -23,9 +23,10 @@ import com.datastax.oss.driver.api.core.loadbalancing.NodeDistance; import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeState; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import 
com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.context.EventBus; @@ -90,7 +91,7 @@ public void should_expose_node_metadata() { } @Test - @DseRequirement(min = "5.1") + @BackendRequirement(type = BackendType.DSE, minInclusive = "5.1") public void should_expose_dse_node_properties() { try (CqlSession session = SessionUtils.newSession(ccmRule)) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java index 1545bd46104..2f56e118b73 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/RandomTokenVnodesIT.java @@ -17,8 +17,9 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.internal.core.metadata.token.RandomToken; @@ -28,8 +29,9 @@ import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@CassandraRequirement( - max = "4.0-beta4", +@BackendRequirement( + type = BackendType.CASSANDRA, + maxExclusive = "4.0-beta4", // TODO Re-enable when CASSANDRA-16364 is 
fixed description = "TODO Re-enable when CASSANDRA-16364 is fixed") public class RandomTokenVnodesIT extends TokenITBase { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 1e2803c7ef4..266c24f2c45 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -31,8 +31,9 @@ import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -185,7 +186,10 @@ public void should_refresh_schema_manually() { } } - @CassandraRequirement(min = "4.0", description = "virtual tables introduced in 4.0") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "4.0", + description = "virtual tables introduced in 4.0") @Test public void should_get_virtual_metadata() { skipIfDse60(); @@ -269,7 +273,10 @@ public void should_get_virtual_metadata() { } } - @CassandraRequirement(min = "4.0", description = "virtual tables introduced in 4.0") + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "4.0", + description = "virtual tables introduced in 4.0") @Test public void should_exclude_virtual_keyspaces_from_token_map() { skipIfDse60(); diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java index e771b28116a..949e62eae28 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/DriverBlockHoundIntegrationCcmIT.java @@ -26,8 +26,9 @@ import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; -import com.datastax.oss.driver.api.testinfra.DseRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.IsolatedTests; import java.time.Duration; @@ -51,8 +52,9 @@ * {@link DriverBlockHoundIntegration} are being applied, and especially when continuous paging is * used. 
*/ -@DseRequirement( - min = "5.1.0", +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.1.0", description = "Continuous paging is only available from 5.1.0 onwards") @Category(IsolatedTests.class) public class DriverBlockHoundIntegrationCcmIT extends ContinuousPagingITBase { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java index cbcf0cc4f5e..be17e70963d 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DefaultNullSavingStrategyIT.java @@ -35,8 +35,9 @@ import com.datastax.oss.driver.api.mapper.annotations.SetEntity; import com.datastax.oss.driver.api.mapper.annotations.Update; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.function.BiConsumer; @@ -53,7 +54,10 @@ * DefaultNullSavingStrategy} annotation. 
*/ @Category(ParallelizableTests.class) -@CassandraRequirement(min = "2.2", description = "support for unset values") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + description = "support for unset values") public class DefaultNullSavingStrategyIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 18ff14cee43..4ddccc48b52 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -36,8 +36,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -53,8 +54,9 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement( - min = "3.0", +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.0", description = ">= in WHERE clause not supported in legacy versions") public class DeleteIT extends InventoryITBase { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java index 642bb9c17b9..3d0cef6afce 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/IncrementWithNullsIT.java @@ -28,8 +28,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.mapper.IncrementIT.ProductRating; @@ -42,7 +43,7 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "2.2") +@BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "2.2") public class IncrementWithNullsIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java index b7b8742e53c..73f89c19a07 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java @@ -38,8 +38,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.SetEntity; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -61,7 +62,10 @@ /** Tests that entities with UDTs nested at various levels are properly mapped. */ @Category(ParallelizableTests.class) -@CassandraRequirement(min = "2.2", description = "support for unset values") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + description = "support for unset values") public class NestedUdtIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java index 9cc4004690d..c97e6c084c8 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/PrimitivesIT.java @@ -29,8 +29,9 @@ import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.Objects; @@ -42,7 +43,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "2.2", description = "smallint is a reserved keyword in 2.1") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + 
description = "smallint is a reserved keyword in 2.1") public class PrimitivesIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java index 9abaa714996..6e9d75c4325 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SchemaValidationIT.java @@ -43,8 +43,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; import com.datastax.oss.driver.api.mapper.entity.EntityHelper; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.LoggerTest; @@ -61,7 +62,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "3.4", description = "Creates a SASI index") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.4", + description = "Creates a SASI index") public class SchemaValidationIT extends InventoryITBase { private static CcmRule ccm = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java index 3afcc03e451..498c9894346 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java @@ -32,8 +32,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Insert; import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -47,7 +48,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "3.4", description = "Creates a SASI index") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.4", + description = "Creates a SASI index") public class SelectCustomWhereClauseIT extends InventoryITBase { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java index 5b479a13f55..96b64f69a80 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectOtherClausesIT.java @@ -33,8 +33,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.mapper.annotations.Select; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -49,7 +50,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "3.6", description = "Uses PER PARTITION LIMIT") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.6", + description = "Uses PER PARTITION LIMIT") public class SelectOtherClausesIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java index 53773a20ee1..d930e9135ce 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateCustomIfClauseIT.java @@ -30,8 +30,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -46,7 +47,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "3.11.0", description = "UDT 
fields in IF clause") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.11.0", + description = "UDT fields in IF clause") public class UpdateCustomIfClauseIT extends InventoryITBase { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java index 6eb2f83793c..98102b30ebe 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateReactiveIT.java @@ -31,8 +31,9 @@ import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.annotations.Update; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import io.reactivex.Flowable; @@ -47,8 +48,9 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement( - min = "3.6", +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.6", description = "Uses UDT fields in IF conditions (CASSANDRA-7423)") public class UpdateReactiveIT extends InventoryITBase { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java index 9b6ed735d40..09937e61a64 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/JsonInsertIT.java @@ -30,8 +30,9 @@ import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.core.type.codec.ExtraTypeCodecs; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -49,7 +50,10 @@ import org.junit.rules.TestRule; @Category(ParallelizableTests.class) -@CassandraRequirement(min = "2.2", description = "JSON support in Cassandra was added in 2.2") +@BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "2.2", + description = "JSON support in Cassandra was added in 2.2") public class JsonInsertIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java index ef18fade1fe..c80e8449a39 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGeoTypesIT.java @@ -19,7 +19,8 @@ import com.datastax.oss.driver.api.osgi.service.MailboxService; import com.datastax.oss.driver.api.osgi.service.geo.GeoMailboxService; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import 
com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; import com.datastax.oss.driver.internal.osgi.checks.GeoServiceChecks; import com.datastax.oss.driver.internal.osgi.support.BundleOptions; @@ -35,7 +36,10 @@ @RunWith(CcmPaxExam.class) @ExamReactorStrategy(CcmExamReactorFactory.class) -@DseRequirement(min = "5.0", description = "Requires geo types") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "5.0", + description = "Requires geo types") public class OsgiGeoTypesIT { @Inject MailboxService service; diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java index a34c7946b8b..0b7cef9530f 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiGraphIT.java @@ -19,7 +19,8 @@ import com.datastax.oss.driver.api.osgi.service.MailboxService; import com.datastax.oss.driver.api.osgi.service.graph.GraphMailboxService; -import com.datastax.oss.driver.api.testinfra.DseRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; import com.datastax.oss.driver.internal.osgi.checks.GraphServiceChecks; import com.datastax.oss.driver.internal.osgi.support.BundleOptions; @@ -35,7 +36,10 @@ @RunWith(CcmPaxExam.class) @ExamReactorStrategy(CcmExamReactorFactory.class) -@DseRequirement(min = "6.8", description = "Requires Core Graph") +@BackendRequirement( + type = BackendType.DSE, + minInclusive = "6.8", + description = "Requires Core Graph") public class OsgiGraphIT { @Inject MailboxService service; diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java 
b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java index 9794cf27435..e3722530b82 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/OsgiSnappyIT.java @@ -16,7 +16,8 @@ package com.datastax.oss.driver.internal.osgi; import com.datastax.oss.driver.api.osgi.service.MailboxService; -import com.datastax.oss.driver.api.testinfra.CassandraRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.internal.osgi.checks.DefaultServiceChecks; import com.datastax.oss.driver.internal.osgi.support.BundleOptions; import com.datastax.oss.driver.internal.osgi.support.CcmExamReactorFactory; @@ -31,7 +32,7 @@ @RunWith(CcmPaxExam.class) @ExamReactorStrategy(CcmExamReactorFactory.class) -@CassandraRequirement(max = "3.99") +@BackendRequirement(type = BackendType.CASSANDRA, maxExclusive = "4.0.0") public class OsgiSnappyIT { @Inject MailboxService service; diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java index 4a1700639b4..8e33b0c9b11 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmPaxExam.java @@ -15,12 +15,7 @@ */ package com.datastax.oss.driver.internal.osgi.support; -import static com.datastax.oss.driver.internal.osgi.support.CcmStagedReactor.CCM_BRIDGE; - -import com.datastax.oss.driver.api.core.Version; -import com.datastax.oss.driver.api.testinfra.requirement.BackendType; -import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; -import java.util.Collection; +import 
com.datastax.oss.driver.api.testinfra.requirement.BackendRequirementRule; import org.junit.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runner.notification.Failure; @@ -37,19 +32,12 @@ public CcmPaxExam(Class klass) throws InitializationError { @Override public void run(RunNotifier notifier) { Description description = getDescription(); - BackendType backend = - CCM_BRIDGE.getDseVersion().isPresent() ? BackendType.DSE : BackendType.CASSANDRA; - Version version = CCM_BRIDGE.getDseVersion().orElseGet(CCM_BRIDGE::getCassandraVersion); - - Collection requirements = - VersionRequirement.fromAnnotations(getDescription()); - if (VersionRequirement.meetsAny(requirements, backend, version)) { + if (BackendRequirementRule.meetsDescriptionRequirements(description)) { super.run(notifier); } else { // requirements not met, throw reasoning assumption to skip test AssumptionViolatedException e = - new AssumptionViolatedException( - VersionRequirement.buildReasonString(requirements, backend, version)); + new AssumptionViolatedException(BackendRequirementRule.buildReasonString(description)); notifier.fireTestAssumptionFailed(new Failure(description, e)); } } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/CassandraRequirement.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/CassandraRequirement.java index e28757e420f..df50d42c825 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/CassandraRequirement.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/CassandraRequirement.java @@ -21,7 +21,11 @@ /** * Annotation for a Class or Method that defines a Cassandra Version requirement. If the cassandra * version in use does not meet the version requirement, the test is skipped. 
+ * + * @deprecated Replaced by {@link + * com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement} */ +@Deprecated @Retention(RetentionPolicy.RUNTIME) public @interface CassandraRequirement { diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/DseRequirement.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/DseRequirement.java index c80c6914282..f9c0ccd293a 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/DseRequirement.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/DseRequirement.java @@ -21,7 +21,11 @@ /** * Annotation for a Class or Method that defines a DSE Version requirement. If the DSE version in * use does not meet the version requirement or DSE isn't used at all, the test is skipped. + * + * @deprecated Replaced by {@link + * com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement} */ +@Deprecated @Retention(RetentionPolicy.RUNTIME) public @interface DseRequirement { diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java index d4830dd249e..29c8da9c7c9 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java @@ -19,9 +19,7 @@ import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; -import com.datastax.oss.driver.api.testinfra.requirement.BackendType; -import com.datastax.oss.driver.api.testinfra.requirement.VersionRequirement; -import java.util.Collection; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirementRule; import java.util.Optional; import org.junit.AssumptionViolatedException; import org.junit.runner.Description; @@ -58,13 +56,7 @@ 
protected void after() { @Override public Statement apply(Statement base, Description description) { - BackendType backend = - ccmBridge.getDseVersion().isPresent() ? BackendType.DSE : BackendType.CASSANDRA; - Version version = ccmBridge.getDseVersion().orElseGet(ccmBridge::getCassandraVersion); - - Collection requirements = VersionRequirement.fromAnnotations(description); - - if (VersionRequirement.meetsAny(requirements, backend, version)) { + if (BackendRequirementRule.meetsDescriptionRequirements(description)) { return super.apply(base, description); } else { // requirements not met, throw reasoning assumption to skip test @@ -72,7 +64,7 @@ public Statement apply(Statement base, Description description) { @Override public void evaluate() { throw new AssumptionViolatedException( - VersionRequirement.buildReasonString(requirements, backend, version)); + BackendRequirementRule.buildReasonString(description)); } }; } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java new file mode 100644 index 00000000000..2f48331a0ff --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java @@ -0,0 +1,59 @@ +/* + * Copyright DataStax, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.api.testinfra.requirement; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import org.junit.AssumptionViolatedException; +import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class BackendRequirementRule extends ExternalResource { + @Override + public Statement apply(Statement base, Description description) { + if (meetsDescriptionRequirements(description)) { + return super.apply(base, description); + } else { + // requirements not met, throw reasoning assumption to skip test + return new Statement() { + @Override + public void evaluate() { + throw new AssumptionViolatedException(buildReasonString(description)); + } + }; + } + } + + protected static BackendType getBackendType() { + return CcmBridge.DSE_ENABLEMENT ? BackendType.DSE : BackendType.CASSANDRA; + } + + protected static Version getVersion() { + return CcmBridge.VERSION; + } + + public static boolean meetsDescriptionRequirements(Description description) { + return VersionRequirement.meetsAny( + VersionRequirement.fromAnnotations(description), getBackendType(), getVersion()); + } + + /* Note, duplicating annotation processing from #meetsDescriptionRequirements */ + public static String buildReasonString(Description description) { + return VersionRequirement.buildReasonString( + VersionRequirement.fromAnnotations(description), getBackendType(), getVersion()); + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java index 28a72bc92ad..c83792286b3 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/VersionRequirement.java @@ -100,6 
+100,9 @@ public static Collection fromAnnotations(Description descrip CassandraRequirement cassandraRequirement = description.getAnnotation(CassandraRequirement.class); DseRequirement dseRequirement = description.getAnnotation(DseRequirement.class); + // matches methods/classes with one @BackendRequirement annotation + BackendRequirement backendRequirement = description.getAnnotation(BackendRequirement.class); + // matches methods/classes with two or more @BackendRequirement annotations BackendRequirements backendRequirements = description.getAnnotation(BackendRequirements.class); // build list of required versions @@ -110,6 +113,9 @@ public static Collection fromAnnotations(Description descrip if (dseRequirement != null) { requirements.add(VersionRequirement.fromDseRequirement(dseRequirement)); } + if (backendRequirement != null) { + requirements.add(VersionRequirement.fromBackendRequirement(backendRequirement)); + } if (backendRequirements != null) { Arrays.stream(backendRequirements.value()) .forEach(r -> requirements.add(VersionRequirement.fromBackendRequirement(r))); From ff4e003601b2d92eee3f4a9ea881d1e38ebfc8e4 Mon Sep 17 00:00:00 2001 From: hhughes Date: Mon, 21 Aug 2023 13:55:22 -0700 Subject: [PATCH 861/979] JAVA-3076: NullSavingStrategyIT sometimes fails with ProtocolError: Must not send frame with WARNING flag for native protocol version < 4 (#1669) NullSavingStrategyIT.java: - Remove V3 protocol configuration from class SessionRule - Create new session configured with V3 protocol in @BeforeClass method to use in tests --- .../driver/mapper/NullSavingStrategyIT.java | 63 ++++++++++++++----- 1 file changed, 48 insertions(+), 15 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java index 32f71041b19..99d7abef4a2 100644 --- 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NullSavingStrategyIT.java @@ -36,9 +36,11 @@ import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.Objects; import java.util.UUID; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; @@ -51,13 +53,24 @@ public class NullSavingStrategyIT { private static final CcmRule CCM_RULE = CcmRule.getInstance(); - private static final SessionRule SESSION_RULE = - SessionRule.builder(CCM_RULE) - .withConfigLoader( - DriverConfigLoader.programmaticBuilder() - .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") - .build()) - .build(); + private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); + + // JAVA-3076: V3 protocol calls that could trigger cassandra to issue client warnings appear to be + // inherently unstable when used at the same time as V4+ protocol clients (common since this is + // part of the parallelizable test suite). + // + // For this test we'll use latest protocol version for SessionRule set-up, which creates the + // keyspace and could potentially result in warning about too many keyspaces, and then create a + // new client for the tests to use, which they access via the static InventoryMapper instance + // `mapper`. + // + // This additional client is created in the @BeforeClass method #setup() and guaranteed to be + // closed in @AfterClass method #teardown(). 
+ // + // Note: The standard junit runner executes rules before class/test setup so the order of + // execution will be CcmRule#before > SessionRule#before > NullSavingStrategyIT#setup, meaning + // CCM_RULE/SESSION_RULE should be fully initialized by the time #setup() is invoked. + private static CqlSession v3Session; @ClassRule public static final TestRule CHAIN = RuleChain.outerRule(CCM_RULE).around(SESSION_RULE); @@ -66,14 +79,34 @@ public class NullSavingStrategyIT { @BeforeClass public static void setup() { - CqlSession session = SESSION_RULE.session(); - session.execute( - SimpleStatement.builder( - "CREATE TABLE product_simple(id uuid PRIMARY KEY, description text)") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); - - mapper = new NullSavingStrategyIT_InventoryMapperBuilder(session).build(); + // setup table for use in tests, this can use the default session + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE product_simple(id uuid PRIMARY KEY, description text)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + + // Create V3 protocol session for use in tests, will be closed in #teardown() + v3Session = + SessionUtils.newSession( + CCM_RULE, + SESSION_RULE.keyspace(), + DriverConfigLoader.programmaticBuilder() + .withString(DefaultDriverOption.PROTOCOL_VERSION, "V3") + .build()); + + // Hand V3 session to InventoryMapper which the tests will use to perform db calls + mapper = new NullSavingStrategyIT_InventoryMapperBuilder(v3Session).build(); + } + + @AfterClass + public static void teardown() { + // Close V3 session (SESSION_RULE will be closed separately by @ClassRule handling) + if (v3Session != null) { + v3Session.close(); + } } @Test From d990f92ba01c3ecaaa3f31a0f1fb327f77a96617 Mon Sep 17 00:00:00 2001 From: hhughes Date: Tue, 22 Aug 2023 15:27:53 -0700 Subject: [PATCH 862/979] JAVA-3104: Do not eagerly pre-allocate array when deserializing CqlVector (#1714) --- 
.../oss/driver/api/core/data/CqlVector.java | 2 +- .../driver/api/core/data/CqlVectorTest.java | 22 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java index 2889ea5eb24..4d388e3062c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java @@ -218,7 +218,7 @@ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFo stream.defaultReadObject(); int size = stream.readInt(); - list = new ArrayList<>(size); + list = new ArrayList<>(); for (int i = 0; i < size; i++) { list.add((T) stream.readObject()); } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java index 75dfbc26e42..ff28edf85ba 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java @@ -17,16 +17,22 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.assertj.core.api.Assertions.fail; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.SerializationHelper; import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import java.io.ByteArrayInputStream; +import java.io.ObjectInputStream; +import java.io.ObjectStreamException; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import org.apache.commons.codec.DecoderException; +import org.apache.commons.codec.binary.Hex; import org.assertj.core.util.Lists; import 
org.junit.Test; @@ -231,4 +237,20 @@ public int size() { CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); assertThat(deserialized).isEqualTo(initial); } + + @Test + public void should_not_use_preallocate_serialized_size() throws DecoderException { + // serialized CqlVector(1.0f, 2.5f, 3.0f) with size field adjusted to Integer.MAX_VALUE + byte[] suspiciousBytes = + Hex.decodeHex( + "aced000573720042636f6d2e64617461737461782e6f73732e6472697665722e6170692e636f72652e646174612e43716c566563746f722453657269616c697a6174696f6e50726f78790000000000000001030000787077047fffffff7372000f6a6176612e6c616e672e466c6f6174daedc9a2db3cf0ec02000146000576616c7565787200106a6176612e6c616e672e4e756d62657286ac951d0b94e08b02000078703f8000007371007e0002402000007371007e00024040000078" + .toCharArray()); + try { + new ObjectInputStream(new ByteArrayInputStream(suspiciousBytes)).readObject(); + fail("Should not be able to deserialize bytes with incorrect size field"); + } catch (Exception e) { + // check we fail to deserialize, rather than OOM + assertThat(e).isInstanceOf(ObjectStreamException.class); + } + } } From acd1cc31c6c64193e044c0f92f311cb1143035f4 Mon Sep 17 00:00:00 2001 From: Madhavan Date: Thu, 7 Sep 2023 12:19:59 -0400 Subject: [PATCH 863/979] Fix hyperlink on the https://docs.datastax.com/en/developer/java-driver/latest/manual/mapper/mapper/#mapper-builder website (#1723) --- manual/mapper/mapper/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 18be59df1c4..894143f0b9b 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -39,7 +39,7 @@ public interface InventoryMapper { ``` The builder allows you to create a mapper instance, by wrapping a core `CqlSession` (if you need -more details on how to create a session, refer to the [core driver documentation](../core/)). 
+more details on how to create a session, refer to the [core driver documentation](../../core/)). ```java CqlSession session = CqlSession.builder().build(); From 3c4aa0e9c162db590ba3cb21b93faa200f812e48 Mon Sep 17 00:00:00 2001 From: hhughes Date: Thu, 7 Sep 2023 13:52:46 -0700 Subject: [PATCH 864/979] JAVA-3116: update surefire/failsafe to 3.0.0 to fix issue running tests with specified jvm (#1719) Additionally: - Set --jvm_version=8 when running dse 6.8.19+ with graph workloads (DSP-23501) - Update commons-configuration2 to 2.9.0 + deps in BundleOptions to support java17 - Update felix framework version to 7.0.1 for java17 (FELIX-6287) - Pick up newer bndlib for ArrayIndexOutOfBounds error printed with OsgiGraphIT (bndtools/bnd issue#3405) - Update pax-url-wrap to 2.6.4 (and bring pax-url-reference up to the same version) - Force newer tinybundles version 3.0.0 (default 2.1.1 version required older bndlib) --- .../internal/osgi/support/BundleOptions.java | 5 +- pom.xml | 14 ++++-- .../driver/api/testinfra/ccm/CcmBridge.java | 49 ++++++++++++++++++- 3 files changed, 60 insertions(+), 8 deletions(-) diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index 6e7d82787f4..aa62b623f4b 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -152,9 +152,10 @@ public static CompositeOption tinkerpopBundles() { .overwriteManifest(WrappedUrlProvisionOption.OverwriteMode.FULL), // Note: the versions below are hard-coded because they shouldn't change very often, // but if the tests fail because of them, we should consider parameterizing them - mavenBundle("com.sun.mail", "mailapi", "1.6.4"), + mavenBundle("com.sun.activation", "jakarta.activation", "2.0.1"), + mavenBundle("com.sun.mail", 
"mailapi", "2.0.1"), mavenBundle("org.apache.commons", "commons-text", "1.8"), - mavenBundle("org.apache.commons", "commons-configuration2", "2.7"), + mavenBundle("org.apache.commons", "commons-configuration2", "2.9.0"), CoreOptions.wrappedBundle(mavenBundle("commons-logging", "commons-logging", "1.1.1")) .exports("org.apache.commons.logging.*") .bundleVersion("1.1.1") diff --git a/pom.xml b/pom.xml index b74cfeee652..efde974bb28 100644 --- a/pom.xml +++ b/pom.xml @@ -68,8 +68,9 @@ 4.13.2 1.2.3 6.0.0 - 6.0.3 + 7.0.1 4.13.4 + 2.6.4 0.11.0 1.1.4 2.31 @@ -79,7 +80,7 @@ 2.2.2 4.0.3 2.0.0-M19 - 2.22.2 + 3.0.0 22.0.0.2 false ${skipTests} @@ -269,12 +270,17 @@ org.ops4j.pax.url pax-url-wrap - 2.6.3 + ${pax-url.version} org.ops4j.pax.url pax-url-reference - 2.6.2 + ${pax-url.version} + + + org.ops4j.pax.tinybundles + tinybundles + 3.0.0 org.glassfish diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index cef9e13c4b6..6985516f84b 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -44,6 +44,7 @@ import org.apache.commons.exec.Executor; import org.apache.commons.exec.LogOutputStream; import org.apache.commons.exec.PumpStreamHandler; +import org.assertj.core.util.Lists; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -267,8 +268,11 @@ public void reloadCore(int node, String keyspace, String table, boolean reindex) public void start() { if (started.compareAndSet(false, true)) { + List cmdAndArgs = Lists.newArrayList("start", jvmArgs, "--wait-for-binary-proto"); + overrideJvmVersionForDseWorkloads() + .ifPresent(jvmVersion -> cmdAndArgs.add(String.format("--jvm_version=%d", jvmVersion))); try { - execute("start", jvmArgs, "--wait-for-binary-proto"); + execute(cmdAndArgs.toArray(new String[0])); } catch 
(RuntimeException re) { // if something went wrong starting CCM, see if we can also dump the error executeCheckLogError(); @@ -296,7 +300,10 @@ public void resume(int n) { } public void start(int n) { - execute("node" + n, "start"); + List cmdAndArgs = Lists.newArrayList("node" + n, "start"); + overrideJvmVersionForDseWorkloads() + .ifPresent(jvmVersion -> cmdAndArgs.add(String.format("--jvm_version=%d", jvmVersion))); + execute(cmdAndArgs.toArray(new String[0])); } public void stop(int n) { @@ -416,6 +423,44 @@ private static File createTempStore(String storePath) { return f; } + /** + * Get the current JVM major version (1.8.0_372 -> 8, 11.0.19 -> 11) + * + * @return major version of current JVM + */ + private static int getCurrentJvmMajorVersion() { + String version = System.getProperty("java.version"); + if (version.startsWith("1.")) { + version = version.substring(2, 3); + } else { + int dot = version.indexOf("."); + if (dot != -1) { + version = version.substring(0, dot); + } + } + return Integer.parseInt(version); + } + + private Optional overrideJvmVersionForDseWorkloads() { + if (getCurrentJvmMajorVersion() <= 8) { + return Optional.empty(); + } + + if (!DSE_ENABLEMENT || !getDseVersion().isPresent()) { + return Optional.empty(); + } + + if (getDseVersion().get().compareTo(Version.parse("6.8.19")) < 0) { + return Optional.empty(); + } + + if (dseWorkloads.contains("graph")) { + return Optional.of(8); + } + + return Optional.empty(); + } + public static Builder builder() { return new Builder(); } From 18498127e0dc060d569f3e66731b21263022ffe2 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Mon, 11 Sep 2023 08:11:36 -0700 Subject: [PATCH 865/979] Update 4.x changelog for 3.11.5 release (#1731) --- changelog/README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index cb272907b66..54d0d7a6c37 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -632,6 +632,10 @@ 
changelog](https://docs.datastax.com/en/developer/java-driver-dse/latest/changel - [bug] JAVA-1499: Wait for load balancing policy at cluster initialization - [new feature] JAVA-1495: Add prepared statements +## 3.11.5 +- [improvement] JAVA-3114: Shade io.dropwizard.metrics:metrics-core in shaded driver +- [improvement] JAVA-3115: SchemaChangeListener#onKeyspaceChanged can fire when keyspace has not changed if using SimpleStrategy replication + ## 3.11.4 - [improvement] JAVA-3079: Upgrade Netty to 4.1.94, 3.x edition - [improvement] JAVA-3082: Fix maven build for Apple-silicon From 06947df149626dff539c24dc95257759b2a42709 Mon Sep 17 00:00:00 2001 From: nparaddi-walmart <121308948+nparaddi-walmart@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:16:47 -0700 Subject: [PATCH 866/979] =?UTF-8?q?add=20support=20for=20publishing=20perc?= =?UTF-8?q?entile=20time=20series=20for=20the=20histogram=20m=E2=80=A6=20(?= =?UTF-8?q?#1689)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add support for publishing percentile time series for the histogram metrics cql-requests, cql-messages and throttling delay. Motivation: Histogram metrics is generating too many metrics overloading the promethous servers. if application has 500 Vms and 1000 cassandra nodes, The histogram metrics generates 100*500*1000 = 50,000,000 time series every 30 seconds. This is just too much metrics. Let us say we can generate percentile 95 timeseries for for every cassandra nodes, then we only have 1*500 = 500 metrics and in applciation side, we can ignore the _bucket time series. This way there will be very less metrics. Modifications: add configurable pre-defined percentiles to Micrometer Timer.Builder.publishPercentiles. This change is being added to cql-requests, cql-messages and throttling delay. Result: Based on the configuration, we will see additonal quantile time series for cql-requests, cql-messages and throttling delay histogram metrics. 
* add support for publishing percentile time series for the histogram metrics cql-requests, cql-messages and throttling delay. Motivation: Histogram metrics is generating too many metrics overloading the promethous servers. if application has 500 Vms and 1000 cassandra nodes, The histogram metrics generates 100*500*1000 = 50,000,000 time series every 30 seconds. This is just too much metrics. Let us say we can generate percentile 95 timeseries for for every cassandra nodes, then we only have 1*500 = 500 metrics and in applciation side, we can ignore the _bucket time series. This way there will be very less metrics. Modifications: add configurable pre-defined percentiles to Micrometer Timer.Builder.publishPercentiles. This change is being added to cql-requests, cql-messages and throttling delay. Result: Based on the configuration, we will see additonal quantile time series for cql-requests, cql-messages and throttling delay histogram metrics. * using helper method as suggested in review * fixes as per review comments * add configuration option which switches aggregable histogram generation on/off for all metric flavors [default=on] * updating java doc * rename method to publishPercentilesIfDefined * renmae method --------- Co-authored-by: Nagappa Paraddi --- .../api/core/config/DseDriverOption.java | 28 ++++++++ .../api/core/config/DefaultDriverOption.java | 35 ++++++++++ .../driver/api/core/config/OptionsMap.java | 1 + .../api/core/config/TypedDriverOption.java | 45 +++++++++++++ core/src/main/resources/reference.conf | 25 +++++++- .../micrometer/MicrometerMetricUpdater.java | 26 +++++++- .../MicrometerNodeMetricUpdater.java | 15 +++-- .../MicrometerSessionMetricUpdater.java | 29 ++++++--- .../MicrometerNodeMetricUpdaterTest.java | 64 ++++++++++++++++++- .../MicrometerSessionMetricUpdaterTest.java | 64 ++++++++++++++++++- 10 files changed, 311 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java 
b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java index 74907c177b6..3ad6ed683bf 100644 --- a/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java +++ b/core/src/main/java/com/datastax/dse/driver/api/core/config/DseDriverOption.java @@ -288,6 +288,34 @@ public enum DseDriverOption implements DriverOption { *

          Value-type: {@link java.time.Duration Duration} */ METRICS_NODE_GRAPH_MESSAGES_SLO("advanced.metrics.node.graph-messages.slo"), + /** + * Optional list of percentiles to publish for graph-requests metric. Produces an additional time + * series for each requested percentile. This percentile is computed locally, and so can't be + * aggregated with percentiles computed across other dimensions (e.g. in a different instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + METRICS_SESSION_GRAPH_REQUESTS_PUBLISH_PERCENTILES( + "advanced.metrics.session.graph-requests.publish-percentiles"), + /** + * Optional list of percentiles to publish for node graph-messages metric. Produces an additional + * time series for each requested percentile. This percentile is computed locally, and so can't be + * aggregated with percentiles computed across other dimensions (e.g. in a different instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + METRICS_NODE_GRAPH_MESSAGES_PUBLISH_PERCENTILES( + "advanced.metrics.node.graph-messages.publish-percentiles"), + /** + * Optional list of percentiles to publish for continuous paging requests metric. Produces an + * additional time series for each requested percentile. This percentile is computed locally, and + * so can't be aggregated with percentiles computed across other dimensions (e.g. in a different + * instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES( + "advanced.metrics.session.continuous-cql-requests.publish-percentiles"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index e7e75d952fa..71d07236f1e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -939,6 +939,41 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: List of {@link String} */ METADATA_SCHEMA_CHANGE_LISTENER_CLASSES("advanced.schema-change-listener.classes"), + /** + * Optional list of percentiles to publish for cql-requests metric. Produces an additional time + * series for each requested percentile. This percentile is computed locally, and so can't be + * aggregated with percentiles computed across other dimensions (e.g. in a different instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES( + "advanced.metrics.session.cql-requests.publish-percentiles"), + /** + * Optional list of percentiles to publish for node cql-messages metric. Produces an additional + * time series for each requested percentile. This percentile is computed locally, and so can't be + * aggregated with percentiles computed across other dimensions (e.g. in a different instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + METRICS_NODE_CQL_MESSAGES_PUBLISH_PERCENTILES( + "advanced.metrics.node.cql-messages.publish-percentiles"), + /** + * Optional list of percentiles to publish for throttling delay metric.Produces an additional time + * series for each requested percentile. This percentile is computed locally, and so can't be + * aggregated with percentiles computed across other dimensions (e.g. in a different instance). + * + *

          Value type: {@link java.util.List List}<{@link Double}> + */ + METRICS_SESSION_THROTTLING_PUBLISH_PERCENTILES( + "advanced.metrics.session.throttling.delay.publish-percentiles"), + /** + * Adds histogram buckets used to generate aggregable percentile approximations in monitoring + * systems that have query facilities to do so (e.g. Prometheus histogram_quantile, Atlas + * percentiles). + * + *

          Value-type: boolean + */ + METRICS_GENERATE_AGGREGABLE_HISTOGRAMS("advanced.metrics.histograms.generate-aggregable"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 8f5aa01592e..2c7a1169984 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -378,6 +378,7 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.COALESCER_INTERVAL, Duration.of(10, ChronoUnit.MICROS)); map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC, 0); map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, false); + map.put(TypedDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS, true); } @Immutable diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 2428be064ce..3f790e9c0dd 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -388,6 +388,10 @@ public String toString() { /** The consistency level to use for trace queries. */ public static final TypedDriverOption REQUEST_TRACE_CONSISTENCY = new TypedDriverOption<>(DefaultDriverOption.REQUEST_TRACE_CONSISTENCY, GenericType.STRING); + /** Whether or not to publish aggregable histogram for metrics */ + public static final TypedDriverOption METRICS_GENERATE_AGGREGABLE_HISTOGRAMS = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS, GenericType.BOOLEAN); /** List of enabled session-level metrics. 
*/ public static final TypedDriverOption> METRICS_SESSION_ENABLED = new TypedDriverOption<>( @@ -409,6 +413,12 @@ public String toString() { new TypedDriverOption<>( DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO, GenericType.listOf(GenericType.DURATION)); + /** Optional pre-defined percentile of cql requests to publish, as a list of percentiles . */ + public static final TypedDriverOption> + METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for * requests. @@ -433,6 +443,12 @@ public String toString() { new TypedDriverOption<>( DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO, GenericType.listOf(GenericType.DURATION)); + /** Optional pre-defined percentile of throttling delay to publish, as a list of percentiles . */ + public static final TypedDriverOption> + METRICS_SESSION_THROTTLING_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_SESSION_THROTTLING_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for * throttling. @@ -457,6 +473,12 @@ public String toString() { new TypedDriverOption<>( DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO, GenericType.listOf(GenericType.DURATION)); + /** Optional pre-defined percentile of node cql messages to publish, as a list of percentiles . */ + public static final TypedDriverOption> + METRICS_NODE_CQL_MESSAGES_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for * requests. 
@@ -700,6 +722,15 @@ public String toString() { new TypedDriverOption<>( DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO, GenericType.listOf(GenericType.DURATION)); + /** + * Optional pre-defined percentile of continuous paging cql requests to publish, as a list of + * percentiles . + */ + public static final TypedDriverOption> + CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for * continuous requests. @@ -774,6 +805,12 @@ public String toString() { new TypedDriverOption<>( DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO, GenericType.listOf(GenericType.DURATION)); + /** Optional pre-defined percentile of graph requests to publish, as a list of percentiles . */ + public static final TypedDriverOption> + METRICS_SESSION_GRAPH_REQUESTS_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for graph * requests. @@ -798,6 +835,14 @@ public String toString() { new TypedDriverOption<>( DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO, GenericType.listOf(GenericType.DURATION)); + /** + * Optional pre-defined percentile of node graph requests to publish, as a list of percentiles . + */ + public static final TypedDriverOption> + METRICS_NODE_GRAPH_MESSAGES_PUBLISH_PERCENTILES = + new TypedDriverOption<>( + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_PUBLISH_PERCENTILES, + GenericType.listOf(GenericType.DOUBLE)); /** * The number of significant decimal digits to which internal structures will maintain for graph * requests. 
diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index ee83280032e..4c58a1698e1 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1434,6 +1434,16 @@ datastax-java-driver { // prefix = "cassandra" } + histograms { + # Adds histogram buckets used to generate aggregable percentile approximations in monitoring + # systems that have query facilities to do so (e.g. Prometheus histogram_quantile, Atlas percentiles). + # + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + generate-aggregable = true + } + # The session-level metrics (all disabled by default). # # Required: yes @@ -1526,7 +1536,7 @@ datastax-java-driver { # Modifiable at runtime: no # Overridable in a profile: no cql-requests { - + # The largest latency that we expect to record. # # This should be slightly higher than request.timeout (in theory, readings can't be higher @@ -1569,7 +1579,7 @@ datastax-java-driver { # time). # Valid for: Dropwizard. refresh-interval = 5 minutes - + # An optional list of latencies to track as part of the application's service-level # objectives (SLOs). # @@ -1577,7 +1587,11 @@ datastax-java-driver { # buckets used to generate aggregable percentile approximations. # Valid for: Micrometer. // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] - + + # An optional list of percentiles to be published by Micrometer. Produces an additional time series for each requested percentile. + # This percentile is computed locally, and so can't be aggregated with percentiles computed across other dimensions (e.g. in a different instance) + # Valid for: Micrometer. + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } # Required: if the 'throttling.delay' metric is enabled, and Dropwizard or Micrometer is used. 
@@ -1589,6 +1603,7 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } # Required: if the 'continuous-cql-requests' metric is enabled, and Dropwizard or Micrometer @@ -1601,6 +1616,7 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } # Required: if the 'graph-requests' metric is enabled, and Dropwizard or Micrometer is used. @@ -1612,6 +1628,7 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } } # The node-level metrics (all disabled by default). @@ -1776,6 +1793,7 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } # See graph-requests in the `session` section @@ -1789,6 +1807,7 @@ datastax-java-driver { significant-digits = 3 refresh-interval = 5 minutes // slo = [ 100 milliseconds, 500 milliseconds, 1 second ] + // publish-percentiles = [ 0.75, 0.95, 0.99 ] } # The time after which the node level metrics will be evicted. 
diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java index c30dcc121ab..4785da14543 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -15,7 +15,9 @@ */ package com.datastax.oss.driver.internal.metrics.micrometer; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metrics.AbstractMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.MetricId; @@ -27,6 +29,7 @@ import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.instrument.Tag; import io.micrometer.core.instrument.Timer; +import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -151,12 +154,31 @@ protected Timer getOrCreateTimerFor(MetricT metric) { } protected Timer.Builder configureTimer(Timer.Builder builder, MetricT metric, MetricId id) { - return builder.publishPercentileHistogram(); + DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); + if (profile.getBoolean(DefaultDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS)) { + builder.publishPercentileHistogram(); + } + return builder; } @SuppressWarnings("unused") protected DistributionSummary.Builder configureDistributionSummary( DistributionSummary.Builder builder, MetricT metric, MetricId id) { - return builder.publishPercentileHistogram(); + DriverExecutionProfile 
profile = context.getConfig().getDefaultProfile(); + if (profile.getBoolean(DefaultDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS)) { + builder.publishPercentileHistogram(); + } + return builder; + } + + static double[] toDoubleArray(List doubleList) { + return doubleList.stream().mapToDouble(Double::doubleValue).toArray(); + } + + static void configurePercentilesPublishIfDefined( + Timer.Builder builder, DriverExecutionProfile profile, DriverOption driverOption) { + if (profile.isDefined(driverOption)) { + builder.publishPercentiles(toDoubleArray(profile.getDoubleList(driverOption))); + } } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java index 0f5dada2bf3..d6359bca327 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdater.java @@ -96,9 +96,9 @@ protected void cancelMetricsExpirationTimeout() { @Override protected Timer.Builder configureTimer(Timer.Builder builder, NodeMetric metric, MetricId id) { DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); + super.configureTimer(builder, metric, id); if (metric == DefaultNodeMetric.CQL_MESSAGES) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_LOWEST)) .maximumExpectedValue( @@ -111,9 +111,11 @@ protected Timer.Builder configureTimer(Timer.Builder builder, NodeMetric metric, : null) .percentilePrecision( profile.getInt(DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS)); + + configurePercentilesPublishIfDefined( + builder, profile, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_PUBLISH_PERCENTILES); } 
else if (metric == DseNodeMetric.GRAPH_MESSAGES) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_LOWEST)) .maximumExpectedValue( @@ -125,7 +127,10 @@ protected Timer.Builder configureTimer(Timer.Builder builder, NodeMetric metric, .toArray(new Duration[0]) : null) .percentilePrecision(profile.getInt(DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS)); + + configurePercentilesPublishIfDefined( + builder, profile, DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_PUBLISH_PERCENTILES); } - return super.configureTimer(builder, metric, id); + return builder; } } diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java index bb361b85f22..f9387f1685a 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdater.java @@ -63,9 +63,9 @@ protected MetricId getMetricId(SessionMetric metric) { @Override protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metric, MetricId id) { DriverExecutionProfile profile = context.getConfig().getDefaultProfile(); + super.configureTimer(builder, metric, id); if (metric == DefaultSessionMetric.CQL_REQUESTS) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_LOWEST)) .maximumExpectedValue( @@ -80,9 +80,11 @@ protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metr profile.isDefined(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS) ? 
profile.getInt(DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS) : null); + + configurePercentilesPublishIfDefined( + builder, profile, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES); } else if (metric == DefaultSessionMetric.THROTTLING_DELAY) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration(DefaultDriverOption.METRICS_SESSION_THROTTLING_LOWEST)) .maximumExpectedValue( @@ -97,9 +99,11 @@ protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metr profile.isDefined(DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS) ? profile.getInt(DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS) : null); + + configurePercentilesPublishIfDefined( + builder, profile, DefaultDriverOption.METRICS_SESSION_THROTTLING_PUBLISH_PERCENTILES); } else if (metric == DseSessionMetric.CONTINUOUS_CQL_REQUESTS) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration( DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_LOWEST)) @@ -119,9 +123,13 @@ protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metr ? profile.getInt( DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS) : null); + + configurePercentilesPublishIfDefined( + builder, + profile, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES); } else if (metric == DseSessionMetric.GRAPH_REQUESTS) { - return builder - .publishPercentileHistogram() + builder .minimumExpectedValue( profile.getDuration(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_LOWEST)) .maximumExpectedValue( @@ -136,7 +144,10 @@ protected Timer.Builder configureTimer(Timer.Builder builder, SessionMetric metr profile.isDefined(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS) ? 
profile.getInt(DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS) : null); + + configurePercentilesPublishIfDefined( + builder, profile, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_PUBLISH_PERCENTILES); } - return super.configureTimer(builder, metric, id); + return builder; } } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java index fbdfb7b2355..e5482aad910 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerNodeMetricUpdaterTest.java @@ -152,7 +152,8 @@ public void should_create_timer( DriverOption lowest, DriverOption highest, DriverOption digits, - DriverOption sla) { + DriverOption sla, + DriverOption percentiles) { // given Node node = mock(Node.class); InternalDriverContext context = mock(InternalDriverContext.class); @@ -174,6 +175,8 @@ public void should_create_timer( when(profile.isDefined(sla)).thenReturn(true); when(profile.getDurationList(sla)) .thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + when(profile.isDefined(percentiles)).thenReturn(true); + when(profile.getDoubleList(percentiles)).thenReturn(Arrays.asList(0.75, 0.95, 0.99)); when(generator.nodeMetricId(node, metric)).thenReturn(METRIC_ID); SimpleMeterRegistry registry = spy(new SimpleMeterRegistry()); @@ -190,6 +193,63 @@ public void should_create_timer( assertThat(timer.count()).isEqualTo(10); HistogramSnapshot snapshot = timer.takeSnapshot(); assertThat(snapshot.histogramCounts()).hasSize(2); + assertThat(snapshot.percentileValues()).hasSize(3); + assertThat(snapshot.percentileValues()) + .satisfiesExactlyInAnyOrder( + valuePercentile -> 
assertThat(valuePercentile.percentile()).isEqualTo(0.75), + valuePercentile -> assertThat(valuePercentile.percentile()).isEqualTo(0.95), + valuePercentile -> assertThat(valuePercentile.percentile()).isEqualTo(0.99)); + } + + @Test + @UseDataProvider(value = "timerMetrics") + public void should_not_create_sla_percentiles( + NodeMetric metric, + DriverOption lowest, + DriverOption highest, + DriverOption digits, + DriverOption sla, + DriverOption percentiles) { + // given + Node node = mock(Node.class); + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator generator = mock(MetricIdGenerator.class); + Set enabledMetrics = Collections.singleton(metric); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofHours(1)); + when(profile.getDuration(lowest)).thenReturn(Duration.ofMillis(10)); + when(profile.getDuration(highest)).thenReturn(Duration.ofSeconds(1)); + when(profile.getInt(digits)).thenReturn(5); + when(profile.isDefined(sla)).thenReturn(false); + when(profile.getDurationList(sla)) + .thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + when(profile.isDefined(percentiles)).thenReturn(false); + when(profile.getDoubleList(percentiles)).thenReturn(Arrays.asList(0.75, 0.95, 0.99)); + when(generator.nodeMetricId(node, metric)).thenReturn(METRIC_ID); + + SimpleMeterRegistry registry = spy(new SimpleMeterRegistry()); + MicrometerNodeMetricUpdater updater = + new MicrometerNodeMetricUpdater(node, context, enabledMetrics, registry); + + for (int i = 0; i < 10; i++) { + updater.updateTimer(metric, null, 100, 
TimeUnit.MILLISECONDS); + } + + // then + Timer timer = registry.find(METRIC_ID.getName()).timer(); + assertThat(timer).isNotNull(); + assertThat(timer.count()).isEqualTo(10); + HistogramSnapshot snapshot = timer.takeSnapshot(); + assertThat(snapshot.histogramCounts()).hasSize(0); + assertThat(snapshot.percentileValues()).hasSize(0); } @DataProvider @@ -201,6 +261,7 @@ public static Object[][] timerMetrics() { DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_HIGHEST, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_DIGITS, DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_SLO, + DefaultDriverOption.METRICS_NODE_CQL_MESSAGES_PUBLISH_PERCENTILES, }, { DseNodeMetric.GRAPH_MESSAGES, @@ -208,6 +269,7 @@ public static Object[][] timerMetrics() { DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_HIGHEST, DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_DIGITS, DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_SLO, + DseDriverOption.METRICS_NODE_GRAPH_MESSAGES_PUBLISH_PERCENTILES, }, }; } diff --git a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java index 09b3e44bac4..1e2d210335f 100644 --- a/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java +++ b/metrics/micrometer/src/test/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerSessionMetricUpdaterTest.java @@ -59,7 +59,8 @@ public void should_create_timer( DriverOption lowest, DriverOption highest, DriverOption digits, - DriverOption sla) { + DriverOption sla, + DriverOption percentiles) { // given InternalDriverContext context = mock(InternalDriverContext.class); DriverExecutionProfile profile = mock(DriverExecutionProfile.class); @@ -80,6 +81,8 @@ public void should_create_timer( when(profile.isDefined(sla)).thenReturn(true); when(profile.getDurationList(sla)) 
.thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + when(profile.isDefined(percentiles)).thenReturn(true); + when(profile.getDoubleList(percentiles)).thenReturn(Arrays.asList(0.75, 0.95, 0.99)); when(generator.sessionMetricId(metric)).thenReturn(METRIC_ID); SimpleMeterRegistry registry = spy(new SimpleMeterRegistry()); @@ -96,6 +99,61 @@ public void should_create_timer( assertThat(timer.count()).isEqualTo(10); HistogramSnapshot snapshot = timer.takeSnapshot(); assertThat(snapshot.histogramCounts()).hasSize(2); + assertThat(snapshot.percentileValues()).hasSize(3); + assertThat(snapshot.percentileValues()) + .satisfiesExactlyInAnyOrder( + valuePercentile -> assertThat(valuePercentile.percentile()).isEqualTo(0.75), + valuePercentile -> assertThat(valuePercentile.percentile()).isEqualTo(0.95), + valuePercentile -> assertThat(valuePercentile.percentile()).isEqualTo(0.99)); + } + + @Test + @UseDataProvider(value = "timerMetrics") + public void should_not_create_sla_percentiles( + SessionMetric metric, + DriverOption lowest, + DriverOption highest, + DriverOption digits, + DriverOption sla, + DriverOption percentiles) { + // given + InternalDriverContext context = mock(InternalDriverContext.class); + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + DriverConfig config = mock(DriverConfig.class); + MetricIdGenerator generator = mock(MetricIdGenerator.class); + Set enabledMetrics = Collections.singleton(metric); + + // when + when(context.getSessionName()).thenReturn("prefix"); + when(context.getConfig()).thenReturn(config); + when(config.getDefaultProfile()).thenReturn(profile); + when(context.getMetricIdGenerator()).thenReturn(generator); + when(profile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofHours(1)); + when(profile.isDefined(sla)).thenReturn(false); + when(profile.getDurationList(sla)) + .thenReturn(Arrays.asList(Duration.ofMillis(100), Duration.ofMillis(500))); + 
when(profile.getBoolean(DefaultDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS)) + .thenReturn(true); + when(profile.isDefined(percentiles)).thenReturn(false); + when(profile.getDoubleList(percentiles)).thenReturn(Arrays.asList(0.75, 0.95, 0.99)); + when(generator.sessionMetricId(metric)).thenReturn(METRIC_ID); + + SimpleMeterRegistry registry = new SimpleMeterRegistry(); + MicrometerSessionMetricUpdater updater = + new MicrometerSessionMetricUpdater(context, enabledMetrics, registry); + + for (int i = 0; i < 10; i++) { + updater.updateTimer(metric, null, 100, TimeUnit.MILLISECONDS); + } + + // then + Timer timer = registry.find(METRIC_ID.getName()).timer(); + assertThat(timer).isNotNull(); + assertThat(timer.count()).isEqualTo(10); + HistogramSnapshot snapshot = timer.takeSnapshot(); + assertThat(snapshot.histogramCounts()).hasSize(0); + assertThat(snapshot.percentileValues()).hasSize(0); } @DataProvider @@ -107,6 +165,7 @@ public static Object[][] timerMetrics() { DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_HIGHEST, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_DIGITS, DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_SLO, + DefaultDriverOption.METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES, }, { DseSessionMetric.GRAPH_REQUESTS, @@ -114,6 +173,7 @@ public static Object[][] timerMetrics() { DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_HIGHEST, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_DIGITS, DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_SLO, + DseDriverOption.METRICS_SESSION_GRAPH_REQUESTS_PUBLISH_PERCENTILES, }, { DseSessionMetric.CONTINUOUS_CQL_REQUESTS, @@ -121,6 +181,7 @@ public static Object[][] timerMetrics() { DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_HIGHEST, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_DIGITS, DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_SLO, + DseDriverOption.CONTINUOUS_PAGING_METRICS_SESSION_CQL_REQUESTS_PUBLISH_PERCENTILES }, { 
DefaultSessionMetric.THROTTLING_DELAY, @@ -128,6 +189,7 @@ public static Object[][] timerMetrics() { DefaultDriverOption.METRICS_SESSION_THROTTLING_HIGHEST, DefaultDriverOption.METRICS_SESSION_THROTTLING_DIGITS, DefaultDriverOption.METRICS_SESSION_THROTTLING_SLO, + DefaultDriverOption.METRICS_SESSION_THROTTLING_PUBLISH_PERCENTILES }, }; } From 16260261d3df50fcf24fac1fc2d37896c4a111bf Mon Sep 17 00:00:00 2001 From: mck Date: Thu, 13 Jul 2023 15:45:41 +0200 Subject: [PATCH 867/979] Copyright to ASF (see https://incubator.apache.org/ip-clearance/cassandra-java-driver.html ) patch by Mick Semb Wever; reviewed by Henry Hughes for CASSANDRA-18611 --- NOTICE.txt | 20 ++++++++++++ README.md | 8 ++--- bom/pom.xml | 16 +++++----- core-shaded/pom.xml | 16 +++++----- core-shaded/src/assembly/shaded-jar.xml | 14 +++++---- core/pom.xml | 16 +++++----- .../driver/api/core/DseProtocolVersion.java | 14 +++++---- .../dse/driver/api/core/DseSession.java | 14 +++++---- .../driver/api/core/DseSessionBuilder.java | 14 +++++---- .../api/core/auth/BaseDseAuthenticator.java | 14 +++++---- .../core/auth/DseGssApiAuthProviderBase.java | 14 +++++---- .../auth/DsePlainTextAuthProviderBase.java | 14 +++++---- .../ProgrammaticDseGssApiAuthProvider.java | 16 +++++----- .../api/core/auth/ProxyAuthentication.java | 14 +++++---- .../core/config/DseDriverConfigLoader.java | 14 +++++---- .../api/core/config/DseDriverOption.java | 14 +++++---- .../continuous/ContinuousAsyncResultSet.java | 14 +++++---- .../cql/continuous/ContinuousResultSet.java | 14 +++++---- .../cql/continuous/ContinuousSession.java | 18 ++++++----- .../reactive/ContinuousReactiveResultSet.java | 14 +++++---- .../reactive/ContinuousReactiveSession.java | 14 +++++---- .../cql/reactive/ReactiveQueryMetadata.java | 14 +++++---- .../core/cql/reactive/ReactiveResultSet.java | 14 +++++---- .../api/core/cql/reactive/ReactiveRow.java | 14 +++++---- .../core/cql/reactive/ReactiveSession.java | 14 +++++---- 
.../api/core/cql/reactive/package-info.java | 14 +++++---- .../api/core/data/geometry/Geometry.java | 14 +++++---- .../api/core/data/geometry/LineString.java | 14 +++++---- .../driver/api/core/data/geometry/Point.java | 14 +++++---- .../api/core/data/geometry/Polygon.java | 14 +++++---- .../driver/api/core/data/time/DateRange.java | 14 +++++---- .../api/core/data/time/DateRangeBound.java | 14 +++++---- .../core/data/time/DateRangePrecision.java | 14 +++++---- .../api/core/graph/AsyncGraphResultSet.java | 14 +++++---- .../api/core/graph/BatchGraphStatement.java | 14 +++++---- .../graph/BatchGraphStatementBuilder.java | 14 +++++---- .../dse/driver/api/core/graph/DseGraph.java | 14 +++++---- .../DseGraphRemoteConnectionBuilder.java | 14 +++++---- .../api/core/graph/FluentGraphStatement.java | 14 +++++---- .../graph/FluentGraphStatementBuilder.java | 14 +++++---- .../api/core/graph/GraphExecutionInfo.java | 14 +++++---- .../dse/driver/api/core/graph/GraphNode.java | 14 +++++---- .../driver/api/core/graph/GraphResultSet.java | 14 +++++---- .../driver/api/core/graph/GraphSession.java | 14 +++++---- .../driver/api/core/graph/GraphStatement.java | 14 +++++---- .../core/graph/GraphStatementBuilderBase.java | 14 +++++---- .../api/core/graph/PagingEnabledOptions.java | 14 +++++---- .../api/core/graph/ScriptGraphStatement.java | 14 +++++---- .../graph/ScriptGraphStatementBuilder.java | 14 +++++---- .../core/graph/predicates/CqlCollection.java | 14 +++++---- .../driver/api/core/graph/predicates/Geo.java | 14 +++++---- .../api/core/graph/predicates/Search.java | 14 +++++---- .../graph/reactive/ReactiveGraphNode.java | 14 +++++---- .../reactive/ReactiveGraphResultSet.java | 14 +++++---- .../graph/reactive/ReactiveGraphSession.java | 14 +++++---- .../api/core/metadata/DseNodeProperties.java | 14 +++++---- .../metadata/schema/DseAggregateMetadata.java | 14 +++++---- .../metadata/schema/DseColumnMetadata.java | 14 +++++---- .../core/metadata/schema/DseEdgeMetadata.java | 14 
+++++---- .../metadata/schema/DseFunctionMetadata.java | 14 +++++---- .../schema/DseGraphKeyspaceMetadata.java | 14 +++++---- .../schema/DseGraphTableMetadata.java | 14 +++++---- .../metadata/schema/DseIndexMetadata.java | 14 +++++---- .../metadata/schema/DseKeyspaceMetadata.java | 14 +++++---- .../metadata/schema/DseRelationMetadata.java | 14 +++++---- .../metadata/schema/DseTableMetadata.java | 14 +++++---- .../metadata/schema/DseVertexMetadata.java | 14 +++++---- .../core/metadata/schema/DseViewMetadata.java | 14 +++++---- .../api/core/metrics/DseNodeMetric.java | 14 +++++---- .../api/core/metrics/DseSessionMetric.java | 14 +++++---- .../servererrors/UnfitClientException.java | 14 +++++---- .../driver/api/core/type/DseDataTypes.java | 14 +++++---- .../api/core/type/codec/DseTypeCodecs.java | 14 +++++---- .../internal/core/DseProtocolFeature.java | 14 +++++---- .../core/InsightsClientLifecycleListener.java | 14 +++++---- .../driver/internal/core/auth/AuthUtils.java | 14 +++++---- .../core/auth/DseGssApiAuthProvider.java | 16 +++++----- .../core/auth/DsePlainTextAuthProvider.java | 14 +++++---- .../internal/core/cql/DseConversions.java | 14 +++++---- .../ContinuousCqlRequestAsyncProcessor.java | 14 +++++---- .../ContinuousCqlRequestHandler.java | 14 +++++---- .../ContinuousCqlRequestSyncProcessor.java | 14 +++++---- .../ContinuousRequestHandlerBase.java | 14 +++++---- .../DefaultContinuousAsyncResultSet.java | 14 +++++---- .../DefaultContinuousResultSet.java | 14 +++++---- ...ContinuousCqlRequestReactiveProcessor.java | 14 +++++---- .../DefaultContinuousReactiveResultSet.java | 14 +++++---- .../reactive/CqlRequestReactiveProcessor.java | 14 +++++---- .../reactive/DefaultReactiveResultSet.java | 14 +++++---- .../core/cql/reactive/DefaultReactiveRow.java | 14 +++++---- .../core/cql/reactive/EmptySubscription.java | 14 +++++---- .../core/cql/reactive/FailedPublisher.java | 14 +++++---- .../cql/reactive/FailedReactiveResultSet.java | 14 +++++---- 
.../core/cql/reactive/ReactiveOperators.java | 14 +++++---- .../cql/reactive/ReactiveResultSetBase.java | 14 +++++---- .../ReactiveResultSetSubscription.java | 14 +++++---- .../cql/reactive/SimpleUnicastProcessor.java | 14 +++++---- .../core/data/geometry/DefaultGeometry.java | 14 +++++---- .../core/data/geometry/DefaultLineString.java | 14 +++++---- .../core/data/geometry/DefaultPoint.java | 14 +++++---- .../core/data/geometry/DefaultPolygon.java | 14 +++++---- .../internal/core/data/geometry/Distance.java | 14 +++++---- .../geometry/DistanceSerializationProxy.java | 14 +++++---- .../data/geometry/WkbSerializationProxy.java | 14 +++++---- .../internal/core/data/geometry/WkbUtil.java | 14 +++++---- .../internal/core/graph/ByteBufUtil.java | 14 +++++---- .../core/graph/BytecodeGraphStatement.java | 14 +++++---- .../graph/ContinuousAsyncGraphResultSet.java | 14 +++++---- .../graph/ContinuousGraphRequestHandler.java | 14 +++++---- .../core/graph/CqlCollectionPredicate.java | 14 +++++---- .../graph/DefaultAsyncGraphResultSet.java | 14 +++++---- .../graph/DefaultBatchGraphStatement.java | 14 +++++---- .../DefaultDseRemoteConnectionBuilder.java | 14 +++++---- .../graph/DefaultFluentGraphStatement.java | 14 +++++---- .../graph/DefaultScriptGraphStatement.java | 14 +++++---- .../core/graph/DseGraphRemoteConnection.java | 14 +++++---- .../core/graph/DseGraphTraversal.java | 14 +++++---- .../internal/core/graph/DsePredicate.java | 14 +++++---- .../internal/core/graph/EditDistance.java | 14 +++++---- .../internal/core/graph/GeoPredicate.java | 14 +++++---- .../driver/internal/core/graph/GeoUtils.java | 14 +++++---- .../internal/core/graph/GraphConversions.java | 14 +++++---- .../graph/GraphExecutionInfoConverter.java | 14 +++++---- .../internal/core/graph/GraphProtocol.java | 14 +++++---- .../graph/GraphRequestAsyncProcessor.java | 14 +++++---- .../core/graph/GraphRequestHandler.java | 14 +++++---- .../core/graph/GraphRequestSyncProcessor.java | 14 +++++---- 
.../core/graph/GraphResultIterator.java | 14 +++++---- .../internal/core/graph/GraphResultSets.java | 14 +++++---- .../internal/core/graph/GraphSON1SerdeTP.java | 14 +++++---- .../internal/core/graph/GraphSON2SerdeTP.java | 14 +++++---- .../internal/core/graph/GraphSONUtils.java | 14 +++++---- .../core/graph/GraphStatementBase.java | 14 +++++---- .../core/graph/GraphSupportChecker.java | 14 +++++---- .../internal/core/graph/LegacyGraphNode.java | 14 +++++---- .../core/graph/MultiPageGraphResultSet.java | 14 +++++---- .../internal/core/graph/ObjectGraphNode.java | 14 +++++---- .../internal/core/graph/SearchPredicate.java | 14 +++++---- .../internal/core/graph/SearchUtils.java | 14 +++++---- .../core/graph/SinglePageGraphResultSet.java | 14 +++++---- .../core/graph/TinkerpopBufferUtil.java | 14 +++++---- ...actDynamicGraphBinaryCustomSerializer.java | 14 +++++---- ...ractSimpleGraphBinaryCustomSerializer.java | 14 +++++---- .../binary/ComplexTypeSerializerUtil.java | 14 +++++---- .../graph/binary/CqlDurationSerializer.java | 14 +++++---- .../core/graph/binary/DistanceSerializer.java | 14 +++++---- .../graph/binary/EditDistanceSerializer.java | 14 +++++---- .../core/graph/binary/GeometrySerializer.java | 14 +++++---- .../core/graph/binary/GraphBinaryModule.java | 14 +++++---- .../core/graph/binary/GraphBinaryUtils.java | 14 +++++---- .../graph/binary/LineStringSerializer.java | 14 +++++---- .../core/graph/binary/PairSerializer.java | 14 +++++---- .../core/graph/binary/PointSerializer.java | 14 +++++---- .../core/graph/binary/PolygonSerializer.java | 14 +++++---- .../graph/binary/TupleValueSerializer.java | 14 +++++---- .../core/graph/binary/UdtValueSerializer.java | 14 +++++---- .../graph/binary/buffer/DseNettyBuffer.java | 14 +++++---- .../binary/buffer/DseNettyBufferFactory.java | 14 +++++---- .../reactive/DefaultReactiveGraphNode.java | 14 +++++---- .../DefaultReactiveGraphResultSet.java | 14 +++++---- .../FailedReactiveGraphResultSet.java | 14 +++++---- 
.../ReactiveGraphRequestProcessor.java | 14 +++++---- .../ReactiveGraphResultSetSubscription.java | 14 +++++---- .../core/insights/AddressFormatter.java | 14 +++++---- .../insights/ConfigAntiPatternsFinder.java | 14 +++++---- .../core/insights/DataCentersFinder.java | 14 +++++---- .../insights/ExecutionProfilesInfoFinder.java | 14 +++++---- .../core/insights/InsightsClient.java | 14 +++++---- .../insights/InsightsSupportVerifier.java | 14 +++++---- .../internal/core/insights/PackageUtil.java | 14 +++++---- .../core/insights/PlatformInfoFinder.java | 14 +++++---- .../ReconnectionPolicyInfoFinder.java | 14 +++++---- .../configuration/InsightsConfiguration.java | 14 +++++---- .../InsightEventFormatException.java | 14 +++++---- .../insights/schema/AuthProviderType.java | 14 +++++---- .../core/insights/schema/Insight.java | 14 +++++---- .../core/insights/schema/InsightMetadata.java | 14 +++++---- .../core/insights/schema/InsightType.java | 14 +++++---- .../insights/schema/InsightsPlatformInfo.java | 14 +++++---- .../insights/schema/InsightsStartupData.java | 14 +++++---- .../insights/schema/InsightsStatusData.java | 14 +++++---- .../insights/schema/LoadBalancingInfo.java | 14 +++++---- .../schema/PoolSizeByHostDistance.java | 14 +++++---- .../schema/ReconnectionPolicyInfo.java | 14 +++++---- .../internal/core/insights/schema/SSL.java | 14 +++++---- .../insights/schema/SessionStateForNode.java | 14 +++++---- .../schema/SpecificExecutionProfile.java | 14 +++++---- .../schema/SpeculativeExecutionInfo.java | 14 +++++---- .../DseDcInferringLoadBalancingPolicy.java | 14 +++++---- .../loadbalancing/DseLoadBalancingPolicy.java | 14 +++++---- .../schema/DefaultDseAggregateMetadata.java | 14 +++++---- .../schema/DefaultDseColumnMetadata.java | 14 +++++---- .../schema/DefaultDseEdgeMetadata.java | 14 +++++---- .../schema/DefaultDseFunctionMetadata.java | 14 +++++---- .../schema/DefaultDseIndexMetadata.java | 14 +++++---- .../schema/DefaultDseKeyspaceMetadata.java | 14 +++++---- 
.../schema/DefaultDseTableMetadata.java | 14 +++++---- .../schema/DefaultDseVertexMetadata.java | 14 +++++---- .../schema/DefaultDseViewMetadata.java | 14 +++++---- .../core/metadata/schema/ScriptHelper.java | 14 +++++---- .../schema/parsing/DseAggregateParser.java | 14 +++++---- .../schema/parsing/DseFunctionParser.java | 14 +++++---- .../schema/parsing/DseSchemaParser.java | 14 +++++---- .../schema/parsing/DseTableParser.java | 14 +++++---- .../schema/parsing/DseViewParser.java | 14 +++++---- .../TinkerpopBufferPrimitiveCodec.java | 14 +++++---- .../internal/core/search/DateRangeUtil.java | 14 +++++---- .../core/session/DefaultDseSession.java | 14 +++++---- .../type/codec/DseTypeCodecsRegistrar.java | 14 +++++---- .../DseTypeCodecsRegistrarSubstitutions.java | 14 +++++---- .../type/codec/geometry/GeometryCodec.java | 14 +++++---- .../type/codec/geometry/LineStringCodec.java | 14 +++++---- .../core/type/codec/geometry/PointCodec.java | 14 +++++---- .../type/codec/geometry/PolygonCodec.java | 14 +++++---- .../core/type/codec/time/DateRangeCodec.java | 14 +++++---- .../concurrent/BoundedConcurrentQueue.java | 14 +++++---- .../api/core/AllNodesFailedException.java | 14 +++++---- .../driver/api/core/AsyncAutoCloseable.java | 14 +++++---- .../driver/api/core/AsyncPagingIterable.java | 14 +++++---- .../oss/driver/api/core/ConsistencyLevel.java | 14 +++++---- .../oss/driver/api/core/CqlIdentifier.java | 14 +++++---- .../oss/driver/api/core/CqlSession.java | 14 +++++---- .../driver/api/core/CqlSessionBuilder.java | 14 +++++---- .../api/core/DefaultConsistencyLevel.java | 14 +++++---- .../api/core/DefaultProtocolVersion.java | 14 +++++---- .../oss/driver/api/core/DriverException.java | 14 +++++---- .../api/core/DriverExecutionException.java | 14 +++++---- .../api/core/DriverTimeoutException.java | 14 +++++---- .../api/core/InvalidKeyspaceException.java | 14 +++++---- .../api/core/MappedAsyncPagingIterable.java | 14 +++++---- .../oss/driver/api/core/MavenCoordinates.java | 
14 +++++---- .../api/core/NoNodeAvailableException.java | 14 +++++---- .../api/core/NodeUnavailableException.java | 14 +++++---- .../oss/driver/api/core/PagingIterable.java | 14 +++++---- .../oss/driver/api/core/ProtocolVersion.java | 14 +++++---- .../api/core/RequestThrottlingException.java | 14 +++++---- .../UnsupportedProtocolVersionException.java | 14 +++++---- .../datastax/oss/driver/api/core/Version.java | 14 +++++---- .../addresstranslation/AddressTranslator.java | 14 +++++---- .../driver/api/core/auth/AuthProvider.java | 14 +++++---- .../core/auth/AuthenticationException.java | 14 +++++---- .../driver/api/core/auth/Authenticator.java | 14 +++++---- .../core/auth/PlainTextAuthProviderBase.java | 14 +++++---- .../ProgrammaticPlainTextAuthProvider.java | 14 +++++---- .../api/core/auth/SyncAuthenticator.java | 14 +++++---- .../driver/api/core/auth/package-info.java | 14 +++++---- .../api/core/config/DefaultDriverOption.java | 14 +++++---- .../driver/api/core/config/DriverConfig.java | 14 +++++---- .../api/core/config/DriverConfigLoader.java | 14 +++++---- .../core/config/DriverExecutionProfile.java | 14 +++++---- .../driver/api/core/config/DriverOption.java | 14 +++++---- .../api/core/config/OngoingConfigOptions.java | 14 +++++---- .../driver/api/core/config/OptionsMap.java | 14 +++++---- ...ProgrammaticDriverConfigLoaderBuilder.java | 14 +++++---- .../api/core/config/TypedDriverOption.java | 14 +++++---- .../driver/api/core/config/package-info.java | 14 +++++---- .../connection/BusyConnectionException.java | 14 +++++---- .../connection/ClosedConnectionException.java | 14 +++++---- .../connection/ConnectionInitException.java | 14 +++++---- .../core/connection/CrcMismatchException.java | 14 +++++---- .../connection/FrameTooLongException.java | 14 +++++---- .../core/connection/HeartbeatException.java | 14 +++++---- .../core/connection/ReconnectionPolicy.java | 14 +++++---- .../api/core/connection/package-info.java | 14 +++++---- 
.../api/core/context/DriverContext.java | 14 +++++---- .../driver/api/core/cql/AsyncCqlSession.java | 14 +++++---- .../driver/api/core/cql/AsyncResultSet.java | 14 +++++---- .../driver/api/core/cql/BatchStatement.java | 14 +++++---- .../api/core/cql/BatchStatementBuilder.java | 14 +++++---- .../oss/driver/api/core/cql/BatchType.java | 14 +++++---- .../api/core/cql/BatchableStatement.java | 14 +++++---- .../oss/driver/api/core/cql/Bindable.java | 14 +++++---- .../driver/api/core/cql/BoundStatement.java | 14 +++++---- .../api/core/cql/BoundStatementBuilder.java | 14 +++++---- .../driver/api/core/cql/ColumnDefinition.java | 14 +++++---- .../api/core/cql/ColumnDefinitions.java | 14 +++++---- .../driver/api/core/cql/DefaultBatchType.java | 14 +++++---- .../driver/api/core/cql/ExecutionInfo.java | 14 +++++---- .../oss/driver/api/core/cql/PagingState.java | 14 +++++---- .../driver/api/core/cql/PrepareRequest.java | 14 +++++---- .../api/core/cql/PreparedStatement.java | 14 +++++---- .../oss/driver/api/core/cql/QueryTrace.java | 14 +++++---- .../oss/driver/api/core/cql/ResultSet.java | 14 +++++---- .../datastax/oss/driver/api/core/cql/Row.java | 14 +++++---- .../driver/api/core/cql/SimpleStatement.java | 14 +++++---- .../api/core/cql/SimpleStatementBuilder.java | 14 +++++---- .../oss/driver/api/core/cql/Statement.java | 14 +++++---- .../driver/api/core/cql/StatementBuilder.java | 14 +++++---- .../driver/api/core/cql/SyncCqlSession.java | 14 +++++---- .../oss/driver/api/core/cql/TraceEvent.java | 14 +++++---- .../driver/api/core/data/AccessibleById.java | 14 +++++---- .../api/core/data/AccessibleByIndex.java | 14 +++++---- .../api/core/data/AccessibleByName.java | 14 +++++---- .../oss/driver/api/core/data/ByteUtils.java | 14 +++++---- .../oss/driver/api/core/data/CqlDuration.java | 14 +++++---- .../oss/driver/api/core/data/CqlVector.java | 14 +++++---- .../oss/driver/api/core/data/Data.java | 14 +++++---- .../driver/api/core/data/GettableById.java | 14 +++++---- 
.../driver/api/core/data/GettableByIndex.java | 14 +++++---- .../driver/api/core/data/GettableByName.java | 14 +++++---- .../driver/api/core/data/SettableById.java | 14 +++++---- .../driver/api/core/data/SettableByIndex.java | 14 +++++---- .../driver/api/core/data/SettableByName.java | 14 +++++---- .../oss/driver/api/core/data/TupleValue.java | 14 +++++---- .../oss/driver/api/core/data/UdtValue.java | 14 +++++---- .../api/core/detach/AttachmentPoint.java | 14 +++++---- .../driver/api/core/detach/Detachable.java | 16 +++++----- .../loadbalancing/LoadBalancingPolicy.java | 14 +++++---- .../api/core/loadbalancing/NodeDistance.java | 14 +++++---- .../loadbalancing/NodeDistanceEvaluator.java | 14 +++++---- .../driver/api/core/metadata/EndPoint.java | 14 +++++---- .../driver/api/core/metadata/Metadata.java | 14 +++++---- .../oss/driver/api/core/metadata/Node.java | 14 +++++---- .../driver/api/core/metadata/NodeState.java | 14 +++++---- .../api/core/metadata/NodeStateListener.java | 14 +++++---- .../core/metadata/NodeStateListenerBase.java | 14 +++++---- .../metadata/SafeInitNodeStateListener.java | 14 +++++---- .../driver/api/core/metadata/TokenMap.java | 14 +++++---- .../metadata/schema/AggregateMetadata.java | 14 +++++---- .../core/metadata/schema/ClusteringOrder.java | 14 +++++---- .../core/metadata/schema/ColumnMetadata.java | 14 +++++---- .../api/core/metadata/schema/Describable.java | 14 +++++---- .../metadata/schema/FunctionMetadata.java | 14 +++++---- .../metadata/schema/FunctionSignature.java | 14 +++++---- .../api/core/metadata/schema/IndexKind.java | 14 +++++---- .../core/metadata/schema/IndexMetadata.java | 14 +++++---- .../metadata/schema/KeyspaceMetadata.java | 14 +++++---- .../metadata/schema/RelationMetadata.java | 14 +++++---- .../metadata/schema/SchemaChangeListener.java | 14 +++++---- .../schema/SchemaChangeListenerBase.java | 14 +++++---- .../core/metadata/schema/TableMetadata.java | 14 +++++---- .../core/metadata/schema/ViewMetadata.java | 14 
+++++---- .../driver/api/core/metadata/token/Token.java | 14 +++++---- .../api/core/metadata/token/TokenRange.java | 14 +++++---- .../api/core/metrics/DefaultNodeMetric.java | 14 +++++---- .../core/metrics/DefaultSessionMetric.java | 14 +++++---- .../oss/driver/api/core/metrics/Metrics.java | 14 +++++---- .../driver/api/core/metrics/NodeMetric.java | 14 +++++---- .../api/core/metrics/SessionMetric.java | 14 +++++---- .../oss/driver/api/core/package-info.java | 14 +++++---- .../driver/api/core/paging/OffsetPager.java | 14 +++++---- .../driver/api/core/retry/RetryDecision.java | 14 +++++---- .../driver/api/core/retry/RetryPolicy.java | 14 +++++---- .../driver/api/core/retry/RetryVerdict.java | 14 +++++---- .../servererrors/AlreadyExistsException.java | 14 +++++---- .../servererrors/BootstrappingException.java | 14 +++++---- .../CASWriteUnknownException.java | 14 +++++---- .../CDCWriteFailureException.java | 14 +++++---- .../servererrors/CoordinatorException.java | 14 +++++---- .../core/servererrors/DefaultWriteType.java | 14 +++++---- .../FunctionFailureException.java | 14 +++++---- .../InvalidConfigurationInQueryException.java | 14 +++++---- .../servererrors/InvalidQueryException.java | 14 +++++---- .../servererrors/OverloadedException.java | 14 +++++---- .../api/core/servererrors/ProtocolError.java | 14 +++++---- .../QueryConsistencyException.java | 14 +++++---- .../servererrors/QueryExecutionException.java | 14 +++++---- .../QueryValidationException.java | 14 +++++---- .../servererrors/ReadFailureException.java | 14 +++++---- .../servererrors/ReadTimeoutException.java | 14 +++++---- .../api/core/servererrors/ServerError.java | 14 +++++---- .../api/core/servererrors/SyntaxError.java | 14 +++++---- .../core/servererrors/TruncateException.java | 14 +++++---- .../servererrors/UnauthorizedException.java | 14 +++++---- .../servererrors/UnavailableException.java | 14 +++++---- .../servererrors/WriteFailureException.java | 14 +++++---- 
.../servererrors/WriteTimeoutException.java | 14 +++++---- .../api/core/servererrors/WriteType.java | 14 +++++---- .../core/session/ProgrammaticArguments.java | 14 +++++---- .../oss/driver/api/core/session/Request.java | 14 +++++---- .../oss/driver/api/core/session/Session.java | 14 +++++---- .../api/core/session/SessionBuilder.java | 14 +++++---- .../session/throttling/RequestThrottler.java | 14 +++++---- .../core/session/throttling/Throttled.java | 14 +++++---- .../specex/SpeculativeExecutionPolicy.java | 14 +++++---- .../ssl/ProgrammaticSslEngineFactory.java | 14 +++++---- .../driver/api/core/ssl/SslEngineFactory.java | 14 +++++---- .../oss/driver/api/core/ssl/package-info.java | 14 +++++---- .../api/core/time/TimestampGenerator.java | 14 +++++---- .../api/core/tracker/RequestTracker.java | 14 +++++---- .../driver/api/core/type/ContainerType.java | 14 +++++---- .../oss/driver/api/core/type/CustomType.java | 14 +++++---- .../oss/driver/api/core/type/DataType.java | 14 +++++---- .../oss/driver/api/core/type/DataTypes.java | 14 +++++---- .../oss/driver/api/core/type/ListType.java | 14 +++++---- .../oss/driver/api/core/type/MapType.java | 14 +++++---- .../oss/driver/api/core/type/SetType.java | 14 +++++---- .../oss/driver/api/core/type/TupleType.java | 14 +++++---- .../driver/api/core/type/UserDefinedType.java | 14 +++++---- .../oss/driver/api/core/type/VectorType.java | 14 +++++---- .../type/codec/CodecNotFoundException.java | 14 +++++---- .../api/core/type/codec/ExtraTypeCodecs.java | 14 +++++---- .../api/core/type/codec/MappingCodec.java | 14 +++++---- .../type/codec/PrimitiveBooleanCodec.java | 14 +++++---- .../core/type/codec/PrimitiveByteCodec.java | 14 +++++---- .../core/type/codec/PrimitiveDoubleCodec.java | 14 +++++---- .../core/type/codec/PrimitiveFloatCodec.java | 14 +++++---- .../core/type/codec/PrimitiveIntCodec.java | 14 +++++---- .../core/type/codec/PrimitiveLongCodec.java | 14 +++++---- .../core/type/codec/PrimitiveShortCodec.java | 14 +++++---- 
.../driver/api/core/type/codec/TypeCodec.java | 14 +++++---- .../api/core/type/codec/TypeCodecs.java | 14 +++++---- .../type/codec/registry/CodecRegistry.java | 14 +++++---- .../codec/registry/MutableCodecRegistry.java | 14 +++++---- .../api/core/type/reflect/GenericType.java | 14 +++++---- .../type/reflect/GenericTypeParameter.java | 14 +++++---- .../oss/driver/api/core/uuid/Uuids.java | 14 +++++---- .../datastax/oss/driver/api/package-info.java | 14 +++++---- .../core/AsyncPagingIterableWrapper.java | 14 +++++---- .../core/ConsistencyLevelRegistry.java | 14 +++++---- .../driver/internal/core/ContactPoints.java | 14 +++++---- .../driver/internal/core/CqlIdentifiers.java | 14 +++++---- .../core/DefaultConsistencyLevelRegistry.java | 14 +++++---- .../core/DefaultMavenCoordinates.java | 14 +++++---- .../internal/core/DefaultProtocolFeature.java | 14 +++++---- .../core/DefaultProtocolVersionRegistry.java | 14 +++++---- .../internal/core/PagingIterableWrapper.java | 14 +++++---- .../driver/internal/core/ProtocolFeature.java | 14 +++++---- .../core/ProtocolVersionRegistry.java | 14 +++++---- .../Ec2MultiRegionAddressTranslator.java | 14 +++++---- .../FixedHostNameAddressTranslator.java | 14 +++++---- .../PassThroughAddressTranslator.java | 14 +++++---- .../adminrequest/AdminRequestHandler.java | 14 +++++---- .../core/adminrequest/AdminResult.java | 14 +++++---- .../internal/core/adminrequest/AdminRow.java | 14 +++++---- .../ThrottledAdminRequestHandler.java | 14 +++++---- .../UnexpectedResponseException.java | 14 +++++---- .../core/adminrequest/package-info.java | 14 +++++---- .../core/auth/PlainTextAuthProvider.java | 14 +++++---- .../internal/core/channel/ChannelEvent.java | 14 +++++---- .../internal/core/channel/ChannelFactory.java | 14 +++++---- .../core/channel/ChannelHandlerRequest.java | 14 +++++---- .../channel/ClusterNameMismatchException.java | 14 +++++---- .../core/channel/ConnectInitHandler.java | 14 +++++---- .../core/channel/DefaultWriteCoalescer.java | 14 
+++++---- .../internal/core/channel/DriverChannel.java | 14 +++++---- .../core/channel/DriverChannelOptions.java | 14 +++++---- .../internal/core/channel/EventCallback.java | 14 +++++---- .../core/channel/HeartbeatHandler.java | 14 +++++---- .../core/channel/InFlightHandler.java | 14 +++++---- .../core/channel/InboundTrafficMeter.java | 14 +++++---- .../core/channel/OutboundTrafficMeter.java | 14 +++++---- .../channel/PassThroughWriteCoalescer.java | 14 +++++---- .../core/channel/ProtocolInitHandler.java | 14 +++++---- .../core/channel/ResponseCallback.java | 14 +++++---- .../core/channel/StreamIdGenerator.java | 14 +++++---- .../internal/core/channel/WriteCoalescer.java | 14 +++++---- .../internal/core/channel/package-info.java | 14 +++++---- .../core/config/ConfigChangeEvent.java | 14 +++++---- .../core/config/DerivedExecutionProfile.java | 14 +++++---- .../config/DriverOptionConfigBuilder.java | 14 +++++---- .../core/config/cloud/CloudConfig.java | 14 +++++---- .../core/config/cloud/CloudConfigFactory.java | 14 +++++---- .../composite/CompositeDriverConfig.java | 14 +++++---- .../CompositeDriverConfigLoader.java | 14 +++++---- .../CompositeDriverExecutionProfile.java | 14 +++++---- .../core/config/map/MapBasedDriverConfig.java | 14 +++++---- .../map/MapBasedDriverConfigLoader.java | 14 +++++---- .../map/MapBasedDriverExecutionProfile.java | 14 +++++---- .../typesafe/DefaultDriverConfigLoader.java | 14 +++++---- .../DefaultDriverConfigLoaderBuilder.java | 14 +++++---- ...ProgrammaticDriverConfigLoaderBuilder.java | 14 +++++---- .../config/typesafe/TypesafeDriverConfig.java | 14 +++++---- .../TypesafeDriverExecutionProfile.java | 14 +++++---- .../core/config/typesafe/package-info.java | 14 +++++---- .../ConstantReconnectionPolicy.java | 14 +++++---- .../ExponentialReconnectionPolicy.java | 14 +++++---- .../core/context/DefaultDriverContext.java | 14 +++++---- .../core/context/DefaultNettyOptions.java | 14 +++++---- .../internal/core/context/EventBus.java | 14 
+++++---- .../core/context/InternalDriverContext.java | 14 +++++---- .../core/context/LifecycleListener.java | 14 +++++---- .../internal/core/context/NettyOptions.java | 14 +++++---- .../core/context/StartupOptionsBuilder.java | 14 +++++---- .../core/control/ControlConnection.java | 14 +++++---- .../driver/internal/core/cql/Conversions.java | 14 +++++---- .../core/cql/CqlPrepareAsyncProcessor.java | 14 +++++---- .../internal/core/cql/CqlPrepareHandler.java | 14 +++++---- .../core/cql/CqlPrepareSyncProcessor.java | 14 +++++---- .../core/cql/CqlRequestAsyncProcessor.java | 14 +++++---- .../internal/core/cql/CqlRequestHandler.java | 14 +++++---- .../core/cql/CqlRequestSyncProcessor.java | 14 +++++---- .../core/cql/DefaultAsyncResultSet.java | 14 +++++---- .../core/cql/DefaultBatchStatement.java | 14 +++++---- .../core/cql/DefaultBoundStatement.java | 14 +++++---- .../core/cql/DefaultColumnDefinition.java | 14 +++++---- .../core/cql/DefaultColumnDefinitions.java | 14 +++++---- .../core/cql/DefaultExecutionInfo.java | 14 +++++---- .../internal/core/cql/DefaultPagingState.java | 14 +++++---- .../core/cql/DefaultPrepareRequest.java | 14 +++++---- .../core/cql/DefaultPreparedStatement.java | 14 +++++---- .../internal/core/cql/DefaultQueryTrace.java | 14 +++++---- .../driver/internal/core/cql/DefaultRow.java | 14 +++++---- .../core/cql/DefaultSimpleStatement.java | 14 +++++---- .../internal/core/cql/DefaultTraceEvent.java | 14 +++++---- .../core/cql/EmptyColumnDefinitions.java | 14 +++++---- .../internal/core/cql/MultiPageResultSet.java | 14 +++++---- .../core/cql/PagingIterableSpliterator.java | 14 +++++---- .../internal/core/cql/QueryTraceFetcher.java | 14 +++++---- .../driver/internal/core/cql/ResultSets.java | 14 +++++---- .../core/cql/SinglePageResultSet.java | 14 +++++---- .../internal/core/data/DefaultTupleValue.java | 14 +++++---- .../internal/core/data/DefaultUdtValue.java | 14 +++++---- .../internal/core/data/IdentifierIndex.java | 14 +++++---- 
.../internal/core/data/ValuesHelper.java | 14 +++++---- .../BasicLoadBalancingPolicy.java | 14 +++++---- .../DcInferringLoadBalancingPolicy.java | 14 +++++---- .../DefaultLoadBalancingPolicy.java | 14 +++++---- .../DefaultNodeDistanceEvaluatorHelper.java | 14 +++++---- .../helper/InferringLocalDcHelper.java | 14 +++++---- .../loadbalancing/helper/LocalDcHelper.java | 14 +++++---- .../helper/MandatoryLocalDcHelper.java | 14 +++++---- .../helper/NodeDistanceEvaluatorHelper.java | 14 +++++---- .../NodeFilterToDistanceEvaluatorAdapter.java | 14 +++++---- .../helper/OptionalLocalDcHelper.java | 14 +++++---- .../nodeset/DcAgnosticNodeSet.java | 14 +++++---- .../loadbalancing/nodeset/MultiDcNodeSet.java | 14 +++++---- .../core/loadbalancing/nodeset/NodeSet.java | 14 +++++---- .../nodeset/SingleDcNodeSet.java | 14 +++++---- .../core/metadata/AddNodeRefresh.java | 14 +++++---- .../core/metadata/CloudTopologyMonitor.java | 14 +++++---- .../core/metadata/DefaultEndPoint.java | 14 +++++---- .../core/metadata/DefaultMetadata.java | 14 +++++---- .../internal/core/metadata/DefaultNode.java | 14 +++++---- .../core/metadata/DefaultNodeInfo.java | 14 +++++---- .../core/metadata/DefaultTopologyMonitor.java | 14 +++++---- .../internal/core/metadata/DistanceEvent.java | 14 +++++---- .../core/metadata/FullNodeListRefresh.java | 14 +++++---- .../core/metadata/InitialNodeListRefresh.java | 14 +++++---- .../metadata/LoadBalancingPolicyWrapper.java | 14 +++++---- .../core/metadata/MetadataManager.java | 14 +++++---- .../core/metadata/MetadataRefresh.java | 14 +++++---- .../MultiplexingNodeStateListener.java | 14 +++++---- .../internal/core/metadata/NodeInfo.java | 14 +++++---- .../core/metadata/NodeStateEvent.java | 14 +++++---- .../core/metadata/NodeStateManager.java | 14 +++++---- .../internal/core/metadata/NodesRefresh.java | 14 +++++---- .../core/metadata/NoopNodeStateListener.java | 14 +++++---- .../core/metadata/PeerRowValidator.java | 14 +++++---- 
.../core/metadata/RemoveNodeRefresh.java | 14 +++++---- .../core/metadata/SchemaAgreementChecker.java | 14 +++++---- .../internal/core/metadata/SniEndPoint.java | 14 +++++---- .../core/metadata/TokensChangedRefresh.java | 14 +++++---- .../internal/core/metadata/TopologyEvent.java | 14 +++++---- .../core/metadata/TopologyMonitor.java | 14 +++++---- .../schema/DefaultAggregateMetadata.java | 14 +++++---- .../schema/DefaultColumnMetadata.java | 14 +++++---- .../schema/DefaultFunctionMetadata.java | 14 +++++---- .../metadata/schema/DefaultIndexMetadata.java | 14 +++++---- .../schema/DefaultKeyspaceMetadata.java | 14 +++++---- .../metadata/schema/DefaultTableMetadata.java | 14 +++++---- .../metadata/schema/DefaultViewMetadata.java | 14 +++++---- .../MultiplexingSchemaChangeListener.java | 14 +++++---- .../schema/NoopSchemaChangeListener.java | 14 +++++---- .../metadata/schema/SchemaChangeType.java | 14 +++++---- .../core/metadata/schema/ScriptBuilder.java | 14 +++++---- .../schema/ShallowUserDefinedType.java | 14 +++++---- .../schema/events/AggregateChangeEvent.java | 14 +++++---- .../schema/events/FunctionChangeEvent.java | 14 +++++---- .../schema/events/KeyspaceChangeEvent.java | 14 +++++---- .../schema/events/TableChangeEvent.java | 14 +++++---- .../schema/events/TypeChangeEvent.java | 14 +++++---- .../schema/events/ViewChangeEvent.java | 14 +++++---- .../schema/parsing/AggregateParser.java | 14 +++++---- .../schema/parsing/CassandraSchemaParser.java | 14 +++++---- .../DataTypeClassNameCompositeParser.java | 14 +++++---- .../parsing/DataTypeClassNameParser.java | 14 +++++---- .../schema/parsing/DataTypeCqlNameParser.java | 14 +++++---- .../schema/parsing/DataTypeParser.java | 14 +++++---- .../parsing/DefaultSchemaParserFactory.java | 14 +++++---- .../schema/parsing/FunctionParser.java | 14 +++++---- .../metadata/schema/parsing/RawColumn.java | 14 +++++---- .../schema/parsing/RelationParser.java | 14 +++++---- .../metadata/schema/parsing/SchemaParser.java | 14 
+++++---- .../schema/parsing/SchemaParserFactory.java | 14 +++++---- .../schema/parsing/SimpleJsonParser.java | 14 +++++---- .../metadata/schema/parsing/TableParser.java | 14 +++++---- .../schema/parsing/UserDefinedTypeParser.java | 14 +++++---- .../metadata/schema/parsing/ViewParser.java | 14 +++++---- .../queries/Cassandra21SchemaQueries.java | 14 +++++---- .../queries/Cassandra22SchemaQueries.java | 14 +++++---- .../queries/Cassandra3SchemaQueries.java | 14 +++++---- .../queries/Cassandra4SchemaQueries.java | 14 +++++---- .../queries/CassandraSchemaQueries.java | 14 +++++---- .../schema/queries/CassandraSchemaRows.java | 14 +++++---- .../queries/DefaultSchemaQueriesFactory.java | 14 +++++---- .../schema/queries/Dse68SchemaQueries.java | 14 +++++---- .../schema/queries/KeyspaceFilter.java | 14 +++++---- .../queries/RuleBasedKeyspaceFilter.java | 14 +++++---- .../schema/queries/SchemaQueries.java | 14 +++++---- .../schema/queries/SchemaQueriesFactory.java | 14 +++++---- .../metadata/schema/queries/SchemaRows.java | 14 +++++---- .../schema/refresh/SchemaRefresh.java | 14 +++++---- .../core/metadata/token/ByteOrderedToken.java | 14 +++++---- .../token/ByteOrderedTokenFactory.java | 14 +++++---- .../metadata/token/ByteOrderedTokenRange.java | 14 +++++---- .../token/CanonicalNodeSetBuilder.java | 14 +++++---- .../DefaultReplicationStrategyFactory.java | 14 +++++---- .../token/DefaultTokenFactoryRegistry.java | 14 +++++---- .../core/metadata/token/DefaultTokenMap.java | 14 +++++---- .../token/EverywhereReplicationStrategy.java | 14 +++++---- .../core/metadata/token/KeyspaceTokenMap.java | 14 +++++---- .../token/LocalReplicationStrategy.java | 14 +++++---- .../core/metadata/token/Murmur3Token.java | 14 +++++---- .../metadata/token/Murmur3TokenFactory.java | 14 +++++---- .../metadata/token/Murmur3TokenRange.java | 14 +++++---- .../NetworkTopologyReplicationStrategy.java | 14 +++++---- .../core/metadata/token/RandomToken.java | 14 +++++---- 
.../metadata/token/RandomTokenFactory.java | 14 +++++---- .../core/metadata/token/RandomTokenRange.java | 14 +++++---- .../metadata/token/ReplicationFactor.java | 14 +++++---- .../metadata/token/ReplicationStrategy.java | 14 +++++---- .../token/ReplicationStrategyFactory.java | 14 +++++---- .../token/SimpleReplicationStrategy.java | 14 +++++---- .../core/metadata/token/TokenFactory.java | 14 +++++---- .../metadata/token/TokenFactoryRegistry.java | 14 +++++---- .../core/metadata/token/TokenRangeBase.java | 14 +++++---- .../core/metrics/AbstractMetricUpdater.java | 14 +++++---- .../core/metrics/DefaultMetricId.java | 14 +++++---- .../metrics/DefaultMetricIdGenerator.java | 14 +++++---- .../internal/core/metrics/DefaultMetrics.java | 14 +++++---- .../core/metrics/DefaultMetricsFactory.java | 14 +++++---- .../DefaultMetricsFactorySubstitutions.java | 14 +++++---- .../core/metrics/DropwizardMetricUpdater.java | 14 +++++---- .../metrics/DropwizardMetricsFactory.java | 14 +++++---- .../metrics/DropwizardNodeMetricUpdater.java | 14 +++++---- .../DropwizardSessionMetricUpdater.java | 14 +++++---- .../internal/core/metrics/HdrReservoir.java | 14 +++++---- .../internal/core/metrics/MetricId.java | 14 +++++---- .../core/metrics/MetricIdGenerator.java | 14 +++++---- .../internal/core/metrics/MetricPaths.java | 14 +++++---- .../internal/core/metrics/MetricUpdater.java | 14 +++++---- .../internal/core/metrics/MetricsFactory.java | 14 +++++---- .../core/metrics/NodeMetricUpdater.java | 14 +++++---- .../core/metrics/NoopMetricsFactory.java | 14 +++++---- .../core/metrics/NoopNodeMetricUpdater.java | 14 +++++---- .../metrics/NoopSessionMetricUpdater.java | 14 +++++---- .../core/metrics/SessionMetricUpdater.java | 14 +++++---- .../metrics/TaggingMetricIdGenerator.java | 14 +++++---- .../oss/driver/internal/core/os/CpuInfo.java | 14 +++++---- .../driver/internal/core/os/EmptyLibc.java | 14 +++++---- .../driver/internal/core/os/GraalGetpid.java | 14 +++++---- 
.../driver/internal/core/os/GraalLibc.java | 14 +++++---- .../oss/driver/internal/core/os/JnrLibc.java | 14 +++++---- .../internal/core/os/JnrLibcSubstitution.java | 14 +++++---- .../oss/driver/internal/core/os/Libc.java | 14 +++++---- .../oss/driver/internal/core/os/Native.java | 14 +++++---- .../internal/core/pool/ChannelPool.java | 14 +++++---- .../core/pool/ChannelPoolFactory.java | 14 +++++---- .../driver/internal/core/pool/ChannelSet.java | 14 +++++---- .../core/protocol/BuiltInCompressors.java | 14 +++++---- .../core/protocol/ByteBufCompressor.java | 14 +++++---- .../core/protocol/ByteBufPrimitiveCodec.java | 14 +++++---- .../core/protocol/ByteBufSegmentBuilder.java | 14 +++++---- .../core/protocol/BytesToSegmentDecoder.java | 14 +++++---- .../protocol/CompressorSubstitutions.java | 14 +++++---- .../internal/core/protocol/FrameDecoder.java | 14 +++++---- .../core/protocol/FrameDecodingException.java | 14 +++++---- .../internal/core/protocol/FrameEncoder.java | 14 +++++---- .../core/protocol/FrameToSegmentEncoder.java | 14 +++++---- .../internal/core/protocol/Lz4Compressor.java | 14 +++++---- .../core/protocol/SegmentToBytesEncoder.java | 14 +++++---- .../core/protocol/SegmentToFrameDecoder.java | 14 +++++---- .../core/protocol/SnappyCompressor.java | 14 +++++---- .../internal/core/protocol/package-info.java | 14 +++++---- .../ConsistencyDowngradingRetryPolicy.java | 14 +++++---- .../ConsistencyDowngradingRetryVerdict.java | 14 +++++---- .../core/retry/DefaultRetryPolicy.java | 14 +++++---- .../core/retry/DefaultRetryVerdict.java | 14 +++++---- .../DefaultWriteTypeRegistry.java | 14 +++++---- .../core/servererrors/WriteTypeRegistry.java | 14 +++++---- .../session/BuiltInRequestProcessors.java | 14 +++++---- ...BuiltInRequestProcessorsSubstitutions.java | 14 +++++---- .../internal/core/session/DefaultSession.java | 14 +++++---- .../internal/core/session/PoolManager.java | 14 +++++---- .../internal/core/session/ReprepareOnUp.java | 14 +++++---- 
.../core/session/RepreparePayload.java | 14 +++++---- .../core/session/RequestProcessor.java | 14 +++++---- .../session/RequestProcessorRegistry.java | 14 +++++---- .../core/session/SchemaListenerNotifier.java | 14 +++++---- .../internal/core/session/SessionWrapper.java | 14 +++++---- .../ConcurrencyLimitingRequestThrottler.java | 14 +++++---- .../core/session/throttling/NanoClock.java | 14 +++++---- .../PassThroughRequestThrottler.java | 14 +++++---- .../RateLimitingRequestThrottler.java | 14 +++++---- .../ConstantSpeculativeExecutionPolicy.java | 14 +++++---- .../specex/NoSpeculativeExecutionPolicy.java | 14 +++++---- .../core/ssl/DefaultSslEngineFactory.java | 14 +++++---- .../core/ssl/JdkSslHandlerFactory.java | 14 +++++---- .../core/ssl/SniSslEngineFactory.java | 14 +++++---- .../internal/core/ssl/SslHandlerFactory.java | 14 +++++---- .../core/time/AtomicTimestampGenerator.java | 14 +++++---- .../oss/driver/internal/core/time/Clock.java | 14 +++++---- .../driver/internal/core/time/JavaClock.java | 14 +++++---- .../time/MonotonicTimestampGenerator.java | 14 +++++---- .../internal/core/time/NativeClock.java | 14 +++++---- .../time/ServerSideTimestampGenerator.java | 14 +++++---- .../time/ThreadLocalTimestampGenerator.java | 14 +++++---- .../tracker/MultiplexingRequestTracker.java | 14 +++++---- .../core/tracker/NoopRequestTracker.java | 14 +++++---- .../core/tracker/RequestLogFormatter.java | 14 +++++---- .../internal/core/tracker/RequestLogger.java | 14 +++++---- .../internal/core/type/DataTypeHelper.java | 14 +++++---- .../internal/core/type/DefaultCustomType.java | 14 +++++---- .../internal/core/type/DefaultListType.java | 14 +++++---- .../internal/core/type/DefaultMapType.java | 14 +++++---- .../internal/core/type/DefaultSetType.java | 14 +++++---- .../internal/core/type/DefaultTupleType.java | 14 +++++---- .../core/type/DefaultUserDefinedType.java | 14 +++++---- .../internal/core/type/DefaultVectorType.java | 14 +++++---- 
.../internal/core/type/PrimitiveType.java | 14 +++++---- .../core/type/UserDefinedTypeBuilder.java | 14 +++++---- .../internal/core/type/codec/BigIntCodec.java | 14 +++++---- .../internal/core/type/codec/BlobCodec.java | 14 +++++---- .../core/type/codec/BooleanCodec.java | 14 +++++---- .../core/type/codec/CounterCodec.java | 14 +++++---- .../core/type/codec/CqlDurationCodec.java | 14 +++++---- .../internal/core/type/codec/CustomCodec.java | 14 +++++---- .../internal/core/type/codec/DateCodec.java | 14 +++++---- .../core/type/codec/DecimalCodec.java | 14 +++++---- .../internal/core/type/codec/DoubleCodec.java | 14 +++++---- .../internal/core/type/codec/FloatCodec.java | 14 +++++---- .../internal/core/type/codec/InetCodec.java | 14 +++++---- .../internal/core/type/codec/IntCodec.java | 14 +++++---- .../internal/core/type/codec/ListCodec.java | 14 +++++---- .../internal/core/type/codec/MapCodec.java | 14 +++++---- .../internal/core/type/codec/ParseUtils.java | 14 +++++---- .../internal/core/type/codec/SetCodec.java | 14 +++++---- .../core/type/codec/SimpleBlobCodec.java | 14 +++++---- .../core/type/codec/SmallIntCodec.java | 14 +++++---- .../internal/core/type/codec/StringCodec.java | 14 +++++---- .../internal/core/type/codec/TimeCodec.java | 14 +++++---- .../core/type/codec/TimeUuidCodec.java | 14 +++++---- .../core/type/codec/TimestampCodec.java | 14 +++++---- .../core/type/codec/TinyIntCodec.java | 14 +++++---- .../internal/core/type/codec/TupleCodec.java | 14 +++++---- .../internal/core/type/codec/UdtCodec.java | 14 +++++---- .../internal/core/type/codec/UuidCodec.java | 14 +++++---- .../internal/core/type/codec/VarIntCodec.java | 14 +++++---- .../internal/core/type/codec/VectorCodec.java | 14 +++++---- .../core/type/codec/extras/OptionalCodec.java | 14 +++++---- .../array/AbstractListToArrayCodec.java | 14 +++++---- .../AbstractPrimitiveListToArrayCodec.java | 14 +++++---- .../extras/array/BooleanListToArrayCodec.java | 14 +++++---- 
.../extras/array/ByteListToArrayCodec.java | 14 +++++---- .../extras/array/DoubleListToArrayCodec.java | 14 +++++---- .../extras/array/FloatListToArrayCodec.java | 14 +++++---- .../extras/array/IntListToArrayCodec.java | 14 +++++---- .../extras/array/LongListToArrayCodec.java | 14 +++++---- .../extras/array/ObjectListToArrayCodec.java | 14 +++++---- .../extras/array/ShortListToArrayCodec.java | 14 +++++---- .../codec/extras/enums/EnumNameCodec.java | 14 +++++---- .../codec/extras/enums/EnumOrdinalCodec.java | 14 +++++---- .../type/codec/extras/json/JsonCodec.java | 14 +++++---- .../extras/time/LocalTimestampCodec.java | 14 +++++---- .../time/PersistentZonedTimestampCodec.java | 14 +++++---- .../extras/time/TimestampMillisCodec.java | 14 +++++---- .../extras/time/ZonedTimestampCodec.java | 14 +++++---- .../vector/AbstractVectorToArrayCodec.java | 14 +++++---- .../vector/FloatVectorToArrayCodec.java | 14 +++++---- .../codec/registry/CachingCodecRegistry.java | 14 +++++---- .../registry/CodecRegistryConstants.java | 14 +++++---- .../codec/registry/DefaultCodecRegistry.java | 14 +++++---- .../internal/core/type/util/VIntCoding.java | 14 +++++---- .../driver/internal/core/util/ArrayUtils.java | 14 +++++---- .../internal/core/util/CollectionsUtils.java | 14 +++++---- .../internal/core/util/CountingIterator.java | 14 +++++---- .../core/util/DefaultDependencyChecker.java | 14 +++++---- .../driver/internal/core/util/Dependency.java | 14 +++++---- .../internal/core/util/DirectedGraph.java | 14 +++++---- .../core/util/GraalDependencyChecker.java | 14 +++++---- .../driver/internal/core/util/Loggers.java | 14 +++++---- .../driver/internal/core/util/NanoTime.java | 14 +++++---- .../internal/core/util/ProtocolUtils.java | 14 +++++---- .../driver/internal/core/util/Reflection.java | 14 +++++---- .../driver/internal/core/util/RoutingKey.java | 14 +++++---- .../oss/driver/internal/core/util/Sizes.java | 14 +++++---- .../driver/internal/core/util/Strings.java | 14 +++++---- 
.../util/collection/CompositeQueryPlan.java | 14 +++++---- .../core/util/collection/EmptyQueryPlan.java | 14 +++++---- .../core/util/collection/LazyQueryPlan.java | 14 +++++---- .../core/util/collection/QueryPlan.java | 14 +++++---- .../core/util/collection/QueryPlanBase.java | 14 +++++---- .../core/util/collection/SimpleQueryPlan.java | 14 +++++---- .../util/concurrent/BlockingOperation.java | 14 +++++---- .../util/concurrent/CompletableFutures.java | 14 +++++---- .../core/util/concurrent/CycleDetector.java | 14 +++++---- .../core/util/concurrent/Debouncer.java | 14 +++++---- .../DriverBlockHoundIntegration.java | 14 +++++---- .../core/util/concurrent/LazyReference.java | 14 +++++---- .../core/util/concurrent/PromiseCombiner.java | 14 +++++---- .../core/util/concurrent/Reconnection.java | 14 +++++---- .../util/concurrent/ReplayingEventFilter.java | 14 +++++---- .../core/util/concurrent/RunOrSchedule.java | 14 +++++---- .../util/concurrent/UncaughtExceptions.java | 14 +++++---- .../internal/core/util/package-info.java | 14 +++++---- .../oss/driver/internal/package-info.java | 14 +++++---- .../com/datastax/oss/driver/Driver.properties | 16 +++++----- core/src/main/resources/reference.conf | 2 +- .../com/datastax/dse/driver/Assertions.java | 14 +++++---- .../dse/driver/DriverRunListener.java | 14 +++++---- .../dse/driver/DseTestDataProviders.java | 14 +++++---- .../datastax/dse/driver/DseTestFixtures.java | 14 +++++---- .../dse/driver/TinkerpopBufferAssert.java | 14 +++++---- .../data/time/DateRangePrecisionTest.java | 14 +++++---- .../api/core/data/time/DateRangeTest.java | 14 +++++---- .../graph/predicates/CqlCollectionTest.java | 14 +++++---- .../api/core/graph/predicates/GeoTest.java | 14 +++++---- .../api/core/graph/predicates/SearchTest.java | 14 +++++---- .../driver/internal/DependencyCheckTest.java | 14 +++++---- .../internal/DependencyCheckTestBase.java | 14 +++++---- .../context/DseStartupOptionsBuilderTest.java | 14 +++++---- 
...ousCqlRequestHandlerNodeTargetingTest.java | 14 +++++---- ...tinuousCqlRequestHandlerReprepareTest.java | 14 +++++---- .../ContinuousCqlRequestHandlerRetryTest.java | 14 +++++---- .../ContinuousCqlRequestHandlerTest.java | 14 +++++---- .../ContinuousCqlRequestHandlerTestBase.java | 14 +++++---- .../DefaultContinuousAsyncResultSetTest.java | 14 +++++---- .../DefaultContinuousResultSetTest.java | 14 +++++---- ...inuousCqlRequestReactiveProcessorTest.java | 14 +++++---- .../CqlRequestReactiveProcessorTest.java | 14 +++++---- .../DefaultReactiveResultSetTckTest.java | 14 +++++---- .../core/cql/reactive/MockAsyncResultSet.java | 14 +++++---- .../internal/core/cql/reactive/MockRow.java | 14 +++++---- .../ReactiveResultSetSubscriptionTest.java | 14 +++++---- .../SimpleUnicastProcessorTckTest.java | 14 +++++---- .../reactive/SimpleUnicastProcessorTest.java | 14 +++++---- .../core/cql/reactive/TestSubscriber.java | 14 +++++---- .../data/geometry/DefaultLineStringTest.java | 14 +++++---- .../core/data/geometry/DefaultPointTest.java | 14 +++++---- .../data/geometry/DefaultPolygonTest.java | 14 +++++---- .../core/data/geometry/DistanceTest.java | 14 +++++---- .../data/geometry/SerializationUtils.java | 14 +++++---- ...equestHandlerSpeculativeExecutionTest.java | 14 +++++---- .../ContinuousGraphRequestHandlerTest.java | 14 +++++---- .../GraphExecutionInfoConverterTest.java | 14 +++++---- .../internal/core/graph/GraphNodeTest.java | 14 +++++---- .../core/graph/GraphRequestHandlerTest.java | 14 +++++---- .../graph/GraphRequestHandlerTestHarness.java | 14 +++++---- .../core/graph/GraphResultSetTestBase.java | 14 +++++---- .../core/graph/GraphResultSetsTest.java | 14 +++++---- .../graph/GraphStatementBuilderBaseTest.java | 14 +++++---- .../core/graph/GraphSupportCheckerTest.java | 14 +++++---- .../internal/core/graph/GraphTestUtils.java | 14 +++++---- .../core/graph/binary/GraphDataTypesTest.java | 14 +++++---- .../ReactiveGraphRequestProcessorTest.java | 14 +++++---- 
.../refresh/GraphSchemaRefreshTest.java | 14 +++++---- .../core/insights/AddressFormatterTest.java | 14 +++++---- .../ConfigAntiPatternsFinderTest.java | 14 +++++---- .../core/insights/DataCentersFinderTest.java | 14 +++++---- .../insights/ExecutionProfileMockUtil.java | 14 +++++---- .../ExecutionProfilesInfoFinderTest.java | 14 +++++---- .../core/insights/InsightsClientTest.java | 14 +++++---- .../insights/InsightsSupportVerifierTest.java | 14 +++++---- .../core/insights/PackageUtilTest.java | 14 +++++---- .../core/insights/PlatformInfoFinderTest.java | 14 +++++---- .../ReconnectionPolicyInfoFinderTest.java | 14 +++++---- .../TinkerpopBufferPrimitiveCodecTest.java | 14 +++++---- .../codec/geometry/GeometryCodecTest.java | 14 +++++---- .../codec/geometry/LineStringCodecTest.java | 14 +++++---- .../type/codec/geometry/PointCodecTest.java | 14 +++++---- .../type/codec/geometry/PolygonCodecTest.java | 14 +++++---- .../type/codec/time/DateRangeCodecTest.java | 14 +++++---- .../BoundedConcurrentQueueTest.java | 14 +++++---- .../com/datastax/oss/driver/Assertions.java | 14 +++++---- .../datastax/oss/driver/ByteBufAssert.java | 14 +++++---- .../oss/driver/DriverRunListener.java | 14 +++++---- .../oss/driver/TestDataProviders.java | 14 +++++---- .../api/core/AllNodesFailedExceptionTest.java | 14 +++++---- .../driver/api/core/CqlIdentifierTest.java | 14 +++++---- .../oss/driver/api/core/VersionAssert.java | 14 +++++---- .../oss/driver/api/core/VersionTest.java | 14 +++++---- ...ProgrammaticPlainTextAuthProviderTest.java | 14 +++++---- .../api/core/config/OptionsMapTest.java | 14 +++++---- .../core/config/TypedDriverOptionTest.java | 14 +++++---- .../api/core/cql/StatementBuilderTest.java | 14 +++++---- .../api/core/cql/StatementProfileTest.java | 14 +++++---- .../driver/api/core/data/CqlDurationTest.java | 14 +++++---- .../driver/api/core/data/CqlVectorTest.java | 14 +++++---- .../SafeInitNodeStateListenerTest.java | 14 +++++---- 
.../api/core/paging/OffsetPagerAsyncTest.java | 14 +++++---- .../api/core/paging/OffsetPagerSyncTest.java | 14 +++++---- .../api/core/paging/OffsetPagerTestBase.java | 14 +++++---- .../core/paging/OffsetPagerTestFixture.java | 14 +++++---- ...ConsistencyDowngradingRetryPolicyTest.java | 14 +++++---- .../core/retry/DefaultRetryPolicyTest.java | 14 +++++---- .../api/core/retry/RetryPolicyTestBase.java | 14 +++++---- ...onstantSpeculativeExecutionPolicyTest.java | 14 +++++---- .../api/core/type/UserDefinedTypeTest.java | 14 +++++---- .../core/type/reflect/GenericTypeTest.java | 14 +++++---- .../oss/driver/api/core/uuid/UuidsTest.java | 14 +++++---- .../driver/internal/SerializationHelper.java | 14 +++++---- .../core/AsyncPagingIterableWrapperTest.java | 14 +++++---- .../internal/core/CompletionStageAssert.java | 14 +++++---- .../internal/core/ContactPointsTest.java | 14 +++++---- .../DefaultProtocolVersionRegistryTest.java | 14 +++++---- .../internal/core/DriverConfigAssert.java | 14 +++++---- .../core/MockAsyncPagingIterable.java | 14 +++++---- .../internal/core/MockPagingIterable.java | 14 +++++---- .../internal/core/NettyFutureAssert.java | 14 +++++---- .../core/PagingIterableWrapperTest.java | 14 +++++---- .../driver/internal/core/TestResponses.java | 14 +++++---- .../Ec2MultiRegionAddressTranslatorTest.java | 14 +++++---- .../FixedHostNameAddressTranslatorTest.java | 14 +++++---- .../ChannelFactoryAvailableIdsTest.java | 14 +++++---- .../ChannelFactoryClusterNameTest.java | 14 +++++---- ...ChannelFactoryProtocolNegotiationTest.java | 14 +++++---- .../ChannelFactorySupportedOptionsTest.java | 14 +++++---- .../core/channel/ChannelFactoryTestBase.java | 14 +++++---- .../core/channel/ChannelHandlerTestBase.java | 14 +++++---- .../core/channel/ConnectInitHandlerTest.java | 14 +++++---- .../core/channel/DriverChannelTest.java | 14 +++++---- .../core/channel/EmbeddedEndPoint.java | 14 +++++---- .../core/channel/InFlightHandlerTest.java | 14 +++++---- 
.../internal/core/channel/LocalEndPoint.java | 14 +++++---- .../core/channel/MockAuthenticator.java | 14 +++++---- .../channel/MockChannelFactoryHelper.java | 14 +++++---- .../core/channel/MockResponseCallback.java | 14 +++++---- .../core/channel/ProtocolInitHandlerTest.java | 14 +++++---- .../core/channel/StreamIdGeneratorTest.java | 14 +++++---- .../internal/core/config/MockOptions.java | 14 +++++---- .../core/config/MockTypedOptions.java | 14 +++++---- .../config/cloud/CloudConfigFactoryTest.java | 14 +++++---- .../CompositeDriverConfigReloadTest.java | 14 +++++---- .../composite/CompositeDriverConfigTest.java | 14 +++++---- .../map/MapBasedDriverConfigLoaderTest.java | 14 +++++---- .../config/map/MapBasedDriverConfigTest.java | 14 +++++---- .../DefaultDriverConfigLoaderTest.java | 14 +++++---- ...rammaticDriverConfigLoaderBuilderTest.java | 14 +++++---- ...eSafeDriverConfigOverrideDefaultsTest.java | 14 +++++---- .../typesafe/TypesafeDriverConfigTest.java | 14 +++++---- .../ExponentialReconnectionPolicyTest.java | 14 +++++---- .../context/DefaultDriverContextTest.java | 14 +++++---- .../context/MockedDriverContextFactory.java | 14 +++++---- .../context/StartupOptionsBuilderTest.java | 14 +++++---- .../core/context/bus/EventBusTest.java | 14 +++++---- .../control/ControlConnectionEventsTest.java | 14 +++++---- .../core/control/ControlConnectionTest.java | 14 +++++---- .../control/ControlConnectionTestBase.java | 14 +++++---- .../internal/core/cql/ConversionsTest.java | 14 +++++---- .../core/cql/CqlPrepareHandlerTest.java | 14 +++++---- .../core/cql/CqlRequestHandlerRetryTest.java | 14 +++++---- ...equestHandlerSpeculativeExecutionTest.java | 14 +++++---- .../core/cql/CqlRequestHandlerTest.java | 14 +++++---- .../core/cql/CqlRequestHandlerTestBase.java | 14 +++++---- .../cql/CqlRequestHandlerTrackerTest.java | 14 +++++---- .../core/cql/DefaultAsyncResultSetTest.java | 14 +++++---- .../cql/PagingIterableSpliteratorTest.java | 14 +++++---- 
.../internal/core/cql/PoolBehavior.java | 14 +++++---- .../core/cql/QueryTraceFetcherTest.java | 14 +++++---- .../core/cql/RequestHandlerTestHarness.java | 14 +++++---- .../internal/core/cql/ResultSetTestBase.java | 14 +++++---- .../internal/core/cql/ResultSetsTest.java | 14 +++++---- .../internal/core/cql/StatementSizeTest.java | 14 +++++---- .../core/data/AccessibleByIdTestBase.java | 14 +++++---- .../core/data/AccessibleByIndexTestBase.java | 14 +++++---- .../core/data/DefaultTupleValueTest.java | 14 +++++---- .../core/data/DefaultUdtValueTest.java | 14 +++++---- .../core/data/IdentifierIndexTest.java | 14 +++++---- ...asicLoadBalancingPolicyDcAgnosticTest.java | 14 +++++---- ...asicLoadBalancingPolicyDcFailoverTest.java | 14 +++++---- .../BasicLoadBalancingPolicyDistanceTest.java | 14 +++++---- .../BasicLoadBalancingPolicyEventsTest.java | 14 +++++---- .../BasicLoadBalancingPolicyInitTest.java | 14 +++++---- ...BasicLoadBalancingPolicyQueryPlanTest.java | 14 +++++---- ...ringLoadBalancingPolicyDcFailoverTest.java | 14 +++++---- ...erringLoadBalancingPolicyDistanceTest.java | 14 +++++---- ...nferringLoadBalancingPolicyEventsTest.java | 14 +++++---- ...cInferringLoadBalancingPolicyInitTest.java | 14 +++++---- ...rringLoadBalancingPolicyQueryPlanTest.java | 14 +++++---- ...aultLoadBalancingPolicyDcFailoverTest.java | 14 +++++---- ...efaultLoadBalancingPolicyDistanceTest.java | 14 +++++---- .../DefaultLoadBalancingPolicyEventsTest.java | 14 +++++---- .../DefaultLoadBalancingPolicyInitTest.java | 14 +++++---- ...faultLoadBalancingPolicyQueryPlanTest.java | 14 +++++---- ...LoadBalancingPolicyRequestTrackerTest.java | 14 +++++---- .../LoadBalancingPolicyTestBase.java | 14 +++++---- .../nodeset/DcAgnosticNodeSetTest.java | 14 +++++---- .../nodeset/MultiDcNodeSetTest.java | 14 +++++---- .../nodeset/SingleDcNodeSetTest.java | 14 +++++---- .../core/metadata/AddNodeRefreshTest.java | 14 +++++---- .../core/metadata/DefaultEndPointTest.java | 14 +++++---- 
.../metadata/DefaultMetadataTokenMapTest.java | 14 +++++---- .../core/metadata/DefaultNodeTest.java | 14 +++++---- .../metadata/DefaultTopologyMonitorTest.java | 14 +++++---- .../metadata/FullNodeListRefreshTest.java | 14 +++++---- .../metadata/InitialNodeListRefreshTest.java | 14 +++++---- .../LoadBalancingPolicyWrapperTest.java | 14 +++++---- .../core/metadata/MetadataManagerTest.java | 14 +++++---- .../MultiplexingNodeStateListenerTest.java | 14 +++++---- .../core/metadata/NodeStateManagerTest.java | 14 +++++---- .../core/metadata/PeerRowValidatorTest.java | 14 +++++---- .../core/metadata/RemoveNodeRefreshTest.java | 14 +++++---- .../metadata/SchemaAgreementCheckerTest.java | 14 +++++---- .../core/metadata/TestNodeFactory.java | 14 +++++---- .../metadata/schema/IndexMetadataTest.java | 14 +++++---- .../MultiplexingSchemaChangeListenerTest.java | 14 +++++---- .../schema/parsing/AggregateParserTest.java | 14 +++++---- .../parsing/DataTypeClassNameParserTest.java | 14 +++++---- .../parsing/DataTypeCqlNameParserTest.java | 14 +++++---- .../schema/parsing/FunctionParserTest.java | 14 +++++---- .../schema/parsing/SchemaParserTest.java | 14 +++++---- .../schema/parsing/SchemaParserTestBase.java | 14 +++++---- .../schema/parsing/TableParserTest.java | 14 +++++---- .../UserDefinedTypeListParserTest.java | 14 +++++---- .../schema/parsing/ViewParserTest.java | 14 +++++---- .../queries/Cassandra21SchemaQueriesTest.java | 14 +++++---- .../queries/Cassandra22SchemaQueriesTest.java | 14 +++++---- .../queries/Cassandra3SchemaQueriesTest.java | 14 +++++---- .../DefaultSchemaQueriesFactoryTest.java | 14 +++++---- .../schema/queries/KeyspaceFilterTest.java | 14 +++++---- .../schema/queries/SchemaQueriesTest.java | 14 +++++---- .../schema/refresh/SchemaRefreshTest.java | 14 +++++---- .../token/ByteOrderedTokenRangeTest.java | 14 +++++---- .../metadata/token/DefaultTokenMapTest.java | 14 +++++---- .../metadata/token/Murmur3TokenRangeTest.java | 14 +++++---- 
...etworkTopologyReplicationStrategyTest.java | 14 +++++---- .../metadata/token/RandomTokenRangeTest.java | 14 +++++---- .../metadata/token/ReplicationFactorTest.java | 14 +++++---- .../token/SimpleReplicationStrategyTest.java | 14 +++++---- .../core/metadata/token/TokenRangeAssert.java | 14 +++++---- .../core/metadata/token/TokenRangeTest.java | 14 +++++---- .../metrics/DefaultMetricIdGeneratorTest.java | 14 +++++---- .../core/metrics/DefaultMetricIdTest.java | 14 +++++---- .../metrics/DropwizardMetricsFactoryTest.java | 14 +++++---- .../DropwizardNodeMetricUpdaterTest.java | 14 +++++---- .../core/metrics/NoopMetricsFactoryTest.java | 14 +++++---- .../metrics/TaggingMetricIdGeneratorTest.java | 14 +++++---- .../driver/internal/core/os/JnrLibcTest.java | 14 +++++---- .../driver/internal/core/os/NativeTest.java | 14 +++++---- .../core/pool/ChannelPoolInitTest.java | 14 +++++---- .../core/pool/ChannelPoolKeyspaceTest.java | 14 +++++---- .../core/pool/ChannelPoolReconnectTest.java | 14 +++++---- .../core/pool/ChannelPoolResizeTest.java | 14 +++++---- .../core/pool/ChannelPoolShutdownTest.java | 14 +++++---- .../core/pool/ChannelPoolTestBase.java | 14 +++++---- .../internal/core/pool/ChannelSetTest.java | 14 +++++---- .../core/protocol/BuiltInCompressorsTest.java | 14 +++++---- .../protocol/ByteBufPrimitiveCodecTest.java | 14 +++++---- .../protocol/BytesToSegmentDecoderTest.java | 14 +++++---- .../core/protocol/FrameDecoderTest.java | 14 +++++---- .../protocol/SegmentToFrameDecoderTest.java | 14 +++++---- .../core/protocol/SliceWriteListenerTest.java | 14 +++++---- .../core/session/DefaultSessionPoolsTest.java | 14 +++++---- .../session/MockChannelPoolFactoryHelper.java | 14 +++++---- .../core/session/PoolManagerTest.java | 14 +++++---- .../core/session/ReprepareOnUpTest.java | 14 +++++---- ...ncurrencyLimitingRequestThrottlerTest.java | 14 +++++---- .../session/throttling/MockThrottled.java | 14 +++++---- .../RateLimitingRequestThrottlerTest.java | 14 +++++---- 
.../session/throttling/SettableNanoClock.java | 14 +++++---- .../time/AtomicTimestampGeneratorTest.java | 14 +++++---- .../MonotonicTimestampGeneratorTestBase.java | 14 +++++---- .../ThreadLocalTimestampGeneratorTest.java | 14 +++++---- .../MultiplexingRequestTrackerTest.java | 14 +++++---- .../core/tracker/RequestLogFormatterTest.java | 14 +++++---- .../core/type/DataTypeDetachableTest.java | 14 +++++---- .../core/type/DataTypeSerializationTest.java | 14 +++++---- .../internal/core/type/PrimitiveTypeTest.java | 14 +++++---- .../core/type/codec/AsciiCodecTest.java | 14 +++++---- .../core/type/codec/BigIntCodecTest.java | 14 +++++---- .../core/type/codec/BlobCodecTest.java | 14 +++++---- .../core/type/codec/BooleanCodecTest.java | 14 +++++---- .../core/type/codec/CodecTestBase.java | 14 +++++---- .../core/type/codec/CounterCodecTest.java | 14 +++++---- .../core/type/codec/CqlDurationCodecTest.java | 14 +++++---- .../core/type/codec/CqlIntToStringCodec.java | 14 +++++---- .../core/type/codec/CustomCodecTest.java | 14 +++++---- .../core/type/codec/DateCodecTest.java | 14 +++++---- .../core/type/codec/DecimalCodecTest.java | 14 +++++---- .../core/type/codec/DoubleCodecTest.java | 14 +++++---- .../core/type/codec/FloatCodecTest.java | 14 +++++---- .../core/type/codec/InetCodecTest.java | 14 +++++---- .../core/type/codec/IntCodecTest.java | 14 +++++---- .../core/type/codec/ListCodecTest.java | 14 +++++---- .../core/type/codec/MapCodecTest.java | 14 +++++---- .../core/type/codec/MappingCodecTest.java | 14 +++++---- .../core/type/codec/SetCodecTest.java | 14 +++++---- .../core/type/codec/SimpleBlobCodecTest.java | 14 +++++---- .../core/type/codec/SmallIntCodecTest.java | 14 +++++---- .../core/type/codec/TextCodecTest.java | 14 +++++---- .../core/type/codec/TimeCodecTest.java | 14 +++++---- .../core/type/codec/TimeUuidCodecTest.java | 14 +++++---- .../core/type/codec/TimestampCodecTest.java | 14 +++++---- .../core/type/codec/TinyIntCodecTest.java | 14 +++++---- 
.../core/type/codec/TupleCodecTest.java | 14 +++++---- .../core/type/codec/UdtCodecTest.java | 14 +++++---- .../core/type/codec/UuidCodecTest.java | 14 +++++---- .../core/type/codec/VarintCodecTest.java | 14 +++++---- .../core/type/codec/VectorCodecTest.java | 14 +++++---- .../type/codec/extras/OptionalCodecTest.java | 14 +++++---- .../extras/array/BooleanArrayCodecTest.java | 14 +++++---- .../extras/array/ByteArrayCodecTest.java | 14 +++++---- .../extras/array/DoubleArrayCodecTest.java | 14 +++++---- .../extras/array/FloatArrayCodecTest.java | 14 +++++---- .../codec/extras/array/IntArrayCodecTest.java | 14 +++++---- .../extras/array/LongArrayCodecTest.java | 14 +++++---- .../extras/array/ObjectArrayCodecTest.java | 14 +++++---- .../extras/array/ShortArrayCodecTest.java | 14 +++++---- .../codec/extras/enums/EnumNameCodecTest.java | 14 +++++---- .../extras/enums/EnumOrdinalCodecTest.java | 14 +++++---- .../type/codec/extras/json/JsonCodecTest.java | 14 +++++---- .../extras/time/LocalTimestampCodecTest.java | 14 +++++---- .../PersistentZonedTimestampCodecTest.java | 14 +++++---- .../extras/time/TimestampMillisCodecTest.java | 14 +++++---- .../extras/time/ZonedTimestampCodecTest.java | 14 +++++---- .../registry/CachingCodecRegistryTest.java | 14 +++++---- ...CachingCodecRegistryTestDataProviders.java | 14 +++++---- .../internal/core/util/ArrayUtilsTest.java | 14 +++++---- .../driver/internal/core/util/ByteBufs.java | 14 +++++---- .../core/util/CollectionsUtilsTest.java | 14 +++++---- .../internal/core/util/DirectedGraphTest.java | 14 +++++---- .../driver/internal/core/util/LoggerTest.java | 14 +++++---- .../internal/core/util/ReflectionTest.java | 14 +++++---- .../internal/core/util/StringsTest.java | 14 +++++---- .../collection/CompositeQueryPlanTest.java | 14 +++++---- .../util/collection/LazyQueryPlanTest.java | 14 +++++---- .../util/collection/QueryPlanTestBase.java | 14 +++++---- .../util/collection/SimpleQueryPlanTest.java | 14 +++++---- 
.../core/util/concurrent/CapturingTimer.java | 14 +++++---- .../util/concurrent/CycleDetectorTest.java | 14 +++++---- .../core/util/concurrent/DebouncerTest.java | 14 +++++---- .../util/concurrent/PromiseCombinerTest.java | 14 +++++---- .../util/concurrent/ReconnectionTest.java | 14 +++++---- .../concurrent/ReplayingEventFilterTest.java | 14 +++++---- .../ScheduledTaskCapturingEventLoop.java | 14 +++++---- .../ScheduledTaskCapturingEventLoopTest.java | 14 +++++---- .../config/customApplication.properties | 14 +++++---- .../insights/malformed-pom.properties | 14 +++++---- .../test/resources/insights/pom.properties | 14 +++++---- core/src/test/resources/logback-test.xml | 14 +++++---- core/src/test/resources/project.properties | 14 +++++---- distribution/pom.xml | 20 ++++++------ distribution/src/assembly/binary-tarball.xml | 14 +++++---- docs.yaml | 4 +-- examples/README.md | 4 +-- examples/pom.xml | 18 ++++++----- .../astra/AstraReadCassandraVersion.java | 16 +++++----- .../basic/CreateAndPopulateKeyspace.java | 16 +++++----- .../examples/basic/ReadCassandraVersion.java | 16 +++++----- .../basic/ReadTopologyAndSchemaMetadata.java | 16 +++++----- .../concurrent/LimitConcurrencyCustom.java | 16 +++++----- .../LimitConcurrencyCustomAsync.java | 16 +++++----- .../LimitConcurrencyRequestThrottler.java | 14 +++++---- .../oss/driver/examples/datatypes/Blobs.java | 16 +++++----- .../examples/datatypes/CustomCodecs.java | 14 +++++---- .../examples/datatypes/TuplesMapped.java | 16 +++++----- .../examples/datatypes/TuplesSimple.java | 16 +++++----- .../datatypes/UserDefinedTypesMapped.java | 16 +++++----- .../datatypes/UserDefinedTypesSimple.java | 16 +++++----- .../failover/CrossDatacenterFailover.java | 16 +++++----- .../driver/examples/json/PlainTextJson.java | 14 +++++---- .../json/jackson/JacksonJsonColumn.java | 14 +++++---- .../json/jackson/JacksonJsonFunction.java | 14 +++++---- .../examples/json/jackson/JacksonJsonRow.java | 14 +++++---- 
.../examples/json/jsr/Jsr353JsonCodec.java | 14 +++++---- .../examples/json/jsr/Jsr353JsonColumn.java | 14 +++++---- .../examples/json/jsr/Jsr353JsonFunction.java | 14 +++++---- .../examples/json/jsr/Jsr353JsonRow.java | 14 +++++---- .../mapper/KillrVideoMapperExample.java | 14 +++++---- .../mapper/killrvideo/KillrVideoMapper.java | 14 +++++---- .../user/CreateUserQueryProvider.java | 14 +++++---- .../killrvideo/user/LoginQueryProvider.java | 14 +++++---- .../killrvideo/user/PasswordHashing.java | 14 +++++---- .../examples/mapper/killrvideo/user/User.java | 14 +++++---- .../killrvideo/user/UserCredentials.java | 14 +++++---- .../mapper/killrvideo/user/UserDao.java | 14 +++++---- .../video/CreateVideoQueryProvider.java | 14 +++++---- .../mapper/killrvideo/video/LatestVideo.java | 14 +++++---- .../mapper/killrvideo/video/UserVideo.java | 14 +++++---- .../mapper/killrvideo/video/Video.java | 14 +++++---- .../mapper/killrvideo/video/VideoBase.java | 14 +++++---- .../mapper/killrvideo/video/VideoByTag.java | 14 +++++---- .../mapper/killrvideo/video/VideoDao.java | 14 +++++---- .../examples/paging/ForwardPagingRestUi.java | 14 +++++---- .../examples/paging/RandomPagingRestUi.java | 14 +++++---- .../examples/retry/DowngradingRetry.java | 16 +++++----- examples/src/main/resources/logback.xml | 14 +++++---- integration-tests/pom.xml | 16 +++++----- .../DseGssApiAuthProviderAlternateIT.java | 14 +++++---- .../core/auth/DseGssApiAuthProviderIT.java | 14 +++++---- .../core/auth/DsePlainTextAuthProviderIT.java | 14 +++++---- .../core/auth/DseProxyAuthenticationIT.java | 14 +++++---- .../dse/driver/api/core/auth/EmbeddedAds.java | 14 +++++---- .../driver/api/core/auth/EmbeddedAdsRule.java | 14 +++++---- .../driver/api/core/auth/KerberosUtils.java | 14 +++++---- .../cql/continuous/ContinuousPagingIT.java | 14 +++++---- .../continuous/ContinuousPagingITBase.java | 14 +++++---- .../reactive/ContinuousPagingReactiveIT.java | 14 +++++---- .../api/core/data/geometry/GeometryIT.java | 
14 +++++---- .../api/core/data/geometry/LineStringIT.java | 14 +++++---- .../api/core/data/geometry/PointIT.java | 14 +++++---- .../api/core/data/geometry/PolygonIT.java | 14 +++++---- .../api/core/data/time/DateRangeIT.java | 14 +++++---- .../graph/ClassicGraphDataTypeITBase.java | 14 +++++---- .../graph/ClassicGraphGeoSearchIndexIT.java | 14 +++++---- .../graph/ClassicGraphTextSearchIndexIT.java | 14 +++++---- .../core/graph/CoreGraphDataTypeITBase.java | 14 +++++---- .../core/graph/CoreGraphGeoSearchIndexIT.java | 14 +++++---- .../graph/CoreGraphTextSearchIndexIT.java | 14 +++++---- .../api/core/graph/CqlCollectionIT.java | 14 +++++---- .../api/core/graph/GraphAuthenticationIT.java | 14 +++++---- .../core/graph/GraphGeoSearchIndexITBase.java | 14 +++++---- .../driver/api/core/graph/GraphPagingIT.java | 14 +++++---- .../graph/GraphSpeculativeExecutionIT.java | 14 +++++---- .../api/core/graph/GraphTestSupport.java | 14 +++++---- .../graph/GraphTextSearchIndexITBase.java | 14 +++++---- .../api/core/graph/GraphTimeoutsIT.java | 14 +++++---- .../api/core/graph/SampleGraphScripts.java | 14 +++++---- .../api/core/graph/SocialTraversalDsl.java | 14 +++++---- .../core/graph/SocialTraversalSourceDsl.java | 14 +++++---- .../api/core/graph/TinkerEdgeAssert.java | 14 +++++---- .../api/core/graph/TinkerElementAssert.java | 14 +++++---- .../api/core/graph/TinkerGraphAssertions.java | 14 +++++---- .../api/core/graph/TinkerPathAssert.java | 14 +++++---- .../api/core/graph/TinkerTreeAssert.java | 14 +++++---- .../api/core/graph/TinkerVertexAssert.java | 14 +++++---- .../graph/TinkerVertexPropertyAssert.java | 14 +++++---- .../DefaultReactiveGraphResultSetIT.java | 14 +++++---- .../remote/ClassicGraphDataTypeRemoteIT.java | 14 +++++---- .../remote/ClassicGraphTraversalRemoteIT.java | 14 +++++---- .../remote/CoreGraphDataTypeRemoteIT.java | 14 +++++---- .../remote/CoreGraphTraversalRemoteIT.java | 14 +++++---- .../GraphTraversalMetaPropertiesRemoteIT.java | 14 +++++---- 
...GraphTraversalMultiPropertiesRemoteIT.java | 14 +++++---- .../remote/GraphTraversalRemoteITBase.java | 14 +++++---- .../ClassicGraphDataTypeFluentIT.java | 14 +++++---- .../ClassicGraphDataTypeScriptIT.java | 14 +++++---- .../ClassicGraphTraversalBatchIT.java | 14 +++++---- .../statement/ClassicGraphTraversalIT.java | 14 +++++---- .../statement/CoreGraphDataTypeFluentIT.java | 14 +++++---- .../statement/CoreGraphDataTypeScriptIT.java | 14 +++++---- .../statement/CoreGraphTraversalBatchIT.java | 14 +++++---- .../graph/statement/CoreGraphTraversalIT.java | 14 +++++---- .../statement/GraphTraversalBatchITBase.java | 14 +++++---- .../graph/statement/GraphTraversalITBase.java | 14 +++++---- .../GraphTraversalMetaPropertiesIT.java | 14 +++++---- .../GraphTraversalMultiPropertiesIT.java | 14 +++++---- .../api/core/insights/InsightsClientIT.java | 14 +++++---- .../metadata/schema/AbstractMetadataIT.java | 14 +++++---- .../schema/DseAggregateMetadataIT.java | 14 +++++---- .../schema/DseFunctionMetadataIT.java | 14 +++++---- .../schema/KeyspaceGraphMetadataIT.java | 14 +++++---- .../TableGraphMetadataCaseSensitiveIT.java | 14 +++++---- .../metadata/schema/TableGraphMetadataIT.java | 14 +++++---- .../oss/driver/api/core/cloud/CloudIT.java | 14 +++++---- .../driver/api/core/cloud/SniProxyRule.java | 14 +++++---- .../driver/api/core/cloud/SniProxyServer.java | 14 +++++---- .../oss/driver/core/AllNodesFailedIT.java | 14 +++++---- .../datastax/oss/driver/core/ConnectIT.java | 14 +++++---- .../oss/driver/core/ConnectKeyspaceIT.java | 14 +++++---- .../oss/driver/core/PeersV2NodeRefreshIT.java | 14 +++++---- .../oss/driver/core/PoolBalancingIT.java | 14 +++++---- .../ProtocolVersionInitialNegotiationIT.java | 14 +++++---- .../core/ProtocolVersionMixedClusterIT.java | 14 +++++---- .../oss/driver/core/SerializationIT.java | 14 +++++---- .../oss/driver/core/SessionLeakIT.java | 14 +++++---- .../core/auth/PlainTextAuthProviderIT.java | 14 +++++---- 
.../core/compression/DirectCompressionIT.java | 14 +++++---- .../core/compression/HeapCompressionIT.java | 14 +++++---- .../core/config/DriverConfigValidationIT.java | 14 +++++---- .../config/DriverExecutionProfileCcmIT.java | 14 +++++---- .../DriverExecutionProfileReloadIT.java | 14 +++++---- .../DriverExecutionProfileSimulacronIT.java | 14 +++++---- .../core/config/MapBasedConfigLoaderIT.java | 14 +++++---- .../connection/ChannelSocketOptionsIT.java | 14 +++++---- .../driver/core/connection/FrameLengthIT.java | 14 +++++---- .../NettyResourceLeakDetectionIT.java | 14 +++++---- .../core/context/LifecycleListenerIT.java | 14 +++++---- .../oss/driver/core/cql/AsyncResultSetIT.java | 14 +++++---- .../oss/driver/core/cql/BatchStatementIT.java | 14 +++++---- .../driver/core/cql/BoundStatementCcmIT.java | 14 +++++---- .../core/cql/BoundStatementSimulacronIT.java | 14 +++++---- .../core/cql/ExecutionInfoWarningsIT.java | 14 +++++---- .../oss/driver/core/cql/NowInSecondsIT.java | 14 +++++---- .../core/cql/PagingIterableSpliteratorIT.java | 14 +++++---- .../oss/driver/core/cql/PagingStateIT.java | 14 +++++---- .../driver/core/cql/PerRequestKeyspaceIT.java | 14 +++++---- .../core/cql/PreparedStatementCachingIT.java | 14 +++++---- .../driver/core/cql/PreparedStatementIT.java | 14 +++++---- .../oss/driver/core/cql/QueryTraceIT.java | 14 +++++---- .../driver/core/cql/SimpleStatementCcmIT.java | 14 +++++---- .../core/cql/SimpleStatementSimulacronIT.java | 14 +++++---- .../reactive/DefaultReactiveResultSetIT.java | 14 +++++---- .../core/cql/reactive/ReactiveRetryIT.java | 14 +++++---- .../oss/driver/core/data/DataTypeIT.java | 14 +++++---- .../core/heartbeat/HeartbeatDisabledIT.java | 14 +++++---- .../driver/core/heartbeat/HeartbeatIT.java | 14 +++++---- .../AllLoadBalancingPoliciesSimulacronIT.java | 14 +++++---- .../DefaultLoadBalancingPolicyIT.java | 14 +++++---- .../core/loadbalancing/NodeTargetingIT.java | 14 +++++---- .../PerProfileLoadBalancingPolicyIT.java | 14 +++++---- 
.../core/metadata/ByteOrderedTokenIT.java | 14 +++++---- .../metadata/ByteOrderedTokenVnodesIT.java | 14 +++++---- .../core/metadata/CaseSensitiveUdtIT.java | 14 +++++---- .../oss/driver/core/metadata/DescribeIT.java | 14 +++++---- .../oss/driver/core/metadata/MetadataIT.java | 14 +++++---- .../driver/core/metadata/Murmur3TokenIT.java | 14 +++++---- .../core/metadata/Murmur3TokenVnodesIT.java | 14 +++++---- .../driver/core/metadata/NodeMetadataIT.java | 14 +++++---- .../oss/driver/core/metadata/NodeStateIT.java | 14 +++++---- .../driver/core/metadata/RandomTokenIT.java | 14 +++++---- .../core/metadata/RandomTokenVnodesIT.java | 14 +++++---- .../core/metadata/SchemaAgreementIT.java | 14 +++++---- .../driver/core/metadata/SchemaChangesIT.java | 14 +++++---- .../oss/driver/core/metadata/SchemaIT.java | 14 +++++---- .../oss/driver/core/metadata/TokenITBase.java | 14 +++++---- .../core/metrics/DropwizardMetricsIT.java | 14 +++++---- .../driver/core/metrics/MetricsITBase.java | 14 +++++---- .../ConsistencyDowngradingRetryPolicyIT.java | 14 +++++---- .../core/retry/DefaultRetryPolicyIT.java | 14 +++++---- .../core/retry/PerProfileRetryPolicyIT.java | 14 +++++---- .../oss/driver/core/session/AddedNodeIT.java | 14 +++++---- .../oss/driver/core/session/ExceptionIT.java | 14 +++++---- .../oss/driver/core/session/ListenersIT.java | 14 +++++---- .../driver/core/session/RemovedNodeIT.java | 14 +++++---- .../core/session/RequestProcessorIT.java | 14 +++++---- .../oss/driver/core/session/ShutdownIT.java | 14 +++++---- .../core/specex/SpeculativeExecutionIT.java | 14 +++++---- ...tSslEngineFactoryHostnameValidationIT.java | 14 +++++---- .../core/ssl/DefaultSslEngineFactoryIT.java | 14 +++++---- ...efaultSslEngineFactoryPropertyBasedIT.java | 14 +++++---- ...eFactoryPropertyBasedWithClientAuthIT.java | 14 +++++---- ...faultSslEngineFactoryWithClientAuthIT.java | 14 +++++---- .../driver/core/ssl/ProgrammaticSslIT.java | 14 +++++---- .../driver/core/throttling/ThrottlingIT.java | 14 
+++++---- .../driver/core/tracker/RequestLoggerIT.java | 14 +++++---- .../tracker/RequestNodeLoggerExample.java | 14 +++++---- .../core/type/codec/CqlIntToStringCodec.java | 14 +++++---- .../core/type/codec/ExtraTypeCodecsIT.java | 14 +++++---- .../type/codec/registry/CodecRegistryIT.java | 14 +++++---- .../example/guava/api/GuavaSession.java | 14 +++++---- .../guava/api/GuavaSessionBuilder.java | 14 +++++---- .../example/guava/api/GuavaSessionUtils.java | 14 +++++---- .../guava/internal/DefaultGuavaSession.java | 14 +++++---- .../guava/internal/GuavaDriverContext.java | 14 +++++---- .../internal/GuavaRequestAsyncProcessor.java | 14 +++++---- .../example/guava/internal/KeyRequest.java | 14 +++++---- .../guava/internal/KeyRequestProcessor.java | 14 +++++---- .../DriverBlockHoundIntegrationCcmIT.java | 14 +++++---- .../DriverBlockHoundIntegrationIT.java | 14 +++++---- .../oss/driver/mapper/ComputedIT.java | 14 +++++---- .../oss/driver/mapper/CustomResultTypeIT.java | 14 +++++---- .../oss/driver/mapper/DefaultKeyspaceIT.java | 14 +++++---- .../mapper/DefaultNullSavingStrategyIT.java | 14 +++++---- .../datastax/oss/driver/mapper/DeleteIT.java | 14 +++++---- .../oss/driver/mapper/DeleteReactiveIT.java | 14 +++++---- .../driver/mapper/EntityPolymorphismIT.java | 14 +++++---- .../oss/driver/mapper/FluentEntityIT.java | 14 +++++---- .../oss/driver/mapper/GetEntityIT.java | 14 +++++---- .../mapper/GuavaFutureProducerService.java | 14 +++++---- .../oss/driver/mapper/ImmutableEntityIT.java | 14 +++++---- .../oss/driver/mapper/IncrementIT.java | 14 +++++---- .../driver/mapper/IncrementWithNullsIT.java | 14 +++++---- .../datastax/oss/driver/mapper/InsertIT.java | 14 +++++---- .../oss/driver/mapper/InsertReactiveIT.java | 14 +++++---- .../oss/driver/mapper/InventoryITBase.java | 14 +++++---- .../oss/driver/mapper/NamingStrategyIT.java | 14 +++++---- .../oss/driver/mapper/NestedUdtIT.java | 14 +++++---- .../driver/mapper/NullSavingStrategyIT.java | 14 +++++---- 
.../oss/driver/mapper/PrimitivesIT.java | 14 +++++---- .../datastax/oss/driver/mapper/ProfileIT.java | 14 +++++---- .../mapper/QueryKeyspaceAndTableIT.java | 14 +++++---- .../oss/driver/mapper/QueryProviderIT.java | 14 +++++---- .../oss/driver/mapper/QueryReactiveIT.java | 14 +++++---- .../oss/driver/mapper/QueryReturnTypesIT.java | 14 +++++---- .../oss/driver/mapper/SchemaValidationIT.java | 14 +++++---- .../mapper/SelectCustomWhereClauseIT.java | 14 +++++---- .../datastax/oss/driver/mapper/SelectIT.java | 14 +++++---- .../driver/mapper/SelectOtherClausesIT.java | 14 +++++---- .../oss/driver/mapper/SelectReactiveIT.java | 14 +++++---- .../oss/driver/mapper/SetEntityIT.java | 14 +++++---- .../driver/mapper/StatementAttributesIT.java | 14 +++++---- .../oss/driver/mapper/TransientIT.java | 14 +++++---- .../datastax/oss/driver/mapper/UdtKeyIT.java | 14 +++++---- .../driver/mapper/UpdateCustomIfClauseIT.java | 14 +++++---- .../datastax/oss/driver/mapper/UpdateIT.java | 14 +++++---- .../oss/driver/mapper/UpdateNamingIT.java | 14 +++++---- .../oss/driver/mapper/UpdateReactiveIT.java | 14 +++++---- .../micrometer/MicrometerMetricsIT.java | 14 +++++---- .../microprofile/MicroProfileMetricsIT.java | 14 +++++---- .../oss/driver/querybuilder/JsonInsertIT.java | 14 +++++---- .../src/test/resources/logback-test.xml | 14 +++++---- manual/case_sensitivity/README.md | 2 +- manual/cloud/README.md | 2 +- manual/core/detachable_types/README.md | 2 +- manual/core/dse/graph/results/README.md | 2 +- manual/core/integration/README.md | 4 +-- manual/core/native_protocol/README.md | 2 +- manual/core/non_blocking/README.md | 2 +- mapper-processor/pom.xml | 16 +++++----- .../mapper/processor/CodeGenerator.java | 14 +++++---- .../processor/CodeGeneratorFactory.java | 14 +++++---- .../mapper/processor/DecoratedMessager.java | 14 +++++---- .../DefaultCodeGeneratorFactory.java | 14 +++++---- .../processor/DefaultProcessorContext.java | 14 +++++---- .../mapper/processor/GeneratedNames.java | 14 
+++++---- .../mapper/processor/JavaPoetFiler.java | 14 +++++---- .../mapper/processor/MapperProcessor.java | 14 +++++---- .../mapper/processor/MethodGenerator.java | 14 +++++---- .../mapper/processor/ProcessorContext.java | 14 +++++---- .../processor/SingleFileCodeGenerator.java | 14 +++++---- .../dao/DaoDeleteMethodGenerator.java | 14 +++++---- .../dao/DaoGetEntityMethodGenerator.java | 14 +++++---- .../dao/DaoImplementationGenerator.java | 14 +++++---- .../dao/DaoImplementationSharedCode.java | 14 +++++---- .../dao/DaoIncrementMethodGenerator.java | 14 +++++---- .../dao/DaoInsertMethodGenerator.java | 14 +++++---- .../processor/dao/DaoMethodGenerator.java | 14 +++++---- .../dao/DaoQueryMethodGenerator.java | 14 +++++---- .../dao/DaoQueryProviderMethodGenerator.java | 14 +++++---- .../mapper/processor/dao/DaoReturnType.java | 14 +++++---- .../processor/dao/DaoReturnTypeKind.java | 14 +++++---- .../processor/dao/DaoReturnTypeParser.java | 14 +++++---- .../dao/DaoSelectMethodGenerator.java | 14 +++++---- .../dao/DaoSetEntityMethodGenerator.java | 14 +++++---- .../dao/DaoUpdateMethodGenerator.java | 14 +++++---- .../dao/DefaultDaoReturnTypeKind.java | 14 +++++---- .../dao/DefaultDaoReturnTypeParser.java | 14 +++++---- .../mapper/processor/dao/EntityUtils.java | 14 +++++---- .../processor/dao/LoggingGenerator.java | 14 +++++---- .../dao/NullSavingStrategyValidation.java | 14 +++++---- .../entity/BuiltInNameConversions.java | 14 +++++---- .../processor/entity/CqlNameGenerator.java | 14 +++++---- .../entity/DefaultEntityDefinition.java | 14 +++++---- .../entity/DefaultEntityFactory.java | 14 +++++---- .../entity/DefaultPropertyDefinition.java | 14 +++++---- .../processor/entity/EntityDefinition.java | 14 +++++---- .../processor/entity/EntityFactory.java | 14 +++++---- ...lperDeleteByPrimaryKeyMethodGenerator.java | 14 +++++---- ...eleteByPrimaryKeyPartsMethodGenerator.java | 14 +++++---- ...ntityHelperDeleteStartMethodGenerator.java | 14 +++++---- 
.../entity/EntityHelperGenerator.java | 14 +++++---- .../EntityHelperGetMethodGenerator.java | 14 +++++---- .../EntityHelperInsertMethodGenerator.java | 14 +++++---- ...HelperSchemaValidationMethodGenerator.java | 14 +++++---- ...lperSelectByPrimaryKeyMethodGenerator.java | 14 +++++---- ...electByPrimaryKeyPartsMethodGenerator.java | 14 +++++---- ...ntityHelperSelectStartMethodGenerator.java | 14 +++++---- .../EntityHelperSetMethodGenerator.java | 14 +++++---- ...lperUpdateByPrimaryKeyMethodGenerator.java | 14 +++++---- ...ntityHelperUpdateStartMethodGenerator.java | 14 +++++---- .../processor/entity/PropertyDefinition.java | 14 +++++---- .../mapper/MapperBuilderGenerator.java | 14 +++++---- .../MapperDaoFactoryMethodGenerator.java | 14 +++++---- .../processor/mapper/MapperGenerator.java | 14 +++++---- .../mapper/MapperImplementationGenerator.java | 14 +++++---- .../MapperImplementationSharedCode.java | 14 +++++---- .../processor/util/AnnotationScanner.java | 14 +++++---- .../mapper/processor/util/Capitalizer.java | 14 +++++---- .../mapper/processor/util/Classes.java | 14 +++++---- .../processor/util/HierarchyScanner.java | 14 +++++---- .../mapper/processor/util/NameIndex.java | 14 +++++---- .../processor/util/ResolvedAnnotation.java | 14 +++++---- .../BindableHandlingSharedCode.java | 14 +++++---- .../generation/GeneratedCodePatterns.java | 14 +++++---- .../GenericTypeConstantGenerator.java | 14 +++++---- .../util/generation/PropertyType.java | 14 +++++---- .../mapper/processor/DependencyCheckTest.java | 14 +++++---- .../mapper/entity/EntityHelperBaseTest.java | 14 +++++---- .../mapper/processor/MapperProcessorTest.java | 14 +++++---- .../processor/dao/DaoAnnotationTest.java | 14 +++++---- .../dao/DaoDeleteMethodGeneratorTest.java | 14 +++++---- .../dao/DaoGetEntityMethodGeneratorTest.java | 14 +++++---- .../dao/DaoImplementationGeneratorTest.java | 14 +++++---- .../dao/DaoInsertMethodGeneratorTest.java | 14 +++++---- .../processor/dao/DaoMethodGeneratorTest.java 
| 14 +++++---- .../dao/DaoQueryMethodGeneratorTest.java | 14 +++++---- .../DaoQueryProviderMethodGeneratorTest.java | 14 +++++---- .../dao/DaoSelectMethodGeneratorTest.java | 14 +++++---- .../dao/DaoSetEntityMethodGeneratorTest.java | 14 +++++---- .../dao/DaoUpdateMethodGeneratorTest.java | 14 +++++---- .../dao/compiled/CompiledProduct.java | 14 +++++---- .../dao/compiled/CompiledProductDao.java | 14 +++++---- .../DaoCompiledMethodGeneratorTest.java | 14 +++++---- .../entity/BuiltInNameConversionsTest.java | 14 +++++---- .../entity/EntityAnnotationTest.java | 14 +++++---- .../entity/EntityNamingStrategyTest.java | 14 +++++---- .../entity/EntityPropertyAnnotationsTest.java | 14 +++++---- .../mapper/MapperAnnotationTest.java | 14 +++++---- .../MapperDaoFactoryMethodGeneratorTest.java | 14 +++++---- .../MapperImplementationGeneratorTest.java | 14 +++++---- .../mapper/MapperMethodGeneratorTest.java | 14 +++++---- .../processor/util/CapitalizerTest.java | 14 +++++---- .../processor/util/HierarchyScannerTest.java | 14 +++++---- .../src/test/resources/logback-test.xml | 14 +++++---- .../src/test/resources/project.properties | 14 +++++---- mapper-runtime/pom.xml | 16 +++++----- .../reactive/MappedReactiveResultSet.java | 14 +++++---- .../DefaultMappedReactiveResultSet.java | 14 +++++---- .../FailedMappedReactiveResultSet.java | 14 +++++---- .../mapper/reactive/ReactiveDaoBase.java | 14 +++++---- .../oss/driver/api/mapper/MapperBuilder.java | 14 +++++---- .../oss/driver/api/mapper/MapperContext.java | 14 +++++---- .../driver/api/mapper/MapperException.java | 14 +++++---- .../mapper/annotations/ClusteringColumn.java | 16 +++++----- .../api/mapper/annotations/Computed.java | 14 +++++---- .../api/mapper/annotations/CqlName.java | 14 +++++---- .../driver/api/mapper/annotations/Dao.java | 16 +++++----- .../api/mapper/annotations/DaoFactory.java | 16 +++++----- .../api/mapper/annotations/DaoKeyspace.java | 14 +++++---- .../api/mapper/annotations/DaoProfile.java | 14 +++++---- 
.../api/mapper/annotations/DaoTable.java | 14 +++++---- .../DefaultNullSavingStrategy.java | 16 +++++----- .../driver/api/mapper/annotations/Delete.java | 16 +++++----- .../driver/api/mapper/annotations/Entity.java | 14 +++++---- .../api/mapper/annotations/GetEntity.java | 16 +++++----- .../annotations/HierarchyScanStrategy.java | 14 +++++---- .../api/mapper/annotations/Increment.java | 16 +++++----- .../driver/api/mapper/annotations/Insert.java | 16 +++++----- .../driver/api/mapper/annotations/Mapper.java | 16 +++++----- .../mapper/annotations/NamingStrategy.java | 14 +++++---- .../api/mapper/annotations/PartitionKey.java | 14 +++++---- .../mapper/annotations/PropertyStrategy.java | 14 +++++---- .../driver/api/mapper/annotations/Query.java | 16 +++++----- .../api/mapper/annotations/QueryProvider.java | 14 +++++---- .../api/mapper/annotations/SchemaHint.java | 14 +++++---- .../driver/api/mapper/annotations/Select.java | 16 +++++----- .../api/mapper/annotations/SetEntity.java | 16 +++++----- .../annotations/StatementAttributes.java | 14 +++++---- .../api/mapper/annotations/Transient.java | 14 +++++---- .../annotations/TransientProperties.java | 14 +++++---- .../driver/api/mapper/annotations/Update.java | 16 +++++----- .../api/mapper/entity/EntityHelper.java | 14 +++++---- .../api/mapper/entity/naming/GetterStyle.java | 14 +++++---- .../mapper/entity/naming/NameConverter.java | 14 +++++---- .../entity/naming/NamingConvention.java | 14 +++++---- .../api/mapper/entity/naming/SetterStyle.java | 14 +++++---- .../entity/saving/NullSavingStrategy.java | 14 +++++---- .../mapper/result/MapperResultProducer.java | 14 +++++---- .../result/MapperResultProducerService.java | 14 +++++---- .../oss/driver/internal/mapper/DaoBase.java | 14 +++++---- .../driver/internal/mapper/DaoCacheKey.java | 14 +++++---- .../internal/mapper/DefaultMapperContext.java | 14 +++++---- .../mapper/entity/EntityHelperBase.java | 14 +++++---- .../api/mapper/DependencyCheckTest.java | 14 +++++---- 
.../MappedReactiveResultSetTckTest.java | 14 +++++---- .../mapper/reactive/MockAsyncResultSet.java | 14 +++++---- .../driver/api/mapper/reactive/MockRow.java | 14 +++++---- .../api/mapper/reactive/TestSubscriber.java | 14 +++++---- .../src/test/resources/project.properties | 14 +++++---- metrics/micrometer/pom.xml | 16 +++++----- .../micrometer/MicrometerMetricUpdater.java | 14 +++++---- .../micrometer/MicrometerMetricsFactory.java | 14 +++++---- .../MicrometerNodeMetricUpdater.java | 14 +++++---- .../MicrometerSessionMetricUpdater.java | 14 +++++---- .../metrics/micrometer/MicrometerTags.java | 14 +++++---- .../MicrometerMetricsFactoryTest.java | 14 +++++---- .../MicrometerNodeMetricUpdaterTest.java | 14 +++++---- .../MicrometerSessionMetricUpdaterTest.java | 14 +++++---- metrics/microprofile/pom.xml | 16 +++++----- .../MicroProfileMetricUpdater.java | 14 +++++---- .../MicroProfileMetricsFactory.java | 14 +++++---- .../MicroProfileNodeMetricUpdater.java | 14 +++++---- .../MicroProfileSessionMetricUpdater.java | 14 +++++---- .../microprofile/MicroProfileTags.java | 14 +++++---- .../MicroProfileMetricsFactoryTest.java | 14 +++++---- .../MicroProfileNodeMetricsUpdaterTest.java | 14 +++++---- osgi-tests/README.md | 2 +- osgi-tests/pom.xml | 16 +++++----- .../driver/api/osgi/CustomRetryPolicy.java | 14 +++++---- .../api/osgi/service/MailboxException.java | 14 +++++---- .../api/osgi/service/MailboxMessage.java | 14 +++++---- .../api/osgi/service/MailboxService.java | 14 +++++---- .../osgi/service/geo/GeoMailboxMessage.java | 14 +++++---- .../osgi/service/geo/GeoMailboxService.java | 14 +++++---- .../service/graph/GraphMailboxService.java | 14 +++++---- .../reactive/ReactiveMailboxService.java | 14 +++++---- .../internal/osgi/MailboxActivator.java | 14 +++++---- .../internal/osgi/service/MailboxMapper.java | 14 +++++---- .../osgi/service/MailboxMessageDao.java | 14 +++++---- .../osgi/service/MailboxServiceImpl.java | 14 +++++---- 
.../osgi/service/geo/GeoMailboxMapper.java | 14 +++++---- .../service/geo/GeoMailboxMessageDao.java | 14 +++++---- .../service/geo/GeoMailboxServiceImpl.java | 14 +++++---- .../graph/GraphMailboxServiceImpl.java | 14 +++++---- .../reactive/ReactiveMailboxMapper.java | 14 +++++---- .../reactive/ReactiveMailboxMessageDao.java | 14 +++++---- .../reactive/ReactiveMailboxServiceImpl.java | 14 +++++---- .../osgi/OsgiCustomLoadBalancingPolicyIT.java | 14 +++++---- .../driver/internal/osgi/OsgiDefaultIT.java | 14 +++++---- .../driver/internal/osgi/OsgiGeoTypesIT.java | 14 +++++---- .../oss/driver/internal/osgi/OsgiGraphIT.java | 14 +++++---- .../oss/driver/internal/osgi/OsgiLz4IT.java | 14 +++++---- .../driver/internal/osgi/OsgiReactiveIT.java | 14 +++++---- .../driver/internal/osgi/OsgiShadedIT.java | 14 +++++---- .../driver/internal/osgi/OsgiSnappyIT.java | 14 +++++---- .../osgi/checks/DefaultServiceChecks.java | 14 +++++---- .../osgi/checks/GeoServiceChecks.java | 14 +++++---- .../osgi/checks/GraphServiceChecks.java | 14 +++++---- .../osgi/checks/ReactiveServiceChecks.java | 14 +++++---- .../internal/osgi/support/BundleOptions.java | 14 +++++---- .../osgi/support/CcmExamReactorFactory.java | 14 +++++---- .../internal/osgi/support/CcmPaxExam.java | 14 +++++---- .../osgi/support/CcmStagedReactor.java | 14 +++++---- osgi-tests/src/test/resources/exam.properties | 14 +++++---- .../src/test/resources/logback-test.xml | 14 +++++---- performance/README.md | 2 +- pom.xml | 31 ++++++++++--------- query-builder/pom.xml | 16 +++++----- .../api/querybuilder/DseQueryBuilder.java | 14 +++++---- .../api/querybuilder/DseSchemaBuilder.java | 14 +++++---- .../driver/api/querybuilder/package-info.java | 14 +++++---- .../querybuilder/schema/AlterDseKeyspace.java | 14 +++++---- .../schema/AlterDseKeyspaceStart.java | 14 +++++---- .../schema/AlterDseTableAddColumn.java | 14 +++++---- .../schema/AlterDseTableAddColumnEnd.java | 14 +++++---- .../schema/AlterDseTableDropColumn.java | 14 
+++++---- .../schema/AlterDseTableDropColumnEnd.java | 14 +++++---- .../schema/AlterDseTableRenameColumn.java | 14 +++++---- .../schema/AlterDseTableRenameColumnEnd.java | 14 +++++---- .../schema/AlterDseTableStart.java | 14 +++++---- .../schema/AlterDseTableWithOptions.java | 14 +++++---- .../schema/AlterDseTableWithOptionsEnd.java | 14 +++++---- .../schema/CreateDseAggregateEnd.java | 14 +++++---- .../schema/CreateDseAggregateStart.java | 14 +++++---- .../schema/CreateDseAggregateStateFunc.java | 14 +++++---- .../schema/CreateDseFunctionEnd.java | 14 +++++---- .../schema/CreateDseFunctionStart.java | 14 +++++---- .../schema/CreateDseFunctionWithLanguage.java | 14 +++++---- .../CreateDseFunctionWithNullOption.java | 14 +++++---- .../schema/CreateDseFunctionWithType.java | 14 +++++---- .../schema/CreateDseKeyspace.java | 14 +++++---- .../schema/CreateDseKeyspaceStart.java | 14 +++++---- .../querybuilder/schema/CreateDseTable.java | 14 +++++---- .../schema/CreateDseTableStart.java | 14 +++++---- .../schema/CreateDseTableWithOptions.java | 14 +++++---- .../querybuilder/schema/DseGraphEdgeSide.java | 14 +++++---- .../schema/DseRelationOptions.java | 14 +++++---- .../schema/DseRelationStructure.java | 14 +++++---- .../schema/DseTableGraphOptions.java | 14 +++++---- .../schema/OngoingDsePartitionKey.java | 14 +++++---- .../api/querybuilder/schema/package-info.java | 14 +++++---- .../schema/DefaultAlterDseKeyspace.java | 14 +++++---- .../schema/DefaultAlterDseTable.java | 14 +++++---- .../schema/DefaultCreateDseAggregate.java | 14 +++++---- .../schema/DefaultCreateDseFunction.java | 14 +++++---- .../schema/DefaultCreateDseKeyspace.java | 14 +++++---- .../schema/DefaultCreateDseTable.java | 14 +++++---- .../schema/DefaultDseGraphEdgeSide.java | 14 +++++---- .../schema/DseTableEdgeOperation.java | 14 +++++---- .../schema/DseTableGraphOperationType.java | 14 +++++---- .../schema/DseTableVertexOperation.java | 14 +++++---- .../querybuilder/schema/package-info.java | 14 
+++++---- .../driver/api/querybuilder/BindMarker.java | 14 +++++---- .../api/querybuilder/BuildableQuery.java | 14 +++++---- .../driver/api/querybuilder/CqlSnippet.java | 14 +++++---- .../oss/driver/api/querybuilder/Literal.java | 14 +++++---- .../driver/api/querybuilder/QueryBuilder.java | 14 +++++---- .../oss/driver/api/querybuilder/Raw.java | 14 +++++---- .../api/querybuilder/SchemaBuilder.java | 14 +++++---- .../api/querybuilder/condition/Condition.java | 14 +++++---- .../condition/ConditionBuilder.java | 14 +++++---- .../condition/ConditionalStatement.java | 14 +++++---- .../api/querybuilder/delete/Delete.java | 14 +++++---- .../querybuilder/delete/DeleteSelection.java | 14 +++++---- .../api/querybuilder/insert/Insert.java | 14 +++++---- .../api/querybuilder/insert/InsertInto.java | 14 +++++---- .../api/querybuilder/insert/JsonInsert.java | 14 +++++---- .../querybuilder/insert/OngoingValues.java | 14 +++++---- .../querybuilder/insert/RegularInsert.java | 14 +++++---- .../relation/ArithmeticRelationBuilder.java | 14 +++++---- .../ColumnComponentRelationBuilder.java | 14 +++++---- .../relation/ColumnRelationBuilder.java | 14 +++++---- .../relation/InRelationBuilder.java | 14 +++++---- .../relation/MultiColumnRelationBuilder.java | 14 +++++---- .../relation/OngoingWhereClause.java | 14 +++++---- .../api/querybuilder/relation/Relation.java | 14 +++++---- .../relation/TokenRelationBuilder.java | 14 +++++---- .../querybuilder/schema/AlterKeyspace.java | 14 +++++---- .../schema/AlterKeyspaceStart.java | 14 +++++---- .../schema/AlterMaterializedView.java | 14 +++++---- .../schema/AlterMaterializedViewStart.java | 14 +++++---- .../schema/AlterTableAddColumn.java | 14 +++++---- .../schema/AlterTableAddColumnEnd.java | 14 +++++---- .../schema/AlterTableDropColumn.java | 14 +++++---- .../schema/AlterTableDropColumnEnd.java | 14 +++++---- .../schema/AlterTableRenameColumn.java | 14 +++++---- .../schema/AlterTableRenameColumnEnd.java | 14 +++++---- 
.../querybuilder/schema/AlterTableStart.java | 14 +++++---- .../schema/AlterTableWithOptions.java | 14 +++++---- .../schema/AlterTableWithOptionsEnd.java | 14 +++++---- .../schema/AlterTypeRenameField.java | 14 +++++---- .../schema/AlterTypeRenameFieldEnd.java | 14 +++++---- .../querybuilder/schema/AlterTypeStart.java | 14 +++++---- .../schema/CreateAggregateEnd.java | 14 +++++---- .../schema/CreateAggregateStart.java | 14 +++++---- .../schema/CreateAggregateStateFunc.java | 14 +++++---- .../schema/CreateFunctionEnd.java | 14 +++++---- .../schema/CreateFunctionStart.java | 14 +++++---- .../schema/CreateFunctionWithLanguage.java | 14 +++++---- .../schema/CreateFunctionWithNullOption.java | 14 +++++---- .../schema/CreateFunctionWithType.java | 14 +++++---- .../api/querybuilder/schema/CreateIndex.java | 14 +++++---- .../schema/CreateIndexOnTable.java | 14 +++++---- .../querybuilder/schema/CreateIndexStart.java | 14 +++++---- .../querybuilder/schema/CreateKeyspace.java | 14 +++++---- .../schema/CreateKeyspaceStart.java | 14 +++++---- .../schema/CreateMaterializedView.java | 14 +++++---- .../CreateMaterializedViewPrimaryKey.java | 14 +++++---- ...CreateMaterializedViewPrimaryKeyStart.java | 14 +++++---- .../CreateMaterializedViewSelection.java | 14 +++++---- ...eMaterializedViewSelectionWithColumns.java | 14 +++++---- .../schema/CreateMaterializedViewStart.java | 14 +++++---- .../schema/CreateMaterializedViewWhere.java | 14 +++++---- .../CreateMaterializedViewWhereStart.java | 14 +++++---- .../api/querybuilder/schema/CreateTable.java | 14 +++++---- .../querybuilder/schema/CreateTableStart.java | 14 +++++---- .../schema/CreateTableWithOptions.java | 14 +++++---- .../api/querybuilder/schema/CreateType.java | 14 +++++---- .../querybuilder/schema/CreateTypeStart.java | 14 +++++---- .../driver/api/querybuilder/schema/Drop.java | 14 +++++---- .../querybuilder/schema/KeyspaceOptions.java | 14 +++++---- .../schema/KeyspaceReplicationOptions.java | 14 +++++---- 
.../schema/OngoingCreateType.java | 14 +++++---- .../schema/OngoingPartitionKey.java | 14 +++++---- .../querybuilder/schema/OptionProvider.java | 14 +++++---- .../querybuilder/schema/RelationOptions.java | 14 +++++---- .../schema/RelationStructure.java | 14 +++++---- .../schema/compaction/CompactionStrategy.java | 14 +++++---- .../compaction/LeveledCompactionStrategy.java | 14 +++++---- .../SizeTieredCompactionStrategy.java | 14 +++++---- .../TimeWindowCompactionStrategy.java | 14 +++++---- .../querybuilder/select/OngoingSelection.java | 14 +++++---- .../api/querybuilder/select/Select.java | 14 +++++---- .../api/querybuilder/select/SelectFrom.java | 14 +++++---- .../api/querybuilder/select/Selector.java | 14 +++++---- .../driver/api/querybuilder/term/Term.java | 14 +++++---- .../api/querybuilder/truncate/Truncate.java | 14 +++++---- .../api/querybuilder/update/Assignment.java | 14 +++++---- .../update/OngoingAssignment.java | 14 +++++---- .../api/querybuilder/update/Update.java | 14 +++++---- .../api/querybuilder/update/UpdateStart.java | 14 +++++---- .../update/UpdateWithAssignments.java | 14 +++++---- .../querybuilder/ArithmeticOperator.java | 14 +++++---- .../internal/querybuilder/CqlHelper.java | 14 +++++---- .../internal/querybuilder/DefaultLiteral.java | 14 +++++---- .../internal/querybuilder/DefaultRaw.java | 14 +++++---- .../querybuilder/ImmutableCollections.java | 14 +++++---- .../condition/DefaultCondition.java | 14 +++++---- .../condition/DefaultConditionBuilder.java | 14 +++++---- .../querybuilder/delete/DefaultDelete.java | 14 +++++---- .../querybuilder/insert/DefaultInsert.java | 14 +++++---- .../lhs/ColumnComponentLeftOperand.java | 14 +++++---- .../querybuilder/lhs/ColumnLeftOperand.java | 14 +++++---- .../querybuilder/lhs/FieldLeftOperand.java | 14 +++++---- .../querybuilder/lhs/LeftOperand.java | 14 +++++---- .../querybuilder/lhs/TokenLeftOperand.java | 14 +++++---- .../querybuilder/lhs/TupleLeftOperand.java | 14 +++++---- 
.../relation/CustomIndexRelation.java | 14 +++++---- ...DefaultColumnComponentRelationBuilder.java | 14 +++++---- .../DefaultColumnRelationBuilder.java | 14 +++++---- .../DefaultMultiColumnRelationBuilder.java | 14 +++++---- .../relation/DefaultRelation.java | 14 +++++---- .../relation/DefaultTokenRelationBuilder.java | 14 +++++---- .../schema/DefaultAlterKeyspace.java | 14 +++++---- .../schema/DefaultAlterMaterializedView.java | 14 +++++---- .../schema/DefaultAlterTable.java | 14 +++++---- .../querybuilder/schema/DefaultAlterType.java | 14 +++++---- .../schema/DefaultCreateAggregate.java | 14 +++++---- .../schema/DefaultCreateFunction.java | 14 +++++---- .../schema/DefaultCreateIndex.java | 14 +++++---- .../schema/DefaultCreateKeyspace.java | 14 +++++---- .../schema/DefaultCreateMaterializedView.java | 14 +++++---- .../schema/DefaultCreateTable.java | 14 +++++---- .../schema/DefaultCreateType.java | 14 +++++---- .../querybuilder/schema/DefaultDrop.java | 14 +++++---- .../schema/DefaultDropKeyspace.java | 14 +++++---- .../querybuilder/schema/OptionsUtils.java | 14 +++++---- .../internal/querybuilder/schema/Utils.java | 14 +++++---- .../compaction/DefaultCompactionStrategy.java | 14 +++++---- .../DefaultLeveledCompactionStrategy.java | 14 +++++---- .../DefaultSizeTieredCompactionStrategy.java | 14 +++++---- .../DefaultTimeWindowCompactionStrategy.java | 14 +++++---- .../querybuilder/select/AllSelector.java | 14 +++++---- .../select/ArithmeticSelector.java | 14 +++++---- .../select/BinaryArithmeticSelector.java | 14 +++++---- .../querybuilder/select/CastSelector.java | 14 +++++---- .../select/CollectionSelector.java | 14 +++++---- .../querybuilder/select/ColumnSelector.java | 14 +++++---- .../querybuilder/select/CountAllSelector.java | 14 +++++---- .../select/DefaultBindMarker.java | 14 +++++---- .../querybuilder/select/DefaultSelect.java | 14 +++++---- .../querybuilder/select/ElementSelector.java | 14 +++++---- .../querybuilder/select/FieldSelector.java | 14 
+++++---- .../querybuilder/select/FunctionSelector.java | 14 +++++---- .../querybuilder/select/ListSelector.java | 14 +++++---- .../querybuilder/select/MapSelector.java | 14 +++++---- .../querybuilder/select/OppositeSelector.java | 14 +++++---- .../querybuilder/select/RangeSelector.java | 14 +++++---- .../querybuilder/select/SetSelector.java | 14 +++++---- .../querybuilder/select/TupleSelector.java | 14 +++++---- .../querybuilder/select/TypeHintSelector.java | 14 +++++---- .../querybuilder/term/ArithmeticTerm.java | 14 +++++---- .../term/BinaryArithmeticTerm.java | 14 +++++---- .../querybuilder/term/FunctionTerm.java | 14 +++++---- .../querybuilder/term/OppositeTerm.java | 14 +++++---- .../internal/querybuilder/term/TupleTerm.java | 14 +++++---- .../querybuilder/term/TypeHintTerm.java | 14 +++++---- .../truncate/DefaultTruncate.java | 14 +++++---- .../querybuilder/update/AppendAssignment.java | 14 +++++---- .../update/AppendListElementAssignment.java | 14 +++++---- .../update/AppendMapEntryAssignment.java | 14 +++++---- .../update/AppendSetElementAssignment.java | 14 +++++---- .../update/CollectionAssignment.java | 14 +++++---- .../update/CollectionElementAssignment.java | 14 +++++---- .../update/CounterAssignment.java | 14 +++++---- .../update/DecrementAssignment.java | 14 +++++---- .../update/DefaultAssignment.java | 14 +++++---- .../querybuilder/update/DefaultUpdate.java | 14 +++++---- .../update/IncrementAssignment.java | 14 +++++---- .../update/PrependAssignment.java | 14 +++++---- .../update/PrependListElementAssignment.java | 14 +++++---- .../update/PrependMapEntryAssignment.java | 14 +++++---- .../update/PrependSetElementAssignment.java | 14 +++++---- .../querybuilder/update/RemoveAssignment.java | 14 +++++---- .../update/RemoveListElementAssignment.java | 14 +++++---- .../update/RemoveMapEntryAssignment.java | 14 +++++---- .../update/RemoveSetElementAssignment.java | 14 +++++---- .../driver/api/querybuilder/Assertions.java | 14 +++++---- 
.../querybuilder/BuildableQueryAssert.java | 14 +++++---- .../api/querybuilder/CqlSnippetAssert.java | 14 +++++---- .../schema/AlterDseKeyspaceTest.java | 14 +++++---- .../schema/AlterDseTableTest.java | 14 +++++---- .../schema/CreateDseKeyspaceTest.java | 14 +++++---- .../schema/CreateDseTableTest.java | 14 +++++---- .../querybuilder/DependencyCheckTest.java | 14 +++++---- .../schema/CreateDseAggregateTest.java | 14 +++++---- .../schema/CreateDseFunctionTest.java | 14 +++++---- .../driver/api/querybuilder/Assertions.java | 14 +++++---- .../querybuilder/BuildableQueryAssert.java | 14 +++++---- .../api/querybuilder/BuildableQueryTest.java | 14 +++++---- .../driver/api/querybuilder/CharsetCodec.java | 14 +++++---- .../api/querybuilder/CqlSnippetAssert.java | 14 +++++---- .../api/querybuilder/TokenLiteralTest.java | 14 +++++---- .../querybuilder/condition/ConditionTest.java | 14 +++++---- .../delete/DeleteFluentConditionTest.java | 14 +++++---- .../delete/DeleteFluentRelationTest.java | 14 +++++---- .../delete/DeleteIdempotenceTest.java | 14 +++++---- .../delete/DeleteSelectorTest.java | 14 +++++---- .../delete/DeleteTimestampTest.java | 14 +++++---- .../insert/InsertIdempotenceTest.java | 14 +++++---- .../querybuilder/insert/JsonInsertTest.java | 14 +++++---- .../insert/RegularInsertTest.java | 14 +++++---- .../querybuilder/relation/RelationTest.java | 14 +++++---- .../api/querybuilder/relation/TermTest.java | 14 +++++---- .../schema/AlterKeyspaceTest.java | 14 +++++---- .../schema/AlterMaterializedViewTest.java | 14 +++++---- .../querybuilder/schema/AlterTableTest.java | 14 +++++---- .../querybuilder/schema/AlterTypeTest.java | 14 +++++---- .../schema/CreateAggregateTest.java | 14 +++++---- .../schema/CreateFunctionTest.java | 14 +++++---- .../querybuilder/schema/CreateIndexTest.java | 14 +++++---- .../schema/CreateKeyspaceTest.java | 14 +++++---- .../schema/CreateMaterializedViewTest.java | 14 +++++---- .../querybuilder/schema/CreateTableTest.java | 14 +++++---- 
.../querybuilder/schema/CreateTypeTest.java | 14 +++++---- .../schema/DropAggregateTest.java | 14 +++++---- .../querybuilder/schema/DropFunctionTest.java | 14 +++++---- .../querybuilder/schema/DropIndexTest.java | 14 +++++---- .../querybuilder/schema/DropKeyspaceTest.java | 14 +++++---- .../schema/DropMaterializedViewTest.java | 14 +++++---- .../querybuilder/schema/DropTableTest.java | 14 +++++---- .../api/querybuilder/schema/DropTypeTest.java | 14 +++++---- .../select/SelectAllowFilteringTest.java | 14 +++++---- .../select/SelectFluentRelationTest.java | 14 +++++---- .../select/SelectGroupByTest.java | 14 +++++---- .../querybuilder/select/SelectLimitTest.java | 14 +++++---- .../select/SelectOrderingTest.java | 14 +++++---- .../select/SelectSelectorTest.java | 14 +++++---- .../querybuilder/truncate/TruncateTest.java | 14 +++++---- .../update/UpdateFluentAssignmentTest.java | 14 +++++---- .../update/UpdateFluentConditionTest.java | 14 +++++---- .../update/UpdateFluentRelationTest.java | 14 +++++---- .../update/UpdateIdempotenceTest.java | 14 +++++---- .../querybuilder/update/UpdateUsingTest.java | 14 +++++---- .../src/test/resources/project.properties | 14 +++++---- test-infra/pom.xml | 16 +++++----- .../api/testinfra/CassandraRequirement.java | 14 +++++---- .../api/testinfra/CassandraResourceRule.java | 14 +++++---- .../driver/api/testinfra/DseRequirement.java | 14 +++++---- .../driver/api/testinfra/ccm/BaseCcmRule.java | 14 +++++---- .../driver/api/testinfra/ccm/CcmBridge.java | 14 +++++---- .../oss/driver/api/testinfra/ccm/CcmRule.java | 14 +++++---- .../api/testinfra/ccm/CustomCcmRule.java | 14 +++++---- .../DefaultCcmBridgeBuilderCustomizer.java | 14 +++++---- .../loadbalancing/NodeComparator.java | 14 +++++---- .../SortingLoadBalancingPolicy.java | 14 +++++---- .../requirement/BackendRequirement.java | 14 +++++---- .../requirement/BackendRequirementRule.java | 14 +++++---- .../requirement/BackendRequirements.java | 14 +++++---- 
.../testinfra/requirement/BackendType.java | 14 +++++---- .../requirement/VersionRequirement.java | 14 +++++---- .../session/CqlSessionRuleBuilder.java | 14 +++++---- .../api/testinfra/session/SessionRule.java | 14 +++++---- .../testinfra/session/SessionRuleBuilder.java | 14 +++++---- .../api/testinfra/session/SessionUtils.java | 14 +++++---- .../testinfra/simulacron/QueryCounter.java | 14 +++++---- .../testinfra/simulacron/SimulacronRule.java | 14 +++++---- .../api/testinfra/utils/ConditionChecker.java | 14 +++++---- .../driver/api/testinfra/utils/NodeUtils.java | 14 +++++---- .../oss/driver/assertions/Assertions.java | 14 +++++---- .../driver/assertions/NodeMetadataAssert.java | 14 +++++---- .../oss/driver/categories/IsolatedTests.java | 14 +++++---- .../categories/ParallelizableTests.java | 14 +++++---- .../requirement/VersionRequirementTest.java | 14 +++++---- upgrade_guide/README.md | 6 ++-- 1907 files changed, 15235 insertions(+), 11430 deletions(-) create mode 100644 NOTICE.txt diff --git a/NOTICE.txt b/NOTICE.txt new file mode 100644 index 00000000000..477f0645ed9 --- /dev/null +++ b/NOTICE.txt @@ -0,0 +1,20 @@ +Apache Cassandra Java Driver +Copyright 2012- The Apache Software Foundation + +This product includes software developed by The Apache Software +Foundation (http://www.apache.org/). + +JNR project +Copyright (C) 2008-2010 Wayne Meissner +This product includes software developed as part of the JNR project ( https://github.com/jnr/jnr-ffi )s. +see core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java + +Protocol Buffers +Copyright 2008 Google Inc. +This product includes software developed as part of the Protocol Buffers project ( https://developers.google.com/protocol-buffers/ ). +see core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java + +Guava +Copyright (C) 2007 The Guava Authors +This product includes software developed as part of the Guava project ( https://guava.dev ). 
+see core/src/main/java/com/datastax/oss/driver/internal/core/util/CountingIterator.java \ No newline at end of file diff --git a/README.md b/README.md index e8a85027ab9..78aee1887db 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# DataStax Java Driver for Apache Cassandra® +# Java Driver for Apache Cassandra® [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core) @@ -66,14 +66,14 @@ offering. See the dedicated [manual page](manual/cloud/) for more details. ## Migrating from previous versions -Java driver 4 is **not binary compatible** with previous versions. However, most of the concepts +Java Driver 4 is **not binary compatible** with previous versions. However, most of the concepts remain unchanged, and the new API will look very familiar to 2.x and 3.x users. See the [upgrade guide](upgrade_guide/) for details. ## Error Handling -See the [Cassandra error handling done right blog](https://www.datastax.com/blog/cassandra-error-handling-done-right) for error handling with the DataStax Java Driver for Apache Cassandra™. +See the [Cassandra error handling done right blog](https://www.datastax.com/blog/cassandra-error-handling-done-right) for error handling with the Java Driver for Apache Cassandra™. ## Useful links @@ -81,7 +81,7 @@ See the [Cassandra error handling done right blog](https://www.datastax.com/blog * [API docs] * Bug tracking: [JIRA] * [Mailing list] -* Twitter: [@dsJavaDriver] tweets Java driver releases and important announcements (low frequency). +* Twitter: [@dsJavaDriver] tweets Java Driver releases and important announcements (low frequency). [@DataStaxEng] has more news, including other drivers, Cassandra, and DSE. 
* [Changelog] * [FAQ] diff --git a/bom/pom.xml b/bom/pom.xml index a60b9903fdc..33c454fcf75 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -1,13 +1,15 @@ jar - DataStax Java driver for Apache Cassandra(R) - binary distribution + Apache Cassandra Java Driver - binary distribution + + 4.0.0 + + com.datastax.oss + java-driver-parent + 4.17.1-SNAPSHOT + + java-driver-distribution-tests + Apache Cassandra Java Driver - distribution tests + + + + ${project.groupId} + java-driver-bom + ${project.version} + pom + import + + + + + + com.datastax.oss + java-driver-test-infra + test + + + com.datastax.oss + java-driver-query-builder + test + + + com.datastax.oss + java-driver-mapper-processor + test + + + com.datastax.oss + java-driver-mapper-runtime + test + + + com.datastax.oss + java-driver-core + test + + + com.datastax.oss + java-driver-metrics-micrometer + test + + + com.datastax.oss + java-driver-metrics-microprofile + test + + + junit + junit + test + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + ${testing.jvm}/bin/java + ${mockitoopens.argline} + 1 + + + + org.revapi + revapi-maven-plugin + + true + + + + maven-install-plugin + + true + + + + maven-deploy-plugin + + true + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + + diff --git a/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/DriverDependencyTest.java b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/DriverDependencyTest.java new file mode 100644 index 00000000000..16952e3d771 --- /dev/null +++ b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/DriverDependencyTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.api.mapper.MapperBuilder; +import com.datastax.oss.driver.api.querybuilder.QueryBuilder; +import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; +import com.datastax.oss.driver.internal.core.util.Reflection; +import com.datastax.oss.driver.internal.mapper.processor.MapperProcessor; +import com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory; +import com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory; +import org.junit.Test; + +public class DriverDependencyTest { + @Test + public void should_include_core_jar() { + assertThat(Reflection.loadClass(null, "com.datastax.oss.driver.api.core.session.Session")) + .isEqualTo(Session.class); + } + + @Test + public void should_include_query_builder_jar() { + assertThat(Reflection.loadClass(null, "com.datastax.oss.driver.api.querybuilder.QueryBuilder")) + .isEqualTo(QueryBuilder.class); + } + + @Test + public void should_include_mapper_processor_jar() { + assertThat( + Reflection.loadClass( + null, "com.datastax.oss.driver.internal.mapper.processor.MapperProcessor")) + .isEqualTo(MapperProcessor.class); + } + + @Test + public void should_include_mapper_runtime_jar() { + 
assertThat(Reflection.loadClass(null, "com.datastax.oss.driver.api.mapper.MapperBuilder")) + .isEqualTo(MapperBuilder.class); + } + + @Test + public void should_include_metrics_micrometer_jar() { + assertThat( + Reflection.loadClass( + null, + "com.datastax.oss.driver.internal.metrics.micrometer.MicrometerMetricsFactory")) + .isEqualTo(MicrometerMetricsFactory.class); + } + + @Test + public void should_include_metrics_microprofile_jar() { + assertThat( + Reflection.loadClass( + null, + "com.datastax.oss.driver.internal.metrics.microprofile.MicroProfileMetricsFactory")) + .isEqualTo(MicroProfileMetricsFactory.class); + } + + @Test + public void should_include_test_infra_jar() { + assertThat( + Reflection.loadClass( + null, "com.datastax.oss.driver.api.testinfra.CassandraResourceRule")) + .isEqualTo(CassandraResourceRule.class); + } +} diff --git a/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/OptionalDependencyTest.java b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/OptionalDependencyTest.java new file mode 100644 index 00000000000..28626413487 --- /dev/null +++ b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/OptionalDependencyTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.internal.core.util.Dependency; +import com.datastax.oss.driver.internal.core.util.Reflection; +import org.junit.Test; + +public class OptionalDependencyTest { + @Test + public void should_not_include_snappy_jar() { + Dependency.SNAPPY + .classes() + .forEach(clazz -> assertThat(Reflection.loadClass(null, clazz)).isNull()); + } + + @Test + public void should_not_include_l4z_jar() { + Dependency.LZ4 + .classes() + .forEach(clazz -> assertThat(Reflection.loadClass(null, clazz)).isNull()); + } + + @Test + public void should_not_include_esri_jar() { + Dependency.ESRI + .classes() + .forEach(clazz -> assertThat(Reflection.loadClass(null, clazz)).isNull()); + } + + @Test + public void should_not_include_tinkerpop_jar() { + Dependency.TINKERPOP + .classes() + .forEach(clazz -> assertThat(Reflection.loadClass(null, clazz)).isNull()); + } +} diff --git a/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/ProvidedDependencyTest.java b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/ProvidedDependencyTest.java new file mode 100644 index 00000000000..1070bbc2fb1 --- /dev/null +++ b/distribution-tests/src/test/java/com/datastax/oss/driver/api/core/ProvidedDependencyTest.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.internal.core.util.Reflection; +import org.junit.Test; + +public class ProvidedDependencyTest { + @Test + public void should_not_include_graal_sdk_jar() { + assertThat(Reflection.loadClass(null, "org.graalvm.nativeimage.VMRuntime")).isNull(); + } + + @Test + public void should_not_include_spotbugs_annotations_jar() { + assertThat(Reflection.loadClass(null, "edu.umd.cs.findbugs.annotations.NonNull")).isNull(); + } + + @Test + public void should_not_include_jicp_annotations_jar() { + assertThat(Reflection.loadClass(null, "net.jcip.annotations.ThreadSafe")).isNull(); + } + + @Test + public void should_not_include_blockhound_jar() { + assertThat(Reflection.loadClass(null, "reactor.blockhound.BlockHoundRuntime")).isNull(); + } +} diff --git a/examples/pom.xml b/examples/pom.xml index ec87d205ad8..a597f634d9a 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -142,6 +142,11 @@ io.projectreactor reactor-core + + com.github.spotbugs + spotbugs-annotations + provided + diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 73c7e77b2c4..db77efb5166 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -83,6 +83,11 @@ java-driver-metrics-microprofile test + + com.github.stephenc.jcip + jcip-annotations + test + com.github.spotbugs spotbugs-annotations diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 37e04b230a2..16ed68f9e9b 100644 --- 
a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -610,25 +610,22 @@ The driver team uses annotations to document certain aspects of the code: * nullability with [SpotBugs](https://spotbugs.github.io/) annotations `@Nullable` and `@NonNull`. This is mostly used during development; while these annotations are retained in class files, they -serve no purpose at runtime. If you want to minimize the number of JARs in your classpath, you can -exclude them: +serve no purpose at runtime. This class is an optional dependency of the driver. If you wish to +make use of these annotations in your own code you have to explicitly depend on these jars: ```xml - - com.datastax.oss - java-driver-core - ${driver.version} - - - com.github.stephenc.jcip - jcip-annotations - - - com.github.spotbugs - spotbugs-annotations - - - + + + com.github.stephenc.jcip + jcip-annotations + 1.0-1 + + + com.github.spotbugs + spotbugs-annotations + 3.1.12 + + ``` However, there is one case when excluding those dependencies won't work: if you use [annotation diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 9f6c2572554..f9814b3dea4 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -54,10 +54,12 @@ com.github.stephenc.jcip jcip-annotations + provided com.github.spotbugs spotbugs-annotations + provided com.google.testing.compile diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 0b9bf61928f..3957bbe1505 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -51,10 +51,12 @@ com.github.stephenc.jcip jcip-annotations + provided com.github.spotbugs spotbugs-annotations + provided junit diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index e7751bafa61..1c28b636b86 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -59,6 +59,16 @@ + + com.github.stephenc.jcip + jcip-annotations + provided + + + com.github.spotbugs + spotbugs-annotations + provided + ch.qos.logback 
logback-classic diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 15b2818141d..0d2d5873330 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -59,6 +59,16 @@ + + com.github.stephenc.jcip + jcip-annotations + provided + + + com.github.spotbugs + spotbugs-annotations + provided + io.smallrye smallrye-metrics diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 366555fd995..a5085050930 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -56,9 +56,15 @@ com.datastax.oss java-driver-mapper-runtime + + com.github.stephenc.jcip + jcip-annotations + provided + com.github.spotbugs spotbugs-annotations + provided ch.qos.logback diff --git a/pom.xml b/pom.xml index 2d366502f3e..71ecd2a7915 100644 --- a/pom.xml +++ b/pom.xml @@ -39,6 +39,7 @@ integration-tests osgi-tests distribution + distribution-tests examples bom diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 504596140d6..5ecbebf367b 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -51,10 +51,12 @@ com.github.stephenc.jcip jcip-annotations + provided com.github.spotbugs spotbugs-annotations + provided junit diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 21f8605a441..cf1da84f7dd 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -48,7 +48,7 @@ com.github.spotbugs spotbugs-annotations - true + provided junit From e4429a20e4620739297933b54edba5cfb874de92 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Fri, 10 Nov 2023 13:46:58 -0800 Subject: [PATCH 870/979] Changes as per RAT patch by Claude Warren; reviewed by Henry Hughes, Mick Semb Wever for CASSANDRA-18969 --- .asf.yaml | 17 +++++++++++++++ .snyk | 16 ++++++++++++++ .snyk.ignore.example | 9 -------- .travis.yml | 17 +++++++++++++++ CONTRIBUTING.md | 19 +++++++++++++++++ Jenkinsfile | 18 ++++++++++++++++ changelog/README.md | 19 +++++++++++++++++ core/console.scala | 21 ++++++++++++++++++- .../java-driver-core/native-image.properties | 17 
+++++++++++++++ core/src/main/resources/reference.conf | 17 +++++++++++++++ core/src/test/resources/application.conf | 19 ++++++++++++++++- .../resources/config/customApplication.conf | 17 +++++++++++++++ docs.yaml | 17 +++++++++++++++ examples/README.md | 19 +++++++++++++++++ examples/src/main/resources/application.conf | 19 ++++++++++++++++- .../src/main/resources/killrvideo_schema.cql | 19 +++++++++++++++++ faq/README.md | 19 +++++++++++++++++ install-snapshots.sh | 16 ++++++++++++++ .../src/test/resources/DescribeIT/dse/4.8.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/dse/5.0.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/dse/5.1.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/dse/6.8.cql | 19 +++++++++++++++++ .../src/test/resources/DescribeIT/oss/2.1.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/oss/2.2.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/oss/3.0.cql | 21 ++++++++++++++++++- .../test/resources/DescribeIT/oss/3.11.cql | 21 ++++++++++++++++++- .../src/test/resources/DescribeIT/oss/4.0.cql | 19 +++++++++++++++++ .../src/test/resources/application.conf | 19 ++++++++++++++++- manual/README.md | 21 ++++++++++++++++++- manual/api_conventions/README.md | 21 ++++++++++++++++++- manual/case_sensitivity/README.md | 21 ++++++++++++++++++- manual/cloud/README.md | 19 +++++++++++++++++ manual/core/README.md | 21 ++++++++++++++++++- manual/core/address_resolution/README.md | 19 +++++++++++++++++ manual/core/async/README.md | 21 ++++++++++++++++++- manual/core/authentication/README.md | 21 ++++++++++++++++++- manual/core/bom/README.md | 19 +++++++++++++++++ manual/core/compression/README.md | 21 ++++++++++++++++++- manual/core/configuration/README.md | 19 +++++++++++++++++ .../core/configuration/reference/README.rst | 18 ++++++++++++++++ manual/core/control_connection/README.md | 21 ++++++++++++++++++- manual/core/custom_codecs/README.md | 21 ++++++++++++++++++- 
manual/core/detachable_types/README.md | 19 +++++++++++++++++ manual/core/dse/README.md | 19 +++++++++++++++++ manual/core/dse/geotypes/README.md | 19 +++++++++++++++++ manual/core/dse/graph/README.md | 19 +++++++++++++++++ manual/core/dse/graph/fluent/README.md | 19 +++++++++++++++++ .../core/dse/graph/fluent/explicit/README.md | 19 +++++++++++++++++ .../core/dse/graph/fluent/implicit/README.md | 19 +++++++++++++++++ manual/core/dse/graph/options/README.md | 21 ++++++++++++++++++- manual/core/dse/graph/results/README.md | 21 ++++++++++++++++++- manual/core/dse/graph/script/README.md | 21 ++++++++++++++++++- manual/core/graalvm/README.md | 19 +++++++++++++++++ manual/core/idempotence/README.md | 19 +++++++++++++++++ manual/core/integration/README.md | 19 +++++++++++++++++ manual/core/load_balancing/README.md | 19 +++++++++++++++++ manual/core/logging/README.md | 21 ++++++++++++++++++- manual/core/metadata/README.md | 21 ++++++++++++++++++- manual/core/metadata/node/README.md | 19 +++++++++++++++++ manual/core/metadata/schema/README.md | 19 +++++++++++++++++ manual/core/metadata/token/README.md | 21 ++++++++++++++++++- manual/core/metrics/README.md | 21 ++++++++++++++++++- manual/core/native_protocol/README.md | 19 +++++++++++++++++ manual/core/non_blocking/README.md | 19 +++++++++++++++++ manual/core/paging/README.md | 19 +++++++++++++++++ manual/core/performance/README.md | 21 ++++++++++++++++++- manual/core/pooling/README.md | 21 ++++++++++++++++++- manual/core/query_timestamps/README.md | 19 +++++++++++++++++ manual/core/reactive/README.md | 19 +++++++++++++++++ manual/core/reconnection/README.md | 21 ++++++++++++++++++- manual/core/request_tracker/README.md | 21 ++++++++++++++++++- manual/core/retries/README.md | 19 +++++++++++++++++ manual/core/shaded_jar/README.md | 19 +++++++++++++++++ manual/core/speculative_execution/README.md | 21 ++++++++++++++++++- manual/core/ssl/README.md | 19 +++++++++++++++++ manual/core/statements/README.md | 19 +++++++++++++++++ 
manual/core/statements/batch/README.md | 19 +++++++++++++++++ .../statements/per_query_keyspace/README.md | 21 ++++++++++++++++++- manual/core/statements/prepared/README.md | 19 +++++++++++++++++ manual/core/statements/simple/README.md | 19 +++++++++++++++++ manual/core/temporal_types/README.md | 21 ++++++++++++++++++- manual/core/throttling/README.md | 21 ++++++++++++++++++- manual/core/tracing/README.md | 19 +++++++++++++++++ manual/core/tuples/README.md | 19 +++++++++++++++++ manual/core/udts/README.md | 21 ++++++++++++++++++- manual/developer/README.md | 21 ++++++++++++++++++- manual/developer/admin/README.md | 21 ++++++++++++++++++- manual/developer/common/README.md | 19 +++++++++++++++++ manual/developer/common/concurrency/README.md | 19 +++++++++++++++++ manual/developer/common/context/README.md | 19 +++++++++++++++++ manual/developer/common/event_bus/README.md | 19 +++++++++++++++++ manual/developer/native_protocol/README.md | 19 +++++++++++++++++ manual/developer/netty_pipeline/README.md | 21 ++++++++++++++++++- manual/developer/request_execution/README.md | 19 +++++++++++++++++ manual/mapper/README.md | 19 +++++++++++++++++ manual/mapper/config/README.md | 19 +++++++++++++++++ manual/mapper/config/kotlin/README.md | 19 +++++++++++++++++ manual/mapper/config/lombok/README.md | 19 +++++++++++++++++ manual/mapper/config/record/README.md | 19 +++++++++++++++++ manual/mapper/config/scala/README.md | 19 +++++++++++++++++ manual/mapper/daos/README.md | 19 +++++++++++++++++ manual/mapper/daos/custom_types/README.md | 19 +++++++++++++++++ manual/mapper/daos/delete/README.md | 21 ++++++++++++++++++- manual/mapper/daos/getentity/README.md | 19 +++++++++++++++++ manual/mapper/daos/increment/README.md | 19 +++++++++++++++++ manual/mapper/daos/insert/README.md | 19 +++++++++++++++++ manual/mapper/daos/null_saving/README.md | 19 +++++++++++++++++ manual/mapper/daos/query/README.md | 19 +++++++++++++++++ manual/mapper/daos/queryprovider/README.md | 19 +++++++++++++++++ 
manual/mapper/daos/select/README.md | 19 +++++++++++++++++ manual/mapper/daos/setentity/README.md | 19 +++++++++++++++++ .../daos/statement_attributes/README.md | 21 ++++++++++++++++++- manual/mapper/daos/update/README.md | 19 +++++++++++++++++ manual/mapper/entities/README.md | 19 +++++++++++++++++ manual/mapper/mapper/README.md | 19 +++++++++++++++++ manual/osgi/README.md | 19 +++++++++++++++++ manual/query_builder/README.md | 19 +++++++++++++++++ manual/query_builder/condition/README.md | 19 +++++++++++++++++ manual/query_builder/delete/README.md | 19 +++++++++++++++++ manual/query_builder/idempotence/README.md | 21 ++++++++++++++++++- manual/query_builder/insert/README.md | 21 ++++++++++++++++++- manual/query_builder/relation/README.md | 19 +++++++++++++++++ manual/query_builder/schema/README.md | 19 +++++++++++++++++ .../query_builder/schema/aggregate/README.md | 19 +++++++++++++++++ .../query_builder/schema/function/README.md | 19 +++++++++++++++++ manual/query_builder/schema/index/README.md | 19 +++++++++++++++++ .../query_builder/schema/keyspace/README.md | 19 +++++++++++++++++ .../schema/materialized_view/README.md | 19 +++++++++++++++++ manual/query_builder/schema/table/README.md | 19 +++++++++++++++++ manual/query_builder/schema/type/README.md | 19 +++++++++++++++++ manual/query_builder/select/README.md | 19 +++++++++++++++++ manual/query_builder/term/README.md | 21 ++++++++++++++++++- manual/query_builder/truncate/README.md | 19 +++++++++++++++++ manual/query_builder/update/README.md | 19 +++++++++++++++++ mapper-processor/CONTRIBUTING.md | 19 +++++++++++++++++ .../native-image.properties | 17 +++++++++++++++ .../native-image.properties | 17 +++++++++++++++ osgi-tests/README.md | 21 ++++++++++++++++++- .../src/main/resources/application.conf | 19 ++++++++++++++++- performance/README.md | 19 +++++++++++++++++ performance/duration-test.yaml | 17 +++++++++++++++ performance/graphite-setup.yaml | 17 +++++++++++++++ pre-commit.sh | 16 ++++++++++++++ 
upgrade_guide/README.md | 19 +++++++++++++++++ 144 files changed, 2724 insertions(+), 55 deletions(-) delete mode 100644 .snyk.ignore.example diff --git a/.asf.yaml b/.asf.yaml index c6549f8ee81..5ebca4b6e33 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + notifications: commits: commits@cassandra.apache.org issues: commits@cassandra.apache.org diff --git a/.snyk b/.snyk index 3c6284addca..a081b17225c 100644 --- a/.snyk +++ b/.snyk @@ -1,3 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. version: v1.22.2 # ignores vulnerabilities until expiry date; change duration by modifying expiry date diff --git a/.snyk.ignore.example b/.snyk.ignore.example deleted file mode 100644 index a4690b27223..00000000000 --- a/.snyk.ignore.example +++ /dev/null @@ -1,9 +0,0 @@ -# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. -version: v1.22.2 -# ignores vulnerabilities until expiry date; change duration by modifying expiry date -ignore: - SNYK-PYTHON-URLLIB3-1533435: - - '*': - reason: state your ignore reason here - expires: 2030-01-01T00:00:00.000Z - created: 2022-03-21T00:00:00.000Z \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 7b868941bc3..84d40ce1356 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ language: java dist: trusty sudo: false diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 927c7a7aa8c..53857383cf2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,3 +1,22 @@ + + # Contributing guidelines ## Code formatting diff --git a/Jenkinsfile b/Jenkinsfile index 3ecb70e0d30..c8247769631 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,4 +1,22 @@ #!groovy +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ def initializeEnvironment() { env.DRIVER_DISPLAY_NAME = 'CassandraⓇ Java Driver' diff --git a/changelog/README.md b/changelog/README.md index 54d0d7a6c37..8ff2913b72d 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -1,3 +1,22 @@ + + ## Changelog diff --git a/core/console.scala b/core/console.scala index 0ae13620ff8..491add7edea 100644 --- a/core/console.scala +++ b/core/console.scala @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + /* * Allows quick manual tests from the Scala console: * @@ -36,4 +55,4 @@ println("********************************************") def fire(event: AnyRef)(implicit session: CqlSession): Unit = { session.getContext.asInstanceOf[InternalDriverContext].getEventBus().fire(event) -} \ No newline at end of file +} diff --git a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties index 7900d35f81a..2baa59f3b07 100644 --- a/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties +++ b/core/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-core/native-image.properties @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + Args=-H:IncludeResources=reference\\.conf \ -H:IncludeResources=application\\.conf \ -H:IncludeResources=application\\.json \ diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index d9cd8e079d2..9e4fb9c7948 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # Reference configuration for the Java Driver for Apache Cassandra®. 
# # Unless you use a custom mechanism to load your configuration (see diff --git a/core/src/test/resources/application.conf b/core/src/test/resources/application.conf index 75cd8820639..efea37cc078 100644 --- a/core/src/test/resources/application.conf +++ b/core/src/test/resources/application.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + datastax-java-driver { basic.request.timeout = ${datastax-java-driver.advanced.connection.init-query-timeout} -} \ No newline at end of file +} diff --git a/core/src/test/resources/config/customApplication.conf b/core/src/test/resources/config/customApplication.conf index 92b5f492b9c..c3e3dc7b468 100644 --- a/core/src/test/resources/config/customApplication.conf +++ b/core/src/test/resources/config/customApplication.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + datastax-java-driver { // Check that references to other options in `reference.conf` are correctly resolved basic.request.timeout = ${datastax-java-driver.advanced.connection.init-query-timeout} diff --git a/docs.yaml b/docs.yaml index 0731c398a1b..7c679a0f47e 100644 --- a/docs.yaml +++ b/docs.yaml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ title: Java Driver summary: Java Driver for Apache Cassandra® homepage: http://docs.datastax.com/en/developer/java-driver diff --git a/examples/README.md b/examples/README.md index 5c8df3d2568..9d2210d8a4a 100644 --- a/examples/README.md +++ b/examples/README.md @@ -1,3 +1,22 @@ + + # Java Driver for Apache Cassandra(R) - Examples This module contains examples of how to use the Java Driver for diff --git a/examples/src/main/resources/application.conf b/examples/src/main/resources/application.conf index 12cb19a84d0..170c08d973a 100644 --- a/examples/src/main/resources/application.conf +++ b/examples/src/main/resources/application.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ datastax-java-driver { basic.contact-points = ["127.0.0.1:9042"] basic { @@ -19,4 +36,4 @@ datastax-java-driver { basic.request.timeout = 10 seconds } } -} \ No newline at end of file +} diff --git a/examples/src/main/resources/killrvideo_schema.cql b/examples/src/main/resources/killrvideo_schema.cql index 24728d550d0..0c604ba5922 100644 --- a/examples/src/main/resources/killrvideo_schema.cql +++ b/examples/src/main/resources/killrvideo_schema.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + // User credentials, keyed by email address so we can authenticate CREATE TABLE IF NOT EXISTS user_credentials ( email text, diff --git a/faq/README.md b/faq/README.md index 315bf934cd2..97cb4decd00 100644 --- a/faq/README.md +++ b/faq/README.md @@ -1,3 +1,22 @@ + + ## Frequently asked questions ### I'm modifying a statement and the changes get ignored, why? diff --git a/install-snapshots.sh b/install-snapshots.sh index 4f5d79665ab..795b4098f52 100755 --- a/install-snapshots.sh +++ b/install-snapshots.sh @@ -1,4 +1,20 @@ #!/bin/sh +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. # Install dependencies in the Travis build environment if they are snapshots. # See .travis.yml diff --git a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql index 05408ba0924..35eee187776 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -63,4 +82,4 @@ CREATE TABLE ks_0.ztable ( AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 - AND speculative_retry = '99.0PERCENTILE'; \ No newline at end of file + AND speculative_retry = '99.0PERCENTILE'; diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql index 25b42c58d68..077c9dd1399 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -185,4 +204,4 @@ CREATE AGGREGATE ks_0.mean(int) SFUNC avgstate STYPE tuple FINALFUNC avgfinal - INITCOND (0,0); \ No newline at end of file + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql index 25b42c58d68..077c9dd1399 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -185,4 +204,4 @@ CREATE AGGREGATE ks_0.mean(int) SFUNC avgstate STYPE tuple FINALFUNC avgfinal - INITCOND (0,0); \ No newline at end of file + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql index 416c397ba97..76871de4e1f 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql index 05408ba0924..35eee187776 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -63,4 +82,4 @@ CREATE TABLE ks_0.ztable ( AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 - AND speculative_retry = '99.0PERCENTILE'; \ No newline at end of file + AND speculative_retry = '99.0PERCENTILE'; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql index 5b4442133c3..e35703b30cc 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -110,4 +129,4 @@ CREATE AGGREGATE ks_0.mean(int) SFUNC avgstate STYPE tuple FINALFUNC avgfinal - INITCOND (0,0); \ No newline at end of file + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql index 25b42c58d68..077c9dd1399 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -185,4 +204,4 @@ CREATE AGGREGATE ks_0.mean(int) SFUNC avgstate STYPE tuple FINALFUNC avgfinal - INITCOND (0,0); \ No newline at end of file + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql index 25b42c58d68..077c9dd1399 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( @@ -185,4 +204,4 @@ CREATE AGGREGATE ks_0.mean(int) SFUNC avgstate STYPE tuple FINALFUNC avgfinal - INITCOND (0,0); \ No newline at end of file + INITCOND (0,0); diff --git a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql index 15ff0f5e9dc..a78bed4b816 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; CREATE TYPE ks_0.btype ( diff --git a/integration-tests/src/test/resources/application.conf b/integration-tests/src/test/resources/application.conf index 668a71059cf..f3ab31bcb76 100644 --- a/integration-tests/src/test/resources/application.conf +++ b/integration-tests/src/test/resources/application.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ # Configuration overrides for integration tests datastax-java-driver { basic { @@ -45,4 +62,4 @@ datastax-java-driver { } } } -} \ No newline at end of file +} diff --git a/manual/README.md b/manual/README.md index c3111debe2f..049ddc8c8e9 100644 --- a/manual/README.md +++ b/manual/README.md @@ -1,3 +1,22 @@ + + ## Manual Driver modules: @@ -14,4 +33,4 @@ Common topics: * [API conventions](api_conventions/) * [Case sensitivity](case_sensitivity/) * [OSGi](osgi/) -* [Cloud](cloud/) \ No newline at end of file +* [Cloud](cloud/) diff --git a/manual/api_conventions/README.md b/manual/api_conventions/README.md index a76067ebef2..553392658dd 100644 --- a/manual/api_conventions/README.md +++ b/manual/api_conventions/README.md @@ -1,3 +1,22 @@ + + ## API conventions In previous versions, the driver relied solely on Java visibility rules: everything was either @@ -41,4 +60,4 @@ internalContext.getEventBus().fire(TopologyEvent.forceDown(address)); So the risk of unintentionally using the internal API is very low. To double-check, you can always grep `import com.datastax.oss.driver.internal` in your source files. -[semantic versioning]: http://semver.org/ \ No newline at end of file +[semantic versioning]: http://semver.org/ diff --git a/manual/case_sensitivity/README.md b/manual/case_sensitivity/README.md index 7430b65eabd..e9dbf1bf9a8 100644 --- a/manual/case_sensitivity/README.md +++ b/manual/case_sensitivity/README.md @@ -1,3 +1,22 @@ + + ## Case sensitivity ### In Cassandra @@ -130,4 +149,4 @@ If you worry about readability, use snake case (`shopping_cart`), or simply stic The only reason to use case sensitivity should be if you don't control the data model. In that case, either pass quoted strings to the driver, or use `CqlIdentifier` instances (stored as -constants to avoid creating them over and over). \ No newline at end of file +constants to avoid creating them over and over). 
diff --git a/manual/cloud/README.md b/manual/cloud/README.md index 5149f140708..48197c49425 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -1,3 +1,22 @@ + + ## Connecting to Astra (Cloud) Using the Java Driver to connect to a DataStax Astra database is almost identical to using diff --git a/manual/core/README.md b/manual/core/README.md index a11c5e624be..a8f97cc4106 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -1,3 +1,22 @@ + + ## Core driver The core module handles cluster connectivity and request execution. It is published under the @@ -330,4 +349,4 @@ for (ColumnDefinitions.Definition definition : row.getColumnDefinitions()) { [SessionBuilder.addContactPoints()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addContactPoints-java.util.Collection- [SessionBuilder.withLocalDatacenter()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withLocalDatacenter-java.lang.String- -[CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file +[CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 433ffe58a75..84efb4a796c 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -1,3 +1,22 @@ + + ## Address resolution ### Quick overview diff --git a/manual/core/async/README.md b/manual/core/async/README.md index d64ee2c9b85..5b4bac3dccf 100644 --- a/manual/core/async/README.md +++ b/manual/core/async/README.md @@ -1,3 +1,22 @@ + + ## Asynchronous programming ### Quick overview @@ -207,4 +226,4 @@ documentation for more details and an example. 
[CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html \ No newline at end of file +[AsyncResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/AsyncResultSet.html diff --git a/manual/core/authentication/README.md b/manual/core/authentication/README.md index ebb52bfc5a8..516e47f558f 100644 --- a/manual/core/authentication/README.md +++ b/manual/core/authentication/README.md @@ -1,3 +1,22 @@ + + ## Authentication ### Quick overview @@ -236,4 +255,4 @@ session.execute(statement); [ProxyAuthentication.executeAs]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/auth/ProxyAuthentication.html#executeAs-java.lang.String-StatementT- [SessionBuilder.withAuthCredentials]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthCredentials-java.lang.String-java.lang.String- [SessionBuilder.withAuthProvider]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#withAuthProvider-com.datastax.oss.driver.api.core.auth.AuthProvider- -[reference.conf]: ../configuration/reference/ \ No newline at end of file +[reference.conf]: ../configuration/reference/ diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index d0797264263..b2a8f205554 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -1,3 +1,22 @@ + + ## Bill of Materials (BOM) A "Bill Of Materials" is a special Maven descriptor that defines the versions of a set of related diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index 0697ea1737b..9e84fde917d 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -1,3 +1,22 @@ + + ## Compression ### Quick overview @@ -82,4 +101,4 
@@ Dependency: Always double-check the exact Snappy version needed; you can find it in the driver's [parent POM]. -[parent POM]: https://search.maven.org/search?q=g:com.datastax.oss%20AND%20a:java-driver-parent&core=gav \ No newline at end of file +[parent POM]: https://search.maven.org/search?q=g:com.datastax.oss%20AND%20a:java-driver-parent&core=gav diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index bccfb7d3fce..a30b79842bb 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -1,3 +1,22 @@ + + ## Configuration ### Quick overview diff --git a/manual/core/configuration/reference/README.rst b/manual/core/configuration/reference/README.rst index e6da9306a75..d4989ecf641 100644 --- a/manual/core/configuration/reference/README.rst +++ b/manual/core/configuration/reference/README.rst @@ -1,3 +1,21 @@ +.. + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ Reference configuration ----------------------- diff --git a/manual/core/control_connection/README.md b/manual/core/control_connection/README.md index 570919fdc94..38544797aed 100644 --- a/manual/core/control_connection/README.md +++ b/manual/core/control_connection/README.md @@ -1,3 +1,22 @@ + + ## Control connection The control connection is a dedicated connection used for administrative tasks: @@ -23,4 +42,4 @@ There are a few options to fine tune the control connection behavior in the `advanced.control-connection` and `advanced.metadata` sections; see the [metadata](../metadata/) pages and the [reference configuration](../configuration/reference/) for all the details. -[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- \ No newline at end of file +[Node.getOpenConnections]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html#getOpenConnections-- diff --git a/manual/core/custom_codecs/README.md b/manual/core/custom_codecs/README.md index e68e5d78029..f3b7be1e3d9 100644 --- a/manual/core/custom_codecs/README.md +++ b/manual/core/custom_codecs/README.md @@ -1,3 +1,22 @@ + + ## Custom codecs ### Quick overview @@ -726,4 +745,4 @@ private static String formatRow(Row row) { [ObjectMapper]: http://fasterxml.github.io/jackson-databind/javadoc/2.10/com/fasterxml/jackson/databind/ObjectMapper.html -[CQL blob example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java \ No newline at end of file +[CQL blob example]: https://github.com/datastax/java-driver/blob/4.x/examples/src/main/java/com/datastax/oss/driver/examples/datatypes/Blobs.java diff --git a/manual/core/detachable_types/README.md b/manual/core/detachable_types/README.md index afd7a3ab0f1..7968835dd8a 100644 --- a/manual/core/detachable_types/README.md +++ 
b/manual/core/detachable_types/README.md @@ -1,3 +1,22 @@ + + ## Detachable types ### Quick overview diff --git a/manual/core/dse/README.md b/manual/core/dse/README.md index 8df3568e1ff..75abeafb3d7 100644 --- a/manual/core/dse/README.md +++ b/manual/core/dse/README.md @@ -1,3 +1,22 @@ + + ## DSE-specific features Some driver features only work with DataStax Enterprise: diff --git a/manual/core/dse/geotypes/README.md b/manual/core/dse/geotypes/README.md index 79a4c034052..eb414de4f8d 100644 --- a/manual/core/dse/geotypes/README.md +++ b/manual/core/dse/geotypes/README.md @@ -1,3 +1,22 @@ + + ## Geospatial types The driver comes with client-side representations of the DSE geospatial data types: [Point], diff --git a/manual/core/dse/graph/README.md b/manual/core/dse/graph/README.md index 9d6ef39f2f3..6bcacd44c4e 100644 --- a/manual/core/dse/graph/README.md +++ b/manual/core/dse/graph/README.md @@ -1,3 +1,22 @@ + + ## Graph The driver provides full support for DSE graph, the distributed graph database available in DataStax diff --git a/manual/core/dse/graph/fluent/README.md b/manual/core/dse/graph/fluent/README.md index 9201470b6a5..c1645fdb234 100644 --- a/manual/core/dse/graph/fluent/README.md +++ b/manual/core/dse/graph/fluent/README.md @@ -1,3 +1,22 @@ + + ## Fluent API The driver depends on [Apache TinkerPop™], a graph computing framework that provides a fluent API to diff --git a/manual/core/dse/graph/fluent/explicit/README.md b/manual/core/dse/graph/fluent/explicit/README.md index f3d8072dcb9..163180a4a8a 100644 --- a/manual/core/dse/graph/fluent/explicit/README.md +++ b/manual/core/dse/graph/fluent/explicit/README.md @@ -1,3 +1,22 @@ + + ## Explicit execution Fluent traversals can be wrapped into a [FluentGraphStatement] and passed to the session: diff --git a/manual/core/dse/graph/fluent/implicit/README.md b/manual/core/dse/graph/fluent/implicit/README.md index 797189a9ae1..f838c376022 100644 --- a/manual/core/dse/graph/fluent/implicit/README.md +++ 
b/manual/core/dse/graph/fluent/implicit/README.md @@ -1,3 +1,22 @@ + + ## Implicit execution Instead of passing traversals to the driver, you can create a *remote traversal source* connected to diff --git a/manual/core/dse/graph/options/README.md b/manual/core/dse/graph/options/README.md index ad439448aa0..e4649ff34f3 100644 --- a/manual/core/dse/graph/options/README.md +++ b/manual/core/dse/graph/options/README.md @@ -1,3 +1,22 @@ + + ## Graph options There are various [configuration](../../../configuration/) options that control the execution of @@ -157,4 +176,4 @@ not explicitly set through `advanced.graph.sub-protocol` in configuration, the v which the driver is connected will determine the default sub-protocol version used by the driver. For DSE 6.8.0 and later, the driver will pick "graph-binary-1.0" as the default sub-protocol version. For DSE 6.7.x and older (or in cases where the driver can't determine the DSE version), the -driver will pick "graphson-2.0" as the default sub-protocol version. \ No newline at end of file +driver will pick "graphson-2.0" as the default sub-protocol version. 
diff --git a/manual/core/dse/graph/results/README.md b/manual/core/dse/graph/results/README.md index abbc56f68fc..3b4d25fa012 100644 --- a/manual/core/dse/graph/results/README.md +++ b/manual/core/dse/graph/results/README.md @@ -1,3 +1,22 @@ + + ## Handling graph results [Script queries](../script/) and [explicit fluent traversals](../fluent/explicit/) return graph @@ -141,4 +160,4 @@ UUID uuid = graphNode.as(UUID.class); [GraphResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/GraphResultSet.html [AsyncGraphResultSet]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/AsyncGraphResultSet.html -[DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html \ No newline at end of file +[DSE data types]: https://docs.datastax.com/en/dse/6.0/dse-dev/datastax_enterprise/graph/reference/refDSEGraphDataTypes.html diff --git a/manual/core/dse/graph/script/README.md b/manual/core/dse/graph/script/README.md index 2b98664ea16..cec8e4e94ef 100644 --- a/manual/core/dse/graph/script/README.md +++ b/manual/core/dse/graph/script/README.md @@ -1,3 +1,22 @@ + + ## Script API The script API handles Gremlin-groovy requests provided as plain Java strings. 
To execute a script, @@ -103,4 +122,4 @@ Note however that some types of queries can only be performed through the script [ScriptGraphStatement]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html [ScriptGraphStatement.newInstance]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#newInstance-java.lang.String- -[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- \ No newline at end of file +[ScriptGraphStatement.builder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/dse/driver/api/core/graph/ScriptGraphStatement.html#builder-java.lang.String- diff --git a/manual/core/graalvm/README.md b/manual/core/graalvm/README.md index 6ee713a2b30..d20fb739f19 100644 --- a/manual/core/graalvm/README.md +++ b/manual/core/graalvm/README.md @@ -1,3 +1,22 @@ + + ## GraalVM native images ### Quick overview diff --git a/manual/core/idempotence/README.md b/manual/core/idempotence/README.md index 3746825390a..be784dfa40b 100644 --- a/manual/core/idempotence/README.md +++ b/manual/core/idempotence/README.md @@ -1,3 +1,22 @@ + + ## Query idempotence ### Quick overview diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 16ed68f9e9b..1f102c2189e 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -1,3 +1,22 @@ + + ## Integration ### Quick overview diff --git a/manual/core/load_balancing/README.md b/manual/core/load_balancing/README.md index 2b60dcb1580..3f391c14f56 100644 --- a/manual/core/load_balancing/README.md +++ b/manual/core/load_balancing/README.md @@ -1,3 +1,22 @@ + + ## Load balancing ### Quick overview diff --git a/manual/core/logging/README.md b/manual/core/logging/README.md index ff0ee5303b6..e3f8bfa7777 100644 --- a/manual/core/logging/README.md +++ 
b/manual/core/logging/README.md @@ -1,3 +1,22 @@ + + ## Logging ### Quick overview @@ -215,4 +234,4 @@ console). [SLF4J]: https://www.slf4j.org/ [binding]: https://www.slf4j.org/manual.html#swapping [Logback]: http://logback.qos.ch -[Log4J]: https://logging.apache.org/log4j \ No newline at end of file +[Log4J]: https://logging.apache.org/log4j diff --git a/manual/core/metadata/README.md b/manual/core/metadata/README.md index 1bb07483869..73609ee0542 100644 --- a/manual/core/metadata/README.md +++ b/manual/core/metadata/README.md @@ -1,3 +1,22 @@ + + ## Metadata ### Quick overview @@ -58,4 +77,4 @@ refreshed. See the [Performance](../performance/#debouncing) page for more detai [Session#getMetadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Session.html#getMetadata-- [Metadata]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html -[Node]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html \ No newline at end of file +[Node]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Node.html diff --git a/manual/core/metadata/node/README.md b/manual/core/metadata/node/README.md index 0f0b6176f42..fea04e5f262 100644 --- a/manual/core/metadata/node/README.md +++ b/manual/core/metadata/node/README.md @@ -1,3 +1,22 @@ + + ## Node metadata ### Quick overview diff --git a/manual/core/metadata/schema/README.md b/manual/core/metadata/schema/README.md index ed2c4c70750..20521d1def4 100644 --- a/manual/core/metadata/schema/README.md +++ b/manual/core/metadata/schema/README.md @@ -1,3 +1,22 @@ + + ## Schema metadata ### Quick overview diff --git a/manual/core/metadata/token/README.md b/manual/core/metadata/token/README.md index 1c6a9c08ae7..4d7cd9252df 100644 --- a/manual/core/metadata/token/README.md +++ b/manual/core/metadata/token/README.md @@ -1,3 +1,22 @@ + + ## Token metadata ### Quick overview @@ 
-170,4 +189,4 @@ also be unavailable for the excluded keyspaces. [Metadata#getTokenMap]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/Metadata.html#getTokenMap-- -[TokenMap]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/TokenMap.html \ No newline at end of file +[TokenMap]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/TokenMap.html diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index 65a93eb1fbf..b5dda977d5c 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -1,3 +1,22 @@ + + ## Metrics ### Quick overview @@ -346,4 +365,4 @@ CSV files, SLF4J logs and Graphite. Refer to their [manual][Dropwizard manual] f [Micrometer Metrics]: https://micrometer.io/docs [Micrometer JMX]: https://micrometer.io/docs/registry/jmx [MicroProfile Metrics]: https://github.com/eclipse/microprofile-metrics -[reference configuration]: ../configuration/reference/ \ No newline at end of file +[reference configuration]: ../configuration/reference/ diff --git a/manual/core/native_protocol/README.md b/manual/core/native_protocol/README.md index 33eca0b09d0..42146e63f42 100644 --- a/manual/core/native_protocol/README.md +++ b/manual/core/native_protocol/README.md @@ -1,3 +1,22 @@ + + ## Native protocol ### Quick overview diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 7dd184707ec..7abe9d856a3 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -1,3 +1,22 @@ + + ## Non-blocking programming ### Quick overview diff --git a/manual/core/paging/README.md b/manual/core/paging/README.md index 761a6bfbc66..2df92bd69d1 100644 --- a/manual/core/paging/README.md +++ b/manual/core/paging/README.md @@ -1,3 +1,22 @@ + + ## Paging ### Quick overview diff --git a/manual/core/performance/README.md b/manual/core/performance/README.md index 
aaaebdaa6c9..3afb321968e 100644 --- a/manual/core/performance/README.md +++ b/manual/core/performance/README.md @@ -1,3 +1,22 @@ + + ## Performance This page is intended as a checklist for everything related to driver performance. Most of the @@ -349,4 +368,4 @@ the only one that will have to stay on a separate thread. [CqlIdentifier]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlIdentifier.html [CqlSession.prepare(SimpleStatement)]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html#prepare-com.datastax.oss.driver.api.core.cql.SimpleStatement- [GenericType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/reflect/GenericType.html -[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- \ No newline at end of file +[Statement.setNode()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/cql/Statement.html#setNode-com.datastax.oss.driver.api.core.metadata.Node- diff --git a/manual/core/pooling/README.md b/manual/core/pooling/README.md index ad9e6f97a02..578de6b4abd 100644 --- a/manual/core/pooling/README.md +++ b/manual/core/pooling/README.md @@ -1,3 +1,22 @@ + + ## Connection pooling ### Quick overview @@ -171,4 +190,4 @@ Try adding more connections per node. Thanks to the driver's hot-reload mechanis at runtime and see the effects immediately. 
[CqlSession]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/CqlSession.html -[CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 \ No newline at end of file +[CASSANDRA-8086]: https://issues.apache.org/jira/browse/CASSANDRA-8086 diff --git a/manual/core/query_timestamps/README.md b/manual/core/query_timestamps/README.md index c851e023e14..4498afe21c4 100644 --- a/manual/core/query_timestamps/README.md +++ b/manual/core/query_timestamps/README.md @@ -1,3 +1,22 @@ + + ## Query timestamps ### Quick overview diff --git a/manual/core/reactive/README.md b/manual/core/reactive/README.md index d0182c4fbc2..37a2e3411b8 100644 --- a/manual/core/reactive/README.md +++ b/manual/core/reactive/README.md @@ -1,3 +1,22 @@ + + ## Reactive Style Programming The driver provides built-in support for reactive queries. The [CqlSession] interface extends diff --git a/manual/core/reconnection/README.md b/manual/core/reconnection/README.md index b27dd19aa27..3eb6dad9c05 100644 --- a/manual/core/reconnection/README.md +++ b/manual/core/reconnection/README.md @@ -1,3 +1,22 @@ + + ## Reconnection ### Quick overview @@ -87,4 +106,4 @@ was established. 
[ConstantReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/internal/core/connection/ConstantReconnectionPolicy.html [DriverContext]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/context/DriverContext.html [ExponentialReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/internal/core/connection/ExponentialReconnectionPolicy.html -[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html \ No newline at end of file +[ReconnectionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/ReconnectionPolicy.html diff --git a/manual/core/request_tracker/README.md b/manual/core/request_tracker/README.md index 0862654e53f..c135abfe53f 100644 --- a/manual/core/request_tracker/README.md +++ b/manual/core/request_tracker/README.md @@ -1,3 +1,22 @@ + + ## Request tracker ### Quick overview @@ -124,4 +143,4 @@ com.datastax.oss.driver.api.core.servererrors.InvalidQueryException: Undefined c ``` [RequestTracker]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/tracker/RequestTracker.html -[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- \ No newline at end of file +[SessionBuilder.addRequestTracker]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/SessionBuilder.html#addRequestTracker-com.datastax.oss.driver.api.core.tracker.RequestTracker- diff --git a/manual/core/retries/README.md b/manual/core/retries/README.md index cdd3a5740a2..e92f8e214aa 100644 --- a/manual/core/retries/README.md +++ b/manual/core/retries/README.md @@ -1,3 +1,22 @@ + + ## Retries ### Quick overview diff --git 
a/manual/core/shaded_jar/README.md b/manual/core/shaded_jar/README.md index 2f52e44c6a4..a6dfac9053e 100644 --- a/manual/core/shaded_jar/README.md +++ b/manual/core/shaded_jar/README.md @@ -1,3 +1,22 @@ + + ## Using the shaded JAR The default `java-driver-core` JAR depends on a number of [third party diff --git a/manual/core/speculative_execution/README.md b/manual/core/speculative_execution/README.md index 53913a6eda7..5666d6a1363 100644 --- a/manual/core/speculative_execution/README.md +++ b/manual/core/speculative_execution/README.md @@ -1,3 +1,22 @@ + + ## Speculative query execution ### Quick overview @@ -250,4 +269,4 @@ profiles have the same configuration). Each request uses its declared profile's policy. If it doesn't declare any profile, or if the profile doesn't have a dedicated policy, then the default profile's policy is used. -[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html \ No newline at end of file +[SpeculativeExecutionPolicy]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/specex/SpeculativeExecutionPolicy.html diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index 37396c6d4c0..b8aa9b89192 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -1,3 +1,22 @@ + + ## SSL ### Quick overview diff --git a/manual/core/statements/README.md b/manual/core/statements/README.md index f02806fb940..394e81ae00e 100644 --- a/manual/core/statements/README.md +++ b/manual/core/statements/README.md @@ -1,3 +1,22 @@ + + ## Statements ### Quick overview diff --git a/manual/core/statements/batch/README.md b/manual/core/statements/batch/README.md index 05e803770eb..f080fe16ab0 100644 --- a/manual/core/statements/batch/README.md +++ b/manual/core/statements/batch/README.md @@ -1,3 +1,22 @@ + + ## Batch statements ### Quick overview diff --git a/manual/core/statements/per_query_keyspace/README.md 
b/manual/core/statements/per_query_keyspace/README.md index f9076b5b5b6..9a7ffa338c9 100644 --- a/manual/core/statements/per_query_keyspace/README.md +++ b/manual/core/statements/per_query_keyspace/README.md @@ -1,3 +1,22 @@ + + ## Per-query keyspace ### Quick overview @@ -126,4 +145,4 @@ the norm, we'll probably deprecate `setRoutingKeyspace()`. [token-aware routing]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/Request.html#getRoutingKey-- -[CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 \ No newline at end of file +[CASSANDRA-10145]: https://issues.apache.org/jira/browse/CASSANDRA-10145 diff --git a/manual/core/statements/prepared/README.md b/manual/core/statements/prepared/README.md index d5a4739c11b..5a87b238cbc 100644 --- a/manual/core/statements/prepared/README.md +++ b/manual/core/statements/prepared/README.md @@ -1,3 +1,22 @@ + + ## Prepared statements ### Quick overview diff --git a/manual/core/statements/simple/README.md b/manual/core/statements/simple/README.md index df56698b4ee..13ddbb7a389 100644 --- a/manual/core/statements/simple/README.md +++ b/manual/core/statements/simple/README.md @@ -1,3 +1,22 @@ + + ## Simple statements ### Quick overview diff --git a/manual/core/temporal_types/README.md b/manual/core/temporal_types/README.md index 2128f822694..6542d5b8dac 100644 --- a/manual/core/temporal_types/README.md +++ b/manual/core/temporal_types/README.md @@ -1,3 +1,22 @@ + + ## Temporal types ### Quick overview @@ -149,4 +168,4 @@ System.out.println(dateTime.minus(CqlDuration.from("1h15s15ns"))); [CqlDuration]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/CqlDuration.html [TypeCodecs.ZONED_TIMESTAMP_SYSTEM]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_SYSTEM [TypeCodecs.ZONED_TIMESTAMP_UTC]: 
https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#ZONED_TIMESTAMP_UTC -[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- \ No newline at end of file +[TypeCodecs.zonedTimestampAt()]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.html#zonedTimestampAt-java.time.ZoneId- diff --git a/manual/core/throttling/README.md b/manual/core/throttling/README.md index 0e1605dafb5..275c0cb5b40 100644 --- a/manual/core/throttling/README.md +++ b/manual/core/throttling/README.md @@ -1,3 +1,22 @@ + + ## Request throttling ### Quick overview @@ -147,4 +166,4 @@ size the underlying histograms (`metrics.session.throttling.delay.*`). [RequestThrottlingException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/RequestThrottlingException.html [AllNodesFailedException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/AllNodesFailedException.html -[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html \ No newline at end of file +[BusyConnectionException]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/connection/BusyConnectionException.html diff --git a/manual/core/tracing/README.md b/manual/core/tracing/README.md index f3154600f9f..f9beca8e49b 100644 --- a/manual/core/tracing/README.md +++ b/manual/core/tracing/README.md @@ -1,3 +1,22 @@ + + ## Query tracing ### Quick overview diff --git a/manual/core/tuples/README.md b/manual/core/tuples/README.md index 69c2f24a46b..d0684b77569 100644 --- a/manual/core/tuples/README.md +++ b/manual/core/tuples/README.md @@ -1,3 +1,22 @@ + + ## Tuples ### Quick overview diff --git a/manual/core/udts/README.md 
b/manual/core/udts/README.md index f45cf658b89..a22057030ae 100644 --- a/manual/core/udts/README.md +++ b/manual/core/udts/README.md @@ -1,3 +1,22 @@ + + ## User-defined types ### Quick overview @@ -136,4 +155,4 @@ session.execute(bs); [cql_doc]: https://docs.datastax.com/en/cql/3.3/cql/cql_reference/cqlRefUDType.html [UdtValue]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/data/UdtValue.html -[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/UserDefinedType.html \ No newline at end of file +[UserDefinedType]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/UserDefinedType.html diff --git a/manual/developer/README.md b/manual/developer/README.md index 975ab16c176..b6e0bda16ed 100644 --- a/manual/developer/README.md +++ b/manual/developer/README.md @@ -1,3 +1,22 @@ + + ## Developer docs This section explains how driver internals work. The intended audience is: @@ -16,4 +35,4 @@ from lowest to highest level: * [Request execution](request_execution/): higher-level handling of user requests and responses; * [Administrative tasks](admin/): everything else (cluster state and metadata). -If you're reading this on GitHub, the `.nav` file in each directory contains a suggested order. \ No newline at end of file +If you're reading this on GitHub, the `.nav` file in each directory contains a suggested order. diff --git a/manual/developer/admin/README.md b/manual/developer/admin/README.md index def3b6a2927..0ebd9e2d746 100644 --- a/manual/developer/admin/README.md +++ b/manual/developer/admin/README.md @@ -1,3 +1,22 @@ + + ## Administrative tasks Aside from the main task of [executing user requests](../request_execution), the driver also needs @@ -320,4 +339,4 @@ It's less likely that this will be overridden directly. 
But the schema querying abstracted behind two factories that handle the differences between Cassandra versions: `SchemaQueriesFactory` and `SchemaParserFactory`. These are pluggable by [extending the context](../common/context/#overriding-a-context-component) and overriding the corresponding -`buildXxx` methods. \ No newline at end of file +`buildXxx` methods. diff --git a/manual/developer/common/README.md b/manual/developer/common/README.md index c227f0826a5..13ad8639e62 100644 --- a/manual/developer/common/README.md +++ b/manual/developer/common/README.md @@ -1,3 +1,22 @@ + + ## Common infrastructure This covers utilities or concept that are shared throughout the codebase: diff --git a/manual/developer/common/concurrency/README.md b/manual/developer/common/concurrency/README.md index a09d1c9fd63..fb493930d6e 100644 --- a/manual/developer/common/concurrency/README.md +++ b/manual/developer/common/concurrency/README.md @@ -1,3 +1,22 @@ + + ## Concurrency The driver is a highly concurrent environment. We try to use thread confinement to simplify the diff --git a/manual/developer/common/context/README.md b/manual/developer/common/context/README.md index 3c6143e970d..e20d5ad0ddb 100644 --- a/manual/developer/common/context/README.md +++ b/manual/developer/common/context/README.md @@ -1,3 +1,22 @@ + + ## Driver context The context holds the driver's internal components. 
It is exposed in the public API as diff --git a/manual/developer/common/event_bus/README.md b/manual/developer/common/event_bus/README.md index 837f8c69082..74729ac6656 100644 --- a/manual/developer/common/event_bus/README.md +++ b/manual/developer/common/event_bus/README.md @@ -1,3 +1,22 @@ + + ## Event bus `EventBus` is a bare-bones messaging mechanism, to decouple components from each other, and diff --git a/manual/developer/native_protocol/README.md b/manual/developer/native_protocol/README.md index cbda8f794ff..b96553fc51b 100644 --- a/manual/developer/native_protocol/README.md +++ b/manual/developer/native_protocol/README.md @@ -1,3 +1,22 @@ + + ## Native protocol layer The native protocol layer encodes protocol messages into binary, before they are sent over the diff --git a/manual/developer/netty_pipeline/README.md b/manual/developer/netty_pipeline/README.md index 58a32a67a59..b596832e202 100644 --- a/manual/developer/netty_pipeline/README.md +++ b/manual/developer/netty_pipeline/README.md @@ -1,3 +1,22 @@ + + ## Netty pipeline With the [protocol layer](../native_protocol) in place, the next step is to build the logic for a @@ -158,4 +177,4 @@ boringssl. 
This requires a bit of custom development against the internal API: [SslContext]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContext.html [SslContext.newHandler]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContext.html#newHandler-io.netty.buffer.ByteBufAllocator- -[SslContextBuilder.forClient]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContextBuilder.html#forClient-- \ No newline at end of file +[SslContextBuilder.forClient]: https://netty.io/4.1/api/io/netty/handler/ssl/SslContextBuilder.html#forClient-- diff --git a/manual/developer/request_execution/README.md b/manual/developer/request_execution/README.md index c6ec04e3b1a..38a0a55fbd7 100644 --- a/manual/developer/request_execution/README.md +++ b/manual/developer/request_execution/README.md @@ -1,3 +1,22 @@ + + ## Request execution The [Netty pipeline](../netty_pipeline/) gives us the ability to send low-level protocol messages on diff --git a/manual/mapper/README.md b/manual/mapper/README.md index 8e745bf44f9..2c64897243f 100644 --- a/manual/mapper/README.md +++ b/manual/mapper/README.md @@ -1,3 +1,22 @@ + + ## Mapper The mapper generates the boilerplate to execute queries and convert the results into diff --git a/manual/mapper/config/README.md b/manual/mapper/config/README.md index 5a6df9d2ba7..8adc0e63b33 100644 --- a/manual/mapper/config/README.md +++ b/manual/mapper/config/README.md @@ -1,3 +1,22 @@ + + ## Integration ### Builds tools diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index 4ee234ffa14..07dcf20f4bf 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -1,3 +1,22 @@ + + ## Kotlin [Kotlin](https://kotlinlang.org/) is an alternative language for the JVM. 
Its compact syntax and diff --git a/manual/mapper/config/lombok/README.md b/manual/mapper/config/lombok/README.md index e2a4f0263c8..b87f8f79ea4 100644 --- a/manual/mapper/config/lombok/README.md +++ b/manual/mapper/config/lombok/README.md @@ -1,3 +1,22 @@ + + ## Lombok [Lombok](https://projectlombok.org/) is a popular library that automates repetitive code, such as diff --git a/manual/mapper/config/record/README.md b/manual/mapper/config/record/README.md index 7466812fc9b..95530d52742 100644 --- a/manual/mapper/config/record/README.md +++ b/manual/mapper/config/record/README.md @@ -1,3 +1,22 @@ + + ## Java 14 Records Java 14 introduced [Record] as a lightweight, immutable alternative to POJOs. You can map annotated diff --git a/manual/mapper/config/scala/README.md b/manual/mapper/config/scala/README.md index b043bd784ad..2cb75273d0b 100644 --- a/manual/mapper/config/scala/README.md +++ b/manual/mapper/config/scala/README.md @@ -1,3 +1,22 @@ + + ## Scala [Scala](https://www.scala-lang.org/) is an alternative language for the JVM. It doesn't support diff --git a/manual/mapper/daos/README.md b/manual/mapper/daos/README.md index e76dde55314..d12172bf056 100644 --- a/manual/mapper/daos/README.md +++ b/manual/mapper/daos/README.md @@ -1,3 +1,22 @@ + + ## DAOs ### Quick overview diff --git a/manual/mapper/daos/custom_types/README.md b/manual/mapper/daos/custom_types/README.md index 75e9733cb2f..19f689655a7 100644 --- a/manual/mapper/daos/custom_types/README.md +++ b/manual/mapper/daos/custom_types/README.md @@ -1,3 +1,22 @@ + + ## Custom result types The mapper supports a pre-defined set of built-in types for DAO method results. 
For example, a diff --git a/manual/mapper/daos/delete/README.md b/manual/mapper/daos/delete/README.md index 10f4ad249d2..e67ecdc8a6e 100644 --- a/manual/mapper/daos/delete/README.md +++ b/manual/mapper/daos/delete/README.md @@ -1,3 +1,22 @@ + + ## Delete methods Annotate a DAO method with [@Delete] to generate a query that deletes an [Entity](../../entities): @@ -163,4 +182,4 @@ entity class and the [naming strategy](../../entities/#naming-strategy)). [CompletionStage]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletionStage.html -[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html \ No newline at end of file +[CompletableFuture]: https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index cea11e34d17..abb7cb076c8 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -1,3 +1,22 @@ + + ## GetEntity methods Annotate a DAO method with [@GetEntity] to convert a core driver data structure into one or more diff --git a/manual/mapper/daos/increment/README.md b/manual/mapper/daos/increment/README.md index c8e90a51627..44b017be2e1 100644 --- a/manual/mapper/daos/increment/README.md +++ b/manual/mapper/daos/increment/README.md @@ -1,3 +1,22 @@ + + ## Increment methods Annotate a DAO method with [@Increment] to generate a query that updates a counter table that is diff --git a/manual/mapper/daos/insert/README.md b/manual/mapper/daos/insert/README.md index bfd95229e1b..b90ffa33a32 100644 --- a/manual/mapper/daos/insert/README.md +++ b/manual/mapper/daos/insert/README.md @@ -1,3 +1,22 @@ + + ## Insert methods Annotate a DAO method with [@Insert] to generate a query that inserts an [Entity](../../entities): diff --git a/manual/mapper/daos/null_saving/README.md b/manual/mapper/daos/null_saving/README.md index e2858f43b4d..eed98934356 
100644 --- a/manual/mapper/daos/null_saving/README.md +++ b/manual/mapper/daos/null_saving/README.md @@ -1,3 +1,22 @@ + + ## Null saving strategy The null saving strategy controls how null entity properties are handled when writing to the diff --git a/manual/mapper/daos/query/README.md b/manual/mapper/daos/query/README.md index 0d4293b5f15..a11753da880 100644 --- a/manual/mapper/daos/query/README.md +++ b/manual/mapper/daos/query/README.md @@ -1,3 +1,22 @@ + + ## Query methods Annotate a DAO method with [@Query] to provide your own query string: diff --git a/manual/mapper/daos/queryprovider/README.md b/manual/mapper/daos/queryprovider/README.md index 7c750bcce1f..593a3a6b1a4 100644 --- a/manual/mapper/daos/queryprovider/README.md +++ b/manual/mapper/daos/queryprovider/README.md @@ -1,3 +1,22 @@ + + ## Query provider methods Annotate a DAO method with [@QueryProvider] to delegate the execution of the query to one of your diff --git a/manual/mapper/daos/select/README.md b/manual/mapper/daos/select/README.md index 9d5357ad546..fb6c4ca2077 100644 --- a/manual/mapper/daos/select/README.md +++ b/manual/mapper/daos/select/README.md @@ -1,3 +1,22 @@ + + ## Select methods Annotate a DAO method with [@Select] to generate a query that selects one or more rows, and maps diff --git a/manual/mapper/daos/setentity/README.md b/manual/mapper/daos/setentity/README.md index cedb6e3dc45..eeb7957f62e 100644 --- a/manual/mapper/daos/setentity/README.md +++ b/manual/mapper/daos/setentity/README.md @@ -1,3 +1,22 @@ + + ## SetEntity methods Annotate a DAO method with [@SetEntity] to fill a core driver data structure from an diff --git a/manual/mapper/daos/statement_attributes/README.md b/manual/mapper/daos/statement_attributes/README.md index aa11e065b4f..f772df36775 100644 --- a/manual/mapper/daos/statement_attributes/README.md +++ b/manual/mapper/daos/statement_attributes/README.md @@ -1,3 +1,22 @@ + + ## Statement attributes The [@Delete](../delete/), [@Insert](../insert/), 
[@Query](../query/), [@Select](../select/) and @@ -60,4 +79,4 @@ Product product = dao.findById(1, builder -> builder.setConsistencyLevel(DefaultConsistencyLevel.QUORUM)); ``` -[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html \ No newline at end of file +[@StatementAttributes]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/mapper/annotations/StatementAttributes.html diff --git a/manual/mapper/daos/update/README.md b/manual/mapper/daos/update/README.md index 6a14a4a6140..87e9286c800 100644 --- a/manual/mapper/daos/update/README.md +++ b/manual/mapper/daos/update/README.md @@ -1,3 +1,22 @@ + + ## Update methods Annotate a DAO method with [@Update] to generate a query that updates one or more diff --git a/manual/mapper/entities/README.md b/manual/mapper/entities/README.md index b857203ef32..978c781245f 100644 --- a/manual/mapper/entities/README.md +++ b/manual/mapper/entities/README.md @@ -1,3 +1,22 @@ + + ## Entities ### Quick overview diff --git a/manual/mapper/mapper/README.md b/manual/mapper/mapper/README.md index 894143f0b9b..752424c9a3b 100644 --- a/manual/mapper/mapper/README.md +++ b/manual/mapper/mapper/README.md @@ -1,3 +1,22 @@ + + ## Mapper interface ### Quick overview diff --git a/manual/osgi/README.md b/manual/osgi/README.md index 88254334f25..92cd4625b68 100644 --- a/manual/osgi/README.md +++ b/manual/osgi/README.md @@ -1,3 +1,22 @@ + + # OSGi The driver is available as an [OSGi] bundle. More specifically, the following maven artifacts are diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index b9ea6a36205..c17cd30d161 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -1,3 +1,22 @@ + + ## Query builder The query builder is a utility to **generate CQL queries programmatically**. 
For example, it could diff --git a/manual/query_builder/condition/README.md b/manual/query_builder/condition/README.md index 0530b33d5bc..1a6a37eb2ef 100644 --- a/manual/query_builder/condition/README.md +++ b/manual/query_builder/condition/README.md @@ -1,3 +1,22 @@ + + ## Conditions A condition is a clause that appears after the IF keyword in a conditional [UPDATE](../update/) or diff --git a/manual/query_builder/delete/README.md b/manual/query_builder/delete/README.md index 031291c311f..8e97920ae9f 100644 --- a/manual/query_builder/delete/README.md +++ b/manual/query_builder/delete/README.md @@ -1,3 +1,22 @@ + + ## DELETE To start a DELETE query, use one of the `deleteFrom` methods in [QueryBuilder]. There are several diff --git a/manual/query_builder/idempotence/README.md b/manual/query_builder/idempotence/README.md index 9fd6d39114d..2f97151d277 100644 --- a/manual/query_builder/idempotence/README.md +++ b/manual/query_builder/idempotence/README.md @@ -1,3 +1,22 @@ + + ## Idempotence in the query builder When you generate a statement (or a statement builder) from the query builder, it automatically @@ -225,4 +244,4 @@ sequential history that is correct. From our clients' point of view, there were But overall the column changed from 1 to 2. There is no ordering of the two operations that can explain that change. We broke linearizability by doing a transparent retry at step 6. -[linearizability]: https://en.wikipedia.org/wiki/Linearizability#Definition_of_linearizability \ No newline at end of file +[linearizability]: https://en.wikipedia.org/wiki/Linearizability#Definition_of_linearizability diff --git a/manual/query_builder/insert/README.md b/manual/query_builder/insert/README.md index ede99602af0..6bac896d9b8 100644 --- a/manual/query_builder/insert/README.md +++ b/manual/query_builder/insert/README.md @@ -1,3 +1,22 @@ + + ## INSERT To start an INSERT query, use one of the `insertInto` methods in [QueryBuilder]. There are @@ -114,4 +133,4 @@ is executed. 
This is distinctly different than setting the value to null. Passin this method will only remove the USING TTL clause from the query, which will not alter the TTL (if one is set) in Cassandra. -[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html \ No newline at end of file +[QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html diff --git a/manual/query_builder/relation/README.md b/manual/query_builder/relation/README.md index 3c72e28cbee..eb1c728888e 100644 --- a/manual/query_builder/relation/README.md +++ b/manual/query_builder/relation/README.md @@ -1,3 +1,22 @@ + + ## Relations A relation is a clause that appears after the WHERE keyword, and restricts the rows that the diff --git a/manual/query_builder/schema/README.md b/manual/query_builder/schema/README.md index e4021c3068f..0472c8e8c6f 100644 --- a/manual/query_builder/schema/README.md +++ b/manual/query_builder/schema/README.md @@ -1,3 +1,22 @@ + + # Schema builder The schema builder is an additional API provided by [java-driver-query-builder](../) that enables diff --git a/manual/query_builder/schema/aggregate/README.md b/manual/query_builder/schema/aggregate/README.md index 42f1952a105..a54f8703d69 100644 --- a/manual/query_builder/schema/aggregate/README.md +++ b/manual/query_builder/schema/aggregate/README.md @@ -1,3 +1,22 @@ + + ## Aggregate Aggregates enable users to apply User-defined functions (UDF) to rows in a data set and combine diff --git a/manual/query_builder/schema/function/README.md b/manual/query_builder/schema/function/README.md index 7d02f0f8349..001327626b1 100644 --- a/manual/query_builder/schema/function/README.md +++ b/manual/query_builder/schema/function/README.md @@ -1,3 +1,22 @@ + + ## Function User-defined functions (UDF) enable users to create user code written in JSR-232 compliant scripting diff --git 
a/manual/query_builder/schema/index/README.md b/manual/query_builder/schema/index/README.md index 8541831c1f2..c0c9448dfab 100644 --- a/manual/query_builder/schema/index/README.md +++ b/manual/query_builder/schema/index/README.md @@ -1,3 +1,22 @@ + + # Index An index provides a means of expanding the query capabilities of a table. [SchemaBuilder] offers diff --git a/manual/query_builder/schema/keyspace/README.md b/manual/query_builder/schema/keyspace/README.md index 25e165f32c1..572e8af1658 100644 --- a/manual/query_builder/schema/keyspace/README.md +++ b/manual/query_builder/schema/keyspace/README.md @@ -1,3 +1,22 @@ + + ## Keyspace A keyspace is a top-level namespace that defines a name, replication strategy and configurable diff --git a/manual/query_builder/schema/materialized_view/README.md b/manual/query_builder/schema/materialized_view/README.md index 7bcdda0bd3f..c4f495f95aa 100644 --- a/manual/query_builder/schema/materialized_view/README.md +++ b/manual/query_builder/schema/materialized_view/README.md @@ -1,3 +1,22 @@ + + ## Materialized View Materialized Views are an experimental feature introduced in Apache Cassandra 3.0 that provide a diff --git a/manual/query_builder/schema/table/README.md b/manual/query_builder/schema/table/README.md index 8a68d676851..090f8a1f67b 100644 --- a/manual/query_builder/schema/table/README.md +++ b/manual/query_builder/schema/table/README.md @@ -1,3 +1,22 @@ + + ## Table Data in Apache Cassandra is stored in tables. [SchemaBuilder] offers API methods for creating, diff --git a/manual/query_builder/schema/type/README.md b/manual/query_builder/schema/type/README.md index e474dc29419..c289ad776a8 100644 --- a/manual/query_builder/schema/type/README.md +++ b/manual/query_builder/schema/type/README.md @@ -1,3 +1,22 @@ + + ## Type User-defined types are special types that can associate multiple named fields to a single column. 
diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 19f0085508a..92c058608e7 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -1,3 +1,22 @@ + + ## SELECT Start your SELECT with the `selectFrom` method in [QueryBuilder]. There are several variants diff --git a/manual/query_builder/term/README.md b/manual/query_builder/term/README.md index 214dedb3274..460ed8dcb10 100644 --- a/manual/query_builder/term/README.md +++ b/manual/query_builder/term/README.md @@ -1,3 +1,22 @@ + + ## Terms A term is an expression that does not involve the value of a column. It is used: @@ -106,4 +125,4 @@ execution time; on the other hand, it can be used as a workaround to handle new are not yet covered by the query builder. [QueryBuilder]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/querybuilder/QueryBuilder.html -[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html \ No newline at end of file +[CodecRegistry]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/type/codec/registry/CodecRegistry.html diff --git a/manual/query_builder/truncate/README.md b/manual/query_builder/truncate/README.md index 9b37160c0c9..c8cd6945123 100644 --- a/manual/query_builder/truncate/README.md +++ b/manual/query_builder/truncate/README.md @@ -1,3 +1,22 @@ + + ## TRUNCATE To create a TRUNCATE query, use one of the `truncate` methods in [QueryBuilder]. There are several diff --git a/manual/query_builder/update/README.md b/manual/query_builder/update/README.md index d85f71f11cc..15502f52bb7 100644 --- a/manual/query_builder/update/README.md +++ b/manual/query_builder/update/README.md @@ -1,3 +1,22 @@ + + ## UPDATE To start an UPDATE query, use one of the `update` methods in [QueryBuilder]. 
There are several diff --git a/mapper-processor/CONTRIBUTING.md b/mapper-processor/CONTRIBUTING.md index 11659a9f936..c6d324106c4 100644 --- a/mapper-processor/CONTRIBUTING.md +++ b/mapper-processor/CONTRIBUTING.md @@ -1,3 +1,22 @@ + + # Mapper contributing guidelines Everything in the [main contribution guidelines](../CONTRIBUTING.md) also applies to the mapper. diff --git a/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties index 4971c6cb7ee..fdbf4ccc7c2 100644 --- a/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties +++ b/metrics/micrometer/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-micrometer/native-image.properties @@ -1 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ Args = -H:ReflectionConfigurationResources=${.}/reflection.json diff --git a/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties index 4971c6cb7ee..fdbf4ccc7c2 100644 --- a/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties +++ b/metrics/microprofile/src/main/resources/META-INF/native-image/com.datastax.oss/java-driver-metrics-microprofile/native-image.properties @@ -1 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + Args = -H:ReflectionConfigurationResources=${.}/reflection.json diff --git a/osgi-tests/README.md b/osgi-tests/README.md index 1647bad6949..89ad0ba27c8 100644 --- a/osgi-tests/README.md +++ b/osgi-tests/README.md @@ -1,3 +1,22 @@ + + # Java Driver OSGi Tests This module contains OSGi tests for the driver. @@ -45,4 +64,4 @@ First, you can enable DEBUG logs for the Pax Exam framework by editing the Alternatively, you can debug the remote OSGi container by passing the system property `-Dosgi.debug=true`. 
In this case the framework will prompt for a -remote debugger on port 5005. \ No newline at end of file +remote debugger on port 5005. diff --git a/osgi-tests/src/main/resources/application.conf b/osgi-tests/src/main/resources/application.conf index 8f795524ed2..0c3e8e76c98 100644 --- a/osgi-tests/src/main/resources/application.conf +++ b/osgi-tests/src/main/resources/application.conf @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # Configuration overrides for integration tests datastax-java-driver { basic { @@ -39,4 +56,4 @@ datastax-java-driver { } } } -} \ No newline at end of file +} diff --git a/performance/README.md b/performance/README.md index f13c76d18cc..ff66a453e9b 100644 --- a/performance/README.md +++ b/performance/README.md @@ -1,3 +1,22 @@ + + # How to run the Driver duration tests Note: the procedure described in this page is currently only accessible to DataStax employees. diff --git a/performance/duration-test.yaml b/performance/duration-test.yaml index 8a50e0de3b5..6e718f2add8 100644 --- a/performance/duration-test.yaml +++ b/performance/duration-test.yaml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # Possible values: cassandra or dse server_type: cassandra # Server version (e.g. 3.11.7 or 6.8.8) diff --git a/performance/graphite-setup.yaml b/performance/graphite-setup.yaml index 04c37aecfd9..99bb8ecc8cc 100644 --- a/performance/graphite-setup.yaml +++ b/performance/graphite-setup.yaml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ # How long should the Graphite server be kept alive, default: 15 days keep_alive: 15d # Cloud-specific settings diff --git a/pre-commit.sh b/pre-commit.sh index c87ea5bf9ca..912564ae81e 100755 --- a/pre-commit.sh +++ b/pre-commit.sh @@ -1,4 +1,20 @@ #!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
# STASH_NAME="pre-commit-$(date +%s)" # git stash save --keep-index $STASH_NAME diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index de624bc436f..e79e8f8cc6d 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -1,3 +1,22 @@ + + ## Upgrade guide ### 4.17.0 From 0e4f40121d65d600b123cea4bfe365dbc09bfbad Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Mon, 13 Nov 2023 17:05:59 -0800 Subject: [PATCH 871/979] Move copyright notices to LICENSE, add bundled ASL dep notices to NOTICE patch by Claude Warren; reviewed by Henry Hughes, Mick Semb Wever for CASSANDRA-18969 --- LICENSE | 21 +++++ NOTICE.txt | 254 ++++++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 261 insertions(+), 14 deletions(-) diff --git a/LICENSE b/LICENSE index d6456956733..a157e31d058 100644 --- a/LICENSE +++ b/LICENSE @@ -200,3 +200,24 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + +Apache Cassandra Java Driver bundles code and files from the following projects: + +JNR project +Copyright (C) 2008-2010 Wayne Meissner +This product includes software developed as part of the JNR project ( https://github.com/jnr/jnr-ffi )s. +see core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java + +Protocol Buffers +Copyright 2008 Google Inc. +This product includes software developed as part of the Protocol Buffers project ( https://developers.google.com/protocol-buffers/ ). +see core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java + +Guava +Copyright (C) 2007 The Guava Authors +This product includes software developed as part of the Guava project ( https://guava.dev ). 
+see core/src/main/java/com/datastax/oss/driver/internal/core/util/CountingIterator.java + +Copyright (C) 2018 Christian Stein +This product includes software developed by Christian Stein +see ci/install-jdk.sh diff --git a/NOTICE.txt b/NOTICE.txt index 477f0645ed9..b7a91be2318 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -4,17 +4,243 @@ Copyright 2012- The Apache Software Foundation This product includes software developed by The Apache Software Foundation (http://www.apache.org/). -JNR project -Copyright (C) 2008-2010 Wayne Meissner -This product includes software developed as part of the JNR project ( https://github.com/jnr/jnr-ffi )s. -see core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java - -Protocol Buffers -Copyright 2008 Google Inc. -This product includes software developed as part of the Protocol Buffers project ( https://developers.google.com/protocol-buffers/ ). -see core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java - -Guava -Copyright (C) 2007 The Guava Authors -This product includes software developed as part of the Guava project ( https://guava.dev ). -see core/src/main/java/com/datastax/oss/driver/internal/core/util/CountingIterator.java \ No newline at end of file +================================================================== +io.netty:netty-handler NOTICE.txt +================================================================== +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * https://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * https://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product optionally depends on 'zstd-jni', a zstd-jni Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.zstd-jni.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/luben/zstd-jni + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * https://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jboss-remoting/jboss-marshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * https://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an 
ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * https://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Cory Benfield. It can be obtained at: + + * LICENSE: + * license/LICENSE.hyper-hpack.txt (MIT License) + * HOMEPAGE: + * https://github.com/python-hyper/hpack/ + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Tatsuhiro Tsujikawa. It can be obtained at: + + * LICENSE: + * license/LICENSE.nghttp2-hpack.txt (MIT License) + * HOMEPAGE: + * https://github.com/nghttp2/nghttp2/ + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper + +This product contains the dnsinfo.h header file, that provides a way to retrieve the system DNS configuration on MacOS. +This private header is also used by Apple's open source + mDNSResponder (https://opensource.apple.com/tarballs/mDNSResponder/). 
+ + * LICENSE: + * license/LICENSE.dnsinfo.txt (Apple Public Source License 2.0) + * HOMEPAGE: + * https://www.opensource.apple.com/source/configd/configd-453.19/dnsinfo/dnsinfo.h + +This product optionally depends on 'Brotli4j', Brotli compression and +decompression for Java., which can be obtained at: + + * LICENSE: + * license/LICENSE.brotli4j.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/hyperxpro/Brotli4j From 12e3e3ea027c51c5807e5e46ba542f894edfa4e7 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Thu, 16 Nov 2023 23:15:10 -0800 Subject: [PATCH 872/979] Add LICENSE and NOTICE.txt/NOTICE_binary to published jars LICENSE + NOTICE.txt is added to source jars, LICENSE + NOTICE_binary.txt is added to regular jars. Make parent project inherit from apache pom. Updated NOTICE wording to "developed at ..." per latest instructions. patch by Henry Hughes; reviewed by Mick Semb Wever for CASSANDRA-18969 --- NOTICE.txt | 243 +----------------- NOTICE_binary.txt | 249 +++++++++++++++++++ core-shaded/pom.xml | 13 + core/pom.xml | 8 + distribution/src/assembly/binary-tarball.xml | 1 + mapper-processor/pom.xml | 13 + mapper-runtime/pom.xml | 13 + metrics/micrometer/pom.xml | 13 + metrics/microprofile/pom.xml | 13 + pom.xml | 21 ++ query-builder/pom.xml | 13 + test-infra/pom.xml | 13 + 12 files changed, 371 insertions(+), 242 deletions(-) create mode 100644 NOTICE_binary.txt diff --git a/NOTICE.txt b/NOTICE.txt index b7a91be2318..8e27ae3e52f 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,246 +1,5 @@ Apache Cassandra Java Driver Copyright 2012- The Apache Software Foundation -This product includes software developed by The Apache Software +This product includes software developed at The Apache Software Foundation (http://www.apache.org/). 
- -================================================================== -io.netty:netty-handler NOTICE.txt -================================================================== -This product contains the extensions to Java Collections Framework which has -been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: - - * LICENSE: - * license/LICENSE.jsr166y.txt (Public Domain) - * HOMEPAGE: - * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ - * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ - -This product contains a modified version of Robert Harder's Public Domain -Base64 Encoder and Decoder, which can be obtained at: - - * LICENSE: - * license/LICENSE.base64.txt (Public Domain) - * HOMEPAGE: - * http://iharder.sourceforge.net/current/java/base64/ - -This product contains a modified portion of 'Webbit', an event based -WebSocket and HTTP server, which can be obtained at: - - * LICENSE: - * license/LICENSE.webbit.txt (BSD License) - * HOMEPAGE: - * https://github.com/joewalnes/webbit - -This product contains a modified portion of 'SLF4J', a simple logging -facade for Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.slf4j.txt (MIT License) - * HOMEPAGE: - * https://www.slf4j.org/ - -This product contains a modified portion of 'Apache Harmony', an open source -Java SE, which can be obtained at: - - * NOTICE: - * license/NOTICE.harmony.txt - * LICENSE: - * license/LICENSE.harmony.txt (Apache License 2.0) - * HOMEPAGE: - * https://archive.apache.org/dist/harmony/ - -This product contains a modified portion of 'jbzip2', a Java bzip2 compression -and decompression library written by Matthew J. Francis. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.jbzip2.txt (MIT License) - * HOMEPAGE: - * https://code.google.com/p/jbzip2/ - -This product contains a modified portion of 'libdivsufsort', a C API library to construct -the suffix array and the Burrows-Wheeler transformed string for any input string of -a constant-size alphabet written by Yuta Mori. It can be obtained at: - - * LICENSE: - * license/LICENSE.libdivsufsort.txt (MIT License) - * HOMEPAGE: - * https://github.com/y-256/libdivsufsort - -This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, - which can be obtained at: - - * LICENSE: - * license/LICENSE.jctools.txt (ASL2 License) - * HOMEPAGE: - * https://github.com/JCTools/JCTools - -This product optionally depends on 'JZlib', a re-implementation of zlib in -pure Java, which can be obtained at: - - * LICENSE: - * license/LICENSE.jzlib.txt (BSD style License) - * HOMEPAGE: - * http://www.jcraft.com/jzlib/ - -This product optionally depends on 'Compress-LZF', a Java library for encoding and -decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: - - * LICENSE: - * license/LICENSE.compress-lzf.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/ning/compress - -This product optionally depends on 'lz4', a LZ4 Java compression -and decompression library written by Adrien Grand. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.lz4.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jpountz/lz4-java - -This product optionally depends on 'lzma-java', a LZMA Java compression -and decompression library, which can be obtained at: - - * LICENSE: - * license/LICENSE.lzma-java.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jponge/lzma-java - -This product optionally depends on 'zstd-jni', a zstd-jni Java compression -and decompression library, which can be obtained at: - - * LICENSE: - * license/LICENSE.zstd-jni.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/luben/zstd-jni - -This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression -and decompression library written by William Kinney. It can be obtained at: - - * LICENSE: - * license/LICENSE.jfastlz.txt (MIT License) - * HOMEPAGE: - * https://code.google.com/p/jfastlz/ - -This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data -interchange format, which can be obtained at: - - * LICENSE: - * license/LICENSE.protobuf.txt (New BSD License) - * HOMEPAGE: - * https://github.com/google/protobuf - -This product optionally depends on 'Bouncy Castle Crypto APIs' to generate -a temporary self-signed X.509 certificate when the JVM does not provide the -equivalent functionality. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.bouncycastle.txt (MIT License) - * HOMEPAGE: - * https://www.bouncycastle.org/ - -This product optionally depends on 'Snappy', a compression library produced -by Google Inc, which can be obtained at: - - * LICENSE: - * license/LICENSE.snappy.txt (New BSD License) - * HOMEPAGE: - * https://github.com/google/snappy - -This product optionally depends on 'JBoss Marshalling', an alternative Java -serialization API, which can be obtained at: - - * LICENSE: - * license/LICENSE.jboss-marshalling.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/jboss-remoting/jboss-marshalling - -This product optionally depends on 'Caliper', Google's micro- -benchmarking framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.caliper.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/google/caliper - -This product optionally depends on 'Apache Commons Logging', a logging -framework, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-logging.txt (Apache License 2.0) - * HOMEPAGE: - * https://commons.apache.org/logging/ - -This product optionally depends on 'Apache Log4J', a logging framework, which -can be obtained at: - - * LICENSE: - * license/LICENSE.log4j.txt (Apache License 2.0) - * HOMEPAGE: - * https://logging.apache.org/log4j/ - -This product optionally depends on 'Aalto XML', an ultra-high performance -non-blocking XML processor, which can be obtained at: - - * LICENSE: - * license/LICENSE.aalto-xml.txt (Apache License 2.0) - * HOMEPAGE: - * https://wiki.fasterxml.com/AaltoHome - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Twitter. 
It can be obtained at: - - * LICENSE: - * license/LICENSE.hpack.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/twitter/hpack - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Cory Benfield. It can be obtained at: - - * LICENSE: - * license/LICENSE.hyper-hpack.txt (MIT License) - * HOMEPAGE: - * https://github.com/python-hyper/hpack/ - -This product contains a modified version of 'HPACK', a Java implementation of -the HTTP/2 HPACK algorithm written by Tatsuhiro Tsujikawa. It can be obtained at: - - * LICENSE: - * license/LICENSE.nghttp2-hpack.txt (MIT License) - * HOMEPAGE: - * https://github.com/nghttp2/nghttp2/ - -This product contains a modified portion of 'Apache Commons Lang', a Java library -provides utilities for the java.lang API, which can be obtained at: - - * LICENSE: - * license/LICENSE.commons-lang.txt (Apache License 2.0) - * HOMEPAGE: - * https://commons.apache.org/proper/commons-lang/ - - -This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. - - * LICENSE: - * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/takari/maven-wrapper - -This product contains the dnsinfo.h header file, that provides a way to retrieve the system DNS configuration on MacOS. -This private header is also used by Apple's open source - mDNSResponder (https://opensource.apple.com/tarballs/mDNSResponder/). 
- - * LICENSE: - * license/LICENSE.dnsinfo.txt (Apple Public Source License 2.0) - * HOMEPAGE: - * https://www.opensource.apple.com/source/configd/configd-453.19/dnsinfo/dnsinfo.h - -This product optionally depends on 'Brotli4j', Brotli compression and -decompression for Java., which can be obtained at: - - * LICENSE: - * license/LICENSE.brotli4j.txt (Apache License 2.0) - * HOMEPAGE: - * https://github.com/hyperxpro/Brotli4j diff --git a/NOTICE_binary.txt b/NOTICE_binary.txt new file mode 100644 index 00000000000..c60d8ceb245 --- /dev/null +++ b/NOTICE_binary.txt @@ -0,0 +1,249 @@ +Apache Cassandra Java Driver +Copyright 2012- The Apache Software Foundation + +This product includes software developed at The Apache Software +Foundation (http://www.apache.org/). + +This compiled product also includes Apache-licensed dependencies +that contain the following NOTICE information: + +================================================================== +io.netty:netty-handler NOTICE.txt +================================================================== +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. 
Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified portion of 'Webbit', an event based +WebSocket and HTTP server, which can be obtained at: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product contains a modified portion of 'SLF4J', a simple logging +facade for Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * https://www.slf4j.org/ + +This product contains a modified portion of 'Apache Harmony', an open source +Java SE, which can be obtained at: + + * NOTICE: + * license/NOTICE.harmony.txt + * LICENSE: + * license/LICENSE.harmony.txt (Apache License 2.0) + * HOMEPAGE: + * https://archive.apache.org/dist/harmony/ + +This product contains a modified portion of 'jbzip2', a Java bzip2 compression +and decompression library written by Matthew J. Francis. It can be obtained at: + + * LICENSE: + * license/LICENSE.jbzip2.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jbzip2/ + +This product contains a modified portion of 'libdivsufsort', a C API library to construct +the suffix array and the Burrows-Wheeler transformed string for any input string of +a constant-size alphabet written by Yuta Mori. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.libdivsufsort.txt (MIT License) + * HOMEPAGE: + * https://github.com/y-256/libdivsufsort + +This product contains a modified portion of Nitsan Wakart's 'JCTools', Java Concurrency Tools for the JVM, + which can be obtained at: + + * LICENSE: + * license/LICENSE.jctools.txt (ASL2 License) + * HOMEPAGE: + * https://github.com/JCTools/JCTools + +This product optionally depends on 'JZlib', a re-implementation of zlib in +pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product optionally depends on 'Compress-LZF', a Java library for encoding and +decoding data in LZF format, written by Tatu Saloranta. It can be obtained at: + + * LICENSE: + * license/LICENSE.compress-lzf.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/ning/compress + +This product optionally depends on 'lz4', a LZ4 Java compression +and decompression library written by Adrien Grand. It can be obtained at: + + * LICENSE: + * license/LICENSE.lz4.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jpountz/lz4-java + +This product optionally depends on 'lzma-java', a LZMA Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.lzma-java.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jponge/lzma-java + +This product optionally depends on 'zstd-jni', a zstd-jni Java compression +and decompression library, which can be obtained at: + + * LICENSE: + * license/LICENSE.zstd-jni.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/luben/zstd-jni + +This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression +and decompression library written by William Kinney. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.jfastlz.txt (MIT License) + * HOMEPAGE: + * https://code.google.com/p/jfastlz/ + +This product contains a modified portion of and optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/protobuf + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * https://www.bouncycastle.org/ + +This product optionally depends on 'Snappy', a compression library produced +by Google Inc, which can be obtained at: + + * LICENSE: + * license/LICENSE.snappy.txt (New BSD License) + * HOMEPAGE: + * https://github.com/google/snappy + +This product optionally depends on 'JBoss Marshalling', an alternative Java +serialization API, which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-marshalling.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/jboss-remoting/jboss-marshalling + +This product optionally depends on 'Caliper', Google's micro- +benchmarking framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.caliper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/google/caliper + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, which +can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * https://logging.apache.org/log4j/ + +This product optionally depends on 'Aalto XML', an 
ultra-high performance +non-blocking XML processor, which can be obtained at: + + * LICENSE: + * license/LICENSE.aalto-xml.txt (Apache License 2.0) + * HOMEPAGE: + * https://wiki.fasterxml.com/AaltoHome + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at: + + * LICENSE: + * license/LICENSE.hpack.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/twitter/hpack + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Cory Benfield. It can be obtained at: + + * LICENSE: + * license/LICENSE.hyper-hpack.txt (MIT License) + * HOMEPAGE: + * https://github.com/python-hyper/hpack/ + +This product contains a modified version of 'HPACK', a Java implementation of +the HTTP/2 HPACK algorithm written by Tatsuhiro Tsujikawa. It can be obtained at: + + * LICENSE: + * license/LICENSE.nghttp2-hpack.txt (MIT License) + * HOMEPAGE: + * https://github.com/nghttp2/nghttp2/ + +This product contains a modified portion of 'Apache Commons Lang', a Java library +provides utilities for the java.lang API, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-lang.txt (Apache License 2.0) + * HOMEPAGE: + * https://commons.apache.org/proper/commons-lang/ + + +This product contains the Maven wrapper scripts from 'Maven Wrapper', that provides an easy way to ensure a user has everything necessary to run the Maven build. + + * LICENSE: + * license/LICENSE.mvn-wrapper.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/takari/maven-wrapper + +This product contains the dnsinfo.h header file, that provides a way to retrieve the system DNS configuration on MacOS. +This private header is also used by Apple's open source + mDNSResponder (https://opensource.apple.com/tarballs/mDNSResponder/). 
+ + * LICENSE: + * license/LICENSE.dnsinfo.txt (Apple Public Source License 2.0) + * HOMEPAGE: + * https://www.opensource.apple.com/source/configd/configd-453.19/dnsinfo/dnsinfo.h + +This product optionally depends on 'Brotli4j', Brotli compression and +decompression for Java., which can be obtained at: + + * LICENSE: + * license/LICENSE.brotli4j.txt (Apache License 2.0) + * HOMEPAGE: + * https://github.com/hyperxpro/Brotli4j diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index fa321503e02..e651ee1cf75 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -128,6 +128,19 @@ 4) assembly plugin re-creates the shaded jar by packing target/classes + manifest + shaded pom --> + + + src/main/resources + + + ${project.basedir}/.. + + LICENSE + NOTICE_binary.txt + + META-INF + + maven-shade-plugin diff --git a/core/pom.xml b/core/pom.xml index f4c7e2a5547..a1858c3afb0 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -208,6 +208,14 @@ false + + ${project.basedir}/.. + + LICENSE + NOTICE_binary.txt + + META-INF + diff --git a/distribution/src/assembly/binary-tarball.xml b/distribution/src/assembly/binary-tarball.xml index 17364aa858a..9f44898713d 100644 --- a/distribution/src/assembly/binary-tarball.xml +++ b/distribution/src/assembly/binary-tarball.xml @@ -157,6 +157,7 @@ README* LICENSE* + NOTICE* diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f9814b3dea4..f845bf85a07 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -99,6 +99,19 @@ + + + src/main/resources + + + ${project.basedir}/.. + + LICENSE + NOTICE_binary.txt + + META-INF + + src/test/resources diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 3957bbe1505..8967581e05a 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -96,6 +96,19 @@ + + + src/main/resources + + + ${project.basedir}/.. 
+ + LICENSE + NOTICE_binary.txt + + META-INF + + src/test/resources diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 1c28b636b86..c202e9113d5 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -102,6 +102,19 @@ + + + src/main/resources + + + ${project.basedir}/../.. + + LICENSE + NOTICE_binary.txt + + META-INF + + maven-jar-plugin diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 0d2d5873330..f0045c35974 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -107,6 +107,19 @@ + + + src/main/resources + + + ${project.basedir}/../.. + + LICENSE + NOTICE_binary.txt + + META-INF + + maven-jar-plugin diff --git a/pom.xml b/pom.xml index 71ecd2a7915..aac375139b5 100644 --- a/pom.xml +++ b/pom.xml @@ -20,6 +20,11 @@ --> 4.0.0 + + org.apache + apache + 23 + com.datastax.oss java-driver-parent 4.17.1-SNAPSHOT @@ -753,6 +758,14 @@ limitations under the License.]]> jar-no-fork + + + NOTICE.txt + + + NOTICE_binary.txt + + @@ -910,6 +923,14 @@ height="0" width="0" style="display:none;visibility:hidden"> + + org.apache.maven.plugins + maven-remote-resources-plugin + 1.7.0 + + true + + diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 5ecbebf367b..d35fd834748 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -81,6 +81,19 @@ + + + src/main/resources + + + ${project.basedir}/.. + + LICENSE + NOTICE_binary.txt + + META-INF + + src/test/resources diff --git a/test-infra/pom.xml b/test-infra/pom.xml index cf1da84f7dd..3b20ad2f4f1 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -76,6 +76,19 @@ + + + src/main/resources + + + ${project.basedir}/.. 
+ + LICENSE + NOTICE_binary.txt + + META-INF + + maven-jar-plugin From f11622308d031bf85047c4811e737aeb6ae236e9 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Mon, 4 Dec 2023 13:01:09 -0800 Subject: [PATCH 873/979] Compliance changes for generated source and binary distributable tarballs * Source files missing from sources jars due to maven-source-plugin include rule * New submodule to generate distribution source tarball * Binary/source tarball artifacts should be prefixed with apache-cassandra * Change groupId to org.apache.cassandra * Remove javadoc jars (javadoc plugin is still used for leak detections) * Create binary versions for LICENSE and NOTICE, with licenses and entries for asm, HdrHistogram, jnr-posix, jnr-x86asm, reactive-streams, slf4j-api * Add checksums to distribution tarballs, and clean toplevel readme a little patch by Henry Hughes; reviewed by Mick Semb Wever for CASSANDRA-18969 --- .github/workflows/dep-lic-scan.yaml | 16 + LICENSE_binary | 247 ++++ README.md | 21 +- bom/pom.xml | 18 +- core-shaded/pom.xml | 45 +- core/pom.xml | 3 +- core/src/main/resources/reference.conf | 4 +- distribution-source/pom.xml | 125 ++ .../src/assembly/source-tarball.xml | 43 + distribution-tests/pom.xml | 16 +- distribution/pom.xml | 62 +- distribution/src/assembly/binary-tarball.xml | 44 +- examples/pom.xml | 4 +- integration-tests/pom.xml | 16 +- licenses/HdrHistogram.txt | 41 + licenses/asm.txt | 27 + licenses/jnr-posix.txt | 1076 +++++++++++++++++ licenses/jnr-x86asm.txt | 24 + licenses/reactive-streams.txt | 7 + licenses/slf4j-api.txt | 21 + manual/core/README.md | 2 +- manual/core/bom/README.md | 12 +- manual/core/configuration/README.md | 2 +- manual/core/integration/README.md | 18 +- manual/core/metrics/README.md | 8 +- manual/core/shaded_jar/README.md | 10 +- manual/mapper/README.md | 4 +- manual/mapper/config/README.md | 8 +- manual/mapper/config/kotlin/README.md | 2 +- manual/query_builder/README.md | 2 +- mapper-processor/pom.xml | 7 +- 
mapper-runtime/pom.xml | 5 +- metrics/micrometer/pom.xml | 7 +- metrics/microprofile/pom.xml | 7 +- osgi-tests/pom.xml | 12 +- pom.xml | 39 +- query-builder/pom.xml | 7 +- test-infra/pom.xml | 5 +- 38 files changed, 1789 insertions(+), 228 deletions(-) create mode 100644 LICENSE_binary create mode 100644 distribution-source/pom.xml create mode 100644 distribution-source/src/assembly/source-tarball.xml create mode 100644 licenses/HdrHistogram.txt create mode 100644 licenses/asm.txt create mode 100644 licenses/jnr-posix.txt create mode 100644 licenses/jnr-x86asm.txt create mode 100644 licenses/reactive-streams.txt create mode 100644 licenses/slf4j-api.txt diff --git a/.github/workflows/dep-lic-scan.yaml b/.github/workflows/dep-lic-scan.yaml index afb197bf137..54fabe2dc8f 100644 --- a/.github/workflows/dep-lic-scan.yaml +++ b/.github/workflows/dep-lic-scan.yaml @@ -1,3 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# name: Dependency and License Scan on: push: diff --git a/LICENSE_binary b/LICENSE_binary new file mode 100644 index 00000000000..b59c6ec22bb --- /dev/null +++ b/LICENSE_binary @@ -0,0 +1,247 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Apache Cassandra Java Driver bundles code and files from the following projects: + +JNR project +Copyright (C) 2008-2010 Wayne Meissner +This product includes software developed as part of the JNR project ( https://github.com/jnr/jnr-ffi )s. +see core/src/main/java/com/datastax/oss/driver/internal/core/os/CpuInfo.java + +Protocol Buffers +Copyright 2008 Google Inc. +This product includes software developed as part of the Protocol Buffers project ( https://developers.google.com/protocol-buffers/ ). +see core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java + +Guava +Copyright (C) 2007 The Guava Authors +This product includes software developed as part of the Guava project ( https://guava.dev ). 
+see core/src/main/java/com/datastax/oss/driver/internal/core/util/CountingIterator.java + +Copyright (C) 2018 Christian Stein +This product includes software developed by Christian Stein +see ci/install-jdk.sh + +This product bundles Java Native Runtime - POSIX 3.1.15, +which is available under the Eclipse Public License version 2.0. +see licenses/jnr-posix.txt + +This product bundles jnr-x86asm 1.0.2, +which is available under the MIT License. +see licenses/jnr-x86asm.txt + +This product bundles ASM 9.2: a very small and fast Java bytecode manipulation framework, +which is available under the 3-Clause BSD License. +see licenses/asm.txt + +This product bundles HdrHistogram 2.1.12: A High Dynamic Range (HDR) Histogram, +which is available under the 2-Clause BSD License. +see licenses/HdrHistogram.txt + +This product bundles The Simple Logging Facade for Java (SLF4J) API 1.7.26, +which is available under the MIT License. +see licenses/slf4j-api.txt + +This product bundles Reactive Streams 1.0.3, +which is available under the MIT License. +see licenses/reactive-streams.txt diff --git a/README.md b/README.md index b43b90b71d8..2e8fe862f49 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Java Driver for Apache Cassandra® +:warning: The java-driver has recently been donated by Datastax to The Apache Software Foundation and the Apache Cassandra project. Bear with us as we move assets and coordinates. + [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core) *If you're reading this on github.com, please note that this is the readme for the development @@ -13,8 +15,6 @@ and Cassandra Query Language (CQL) v3. 
[DataStax Docs]: http://docs.datastax.com/en/developer/java-driver/ [Apache Cassandra®]: http://cassandra.apache.org/ -[DataStax Enterprise]: https://www.datastax.com/products/datastax-enterprise -[DataStax Astra]: https://www.datastax.com/products/datastax-astra ## Getting the driver @@ -23,19 +23,19 @@ are multiple modules, all prefixed with `java-driver-`. ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} - com.datastax.oss + org.apache.cassandra java-driver-query-builder ${driver.version} - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime ${driver.version} @@ -59,11 +59,6 @@ It requires Java 8 or higher. Disclaimer: Some DataStax/DataStax Enterprise products might partially work on big-endian systems, but DataStax does not officially support these systems. -## Connecting to DataStax Astra - -The driver comes with built-in support for Astra, DataStax's cloud-native Cassandra-as-a-service -offering. See the dedicated [manual page](manual/cloud/) for more details. - ## Migrating from previous versions Java Driver 4 is **not binary compatible** with previous versions. However, most of the concepts @@ -81,16 +76,12 @@ See the [Cassandra error handling done right blog](https://www.datastax.com/blog * [API docs] * Bug tracking: [JIRA] * [Mailing list] -* Twitter: [@dsJavaDriver] tweets Java Driver releases and important announcements (low frequency). - [@DataStaxEng] has more news, including other drivers, Cassandra, and DSE. * [Changelog] * [FAQ] [API docs]: https://docs.datastax.com/en/drivers/java/4.17 [JIRA]: https://datastax-oss.atlassian.net/browse/JAVA [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user -[@dsJavaDriver]: https://twitter.com/dsJavaDriver -[@DataStaxEng]: https://twitter.com/datastaxeng [Changelog]: changelog/ [FAQ]: faq/ @@ -115,3 +106,5 @@ limitations under the License. 
Apache Cassandra, Apache, Tomcat, Lucene, Solr, Hadoop, Spark, TinkerPop, and Cassandra are trademarks of the [Apache Software Foundation](http://www.apache.org/) or its subsidiaries in Canada, the United States and/or other countries. + +Binary artifacts of this product bundle Java Native Runtime libraries, which is available under the Eclipse Public License version 2.0. \ No newline at end of file diff --git a/bom/pom.xml b/bom/pom.xml index 33c454fcf75..973171c5c5d 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -31,42 +31,42 @@ - com.datastax.oss + org.apache.cassandra java-driver-core 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-core-shaded 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-query-builder 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-test-infra 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-metrics-micrometer 4.17.1-SNAPSHOT - com.datastax.oss + org.apache.cassandra java-driver-metrics-microprofile 4.17.1-SNAPSHOT diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index e651ee1cf75..55250d59d6e 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -44,7 +44,7 @@ this dependency will be removed from the final pom by the shade plugin. 
--> - com.datastax.oss + org.apache.cassandra java-driver-core - com.datastax.oss:java-driver-core + org.apache.cassandra:java-driver-core io.netty:* com.fasterxml.jackson.core:* @@ -183,7 +184,7 @@ - com.datastax.oss:* + org.apache.cassandra:* META-INF/MANIFEST.MF @@ -219,7 +220,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-core-shaded jar ${project.build.outputDirectory} @@ -237,7 +238,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-core-shaded jar sources @@ -273,38 +274,6 @@ - - maven-javadoc-plugin - - - attach-javadocs - - jar - - - ${project.build.directory}/shaded-sources - com.datastax.*.driver.internal*,com.datastax.oss.driver.shaded* - - - - org.jctools - jctools-core - 2.1.2 - - - com.esri.geometry - esri-geometry-api - 1.2.1 - - - - - - org.apache.felix maven-bundle-plugin diff --git a/core/pom.xml b/core/pom.xml index a1858c3afb0..ebe7a286b21 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -213,6 +213,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 9e4fb9c7948..75bed97e498 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1370,8 +1370,8 @@ datastax-java-driver { # To select Micrometer, set the value to "MicrometerMetricsFactory", and to select # MicroProfile Metrics, set the value to "MicroProfileMetricsFactory". 
For these libraries to # be used, you will also need to add an additional dependency: - # - Micrometer: com.datastax.oss:java-driver-metrics-micrometer - # - MicroProfile: com.datastax.oss:java-driver-metrics-microprofile + # - Micrometer: org.apache.cassandra:java-driver-metrics-micrometer + # - MicroProfile: org.apache.cassandra:java-driver-metrics-microprofile # # If you would like to use another metrics library, set this value to the fully-qualified name # of a class that implements com.datastax.oss.driver.internal.core.metrics.MetricsFactory. diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml new file mode 100644 index 00000000000..d4db09c7091 --- /dev/null +++ b/distribution-source/pom.xml @@ -0,0 +1,125 @@ + + + + 4.0.0 + + org.apache.cassandra + java-driver-parent + 4.17.1-SNAPSHOT + + java-driver-distribution-source + pom + Apache Cassandra Java Driver - source distribution + + apache-cassandra-java-driver-${project.version}-source + + + maven-jar-plugin + + + + default-jar + none + + + + + maven-source-plugin + + true + + + + maven-install-plugin + + true + + + + maven-deploy-plugin + + true + + + + org.revapi + revapi-maven-plugin + + true + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + + + + release + + + + maven-assembly-plugin + + + assemble-source-tarball + package + + single + + + + + false + + src/assembly/source-tarball.xml + + posix + + + + net.nicoulaj.maven.plugins + checksum-maven-plugin + 1.7 + + + + artifacts + + + + + true + + sha256 + sha512 + + + + + + + + diff --git a/distribution-source/src/assembly/source-tarball.xml b/distribution-source/src/assembly/source-tarball.xml new file mode 100644 index 00000000000..b3e2d0f463a --- /dev/null +++ b/distribution-source/src/assembly/source-tarball.xml @@ -0,0 +1,43 @@ + + + + source-tarball + + tar.gz + + + + .. + . 
+ true + + + **/*.iml + **/.classpath + **/.project + **/.java-version + **/.flattened-pom.xml + **/dependency-reduced-pom.xml + **/${project.build.directory}/** + + + + diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index ba1bae8511b..fd5378afc25 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -40,37 +40,37 @@ - com.datastax.oss + org.apache.cassandra java-driver-test-infra test - com.datastax.oss + org.apache.cassandra java-driver-query-builder test - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor test - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime test - com.datastax.oss + org.apache.cassandra java-driver-core test - com.datastax.oss + org.apache.cassandra java-driver-metrics-micrometer test - com.datastax.oss + org.apache.cassandra java-driver-metrics-microprofile test diff --git a/distribution/pom.xml b/distribution/pom.xml index 0a1d3d71e65..706157d9a98 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -67,7 +67,7 @@ - datastax-java-driver-${project.version} + apache-cassandra-java-driver-${project.version} maven-jar-plugin @@ -118,45 +118,6 @@ release - - maven-javadoc-plugin - - - attach-javadocs - package - - jar - - - true - Java Driver for Apache Cassandra® ${project.version} API - Apache Cassandra Java Driver ${project.version} API - - - org.lz4 - lz4-java - ${lz4.version} - - - org.xerial.snappy - snappy-java - ${snappy.version} - - - org.apache.tinkerpop - gremlin-core - ${tinkerpop.version} - - - org.apache.tinkerpop - tinkergraph-gremlin - ${tinkerpop.version} - - - - - - maven-assembly-plugin @@ -176,6 +137,25 @@ posix + + net.nicoulaj.maven.plugins + checksum-maven-plugin + 1.7 + + + + artifacts + + + + + true + + sha256 + sha512 + + + diff 
--git a/distribution/src/assembly/binary-tarball.xml b/distribution/src/assembly/binary-tarball.xml index 9f44898713d..0d025fafb2c 100644 --- a/distribution/src/assembly/binary-tarball.xml +++ b/distribution/src/assembly/binary-tarball.xml @@ -29,7 +29,7 @@ true - com.datastax.oss:java-driver-core + org.apache.cassandra:java-driver-core lib/core @@ -42,9 +42,9 @@ For some reason, we need to exclude all other modules here, even though our moduleSet targets core only --> - com.datastax.oss:java-driver-query-builder - com.datastax.oss:java-driver-mapper-runtime - com.datastax.oss:java-driver-mapper-processor + org.apache.cassandra:java-driver-query-builder + org.apache.cassandra:java-driver-mapper-runtime + org.apache.cassandra:java-driver-mapper-processor true @@ -55,7 +55,7 @@ true - com.datastax.oss:java-driver-query-builder + org.apache.cassandra:java-driver-query-builder lib/query-builder @@ -63,9 +63,9 @@ - com.datastax.oss:java-driver-core - com.datastax.oss:java-driver-mapper-runtime - com.datastax.oss:java-driver-mapper-processor + org.apache.cassandra:java-driver-core + org.apache.cassandra:java-driver-mapper-runtime + org.apache.cassandra:java-driver-mapper-processor com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations @@ -80,7 +80,7 @@ true - com.datastax.oss:java-driver-mapper-runtime + org.apache.cassandra:java-driver-mapper-runtime lib/mapper-runtime @@ -88,9 +88,9 @@ - com.datastax.oss:java-driver-core - com.datastax.oss:java-driver-query-builder - com.datastax.oss:java-driver-mapper-processor + org.apache.cassandra:java-driver-core + org.apache.cassandra:java-driver-query-builder + org.apache.cassandra:java-driver-mapper-processor com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations @@ -105,7 +105,7 @@ true - com.datastax.oss:java-driver-mapper-processor + org.apache.cassandra:java-driver-mapper-processor lib/mapper-processor @@ -113,9 +113,9 @@ - com.datastax.oss:java-driver-core - 
com.datastax.oss:java-driver-query-builder - com.datastax.oss:java-driver-mapper-runtime + org.apache.cassandra:java-driver-core + org.apache.cassandra:java-driver-query-builder + org.apache.cassandra:java-driver-mapper-runtime com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations @@ -130,10 +130,10 @@ true - com.datastax.oss:java-driver-core - com.datastax.oss:java-driver-query-builder - com.datastax.oss:java-driver-mapper-runtime - com.datastax.oss:java-driver-mapper-processor + org.apache.cassandra:java-driver-core + org.apache.cassandra:java-driver-query-builder + org.apache.cassandra:java-driver-mapper-runtime + org.apache.cassandra:java-driver-mapper-processor false @@ -156,8 +156,8 @@ . README* - LICENSE* - NOTICE* + LICENSE_binary + NOTICE_binary.txt diff --git a/examples/pom.xml b/examples/pom.xml index a597f634d9a..c971c0355ae 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -22,7 +22,7 @@ 4.0.0 java-driver-parent - com.datastax.oss + org.apache.cassandra 4.17.1-SNAPSHOT java-driver-examples @@ -157,7 +157,7 @@ 1.8 - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor ${project.version} diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index db77efb5166..5e12c2f9ae2 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -47,39 +47,39 @@ - com.datastax.oss + org.apache.cassandra java-driver-test-infra test - com.datastax.oss + org.apache.cassandra java-driver-query-builder test - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor test true - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime test - com.datastax.oss + org.apache.cassandra java-driver-core test-jar test - com.datastax.oss + org.apache.cassandra java-driver-metrics-micrometer test - com.datastax.oss + org.apache.cassandra java-driver-metrics-microprofile test diff --git 
a/licenses/HdrHistogram.txt b/licenses/HdrHistogram.txt new file mode 100644 index 00000000000..401ccfb0ec5 --- /dev/null +++ b/licenses/HdrHistogram.txt @@ -0,0 +1,41 @@ +The code in this repository code was Written by Gil Tene, Michael Barker, +and Matt Warren, and released to the public domain, as explained at +http://creativecommons.org/publicdomain/zero/1.0/ + +For users of this code who wish to consume it under the "BSD" license +rather than under the public domain or CC0 contribution text mentioned +above, the code found under this directory is *also* provided under the +following license (commonly referred to as the BSD 2-Clause License). This +license does not detract from the above stated release of the code into +the public domain, and simply represents an additional license granted by +the Author. + +----------------------------------------------------------------------------- +** Beginning of "BSD 2-Clause License" text. ** + + Copyright (c) 2012, 2013, 2014, 2015, 2016 Gil Tene + Copyright (c) 2014 Michael Barker + Copyright (c) 2014 Matt Warren + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/asm.txt b/licenses/asm.txt new file mode 100644 index 00000000000..c71bb7bac5d --- /dev/null +++ b/licenses/asm.txt @@ -0,0 +1,27 @@ +ASM: a very small and fast Java bytecode manipulation framework +Copyright (c) 2000-2011 INRIA, France Telecom +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holders nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/jnr-posix.txt b/licenses/jnr-posix.txt new file mode 100644 index 00000000000..4dc4217a306 --- /dev/null +++ b/licenses/jnr-posix.txt @@ -0,0 +1,1076 @@ +jnr-posix is released under a tri EPL/GPL/LGPL license. You can use it, +redistribute it and/or modify it under the terms of the: + + Eclipse Public License version 2.0 + OR + GNU General Public License version 2 + OR + GNU Lesser General Public License version 2.1 + +The complete text of the Eclipse Public License is as follows: + + Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + + 1. DEFINITIONS + + "Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + + "Contributor" means any person or entity that Distributes the Program. 
+ + "Licensed Patents" mean patent claims licensable by a Contributor which + are necessarily infringed by the use or sale of its Contribution alone + or when combined with the Program. + + "Program" means the Contributions Distributed in accordance with this + Agreement. + + "Recipient" means anyone who receives the Program under this Agreement + or any Secondary License (as applicable), including Contributors. + + "Derivative Works" shall mean any work, whether in Source Code or other + form, that is based on (or derived from) the Program and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. + + "Modified Works" shall mean any work in Source Code or other form that + results from an addition to, deletion from, or modification of the + contents of the Program, including, for purposes of clarity any new file + in Source Code form that contains any contents of the Program. Modified + Works shall not include works that contain only declarations, + interfaces, types, classes, structures, or files of the Program solely + in each case in order to link to, bind by name, or subclass the Program + or Modified Works thereof. + + "Distribute" means the acts of a) distributing or b) making available + in any manner that enables the transfer of a copy. + + "Source Code" means the form of a Program preferred for making + modifications, including but not limited to software source code, + documentation source, and configuration files. + + "Secondary License" means either the GNU General Public License, + Version 2.0, or any later versions of that license, including any + exceptions or additional permissions as identified by the initial + Contributor. + + 2. 
GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. + Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. 
+ + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. + + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + + 3. REQUIREMENTS + + 3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. 
+ + 3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + + 3.3 Contributors may not remove or alter any copyright, patent, + trademark, attribution notices, disclaimers of warranty, or limitations + of liability ("notices") contained within the Program from any copy of + the Program which they Distribute, provided that Contributors may add + their own appropriate notices. + + 4. COMMERCIAL DISTRIBUTION + + Commercial distributors of software may accept certain responsibilities + with respect to end users, business partners and the like. While this + license is intended to facilitate the commercial use of the Program, + the Contributor who includes the Program in a commercial product + offering should do so in a manner which does not create potential + liability for other Contributors. Therefore, if a Contributor includes + the Program in a commercial product offering, such Contributor + ("Commercial Contributor") hereby agrees to defend and indemnify every + other Contributor ("Indemnified Contributor") against any losses, + damages and costs (collectively "Losses") arising from claims, lawsuits + and other legal actions brought by a third party against the Indemnified + Contributor to the extent caused by the acts or omissions of such + Commercial Contributor in connection with its distribution of the Program + in a commercial product offering. The obligations in this section do not + apply to any claims or Losses relating to any actual or alleged + intellectual property infringement. 
In order to qualify, an Indemnified + Contributor must: a) promptly notify the Commercial Contributor in + writing of such claim, and b) allow the Commercial Contributor to control, + and cooperate with the Commercial Contributor in, the defense and any + related settlement negotiations. The Indemnified Contributor may + participate in any such claim at its own expense. + + For example, a Contributor might include the Program in a commercial + product offering, Product X. That Contributor is then a Commercial + Contributor. If that Commercial Contributor then makes performance + claims, or offers warranties related to Product X, those performance + claims and warranties are such Commercial Contributor's responsibility + alone. Under this section, the Commercial Contributor would have to + defend claims against the other Contributors related to those performance + claims and warranties, and if a court requires any other Contributor to + pay any damages as a result, the Commercial Contributor must pay + those damages. + + 5. NO WARRANTY + + EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT + PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" + BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR + IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF + TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR + PURPOSE. Each Recipient is solely responsible for determining the + appropriateness of using and distributing the Program and assumes all + risks associated with its exercise of rights under this Agreement, + including but not limited to the risks and costs of program errors, + compliance with applicable laws, damage to or loss of data, programs + or equipment, and unavailability or interruption of operations. + + 6. 
DISCLAIMER OF LIABILITY + + EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT + PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS + SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST + PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE + EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + 7. GENERAL + + If any provision of this Agreement is invalid or unenforceable under + applicable law, it shall not affect the validity or enforceability of + the remainder of the terms of this Agreement, and without further + action by the parties hereto, such provision shall be reformed to the + minimum extent necessary to make such provision valid and enforceable. + + If Recipient institutes patent litigation against any entity + (including a cross-claim or counterclaim in a lawsuit) alleging that the + Program itself (excluding combinations of the Program with other software + or hardware) infringes such Recipient's patent(s), then such Recipient's + rights granted under Section 2(b) shall terminate as of the date such + litigation is filed. + + All Recipient's rights under this Agreement shall terminate if it + fails to comply with any of the material terms or conditions of this + Agreement and does not cure such failure in a reasonable period of + time after becoming aware of such noncompliance. If all Recipient's + rights under this Agreement terminate, Recipient agrees to cease use + and distribution of the Program as soon as reasonably practicable. + However, Recipient's obligations under this Agreement and any licenses + granted by Recipient relating to the Program shall continue and survive. 
+ + Everyone is permitted to copy and distribute copies of this Agreement, + but in order to avoid inconsistency the Agreement is copyrighted and + may only be modified in the following manner. The Agreement Steward + reserves the right to publish new versions (including revisions) of + this Agreement from time to time. No one other than the Agreement + Steward has the right to modify this Agreement. The Eclipse Foundation + is the initial Agreement Steward. The Eclipse Foundation may assign the + responsibility to serve as the Agreement Steward to a suitable separate + entity. Each new version of the Agreement will be given a distinguishing + version number. The Program (including Contributions) may always be + Distributed subject to the version of the Agreement under which it was + received. In addition, after a new version of the Agreement is published, + Contributor may elect to Distribute the Program (including its + Contributions) under the new version. + + Except as expressly stated in Sections 2(a) and 2(b) above, Recipient + receives no rights or licenses to the intellectual property of any + Contributor under this Agreement, whether expressly, by implication, + estoppel or otherwise. All rights in the Program not expressly granted + under this Agreement are reserved. Nothing in this Agreement is intended + to be enforceable by any entity that is not a Contributor or Recipient. + No third-party beneficiary rights are created under this Agreement. + + Exhibit A - Form of Secondary Licenses Notice + + "This Source Code may also be made available under the following + Secondary Licenses when the conditions for such availability set forth + in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), + version(s), and exceptions or additional permissions here}." + + Simply including a copy of this Agreement, including this Exhibit A + is not sufficient to license the Source Code under Secondary Licenses. 
+ + If it is not possible or desirable to put the notice in a particular + file, then You may include the notice in a location (such as a LICENSE + file in a relevant directory) where a recipient would be likely to + look for such a notice. + + You may add additional accurate notices of copyright ownership. + +The complete text of the GNU General Public License v2 is as follows: + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. + 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + License is intended to guarantee your freedom to share and change free + software--to make sure the software is free for all its users. This + General Public License applies to most of the Free Software + Foundation's software and to any other program whose authors commit to + using it. (Some other Free Software Foundation software is covered by + the GNU Library General Public License instead.) You can apply it to + your programs, too. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + this service if you wish), that you receive source code or can get it + if you want it, that you can change the software or use pieces of it + in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid + anyone to deny you these rights or to ask you to surrender the rights. + These restrictions translate to certain responsibilities for you if you + distribute copies of the software, or if you modify it. 
+ + For example, if you distribute copies of such a program, whether + gratis or for a fee, you must give the recipients all the rights that + you have. You must make sure that they, too, receive or can get the + source code. And you must show them these terms so they know their + rights. + + We protect your rights with two steps: (1) copyright the software, and + (2) offer you this license which gives you legal permission to copy, + distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain + that everyone understands that there is no warranty for this free + software. If the software is modified by someone else and passed on, we + want its recipients to know that what they have is not the original, so + that any problems introduced by others will not reflect on the original + authors' reputations. + + Finally, any free program is threatened constantly by software + patents. We wish to avoid the danger that redistributors of a free + program will individually obtain patent licenses, in effect making the + program proprietary. To prevent this, we have made it clear that any + patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and + modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains + a notice placed by the copyright holder saying it may be distributed + under the terms of this General Public License. The "Program", below, + refers to any such program or work, and a "work based on the Program" + means either the Program or any derivative work under copyright law: + that is to say, a work containing the Program or a portion of it, + either verbatim or with modifications and/or translated into another + language. 
(Hereinafter, translation is included without limitation in + the term "modification".) Each licensee is addressed as "you". + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running the Program is not restricted, and the output from the Program + is covered only if its contents constitute a work based on the + Program (independent of having been made by running the Program). + Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's + source code as you receive it, in any medium, provided that you + conspicuously and appropriately publish on each copy an appropriate + copyright notice and disclaimer of warranty; keep intact all the + notices that refer to this License and to the absence of any warranty; + and give any other recipients of the Program a copy of this License + along with the Program. + + You may charge a fee for the physical act of transferring a copy, and + you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion + of it, thus forming a work based on the Program, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. 
+ + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Program, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Program, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Program. + + In addition, mere aggregation of another work not based on the Program + with the Program (or with a work based on the Program) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, + under Section 2) in object code or executable form under the terms of + Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + + The source code for a work means the preferred form of the work for + making modifications to it. For an executable work, complete source + code means all the source code for all modules it contains, plus any + associated interface definition files, plus the scripts used to + control compilation and installation of the executable. However, as a + special exception, the source code distributed need not include + anything that is normally distributed (in either source or binary + form) with the major components (compiler, kernel, and so on) of the + operating system on which the executable runs, unless that component + itself accompanies the executable. 
+ + If distribution of executable or object code is made by offering + access to copy from a designated place, then offering equivalent + access to copy the source code from the same place counts as + distribution of the source code, even though third parties are not + compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program + except as expressly provided under this License. Any attempt + otherwise to copy, modify, sublicense or distribute the Program is + void, and will automatically terminate your rights under this License. + However, parties who have received copies, or rights, from you under + this License will not have their licenses terminated so long as such + parties remain in full compliance. + + 5. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Program or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Program (or any work based on the + Program), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the + original licensor to copy, distribute or modify the Program subject to + these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties to + this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Program at all. For example, if a patent + license would not permit royalty-free redistribution of the Program by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Program. + + If any portion of this section is held invalid or unenforceable under + any particular circumstance, the balance of the section is intended to + apply and the section as a whole is intended to apply in other + circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system, which is + implemented by public license practices. Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 8. 
If the distribution and/or use of the Program is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Program under this License + may add an explicit geographical distribution limitation excluding + those countries, so that distribution is permitted only in or among + countries not thus excluded. In such case, this License incorporates + the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions + of the General Public License from time to time. Such new versions will + be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free + programs whose distribution conditions are different, write to the author + to ask for permission. For software which is copyrighted by the Free + Software Foundation, write to the Free Software Foundation; we sometimes + make exceptions for this. Our decision will be guided by the two goals + of preserving the free status of all derivatives of our free software and + of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY + FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS + TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE + PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, + REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED + TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY + YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + +The complete text of the GNU Lesser General Public License 2.1 is as follows: + + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + [This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + Licenses are intended to guarantee your freedom to share and change + free software--to make sure the software is free for all its users. 
+ + This license, the Lesser General Public License, applies to some + specially designated software packages--typically libraries--of the + Free Software Foundation and other authors who decide to use it. You + can use it too, but we suggest you first think carefully about whether + this license or the ordinary General Public License is the better + strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, + not price. Our General Public Licenses are designed to make sure that + you have the freedom to distribute copies of free software (and charge + for this service if you wish); that you receive source code or can get + it if you want it; that you can change the software and use pieces of + it in new free programs; and that you are informed that you can do + these things. + + To protect your rights, we need to make restrictions that forbid + distributors to deny you these rights or to ask you to surrender these + rights. These restrictions translate to certain responsibilities for + you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis + or for a fee, you must give the recipients all the rights that we gave + you. You must make sure that they, too, receive or can get the source + code. If you link other code with the library, you must provide + complete object files to the recipients, so that they can relink them + with the library after making changes to the library and recompiling + it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the + library, and (2) we offer you this license, which gives you legal + permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that + there is no warranty for the free library. 
Also, if the library is + modified by someone else and passed on, the recipients should know + that what they have is not the original version, so that the original + author's reputation will not be affected by problems that might be + introduced by others. + + Finally, software patents pose a constant threat to the existence of + any free program. We wish to make sure that a company cannot + effectively restrict the users of a free program by obtaining a + restrictive license from a patent holder. Therefore, we insist that + any patent license obtained for a version of the library must be + consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the + ordinary GNU General Public License. This license, the GNU Lesser + General Public License, applies to certain designated libraries, and + is quite different from the ordinary General Public License. We use + this license for certain libraries in order to permit linking those + libraries into non-free programs. + + When a program is linked with a library, whether statically or using + a shared library, the combination of the two is legally speaking a + combined work, a derivative of the original library. The ordinary + General Public License therefore permits such linking only if the + entire combination fits its criteria of freedom. The Lesser General + Public License permits more lax criteria for linking other code with + the library. + + We call this license the "Lesser" General Public License because it + does Less to protect the user's freedom than the ordinary General + Public License. It also provides other free software developers Less + of an advantage over competing non-free programs. These disadvantages + are the reason we use the ordinary General Public License for many + libraries. However, the Lesser license provides advantages in certain + special circumstances. 
+ + For example, on rare occasions, there may be a special need to + encourage the widest possible use of a certain library, so that it becomes + a de-facto standard. To achieve this, non-free programs must be + allowed to use the library. A more frequent case is that a free + library does the same job as widely used non-free libraries. In this + case, there is little to gain by limiting the free library to free + software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free + programs enables a greater number of people to use a large body of + free software. For example, permission to use the GNU C Library in + non-free programs enables many more people to use the whole GNU + operating system, as well as its variant, the GNU/Linux operating + system. + + Although the Lesser General Public License is Less protective of the + users' freedom, it does ensure that the user of a program that is + linked with the Library has the freedom and the wherewithal to run + that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and + modification follow. Pay close attention to the difference between a + "work based on the library" and a "work that uses the library". The + former contains code derived from the library, whereas the latter must + be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other + program which contains a notice placed by the copyright holder or + other authorized party saying it may be distributed under the terms of + this Lesser General Public License (also called "this License"). + Each licensee is addressed as "you". 
+ + A "library" means a collection of software functions and/or data + prepared so as to be conveniently linked with application programs + (which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work + which has been distributed under these terms. A "work based on the + Library" means either the Library or any derivative work under + copyright law: that is to say, a work containing the Library or a + portion of it, either verbatim or with modifications and/or translated + straightforwardly into another language. (Hereinafter, translation is + included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for + making modifications to it. For a library, complete source code means + all the source code for all modules it contains, plus any associated + interface definition files, plus the scripts used to control compilation + and installation of the library. + + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running a program using the Library is not restricted, and output from + such a program is covered only if its contents constitute a work based + on the Library (independent of the use of the Library in a tool for + writing it). Whether that is true depends on what the Library does + and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's + complete source code as you receive it, in any medium, provided that + you conspicuously and appropriately publish on each copy an + appropriate copyright notice and disclaimer of warranty; keep intact + all the notices that refer to this License and to the absence of any + warranty; and distribute a copy of this License along with the + Library. 
+ + You may charge a fee for the physical act of transferring a copy, + and you may at your option offer warranty protection in exchange for a + fee. + + 2. You may modify your copy or copies of the Library or any portion + of it, thus forming a work based on the Library, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Library, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. 
But when you + distribute the same sections as part of a whole which is a work based + on the Library, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote + it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Library. + + In addition, mere aggregation of another work not based on the Library + with the Library (or with a work based on the Library) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public + License instead of this License to a given copy of the Library. To do + this, you must alter all the notices that refer to this License, so + that they refer to the ordinary GNU General Public License, version 2, + instead of to this License. (If a newer version than version 2 of the + ordinary GNU General Public License has appeared, then you can specify + that version instead if you wish.) Do not make any other change in + these notices. + + Once this change is made in a given copy, it is irreversible for + that copy, so the ordinary GNU General Public License applies to all + subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of + the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or + derivative of it, under Section 2) in object code or executable form + under the terms of Sections 1 and 2 above provided that you accompany + it with the complete corresponding machine-readable source code, which + must be distributed under the terms of Sections 1 and 2 above on a + medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy + from a designated place, then offering equivalent access to copy the + source code from the same place satisfies the requirement to + distribute the source code, even though third parties are not + compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the + Library, but is designed to work with the Library by being compiled or + linked with it, is called a "work that uses the Library". Such a + work, in isolation, is not a derivative work of the Library, and + therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library + creates an executable that is a derivative of the Library (because it + contains portions of the Library), rather than a "work that uses the + library". The executable is therefore covered by this License. + Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file + that is part of the Library, the object code for the work may be a + derivative work of the Library even though the source code is not. + Whether this is true is especially significant if the work can be + linked without the Library, or if the work is itself a library. The + threshold for this to be true is not precisely defined by law. 
+ + If such an object file uses only numerical parameters, data + structure layouts and accessors, and small macros and small inline + functions (ten lines or less in length), then the use of the object + file is unrestricted, regardless of whether it is legally a derivative + work. (Executables containing this object code plus portions of the + Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may + distribute the object code for the work under the terms of Section 6. + Any executables containing that work also fall under Section 6, + whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or + link a "work that uses the Library" with the Library to produce a + work containing portions of the Library, and distribute that work + under terms of your choice, provided that the terms permit + modification of the work for the customer's own use and reverse + engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the + Library is used in it and that the Library and its use are covered by + this License. You must supply a copy of this License. If the work + during execution displays copyright notices, you must include the + copyright notice for the Library among them, as well as a reference + directing the user to the copy of this License. Also, you must do one + of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. 
(It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the + Library" must include any data and utility programs needed for + reproducing the executable from it. However, as a special exception, + the materials to be distributed need not include anything that is + normally distributed (in either source or binary form) with the major + components (compiler, kernel, and so on) of the operating system on + which the executable runs, unless that component itself accompanies + the executable. + + It may happen that this requirement contradicts the license + restrictions of other proprietary libraries that do not normally + accompany the operating system. Such a contradiction means you cannot + use both them and the Library together in an executable that you + distribute. + + 7. 
You may place library facilities that are a work based on the + Library side-by-side in a single library together with other library + facilities not covered by this License, and distribute such a combined + library, provided that the separate distribution of the work based on + the Library and of the other library facilities is otherwise + permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute + the Library except as expressly provided under this License. Any + attempt otherwise to copy, modify, sublicense, link with, or + distribute the Library is void, and will automatically terminate your + rights under this License. However, parties who have received copies, + or rights, from you under this License will not have their licenses + terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Library or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Library (or any work based on the + Library), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Library or works based on it. + + 10. 
Each time you redistribute the Library (or any work based on the + Library), the recipient automatically receives a license from the + original licensor to copy, distribute, link with or modify the Library + subject to these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties with + this License. + + 11. If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Library at all. For example, if a patent + license would not permit royalty-free redistribution of the Library by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Library. + + If any portion of this section is held invalid or unenforceable under any + particular circumstance, the balance of the section is intended to apply, + and the section as a whole is intended to apply in other circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system which is + implemented by public license practices. 
Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Library under this License may add + an explicit geographical distribution limitation excluding those countries, + so that distribution is permitted only in or among countries not thus + excluded. In such case, this License incorporates the limitation as if + written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new + versions of the Lesser General Public License from time to time. + Such new versions will be similar in spirit to the present version, + but may differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the Library + specifies a version number of this License which applies to it and + "any later version", you have the option of following the terms and + conditions either of that version or of any later version published by + the Free Software Foundation. If the Library does not specify a + license version number, you may choose any version ever published by + the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free + programs whose distribution conditions are incompatible with these, + write to the author to ask for permission. For software which is + copyrighted by the Free Software Foundation, write to the Free + Software Foundation; we sometimes make exceptions for this. 
Our + decision will be guided by the two goals of preserving the free status + of all derivatives of our free software and of promoting the sharing + and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO + WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. + EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR + OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY + KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE + LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME + THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN + WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY + AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU + FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR + CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE + LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING + RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A + FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF + SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH + DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest + possible use to the public, we recommend making it free software that + everyone can redistribute and change. You can do so by permitting + redistribution under these terms (or, alternatively, under the terms of the + ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is + safest to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least the + "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + Also add information on how to contact you by electronic and paper mail. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the library, if + necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + + That's all there is to it! 
diff --git a/licenses/jnr-x86asm.txt b/licenses/jnr-x86asm.txt new file mode 100644 index 00000000000..c9583db05fd --- /dev/null +++ b/licenses/jnr-x86asm.txt @@ -0,0 +1,24 @@ + + Copyright (C) 2010 Wayne Meissner + Copyright (c) 2008-2009, Petr Kobalicek + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. diff --git a/licenses/reactive-streams.txt b/licenses/reactive-streams.txt new file mode 100644 index 00000000000..1e141c13ddb --- /dev/null +++ b/licenses/reactive-streams.txt @@ -0,0 +1,7 @@ +MIT No Attribution + +Copyright 2014 Reactive Streams + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/licenses/slf4j-api.txt b/licenses/slf4j-api.txt new file mode 100644 index 00000000000..bb09a9ad4ec --- /dev/null +++ b/licenses/slf4j-api.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2023 QOS.ch +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/manual/core/README.md b/manual/core/README.md index a8f97cc4106..5ca4cd7872f 100644 --- a/manual/core/README.md +++ b/manual/core/README.md @@ -24,7 +24,7 @@ following coordinates: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} diff --git a/manual/core/bom/README.md b/manual/core/bom/README.md index b2a8f205554..235edcf632c 100644 --- a/manual/core/bom/README.md +++ b/manual/core/bom/README.md @@ -30,7 +30,7 @@ To import the driver's BOM, add the following section in your application's own - com.datastax.oss + org.apache.cassandra java-driver-bom 4.17.0 pom @@ -47,7 +47,7 @@ This allows you to omit the version when you later reference the driver artifact ... - com.datastax.oss + org.apache.cassandra java-driver-query-builder @@ -71,7 +71,7 @@ scope: ```xml - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor provided @@ -89,7 +89,7 @@ good idea to extract a property to keep it in sync with the BOM: - com.datastax.oss + org.apache.cassandra java-driver-bom ${java-driver.version} pom @@ -100,7 +100,7 @@ good idea to extract a property to keep it in sync with the BOM: - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime @@ -112,7 +112,7 @@ good idea to extract a property to keep it in sync with the BOM: - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor ${java-driver.version} diff --git a/manual/core/configuration/README.md b/manual/core/configuration/README.md index a30b79842bb..deefadbe3d4 100644 --- a/manual/core/configuration/README.md +++ b/manual/core/configuration/README.md @@ -376,7 +376,7 @@ using Maven, this can be achieved as follows: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ... 
diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 1f102c2189e..2dfc0155c63 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -149,7 +149,7 @@ dependencies, and tell Maven that we're going to use Java 8: - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -374,7 +374,7 @@ In that case, you can exclude the dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -402,7 +402,7 @@ are not available on your platform, you can exclude the following dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -433,7 +433,7 @@ your application, then you can remove the dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -456,7 +456,7 @@ dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -481,7 +481,7 @@ don't use any of the above features, you can safely exclude the dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -504,7 +504,7 @@ anywhere in your application you can exclude the dependency: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -544,7 +544,7 @@ you can exclude the TinkerPop dependencies: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} @@ -608,7 +608,7 @@ without it. 
If you never call any of the `executeReactive` methods, you can excl ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} diff --git a/manual/core/metrics/README.md b/manual/core/metrics/README.md index b5dda977d5c..ef5d9b453f0 100644 --- a/manual/core/metrics/README.md +++ b/manual/core/metrics/README.md @@ -56,7 +56,7 @@ module contains the actual bindings for Micrometer, and depends itself on the Mi ```xml - com.datastax.oss + org.apache.cassandra java-driver-metrics-micrometer ${driver.version} @@ -67,7 +67,7 @@ driver, because they are not relevant when using Micrometer: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core @@ -100,7 +100,7 @@ library: ```xml - com.datastax.oss + org.apache.cassandra java-driver-metrics-microprofile ${driver.version} @@ -111,7 +111,7 @@ driver, because they are not relevant when using MicroProfile Metrics: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core diff --git a/manual/core/shaded_jar/README.md b/manual/core/shaded_jar/README.md index a6dfac9053e..8e183c0efb5 100644 --- a/manual/core/shaded_jar/README.md +++ b/manual/core/shaded_jar/README.md @@ -29,7 +29,7 @@ dependency to `java-driver-core` by: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core-shaded ${driver.version} @@ -40,18 +40,18 @@ you need to remove its dependency to the non-shaded JAR: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core-shaded ${driver.version} - com.datastax.oss + org.apache.cassandra java-driver-query-builder ${driver.version} - com.datastax.oss + org.apache.cassandra java-driver-core @@ -70,7 +70,7 @@ Notes: ```xml - com.datastax.oss + org.apache.cassandra java-driver-core ${driver.version} diff --git a/manual/mapper/README.md b/manual/mapper/README.md index 2c64897243f..27005b671ad 100644 --- a/manual/mapper/README.md +++ b/manual/mapper/README.md @@ -22,8 +22,8 @@ under the License. 
The mapper generates the boilerplate to execute queries and convert the results into application-level objects. -It is published as two artifacts: `com.datastax.oss:java-driver-mapper-processor` and -`com.datastax.oss:java-driver-mapper-runtime`. See [Integration](config/) for detailed instructions +It is published as two artifacts: `org.apache.cassandra:java-driver-mapper-processor` and +`org.apache.cassandra:java-driver-mapper-runtime`. See [Integration](config/) for detailed instructions for different build tools. ### Quick start diff --git a/manual/mapper/config/README.md b/manual/mapper/config/README.md index 8adc0e63b33..1e4f9981306 100644 --- a/manual/mapper/config/README.md +++ b/manual/mapper/config/README.md @@ -40,7 +40,7 @@ configuration (make sure you use version 3.5 or higher): - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime ${java-driver.version} @@ -56,7 +56,7 @@ configuration (make sure you use version 3.5 or higher): 1.8 - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor ${java-driver.version} @@ -80,13 +80,13 @@ as a regular dependency in the "provided" scope: ```xml - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor ${java-driver.version} provided - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime ${java-driver.version} diff --git a/manual/mapper/config/kotlin/README.md b/manual/mapper/config/kotlin/README.md index 07dcf20f4bf..a78bf04fb79 100644 --- a/manual/mapper/config/kotlin/README.md +++ b/manual/mapper/config/kotlin/README.md @@ -98,7 +98,7 @@ before compilation: - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor ${java-driver.version} diff --git a/manual/query_builder/README.md b/manual/query_builder/README.md index c17cd30d161..d1932b329e7 100644 --- a/manual/query_builder/README.md +++ b/manual/query_builder/README.md @@ -31,7 +31,7 @@ To use it in your application, add the following dependency: ```xml - com.datastax.oss + 
org.apache.cassandra java-driver-query-builder ${driver.version} diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f845bf85a07..9c13fd8b9c8 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -40,7 +40,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime @@ -92,7 +92,7 @@ test - com.datastax.oss + org.apache.cassandra java-driver-core test test-jar @@ -108,6 +108,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 8967581e05a..beddd6dcaf9 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -89,7 +89,7 @@ test - com.datastax.oss + org.apache.cassandra java-driver-core test test-jar @@ -105,6 +105,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index c202e9113d5..8796edcf149 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT ../../ @@ -46,7 +46,7 @@ micrometer-core - com.datastax.oss + org.apache.cassandra java-driver-core @@ -95,7 +95,7 @@ test - com.datastax.oss + org.apache.cassandra java-driver-core test test-jar @@ -111,6 +111,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index f0045c35974..7f9bed419a7 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT ../../ @@ -46,7 +46,7 @@ microprofile-metrics-api - com.datastax.oss + org.apache.cassandra java-driver-core @@ -100,7 +100,7 @@ test - com.datastax.oss + org.apache.cassandra 
java-driver-core test test-jar @@ -116,6 +116,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index a5085050930..b9d119c1ad7 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -41,19 +41,19 @@ - com.datastax.oss + org.apache.cassandra java-driver-core - com.datastax.oss + org.apache.cassandra java-driver-query-builder - com.datastax.oss + org.apache.cassandra java-driver-mapper-processor - com.datastax.oss + org.apache.cassandra java-driver-mapper-runtime @@ -104,7 +104,7 @@ provided - com.datastax.oss + org.apache.cassandra java-driver-test-infra test diff --git a/pom.xml b/pom.xml index aac375139b5..3f707a93dc8 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ apache 23 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT pom @@ -43,6 +43,7 @@ test-infra integration-tests osgi-tests + distribution-source distribution distribution-tests examples @@ -52,6 +53,7 @@ UTF-8 UTF-8 1.4.1 + 2.1.12 4.1.18 4.1.94.Final @@ -61,7 +63,9 @@ manual/core/integration/README.md --> 3.5.6 + 1.7.26 + 1.0.3 20230227 2.13.4 @@ -95,7 +99,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-core ${project.version} test-jar @@ -139,6 +143,7 @@ com.github.jnr jnr-posix + 3.1.15 @@ -567,6 +572,10 @@ + + + com.datastax.oss:${project.artifactId}:RELEASE + @@ -759,10 +768,8 @@ limitations under the License.]]> jar-no-fork - - NOTICE.txt - + LICENSE_binary NOTICE_binary.txt @@ -793,28 +800,6 @@ limitations under the License.]]> - - attach-javadocs - - jar - - -

          - - - - -]]>
          - - --allow-script-in-comments - - - check-api-leaks diff --git a/query-builder/pom.xml b/query-builder/pom.xml index d35fd834748..9d649a30649 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -41,7 +41,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-core @@ -74,7 +74,7 @@ test - com.datastax.oss + org.apache.cassandra java-driver-core test test-jar @@ -90,6 +90,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 3b20ad2f4f1..02354e09564 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -21,7 +21,7 @@ 4.0.0 - com.datastax.oss + org.apache.cassandra java-driver-parent 4.17.1-SNAPSHOT @@ -41,7 +41,7 @@ - com.datastax.oss + org.apache.cassandra java-driver-core ${project.parent.version} @@ -85,6 +85,7 @@ LICENSE NOTICE_binary.txt + NOTICE.txt META-INF From 105d378fce16804a8af4c26cf732340a0c63b3c9 Mon Sep 17 00:00:00 2001 From: mck Date: Thu, 7 Dec 2023 23:36:48 +0100 Subject: [PATCH 874/979] [maven-release-plugin] prepare release 4.18.0 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 973171c5c5d..74189e14d65 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-bom pom @@ -33,42 +33,42 @@ org.apache.cassandra java-driver-core - 4.17.1-SNAPSHOT + 4.18.0 
org.apache.cassandra java-driver-core-shaded - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-mapper-processor - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-mapper-runtime - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-query-builder - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-test-infra - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-metrics-micrometer - 4.17.1-SNAPSHOT + 4.18.0 org.apache.cassandra java-driver-metrics-microprofile - 4.17.1-SNAPSHOT + 4.18.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 55250d59d6e..75858b611a8 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index ebe7a286b21..fc71515d61b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index d4db09c7091..fa54a25376e 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index fd5378afc25..9d168679c6a 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 706157d9a98..55b9ace5233 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-distribution diff --git 
a/examples/pom.xml b/examples/pom.xml index c971c0355ae..e5e48d59557 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.17.1-SNAPSHOT + 4.18.0 java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 5e12c2f9ae2..356eb8d0571 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 9c13fd8b9c8..f2699cb4cb4 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index beddd6dcaf9..c8b035c30d0 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8796edcf149..56ddabd5af0 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 7f9bed419a7..fda46cdfac3 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index b9d119c1ad7..0906c6f72f2 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 
java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 3f707a93dc8..072749ce7e0 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1022,7 +1022,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.18.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 9d649a30649..9c40ad6d277 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 02354e09564..fe7a3f35b3f 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.17.1-SNAPSHOT + 4.18.0 java-driver-test-infra bundle From 7637a5b2438c2758215e8f6f469a63780c6af75d Mon Sep 17 00:00:00 2001 From: mck Date: Thu, 7 Dec 2023 23:36:54 +0100 Subject: [PATCH 875/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 74189e14d65..72e00c48355 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-bom pom @@ -33,42 +33,42 @@ 
org.apache.cassandra java-driver-core - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-core-shaded - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-mapper-processor - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-mapper-runtime - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-query-builder - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-test-infra - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-metrics-micrometer - 4.18.0 + 4.18.1-SNAPSHOT org.apache.cassandra java-driver-metrics-microprofile - 4.18.0 + 4.18.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 75858b611a8..c2768c3a642 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index fc71515d61b..c54c6b8c642 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index fa54a25376e..8c4f695afdd 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 9d168679c6a..099bddba900 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 55b9ace5233..8933d3f5f3a 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 
4.18.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index e5e48d59557..7e2d7f1b6d0 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.18.0 + 4.18.1-SNAPSHOT java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 356eb8d0571..5c684e90b2a 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index f2699cb4cb4..768327591d6 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index c8b035c30d0..95ead75ddd8 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 56ddabd5af0..1405ae0b6c2 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index fda46cdfac3..6ba084396d1 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 0906c6f72f2..859a69400b9 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ 
org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 072749ce7e0..350d0518496 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1022,7 +1022,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.18.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 9c40ad6d277..f1828b62462 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index fe7a3f35b3f..9089d4d1019 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.0 + 4.18.1-SNAPSHOT java-driver-test-infra bundle From 346cab5b3e8a5f1888ba2633fa530c5934009ba0 Mon Sep 17 00:00:00 2001 From: mck Date: Fri, 8 Dec 2023 00:16:34 +0100 Subject: [PATCH 876/979] Remove distributionManagement, the apache parent defines this for us --- pom.xml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pom.xml b/pom.xml index 350d0518496..221e1f69a86 100644 --- a/pom.xml +++ b/pom.xml @@ -1004,12 +1004,6 @@ limitations under the License.]]> - - - ossrh - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - Apache 2 From 8352d4c733da70461dc5cb7763a98d4d0c960925 Mon Sep 17 00:00:00 2001 From: mck Date: Fri, 8 Dec 2023 10:08:03 +0100 Subject: [PATCH 877/979] Remove fossa dependency analysis github action ASF does not have a subscription for fossa --- .github/workflows/dep-lic-scan.yaml | 39 ----------------------------- 1 file changed, 39 deletions(-) delete mode 100644 .github/workflows/dep-lic-scan.yaml diff --git 
a/.github/workflows/dep-lic-scan.yaml b/.github/workflows/dep-lic-scan.yaml deleted file mode 100644 index 54fabe2dc8f..00000000000 --- a/.github/workflows/dep-lic-scan.yaml +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -name: Dependency and License Scan -on: - push: - branches: - - '4.x' - - '3.x' - paths-ignore: - - 'manual/**' - - 'faq/**' - - 'upgrade_guide/**' - - 'changelog/**' -jobs: - scan-repo: - runs-on: ubuntu-latest - steps: - - name: Check out code - uses: actions/checkout@v2 - - name: Install Fossa CLI - run: | - curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install-latest.sh | bash -s -- -b . 
- - name: Scan for dependencies and licenses - run: | - FOSSA_API_KEY=${{ secrets.FOSSA_PUSH_ONLY_API_KEY }} ./fossa analyze From 8d5849cb38995b312f29314d18256c0c3e94cf07 Mon Sep 17 00:00:00 2001 From: mck Date: Fri, 8 Dec 2023 19:48:41 +0100 Subject: [PATCH 878/979] Remove ASL header from test resource files (that was breaking integration tests) patch by Mick Semb Wever; reviewed by Wei Deng for CASSANDRA-18970 --- .../src/test/resources/DescribeIT/dse/4.8.cql | 18 ------------------ .../src/test/resources/DescribeIT/dse/5.0.cql | 18 ------------------ .../src/test/resources/DescribeIT/dse/5.1.cql | 18 ------------------ .../src/test/resources/DescribeIT/dse/6.8.cql | 18 ------------------ .../src/test/resources/DescribeIT/oss/2.1.cql | 18 ------------------ .../src/test/resources/DescribeIT/oss/2.2.cql | 18 ------------------ .../src/test/resources/DescribeIT/oss/3.0.cql | 18 ------------------ .../src/test/resources/DescribeIT/oss/3.11.cql | 18 ------------------ .../src/test/resources/DescribeIT/oss/4.0.cql | 18 ------------------ 9 files changed, 162 deletions(-) diff --git a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql index 35eee187776..ea6ca93bcbf 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/4.8.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql index 077c9dd1399..2572df52e24 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.0.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql index 077c9dd1399..2572df52e24 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/5.1.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql index 76871de4e1f..bdeb4737748 100644 --- a/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql +++ b/integration-tests/src/test/resources/DescribeIT/dse/6.8.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql index 35eee187776..ea6ca93bcbf 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.1.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql index e35703b30cc..a4035ffa90e 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/2.2.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql index 077c9dd1399..2572df52e24 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.0.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql index 077c9dd1399..2572df52e24 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/3.11.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; diff --git a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql index a78bed4b816..abc70728206 100644 --- a/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql +++ b/integration-tests/src/test/resources/DescribeIT/oss/4.0.cql @@ -1,21 +1,3 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; From 8e73232102d6275b4f13de9d089d3a9b224c9727 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Thu, 18 Jan 2024 14:20:44 -0500 Subject: [PATCH 879/979] CASSANDRA-19180: Support reloading keystore in cassandra-java-driver --- .../api/core/config/DefaultDriverOption.java | 6 + .../api/core/config/TypedDriverOption.java | 6 + .../core/ssl/DefaultSslEngineFactory.java | 35 ++- .../core/ssl/ReloadingKeyManagerFactory.java | 257 +++++++++++++++++ core/src/main/resources/reference.conf | 7 + .../ssl/ReloadingKeyManagerFactoryTest.java | 272 ++++++++++++++++++ .../ReloadingKeyManagerFactoryTest/README.md | 39 +++ .../certs/client-alternate.keystore | Bin 0 -> 2467 bytes .../certs/client-original.keystore | Bin 0 -> 2457 bytes .../certs/client.truststore | Bin 0 -> 1002 bytes .../certs/server.keystore | Bin 0 -> 2407 bytes .../certs/server.truststore | Bin 0 -> 1890 bytes manual/core/ssl/README.md | 10 +- upgrade_guide/README.md | 11 + 14 files changed, 627 insertions(+), 16 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/README.md create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-alternate.keystore create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-original.keystore create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client.truststore create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/server.keystore create mode 100644 core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/server.truststore diff --git 
a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 4c0668570b2..c10a8237c43 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -255,6 +255,12 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link String} */ SSL_KEYSTORE_PASSWORD("advanced.ssl-engine-factory.keystore-password"), + /** + * The duration between attempts to reload the keystore. + * + *

          Value-type: {@link java.time.Duration} + */ + SSL_KEYSTORE_RELOAD_INTERVAL("advanced.ssl-engine-factory.keystore-reload-interval"), /** * The location of the truststore file. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index ec36079730f..88c012fa351 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -235,6 +235,12 @@ public String toString() { /** The keystore password. */ public static final TypedDriverOption SSL_KEYSTORE_PASSWORD = new TypedDriverOption<>(DefaultDriverOption.SSL_KEYSTORE_PASSWORD, GenericType.STRING); + + /** The duration between attempts to reload the keystore. */ + public static final TypedDriverOption SSL_KEYSTORE_RELOAD_INTERVAL = + new TypedDriverOption<>( + DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL, GenericType.DURATION); + /** The location of the truststore file. 
*/ public static final TypedDriverOption SSL_TRUSTSTORE_PATH = new TypedDriverOption<>(DefaultDriverOption.SSL_TRUSTSTORE_PATH, GenericType.STRING); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java index 085b36dc539..55a6e9c7da8 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java @@ -27,11 +27,12 @@ import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.security.KeyStore; import java.security.SecureRandom; +import java.time.Duration; import java.util.List; -import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; @@ -54,6 +55,7 @@ * truststore-password = password123 * keystore-path = /path/to/client.keystore * keystore-password = password123 + * keystore-reload-interval = 30 minutes * } * } * @@ -66,6 +68,7 @@ public class DefaultSslEngineFactory implements SslEngineFactory { private final SSLContext sslContext; private final String[] cipherSuites; private final boolean requireHostnameValidation; + private ReloadingKeyManagerFactory kmf; /** Builds a new instance from the driver configuration. */ public DefaultSslEngineFactory(DriverContext driverContext) { @@ -132,20 +135,8 @@ protected SSLContext buildContext(DriverExecutionProfile config) throws Exceptio } // initialize keystore if configured. 
- KeyManagerFactory kmf = null; if (config.isDefined(DefaultDriverOption.SSL_KEYSTORE_PATH)) { - try (InputStream ksf = - Files.newInputStream( - Paths.get(config.getString(DefaultDriverOption.SSL_KEYSTORE_PATH)))) { - KeyStore ks = KeyStore.getInstance("JKS"); - char[] password = - config.isDefined(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) - ? config.getString(DefaultDriverOption.SSL_KEYSTORE_PASSWORD).toCharArray() - : null; - ks.load(ksf, password); - kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(ks, password); - } + kmf = buildReloadingKeyManagerFactory(config); } context.init( @@ -159,8 +150,22 @@ protected SSLContext buildContext(DriverExecutionProfile config) throws Exceptio } } + private ReloadingKeyManagerFactory buildReloadingKeyManagerFactory( + DriverExecutionProfile config) { + Path keystorePath = Paths.get(config.getString(DefaultDriverOption.SSL_KEYSTORE_PATH)); + String password = + config.isDefined(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) + ? config.getString(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) + : null; + Duration reloadInterval = + config.isDefined(DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL) + ? config.getDuration(DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL) + : Duration.ZERO; + return ReloadingKeyManagerFactory.create(keystorePath, password, reloadInterval); + } + @Override public void close() throws Exception { - // nothing to do + kmf.close(); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java new file mode 100644 index 00000000000..9aaee701114 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java @@ -0,0 +1,257 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.ssl; + +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.Socket; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.security.Principal; +import java.security.PrivateKey; +import java.security.Provider; +import java.security.UnrecoverableKeyException; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Arrays; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import javax.net.ssl.KeyManager; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManagerFactorySpi; +import javax.net.ssl.ManagerFactoryParameters; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.X509ExtendedKeyManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class 
ReloadingKeyManagerFactory extends KeyManagerFactory implements AutoCloseable { + private static final Logger logger = LoggerFactory.getLogger(ReloadingKeyManagerFactory.class); + private static final String KEYSTORE_TYPE = "JKS"; + private Path keystorePath; + private String keystorePassword; + private ScheduledExecutorService executor; + private final Spi spi; + + // We're using a single thread executor so this shouldn't need to be volatile, since all updates + // to lastDigest should come from the same thread + private volatile byte[] lastDigest; + + /** + * Create a new {@link ReloadingKeyManagerFactory} with the given keystore file and password, + * reloading from the file's content at the given interval. This function will do an initial + * reload before returning, to confirm that the file exists and is readable. + * + * @param keystorePath the keystore file to reload + * @param keystorePassword the keystore password + * @param reloadInterval the duration between reload attempts. Set to {@link + * java.time.Duration#ZERO} to disable scheduled reloading. 
+ * @return + */ + public static ReloadingKeyManagerFactory create( + Path keystorePath, String keystorePassword, Duration reloadInterval) { + KeyManagerFactory kmf; + try { + kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + + KeyStore ks; + try (InputStream ksf = Files.newInputStream(keystorePath)) { + ks = KeyStore.getInstance(KEYSTORE_TYPE); + ks.load(ksf, keystorePassword.toCharArray()); + } catch (IOException | CertificateException | KeyStoreException | NoSuchAlgorithmException e) { + throw new RuntimeException(e); + } + try { + kmf.init(ks, keystorePassword.toCharArray()); + } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableKeyException e) { + throw new RuntimeException(e); + } + + ReloadingKeyManagerFactory reloadingKeyManagerFactory = new ReloadingKeyManagerFactory(kmf); + reloadingKeyManagerFactory.start(keystorePath, keystorePassword, reloadInterval); + return reloadingKeyManagerFactory; + } + + @VisibleForTesting + protected ReloadingKeyManagerFactory(KeyManagerFactory initial) { + this( + new Spi((X509ExtendedKeyManager) initial.getKeyManagers()[0]), + initial.getProvider(), + initial.getAlgorithm()); + } + + private ReloadingKeyManagerFactory(Spi spi, Provider provider, String algorithm) { + super(spi, provider, algorithm); + this.spi = spi; + } + + private void start(Path keystorePath, String keystorePassword, Duration reloadInterval) { + this.keystorePath = keystorePath; + this.keystorePassword = keystorePassword; + this.executor = + Executors.newScheduledThreadPool( + 1, + runnable -> { + Thread t = Executors.defaultThreadFactory().newThread(runnable); + t.setDaemon(true); + return t; + }); + + // Ensure that reload is called once synchronously, to make sure the file exists etc. 
+ reload(); + + if (!reloadInterval.isZero()) + this.executor.scheduleWithFixedDelay( + this::reload, + reloadInterval.toMillis(), + reloadInterval.toMillis(), + TimeUnit.MILLISECONDS); + } + + @VisibleForTesting + void reload() { + try { + reload0(); + } catch (Exception e) { + logger.warn("Failed to reload", e); + } + } + + private synchronized void reload0() + throws NoSuchAlgorithmException, IOException, KeyStoreException, CertificateException, + UnrecoverableKeyException { + logger.debug("Checking KeyStore file {} for updates", keystorePath); + + final byte[] keyStoreBytes = Files.readAllBytes(keystorePath); + final byte[] newDigest = digest(keyStoreBytes); + if (lastDigest != null && Arrays.equals(lastDigest, digest(keyStoreBytes))) { + logger.debug("KeyStore file content has not changed; skipping update"); + return; + } + + final KeyStore keyStore = KeyStore.getInstance(KEYSTORE_TYPE); + try (InputStream inputStream = new ByteArrayInputStream(keyStoreBytes)) { + keyStore.load(inputStream, keystorePassword.toCharArray()); + } + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + kmf.init(keyStore, keystorePassword.toCharArray()); + logger.info("Detected updates to KeyStore file {}", keystorePath); + + this.spi.keyManager.set((X509ExtendedKeyManager) kmf.getKeyManagers()[0]); + this.lastDigest = newDigest; + } + + @Override + public void close() throws Exception { + if (executor != null) { + executor.shutdown(); + } + } + + private static byte[] digest(byte[] payload) throws NoSuchAlgorithmException { + final MessageDigest digest = MessageDigest.getInstance("SHA-256"); + return digest.digest(payload); + } + + private static class Spi extends KeyManagerFactorySpi { + DelegatingKeyManager keyManager; + + Spi(X509ExtendedKeyManager initial) { + this.keyManager = new DelegatingKeyManager(initial); + } + + @Override + protected void engineInit(KeyStore ks, char[] password) { + throw new UnsupportedOperationException(); 
+ } + + @Override + protected void engineInit(ManagerFactoryParameters spec) { + throw new UnsupportedOperationException(); + } + + @Override + protected KeyManager[] engineGetKeyManagers() { + return new KeyManager[] {keyManager}; + } + } + + private static class DelegatingKeyManager extends X509ExtendedKeyManager { + AtomicReference delegate; + + DelegatingKeyManager(X509ExtendedKeyManager initial) { + delegate = new AtomicReference<>(initial); + } + + void set(X509ExtendedKeyManager keyManager) { + delegate.set(keyManager); + } + + @Override + public String chooseEngineClientAlias(String[] keyType, Principal[] issuers, SSLEngine engine) { + return delegate.get().chooseEngineClientAlias(keyType, issuers, engine); + } + + @Override + public String chooseEngineServerAlias(String keyType, Principal[] issuers, SSLEngine engine) { + return delegate.get().chooseEngineServerAlias(keyType, issuers, engine); + } + + @Override + public String[] getClientAliases(String keyType, Principal[] issuers) { + return delegate.get().getClientAliases(keyType, issuers); + } + + @Override + public String chooseClientAlias(String[] keyType, Principal[] issuers, Socket socket) { + return delegate.get().chooseClientAlias(keyType, issuers, socket); + } + + @Override + public String[] getServerAliases(String keyType, Principal[] issuers) { + return delegate.get().getServerAliases(keyType, issuers); + } + + @Override + public String chooseServerAlias(String keyType, Principal[] issuers, Socket socket) { + return delegate.get().chooseServerAlias(keyType, issuers, socket); + } + + @Override + public X509Certificate[] getCertificateChain(String alias) { + return delegate.get().getCertificateChain(alias); + } + + @Override + public PrivateKey getPrivateKey(String alias) { + return delegate.get().getPrivateKey(alias); + } + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 75bed97e498..d1ac22e553b 100644 --- 
a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -790,6 +790,13 @@ datastax-java-driver { // truststore-password = password123 // keystore-path = /path/to/client.keystore // keystore-password = password123 + + # The duration between attempts to reload the keystore from the contents of the file specified + # by `keystore-path`. This is mainly relevant in environments where certificates have short + # lifetimes and applications are restarted infrequently, since an expired client certificate + # will prevent new connections from being established until the application is restarted. If + # not set, defaults to not reload the keystore. + // keystore-reload-interval = 30 minutes } # The generator that assigns a microsecond timestamp to each request. diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java new file mode 100644 index 00000000000..d291924b800 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java @@ -0,0 +1,272 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.ssl; + +import static java.nio.file.StandardCopyOption.COPY_ATTRIBUTES; +import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; + +import java.io.IOException; +import java.io.InputStream; +import java.math.BigInteger; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.net.SocketException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Optional; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Supplier; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLPeerUnverifiedException; +import javax.net.ssl.SSLServerSocket; +import javax.net.ssl.SSLSocket; +import javax.net.ssl.TrustManagerFactory; +import org.junit.Assert; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ReloadingKeyManagerFactoryTest { + private static final Logger logger = + LoggerFactory.getLogger(ReloadingKeyManagerFactoryTest.class); + + static final Path CERT_BASE = + Paths.get( + ReloadingKeyManagerFactoryTest.class + .getResource( + String.format("/%s/certs/", ReloadingKeyManagerFactoryTest.class.getSimpleName())) + .getPath()); + static final Path SERVER_KEYSTORE_PATH = CERT_BASE.resolve("server.keystore"); + static final Path SERVER_TRUSTSTORE_PATH = CERT_BASE.resolve("server.truststore"); + + static final Path ORIGINAL_CLIENT_KEYSTORE_PATH = 
CERT_BASE.resolve("client-original.keystore"); + static final Path ALTERNATE_CLIENT_KEYSTORE_PATH = CERT_BASE.resolve("client-alternate.keystore"); + static final BigInteger ORIGINAL_CLIENT_KEYSTORE_CERT_SERIAL = + convertSerial("7372a966"); // 1936894310 + static final BigInteger ALTERNATE_CLIENT_KEYSTORE_CERT_SERIAL = + convertSerial("e50bf31"); // 240172849 + + // File at this path will change content + static final Path TMP_CLIENT_KEYSTORE_PATH; + + static { + try { + TMP_CLIENT_KEYSTORE_PATH = + Files.createTempFile(ReloadingKeyManagerFactoryTest.class.getSimpleName(), null); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + static final Path CLIENT_TRUSTSTORE_PATH = CERT_BASE.resolve("client.truststore"); + static final String CERTSTORE_PASSWORD = "changeit"; + static final Duration NO_SCHEDULED_RELOAD = Duration.ofMillis(0); + + private static TrustManagerFactory buildTrustManagerFactory() { + TrustManagerFactory tmf; + try (InputStream tsf = Files.newInputStream(CLIENT_TRUSTSTORE_PATH)) { + KeyStore ts = KeyStore.getInstance("JKS"); + char[] password = CERTSTORE_PASSWORD.toCharArray(); + ts.load(tsf, password); + tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(ts); + } catch (Exception e) { + throw new RuntimeException(e); + } + return tmf; + } + + private static SSLContext buildServerSslContext() { + try { + SSLContext context = SSLContext.getInstance("SSL"); + + TrustManagerFactory tmf; + try (InputStream tsf = Files.newInputStream(SERVER_TRUSTSTORE_PATH)) { + KeyStore ts = KeyStore.getInstance("JKS"); + char[] password = CERTSTORE_PASSWORD.toCharArray(); + ts.load(tsf, password); + tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + tmf.init(ts); + } + + KeyManagerFactory kmf; + try (InputStream ksf = Files.newInputStream(SERVER_KEYSTORE_PATH)) { + KeyStore ks = KeyStore.getInstance("JKS"); + char[] password = CERTSTORE_PASSWORD.toCharArray(); + 
ks.load(ksf, password); + kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + kmf.init(ks, password); + } + + context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + return context; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Test + public void client_certificates_should_reload() throws Exception { + Files.copy( + ORIGINAL_CLIENT_KEYSTORE_PATH, TMP_CLIENT_KEYSTORE_PATH, REPLACE_EXISTING, COPY_ATTRIBUTES); + + final BlockingQueue> peerCertificates = + new LinkedBlockingQueue<>(1); + + // Create a listening socket. Make sure there's no backlog so each accept is in order. + SSLContext serverSslContext = buildServerSslContext(); + final SSLServerSocket server = + (SSLServerSocket) serverSslContext.getServerSocketFactory().createServerSocket(); + server.bind(new InetSocketAddress(0), 1); + server.setUseClientMode(false); + server.setNeedClientAuth(true); + Thread serverThread = + new Thread( + () -> { + while (true) { + try { + logger.info("Server accepting client"); + final SSLSocket conn = (SSLSocket) server.accept(); + logger.info("Server accepted client {}", conn); + conn.addHandshakeCompletedListener( + event -> { + boolean offer; + try { + // Transfer certificates to client thread once handshake is complete, so + // it can safely close + // the socket + offer = + peerCertificates.offer( + Optional.of((X509Certificate[]) event.getPeerCertificates())); + } catch (SSLPeerUnverifiedException e) { + offer = peerCertificates.offer(Optional.empty()); + } + Assert.assertTrue(offer); + }); + logger.info("Server starting handshake"); + // Without this, client handshake blocks + conn.startHandshake(); + } catch (IOException e) { + // Not sure why I sometimes see ~thousands of these locally + if (e instanceof SocketException && e.getMessage().contains("Socket closed")) + return; + logger.info("Server accept error", e); + } + } + }); + serverThread.setName(String.format("%s-serverThread", 
this.getClass().getSimpleName())); + serverThread.setDaemon(true); + serverThread.start(); + + final ReloadingKeyManagerFactory kmf = + ReloadingKeyManagerFactory.create( + TMP_CLIENT_KEYSTORE_PATH, CERTSTORE_PASSWORD, NO_SCHEDULED_RELOAD); + // Need a tmf that tells the server to send its certs + final TrustManagerFactory tmf = buildTrustManagerFactory(); + + // Check original client certificate + testClientCertificates( + kmf, + tmf, + server.getLocalSocketAddress(), + () -> { + try { + return peerCertificates.poll(10, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, + certs -> { + Assert.assertEquals(1, certs.length); + X509Certificate cert = certs[0]; + Assert.assertEquals(ORIGINAL_CLIENT_KEYSTORE_CERT_SERIAL, cert.getSerialNumber()); + }); + + // Update keystore content + logger.info("Updating keystore file with new content"); + Files.copy( + ALTERNATE_CLIENT_KEYSTORE_PATH, + TMP_CLIENT_KEYSTORE_PATH, + REPLACE_EXISTING, + COPY_ATTRIBUTES); + kmf.reload(); + + // Check that alternate client certificate was applied + testClientCertificates( + kmf, + tmf, + server.getLocalSocketAddress(), + () -> { + try { + return peerCertificates.poll(30, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }, + certs -> { + Assert.assertEquals(1, certs.length); + X509Certificate cert = certs[0]; + Assert.assertEquals(ALTERNATE_CLIENT_KEYSTORE_CERT_SERIAL, cert.getSerialNumber()); + }); + + kmf.close(); + server.close(); + } + + private static void testClientCertificates( + KeyManagerFactory kmf, + TrustManagerFactory tmf, + SocketAddress serverAddress, + Supplier> certsSupplier, + Consumer certsConsumer) + throws NoSuchAlgorithmException, KeyManagementException, IOException { + SSLContext clientSslContext = SSLContext.getInstance("TLS"); + clientSslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); + final SSLSocket client = (SSLSocket) 
clientSslContext.getSocketFactory().createSocket(); + logger.info("Client connecting"); + client.connect(serverAddress); + logger.info("Client doing handshake"); + client.startHandshake(); + + final Optional lastCertificate = certsSupplier.get(); + logger.info("Client got its certificate back from the server; closing socket"); + client.close(); + Assert.assertNotNull(lastCertificate); + Assert.assertTrue(lastCertificate.isPresent()); + logger.info("Client got its certificate back from server: {}", lastCertificate); + + certsConsumer.accept(lastCertificate.get()); + } + + private static BigInteger convertSerial(String hex) { + final BigInteger serial = new BigInteger(Integer.valueOf(hex, 16).toString()); + logger.info("Serial hex {} is {}", hex, serial); + return serial; + } +} diff --git a/core/src/test/resources/ReloadingKeyManagerFactoryTest/README.md b/core/src/test/resources/ReloadingKeyManagerFactoryTest/README.md new file mode 100644 index 00000000000..9ff9b622e5b --- /dev/null +++ b/core/src/test/resources/ReloadingKeyManagerFactoryTest/README.md @@ -0,0 +1,39 @@ +# How to create cert stores for ReloadingKeyManagerFactoryTest + +Need the following cert stores: +- `server.keystore` +- `client-original.keystore` +- `client-alternate.keystore` +- `server.truststore`: trusts `client-original.keystore` and `client-alternate.keystore` +- `client.truststore`: trusts `server.keystore` + +We shouldn't need any signing requests or chains of trust, since truststores are just including certs directly. 
+ +First create the three keystores: +``` +$ keytool -genkeypair -keyalg RSA -alias server -keystore server.keystore -dname "CN=server" -storepass changeit -keypass changeit +$ keytool -genkeypair -keyalg RSA -alias client-original -keystore client-original.keystore -dname "CN=client-original" -storepass changeit -keypass changeit +$ keytool -genkeypair -keyalg RSA -alias client-alternate -keystore client-alternate.keystore -dname "CN=client-alternate" -storepass changeit -keypass changeit +``` + +Note that we need to use `-keyalg RSA` because keytool's default keyalg is DSA, which TLS 1.3 doesn't support. If DSA is +used, the handshake will fail due to the server not being able to find any authentication schemes compatible with its +x509 certificate ("Unavailable authentication scheme"). + +Then export all the certs: +``` +$ keytool -exportcert -keystore server.keystore -alias server -file server.cert -storepass changeit +$ keytool -exportcert -keystore client-original.keystore -alias client-original -file client-original.cert -storepass changeit +$ keytool -exportcert -keystore client-alternate.keystore -alias client-alternate -file client-alternate.cert -storepass changeit +``` + +Then create the server.truststore that trusts the two client certs: +``` +$ keytool -import -file client-original.cert -alias client-original -keystore server.truststore -storepass changeit +$ keytool -import -file client-alternate.cert -alias client-alternate -keystore server.truststore -storepass changeit +``` + +Then create the client.truststore that trusts the server cert: +``` +$ keytool -import -file server.cert -alias server -keystore client.truststore -storepass changeit +``` diff --git a/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-alternate.keystore b/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-alternate.keystore new file mode 100644 index 0000000000000000000000000000000000000000..91cee636a0baab4d6e8b706c76da6bffd8398f37 GIT 
binary patch literal 2467 zcmY+Ec|6pK8^>qN3{8_AWE>&JwIR$TN$z_vtXZ)}Nu%63a;C=hbw!RwWSlc{Ws$S6 z?)zw#9248H$k9kO3L)WVf3M$f_qBgKujl!^pXc-Z`N05i8W$LX0pJeYaK(6B{5CI` z2TTXx=1>4`a)b>q04)4pE0`}7fO#EZx1)05M*Pop;y9R#4nX%X0CXFpz(_!+E1^f*j_q18d2Sn?o!MLvP>&51fDW$t2UVlHvQ9@L9oBPQ8A4sTlWF&YRT3u*E0+WN?r!T&q}?d|KEf^LS?aLVfMGdgs>) zdDeG(T&lkR>AznZ7kDPQmt{{~r8qf%C@nBTBdgEv+0{x833;>;&$$=wbH;@m?OAE~ z_lZ{Z2Gf{`(y{enkkglPGu5l%9!q(aI-Bm3uj28?q6UKfMha;49c4w0Q9~5^aYS^Z!Pz8jD(=dY?@B=e?!tkO zX2_`>5>KSdKikfskn;0b5OYgJUj4TYn}wNQ#;uD;R6mPpp?iKW+Ub(057Tv|Zgk-I z3J9ldZ?`==HU=N>@AbZ#^n=n3a8FvRCT(*GlP?42t_}3+@*!0?kW3{#RT|n_O4!S^ z>L<~h3Y7Fb>tEyt+&iADmEjvkz!U0ZQYkA?E95)8TgA1L4+x=}7FCAm&06FWsm-dTCS z3C!o|GYVee4{W~qg1MWrZA4@K>EXHqtA>WEB&cE6$6N_u=tjD<6>OQ0w6_g-$4|Y z!ffsmsSo5p{?4Bt47zm`Np^Q^xXa;H92~ULr@mGSmU;sF<(u%R9i%=AR$Lb<$ub?o zY&h%rr(I+DGdUrkvk-@$2Iq|(?LO2LY)yZW(np&~hmNNA0`*d6p+Rj-^HzR zg_Jdhk9x`;7Qv<)$2-&)J0zb%ONU2OGD+ItZk7D5v|BXSKlLfcMLD%73N7v`Oun`6UCnKV=c9LPO|A}*qF%!Jp_mF;W|MxHS zipQrcwGekW>i}(@byZP83q`4xz{ZdiUkr>|~P7r|2`1L_+j%C>Jn_=tSx->)hdrBZ@0BQ!QhdOOkZ7lt$_E-5icT!0N_Ckw z^~ByKk!lHAk(x2OxWA7bSZG$O-(T1~l;lxNy5fty zj8XkND&c}y>C;l68=zYtPmn9f7Ze0i29b_b(2)u_(xks84Ei4f3M&W|xa{fUDuva+ zV9#l2T+mRzh|$0Rkk-GCc(~~Rr0NKjbAdre?a4nI@V`=*`>)hR6>_mdop6;eEn6^^ zHXr-C`hTPUH+7=`h=(%PNAIePZRStSje^Mw^LgS<=-A-IEiC<@s4vMYmfFK3*oS6~ISaf^7ZcrrhN`2JM- zev+3IAaAI6c=`RA;%E2G{6~!i{KkLL9WAmE32ILpGGFdOMWa?IL8(AMXEi@rM-6hx zD>`)k+ulL#iu~f4frGb=j6%MrS8?{uqK+}D313GOIW@C%5&n5mUY^1!jX_9lwft0M znYN{V>sdej3C$!yX9YRg5b@WA_c97_^9{27I8tc(AC!|=QDhraIb_oNg!XOADd$gZ z^nnmUV}mNQ;F|9i@!ObA$Zj z{6!%d5LPC&Mx}atTNGMW*$rKB4jyOe(cFNn4dx0PoU6kx7vOj&?jA?YPp711-D!Q7 zh|~NSX)+%%^MVQPFWGbUn}GcBNlBr-(*y5QGr-BJ*b3b#iqq78CMJw?>$G~`w53;Q z!gvucIyYy&mP99{bL{FW7KKvK2p+m@3J zF>r2}(hm@>Q!o%zPy~f#DSpsZ)k-d0n@SIhTL{zjhYFs5wZbXUDbZeHHiXN*8`~c? 
M&hzAgfaB@^0fMoC8UO$Q literal 0 HcmV?d00001 diff --git a/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-original.keystore b/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client-original.keystore new file mode 100644 index 0000000000000000000000000000000000000000..74e31f7bc6f1ab222cf2c8ac5feb2324d1f01fdf GIT binary patch literal 2457 zcmY+EX*d)L7sqGJjAbkr5r&Z^ySZkDJF;uYmUXTrhLFaRZ3x+l24fo)vQr~lVT>)y zWQ&`mOEHm%L`4}(l##9LeV_Ne_qiX=bI$qw&pBVt^Zbz{o;(g92ub3x<>XN%TaelO zKrSGa#A5^|@#r04Z6pa2_b&+&1SUcJkMPB#@Z#kC-xePf$U!B6_mCto8>zwx{XafB zE(sQiJyu#?hE}x;Zt9+oJV^9#!tTg%fB*ybU=nyJt|{279;mNR@WY$Nh1VBYwZB4q z)vNJTDLf5-5?yZU2}#LPa#)km>Pc1rx4Fm9NHzb_yre-&<^B}C;D5^Yl5Oq zPg~(r7!&;^XWpmjpgiWfW(V$CjpbUUlk-$cL#X&Hxvx?j@i8DSHii38c~1BD5H+fY z$#CO10`N#1T1?6CRz53eU-97eLR-2UKnVZcpEHlkd^y@pvbq8mc3|VwKU&5cW^=nf zBF?RleCf9ynn00_^XA+$`l__nr4mS%qDPF^t_wQQfKx1>VpqUJF0(&}>B|+nr(@}P zkGWqs8Cn%x)JGq_66|=c(z$fOT%fBaIKM@Nk`00)-;N~dFdTLB!i9L>MAnJ>uY}Z1 z433YD(CH=HcJKJ~Pwm)f8%xvgiiZC=FoluLch{+!{P?2Lv?fElEfLkIa7C+pv$mb% z@z!w-28P1q*KySfEztD!%4%YfR}yN1V)x%WLSN`UqhMw}Q|?BXOKmNyVK1vx=6?KO zgOj@1oZiK^(o`!_2j3#w&+&=k>o?r0hfu+^%fdPhQI&jz=q=p^Wr0eyU7OOPqyJ$- zM#4_uy5v_cE3Z!x4Qj!DU3V2!i(Xz68S$z{M+!>dp)G6)rwqy070Zo11Uq6*tr7EW zWR|y&Q%Zli7`OhJ<2@x&qfTEaA(E^2HYX}pfb(qi&1!~k@57b1D~-2yIe$_g%M*?( zrB>}r+?399xdSFxtXg^R{z3>oLGrHu{d)C|Bes)&hubYkTQLzeSgCgZOz6l}d7F+Z z!LtNs`p@K-H`*%lv10>=FmEG;zzI{w`;RCm?W)F}PitE2ZZ{dE*~o&jgVJY0YKED+ z@xW5|Cn?PDBh%U{`Kc;*F8%>qyG93+&b66)P$c_` z0RJHz`=Q_A-Z>@tl$)y(x6wpXiWeliYbibQ{0)y^qlmN<`1Pc|_fjekY$o6mE}-p^ zAZxiSv4FnKU`^O=+PCkU?o9QD|*#3`Se?s3d_}{~qGvq>?~YN2r_w2slc`|0uwJc`oOFJl8HAW>o$` zgK;g8Bdyr;+9x4${m_3sH<1MLN-hjgO5KX57hO|!v3B~XmRZ(trMsy?$KPclb+$C( z(AAMn%Oa|6iTs8t=zol}O#7yI{?Ecq9+f7y4cz^{>&y@-f(lDYFUAL&<>aQ^;NQGy zl3q=%I*sa5R|dKzSjs}GDc$^P?+$JaFlQ7;jlG9L^tg`QI_T(4SsDSC7>u!@Jz=Ms zKXFEqzLw;V3xB3mkSj3WIL$)?6P{oewGD%`ubkdnG70>%0awbH>x8w;tMlC7*y4tK2Q)9 z4a^XDn8=OtR*ZPo=5YA)VJ)tQmNr?d0$&oPu}Z`QbzP>&0ljdDsH`z|CV<#xum8o* 
zXL@=pJ_e&gSJmNP2Z1-)f~#XrBF-t#`n2p9$2X>|sx$B-beQ!yLt09BZp+E8)bOx1 z+~xL*hR>UY-&hFWU1$r!e#BN{)wXY|8sTWfDmDlw-3>o@5sNmMc7dcsx8sJJW6 z*@3gxIQagW(SP~%VxT<4U>93(d&m4qFEc(PKs)XPaeVX5h2A@?{`W(J1mjqQK=z7U z;bi(@U5(jQ3hx@e{e5WpB2k8skUaHJ45@Ei83^sfao*da`6w{eqm^D*b~7<{1CzJZ z^LRhp(I=mIVPtOPx7}m;MU|s=nOEwprlp4+Upo&N54AX7+`3+u?>MElT@{^o*-g3X z;;#44#N#pvqFJOJbWc~bI-uX*{kc^<&EmnSLv}^p6@f%OhYyVo=#`wzcdnS!M%P({ zX8)aSUfcQF#i0Z$R)%ZS;`PgN`_@Mnx%lhC5$9gjpUmhV=L_g#VCt1Jz`7;puYN-s zB4v?0oDeku5C;qb0E>kFtX=4tCoCI=1qcLsrT_o{ literal 0 HcmV?d00001 diff --git a/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client.truststore b/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/client.truststore new file mode 100644 index 0000000000000000000000000000000000000000..3ce9a720dbc7557774ebbfbae2975235cfc52e46 GIT binary patch literal 1002 zcmV2`Yw2hW8Bt2LUiC1_~;MNQUV75 z8*8{D>rwp3j1%KTjZuVQ0s{cUP=JC1FrCLfv|Un~ufMqg!U2hEV#UbjV}nkvFlQfT zws%zX^-+$Sav!Xsm&~zLgw(N++mklJptmni0Rd*-42bRCxb#@tBkj1vJL2}Aa=vBC zPMQtbU(SfTvZUg5FrGs^!)BInqQn-V3mMPE>}8wYgV%;IL=Doa$bq*zeba|fhmr)? 
z{WT@X3G>&d^g3rY(d5aI5(f{}<3lUb2H{J3blG0qi^|N$X^v_Z2pUJvE)*qjm_z?h$x`Hv=IZIyBR9UJF?_KKH53{`;22J8C$gMyNS`7;2EE>X%=a@JIp|6v;!T zY;ymI?&;~1Z#4z=p%*1c$Bdb*6=c!+anhyT~Bbzz0m z_T}hw@W2;-Z>~!h?<)CRno)GNEK^>E<1jMm(BoHpZX}YC63RfgymuL$A>CPT14vHj1ohI0Pzii-mXDj|cvW60od7&MgQ|M=%~BpCiE z;EkLZ4;%5>y+OunWCmmk(MayYhI)mv6 zPQ4~K1*bj`RgbTL_>hv$Sh%;2Rb_%*VqR(zqe-TP|YJMAoGHB-b!c%=c ze(Od0212~nI#ej7BRB&JhQIk!s?ELnvSH5q`bC|Dhp*Cm`d0=z<}I|ZnD}XV86|bd z%QRlh1eb=$jG?I!bqfwlJ42C|%s%ps9{?+s!-%>%>wnm8G#-WS(ZXJ<@($?OUYlMu zpX(32Z!5lbESMj%;x@yv8K62i*e6ZzK`NCJuIBhvX`u^Jh1`Zd?f5)bQ7jb-_?oo7 zc5+Am4oT|aVMa5CE646n>jC4+;pXm)FZ@{!o>wi@SMAIht`*2+>wBH*7`Z2kTpzav zk3~EII`>{M`=uy#dP~2Gt(kSP+;pE>jRp5K^a)7@_x6S?RYO^LA1-!BWKkQ!ouu}Q{MHNp&Z2RG>1!*oDXq%oDhG; z8v@FxP5iW#rtKZQnbKXqaW&mtRHio;U%A$d`O?6UxjpfINFA~_8d2h6t)0P{^Er^2cx;GxP3321=tPI!2$=sg>+BX(XnLKY2*_au_IXLI$-tmXla zcln~;$JZlNaFPcO>2-SdUEV!#jgIIXkg#Csw(=jIib}A@W0%;@iCi~}Mz|`t^s2QK zf<5Y$VqJquw0tBy1zvovIp5noKYlRR=hD%oY9I5!BmVqTEQ z=dt$zj$UeKO|VwWUfQb?F?@egF365BQ$1=4nBv|&JIqaCG1(M;N+Oxh%JW2Hs`&S7 zfmxw!{5(15tTvPI9Z#EsM823-f79Pg#ATkXe0#v!Qg`Hmj}_)q?2RLNGTWbjRd;xe zP9bk8YsP67rk!i;x*MSvM{YNXQ)o|Y-YZ(E_>j@#WP70p+U(e!_@t7aet3;2e^$Hc zjsUQEe6lGIB|NAp;gM1J%UyTFRgR?~68d_!NR*?!+TGgxggUxCnuyF(3v_fId=C!=Zk%KYs>EXqnj-=W!|NWYY04~vh>aX&6=+HH-s#pS_yBB z6%rntQ8K_F|5haou5?nA3lIUg0tg30{natz|AYcca4?@^V8|7ek_JXeSwllz?XSX+ z*>eA0VuzNH*^+*u1P~DLbHx0U0RLrJ=wF7Nbm?oAULYOKWN&Dzcr<+%3%mXDzlJ4| z*$hOl3XRj95W|~)EE8!YGfM$kduN4Qn=YIiPXatQY}pC>&DH?HYtdAD@S~UsnFsWO zr)P^$%i)fqZAV{|p9LUkbRq#&EHO7C0my01{y02H`rI6k2im2Dor)Biy! 
zmugcQDoJ^*AYp8*IuuengO9zbD>F0c34CCFxI6!dHY34vq$e0_=5HyfGEuioKUM52 zQ~M~^{UArogqrE@#OX`;_^lIa2{kZ!=mifHOmZ(5l{0qTD@?Ju<%NfL(_ojAHE-;W zWLxzo7Y5Q1nYn={y)m1I6brk8f5)&5nxQt>@vnAMV zS0>C=qorZ?Ocn8*yg}3+qgl-!idDD=KNqFRwnSfTFs0W>Su+RSPvyTGJ*eP>U0}E2 zUd(Ug7hygN2wgn3-?%z;bK5X2-QWgpcg2h#D8coD{n<8>tu2zNkMava`T7YSjDTU$ z(}LEM$O%nTg6aOV?UO)V&ISQ3;`FGHBf6<#lQsIBd(`o+Q5>4x>6`tz>)7S^BAmJh9y5p&h<*DSy5s~;t8=< zR#V8GzPzlLE;Wi}gBI#}#aop(EEe+nu4MH4E&?OnilWT3#A~Jr8x+@|<9Dw*6@pQ7 zaf+R(xNR+;HpNOugkAo~P}fA-==>K|CN;=tYA>;%J`^YEI`b_jkDuKgb$tK$Sm9&B z0HDI^vbs8SH(lbKpTPpaW8*#uF(R=TPsMY29`1EBY(|DZflTAqW}w3xAC#kvsW!ps zm%$llg@{IU;uBp7(I+Q z1_p)5o@4_FK>%Pl&pd$|>s7ZHh%-Hzg^h8Zm58wi!y`cUL7^SRjK#Odlc~S&q_HPu LD}&g8$tC{*$&6Q$ literal 0 HcmV?d00001 diff --git a/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/server.truststore b/core/src/test/resources/ReloadingKeyManagerFactoryTest/certs/server.truststore new file mode 100644 index 0000000000000000000000000000000000000000..c9b06b5fbe1c2a81cfdcb7319bb82acbb0f6b4a0 GIT binary patch literal 1890 zcmV-o2c7sZf(Kp#0Ru3C2NwnjDuzgg_YDCD0ic2h2n2!$1TcaJ05F0E{00dshDe6@ z4FLxRpn?YNFoFi@0s#Opf(GIS2`Yw2hW8Bt2LUiC1_~;MNQUntF0&nl&gP8eutUG)C z)w>s(%-}4h_Gb1JtX(Gt7Lf@UC}iJX!76zfiIVH0c~njU_FTR+|0xpktwPDK`^Y9v zC5e446hb8OZ#U75j;Ms?U|uJJ;Ee>{0HK4!Z~CZ{>MP)R$nHJ8b^`v&p*PR534D#` zUX=-?!((kjc;`_o$6t3Q8Vm!@C1CabTumt4?w~^ir$fdubO0%R$@dTK@Hp~`1LsH$ ztnm<6t_iJ)*rx{1Z8$SlU#C_zKD!z+Hg5}=&!4Ad|CLESP~?q%$ZGwZiD0Z+d)JfG z{h#&*pKYW3`CuXL0oSVz-W`PbfNk0ccywE}7`4vJiNBuK!M=um?6L>23_8fv$3Xkf zla7@l0l`1gRepm3i51mk9d2a16lX)NWpbN4Nb~*UlTqHm@>p#j()|tMaG{!EK?txB zz9TXuosh|w=K7qc#CVE=bZkix_tumaFKcxM$PC9NV%fbGEf+KbT*^uz5=105W|(Gz zh1WSXjvo@eD3v^to!L5ki(e-mkiE`5xl~T2WPugTRw%T2ev{!*lYLn)`A_@qfY%1B zTfV1dpKPoyX>GX*-L)BMKNCgSIqdP;;pr1k>qN<6L6Y>N4k$Q11k5RSivFQSO~BVJ zc@jUS2tpjyl+t?636-1+=#xyVuQGI&C3jziYebkuEB%={ht9Zx7*(fy6Jlk-E!|Vk zx8C6hY3&W{elehl-nRQnklm4qmxyWM@)BS7_N*p43FLryqfhgYi6tz@qvNXwF4!^7 zqo8wN&kRPfY$$<`^%L{QuaL1Rq?#)xz-HOAiWg_(q090rB_K=e+sz)6(z+ghLOGO{ 
zMCk?=+l_7=Z>;vt${=nyv>CdN=1b*ZO8N@4w>g8DINf3{H{$aE4n>Z6x??J#u{T&o z%54l3cU!1JGMS5EC9e3a1Z8Y&Wp@t>lrOHcs37ug7T>pDgWMczgBy_~}LR>q0Y1Pl{9%j7O)q`KwU6Ksh zRdxKfk~-^lky#Z70A6rIeXi5CEBvr9@(ymtO%n0pEDfBK$I-g_eCFut@fHJT&~?tW ztaY3)I_gBKpC})fn3FTMdRZ=7eIz-MkNNr+V(M^b0c~Lwp zCupPHv*t-fLt_mv3as*+x+yvQH0sqjX~Z);uyx$a7ZvCkHpnuqmo{W*bFF7tFDZeW z_i~-$^{(ho^1Io^|J7>^gaBQtB{o37w+wztlzW{qg7m(d*}JjQ!hq*;4?S|medoRl zEqSS3CV58g+Aerc>21P}Efcs`T{qP?C+bV>W+L~~S$k#BSdXGOjCix%{+>yiZYhs} zYlCX#u_bUbj0RqExB>PJ$*|0HFA<8Y-)Cwrpmt6vysGvweeUqv`qwWNXi)Sj z!1SW!%WDc|3}%u8`*V~tAzXv2RP)L#Y}Gg3ciTdabi9l{SV$)t<4KkPPi&FDR-{OP zTsp#f;>qOE1u6|Ner4vS0|B^Jvf`P*5z0kJ?KHdPVKoLha>0|* zhWhKFT6?9;Xs5l|qcA=&AutIB1uG5%0vZJX1Qg`kli9U7#PfFfuLGPqc&FXlm~;db ctoVTJaCWa4Yfw+L!h=^Pt>>Pu0s{etpskvUfB*mh literal 0 HcmV?d00001 diff --git a/manual/core/ssl/README.md b/manual/core/ssl/README.md index b8aa9b89192..913c7bc6c9a 100644 --- a/manual/core/ssl/README.md +++ b/manual/core/ssl/README.md @@ -94,11 +94,13 @@ If you're using a CA, sign the client certificate with it (see the blog post lin this page). Then the nodes' truststores only need to contain the CA's certificate (which should already be the case if you've followed the steps for inter-node encryption). +`DefaultSslEngineFactory` supports client keystore reloading; see property +`advanced.ssl-engine-factory.keystore-reload-interval`. ### Driver configuration By default, the driver's SSL support is based on the JDK's built-in implementation: JSSE (Java -Secure Socket Extension),. +Secure Socket Extension). To enable it, you need to define an engine factory in the [configuration](../configuration/). @@ -126,6 +128,12 @@ datastax-java-driver { // truststore-password = password123 // keystore-path = /path/to/client.keystore // keystore-password = password123 + + # The duration between attempts to reload the keystore from the contents of the file specified + # by `keystore-path`. 
This is mainly relevant in environments where certificates have short + # lifetimes and applications are restarted infrequently, since an expired client certificate + # will prevent new connections from being established until the application is restarted. + // keystore-reload-interval = 30 minutes } } ``` diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md index e79e8f8cc6d..c6df74ffc2a 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -19,6 +19,17 @@ under the License. ## Upgrade guide +### NEW VERSION PLACEHOLDER + +#### Keystore reloading in DefaultSslEngineFactory + +`DefaultSslEngineFactory` now includes an optional keystore reloading interval, for detecting changes in the local +client keystore file. This is relevant in environments with mTLS enabled and short-lived client certificates, especially +when an application restart might not always happen between a new keystore becoming available and the previous +keystore certificate expiring. + +This feature is disabled by default for compatibility. To enable, see `keystore-reload-interval` in `reference.conf`. + ### 4.17.0 #### Beta support for Java17 From c7719aed14705b735571ecbfbda23d3b8506eb11 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Tue, 23 Jan 2024 16:09:35 -0500 Subject: [PATCH 880/979] PR feedback: avoid extra exception wrapping, provide thread naming, improve error messages, etc. 
--- .../api/core/config/DefaultDriverOption.java | 12 ++--- .../core/ssl/DefaultSslEngineFactory.java | 4 +- .../core/ssl/ReloadingKeyManagerFactory.java | 44 +++++++++---------- 3 files changed, 28 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index c10a8237c43..afe16e96886 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -255,12 +255,6 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link String} */ SSL_KEYSTORE_PASSWORD("advanced.ssl-engine-factory.keystore-password"), - /** - * The duration between attempts to reload the keystore. - * - *

          Value-type: {@link java.time.Duration} - */ - SSL_KEYSTORE_RELOAD_INTERVAL("advanced.ssl-engine-factory.keystore-reload-interval"), /** * The location of the truststore file. * @@ -982,6 +976,12 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: boolean */ METRICS_GENERATE_AGGREGABLE_HISTOGRAMS("advanced.metrics.histograms.generate-aggregable"), + /** + * The duration between attempts to reload the keystore. + * + *

          Value-type: {@link java.time.Duration} + */ + SSL_KEYSTORE_RELOAD_INTERVAL("advanced.ssl-engine-factory.keystore-reload-interval"), ; private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java index 55a6e9c7da8..adf23f8e89a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java @@ -150,8 +150,8 @@ protected SSLContext buildContext(DriverExecutionProfile config) throws Exceptio } } - private ReloadingKeyManagerFactory buildReloadingKeyManagerFactory( - DriverExecutionProfile config) { + private ReloadingKeyManagerFactory buildReloadingKeyManagerFactory(DriverExecutionProfile config) + throws Exception { Path keystorePath = Paths.get(config.getString(DefaultDriverOption.SSL_KEYSTORE_PATH)); String password = config.isDefined(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java index 9aaee701114..540ddfd79fa 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java @@ -73,26 +73,17 @@ public class ReloadingKeyManagerFactory extends KeyManagerFactory implements Aut * @return */ public static ReloadingKeyManagerFactory create( - Path keystorePath, String keystorePassword, Duration reloadInterval) { - KeyManagerFactory kmf; - try { - kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } + Path keystorePath, String 
keystorePassword, Duration reloadInterval) + throws UnrecoverableKeyException, KeyStoreException, NoSuchAlgorithmException, + CertificateException, IOException { + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); KeyStore ks; try (InputStream ksf = Files.newInputStream(keystorePath)) { ks = KeyStore.getInstance(KEYSTORE_TYPE); ks.load(ksf, keystorePassword.toCharArray()); - } catch (IOException | CertificateException | KeyStoreException | NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } - try { - kmf.init(ks, keystorePassword.toCharArray()); - } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableKeyException e) { - throw new RuntimeException(e); } + kmf.init(ks, keystorePassword.toCharArray()); ReloadingKeyManagerFactory reloadingKeyManagerFactory = new ReloadingKeyManagerFactory(kmf); reloadingKeyManagerFactory.start(keystorePath, keystorePassword, reloadInterval); @@ -115,24 +106,26 @@ private ReloadingKeyManagerFactory(Spi spi, Provider provider, String algorithm) private void start(Path keystorePath, String keystorePassword, Duration reloadInterval) { this.keystorePath = keystorePath; this.keystorePassword = keystorePassword; - this.executor = - Executors.newScheduledThreadPool( - 1, - runnable -> { - Thread t = Executors.defaultThreadFactory().newThread(runnable); - t.setDaemon(true); - return t; - }); // Ensure that reload is called once synchronously, to make sure the file exists etc. 
reload(); - if (!reloadInterval.isZero()) + if (!reloadInterval.isZero()) { + this.executor = + Executors.newScheduledThreadPool( + 1, + runnable -> { + Thread t = Executors.defaultThreadFactory().newThread(runnable); + t.setName(String.format("%s-%%d", this.getClass().getSimpleName())); + t.setDaemon(true); + return t; + }); this.executor.scheduleWithFixedDelay( this::reload, reloadInterval.toMillis(), reloadInterval.toMillis(), TimeUnit.MILLISECONDS); + } } @VisibleForTesting @@ -140,7 +133,10 @@ void reload() { try { reload0(); } catch (Exception e) { - logger.warn("Failed to reload", e); + String msg = + "Failed to reload KeyStore. If this continues to happen, your client may use stale identity" + + "certificates and fail to re-establish connections to Cassandra hosts."; + logger.warn(msg, e); } } From ea2e475185b5863ef6eed347f57286d6a3bfd8a9 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Fri, 2 Feb 2024 14:56:22 -0500 Subject: [PATCH 881/979] Address PR feedback: reload-interval to use Optional internally and null in config, rather than using sentinel Duration.ZERO --- .../core/ssl/DefaultSslEngineFactory.java | 14 ++++----- .../core/ssl/ReloadingKeyManagerFactory.java | 29 +++++++++++++------ .../ssl/ReloadingKeyManagerFactoryTest.java | 4 +-- 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java index adf23f8e89a..bb95dc738c7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java @@ -33,6 +33,7 @@ import java.security.SecureRandom; import java.time.Duration; import java.util.List; +import java.util.Optional; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLParameters; @@ -153,14 
+154,11 @@ protected SSLContext buildContext(DriverExecutionProfile config) throws Exceptio private ReloadingKeyManagerFactory buildReloadingKeyManagerFactory(DriverExecutionProfile config) throws Exception { Path keystorePath = Paths.get(config.getString(DefaultDriverOption.SSL_KEYSTORE_PATH)); - String password = - config.isDefined(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) - ? config.getString(DefaultDriverOption.SSL_KEYSTORE_PASSWORD) - : null; - Duration reloadInterval = - config.isDefined(DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL) - ? config.getDuration(DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL) - : Duration.ZERO; + String password = config.getString(DefaultDriverOption.SSL_KEYSTORE_PASSWORD, null); + Optional reloadInterval = + Optional.ofNullable( + config.getDuration(DefaultDriverOption.SSL_KEYSTORE_RELOAD_INTERVAL, null)); + return ReloadingKeyManagerFactory.create(keystorePath, password, reloadInterval); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java index 540ddfd79fa..8a9e11bb2e9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactory.java @@ -36,6 +36,7 @@ import java.security.cert.X509Certificate; import java.time.Duration; import java.util.Arrays; +import java.util.Optional; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -68,12 +69,12 @@ public class ReloadingKeyManagerFactory extends KeyManagerFactory implements Aut * * @param keystorePath the keystore file to reload * @param keystorePassword the keystore password - * @param reloadInterval the duration between reload attempts. Set to {@link - * java.time.Duration#ZERO} to disable scheduled reloading. 
+ * @param reloadInterval the duration between reload attempts. Set to {@link Optional#empty()} to + * disable scheduled reloading. * @return */ - public static ReloadingKeyManagerFactory create( - Path keystorePath, String keystorePassword, Duration reloadInterval) + static ReloadingKeyManagerFactory create( + Path keystorePath, String keystorePassword, Optional reloadInterval) throws UnrecoverableKeyException, KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException { KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); @@ -103,14 +104,24 @@ private ReloadingKeyManagerFactory(Spi spi, Provider provider, String algorithm) this.spi = spi; } - private void start(Path keystorePath, String keystorePassword, Duration reloadInterval) { + private void start( + Path keystorePath, String keystorePassword, Optional reloadInterval) { this.keystorePath = keystorePath; this.keystorePassword = keystorePassword; // Ensure that reload is called once synchronously, to make sure the file exists etc. reload(); - if (!reloadInterval.isZero()) { + if (!reloadInterval.isPresent() || reloadInterval.get().isZero()) { + final String msg = + "KeyStore reloading is disabled. If your Cassandra cluster requires client certificates, " + + "client application restarts are infrequent, and client certificates have short lifetimes, then your client " + + "may fail to re-establish connections to Cassandra hosts. 
To enable KeyStore reloading, see " + + "`advanced.ssl-engine-factory.keystore-reload-interval` in reference.conf."; + logger.info(msg); + } else { + logger.info("KeyStore reloading is enabled with interval {}", reloadInterval.get()); + this.executor = Executors.newScheduledThreadPool( 1, @@ -122,8 +133,8 @@ private void start(Path keystorePath, String keystorePassword, Duration reloadIn }); this.executor.scheduleWithFixedDelay( this::reload, - reloadInterval.toMillis(), - reloadInterval.toMillis(), + reloadInterval.get().toMillis(), + reloadInterval.get().toMillis(), TimeUnit.MILLISECONDS); } } @@ -135,7 +146,7 @@ void reload() { } catch (Exception e) { String msg = "Failed to reload KeyStore. If this continues to happen, your client may use stale identity" - + "certificates and fail to re-establish connections to Cassandra hosts."; + + " certificates and fail to re-establish connections to Cassandra hosts."; logger.warn(msg, e); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java index d291924b800..d07b45c21df 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/ssl/ReloadingKeyManagerFactoryTest.java @@ -34,7 +34,6 @@ import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.X509Certificate; -import java.time.Duration; import java.util.Optional; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; @@ -86,7 +85,6 @@ public class ReloadingKeyManagerFactoryTest { static final Path CLIENT_TRUSTSTORE_PATH = CERT_BASE.resolve("client.truststore"); static final String CERTSTORE_PASSWORD = "changeit"; - static final Duration NO_SCHEDULED_RELOAD = Duration.ofMillis(0); private static TrustManagerFactory 
buildTrustManagerFactory() { TrustManagerFactory tmf; @@ -186,7 +184,7 @@ public void client_certificates_should_reload() throws Exception { final ReloadingKeyManagerFactory kmf = ReloadingKeyManagerFactory.create( - TMP_CLIENT_KEYSTORE_PATH, CERTSTORE_PASSWORD, NO_SCHEDULED_RELOAD); + TMP_CLIENT_KEYSTORE_PATH, CERTSTORE_PASSWORD, Optional.empty()); // Need a tmf that tells the server to send its certs final TrustManagerFactory tmf = buildTrustManagerFactory(); From 7e2c6579af564be6d1b161ec4159ecf517c190b4 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Tue, 6 Feb 2024 15:18:59 -0600 Subject: [PATCH 882/979] CASSANDRA-19352: Support native_transport_(address|port) + native_transport_port_ssl for DSE 6.8 (4.x edition) patch by absurdfarce; reviewed by absurdfarce and adutra for CASSANDRA-19352 --- .../core/metadata/DefaultTopologyMonitor.java | 76 ++++++-- .../metadata/DefaultTopologyMonitorTest.java | 180 ++++++++++++++++-- 2 files changed, 223 insertions(+), 33 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java index 87008b05cec..f3dc988cfbc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitor.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.Error; import edu.umd.cs.findbugs.annotations.NonNull; @@ -69,6 +70,10 @@ public class DefaultTopologyMonitor implements TopologyMonitor 
{ // Assume topology queries never need paging private static final int INFINITE_PAGE_SIZE = -1; + // A few system.peers columns which get special handling below + private static final String NATIVE_PORT = "native_port"; + private static final String NATIVE_TRANSPORT_PORT = "native_transport_port"; + private final String logPrefix; private final InternalDriverContext context; private final ControlConnection controlConnection; @@ -494,28 +499,65 @@ private void savePort(DriverChannel channel) { @Nullable protected InetSocketAddress getBroadcastRpcAddress( @NonNull AdminRow row, @NonNull EndPoint localEndPoint) { - // in system.peers or system.local - InetAddress broadcastRpcInetAddress = row.getInetAddress("rpc_address"); + + InetAddress broadcastRpcInetAddress = null; + Iterator addrCandidates = + Iterators.forArray( + // in system.peers_v2 (Cassandra >= 4.0) + "native_address", + // DSE 6.8 introduced native_transport_address and native_transport_port for the + // listen address. + "native_transport_address", + // in system.peers or system.local + "rpc_address"); + + while (broadcastRpcInetAddress == null && addrCandidates.hasNext()) + broadcastRpcInetAddress = row.getInetAddress(addrCandidates.next()); + // This could only happen if system tables are corrupted, but handle gracefully if (broadcastRpcInetAddress == null) { - // in system.peers_v2 (Cassandra >= 4.0) - broadcastRpcInetAddress = row.getInetAddress("native_address"); - if (broadcastRpcInetAddress == null) { - // This could only happen if system tables are corrupted, but handle gracefully - return null; + LOG.warn( + "[{}] Unable to determine broadcast RPC IP address, returning null. " + + "This is likely due to a misconfiguration or invalid system tables. 
" + + "Please validate the contents of system.local and/or {}.", + logPrefix, + getPeerTableName()); + return null; + } + + Integer broadcastRpcPort = null; + Iterator portCandidates = + Iterators.forArray( + // in system.peers_v2 (Cassandra >= 4.0) + NATIVE_PORT, + // DSE 6.8 introduced native_transport_address and native_transport_port for the + // listen address. + NATIVE_TRANSPORT_PORT, + // system.local for Cassandra >= 4.0 + "rpc_port"); + + while ((broadcastRpcPort == null || broadcastRpcPort == 0) && portCandidates.hasNext()) { + + String colName = portCandidates.next(); + broadcastRpcPort = row.getInteger(colName); + // Support override for SSL port (if enabled) in DSE + if (NATIVE_TRANSPORT_PORT.equals(colName) && context.getSslEngineFactory().isPresent()) { + + String sslColName = colName + "_ssl"; + broadcastRpcPort = row.getInteger(sslColName); } } - // system.local for Cassandra >= 4.0 - Integer broadcastRpcPort = row.getInteger("rpc_port"); + // use the default port if no port information was found in the row; + // note that in rare situations, the default port might not be known, in which case we + // report zero, as advertised in the javadocs of Node and NodeInfo. if (broadcastRpcPort == null || broadcastRpcPort == 0) { - // system.peers_v2 - broadcastRpcPort = row.getInteger("native_port"); - if (broadcastRpcPort == null || broadcastRpcPort == 0) { - // use the default port if no port information was found in the row; - // note that in rare situations, the default port might not be known, in which case we - // report zero, as advertised in the javadocs of Node and NodeInfo. - broadcastRpcPort = port == -1 ? 0 : port; - } + + LOG.warn( + "[{}] Unable to determine broadcast RPC port. " + + "Trying to fall back to port used by the control connection.", + logPrefix); + broadcastRpcPort = port == -1 ? 
0 : port; } + InetSocketAddress broadcastRpcAddress = new InetSocketAddress(broadcastRpcInetAddress, broadcastRpcPort); if (row.contains("peer") && broadcastRpcAddress.equals(localEndPoint.resolve())) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java index cc275eb1624..dd40f233518 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/DefaultTopologyMonitorTest.java @@ -38,6 +38,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.internal.core.addresstranslation.PassThroughAddressTranslator; import com.datastax.oss.driver.internal.core.adminrequest.AdminResult; import com.datastax.oss.driver.internal.core.adminrequest.AdminRow; @@ -50,9 +51,11 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import com.datastax.oss.driver.shaded.guava.common.collect.Maps; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.Error; +import com.google.common.collect.Streams; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import com.tngtech.java.junit.dataprovider.UseDataProvider; @@ -95,6 +98,8 @@ public class DefaultTopologyMonitorTest { @Mock private Appender appender; @Captor private ArgumentCaptor 
loggingEventCaptor; + @Mock private SslEngineFactory sslEngineFactory; + private DefaultNode node1; private DefaultNode node2; @@ -414,18 +419,6 @@ public void should_skip_invalid_peers_row_v2(String columnToCheck) { + "This is likely a gossip or snitch issue, this node will be ignored."); } - @DataProvider - public static Object[][] columnsToCheckV1() { - return new Object[][] {{"rpc_address"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"}}; - } - - @DataProvider - public static Object[][] columnsToCheckV2() { - return new Object[][] { - {"native_address"}, {"native_port"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"} - }; - } - @Test public void should_stop_executing_queries_once_closed() { // Given @@ -443,9 +436,9 @@ public void should_stop_executing_queries_once_closed() { public void should_warn_when_control_host_found_in_system_peers() { // Given AdminRow local = mockLocalRow(1, node1.getHostId()); - AdminRow peer3 = mockPeersRow(3, UUID.randomUUID()); - AdminRow peer2 = mockPeersRow(2, node2.getHostId()); AdminRow peer1 = mockPeersRow(1, node2.getHostId()); // invalid + AdminRow peer2 = mockPeersRow(2, node2.getHostId()); + AdminRow peer3 = mockPeersRow(3, UUID.randomUUID()); topologyMonitor.stubQueries( new StubbedQuery("SELECT * FROM system.local", mockResult(local)), new StubbedQuery("SELECT * FROM system.peers_v2", Collections.emptyMap(), null, true), @@ -462,7 +455,7 @@ public void should_warn_when_control_host_found_in_system_peers() { .hasSize(3) .extractingResultOf("getEndPoint") .containsOnlyOnce(node1.getEndPoint())); - assertLog( + assertLogContains( Level.WARN, "[null] Control node /127.0.0.1:9042 has an entry for itself in system.peers: " + "this entry will be ignored. 
This is likely due to a misconfiguration; " @@ -492,7 +485,7 @@ public void should_warn_when_control_host_found_in_system_peers_v2() { .hasSize(3) .extractingResultOf("getEndPoint") .containsOnlyOnce(node1.getEndPoint())); - assertLog( + assertLogContains( Level.WARN, "[null] Control node /127.0.0.1:9042 has an entry for itself in system.peers_v2: " + "this entry will be ignored. This is likely due to a misconfiguration; " @@ -500,6 +493,116 @@ public void should_warn_when_control_host_found_in_system_peers_v2() { + "all nodes in your cluster."); } + // Confirm the base case of extracting peer info from peers_v2, no SSL involved + @Test + public void should_get_peer_address_info_peers_v2() { + // Given + AdminRow local = mockLocalRow(1, node1.getHostId()); + AdminRow peer2 = mockPeersV2Row(3, node2.getHostId()); + AdminRow peer1 = mockPeersV2Row(2, node1.getHostId()); + topologyMonitor.isSchemaV2 = true; + topologyMonitor.stubQueries( + new StubbedQuery("SELECT * FROM system.local", mockResult(local)), + new StubbedQuery("SELECT * FROM system.peers_v2", mockResult(peer2, peer1))); + when(context.getSslEngineFactory()).thenReturn(Optional.empty()); + + // When + CompletionStage> futureInfos = topologyMonitor.refreshNodeList(); + + // Then + assertThatStage(futureInfos) + .isSuccess( + infos -> { + Iterator iterator = infos.iterator(); + // First NodeInfo is for local, skip past that + iterator.next(); + NodeInfo peer2nodeInfo = iterator.next(); + assertThat(peer2nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.3", 9042)); + NodeInfo peer1nodeInfo = iterator.next(); + assertThat(peer1nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.2", 9042)); + }); + } + + // Confirm the base case of extracting peer info from DSE peers table, no SSL involved + @Test + public void should_get_peer_address_info_peers_dse() { + // Given + AdminRow local = mockLocalRow(1, node1.getHostId()); + AdminRow peer2 = mockPeersRowDse(3, 
node2.getHostId()); + AdminRow peer1 = mockPeersRowDse(2, node1.getHostId()); + topologyMonitor.isSchemaV2 = true; + topologyMonitor.stubQueries( + new StubbedQuery("SELECT * FROM system.local", mockResult(local)), + new StubbedQuery("SELECT * FROM system.peers_v2", Maps.newHashMap(), null, true), + new StubbedQuery("SELECT * FROM system.peers", mockResult(peer2, peer1))); + when(context.getSslEngineFactory()).thenReturn(Optional.empty()); + + // When + CompletionStage> futureInfos = topologyMonitor.refreshNodeList(); + + // Then + assertThatStage(futureInfos) + .isSuccess( + infos -> { + Iterator iterator = infos.iterator(); + // First NodeInfo is for local, skip past that + iterator.next(); + NodeInfo peer2nodeInfo = iterator.next(); + assertThat(peer2nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.3", 9042)); + NodeInfo peer1nodeInfo = iterator.next(); + assertThat(peer1nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.2", 9042)); + }); + } + + // Confirm the base case of extracting peer info from DSE peers table, this time with SSL + @Test + public void should_get_peer_address_info_peers_dse_with_ssl() { + // Given + AdminRow local = mockLocalRow(1, node1.getHostId()); + AdminRow peer2 = mockPeersRowDseWithSsl(3, node2.getHostId()); + AdminRow peer1 = mockPeersRowDseWithSsl(2, node1.getHostId()); + topologyMonitor.isSchemaV2 = true; + topologyMonitor.stubQueries( + new StubbedQuery("SELECT * FROM system.local", mockResult(local)), + new StubbedQuery("SELECT * FROM system.peers_v2", Maps.newHashMap(), null, true), + new StubbedQuery("SELECT * FROM system.peers", mockResult(peer2, peer1))); + when(context.getSslEngineFactory()).thenReturn(Optional.of(sslEngineFactory)); + + // When + CompletionStage> futureInfos = topologyMonitor.refreshNodeList(); + + // Then + assertThatStage(futureInfos) + .isSuccess( + infos -> { + Iterator iterator = infos.iterator(); + // First NodeInfo is for local, skip past that + 
iterator.next(); + NodeInfo peer2nodeInfo = iterator.next(); + assertThat(peer2nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.3", 9043)); + NodeInfo peer1nodeInfo = iterator.next(); + assertThat(peer1nodeInfo.getEndPoint().resolve()) + .isEqualTo(new InetSocketAddress("127.0.0.2", 9043)); + }); + } + + @DataProvider + public static Object[][] columnsToCheckV1() { + return new Object[][] {{"rpc_address"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"}}; + } + + @DataProvider + public static Object[][] columnsToCheckV2() { + return new Object[][] { + {"native_address"}, {"native_port"}, {"host_id"}, {"data_center"}, {"rack"}, {"tokens"} + }; + } + /** Mocks the query execution logic. */ private static class TestTopologyMonitor extends DefaultTopologyMonitor { @@ -641,6 +744,43 @@ private AdminRow mockPeersV2Row(int i, UUID hostId) { } } + // Mock row for DSE ~6.8 + private AdminRow mockPeersRowDse(int i, UUID hostId) { + try { + AdminRow row = mock(AdminRow.class); + when(row.contains("peer")).thenReturn(true); + when(row.isNull("data_center")).thenReturn(false); + when(row.getString("data_center")).thenReturn("dc" + i); + when(row.getString("dse_version")).thenReturn("6.8.30"); + when(row.contains("graph")).thenReturn(true); + when(row.isNull("host_id")).thenReturn(hostId == null); + when(row.getUuid("host_id")).thenReturn(hostId); + when(row.getInetAddress("peer")).thenReturn(InetAddress.getByName("127.0.0." + i)); + when(row.isNull("rack")).thenReturn(false); + when(row.getString("rack")).thenReturn("rack" + i); + when(row.isNull("native_transport_address")).thenReturn(false); + when(row.getInetAddress("native_transport_address")) + .thenReturn(InetAddress.getByName("127.0.0." 
+ i)); + when(row.isNull("native_transport_port")).thenReturn(false); + when(row.getInteger("native_transport_port")).thenReturn(9042); + when(row.isNull("tokens")).thenReturn(false); + when(row.getSetOfString("tokens")).thenReturn(ImmutableSet.of("token" + i)); + when(row.isNull("rpc_address")).thenReturn(false); + + return row; + } catch (UnknownHostException e) { + fail("unexpected", e); + return null; + } + } + + private AdminRow mockPeersRowDseWithSsl(int i, UUID hostId) { + AdminRow row = mockPeersRowDse(i, hostId); + when(row.isNull("native_transport_port_ssl")).thenReturn(false); + when(row.getInteger("native_transport_port_ssl")).thenReturn(9043); + return row; + } + private AdminResult mockResult(AdminRow... rows) { AdminResult result = mock(AdminResult.class); when(result.iterator()).thenReturn(Iterators.forArray(rows)); @@ -654,4 +794,12 @@ private void assertLog(Level level, String message) { assertThat(logs).hasSize(1); assertThat(logs.iterator().next().getFormattedMessage()).contains(message); } + + private void assertLogContains(Level level, String message) { + verify(appender, atLeast(1)).doAppend(loggingEventCaptor.capture()); + Iterable logs = + filter(loggingEventCaptor.getAllValues()).with("level", level).get(); + assertThat( + Streams.stream(logs).map(ILoggingEvent::getFormattedMessage).anyMatch(message::contains)); + } } From 4c7133c72e136d23dbcea795e0041df764568931 Mon Sep 17 00:00:00 2001 From: Andy Tolbert <6889771+tolbertam@users.noreply.github.com> Date: Tue, 23 Jan 2024 10:21:02 -0600 Subject: [PATCH 883/979] Replace uses of AttributeKey.newInstance The java driver uses netty channel attributes to decorate a connection's channel with the cluster name (returned from the system.local table) and the map from the OPTIONS response, both of which are obtained on connection initialization. 
There's an issue here that I wouldn't expect to see in practice in that the AttributeKey's used are created using AttributeKey.newInstance, which throws an exception if an AttributeKey of that name is defined anywhere else in evaluated code. This change attempts to resolve this issue by changing AttributeKey initialiation in DriverChannel from newInstance to valueOf, which avoids throwing an exception if an AttributeKey of the same name was previously instantiated. patch by Andy Tolbert; reviewed by Bret McGuire, Alexandre Dutra, Abe Ratnofsky for CASSANDRA-19290 --- .../oss/driver/internal/core/channel/DriverChannel.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java index 50932bed8c8..e40aa6f3097 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/DriverChannel.java @@ -49,9 +49,9 @@ @ThreadSafe public class DriverChannel { - static final AttributeKey CLUSTER_NAME_KEY = AttributeKey.newInstance("cluster_name"); + static final AttributeKey CLUSTER_NAME_KEY = AttributeKey.valueOf("cluster_name"); static final AttributeKey>> OPTIONS_KEY = - AttributeKey.newInstance("options"); + AttributeKey.valueOf("options"); @SuppressWarnings("RedundantStringConstructorCall") static final Object GRACEFUL_CLOSE_MESSAGE = new String("GRACEFUL_CLOSE_MESSAGE"); From 40a9a49d50fac6abed2a5bb2cc2627e4085a399b Mon Sep 17 00:00:00 2001 From: Ekaterina Dimitrova Date: Mon, 29 Jan 2024 14:07:59 -0500 Subject: [PATCH 884/979] Fix data corruption in VectorCodec when using heap buffers patch by Ekaterina Dimitrova; reviewed by Alexandre Dutra and Bret McGuire for CASSANDRA-19333 --- .../internal/core/type/codec/VectorCodec.java | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 
deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java index 1b663a29d9e..2c4d2200b13 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java @@ -127,17 +127,19 @@ Elements should at least precede themselves with their size (along the lines of cqlType.getDimensions(), bytes.remaining())); } + ByteBuffer slice = bytes.slice(); List rv = new ArrayList(cqlType.getDimensions()); for (int i = 0; i < cqlType.getDimensions(); ++i) { - ByteBuffer slice = bytes.slice(); - slice.limit(elementSize); + // Set the limit for the current element + int originalPosition = slice.position(); + slice.limit(originalPosition + elementSize); rv.add(this.subtypeCodec.decode(slice, protocolVersion)); - bytes.position(bytes.position() + elementSize); + // Move to the start of the next element + slice.position(originalPosition + elementSize); + // Reset the limit to the end of the buffer + slice.limit(slice.capacity()); } - /* Restore the input ByteBuffer to its original state */ - bytes.rewind(); - return CqlVector.newInstance(rv); } From 98e25040f5e69db1092ccafb6665d8e92779cc46 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 28 Mar 2024 15:37:22 -0500 Subject: [PATCH 885/979] CASSANDRA-19504: Improve state management for Java versions in Jenkinsfile patch by Bret McGuire; reviewed by Bret McGuire for CASSANDRA-19504 --- Jenkinsfile | 19 ++++++++++--------- pom.xml | 1 + 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c8247769631..8d2b74c5b08 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -61,12 +61,6 @@ def initializeEnvironment() { . 
${JABBA_SHELL} jabba which 1.8''', returnStdout: true).trim() - env.TEST_JAVA_HOME = sh(label: 'Get TEST_JAVA_HOME',script: '''#!/bin/bash -le - . ${JABBA_SHELL} - jabba which ${JABBA_VERSION}''', returnStdout: true).trim() - env.TEST_JAVA_VERSION = sh(label: 'Get TEST_JAVA_VERSION',script: '''#!/bin/bash -le - echo "${JABBA_VERSION##*.}"''', returnStdout: true).trim() - sh label: 'Download Apache CassandraⓇ or DataStax Enterprise',script: '''#!/bin/bash -le . ${JABBA_SHELL} jabba use 1.8 @@ -115,7 +109,12 @@ def buildDriver(jabbaVersion) { } def executeTests() { - sh label: 'Execute tests', script: '''#!/bin/bash -le + def testJavaHome = sh(label: 'Get TEST_JAVA_HOME',script: '''#!/bin/bash -le + . ${JABBA_SHELL} + jabba which ${JABBA_VERSION}''', returnStdout: true).trim() + def testJavaVersion = (JABBA_VERSION =~ /.*\.(\d+)/)[0][1] + + def executeTestScript = '''#!/bin/bash -le # Load CCM environment variables set -o allexport . ${HOME}/environment.txt @@ -137,8 +136,8 @@ def executeTests() { printenv | sort mvn -B -V ${INTEGRATION_TESTS_FILTER_ARGUMENT} -T 1 verify \ - -Ptest-jdk-${TEST_JAVA_VERSION} \ - -DtestJavaHome=${TEST_JAVA_HOME} \ + -Ptest-jdk-'''+testJavaVersion+''' \ + -DtestJavaHome='''+testJavaHome+''' \ -DfailIfNoTests=false \ -Dmaven.test.failure.ignore=true \ -Dmaven.javadoc.skip=${SKIP_JAVADOCS} \ @@ -149,6 +148,8 @@ def executeTests() { ${ISOLATED_ITS_ARGUMENT} \ ${PARALLELIZABLE_ITS_ARGUMENT} ''' + echo "Invoking Maven with parameters test-jdk-${testJavaVersion} and testJavaHome = ${testJavaHome}" + sh label: 'Execute tests', script: executeTestScript } def executeCodeCoverage() { diff --git a/pom.xml b/pom.xml index 221e1f69a86..7decc96633a 100644 --- a/pom.xml +++ b/pom.xml @@ -728,6 +728,7 @@ limitations under the License.]]> maven-surefire-plugin + ${testing.jvm}/bin/java ${project.basedir}/src/test/resources/logback-test.xml From 4aa5abe701e529fd9be0c9b55214dad6f85f0649 Mon Sep 17 00:00:00 2001 From: Emelia 
<105240296+emeliawilkinson24@users.noreply.github.com> Date: Fri, 17 Nov 2023 15:22:47 -0500 Subject: [PATCH 886/979] Update README.md Typo carried over from old docs, needed closing parenthesis. --- manual/cloud/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/manual/cloud/README.md b/manual/cloud/README.md index 48197c49425..9116b03dac3 100644 --- a/manual/cloud/README.md +++ b/manual/cloud/README.md @@ -28,10 +28,10 @@ driver is configured in an application and that you will need to obtain a *secur 1. [Download][Download Maven] and [install][Install Maven] Maven. 2. Create an Astra database on [AWS/Azure/GCP][Create an Astra database - AWS/Azure/GCP]; alternatively, have a team member provide access to their - Astra database (instructions for [AWS/Azure/GCP][Access an Astra database - AWS/Azure/GCP] to + Astra database (see instructions for [AWS/Azure/GCP][Access an Astra database - AWS/Azure/GCP]) to obtain database connection details. -3. Download the secure connect bundle (instructions for - [AWS/Azure/GCP][Download the secure connect bundle - AWS/Azure/GCP], that contains connection +3. Download the secure connect bundle (see instructions for + [AWS/Azure/GCP][Download the secure connect bundle - AWS/Azure/GCP]) that contains connection information such as contact points and certificates. ### Procedure From 9c41aab6fd0a55d977a9844610d230b1e69868d7 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 8 Apr 2024 11:00:46 -0500 Subject: [PATCH 887/979] Update link to JIRA to ASF instance. Also include information about populating the component field. 
Patch by Bret McGuire; reviewed by Bret McGuire, Alexandre Dutra --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 2e8fe862f49..c53c8f2db29 100644 --- a/README.md +++ b/README.md @@ -74,13 +74,13 @@ See the [Cassandra error handling done right blog](https://www.datastax.com/blog * [Manual](manual/) * [API docs] -* Bug tracking: [JIRA] +* Bug tracking: [JIRA]. Make sure to select the "Client/java-driver" component when filing new tickets! * [Mailing list] * [Changelog] * [FAQ] [API docs]: https://docs.datastax.com/en/drivers/java/4.17 -[JIRA]: https://datastax-oss.atlassian.net/browse/JAVA +[JIRA]: https://issues.apache.org/jira/issues/?jql=project%20%3D%20CASSANDRA%20AND%20component%20%3D%20%22Client%2Fjava-driver%22%20ORDER%20BY%20key%20DESC [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [Changelog]: changelog/ [FAQ]: faq/ From 6c48329199862215abc22170769fd1a165e80a15 Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Thu, 14 Mar 2024 16:55:59 -0700 Subject: [PATCH 888/979] CASSANDRA-19468 Don't swallow exception during metadata refresh If an exception was thrown while getting new metadata as part of schema refresh it died on the admin executor instead of being propagated to the CompletableFuture argument. Instead, catch those exceptions and hand them off to the CompletableFuture. 
patch by Ammar Khaku; reviewed by Chris Lohfink, Bret McGuire for CASSANDRA-19468 --- .../core/metadata/MetadataManager.java | 53 ++++++++++--------- .../core/metadata/MetadataManagerTest.java | 23 ++++++++ 2 files changed, 52 insertions(+), 24 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index 28e8b18f127..c9abfb7a625 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -437,30 +437,35 @@ private void startSchemaRequest(CompletableFuture refreshFu if (agreementError != null) { refreshFuture.completeExceptionally(agreementError); } else { - schemaQueriesFactory - .newInstance() - .execute() - .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) - .whenComplete( - (newMetadata, metadataError) -> { - if (metadataError != null) { - refreshFuture.completeExceptionally(metadataError); - } else { - refreshFuture.complete( - new RefreshSchemaResult(newMetadata, schemaInAgreement)); - } - - firstSchemaRefreshFuture.complete(null); - - currentSchemaRefresh = null; - // If another refresh was enqueued during this one, run it now - if (queuedSchemaRefresh != null) { - CompletableFuture tmp = - this.queuedSchemaRefresh; - this.queuedSchemaRefresh = null; - startSchemaRequest(tmp); - } - }); + try { + schemaQueriesFactory + .newInstance() + .execute() + .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) + .whenComplete( + (newMetadata, metadataError) -> { + if (metadataError != null) { + refreshFuture.completeExceptionally(metadataError); + } else { + refreshFuture.complete( + new RefreshSchemaResult(newMetadata, schemaInAgreement)); + } + + firstSchemaRefreshFuture.complete(null); + + currentSchemaRefresh = null; + // If another refresh was enqueued during this 
one, run it now + if (queuedSchemaRefresh != null) { + CompletableFuture tmp = + this.queuedSchemaRefresh; + this.queuedSchemaRefresh = null; + startSchemaRequest(tmp); + } + }); + } catch (Throwable t) { + LOG.debug("[{}] Exception getting new metadata", logPrefix, t); + refreshFuture.completeExceptionally(t); + } } }); } else if (queuedSchemaRefresh == null) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java index 460f99abd85..375209d9fcf 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java @@ -20,6 +20,7 @@ import static com.datastax.oss.driver.Assertions.assertThat; import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.awaitility.Awaitility.await; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.timeout; import static org.mockito.Mockito.verify; @@ -33,6 +34,7 @@ import com.datastax.oss.driver.internal.core.context.EventBus; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.NettyOptions; +import com.datastax.oss.driver.internal.core.control.ControlConnection; import com.datastax.oss.driver.internal.core.metadata.schema.parsing.SchemaParserFactory; import com.datastax.oss.driver.internal.core.metadata.schema.queries.SchemaQueriesFactory; import com.datastax.oss.driver.internal.core.metrics.MetricsFactory; @@ -64,6 +66,7 @@ public class MetadataManagerTest { @Mock private InternalDriverContext context; @Mock private NettyOptions nettyOptions; + @Mock private ControlConnection controlConnection; @Mock private TopologyMonitor topologyMonitor; @Mock private DriverConfig config; @Mock 
private DriverExecutionProfile defaultProfile; @@ -85,6 +88,7 @@ public void setup() { when(context.getNettyOptions()).thenReturn(nettyOptions); when(context.getTopologyMonitor()).thenReturn(topologyMonitor); + when(context.getControlConnection()).thenReturn(controlConnection); when(defaultProfile.getDuration(DefaultDriverOption.METADATA_SCHEMA_WINDOW)) .thenReturn(Duration.ZERO); @@ -286,6 +290,25 @@ public void should_remove_node() { assertThat(refresh.broadcastRpcAddressToRemove).isEqualTo(broadcastRpcAddress2); } + @Test + public void refreshSchema_should_work() { + // Given + IllegalStateException expectedException = new IllegalStateException("Error we're testing"); + when(schemaQueriesFactory.newInstance()).thenThrow(expectedException); + when(topologyMonitor.refreshNodeList()).thenReturn(CompletableFuture.completedFuture(ImmutableList.of(mock(NodeInfo.class)))); + when(topologyMonitor.checkSchemaAgreement()).thenReturn(CompletableFuture.completedFuture(Boolean.TRUE)); + when(controlConnection.init(anyBoolean(), anyBoolean(), anyBoolean())).thenReturn(CompletableFuture.completedFuture(null)); + metadataManager.refreshNodes(); // required internal state setup for this + waitForPendingAdminTasks(() -> metadataManager.refreshes.size() == 1); // sanity check + + // When + CompletionStage result = metadataManager.refreshSchema("foo", true, true); + + // Then + waitForPendingAdminTasks(() -> result.toCompletableFuture().isDone()); + assertThatStage(result).isFailed(t -> assertThat(t).isEqualTo(expectedException)); + } + private static class TestMetadataManager extends MetadataManager { private List refreshes = new CopyOnWriteArrayList<>(); From 388a46b9c10b5653c71ac8840bcda0c91b59bce4 Mon Sep 17 00:00:00 2001 From: janehe Date: Fri, 12 Apr 2024 13:20:33 -0700 Subject: [PATCH 889/979] patch by Jane He; reviewed by Alexandre Dutra and Bret McGuire for CASSANDRA-19457 --- .../core/metrics/AbstractMetricUpdater.java | 2 -- .../core/metrics/DropwizardMetricUpdater.java 
| 2 +- .../internal/core/metrics/MetricUpdater.java | 2 ++ .../core/metrics/NoopNodeMetricUpdater.java | 5 +++++ .../core/metrics/NoopSessionMetricUpdater.java | 3 +++ .../internal/core/session/DefaultSession.java | 15 +++++++++++++++ .../driver/core/metrics/DropwizardMetricsIT.java | 6 ++++++ .../oss/driver/core/metrics/MetricsITBase.java | 10 ++++++++-- .../metrics/micrometer/MicrometerMetricsIT.java | 6 ++++++ .../microprofile/MicroProfileMetricsIT.java | 6 ++++++ .../micrometer/MicrometerMetricUpdater.java | 2 +- .../microprofile/MicroProfileMetricUpdater.java | 2 +- 12 files changed, 54 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java index fcfe56b605e..5e2392a2e7f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java @@ -180,6 +180,4 @@ protected Timeout newTimeout() { expireAfter.toNanos(), TimeUnit.NANOSECONDS); } - - protected abstract void clearMetrics(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java index 8590917be21..9377fb3a17e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/DropwizardMetricUpdater.java @@ -91,7 +91,7 @@ public void updateTimer( } @Override - protected void clearMetrics() { + public void clearMetrics() { for (MetricT metric : metrics.keySet()) { MetricId id = getMetricId(metric); registry.remove(id.getName()); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java index c4b432f3c50..c07d1b136af 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/MetricUpdater.java @@ -46,4 +46,6 @@ default void markMeter(MetricT metric, @Nullable String profileName) { void updateTimer(MetricT metric, @Nullable String profileName, long duration, TimeUnit unit); boolean isEnabled(MetricT metric, @Nullable String profileName); + + void clearMetrics(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopNodeMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopNodeMetricUpdater.java index 45f0797c7b5..8d216990331 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopNodeMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopNodeMetricUpdater.java @@ -53,4 +53,9 @@ public boolean isEnabled(NodeMetric metric, String profileName) { // since methods don't do anything, return false return false; } + + @Override + public void clearMetrics() { + // nothing to do + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopSessionMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopSessionMetricUpdater.java index 1666261590c..7099a8ddcac 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopSessionMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/NoopSessionMetricUpdater.java @@ -53,4 +53,7 @@ public boolean isEnabled(SessionMetric metric, String profileName) { // since methods don't do anything, return false return false; } + + @Override + public void clearMetrics() {} } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index af9dc183f7e..cb1271c9cba 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.internal.core.channel.DriverChannel; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.LifecycleListener; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; import com.datastax.oss.driver.internal.core.metadata.MetadataManager; import com.datastax.oss.driver.internal.core.metadata.MetadataManager.RefreshSchemaResult; import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; @@ -546,6 +547,13 @@ private void close() { closePolicies(); + // clear metrics to prevent memory leak + for (Node n : metadataManager.getMetadata().getNodes().values()) { + ((DefaultNode) n).getMetricUpdater().clearMetrics(); + } + + DefaultSession.this.metricUpdater.clearMetrics(); + List> childrenCloseStages = new ArrayList<>(); for (AsyncAutoCloseable closeable : internalComponentsToClose()) { childrenCloseStages.add(closeable.closeAsync()); @@ -565,6 +573,13 @@ private void forceClose() { logPrefix, (closeWasCalled ? 
"" : "not ")); + // clear metrics to prevent memory leak + for (Node n : metadataManager.getMetadata().getNodes().values()) { + ((DefaultNode) n).getMetricUpdater().clearMetrics(); + } + + DefaultSession.this.metricUpdater.clearMetrics(); + if (closeWasCalled) { // onChildrenClosed has already been scheduled for (AsyncAutoCloseable closeable : internalComponentsToClose()) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java index 6cbe443f2a6..e0184516e21 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/DropwizardMetricsIT.java @@ -198,6 +198,12 @@ protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { } } + @Override + protected void assertMetricsNotPresent(Object registry) { + MetricRegistry dropwizardRegistry = (MetricRegistry) registry; + assertThat(dropwizardRegistry.getMetrics()).isEmpty(); + } + @Override protected void assertNodeMetricsEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java index 7fac3f98f52..e6121217619 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metrics/MetricsITBase.java @@ -83,8 +83,10 @@ public void resetSimulacron() { @Test @UseDataProvider("descriptorsAndPrefixes") - public void should_expose_metrics_if_enabled(Class metricIdGenerator, String prefix) { + public void should_expose_metrics_if_enabled_and_clear_metrics_if_closed( + Class metricIdGenerator, String 
prefix) { + Object registry = newMetricRegistry(); Assume.assumeFalse( "Cannot use metric tags with Dropwizard", metricIdGenerator.getSimpleName().contains("Tagging") @@ -101,12 +103,14 @@ public void should_expose_metrics_if_enabled(Class metricIdGenerator, String CqlSession.builder() .addContactEndPoints(simulacron().getContactPoints()) .withConfigLoader(loader) - .withMetricRegistry(newMetricRegistry()) + .withMetricRegistry(registry) .build()) { session.prepare("irrelevant"); queryAllNodes(session); assertMetricsPresent(session); + } finally { + assertMetricsNotPresent(registry); } } @@ -262,4 +266,6 @@ private DefaultNode findNode(CqlSession session, int id) { return (DefaultNode) session.getMetadata().findNode(address1).orElseThrow(IllegalStateException::new); } + + protected abstract void assertMetricsNotPresent(Object registry); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java index 5fe64719327..c38df1e2026 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/micrometer/MicrometerMetricsIT.java @@ -186,6 +186,12 @@ protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { } } + @Override + protected void assertMetricsNotPresent(Object registry) { + MeterRegistry micrometerRegistry = (MeterRegistry) registry; + assertThat(micrometerRegistry.getMeters()).isEmpty(); + } + @Override protected void assertNodeMetricsEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java 
index 1294be3deae..aa04c058a49 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/metrics/microprofile/MicroProfileMetricsIT.java @@ -188,6 +188,12 @@ protected void assertNodeMetricsNotEvicted(CqlSession session, Node node) { } } + @Override + protected void assertMetricsNotPresent(Object registry) { + MetricRegistry metricRegistry = (MetricRegistry) registry; + assertThat(metricRegistry.getMetrics()).isEmpty(); + } + @Override protected void assertNodeMetricsEvicted(CqlSession session, Node node) { InternalDriverContext context = (InternalDriverContext) session.getContext(); diff --git a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java index 7a4a27991e3..b9507c8b7cf 100644 --- a/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java +++ b/metrics/micrometer/src/main/java/com/datastax/oss/driver/internal/metrics/micrometer/MicrometerMetricUpdater.java @@ -83,7 +83,7 @@ public void updateTimer( } @Override - protected void clearMetrics() { + public void clearMetrics() { for (Meter metric : metrics.values()) { registry.remove(metric); } diff --git a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java index a46e82ee624..df44fd69c51 100644 --- a/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java +++ b/metrics/microprofile/src/main/java/com/datastax/oss/driver/internal/metrics/microprofile/MicroProfileMetricUpdater.java @@ -83,7 +83,7 @@ public void 
updateTimer( } @Override - protected void clearMetrics() { + public void clearMetrics() { for (MetricT metric : metrics.keySet()) { MetricId id = getMetricId(metric); Tag[] tags = MicroProfileTags.toMicroProfileTags(id.getTags()); From c8b17ac38b48ca580b4862571cdfd7b7633a5793 Mon Sep 17 00:00:00 2001 From: Bret McGuire Date: Mon, 19 Feb 2024 23:07:18 -0600 Subject: [PATCH 890/979] Changelog updates to reflect work that went out in 4.18.0 Patch by Bret McGuire; reviewed by Bret McGuire, Alexandre Dutra for PR 1914 --- changelog/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 8ff2913b72d..7807ef15f95 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -21,6 +21,16 @@ under the License. +### 4.18.0 + +- [improvement] PR 1689: Add support for publishing percentile time series for the histogram metrics (nparaddi-walmart) +- [improvement] JAVA-3104: Do not eagerly pre-allocate array when deserializing CqlVector +- [improvement] JAVA-3111: upgrade jackson-databind to 2.13.4.2 to address gradle dependency issue +- [improvement] PR 1617: Improve ByteBufPrimitiveCodec readBytes (chibenwa) +- [improvement] JAVA-3095: Fix CREATE keyword in vector search example in upgrade guide +- [improvement] JAVA-3100: Update jackson-databind to 2.13.4.1 and jackson-jaxrs-json-provider to 2.13.4 to address recent CVEs +- [improvement] JAVA-3089: Forbid wildcard imports + ### 4.17.0 - [improvement] JAVA-3070: Make CqlVector and CqlDuration serializable From 3c08f8efa24cddb33b807a5e1f8f16824632a611 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 17 Apr 2024 01:34:40 -0500 Subject: [PATCH 891/979] Fixes to get past code formatting issues patch by Bret McGuire; reviewed by Bret McGuire for PR 1928 --- .../core/metadata/MetadataManager.java | 46 +++++++++---------- .../core/metadata/MetadataManagerTest.java | 12 +++-- 2 files changed, 31 insertions(+), 27 deletions(-) diff --git 
a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java index c9abfb7a625..efb04bde5e1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/MetadataManager.java @@ -439,29 +439,29 @@ private void startSchemaRequest(CompletableFuture refreshFu } else { try { schemaQueriesFactory - .newInstance() - .execute() - .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) - .whenComplete( - (newMetadata, metadataError) -> { - if (metadataError != null) { - refreshFuture.completeExceptionally(metadataError); - } else { - refreshFuture.complete( - new RefreshSchemaResult(newMetadata, schemaInAgreement)); - } - - firstSchemaRefreshFuture.complete(null); - - currentSchemaRefresh = null; - // If another refresh was enqueued during this one, run it now - if (queuedSchemaRefresh != null) { - CompletableFuture tmp = - this.queuedSchemaRefresh; - this.queuedSchemaRefresh = null; - startSchemaRequest(tmp); - } - }); + .newInstance() + .execute() + .thenApplyAsync(this::parseAndApplySchemaRows, adminExecutor) + .whenComplete( + (newMetadata, metadataError) -> { + if (metadataError != null) { + refreshFuture.completeExceptionally(metadataError); + } else { + refreshFuture.complete( + new RefreshSchemaResult(newMetadata, schemaInAgreement)); + } + + firstSchemaRefreshFuture.complete(null); + + currentSchemaRefresh = null; + // If another refresh was enqueued during this one, run it now + if (queuedSchemaRefresh != null) { + CompletableFuture tmp = + this.queuedSchemaRefresh; + this.queuedSchemaRefresh = null; + startSchemaRequest(tmp); + } + }); } catch (Throwable t) { LOG.debug("[{}] Exception getting new metadata", logPrefix, t); refreshFuture.completeExceptionally(t); diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java index 375209d9fcf..f9a909400f9 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/MetadataManagerTest.java @@ -295,14 +295,18 @@ public void refreshSchema_should_work() { // Given IllegalStateException expectedException = new IllegalStateException("Error we're testing"); when(schemaQueriesFactory.newInstance()).thenThrow(expectedException); - when(topologyMonitor.refreshNodeList()).thenReturn(CompletableFuture.completedFuture(ImmutableList.of(mock(NodeInfo.class)))); - when(topologyMonitor.checkSchemaAgreement()).thenReturn(CompletableFuture.completedFuture(Boolean.TRUE)); - when(controlConnection.init(anyBoolean(), anyBoolean(), anyBoolean())).thenReturn(CompletableFuture.completedFuture(null)); + when(topologyMonitor.refreshNodeList()) + .thenReturn(CompletableFuture.completedFuture(ImmutableList.of(mock(NodeInfo.class)))); + when(topologyMonitor.checkSchemaAgreement()) + .thenReturn(CompletableFuture.completedFuture(Boolean.TRUE)); + when(controlConnection.init(anyBoolean(), anyBoolean(), anyBoolean())) + .thenReturn(CompletableFuture.completedFuture(null)); metadataManager.refreshNodes(); // required internal state setup for this waitForPendingAdminTasks(() -> metadataManager.refreshes.size() == 1); // sanity check // When - CompletionStage result = metadataManager.refreshSchema("foo", true, true); + CompletionStage result = + metadataManager.refreshSchema("foo", true, true); // Then waitForPendingAdminTasks(() -> result.toCompletableFuture().isDone()); From 07265b4a6830a47752bf31eb4f631b9917863da2 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Tue, 23 Apr 2024 00:38:48 -0500 Subject: [PATCH 892/979] Initial fix to unit tests patch by Bret McGuire; 
reviewed by Bret McGuire for PR 1930 --- .../driver/internal/core/session/DefaultSession.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index cb1271c9cba..6f063ae9a50 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -39,6 +39,7 @@ import com.datastax.oss.driver.internal.core.metadata.MetadataManager.RefreshSchemaResult; import com.datastax.oss.driver.internal.core.metadata.NodeStateEvent; import com.datastax.oss.driver.internal.core.metadata.NodeStateManager; +import com.datastax.oss.driver.internal.core.metrics.NodeMetricUpdater; import com.datastax.oss.driver.internal.core.metrics.SessionMetricUpdater; import com.datastax.oss.driver.internal.core.pool.ChannelPool; import com.datastax.oss.driver.internal.core.util.Loggers; @@ -549,10 +550,11 @@ private void close() { // clear metrics to prevent memory leak for (Node n : metadataManager.getMetadata().getNodes().values()) { - ((DefaultNode) n).getMetricUpdater().clearMetrics(); + NodeMetricUpdater updater = ((DefaultNode) n).getMetricUpdater(); + if (updater != null) updater.clearMetrics(); } - DefaultSession.this.metricUpdater.clearMetrics(); + if (metricUpdater != null) metricUpdater.clearMetrics(); List> childrenCloseStages = new ArrayList<>(); for (AsyncAutoCloseable closeable : internalComponentsToClose()) { @@ -575,10 +577,11 @@ private void forceClose() { // clear metrics to prevent memory leak for (Node n : metadataManager.getMetadata().getNodes().values()) { - ((DefaultNode) n).getMetricUpdater().clearMetrics(); + NodeMetricUpdater updater = ((DefaultNode) n).getMetricUpdater(); + if (updater != null) updater.clearMetrics(); } - 
DefaultSession.this.metricUpdater.clearMetrics(); + if (metricUpdater != null) metricUpdater.clearMetrics(); if (closeWasCalled) { // onChildrenClosed has already been scheduled From 1492d6ced9d54bdd68deb043a0bfe232eaa2a8fc Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Fri, 29 Mar 2024 00:46:46 -0500 Subject: [PATCH 893/979] CASSANDRA-19292: Enable Jenkins to test against Cassandra 4.1.x patch by Bret McGuire; reviewed by Bret McGuire, Alexandre Dutra for CASSANDRA-19292 --- Jenkinsfile | 20 ++++-- .../datastax/oss/driver/api/core/Version.java | 1 + .../oss/driver/core/metadata/SchemaIT.java | 13 ++++ .../driver/api/testinfra/ccm/CcmBridge.java | 61 ++++++++++++++++++- 4 files changed, 86 insertions(+), 9 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 8d2b74c5b08..0bfa4ca7f4a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -256,8 +256,10 @@ pipeline { choices: ['2.1', // Legacy Apache CassandraⓇ '2.2', // Legacy Apache CassandraⓇ '3.0', // Previous Apache CassandraⓇ - '3.11', // Current Apache CassandraⓇ - '4.0', // Development Apache CassandraⓇ + '3.11', // Previous Apache CassandraⓇ + '4.0', // Previous Apache CassandraⓇ + '4.1', // Current Apache CassandraⓇ + '5.0', // Development Apache CassandraⓇ 'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Long Term Support DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise @@ -291,7 +293,11 @@ pipeline {

          - + + + + + @@ -445,7 +451,7 @@ pipeline { axis { name 'SERVER_VERSION' values '3.11', // Latest stable Apache CassandraⓇ - '4.0', // Development Apache CassandraⓇ + '4.1', // Development Apache CassandraⓇ 'dse-6.8.30' // Current DataStax Enterprise } axis { @@ -554,8 +560,10 @@ pipeline { name 'SERVER_VERSION' values '2.1', // Legacy Apache CassandraⓇ '3.0', // Previous Apache CassandraⓇ - '3.11', // Current Apache CassandraⓇ - '4.0', // Development Apache CassandraⓇ + '3.11', // Previous Apache CassandraⓇ + '4.0', // Previous Apache CassandraⓇ + '4.1', // Current Apache CassandraⓇ + '5.0', // Development Apache CassandraⓇ 'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Last EOSL DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index cc4931fe2fa..3f12c54faf7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -52,6 +52,7 @@ public class Version implements Comparable, Serializable { @NonNull public static final Version V2_2_0 = Objects.requireNonNull(parse("2.2.0")); @NonNull public static final Version V3_0_0 = Objects.requireNonNull(parse("3.0.0")); @NonNull public static final Version V4_0_0 = Objects.requireNonNull(parse("4.0.0")); + @NonNull public static final Version V4_1_0 = Objects.requireNonNull(parse("4.1.0")); @NonNull public static final Version V5_0_0 = Objects.requireNonNull(parse("5.0.0")); @NonNull public static final Version V6_7_0 = Objects.requireNonNull(parse("6.7.0")); @NonNull public static final Version V6_8_0 = Objects.requireNonNull(parse("6.8.0")); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 
caa96a647be..6495b451df7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -265,6 +265,19 @@ public void should_get_virtual_metadata() { + " total bigint,\n" + " unit text,\n" + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + + "); */", + // Cassandra 4.1 + "/* VIRTUAL TABLE system_views.sstable_tasks (\n" + + " keyspace_name text,\n" + + " table_name text,\n" + + " task_id timeuuid,\n" + + " completion_ratio double,\n" + + " kind text,\n" + + " progress bigint,\n" + + " sstables int,\n" + + " total bigint,\n" + + " unit text,\n" + + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + "); */"); // ColumnMetadata is as expected ColumnMetadata cm = tm.getColumn("progress").get(); diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 5f845243bf8..98739e7715d 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -236,12 +236,33 @@ public void create() { Arrays.stream(nodes).mapToObj(n -> "" + n).collect(Collectors.joining(":")), createOptions.stream().collect(Collectors.joining(" "))); + Version cassandraVersion = getCassandraVersion(); for (Map.Entry conf : cassandraConfiguration.entrySet()) { - execute("updateconf", String.format("%s:%s", conf.getKey(), conf.getValue())); + String originalKey = conf.getKey(); + Object originalValue = conf.getValue(); + execute( + "updateconf", + String.join( + ":", + getConfigKey(originalKey, originalValue, cassandraVersion), + getConfigValue(originalKey, originalValue, cassandraVersion))); } - if (getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { - execute("updateconf", "enable_user_defined_functions:true"); + + // If 
we're dealing with anything more recent than 2.2 explicitly enable UDF... but run it + // through our conversion process to make + // sure more recent versions don't have a problem. + if (cassandraVersion.compareTo(Version.V2_2_0) >= 0) { + String originalKey = "enable_user_defined_functions"; + Object originalValue = "true"; + execute( + "updateconf", + String.join( + ":", + getConfigKey(originalKey, originalValue, cassandraVersion), + getConfigValue(originalKey, originalValue, cassandraVersion))); } + + // Note that we aren't performing any substitution on DSE key/value props (at least for now) if (DSE_ENABLEMENT) { for (Map.Entry conf : dseConfiguration.entrySet()) { execute("updatedseconf", String.format("%s:%s", conf.getKey(), conf.getValue())); @@ -463,6 +484,40 @@ private Optional overrideJvmVersionForDseWorkloads() { return Optional.empty(); } + private static String IN_MS_STR = "_in_ms"; + private static int IN_MS_STR_LENGTH = IN_MS_STR.length(); + private static String ENABLE_STR = "enable_"; + private static int ENABLE_STR_LENGTH = ENABLE_STR.length(); + private static String IN_KB_STR = "_in_kb"; + private static int IN_KB_STR_LENGTH = IN_KB_STR.length(); + + @SuppressWarnings("unused") + private String getConfigKey(String originalKey, Object originalValue, Version cassandraVersion) { + + // At least for now we won't support substitutions on nested keys. 
This requires an extra + // traversal of the string + // but we'll live with that for now + if (originalKey.contains(".")) return originalKey; + if (cassandraVersion.compareTo(Version.V4_1_0) < 0) return originalKey; + if (originalKey.endsWith(IN_MS_STR)) + return originalKey.substring(0, originalKey.length() - IN_MS_STR_LENGTH); + if (originalKey.startsWith(ENABLE_STR)) + return originalKey.substring(ENABLE_STR_LENGTH) + "_enabled"; + if (originalKey.endsWith(IN_KB_STR)) + return originalKey.substring(0, originalKey.length() - IN_KB_STR_LENGTH); + return originalKey; + } + + private String getConfigValue( + String originalKey, Object originalValue, Version cassandraVersion) { + + String originalValueStr = originalValue.toString(); + if (cassandraVersion.compareTo(Version.V4_1_0) < 0) return originalValueStr; + if (originalKey.endsWith(IN_MS_STR)) return originalValueStr + "ms"; + if (originalKey.endsWith(IN_KB_STR)) return originalValueStr + "KiB"; + return originalValueStr; + } + public static Builder builder() { return new Builder(); } From b9760b473b6e6e30f5da5f743e37e02150e13e39 Mon Sep 17 00:00:00 2001 From: Nitin Chhabra Date: Thu, 30 Nov 2023 12:38:23 -0800 Subject: [PATCH 894/979] JAVA-3142: Ability to specify ordering of remote local dc's via new configuration for graceful automatic failovers patch by Nitin Chhabra; reviewed by Alexandre Dutra, Andy Tolbert, and Bret McGuire for JAVA-3142 --- .../api/core/config/DefaultDriverOption.java | 8 +- .../driver/api/core/config/OptionsMap.java | 2 + .../api/core/config/TypedDriverOption.java | 10 + .../BasicLoadBalancingPolicy.java | 84 ++++++-- core/src/main/resources/reference.conf | 5 + ...BalancingPolicyPreferredRemoteDcsTest.java | 184 ++++++++++++++++++ 6 files changed, 271 insertions(+), 22 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyPreferredRemoteDcsTest.java diff --git 
a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index afe16e96886..11f2702c3cf 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -982,7 +982,13 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: {@link java.time.Duration} */ SSL_KEYSTORE_RELOAD_INTERVAL("advanced.ssl-engine-factory.keystore-reload-interval"), - ; + /** + * Ordered preference list of remote dcs optionally supplied for automatic failover. + * + *

          Value type: {@link java.util.List List}<{@link String}> + */ + LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS( + "advanced.load-balancing-policy.dc-failover.preferred-remote-dcs"); private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java index 8906e1dd349..98faf3e590c 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/OptionsMap.java @@ -381,6 +381,8 @@ protected static void fillWithDriverDefaults(OptionsMap map) { map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC, 0); map.put(TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, false); map.put(TypedDriverOption.METRICS_GENERATE_AGGREGABLE_HISTOGRAMS, true); + map.put( + TypedDriverOption.LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS, ImmutableList.of("")); } @Immutable diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 88c012fa351..ca60b67f0ba 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -892,6 +892,16 @@ public String toString() { DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, GenericType.BOOLEAN); + /** + * Ordered preference list of remote dcs optionally supplied for automatic failover and included + * in query plan. This feature is enabled only when max-nodes-per-remote-dc is greater than 0. 
+ */ + public static final TypedDriverOption> + LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS = + new TypedDriverOption<>( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS, + GenericType.listOf(String.class)); + private static Iterable> introspectBuiltInValues() { try { ImmutableList.Builder> result = ImmutableList.builder(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index b1adec3f143..587ef4183bd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -45,10 +45,14 @@ import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.driver.shaded.guava.common.base.Predicates; +import com.datastax.oss.driver.shaded.guava.common.collect.Lists; +import com.datastax.oss.driver.shaded.guava.common.collect.Sets; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; import java.util.Collections; +import java.util.LinkedHashSet; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -117,6 +121,7 @@ public class BasicLoadBalancingPolicy implements LoadBalancingPolicy { private volatile NodeDistanceEvaluator nodeDistanceEvaluator; private volatile String localDc; private volatile NodeSet liveNodes; + private final LinkedHashSet preferredRemoteDcs; public BasicLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String profileName) { this.context = (InternalDriverContext) context; @@ -131,6 +136,11 @@ public BasicLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String 
this.context .getConsistencyLevelRegistry() .nameToLevel(profile.getString(DefaultDriverOption.REQUEST_CONSISTENCY)); + + preferredRemoteDcs = + new LinkedHashSet<>( + profile.getStringList( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS)); } /** @@ -320,27 +330,59 @@ protected Queue maybeAddDcFailover(@Nullable Request request, @NonNull Que return local; } } - QueryPlan remote = - new LazyQueryPlan() { - - @Override - protected Object[] computeNodes() { - Object[] remoteNodes = - liveNodes.dcs().stream() - .filter(Predicates.not(Predicates.equalTo(localDc))) - .flatMap(dc -> liveNodes.dc(dc).stream().limit(maxNodesPerRemoteDc)) - .toArray(); - - int remoteNodesLength = remoteNodes.length; - if (remoteNodesLength == 0) { - return EMPTY_NODES; - } - shuffleHead(remoteNodes, remoteNodesLength); - return remoteNodes; - } - }; - - return new CompositeQueryPlan(local, remote); + if (preferredRemoteDcs.isEmpty()) { + return new CompositeQueryPlan(local, buildRemoteQueryPlanAll()); + } + return new CompositeQueryPlan(local, buildRemoteQueryPlanPreferred()); + } + + private QueryPlan buildRemoteQueryPlanAll() { + + return new LazyQueryPlan() { + @Override + protected Object[] computeNodes() { + + Object[] remoteNodes = + liveNodes.dcs().stream() + .filter(Predicates.not(Predicates.equalTo(localDc))) + .flatMap(dc -> liveNodes.dc(dc).stream().limit(maxNodesPerRemoteDc)) + .toArray(); + if (remoteNodes.length == 0) { + return EMPTY_NODES; + } + shuffleHead(remoteNodes, remoteNodes.length); + return remoteNodes; + } + }; + } + + private QueryPlan buildRemoteQueryPlanPreferred() { + + Set dcs = liveNodes.dcs(); + List orderedDcs = Lists.newArrayListWithCapacity(dcs.size()); + orderedDcs.addAll(preferredRemoteDcs); + orderedDcs.addAll(Sets.difference(dcs, preferredRemoteDcs)); + + QueryPlan[] queryPlans = + orderedDcs.stream() + .filter(Predicates.not(Predicates.equalTo(localDc))) + .map( + (dc) -> { + return new LazyQueryPlan() { + @Override + protected 
Object[] computeNodes() { + Object[] rv = liveNodes.dc(dc).stream().limit(maxNodesPerRemoteDc).toArray(); + if (rv.length == 0) { + return EMPTY_NODES; + } + shuffleHead(rv, rv.length); + return rv; + } + }; + }) + .toArray(QueryPlan[]::new); + + return new CompositeQueryPlan(queryPlans); } /** Exposed as a protected method so that it can be accessed by tests */ diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index d1ac22e553b..7a56a18e9f1 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -574,6 +574,11 @@ datastax-java-driver { # Modifiable at runtime: no # Overridable in a profile: yes allow-for-local-consistency-levels = false + # Ordered preference list of remote dc's (in order) optionally supplied for automatic failover. While building a query plan, the driver uses the DC's supplied in order together with max-nodes-per-remote-dc + # Required: no + # Modifiable at runtime: no + # Overridable in a profile: no + preferred-remote-dcs = [""] } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyPreferredRemoteDcsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyPreferredRemoteDcsTest.java new file mode 100644 index 00000000000..cefdfd31189 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicyPreferredRemoteDcsTest.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.loadbalancing; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.atLeast; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.metadata.Node; +import com.datastax.oss.driver.internal.core.metadata.DefaultNode; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.util.Map; +import java.util.UUID; +import org.junit.Test; +import org.mockito.Mock; + +public class BasicLoadBalancingPolicyPreferredRemoteDcsTest + extends BasicLoadBalancingPolicyDcFailoverTest { + @Mock protected DefaultNode node10; + @Mock protected DefaultNode node11; + @Mock protected DefaultNode node12; + @Mock protected DefaultNode node13; + @Mock protected DefaultNode node14; + + @Override + @Test + public void should_prioritize_single_replica() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + 
when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)).thenReturn(ImmutableSet.of(node3)); + + // node3 always first, round-robin on the rest + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node3, node1, node2, node4, node5, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node3, node2, node4, node5, node1, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node3, node4, node5, node1, node2, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node3, node5, node1, node2, node4, node9, node10, node6, node7, node12, node13); + + // Should not shuffle replicas since there is only one + verify(policy, never()).shuffleHead(any(), eq(1)); + // But should shuffle remote nodes + verify(policy, times(12)).shuffleHead(any(), eq(2)); + } + + @Override + @Test + public void should_prioritize_and_shuffle_replicas() { + when(request.getRoutingKeyspace()).thenReturn(KEYSPACE); + when(request.getRoutingKey()).thenReturn(ROUTING_KEY); + when(tokenMap.getReplicas(KEYSPACE, ROUTING_KEY)) + .thenReturn(ImmutableSet.of(node1, node2, node3, node6, node9)); + + // node 6 and 9 being in a remote DC, they don't get a boost for being a replica + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node1, node2, node3, node4, node5, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node1, node2, node3, node5, node4, node9, node10, node6, node7, node12, node13); + + // should shuffle replicas + verify(policy, times(2)).shuffleHead(any(), eq(3)); + // should shuffle remote nodes + verify(policy, times(6)).shuffleHead(any(), eq(2)); + // No power of two choices with only two replicas + verify(session, never()).getPools(); + } + + @Override + protected void 
assertRoundRobinQueryPlans() { + for (int i = 0; i < 3; i++) { + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node1, node2, node3, node4, node5, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node2, node3, node4, node5, node1, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node3, node4, node5, node1, node2, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node4, node5, node1, node2, node3, node9, node10, node6, node7, node12, node13); + assertThat(policy.newQueryPlan(request, session)) + .containsExactly( + node5, node1, node2, node3, node4, node9, node10, node6, node7, node12, node13); + } + + verify(policy, atLeast(15)).shuffleHead(any(), eq(2)); + } + + @Override + protected BasicLoadBalancingPolicy createAndInitPolicy() { + when(node4.getDatacenter()).thenReturn("dc1"); + when(node5.getDatacenter()).thenReturn("dc1"); + when(node6.getDatacenter()).thenReturn("dc2"); + when(node7.getDatacenter()).thenReturn("dc2"); + when(node8.getDatacenter()).thenReturn("dc2"); + when(node9.getDatacenter()).thenReturn("dc3"); + when(node10.getDatacenter()).thenReturn("dc3"); + when(node11.getDatacenter()).thenReturn("dc3"); + when(node12.getDatacenter()).thenReturn("dc4"); + when(node13.getDatacenter()).thenReturn("dc4"); + when(node14.getDatacenter()).thenReturn("dc4"); + + // Accept 2 nodes per remote DC + when(defaultProfile.getInt( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(2); + when(defaultProfile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(false); + + when(defaultProfile.getStringList( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS)) + .thenReturn(ImmutableList.of("dc3", "dc2")); + + // Use a 
subclass to disable shuffling, we just spy to make sure that the shuffling method was + // called (makes tests easier) + BasicLoadBalancingPolicy policy = + spy( + new BasicLoadBalancingPolicy(context, DriverExecutionProfile.DEFAULT_NAME) { + @Override + protected void shuffleHead(Object[] currentNodes, int headLength) { + // nothing (keep in same order) + } + }); + Map nodes = + ImmutableMap.builder() + .put(UUID.randomUUID(), node1) + .put(UUID.randomUUID(), node2) + .put(UUID.randomUUID(), node3) + .put(UUID.randomUUID(), node4) + .put(UUID.randomUUID(), node5) + .put(UUID.randomUUID(), node6) + .put(UUID.randomUUID(), node7) + .put(UUID.randomUUID(), node8) + .put(UUID.randomUUID(), node9) + .put(UUID.randomUUID(), node10) + .put(UUID.randomUUID(), node11) + .put(UUID.randomUUID(), node12) + .put(UUID.randomUUID(), node13) + .put(UUID.randomUUID(), node14) + .build(); + policy.init(nodes, distanceReporter); + assertThat(policy.getLiveNodes().dc("dc1")).containsExactly(node1, node2, node3, node4, node5); + assertThat(policy.getLiveNodes().dc("dc2")).containsExactly(node6, node7); // only 2 allowed + assertThat(policy.getLiveNodes().dc("dc3")).containsExactly(node9, node10); // only 2 allowed + assertThat(policy.getLiveNodes().dc("dc4")).containsExactly(node12, node13); // only 2 allowed + return policy; + } +} From 3a687377449f736ba1ed28bfcff824982b3138c4 Mon Sep 17 00:00:00 2001 From: janehe Date: Wed, 17 Apr 2024 14:59:59 -0700 Subject: [PATCH 895/979] CASSANDRA-19568: Use Jabba to specify Java 1.8 for building the driver patch by Jane He and Bret McGuire; reviewed by Bret McGuire for CASSANDRA-19568 --- Jenkinsfile | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 0bfa4ca7f4a..69ee0a294c0 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -98,14 +98,16 @@ ENVIRONMENT_EOF } def buildDriver(jabbaVersion) { - withEnv(["BUILD_JABBA_VERSION=${jabbaVersion}"]) { - sh label: 'Build driver', script: 
'''#!/bin/bash -le - . ${JABBA_SHELL} - jabba use ${BUILD_JABBA_VERSION} + def buildDriverScript = '''#!/bin/bash -le - mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true - ''' - } + . ${JABBA_SHELL} + jabba use '''+jabbaVersion+''' + + echo "Building with Java version '''+jabbaVersion+''' + + mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true + ''' + sh label: 'Build driver', script: buildDriverScript } def executeTests() { @@ -484,7 +486,7 @@ pipeline { } stage('Build-Driver') { steps { - buildDriver('default') + buildDriver('1.8') } } stage('Execute-Tests') { @@ -600,8 +602,7 @@ pipeline { } stage('Build-Driver') { steps { - // Jabba default should be a JDK8 for now - buildDriver('default') + buildDriver('1.8') } } stage('Execute-Tests') { From 4bc346885fd373906ed6106b76df6d494cb51b67 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 8 May 2024 21:37:02 -0500 Subject: [PATCH 896/979] ninja-fix CASSANDRA-19568: fixing mangled Groovy --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 69ee0a294c0..d38b7c63849 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -103,7 +103,7 @@ def buildDriver(jabbaVersion) { . 
${JABBA_SHELL} jabba use '''+jabbaVersion+''' - echo "Building with Java version '''+jabbaVersion+''' + echo "Building with Java version '''+jabbaVersion+'''" mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true ''' From ac452336356c30b125524f31dfa82cf8a465d716 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 16 May 2024 20:55:25 -0500 Subject: [PATCH 897/979] ninja-fix updating repo for releases --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 7decc96633a..b4102b3380f 100644 --- a/pom.xml +++ b/pom.xml @@ -755,7 +755,7 @@ limitations under the License.]]> true ossrh - https://oss.sonatype.org/ + https://repository.apache.org/ false true From 3151129f7043a1b222131989584b07288c404be8 Mon Sep 17 00:00:00 2001 From: Nitin Chhabra Date: Wed, 8 May 2024 16:54:43 -0700 Subject: [PATCH 898/979] JAVA-3142: Improving the documentation for remote local dc's feature patch by Nitin Chhabra; reviewed by Bret McGuire for JAVA-3142 --- core/src/main/resources/reference.conf | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 7a56a18e9f1..7b1c43f8bea 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -574,7 +574,9 @@ datastax-java-driver { # Modifiable at runtime: no # Overridable in a profile: yes allow-for-local-consistency-levels = false + # Ordered preference list of remote dc's (in order) optionally supplied for automatic failover. 
While building a query plan, the driver uses the DC's supplied in order together with max-nodes-per-remote-dc + # Users are not required to specify all DCs, when listing preferences via this config # Required: no # Modifiable at runtime: no # Overridable in a profile: no From f60e75842fa99cbb728a716c0236a89caa19b39c Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Fri, 17 May 2024 12:27:53 -0500 Subject: [PATCH 899/979] ninja-fix changlog updates for 4.18.1 --- changelog/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 7807ef15f95..83ebb44239f 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -21,6 +21,16 @@ under the License. +### 4.18.1 + +- [improvement] JAVA-3142: Ability to specify ordering of remote local dc's via new configuration for graceful automatic failovers +- [bug] CASSANDRA-19457: Object reference in Micrometer metrics prevent GC from reclaiming Session instances +- [improvement] CASSANDRA-19468: Don't swallow exception during metadata refresh +- [bug] CASSANDRA-19333: Fix data corruption in VectorCodec when using heap buffers +- [improvement] CASSANDRA-19290: Replace uses of AttributeKey.newInstance +- [improvement] CASSANDRA-19352: Support native_transport_(address|port) + native_transport_port_ssl for DSE 6.8 (4.x edition) +- [improvement] CASSANDRA-19180: Support reloading keystore in cassandra-java-driver + ### 4.18.0 - [improvement] PR 1689: Add support for publishing percentile time series for the histogram metrics (nparaddi-walmart) From cbdde2878786fa6c4077a21352cbe738875f2106 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 20 May 2024 09:57:23 -0500 Subject: [PATCH 900/979] [maven-release-plugin] prepare release 4.18.1 --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 
+- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 72e00c48355..87920ed984a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-bom pom @@ -33,42 +33,42 @@ org.apache.cassandra java-driver-core - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-core-shaded - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-mapper-processor - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-mapper-runtime - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-query-builder - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-test-infra - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-metrics-micrometer - 4.18.1-SNAPSHOT + 4.18.1 org.apache.cassandra java-driver-metrics-microprofile - 4.18.1-SNAPSHOT + 4.18.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index c2768c3a642..93a74696c1b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index c54c6b8c642..59465763c71 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index 8c4f695afdd..dc0cdfd1a43 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-distribution-source pom diff --git 
a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 099bddba900..11a0797b3cf 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 8933d3f5f3a..8bfbeecd8b0 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 7e2d7f1b6d0..0cf0c6389ec 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.18.1-SNAPSHOT + 4.18.1 java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 5c684e90b2a..f867bcce7db 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 768327591d6..47f816bdc0d 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 95ead75ddd8..ca3a27367c5 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 1405ae0b6c2..e0e4e3fe709 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 
4.18.1-SNAPSHOT + 4.18.1 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 6ba084396d1..fcbc7e1a54f 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 859a69400b9..e48be59f1c1 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index b4102b3380f..14d5d9c84ff 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1017,7 +1017,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.18.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index f1828b62462..eaa974030d3 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 9089d4d1019..25a8ad2f147 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1-SNAPSHOT + 4.18.1 java-driver-test-infra bundle From db4c8075e11d6dc020552d711c2a2e96dc651ad4 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 20 May 2024 09:57:26 -0500 Subject: [PATCH 901/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 18 +++++++++--------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- 
distribution/pom.xml | 2 +- examples/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 16 files changed, 25 insertions(+), 25 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 87920ed984a..96b7a6ceb18 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-bom pom @@ -33,42 +33,42 @@ org.apache.cassandra java-driver-core - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-core-shaded - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-mapper-processor - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-mapper-runtime - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-query-builder - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-test-infra - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-metrics-micrometer - 4.18.1 + 4.18.2-SNAPSHOT org.apache.cassandra java-driver-metrics-microprofile - 4.18.1 + 4.18.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 93a74696c1b..6c139aab127 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 59465763c71..33688754f1b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index dc0cdfd1a43..ee5b52958c3 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 
4.18.2-SNAPSHOT java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 11a0797b3cf..fafd8c4678b 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 8bfbeecd8b0..dfc406baf43 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 0cf0c6389ec..a76cc8d2bf1 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.18.1 + 4.18.2-SNAPSHOT java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index f867bcce7db..32cabdb34a7 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 47f816bdc0d..61906f41987 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index ca3a27367c5..28483ee93ff 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index e0e4e3fe709..8ab939cbb37 100644 --- a/metrics/micrometer/pom.xml +++ 
b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index fcbc7e1a54f..521a67f9075 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index e48be59f1c1..5947aff1bc5 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 14d5d9c84ff..082daeb3566 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1017,7 +1017,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.18.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index eaa974030d3..bae0e0c6ca0 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 25a8ad2f147..262627e5536 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.1 + 4.18.2-SNAPSHOT java-driver-test-infra bundle From 432e107bc6a2dda19385b7c423d2768e3a879965 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Thu, 16 May 2024 14:13:05 +0200 Subject: [PATCH 902/979] CASSANDRA-19635: Run integration tests with C* 5.x patch by Lukasz Antoniak; reviewed by Andy Tolbert, and Bret McGuire for CASSANDRA-19635 
--- integration-tests/pom.xml | 3 + .../core/auth/DseProxyAuthenticationIT.java | 60 ++++++++------- .../oss/driver/core/cql/AsyncResultSetIT.java | 18 +++-- .../oss/driver/core/cql/BatchStatementIT.java | 18 +++-- .../driver/core/cql/BoundStatementCcmIT.java | 73 ++++++++++--------- .../core/cql/ExecutionInfoWarningsIT.java | 17 +++-- .../oss/driver/core/cql/PagingStateIT.java | 14 ++-- .../driver/core/cql/PerRequestKeyspaceIT.java | 60 ++++++++------- .../core/cql/PreparedStatementCachingIT.java | 49 ++++++++++--- .../reactive/DefaultReactiveResultSetIT.java | 32 ++++---- .../oss/driver/core/metadata/DescribeIT.java | 23 +++--- .../oss/driver/core/metadata/SchemaIT.java | 14 ++++ .../type/codec/registry/CodecRegistryIT.java | 63 ++++++++-------- .../datastax/oss/driver/mapper/DeleteIT.java | 9 +-- .../oss/driver/mapper/DeleteReactiveIT.java | 18 +++-- .../driver/mapper/EntityPolymorphismIT.java | 38 ++++++---- .../oss/driver/mapper/ImmutableEntityIT.java | 14 +++- .../oss/driver/mapper/InventoryITBase.java | 8 +- .../oss/driver/mapper/NestedUdtIT.java | 48 ++++++------ .../mapper/SelectCustomWhereClauseIT.java | 14 +++- .../oss/driver/mapper/SelectReactiveIT.java | 14 +++- .../datastax/oss/driver/mapper/UpdateIT.java | 21 ++++-- .../osgi/support/CcmStagedReactor.java | 2 +- pom.xml | 11 +++ .../driver/api/testinfra/ccm/BaseCcmRule.java | 4 +- .../driver/api/testinfra/ccm/CcmBridge.java | 20 +++-- .../ccm/SchemaChangeSynchronizer.java | 42 +++++++++++ .../api/testinfra/session/SessionRule.java | 6 +- 28 files changed, 455 insertions(+), 258 deletions(-) create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/SchemaChangeSynchronizer.java diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 32cabdb34a7..d1b0a736bb0 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -242,6 +242,8 @@ 8 ${project.build.directory}/failsafe-reports/failsafe-summary-parallelized.xml ${skipParallelizableITs} + 
${blockhound.argline} + ${testing.jvm}/bin/java @@ -253,6 +255,7 @@ com.datastax.oss.driver.categories.ParallelizableTests, com.datastax.oss.driver.categories.IsolatedTests ${project.build.directory}/failsafe-reports/failsafe-summary-serial.xml ${skipSerialITs} + ${blockhound.argline} ${testing.jvm}/bin/java diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java index 126a110da1a..a3f1c04afc0 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/auth/DseProxyAuthenticationIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.cql.ResultSet; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.servererrors.UnauthorizedException; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; @@ -57,33 +58,38 @@ public static void addUsers() { @Before public void setupRoles() { - try (CqlSession session = ads.newKeyTabSession()) { - session.execute( - "CREATE ROLE IF NOT EXISTS alice WITH PASSWORD = 'fakePasswordForAlice' AND LOGIN = FALSE"); - session.execute( - "CREATE ROLE IF NOT EXISTS ben WITH PASSWORD = 'fakePasswordForBen' AND LOGIN = TRUE"); - session.execute("CREATE ROLE IF NOT EXISTS 'bob@DATASTAX.COM' WITH LOGIN = TRUE"); - session.execute( - "CREATE ROLE IF NOT EXISTS 'charlie@DATASTAX.COM' WITH PASSWORD = 'fakePasswordForCharlie' AND LOGIN = TRUE"); - session.execute( - "CREATE ROLE IF NOT EXISTS steve WITH PASSWORD = 'fakePasswordForSteve' AND LOGIN = TRUE"); - session.execute( - "CREATE 
KEYSPACE IF NOT EXISTS aliceks WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':'1'}"); - session.execute( - "CREATE TABLE IF NOT EXISTS aliceks.alicetable (key text PRIMARY KEY, value text)"); - session.execute("INSERT INTO aliceks.alicetable (key, value) VALUES ('hello', 'world')"); - session.execute("GRANT ALL ON KEYSPACE aliceks TO alice"); - session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'ben'"); - session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'bob@DATASTAX.COM'"); - session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'steve'"); - session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'charlie@DATASTAX.COM'"); - session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'ben'"); - session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'bob@DATASTAX.COM'"); - session.execute("GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'steve'"); - session.execute("GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'charlie@DATASTAX.COM'"); - // ben and bob are allowed to login as alice, but not execute as alice. - // charlie and steve are allowed to execute as alice, but not login as alice. 
- } + SchemaChangeSynchronizer.withLock( + () -> { + try (CqlSession session = ads.newKeyTabSession()) { + session.execute( + "CREATE ROLE IF NOT EXISTS alice WITH PASSWORD = 'fakePasswordForAlice' AND LOGIN = FALSE"); + session.execute( + "CREATE ROLE IF NOT EXISTS ben WITH PASSWORD = 'fakePasswordForBen' AND LOGIN = TRUE"); + session.execute("CREATE ROLE IF NOT EXISTS 'bob@DATASTAX.COM' WITH LOGIN = TRUE"); + session.execute( + "CREATE ROLE IF NOT EXISTS 'charlie@DATASTAX.COM' WITH PASSWORD = 'fakePasswordForCharlie' AND LOGIN = TRUE"); + session.execute( + "CREATE ROLE IF NOT EXISTS steve WITH PASSWORD = 'fakePasswordForSteve' AND LOGIN = TRUE"); + session.execute( + "CREATE KEYSPACE IF NOT EXISTS aliceks WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':'1'}"); + session.execute( + "CREATE TABLE IF NOT EXISTS aliceks.alicetable (key text PRIMARY KEY, value text)"); + session.execute( + "INSERT INTO aliceks.alicetable (key, value) VALUES ('hello', 'world')"); + session.execute("GRANT ALL ON KEYSPACE aliceks TO alice"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'ben'"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'bob@DATASTAX.COM'"); + session.execute("GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'steve'"); + session.execute( + "GRANT EXECUTE ON ALL AUTHENTICATION SCHEMES TO 'charlie@DATASTAX.COM'"); + session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'ben'"); + session.execute("GRANT PROXY.LOGIN ON ROLE 'alice' TO 'bob@DATASTAX.COM'"); + session.execute("GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'steve'"); + session.execute("GRANT PROXY.EXECUTE ON ROLE 'alice' TO 'charlie@DATASTAX.COM'"); + // ben and bob are allowed to login as alice, but not execute as alice. + // charlie and steve are allowed to execute as alice, but not login as alice. 
+ } + }); } /** * Validates that a connection may be successfully made as user 'alice' using the credentials of a diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java index 2d01043b46a..e109c28525e 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/AsyncResultSetIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -67,13 +68,16 @@ public class AsyncResultSetIT { @BeforeClass public static void setupSchema() { // create table and load data across two partitions so we can test paging across tokens. 
- SESSION_RULE - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test (k0 text, k1 int, v int, PRIMARY KEY(k0, k1))") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test (k0 text, k1 int, v int, PRIMARY KEY(k0, k1))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + }); PreparedStatement prepared = SESSION_RULE.session().prepare("INSERT INTO test (k0, k1, v) VALUES (?, ?, ?)"); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java index 04e5798be5a..8b652638729 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BatchStatementIT.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -72,13 +73,16 @@ public void createTable() { "CREATE TABLE counter3 (k0 text PRIMARY KEY, c counter)", }; - for (String schemaStatement : schemaStatements) { - sessionRule - .session() - .execute( - SimpleStatement.newInstance(schemaStatement) - .setExecutionProfile(sessionRule.slowProfile())); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String schemaStatement : schemaStatements) { + sessionRule + .session() + .execute( + SimpleStatement.newInstance(schemaStatement) + 
.setExecutionProfile(sessionRule.slowProfile())); + } + }); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java index 79156fcce50..9e4b62cd230 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/BoundStatementCcmIT.java @@ -40,6 +40,7 @@ import com.datastax.oss.driver.api.core.metadata.token.Token; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -94,40 +95,44 @@ public class BoundStatementCcmIT { @Before public void setupSchema() { // table where every column forms the primary key. - sessionRule - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test (k text, v int, PRIMARY KEY(k, v))") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); - for (int i = 0; i < 100; i++) { - sessionRule - .session() - .execute( - SimpleStatement.builder("INSERT INTO test (k, v) VALUES (?, ?)") - .addPositionalValues(KEY, i) - .build()); - } - - // table with simple primary key, single cell. 
- sessionRule - .session() - .execute( - SimpleStatement.builder("CREATE TABLE IF NOT EXISTS test2 (k text primary key, v0 int)") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); - - // table with composite partition key - sessionRule - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test3 " - + "(pk1 int, pk2 int, v int, " - + "PRIMARY KEY ((pk1, pk2)))") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + sessionRule + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test (k text, v int, PRIMARY KEY(k, v))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + for (int i = 0; i < 100; i++) { + sessionRule + .session() + .execute( + SimpleStatement.builder("INSERT INTO test (k, v) VALUES (?, ?)") + .addPositionalValues(KEY, i) + .build()); + } + + // table with simple primary key, single cell. + sessionRule + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test2 (k text primary key, v0 int)") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + + // table with composite partition key + sessionRule + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test3 " + + "(pk1 int, pk2 int, v int, " + + "PRIMARY KEY ((pk1, pk2)))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + }); } @Test diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java index 5907206d11a..edee9723a38 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/ExecutionInfoWarningsIT.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import 
com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -88,12 +89,16 @@ public class ExecutionInfoWarningsIT { @Before public void createSchema() { // table with simple primary key, single cell. - sessionRule - .session() - .execute( - SimpleStatement.builder("CREATE TABLE IF NOT EXISTS test (k int primary key, v text)") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + sessionRule + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test (k int primary key, v text)") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + }); for (int i = 0; i < 100; i++) { sessionRule .session() diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java index dcd801f19a4..6d33f35238a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PagingStateIT.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.type.codec.MappingCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -55,11 +56,14 @@ public class PagingStateIT { @Before public void setupSchema() 
{ CqlSession session = SESSION_RULE.session(); - session.execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS foo (k int, cc int, v int, PRIMARY KEY(k, cc))") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + session.execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS foo (k int, cc int, v int, PRIMARY KEY(k, cc))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + }); for (int i = 0; i < 20; i++) { session.execute( SimpleStatement.newInstance("INSERT INTO foo (k, cc, v) VALUES (1, ?, ?)", i, i)); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java index 2b418e76f75..9eb883144db 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PerRequestKeyspaceIT.java @@ -31,6 +31,7 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -67,13 +68,16 @@ public class PerRequestKeyspaceIT { @Before public void setupSchema() { - sessionRule - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS foo (k text, cc int, v int, PRIMARY KEY(k, cc))") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + sessionRule + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS foo (k text, cc int, v int, PRIMARY 
KEY(k, cc))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + }); } @Test @@ -220,27 +224,31 @@ public void should_prepare_statement_with_keyspace() { @BackendRequirement(type = BackendType.CASSANDRA, minInclusive = "4.0") public void should_reprepare_statement_with_keyspace_on_the_fly() { // Create a separate session because we don't want it to have a default keyspace - try (CqlSession session = SessionUtils.newSession(ccmRule)) { - executeDdl( - session, - String.format( - "CREATE TABLE IF NOT EXISTS %s.bar (k int primary key)", sessionRule.keyspace())); - PreparedStatement pst = - session.prepare( - SimpleStatement.newInstance("SELECT * FROM bar WHERE k=?") - .setKeyspace(sessionRule.keyspace())); + SchemaChangeSynchronizer.withLock( + () -> { + try (CqlSession session = SessionUtils.newSession(ccmRule)) { + executeDdl( + session, + String.format( + "CREATE TABLE IF NOT EXISTS %s.bar (k int primary key)", + sessionRule.keyspace())); + PreparedStatement pst = + session.prepare( + SimpleStatement.newInstance("SELECT * FROM bar WHERE k=?") + .setKeyspace(sessionRule.keyspace())); - // Drop and re-create the table to invalidate the prepared statement server side - executeDdl(session, String.format("DROP TABLE %s.bar", sessionRule.keyspace())); - executeDdl( - session, - String.format("CREATE TABLE %s.bar (k int primary key)", sessionRule.keyspace())); - assertThat(preparedStatementExistsOnServer(session, pst.getId())).isFalse(); + // Drop and re-create the table to invalidate the prepared statement server side + executeDdl(session, String.format("DROP TABLE %s.bar", sessionRule.keyspace())); + executeDdl( + session, + String.format("CREATE TABLE %s.bar (k int primary key)", sessionRule.keyspace())); + assertThat(preparedStatementExistsOnServer(session, pst.getId())).isFalse(); - // This will re-prepare on the fly - session.execute(pst.bind(0)); - assertThat(preparedStatementExistsOnServer(session, pst.getId())).isTrue(); - } + // This will 
re-prepare on the fly + session.execute(pst.bind(0)); + assertThat(preparedStatementExistsOnServer(session, pst.getId())).isTrue(); + } + }); } private void executeDdl(CqlSession session, String query) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java index 92c6fd8a12e..05ac3bd0e92 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.session.SessionBuilder; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; @@ -305,12 +306,19 @@ private void invalidationTestInner( @Test public void should_invalidate_cache_entry_on_basic_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestBasic, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationResultSetTest(setupCacheEntryTestBasic, ImmutableSet.of("test_type_2")); + }); } @Test public void should_invalidate_cache_entry_on_basic_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestBasic, false, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationVariableDefsTest( + setupCacheEntryTestBasic, false, ImmutableSet.of("test_type_2")); + }); } Consumer setupCacheEntryTestCollection = @@ -325,13 +333,19 @@ public void should_invalidate_cache_entry_on_basic_udt_change_variable_defs() { @Test 
public void should_invalidate_cache_entry_on_collection_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestCollection, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationResultSetTest(setupCacheEntryTestCollection, ImmutableSet.of("test_type_2")); + }); } @Test public void should_invalidate_cache_entry_on_collection_udt_change_variable_defs() { - invalidationVariableDefsTest( - setupCacheEntryTestCollection, true, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationVariableDefsTest( + setupCacheEntryTestCollection, true, ImmutableSet.of("test_type_2")); + }); } Consumer setupCacheEntryTestTuple = @@ -346,12 +360,19 @@ public void should_invalidate_cache_entry_on_collection_udt_change_variable_defs @Test public void should_invalidate_cache_entry_on_tuple_udt_change_result_set() { - invalidationResultSetTest(setupCacheEntryTestTuple, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationResultSetTest(setupCacheEntryTestTuple, ImmutableSet.of("test_type_2")); + }); } @Test public void should_invalidate_cache_entry_on_tuple_udt_change_variable_defs() { - invalidationVariableDefsTest(setupCacheEntryTestTuple, false, ImmutableSet.of("test_type_2")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationVariableDefsTest( + setupCacheEntryTestTuple, false, ImmutableSet.of("test_type_2")); + }); } Consumer setupCacheEntryTestNested = @@ -366,14 +387,20 @@ public void should_invalidate_cache_entry_on_tuple_udt_change_variable_defs() { @Test public void should_invalidate_cache_entry_on_nested_udt_change_result_set() { - invalidationResultSetTest( - setupCacheEntryTestNested, ImmutableSet.of("test_type_2", "test_type_4")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationResultSetTest( + setupCacheEntryTestNested, ImmutableSet.of("test_type_2", "test_type_4")); + }); } @Test public void 
should_invalidate_cache_entry_on_nested_udt_change_variable_defs() { - invalidationVariableDefsTest( - setupCacheEntryTestNested, false, ImmutableSet.of("test_type_2", "test_type_4")); + SchemaChangeSynchronizer.withLock( + () -> { + invalidationVariableDefsTest( + setupCacheEntryTestNested, false, ImmutableSet.of("test_type_2", "test_type_4")); + }); } /* ========================= Infrastructure copied from PreparedStatementIT ========================= */ diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/reactive/DefaultReactiveResultSetIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/reactive/DefaultReactiveResultSetIT.java index cfb6a56fac2..c00cf064e51 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/reactive/DefaultReactiveResultSetIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/reactive/DefaultReactiveResultSetIT.java @@ -32,6 +32,7 @@ import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.cql.EmptyColumnDefinitions; @@ -64,20 +65,23 @@ public class DefaultReactiveResultSetIT { @BeforeClass public static void initialize() { CqlSession session = sessionRule.session(); - session.execute("DROP TABLE IF EXISTS test_reactive_read"); - session.execute("DROP TABLE IF EXISTS test_reactive_write"); - session.checkSchemaAgreement(); - session.execute( - SimpleStatement.builder( - "CREATE TABLE test_reactive_read (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); - session.execute( - SimpleStatement.builder( - "CREATE TABLE 
test_reactive_write (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") - .setExecutionProfile(sessionRule.slowProfile()) - .build()); - session.checkSchemaAgreement(); + SchemaChangeSynchronizer.withLock( + () -> { + session.execute("DROP TABLE IF EXISTS test_reactive_read"); + session.execute("DROP TABLE IF EXISTS test_reactive_write"); + session.checkSchemaAgreement(); + session.execute( + SimpleStatement.builder( + "CREATE TABLE test_reactive_read (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.execute( + SimpleStatement.builder( + "CREATE TABLE test_reactive_write (pk int, cc int, v int, PRIMARY KEY ((pk), cc))") + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + session.checkSchemaAgreement(); + }); for (int i = 0; i < 1000; i++) { session.execute( SimpleStatement.builder("INSERT INTO test_reactive_read (pk, cc, v) VALUES (0, ?, ?)") diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index d8239f31872..9fbf5e355eb 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -224,15 +225,17 @@ private static String getScriptContents() { private static void setupDatabase() { List statements = 
STATEMENT_SPLITTER.splitToList(scriptContents); - - // Skip the first statement (CREATE KEYSPACE), we already have a keyspace - for (int i = 1; i < statements.size(); i++) { - String statement = statements.get(i); - try { - SESSION_RULE.session().execute(statement); - } catch (Exception e) { - fail("Error executing statement %s (%s)", statement, e); - } - } + SchemaChangeSynchronizer.withLock( + () -> { + // Skip the first statement (CREATE KEYSPACE), we already have a keyspace + for (int i = 1; i < statements.size(); i++) { + String statement = statements.get(i); + try { + SESSION_RULE.session().execute(statement); + } catch (Exception e) { + fail("Error executing statement %s (%s)", statement, e); + } + } + }); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 6495b451df7..805b2d970cc 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -278,6 +278,20 @@ public void should_get_virtual_metadata() { + " total bigint,\n" + " unit text,\n" + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + + "); */", + // Cassandra 5.0 + "/* VIRTUAL TABLE system_views.sstable_tasks (\n" + + " keyspace_name text,\n" + + " table_name text,\n" + + " task_id timeuuid,\n" + + " completion_ratio double,\n" + + " kind text,\n" + + " progress bigint,\n" + + " sstables int,\n" + + " target_directory text,\n" + + " total bigint,\n" + + " unit text,\n" + + " PRIMARY KEY (keyspace_name, table_name, task_id)\n" + "); */"); // ColumnMetadata is as expected ColumnMetadata cm = tm.getColumn("progress").get(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java index 2f9a0872b37..74472e8bab9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/type/codec/registry/CodecRegistryIT.java @@ -38,6 +38,7 @@ import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -78,35 +79,39 @@ public class CodecRegistryIT { @BeforeClass public static void createSchema() { - // table with simple primary key, single cell. - SESSION_RULE - .session() - .execute( - SimpleStatement.builder("CREATE TABLE IF NOT EXISTS test (k text primary key, v int)") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); - // table with map value - SESSION_RULE - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test2 (k0 text, k1 int, v map, primary key (k0, k1))") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); - // table with UDT - SESSION_RULE - .session() - .execute( - SimpleStatement.builder("CREATE TYPE IF NOT EXISTS coordinates (x int, y int)") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); - SESSION_RULE - .session() - .execute( - SimpleStatement.builder( - "CREATE TABLE IF NOT EXISTS test3 (k0 text, k1 int, v map>, primary key (k0, k1))") - .setExecutionProfile(SESSION_RULE.slowProfile()) - .build()); + SchemaChangeSynchronizer.withLock( + () -> { + // table with simple primary key, single cell. 
+ SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test (k text primary key, v int)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + // table with map value + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test2 (k0 text, k1 int, v map, primary key (k0, k1))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + // table with UDT + SESSION_RULE + .session() + .execute( + SimpleStatement.builder("CREATE TYPE IF NOT EXISTS coordinates (x int, y int)") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + SESSION_RULE + .session() + .execute( + SimpleStatement.builder( + "CREATE TABLE IF NOT EXISTS test3 (k0 text, k1 int, v map>, primary key (k0, k1))") + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + }); } // A simple codec that allows float values to be used for cassandra int column type. diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 8918e6020ec..0acdbeae53a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -38,11 +38,10 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.categories.ParallelizableTests; import 
com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import java.util.UUID; import java.util.concurrent.CompletableFuture; @@ -51,18 +50,18 @@ import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; -import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@Category(ParallelizableTests.class) +// Do not run LWT tests in parallel because they may interfere. Tests operate on the same row. @BackendRequirement( type = BackendType.CASSANDRA, minInclusive = "3.0", description = ">= in WHERE clause not supported in legacy versions") public class DeleteIT extends InventoryITBase { - private static final CcmRule CCM_RULE = CcmRule.getInstance(); + private static CustomCcmRule CCM_RULE = + CustomCcmRule.builder().withCassandraConfiguration("enable_sasi_indexes", "true").build(); private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java index 928fbd6fb8a..3a418c73653 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java @@ -24,6 +24,7 @@ import com.datastax.dse.driver.api.mapper.reactive.MappedReactiveResultSet; import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.mapper.annotations.Dao; import com.datastax.oss.driver.api.mapper.annotations.DaoFactory; @@ -34,28 +35,35 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import 
com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionRule; -import com.datastax.oss.driver.categories.ParallelizableTests; import io.reactivex.Flowable; import java.util.UUID; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; -import org.junit.experimental.categories.Category; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -@Category(ParallelizableTests.class) +// Do not run LWT tests in parallel because they may interfere. Tests operate on the same row. public class DeleteReactiveIT extends InventoryITBase { - private static CcmRule ccmRule = CcmRule.getInstance(); + private static CustomCcmRule ccmRule = configureCcm(CustomCcmRule.builder()).build(); private static SessionRule sessionRule = SessionRule.builder(ccmRule).build(); @ClassRule public static TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + private static CustomCcmRule.Builder configureCcm(CustomCcmRule.Builder builder) { + if (!CcmBridge.DSE_ENABLEMENT + && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { + builder.withCassandraConfiguration("enable_sasi_indexes", true); + } + return builder; + } + private static DseProductDao dao; @BeforeClass diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java index 08b806af684..3e532e97c00 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/EntityPolymorphismIT.java @@ -47,6 +47,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Update; 
import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; @@ -83,22 +84,27 @@ public class EntityPolymorphismIT { @BeforeClass public static void setup() { CqlSession session = SESSION_RULE.session(); - for (String query : - ImmutableList.of( - "CREATE TYPE point2d (\"X\" int, \"Y\" int)", - "CREATE TYPE point3d (\"X\" int, \"Y\" int, \"Z\" int)", - "CREATE TABLE circles (circle_id uuid PRIMARY KEY, center2d frozen, radius " - + "double, tags set)", - "CREATE TABLE rectangles (rect_id uuid PRIMARY KEY, bottom_left frozen, top_right frozen, tags set)", - "CREATE TABLE squares (square_id uuid PRIMARY KEY, bottom_left frozen, top_right frozen, tags set)", - "CREATE TABLE spheres (sphere_id uuid PRIMARY KEY, center3d frozen, radius " - + "double, tags set)", - "CREATE TABLE devices (device_id uuid PRIMARY KEY, name text)", - "CREATE TABLE tracked_devices (device_id uuid PRIMARY KEY, name text, location text)", - "CREATE TABLE simple_devices (id uuid PRIMARY KEY, in_use boolean)")) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : + ImmutableList.of( + "CREATE TYPE point2d (\"X\" int, \"Y\" int)", + "CREATE TYPE point3d (\"X\" int, \"Y\" int, \"Z\" int)", + "CREATE TABLE circles (circle_id uuid PRIMARY KEY, center2d frozen, radius " + + "double, tags set)", + "CREATE TABLE rectangles (rect_id uuid PRIMARY KEY, bottom_left frozen, top_right frozen, tags set)", + "CREATE TABLE squares (square_id uuid PRIMARY KEY, bottom_left frozen, top_right frozen, tags set)", + "CREATE TABLE spheres 
(sphere_id uuid PRIMARY KEY, center3d frozen, radius " + + "double, tags set)", + "CREATE TABLE devices (device_id uuid PRIMARY KEY, name text)", + "CREATE TABLE tracked_devices (device_id uuid PRIMARY KEY, name text, location text)", + "CREATE TABLE simple_devices (id uuid PRIMARY KEY, in_use boolean)")) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + }); mapper = new EntityPolymorphismIT_TestMapperBuilder(session).build(); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java index 555b02c0283..bdfe92a23f9 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/ImmutableEntityIT.java @@ -42,6 +42,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.util.Objects; @@ -70,10 +71,15 @@ public class ImmutableEntityIT extends InventoryITBase { public static void setup() { CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(CCM_RULE)) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + }); UserDefinedType dimensions2d = session diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java index 75ceee1f2a5..2be025b3739 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java @@ -22,7 +22,7 @@ import com.datastax.oss.driver.api.mapper.annotations.ClusteringColumn; import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; -import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.BaseCcmRule; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.List; import java.util.Objects; @@ -58,7 +58,7 @@ public abstract class InventoryITBase { protected static ProductSale MP3_DOWNLOAD_SALE_1 = new ProductSale(MP3_DOWNLOAD.getId(), DATE_3, 7, Uuids.startOf(915192000), 0.99, 12); - protected static List createStatements(CcmRule ccmRule) { + protected static List createStatements(BaseCcmRule ccmRule) { ImmutableList.Builder builder = ImmutableList.builder() .add( @@ -92,13 +92,13 @@ protected static List createStatements(CcmRule ccmRule) { private static final Version MINIMUM_SASI_VERSION = Version.parse("3.4.0"); private static final Version BROKEN_SASI_VERSION = Version.parse("6.8.0"); - protected static boolean isSasiBroken(CcmRule ccmRule) { + protected static boolean isSasiBroken(BaseCcmRule ccmRule) { Optional dseVersion = ccmRule.getDseVersion(); // creating SASI indexes is broken in DSE 6.8.0 return dseVersion.isPresent() && dseVersion.get().compareTo(BROKEN_SASI_VERSION) == 0; } - protected static boolean supportsSASI(CcmRule ccmRule) { + protected static boolean supportsSASI(BaseCcmRule ccmRule) { return ccmRule.getCassandraVersion().compareTo(MINIMUM_SASI_VERSION) >= 0; } diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java index 43d41a9c93b..d61b6f6e628 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/NestedUdtIT.java @@ -41,6 +41,7 @@ import com.datastax.oss.driver.api.mapper.annotations.SetEntity; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -119,27 +120,32 @@ public class NestedUdtIT { public static void setup() { CqlSession session = SESSION_RULE.session(); - for (String query : - ImmutableList.of( - "CREATE TYPE type1(s1 text, s2 text)", - "CREATE TYPE type2(i1 int, i2 int)", - "CREATE TYPE type1_partial(s1 text)", - "CREATE TYPE type2_partial(i1 int)", - "CREATE TABLE container(id uuid PRIMARY KEY, " - + "list frozen>, " - + "map1 frozen>>, " - + "map2 frozen>>>," - + "map3 frozen>>>" - + ")", - "CREATE TABLE container_partial(id uuid PRIMARY KEY, " - + "list frozen>, " - + "map1 frozen>>, " - + "map2 frozen>>>," - + "map3 frozen>>>" - + ")")) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : + ImmutableList.of( + "CREATE TYPE type1(s1 text, s2 text)", + "CREATE TYPE type2(i1 int, i2 int)", + "CREATE TYPE type1_partial(s1 text)", + "CREATE TYPE type2_partial(i1 int)", + "CREATE TABLE container(id uuid PRIMARY KEY, " + + "list frozen>, " + + "map1 frozen>>, " + + "map2 frozen>>>," + + "map3 frozen>>>" 
+ + ")", + "CREATE TABLE container_partial(id uuid PRIMARY KEY, " + + "list frozen>, " + + "map1 frozen>>, " + + "map2 frozen>>>," + + "map3 frozen>>>" + + ")")) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + }); UserDefinedType type1Partial = session diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java index e2a0f1e9987..3df1ccd21a7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java @@ -35,6 +35,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Mapper; import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; @@ -72,10 +73,15 @@ public static void setup() { CqlSession session = SESSION_RULE.session(); - for (String query : createStatements(CCM_RULE)) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + }); InventoryMapper inventoryMapper = new SelectCustomWhereClauseIT_InventoryMapperBuilder(session).build(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java 
b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java index 0ea07e552f7..79e4d2b33ea 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectReactiveIT.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Select; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import io.reactivex.Flowable; @@ -61,10 +62,15 @@ public class SelectReactiveIT extends InventoryITBase { public static void setup() { CqlSession session = sessionRule.session(); - for (String query : createStatements(ccmRule)) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(sessionRule.slowProfile()).build()); - } + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : createStatements(ccmRule)) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(sessionRule.slowProfile()) + .build()); + } + }); DseInventoryMapper inventoryMapper = new SelectReactiveIT_DseInventoryMapperBuilder(session).build(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java index 27b4d6e9d90..3fac733c900 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/UpdateIT.java @@ -37,6 +37,7 @@ import com.datastax.oss.driver.api.mapper.annotations.Update; import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import 
com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; @@ -65,14 +66,18 @@ public class UpdateIT extends InventoryITBase { @BeforeClass public static void setup() { CqlSession session = SESSION_RULE.session(); - - for (String query : createStatements(CCM_RULE)) { - session.execute( - SimpleStatement.builder(query).setExecutionProfile(SESSION_RULE.slowProfile()).build()); - } - session.execute( - SimpleStatement.newInstance("CREATE TABLE only_p_k(id uuid PRIMARY KEY)") - .setExecutionProfile(SESSION_RULE.slowProfile())); + SchemaChangeSynchronizer.withLock( + () -> { + for (String query : createStatements(CCM_RULE)) { + session.execute( + SimpleStatement.builder(query) + .setExecutionProfile(SESSION_RULE.slowProfile()) + .build()); + } + session.execute( + SimpleStatement.newInstance("CREATE TABLE only_p_k(id uuid PRIMARY KEY)") + .setExecutionProfile(SESSION_RULE.slowProfile())); + }); inventoryMapper = new UpdateIT_InventoryMapperBuilder(session).build(); dao = inventoryMapper.productDao(SESSION_RULE.keyspace()); diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java index 8a520488e5c..8b140930870 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java @@ -81,7 +81,7 @@ public synchronized void afterSuite() { if (running) { LOGGER.info("Stopping CCM"); CCM_BRIDGE.stop(); - CCM_BRIDGE.remove(); + CCM_BRIDGE.close(); running = false; LOGGER.info("CCM stopped"); } diff --git a/pom.xml b/pom.xml index 082daeb3566..94311719e5f 100644 --- a/pom.xml +++ b/pom.xml 
@@ -991,6 +991,17 @@ limitations under the License.]]> [11,) + + + test-jdk-14 + + [14,) + + + + -XX:+AllowRedefinitionToAddDeleteMethods + + test-jdk-17 diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java index b8b684ee5b2..65210acd2a2 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java @@ -38,7 +38,7 @@ public abstract class BaseCcmRule extends CassandraResourceRule { new Thread( () -> { try { - ccmBridge.remove(); + ccmBridge.close(); } catch (Exception e) { // silently remove as may have already been removed. } @@ -53,7 +53,7 @@ protected void before() { @Override protected void after() { - ccmBridge.remove(); + ccmBridge.close(); } @Override diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 98739e7715d..995513e3919 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -197,9 +197,10 @@ public Version getCassandraVersion() { } private String getCcmVersionString(Version version) { - // for 4.0 pre-releases, the CCM version string needs to be "4.0-alpha1" or "4.0-alpha2" - // Version.toString() always adds a patch value, even if it's not specified when parsing. - if (version.getMajor() == 4 + // for 4.0 or 5.0 pre-releases, the CCM version string needs to be "4.0-alpha1", "4.0-alpha2" or + // "5.0-beta1" Version.toString() always adds a patch value, even if it's not specified when + // parsing. 
+ if (version.getMajor() >= 4 && version.getMinor() == 0 && version.getPatch() == 0 && version.getPreReleaseLabels() != null) { @@ -292,8 +293,7 @@ public void reloadCore(int node, String keyspace, String table, boolean reindex) public void start() { if (started.compareAndSet(false, true)) { List cmdAndArgs = Lists.newArrayList("start", jvmArgs, "--wait-for-binary-proto"); - overrideJvmVersionForDseWorkloads() - .ifPresent(jvmVersion -> cmdAndArgs.add(String.format("--jvm_version=%d", jvmVersion))); + updateJvmVersion(cmdAndArgs); try { execute(cmdAndArgs.toArray(new String[0])); } catch (RuntimeException re) { @@ -324,9 +324,13 @@ public void resume(int n) { public void start(int n) { List cmdAndArgs = Lists.newArrayList("node" + n, "start"); + updateJvmVersion(cmdAndArgs); + execute(cmdAndArgs.toArray(new String[0])); + } + + private void updateJvmVersion(List cmdAndArgs) { overrideJvmVersionForDseWorkloads() .ifPresent(jvmVersion -> cmdAndArgs.add(String.format("--jvm_version=%d", jvmVersion))); - execute(cmdAndArgs.toArray(new String[0])); } public void stop(int n) { @@ -423,7 +427,9 @@ protected void processLine(String line, int logLevel) { @Override public void close() { - remove(); + if (created.compareAndSet(true, false)) { + remove(); + } } /** diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/SchemaChangeSynchronizer.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/SchemaChangeSynchronizer.java new file mode 100644 index 00000000000..093d1d3f9f9 --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/SchemaChangeSynchronizer.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.testinfra.ccm; + +import java.util.concurrent.Semaphore; + +/** + * Running multiple parallel integration tests may fail due to query timeout when trying to apply + * several schema changes at once. Limit concurrently executed DDLs to 5. + */ +public class SchemaChangeSynchronizer { + private static final Semaphore lock = new Semaphore(5); + + public static void withLock(Runnable callback) { + try { + lock.acquire(); + try { + callback.run(); + } finally { + lock.release(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new RuntimeException("Thread interrupted while waiting to obtain DDL lock", e); + } + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java index ce3903bcfcb..5396e5c6cc6 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; import com.datastax.oss.driver.api.testinfra.ccm.BaseCcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; import 
com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import java.util.Objects; import java.util.Optional; @@ -195,7 +196,10 @@ protected void after() { ScriptGraphStatement.SYNC); } if (keyspace != null) { - SessionUtils.dropKeyspace(session, keyspace, slowProfile); + SchemaChangeSynchronizer.withLock( + () -> { + SessionUtils.dropKeyspace(session, keyspace, slowProfile); + }); } session.close(); } From 811acb2fe77464f679a09226a03c1995694c51b4 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Tue, 4 Jun 2024 10:45:28 +0200 Subject: [PATCH 903/979] CASSANDRA-19635: Configure Jenkins to run integration tests with C* 5.x patch by Lukasz Antoniak; reviewed by Bret McGuire for CASSANDRA-19635 --- Jenkinsfile | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index d38b7c63849..4f1ef95d101 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -261,7 +261,7 @@ pipeline { '3.11', // Previous Apache CassandraⓇ '4.0', // Previous Apache CassandraⓇ '4.1', // Current Apache CassandraⓇ - '5.0', // Development Apache CassandraⓇ + '5.0-beta1', // Development Apache CassandraⓇ 'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Long Term Support DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise @@ -411,14 +411,14 @@ pipeline { triggers { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? 
""" - # Every weeknight (Monday - Friday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, DSE 4.8, DSE 5.0, DSE 5.1, dse-6.0.18 and DSE 6.7 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 - ### JDK11 tests against 3.11, 4.0 and DSE 6.8 - H 2 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 - # Every weekend (Sunday) around 12:00 PM noon - ### JDK14 tests against 3.11, 4.0 and DSE 6.8 - H 12 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.0 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.14 + # Every weekend (Saturday, Sunday) around 2:00 AM + ### JDK8 tests against 2.1, 3.0, 4.0, DSE 4.8, DSE 5.0, DSE 5.1, dse-6.0.18 and DSE 6.7 + H 2 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 4.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 + # Every weeknight (Monday - Friday) around 12:00 PM noon + ### JDK11 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + ### JDK17 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 """ : "") } @@ -452,8 +452,8 @@ pipeline { axes { axis { name 'SERVER_VERSION' - values '3.11', // Latest stable Apache CassandraⓇ - '4.1', // Development Apache CassandraⓇ + values '3.11', // Latest stable Apache CassandraⓇ + '4.1', // Development Apache CassandraⓇ 'dse-6.8.30' // Current DataStax Enterprise } axis { @@ -565,7 +565,7 @@ pipeline { '3.11', // Previous Apache CassandraⓇ '4.0', // Previous Apache CassandraⓇ '4.1', // Current Apache CassandraⓇ - '5.0', // Development Apache CassandraⓇ + '5.0-beta1', // Development Apache CassandraⓇ 
'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Last EOSL DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise From 85bb4065098b887d2dda26eb14423ce4fc687045 Mon Sep 17 00:00:00 2001 From: Brad Schoening Date: Tue, 4 Jun 2024 17:30:41 -0400 Subject: [PATCH 904/979] update badge URL to org.apache.cassandra/java-driver-core --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c53c8f2db29..2a30cb68c9a 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,8 @@ :warning: The java-driver has recently been donated by Datastax to The Apache Software Foundation and the Apache Cassandra project. Bear with us as we move assets and coordinates. -[![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.datastax.oss/java-driver-core) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) +[![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.apache.cassandra/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.apache.cassandra/java-driver-core) *If you're reading this on github.com, please note that this is the readme for the development version and that some features described here might not yet have been released. You can find the From d0a1e44a4415c7a0489f8c35ee9ce49e20d7bc61 Mon Sep 17 00:00:00 2001 From: Benoit Tellier Date: Sun, 22 Jan 2023 13:58:19 +0700 Subject: [PATCH 905/979] Limit calls to Conversions.resolveExecutionProfile Those repeated calls account for a non-negligible portion of my application CPU (0.6%) and can definitly be a final field so that it gets resolved only once per CqlRequestHandler. 
patch by Benoit Tellier; reviewed by Andy Tolbert, and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1623 --- .../ContinuousRequestHandlerBase.java | 15 +++--- .../core/graph/GraphRequestHandler.java | 15 +++--- .../driver/internal/core/cql/Conversions.java | 38 +++++++++++-- .../internal/core/cql/CqlPrepareHandler.java | 11 ++-- .../internal/core/cql/CqlRequestHandler.java | 54 +++++++------------ 5 files changed, 75 insertions(+), 58 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index 44df3b3a03d..9a7be344721 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -648,12 +648,13 @@ public void operationComplete(@NonNull Future future) { } } else { LOG.trace("[{}] Request sent on {}", logPrefix, channel); - if (scheduleSpeculativeExecution && Conversions.resolveIdempotence(statement, context)) { + if (scheduleSpeculativeExecution + && Conversions.resolveIdempotence(statement, executionProfile)) { int nextExecution = executionIndex + 1; // Note that `node` is the first node of the execution, it might not be the "slow" one // if there were retries, but in practice retries are rare. 
long nextDelay = - Conversions.resolveSpeculativeExecutionPolicy(statement, context) + Conversions.resolveSpeculativeExecutionPolicy(context, executionProfile) .nextExecution(node, keyspace, statement, nextExecution); if (nextDelay >= 0) { scheduleSpeculativeExecution(nextExecution, nextDelay); @@ -787,12 +788,12 @@ public void onFailure(@NonNull Throwable error) { cancelTimeout(pageTimeout); LOG.trace(String.format("[%s] Request failure", logPrefix), error); RetryVerdict verdict; - if (!Conversions.resolveIdempotence(statement, context) + if (!Conversions.resolveIdempotence(statement, executionProfile) || error instanceof FrameTooLongException) { verdict = RetryVerdict.RETHROW; } else { try { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { abort( @@ -945,7 +946,7 @@ private void processRecoverableError(@NonNull CoordinatorException error) { assert lock.isHeldByCurrentThread(); NodeMetricUpdater metricUpdater = ((DefaultNode) node).getMetricUpdater(); RetryVerdict verdict; - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; verdict = @@ -964,7 +965,7 @@ private void processRecoverableError(@NonNull CoordinatorException error) { DefaultNodeMetric.IGNORES_ON_READ_TIMEOUT); } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; - if (Conversions.resolveIdempotence(statement, context)) { + if (Conversions.resolveIdempotence(statement, executionProfile)) { verdict = retryPolicy.onWriteTimeoutVerdict( statement, @@ -999,7 +1000,7 @@ private void 
processRecoverableError(@NonNull CoordinatorException error) { DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { verdict = - Conversions.resolveIdempotence(statement, context) + Conversions.resolveIdempotence(statement, executionProfile) ? retryPolicy.onErrorResponseVerdict(statement, error, retryCount) : RetryVerdict.RETHROW; updateErrorMetrics( diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index c2298458805..702da69b855 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -557,12 +557,13 @@ public void operationComplete(Future future) { cancel(); } else { inFlightCallbacks.add(this); - if (scheduleNextExecution && Conversions.resolveIdempotence(statement, context)) { + if (scheduleNextExecution + && Conversions.resolveIdempotence(statement, executionProfile)) { int nextExecution = execution + 1; long nextDelay; try { nextDelay = - Conversions.resolveSpeculativeExecutionPolicy(statement, context) + Conversions.resolveSpeculativeExecutionPolicy(context, executionProfile) .nextExecution(node, null, statement, nextExecution); } catch (Throwable cause) { // This is a bug in the policy, but not fatal since we have at least one other @@ -678,7 +679,7 @@ private void processErrorResponse(Error errorMessage) { trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); setFinalError(statement, error, node, execution); } else { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); RetryVerdict verdict; if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; @@ -699,7 +700,7 @@ private void processErrorResponse(Error 
errorMessage) { } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; verdict = - Conversions.resolveIdempotence(statement, context) + Conversions.resolveIdempotence(statement, executionProfile) ? retryPolicy.onWriteTimeoutVerdict( statement, writeTimeout.getConsistencyLevel(), @@ -731,7 +732,7 @@ private void processErrorResponse(Error errorMessage) { DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { verdict = - Conversions.resolveIdempotence(statement, context) + Conversions.resolveIdempotence(statement, executionProfile) ? retryPolicy.onErrorResponseVerdict(statement, error, retryCount) : RetryVerdict.RETHROW; updateErrorMetrics( @@ -810,12 +811,12 @@ public void onFailure(Throwable error) { } LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); RetryVerdict verdict; - if (!Conversions.resolveIdempotence(statement, context) + if (!Conversions.resolveIdempotence(statement, executionProfile) || error instanceof FrameTooLongException) { verdict = RetryVerdict.RETHROW; } else { try { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { setFinalError( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java index 529664c6666..ff9384b3e24 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/Conversions.java @@ -535,29 +535,59 @@ public static CoordinatorException toThrowable( } } + /** Use {@link #resolveIdempotence(Request, DriverExecutionProfile)} instead. 
*/ + @Deprecated public static boolean resolveIdempotence(Request request, InternalDriverContext context) { + return resolveIdempotence(request, resolveExecutionProfile(request, context)); + } + + public static boolean resolveIdempotence( + Request request, DriverExecutionProfile executionProfile) { Boolean requestIsIdempotent = request.isIdempotent(); - DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); return (requestIsIdempotent == null) ? executionProfile.getBoolean(DefaultDriverOption.REQUEST_DEFAULT_IDEMPOTENCE) : requestIsIdempotent; } + /** Use {@link #resolveRequestTimeout(Request, DriverExecutionProfile)} instead. */ + @Deprecated public static Duration resolveRequestTimeout(Request request, InternalDriverContext context) { - DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); - return request.getTimeout() != null - ? request.getTimeout() + return resolveRequestTimeout(request, resolveExecutionProfile(request, context)); + } + + public static Duration resolveRequestTimeout( + Request request, DriverExecutionProfile executionProfile) { + Duration timeout = request.getTimeout(); + return timeout != null + ? timeout : executionProfile.getDuration(DefaultDriverOption.REQUEST_TIMEOUT); } + /** Use {@link #resolveRetryPolicy(InternalDriverContext, DriverExecutionProfile)} instead. */ + @Deprecated public static RetryPolicy resolveRetryPolicy(Request request, InternalDriverContext context) { DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); return context.getRetryPolicy(executionProfile.getName()); } + public static RetryPolicy resolveRetryPolicy( + InternalDriverContext context, DriverExecutionProfile executionProfile) { + return context.getRetryPolicy(executionProfile.getName()); + } + + /** + * Use {@link #resolveSpeculativeExecutionPolicy(InternalDriverContext, DriverExecutionProfile)} + * instead. 
+ */ + @Deprecated public static SpeculativeExecutionPolicy resolveSpeculativeExecutionPolicy( Request request, InternalDriverContext context) { DriverExecutionProfile executionProfile = resolveExecutionProfile(request, context); return context.getSpeculativeExecutionPolicy(executionProfile.getName()); } + + public static SpeculativeExecutionPolicy resolveSpeculativeExecutionPolicy( + InternalDriverContext context, DriverExecutionProfile executionProfile) { + return context.getSpeculativeExecutionPolicy(executionProfile.getName()); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index 6faa8eee59f..8fe1adb20b1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -92,6 +92,7 @@ public class CqlPrepareHandler implements Throttled { private final Timeout scheduledTimeout; private final RequestThrottler throttler; private final Boolean prepareOnAllNodes; + private final DriverExecutionProfile executionProfile; private volatile InitialPrepareCallback initialCallback; // The errors on the nodes that were already tried (lazily initialized on the first error). 
@@ -111,7 +112,7 @@ protected CqlPrepareHandler( this.initialRequest = request; this.session = session; this.context = context; - DriverExecutionProfile executionProfile = Conversions.resolveExecutionProfile(request, context); + executionProfile = Conversions.resolveExecutionProfile(request, context); this.queryPlan = context .getLoadBalancingPolicyWrapper() @@ -131,7 +132,7 @@ protected CqlPrepareHandler( }); this.timer = context.getNettyOptions().getTimer(); - Duration timeout = Conversions.resolveRequestTimeout(request, context); + Duration timeout = Conversions.resolveRequestTimeout(request, executionProfile); this.scheduledTimeout = scheduleTimeout(timeout); this.prepareOnAllNodes = executionProfile.getBoolean(DefaultDriverOption.PREPARE_ON_ALL_NODES); @@ -292,7 +293,7 @@ private CompletionStage prepareOnOtherNode(PrepareRequest request, Node no false, toPrepareMessage(request), request.getCustomPayload(), - Conversions.resolveRequestTimeout(request, context), + Conversions.resolveRequestTimeout(request, executionProfile), throttler, session.getMetricUpdater(), logPrefix); @@ -419,7 +420,7 @@ private void processErrorResponse(Error errorMessage) { } else { // Because prepare requests are known to always be idempotent, we call the retry policy // directly, without checking the flag. 
- RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(request, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); RetryVerdict verdict = retryPolicy.onErrorResponseVerdict(request, error, retryCount); processRetryVerdict(verdict, error); } @@ -457,7 +458,7 @@ public void onFailure(Throwable error) { LOG.trace("[{}] Request failure, processing: {}", logPrefix, error.toString()); RetryVerdict verdict; try { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(request, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); verdict = retryPolicy.onRequestAbortedVerdict(request, error, retryCount); } catch (Throwable cause) { setFinalError( diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index e7e334d57d8..a1c6b0e5466 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -126,6 +126,7 @@ public class CqlRequestHandler implements Throttled { private final RequestThrottler throttler; private final RequestTracker requestTracker; private final SessionMetricUpdater sessionMetricUpdater; + private final DriverExecutionProfile executionProfile; // The errors on the nodes that were already tried (lazily initialized on the first error). // We don't use a map because nodes can appear multiple times. 
@@ -167,7 +168,8 @@ protected CqlRequestHandler( this.sessionMetricUpdater = session.getMetricUpdater(); this.timer = context.getNettyOptions().getTimer(); - Duration timeout = Conversions.resolveRequestTimeout(statement, context); + this.executionProfile = Conversions.resolveExecutionProfile(initialStatement, context); + Duration timeout = Conversions.resolveRequestTimeout(statement, executionProfile); this.scheduledTimeout = scheduleTimeout(timeout); this.throttler = context.getRequestThrottler(); @@ -176,8 +178,6 @@ protected CqlRequestHandler( @Override public void onThrottleReady(boolean wasDelayed) { - DriverExecutionProfile executionProfile = - Conversions.resolveExecutionProfile(initialStatement, context); if (wasDelayed // avoid call to nanoTime() if metric is disabled: && sessionMetricUpdater.isEnabled( @@ -276,8 +276,6 @@ private void sendRequest( retryCount, scheduleNextExecution, logPrefix); - DriverExecutionProfile executionProfile = - Conversions.resolveExecutionProfile(statement, context); Message message = Conversions.toMessage(statement, executionProfile, context); channel .write(message, statement.isTracing(), statement.getCustomPayload(), nodeResponseCallback) @@ -336,37 +334,28 @@ private void setFinalResult( totalLatencyNanos = completionTimeNanos - startTimeNanos; long nodeLatencyNanos = completionTimeNanos - callback.nodeStartTimeNanos; requestTracker.onNodeSuccess( - callback.statement, - nodeLatencyNanos, - callback.executionProfile, - callback.node, - logPrefix); + callback.statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); requestTracker.onSuccess( - callback.statement, - totalLatencyNanos, - callback.executionProfile, - callback.node, - logPrefix); + callback.statement, totalLatencyNanos, executionProfile, callback.node, logPrefix); } if (sessionMetricUpdater.isEnabled( - DefaultSessionMetric.CQL_REQUESTS, callback.executionProfile.getName())) { + DefaultSessionMetric.CQL_REQUESTS, executionProfile.getName())) { 
if (completionTimeNanos == NANOTIME_NOT_MEASURED_YET) { completionTimeNanos = System.nanoTime(); totalLatencyNanos = completionTimeNanos - startTimeNanos; } sessionMetricUpdater.updateTimer( DefaultSessionMetric.CQL_REQUESTS, - callback.executionProfile.getName(), + executionProfile.getName(), totalLatencyNanos, TimeUnit.NANOSECONDS); } } // log the warnings if they have NOT been disabled if (!executionInfo.getWarnings().isEmpty() - && callback.executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) + && executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOG_WARNINGS) && LOG.isWarnEnabled()) { - logServerWarnings( - callback.statement, callback.executionProfile, executionInfo.getWarnings()); + logServerWarnings(callback.statement, executionProfile, executionInfo.getWarnings()); } } catch (Throwable error) { setFinalError(callback.statement, error, callback.node, -1); @@ -418,21 +407,17 @@ private ExecutionInfo buildExecutionInfo( schemaInAgreement, session, context, - callback.executionProfile); + executionProfile); } @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { - DriverExecutionProfile executionProfile = - Conversions.resolveExecutionProfile(initialStatement, context); sessionMetricUpdater.incrementCounter( DefaultSessionMetric.THROTTLING_ERRORS, executionProfile.getName()); setFinalError(initialStatement, error, null, -1); } private void setFinalError(Statement statement, Throwable error, Node node, int execution) { - DriverExecutionProfile executionProfile = - Conversions.resolveExecutionProfile(statement, context); if (error instanceof DriverException) { ((DriverException) error) .setExecutionInfo( @@ -475,7 +460,6 @@ private class NodeResponseCallback private final long nodeStartTimeNanos = System.nanoTime(); private final Statement statement; - private final DriverExecutionProfile executionProfile; private final Node node; private final Queue queryPlan; private final DriverChannel channel; @@ -505,7 
+489,6 @@ private NodeResponseCallback( this.retryCount = retryCount; this.scheduleNextExecution = scheduleNextExecution; this.logPrefix = logPrefix + "|" + execution; - this.executionProfile = Conversions.resolveExecutionProfile(statement, context); } // this gets invoked once the write completes. @@ -544,12 +527,13 @@ public void operationComplete(Future future) throws Exception { cancel(); } else { inFlightCallbacks.add(this); - if (scheduleNextExecution && Conversions.resolveIdempotence(statement, context)) { + if (scheduleNextExecution + && Conversions.resolveIdempotence(statement, executionProfile)) { int nextExecution = execution + 1; long nextDelay; try { nextDelay = - Conversions.resolveSpeculativeExecutionPolicy(statement, context) + Conversions.resolveSpeculativeExecutionPolicy(context, executionProfile) .nextExecution(node, keyspace, statement, nextExecution); } catch (Throwable cause) { // This is a bug in the policy, but not fatal since we have at least one other @@ -697,7 +681,7 @@ private void processErrorResponse(Error errorMessage) { true, reprepareMessage, repreparePayload.customPayload, - Conversions.resolveRequestTimeout(statement, context), + Conversions.resolveRequestTimeout(statement, executionProfile), throttler, sessionMetricUpdater, logPrefix); @@ -767,7 +751,7 @@ private void processErrorResponse(Error errorMessage) { trackNodeError(node, error, NANOTIME_NOT_MEASURED_YET); setFinalError(statement, error, node, execution); } else { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); RetryVerdict verdict; if (error instanceof ReadTimeoutException) { ReadTimeoutException readTimeout = (ReadTimeoutException) error; @@ -788,7 +772,7 @@ private void processErrorResponse(Error errorMessage) { } else if (error instanceof WriteTimeoutException) { WriteTimeoutException writeTimeout = (WriteTimeoutException) error; verdict = - 
Conversions.resolveIdempotence(statement, context) + Conversions.resolveIdempotence(statement, executionProfile) ? retryPolicy.onWriteTimeoutVerdict( statement, writeTimeout.getConsistencyLevel(), @@ -820,7 +804,7 @@ private void processErrorResponse(Error errorMessage) { DefaultNodeMetric.IGNORES_ON_UNAVAILABLE); } else { verdict = - Conversions.resolveIdempotence(statement, context) + Conversions.resolveIdempotence(statement, executionProfile) ? retryPolicy.onErrorResponseVerdict(statement, error, retryCount) : RetryVerdict.RETHROW; updateErrorMetrics( @@ -899,12 +883,12 @@ public void onFailure(Throwable error) { } LOG.trace("[{}] Request failure, processing: {}", logPrefix, error); RetryVerdict verdict; - if (!Conversions.resolveIdempotence(statement, context) + if (!Conversions.resolveIdempotence(statement, executionProfile) || error instanceof FrameTooLongException) { verdict = RetryVerdict.RETHROW; } else { try { - RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(statement, context); + RetryPolicy retryPolicy = Conversions.resolveRetryPolicy(context, executionProfile); verdict = retryPolicy.onRequestAbortedVerdict(statement, error, retryCount); } catch (Throwable cause) { setFinalError( From a17f7be614a09ab81bc2982b7f7ab3a123b4ab28 Mon Sep 17 00:00:00 2001 From: Stefan Miklosovic Date: Thu, 22 Aug 2024 14:28:46 +0200 Subject: [PATCH 906/979] autolink JIRA tickets in commit messages patch by Stefan Miklosovic; reviewed by Michael Semb Wever for CASSANDRA-19854 --- .asf.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.asf.yaml b/.asf.yaml index 5ebca4b6e33..ad58f536398 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -19,6 +19,7 @@ notifications: commits: commits@cassandra.apache.org issues: commits@cassandra.apache.org pullrequests: pr@cassandra.apache.org + jira_options: link worklog github: description: "Java Driver for Apache Cassandra®" @@ -31,6 +32,5 @@ github: wiki: false issues: false projects: false - -notifications: - 
jira_options: link worklog + autolink_jira: + - CASSANDRA From 0962794b2ec724d9939cd47380e68b979b46f693 Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Sun, 20 Nov 2022 19:14:07 -0800 Subject: [PATCH 907/979] Don't return empty routing key when partition key is unbound DefaultBoundStatement#getRoutingKey has logic to infer the routing key when no one has explicitly called setRoutingKey or otherwise set the routing key on the statement. It however doesn't check for cases where nothing has been bound yet on the statement. This causes more problems if the user decides to get a BoundStatementBuilder from the PreparedStatement, set some fields on it, and then copy it by constructing new BoundStatementBuilder objects with the BoundStatement as a parameter, since the empty ByteBuffer gets copied to all bound statements, resulting in all requests being targeted to the same Cassandra node in a token-aware load balancing policy. patch by Ammar Khaku; reviewed by Andy Tolbert, and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1620 --- .../core/cql/DefaultBoundStatement.java | 3 ++- .../driver/core/cql/PreparedStatementIT.java | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java index fb6b8fd7b27..3cf99c1be6e 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/DefaultBoundStatement.java @@ -360,7 +360,8 @@ public ByteBuffer getRoutingKey() { if (indices.isEmpty()) { return null; } else if (indices.size() == 1) { - return getBytesUnsafe(indices.get(0)); + int index = indices.get(0); + return isSet(index) ? 
getBytesUnsafe(index) : null; } else { ByteBuffer[] components = new ByteBuffer[indices.size()]; for (int i = 0; i < components.length; i++) { diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java index c0df01e3519..5671a7684e5 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementIT.java @@ -527,6 +527,25 @@ private void should_infer_routing_information_when_partition_key_is_bound(String assertThat(tokenFactory.hash(boundStatement.getRoutingKey())).isEqualTo(expectedToken); } + @Test + public void should_return_null_routing_information_when_single_partition_key_is_unbound() { + should_return_null_routing_information_when_single_partition_key_is_unbound( + "SELECT a FROM prepared_statement_test WHERE a = ?"); + should_return_null_routing_information_when_single_partition_key_is_unbound( + "INSERT INTO prepared_statement_test (a) VALUES (?)"); + should_return_null_routing_information_when_single_partition_key_is_unbound( + "UPDATE prepared_statement_test SET b = 1 WHERE a = ?"); + should_return_null_routing_information_when_single_partition_key_is_unbound( + "DELETE FROM prepared_statement_test WHERE a = ?"); + } + + private void should_return_null_routing_information_when_single_partition_key_is_unbound( + String queryString) { + CqlSession session = sessionRule.session(); + BoundStatement boundStatement = session.prepare(queryString).bind(); + assertThat(boundStatement.getRoutingKey()).isNull(); + } + private static Iterable firstPageOf(CompletionStage stage) { return CompletableFutures.getUninterruptibly(stage).currentPage(); } From e6ae1933667066bf16ac3ac5203a2a6fdadd1946 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Fri, 16 Aug 2024 11:06:09 +0200 Subject: [PATCH 908/979] 
JAVA-3167: CompletableFutures.allSuccessful() may return never completed future patch by Lukasz Antoniak; reviewed by Andy Tolbert, and Bret McGuire for JAVA-3167 --- .../util/concurrent/CompletableFutures.java | 5 +++- .../concurrent/CompletableFuturesTest.java | 29 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java index 03265bd1d77..275b2ddfeef 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFutures.java @@ -100,7 +100,10 @@ public static CompletionStage allSuccessful(List> i } else { Throwable finalError = errors.get(0); for (int i = 1; i < errors.size(); i++) { - finalError.addSuppressed(errors.get(i)); + Throwable suppressedError = errors.get(i); + if (finalError != suppressedError) { + finalError.addSuppressed(suppressedError); + } } result.completeExceptionally(finalError); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java new file mode 100644 index 00000000000..8a710e02d50 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java @@ -0,0 +1,29 @@ +package com.datastax.oss.driver.internal.core.util.concurrent; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.Arrays; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import 
org.junit.Test; + +public class CompletableFuturesTest { + @Test + public void should_not_suppress_identical_exceptions() throws Exception { + RuntimeException error = new RuntimeException(); + CompletableFuture future1 = new CompletableFuture<>(); + future1.completeExceptionally(error); + CompletableFuture future2 = new CompletableFuture<>(); + future2.completeExceptionally(error); + try { + // if timeout exception is thrown, it indicates that CompletableFutures.allSuccessful() + // did not complete the returned future and potentially caller will wait infinitely + CompletableFutures.allSuccessful(Arrays.asList(future1, future2)) + .toCompletableFuture() + .get(1, TimeUnit.SECONDS); + } catch (ExecutionException e) { + assertThat(e.getCause()).isEqualTo(error); + } + } +} From 5ee12acfff720047db7611a6f54450c1646031a3 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Tue, 3 Sep 2024 16:06:04 -0500 Subject: [PATCH 909/979] ninja-fix Various test fixes --- .../concurrent/CompletableFuturesTest.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java index 8a710e02d50..04f96f185fd 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/concurrent/CompletableFuturesTest.java @@ -1,6 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package com.datastax.oss.driver.internal.core.util.concurrent; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; import java.util.Arrays; import java.util.concurrent.CompletableFuture; @@ -22,6 +40,7 @@ public void should_not_suppress_identical_exceptions() throws Exception { CompletableFutures.allSuccessful(Arrays.asList(future1, future2)) .toCompletableFuture() .get(1, TimeUnit.SECONDS); + fail(); } catch (ExecutionException e) { assertThat(e.getCause()).isEqualTo(error); } From 9cfb4f6712e392c1b6c87db268565fd3b27d0c5c Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Thu, 5 Sep 2024 13:04:52 +0200 Subject: [PATCH 910/979] Run integration tests with DSE 6.9.0 patch by Lukasz Antoniak; reviewed by Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1955 --- Jenkinsfile | 19 +++++++++++++------ .../datastax/oss/driver/api/core/Version.java | 1 + .../driver/api/testinfra/ccm/CcmBridge.java | 3 ++- 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4f1ef95d101..4cc20d79604 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -268,6 +268,7 @@ pipeline { 'dse-6.0.18', // Previous DataStax Enterprise 'dse-6.7.17', // Previous DataStax Enterprise 'dse-6.8.30', // Current DataStax Enterprise + 'dse-6.9.0', // Current DataStax Enterprise 'ALL'], description: '''Apache Cassandra® and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS builds

          Driver versionTinkerPop version
          4.17.03.5.3
          4.16.03.5.3
          4.15.03.5.3
          4.14.13.5.3
          4.0Apache Cassandra® v4.x (CURRENTLY UNDER DEVELOPMENT)Apache Cassandra® v4.0.x
          4.1Apache Cassandra® v4.1.x
          dse-4.8.16
          @@ -325,6 +326,10 @@ pipeline { + + + +
          dse-6.8.30 DataStax Enterprise v6.8.x
          dse-6.9.0DataStax Enterprise v6.9.x
          ''') choice( name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_JABBA_VERSION', @@ -416,9 +421,9 @@ pipeline { H 2 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 4.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 # Every weeknight (Monday - Friday) around 12:00 PM noon ### JDK11 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 ### JDK17 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 """ : "") } @@ -452,9 +457,10 @@ pipeline { axes { axis { name 'SERVER_VERSION' - values '3.11', // Latest stable Apache CassandraⓇ - '4.1', // Development Apache CassandraⓇ - 'dse-6.8.30' // Current DataStax Enterprise + values '3.11', // Latest stable Apache CassandraⓇ + '4.1', // Development Apache CassandraⓇ + 'dse-6.8.30', // Current DataStax Enterprise + 'dse-6.9.0' // Current DataStax Enterprise } axis { name 'JABBA_VERSION' @@ -571,7 +577,8 @@ pipeline { 'dse-5.1.35', // Legacy DataStax Enterprise 'dse-6.0.18', // Previous DataStax Enterprise 'dse-6.7.17', // Previous DataStax Enterprise - 'dse-6.8.30' // Current DataStax Enterprise + 'dse-6.8.30', // Current DataStax Enterprise + 'dse-6.9.0' // Current DataStax Enterprise } } when { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index 3f12c54faf7..4de006da268 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -56,6 +56,7 @@ public class Version implements Comparable, Serializable { @NonNull public static final Version V5_0_0 = Objects.requireNonNull(parse("5.0.0")); @NonNull public static final Version V6_7_0 = Objects.requireNonNull(parse("6.7.0")); @NonNull public static final Version V6_8_0 = Objects.requireNonNull(parse("6.8.0")); + @NonNull public static final Version V6_9_0 = Objects.requireNonNull(parse("6.9.0")); private final int major; private final int minor; diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 995513e3919..5b0c114a5fe 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -479,7 +479,8 @@ private Optional overrideJvmVersionForDseWorkloads() { return Optional.empty(); } - if (getDseVersion().get().compareTo(Version.parse("6.8.19")) < 0) { + if (getDseVersion().get().compareTo(Version.V6_9_0) >= 0) { + // DSE 6.9.0 supports only JVM 11 onwards (also with graph workload) return Optional.empty(); } From c961012000efffd3d50476d4549487f7fc538c01 Mon Sep 17 00:00:00 2001 From: Henry Hughes Date: Wed, 23 Aug 2023 15:35:42 -0700 Subject: [PATCH 911/979] JAVA-3117: Call CcmCustomRule#after if CcmCustomRule#before fails to allow subsequent tests to run patch by Henry Hughes; reviewed by Alexandre Dutra and Andy Tolbert for JAVA-3117 --- .../api/testinfra/ccm/CustomCcmRule.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java index 
58bafd438f8..cf150b12f55 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java @@ -18,6 +18,8 @@ package com.datastax.oss.driver.api.testinfra.ccm; import java.util.concurrent.atomic.AtomicReference; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A rule that creates a ccm cluster that can be used in a test. This should be used if you plan on @@ -30,6 +32,7 @@ */ public class CustomCcmRule extends BaseCcmRule { + private static final Logger LOG = LoggerFactory.getLogger(CustomCcmRule.class); private static final AtomicReference CURRENT = new AtomicReference<>(); CustomCcmRule(CcmBridge ccmBridge) { @@ -39,7 +42,20 @@ public class CustomCcmRule extends BaseCcmRule { @Override protected void before() { if (CURRENT.get() == null && CURRENT.compareAndSet(null, this)) { - super.before(); + try { + super.before(); + } catch (Exception e) { + // ExternalResource will not call after() when before() throws an exception + // Let's try and clean up and release the lock we have in CURRENT + LOG.warn( + "Error in CustomCcmRule before() method, attempting to clean up leftover state", e); + try { + after(); + } catch (Exception e1) { + LOG.warn("Error cleaning up CustomCcmRule before() failure", e1); + } + throw e; + } } else if (CURRENT.get() != this) { throw new IllegalStateException( "Attempting to use a Ccm rule while another is in use. 
This is disallowed"); From 77805f5103354cadb360384f4f41e0eca73d72f4 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Mon, 2 Sep 2024 06:44:53 +0200 Subject: [PATCH 912/979] JAVA-3149: Support request cancellation in request throttler patch by Lukasz Antoniak; reviewed by Andy Tolbert and Chris Lohfink for JAVA-3149 --- .../ContinuousRequestHandlerBase.java | 1 + .../core/graph/GraphRequestHandler.java | 1 + .../session/throttling/RequestThrottler.java | 8 +++++ .../internal/core/cql/CqlPrepareHandler.java | 1 + .../internal/core/cql/CqlRequestHandler.java | 1 + .../ConcurrencyLimitingRequestThrottler.java | 16 +++++++++ .../PassThroughRequestThrottler.java | 5 +++ .../RateLimitingRequestThrottler.java | 12 +++++++ ...ncurrencyLimitingRequestThrottlerTest.java | 5 +++ .../RateLimitingRequestThrottlerTest.java | 13 ++++++- .../driver/core/throttling/ThrottlingIT.java | 34 +++++++++++++++---- 11 files changed, 89 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java index 9a7be344721..0453022cb6a 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java +++ b/core/src/main/java/com/datastax/dse/driver/internal/core/cql/continuous/ContinuousRequestHandlerBase.java @@ -410,6 +410,7 @@ public void cancel() { cancelScheduledTasks(null); cancelGlobalTimeout(); + throttler.signalCancel(this); } private void cancelGlobalTimeout() { diff --git a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java index 702da69b855..5c9ceb00df2 100644 --- a/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java +++ 
b/core/src/main/java/com/datastax/dse/driver/internal/core/graph/GraphRequestHandler.java @@ -153,6 +153,7 @@ public class GraphRequestHandler implements Throttled { try { if (t instanceof CancellationException) { cancelScheduledTasks(); + context.getRequestThrottler().signalCancel(this); } } catch (Throwable t2) { Loggers.warnWithException(LOG, "[{}] Uncaught exception", logPrefix, t2); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java index cb55fac336c..7e2b41ebbdb 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java @@ -56,4 +56,12 @@ public interface RequestThrottler extends Closeable { * perform time-based eviction on pending requests. */ void signalTimeout(@NonNull Throttled request); + + /** + * Signals that a request has been cancelled. This indicates to the throttler that another request + * might be started. 
+ */ + default void signalCancel(@NonNull Throttled request) { + // no-op for backward compatibility purposes + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java index 8fe1adb20b1..1ee1f303ab2 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareHandler.java @@ -124,6 +124,7 @@ protected CqlPrepareHandler( try { if (t instanceof CancellationException) { cancelTimeout(); + context.getRequestThrottler().signalCancel(this); } } catch (Throwable t2) { Loggers.warnWithException(LOG, "[{}] Uncaught exception", logPrefix, t2); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index a1c6b0e5466..0808bdce63f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -152,6 +152,7 @@ protected CqlRequestHandler( try { if (t instanceof CancellationException) { cancelScheduledTasks(); + context.getRequestThrottler().signalCancel(this); } } catch (Throwable t2) { Loggers.warnWithException(LOG, "[{}] Uncaught exception", logPrefix, t2); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java index e8f27467c6f..438bed0953b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java @@ 
-145,6 +145,22 @@ public void signalTimeout(@NonNull Throttled request) { } } + @Override + public void signalCancel(@NonNull Throttled request) { + lock.lock(); + try { + if (!closed) { + if (queue.remove(request)) { // The request has been cancelled before it was active + LOG.trace("[{}] Removing cancelled request from the queue", logPrefix); + } else { + onRequestDone(); + } + } + } finally { + lock.unlock(); + } + } + @SuppressWarnings("GuardedBy") // this method is only called with the lock held private void onRequestDone() { assert lock.isHeldByCurrentThread(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/PassThroughRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/PassThroughRequestThrottler.java index 714c712a4e8..2210e4b26f1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/PassThroughRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/PassThroughRequestThrottler.java @@ -69,6 +69,11 @@ public void signalTimeout(@NonNull Throttled request) { // nothing to do } + @Override + public void signalCancel(@NonNull Throttled request) { + // nothing to do + } + @Override public void close() throws IOException { // nothing to do diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottler.java index 6536804ffee..03a693dc0fe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottler.java @@ -198,6 +198,18 @@ public void signalTimeout(@NonNull Throttled request) { } } + @Override + public void signalCancel(@NonNull Throttled request) { + 
lock.lock(); + try { + if (!closed && queue.remove(request)) { // The request has been cancelled before it was active + LOG.trace("[{}] Removing cancelled request from the queue", logPrefix); + } + } finally { + lock.unlock(); + } + } + @Override public void close() { lock.lock(); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java index b587ac3daa2..c01b26c1e9f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java @@ -88,6 +88,11 @@ public void should_allow_new_request_when_active_one_times_out() { should_allow_new_request_when_active_one_completes(throttler::signalTimeout); } + @Test + public void should_allow_new_request_when_active_one_canceled() { + should_allow_new_request_when_active_one_completes(throttler::signalCancel); + } + private void should_allow_new_request_when_active_one_completes( Consumer completeCallback) { // Given diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java index 7336fb447b6..0e0fe7c1c65 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; 
+import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.context.NettyOptions; import com.datastax.oss.driver.internal.core.util.concurrent.ScheduledTaskCapturingEventLoop; @@ -33,6 +34,7 @@ import java.time.Duration; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -164,6 +166,15 @@ public void should_reject_when_queue_is_full() { @Test public void should_remove_timed_out_request_from_queue() { + testRemoveInvalidEventFromQueue(throttler::signalTimeout); + } + + @Test + public void should_remove_cancel_request_from_queue() { + testRemoveInvalidEventFromQueue(throttler::signalCancel); + } + + private void testRemoveInvalidEventFromQueue(Consumer completeCallback) { // Given for (int i = 0; i < 5; i++) { throttler.register(new MockThrottled()); @@ -174,7 +185,7 @@ public void should_remove_timed_out_request_from_queue() { throttler.register(queued2); // When - throttler.signalTimeout(queued1); + completeCallback.accept(queued1); // Then assertThatStage(queued2.started).isNotDone(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java index a6e7295eb09..6fa1a37355b 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/throttling/ThrottlingIT.java @@ -24,13 +24,16 @@ import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.AsyncResultSet; import 
com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import com.datastax.oss.driver.categories.ParallelizableTests; import com.datastax.oss.driver.internal.core.session.throttling.ConcurrencyLimitingRequestThrottler; import com.datastax.oss.simulacron.common.cluster.ClusterSpec; import com.datastax.oss.simulacron.common.stubbing.PrimeDsl; +import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; +import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; @@ -39,21 +42,20 @@ public class ThrottlingIT { private static final String QUERY = "select * from foo"; + private static final int maxConcurrentRequests = 10; + private static final int maxQueueSize = 10; @Rule public SimulacronRule simulacron = new SimulacronRule(ClusterSpec.builder().withNodes(1)); - @Test - public void should_reject_request_when_throttling_by_concurrency() { + private DriverConfigLoader loader = null; + @Before + public void setUp() { // Add a delay so that requests don't complete during the test simulacron .cluster() .prime(PrimeDsl.when(QUERY).then(PrimeDsl.noRows()).delay(5, TimeUnit.SECONDS)); - - int maxConcurrentRequests = 10; - int maxQueueSize = 10; - - DriverConfigLoader loader = + loader = SessionUtils.configLoaderBuilder() .withClass( DefaultDriverOption.REQUEST_THROTTLER_CLASS, @@ -63,7 +65,10 @@ public void should_reject_request_when_throttling_by_concurrency() { maxConcurrentRequests) .withInt(DefaultDriverOption.REQUEST_THROTTLER_MAX_QUEUE_SIZE, maxQueueSize) .build(); + } + @Test + public void should_reject_request_when_throttling_by_concurrency() { try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { // Saturate the session and fill the queue @@ -81,4 +86,19 @@ public void should_reject_request_when_throttling_by_concurrency() { + "(concurrent requests: 10, queue size: 10)"); } } + + @Test + 
public void should_propagate_cancel_to_throttler() { + try (CqlSession session = SessionUtils.newSession(simulacron, loader)) { + + // Try to saturate the session and fill the queue + for (int i = 0; i < maxConcurrentRequests + maxQueueSize; i++) { + CompletionStage future = session.executeAsync(QUERY); + future.toCompletableFuture().cancel(true); + } + + // The next query should be successful, because the previous queries were cancelled + session.execute(QUERY); + } + } } From 4fb51081a3a71b2017ddc3f43a7945e3a9d19e25 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Mon, 10 Jun 2024 12:42:29 +0200 Subject: [PATCH 913/979] Fix C* 3.0 tests failing on Jenkins patch by Lukasz Antoniak; reviewed by Bret McGuire reference: #1939 --- .../test/java/com/datastax/oss/driver/mapper/DeleteIT.java | 3 +-- .../com/datastax/oss/driver/mapper/InventoryITBase.java | 6 +++++- .../oss/driver/mapper/SelectCustomWhereClauseIT.java | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java index 0acdbeae53a..03e3597501c 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteIT.java @@ -60,8 +60,7 @@ description = ">= in WHERE clause not supported in legacy versions") public class DeleteIT extends InventoryITBase { - private static CustomCcmRule CCM_RULE = - CustomCcmRule.builder().withCassandraConfiguration("enable_sasi_indexes", "true").build(); + private static CustomCcmRule CCM_RULE = CustomCcmRule.builder().build(); private static final SessionRule SESSION_RULE = SessionRule.builder(CCM_RULE).build(); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java index 
2be025b3739..9495003ae49 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java @@ -59,6 +59,10 @@ public abstract class InventoryITBase { new ProductSale(MP3_DOWNLOAD.getId(), DATE_3, 7, Uuids.startOf(915192000), 0.99, 12); protected static List createStatements(BaseCcmRule ccmRule) { + return createStatements(ccmRule, false); + } + + protected static List createStatements(BaseCcmRule ccmRule, boolean requiresSasiIndex) { ImmutableList.Builder builder = ImmutableList.builder() .add( @@ -71,7 +75,7 @@ protected static List createStatements(BaseCcmRule ccmRule) { "CREATE TABLE product_sale(id uuid, day text, ts uuid, customer_id int, price " + "double, count int, PRIMARY KEY ((id, day), customer_id, ts))"); - if (supportsSASI(ccmRule) && !isSasiBroken(ccmRule)) { + if (requiresSasiIndex && supportsSASI(ccmRule) && !isSasiBroken(ccmRule)) { builder.add( "CREATE CUSTOM INDEX product_description ON product(description) " + "USING 'org.apache.cassandra.index.sasi.SASIIndex' " diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java index 3df1ccd21a7..1f1b92b8623 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/SelectCustomWhereClauseIT.java @@ -75,7 +75,7 @@ public static void setup() { SchemaChangeSynchronizer.withLock( () -> { - for (String query : createStatements(CCM_RULE)) { + for (String query : createStatements(CCM_RULE, true)) { session.execute( SimpleStatement.builder(query) .setExecutionProfile(SESSION_RULE.slowProfile()) From 6d3ba47631ebde78460168a2d33c4facde0bd731 Mon Sep 17 00:00:00 2001 From: Jason Koch Date: Mon, 12 Aug 2024 22:52:13 -0700 Subject: [PATCH 
914/979] Reduce lock held duration in ConcurrencyLimitingRequestThrottler It might take some (small) time for callback handling when the throttler request proceeds to submission. Before this change, the throttler proceed request will happen while holding the lock, preventing other tasks from proceeding when there is spare capacity and even preventing tasks from enqueuing until the callback completes. By tracking the expected outcome, we can perform the callback outside of the lock. This means that request registration and submission can proceed even when a long callback is being processed. patch by Jason Koch; Reviewed by Andy Tolbert and Chris Lohfink for CASSANDRA-19922 --- .../ConcurrencyLimitingRequestThrottler.java | 39 ++++- ...ncurrencyLimitingRequestThrottlerTest.java | 143 ++++++++++++++++-- .../session/throttling/MockThrottled.java | 30 +++- .../RateLimitingRequestThrottlerTest.java | 30 ++-- 4 files changed, 206 insertions(+), 36 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java index 438bed0953b..ffe0ffe9650 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java @@ -25,6 +25,7 @@ import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayDeque; import java.util.Deque; import java.util.concurrent.locks.ReentrantLock; @@ -87,6 +88,8 @@ public ConcurrencyLimitingRequestThrottler(DriverContext context) { @Override public void register(@NonNull 
Throttled request) { + boolean notifyReadyRequired = false; + lock.lock(); try { if (closed) { @@ -96,7 +99,7 @@ public void register(@NonNull Throttled request) { // We have capacity for one more concurrent request LOG.trace("[{}] Starting newly registered request", logPrefix); concurrentRequests += 1; - request.onThrottleReady(false); + notifyReadyRequired = true; } else if (queue.size() < maxQueueSize) { LOG.trace("[{}] Enqueuing request", logPrefix); queue.add(request); @@ -112,16 +115,26 @@ public void register(@NonNull Throttled request) { } finally { lock.unlock(); } + + // no need to hold the lock while allowing the task to progress + if (notifyReadyRequired) { + request.onThrottleReady(false); + } } @Override public void signalSuccess(@NonNull Throttled request) { + Throttled nextRequest = null; lock.lock(); try { - onRequestDone(); + nextRequest = onRequestDoneAndDequeNext(); } finally { lock.unlock(); } + + if (nextRequest != null) { + nextRequest.onThrottleReady(true); + } } @Override @@ -131,48 +144,62 @@ public void signalError(@NonNull Throttled request, @NonNull Throwable error) { @Override public void signalTimeout(@NonNull Throttled request) { + Throttled nextRequest = null; lock.lock(); try { if (!closed) { if (queue.remove(request)) { // The request timed out before it was active LOG.trace("[{}] Removing timed out request from the queue", logPrefix); } else { - onRequestDone(); + nextRequest = onRequestDoneAndDequeNext(); } } } finally { lock.unlock(); } + + if (nextRequest != null) { + nextRequest.onThrottleReady(true); + } } @Override public void signalCancel(@NonNull Throttled request) { + Throttled nextRequest = null; lock.lock(); try { if (!closed) { if (queue.remove(request)) { // The request has been cancelled before it was active LOG.trace("[{}] Removing cancelled request from the queue", logPrefix); } else { - onRequestDone(); + nextRequest = onRequestDoneAndDequeNext(); } } } finally { lock.unlock(); } + + if (nextRequest != null) { + 
nextRequest.onThrottleReady(true); + } } @SuppressWarnings("GuardedBy") // this method is only called with the lock held - private void onRequestDone() { + @Nullable + private Throttled onRequestDoneAndDequeNext() { assert lock.isHeldByCurrentThread(); if (!closed) { if (queue.isEmpty()) { concurrentRequests -= 1; } else { LOG.trace("[{}] Starting dequeued request", logPrefix); - queue.poll().onThrottleReady(true); // don't touch concurrentRequests since we finished one but started another + return queue.poll(); } } + + // no next task was dequeued + return null; } @Override diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java index c01b26c1e9f..7eb682070cd 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottlerTest.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.session.throttling.Throttled; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.List; +import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; import org.junit.Before; import org.junit.Test; @@ -67,7 +68,7 @@ public void should_start_immediately_when_under_capacity() { throttler.register(request); // Then - assertThatStage(request.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(request.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); assertThat(throttler.getConcurrentRequests()).isEqualTo(1); assertThat(throttler.getQueue()).isEmpty(); } @@ -98,7 +99,7 @@ private void should_allow_new_request_when_active_one_completes( // Given MockThrottled first = new MockThrottled(); 
throttler.register(first); - assertThatStage(first.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(first.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); for (int i = 0; i < 4; i++) { // fill to capacity throttler.register(new MockThrottled()); } @@ -113,7 +114,7 @@ private void should_allow_new_request_when_active_one_completes( throttler.register(incoming); // Then - assertThatStage(incoming.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(incoming.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); assertThat(throttler.getConcurrentRequests()).isEqualTo(5); assertThat(throttler.getQueue()).isEmpty(); } @@ -132,7 +133,7 @@ public void should_enqueue_when_over_capacity() { throttler.register(incoming); // Then - assertThatStage(incoming.started).isNotDone(); + assertThatStage(incoming.ended).isNotDone(); assertThat(throttler.getConcurrentRequests()).isEqualTo(5); assertThat(throttler.getQueue()).containsExactly(incoming); } @@ -157,20 +158,20 @@ private void should_dequeue_when_active_completes(Consumer completeCa // Given MockThrottled first = new MockThrottled(); throttler.register(first); - assertThatStage(first.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(first.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); for (int i = 0; i < 4; i++) { throttler.register(new MockThrottled()); } MockThrottled incoming = new MockThrottled(); throttler.register(incoming); - assertThatStage(incoming.started).isNotDone(); + assertThatStage(incoming.ended).isNotDone(); // When completeCallback.accept(first); // Then - assertThatStage(incoming.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); + assertThatStage(incoming.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); assertThat(throttler.getConcurrentRequests()).isEqualTo(5); assertThat(throttler.getQueue()).isEmpty(); } @@ -189,7 +190,7 
@@ public void should_reject_when_queue_is_full() { throttler.register(incoming); // Then - assertThatStage(incoming.started) + assertThatStage(incoming.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } @@ -208,7 +209,7 @@ public void should_remove_timed_out_request_from_queue() { throttler.signalTimeout(queued1); // Then - assertThatStage(queued2.started).isNotDone(); + assertThatStage(queued2.ended).isNotDone(); assertThat(throttler.getConcurrentRequests()).isEqualTo(5); assertThat(throttler.getQueue()).hasSize(1); } @@ -223,7 +224,7 @@ public void should_reject_enqueued_when_closing() { for (int i = 0; i < 10; i++) { MockThrottled request = new MockThrottled(); throttler.register(request); - assertThatStage(request.started).isNotDone(); + assertThatStage(request.ended).isNotDone(); enqueued.add(request); } @@ -232,7 +233,7 @@ public void should_reject_enqueued_when_closing() { // Then for (MockThrottled request : enqueued) { - assertThatStage(request.started) + assertThatStage(request.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } @@ -241,7 +242,125 @@ public void should_reject_enqueued_when_closing() { throttler.register(request); // Then - assertThatStage(request.started) + assertThatStage(request.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } + + @Test + public void should_run_throttle_callbacks_concurrently() throws InterruptedException { + // Given + + // a task is enqueued, which when in onThrottleReady, will stall latch countDown()ed + // register() should automatically start onThrottleReady on same thread + + // start a parallel thread + CountDownLatch firstRelease = new CountDownLatch(1); + MockThrottled first = new MockThrottled(firstRelease); + Runnable r = + () -> { + throttler.register(first); + first.ended.toCompletableFuture().thenRun(() -> throttler.signalSuccess(first)); + }; + Thread t = new Thread(r); + 
t.start(); + + // wait for the registration threads to reach await state + assertThatStage(first.started).isSuccess(); + assertThatStage(first.ended).isNotDone(); + + // When + // we concurrently submit a second shorter task + MockThrottled second = new MockThrottled(); + // (on a second thread, so that we can join and force a timeout in case + // registration is delayed) + Thread t2 = new Thread(() -> throttler.register(second)); + t2.start(); + t2.join(1_000); + + // Then + // registration will trigger callback, should complete ~immediately + assertThatStage(second.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + // first should still be unfinished + assertThatStage(first.started).isDone(); + assertThatStage(first.ended).isNotDone(); + // now finish, and verify + firstRelease.countDown(); + assertThatStage(first.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + + t.join(1_000); + } + + @Test + public void should_enqueue_tasks_quickly_when_callbacks_blocked() throws InterruptedException { + // Given + + // Multiple tasks are registered, up to the limit, and proceed into their + // callback + + // start five parallel threads + final int THREADS = 5; + Thread[] threads = new Thread[THREADS]; + CountDownLatch[] latches = new CountDownLatch[THREADS]; + MockThrottled[] throttled = new MockThrottled[THREADS]; + for (int i = 0; i < threads.length; i++) { + latches[i] = new CountDownLatch(1); + final MockThrottled itThrottled = new MockThrottled(latches[i]); + throttled[i] = itThrottled; + threads[i] = + new Thread( + () -> { + throttler.register(itThrottled); + itThrottled + .ended + .toCompletableFuture() + .thenRun(() -> throttler.signalSuccess(itThrottled)); + }); + threads[i].start(); + } + + // wait for the registration threads to be launched + // they are all waiting now + for (int i = 0; i < throttled.length; i++) { + assertThatStage(throttled[i].started).isSuccess(); + assertThatStage(throttled[i].ended).isNotDone(); + } + + 
// When + // we concurrently submit another task + MockThrottled last = new MockThrottled(); + throttler.register(last); + + // Then + // registration will enqueue the callback, and it should not + // take any time to proceed (ie: we should not be blocked) + // and there should be an element in the queue + assertThatStage(last.started).isNotDone(); + assertThatStage(last.ended).isNotDone(); + assertThat(throttler.getQueue()).containsExactly(last); + + // we still have not released, so old throttled threads should be waiting + for (int i = 0; i < throttled.length; i++) { + assertThatStage(throttled[i].started).isDone(); + assertThatStage(throttled[i].ended).isNotDone(); + } + + // now let us release .. + for (int i = 0; i < latches.length; i++) { + latches[i].countDown(); + } + + // .. and check everything finished up OK + for (int i = 0; i < latches.length; i++) { + assertThatStage(throttled[i].started).isSuccess(); + assertThatStage(throttled[i].ended).isSuccess(); + } + + // for good measure, we will also wait for the enqueued to complete + assertThatStage(last.started).isSuccess(); + assertThatStage(last.ended).isSuccess(); + + for (int i = 0; i < threads.length; i++) { + threads[i].join(1_000); + } + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/MockThrottled.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/MockThrottled.java index b7cd0ee8a54..9e54e3d511f 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/MockThrottled.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/MockThrottled.java @@ -19,21 +19,45 @@ import com.datastax.oss.driver.api.core.RequestThrottlingException; import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import edu.umd.cs.findbugs.annotations.NonNull; import 
java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; +import java.util.concurrent.CountDownLatch; class MockThrottled implements Throttled { + final CompletionStage started = new CompletableFuture<>(); + final CompletionStage ended = new CompletableFuture<>(); + final CountDownLatch canRelease; - final CompletionStage started = new CompletableFuture<>(); + public MockThrottled() { + this(new CountDownLatch(0)); + } + + /* + * The releaseLatch can be provided to add some delay before the + * task readiness/fail callbacks complete. This can be used, eg, to + * imitate a slow callback. + */ + public MockThrottled(CountDownLatch releaseLatch) { + this.canRelease = releaseLatch; + } @Override public void onThrottleReady(boolean wasDelayed) { - started.toCompletableFuture().complete(wasDelayed); + started.toCompletableFuture().complete(null); + awaitRelease(); + ended.toCompletableFuture().complete(wasDelayed); } @Override public void onThrottleFailure(@NonNull RequestThrottlingException error) { - started.toCompletableFuture().completeExceptionally(error); + started.toCompletableFuture().complete(null); + awaitRelease(); + ended.toCompletableFuture().completeExceptionally(error); + } + + private void awaitRelease() { + Uninterruptibles.awaitUninterruptibly(canRelease); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java index 0e0fe7c1c65..1e15610bf7b 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/session/throttling/RateLimitingRequestThrottlerTest.java @@ -98,7 +98,7 @@ public void should_start_immediately_when_under_capacity() { throttler.register(request); // Then - 
assertThatStage(request.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(request.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); assertThat(throttler.getStoredPermits()).isEqualTo(4); assertThat(throttler.getQueue()).isEmpty(); } @@ -117,7 +117,7 @@ public void should_allow_new_request_when_under_rate() { throttler.register(request); // Then - assertThatStage(request.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); + assertThatStage(request.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isFalse()); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).isEmpty(); } @@ -136,7 +136,7 @@ public void should_enqueue_when_over_rate() { throttler.register(request); // Then - assertThatStage(request.started).isNotDone(); + assertThatStage(request.ended).isNotDone(); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).containsExactly(request); @@ -160,7 +160,7 @@ public void should_reject_when_queue_is_full() { throttler.register(request); // Then - assertThatStage(request.started) + assertThatStage(request.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } @@ -188,7 +188,7 @@ private void testRemoveInvalidEventFromQueue(Consumer completeCallbac completeCallback.accept(queued1); // Then - assertThatStage(queued2.started).isNotDone(); + assertThatStage(queued2.ended).isNotDone(); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).containsExactly(queued2); } @@ -202,10 +202,10 @@ public void should_dequeue_when_draining_task_runs() { MockThrottled queued1 = new MockThrottled(); throttler.register(queued1); - assertThatStage(queued1.started).isNotDone(); + assertThatStage(queued1.ended).isNotDone(); MockThrottled queued2 = new MockThrottled(); throttler.register(queued2); - assertThatStage(queued2.started).isNotDone(); + 
assertThatStage(queued2.ended).isNotDone(); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).hasSize(2); @@ -230,8 +230,8 @@ public void should_dequeue_when_draining_task_runs() { task.run(); // Then - assertThatStage(queued1.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); - assertThatStage(queued2.started).isNotDone(); + assertThatStage(queued1.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); + assertThatStage(queued2.ended).isNotDone(); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).containsExactly(queued2); // task reschedules itself since it did not empty the queue @@ -244,7 +244,7 @@ public void should_dequeue_when_draining_task_runs() { task.run(); // Then - assertThatStage(queued2.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); + assertThatStage(queued2.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); assertThat(throttler.getStoredPermits()).isEqualTo(0); assertThat(throttler.getQueue()).isEmpty(); assertThat(adminExecutor.nextTask()).isNull(); @@ -286,14 +286,14 @@ public void should_keep_accumulating_time_if_no_permits_created() { // Then MockThrottled queued = new MockThrottled(); throttler.register(queued); - assertThatStage(queued.started).isNotDone(); + assertThatStage(queued.ended).isNotDone(); // When clock.add(ONE_HUNDRED_MILLISECONDS); adminExecutor.nextTask().run(); // Then - assertThatStage(queued.started).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); + assertThatStage(queued.ended).isSuccess(wasDelayed -> assertThat(wasDelayed).isTrue()); } @Test @@ -306,7 +306,7 @@ public void should_reject_enqueued_when_closing() { for (int i = 0; i < 10; i++) { MockThrottled request = new MockThrottled(); throttler.register(request); - assertThatStage(request.started).isNotDone(); + assertThatStage(request.ended).isNotDone(); enqueued.add(request); } @@ -315,7 +315,7 @@ public void 
should_reject_enqueued_when_closing() { // Then for (MockThrottled request : enqueued) { - assertThatStage(request.started) + assertThatStage(request.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } @@ -324,7 +324,7 @@ public void should_reject_enqueued_when_closing() { throttler.register(request); // Then - assertThatStage(request.started) + assertThatStage(request.ended) .isFailed(error -> assertThat(error).isInstanceOf(RequestThrottlingException.class)); } } From 306bf3744a3d24ad01700e4e1d3c14b9a696927f Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Fri, 30 Sep 2022 15:36:36 -0700 Subject: [PATCH 915/979] Annotate BatchStatement, Statement, SimpleStatement methods with CheckReturnValue Since the driver's default implementation is for BatchStatement and SimpleStatement methods to be immutable, we should annotate those methods with @CheckReturnValue. Statement#setNowInSeconds implementations are immutable so annotate that too. patch by Ammar Khaku; reviewed by Andy Tolbert and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1607 --- core/revapi.json | 133 ++++++++++++++++++ .../driver/api/core/cql/BatchStatement.java | 8 ++ .../driver/api/core/cql/SimpleStatement.java | 7 + .../oss/driver/api/core/cql/Statement.java | 1 + 4 files changed, 149 insertions(+) diff --git a/core/revapi.json b/core/revapi.json index 318e29709ec..1c875895d6c 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -6956,6 +6956,139 @@ "old": "method java.lang.Throwable java.lang.Throwable::fillInStackTrace() @ com.fasterxml.jackson.databind.deser.UnresolvedForwardReference", "new": "method com.fasterxml.jackson.databind.deser.UnresolvedForwardReference com.fasterxml.jackson.databind.deser.UnresolvedForwardReference::fillInStackTrace()", "justification": "Upgrade jackson-databind to 2.13.4.1 to address CVEs, API change cause: https://github.com/FasterXML/jackson-databind/issues/3419" + }, + { + "code": 
"java.annotation.added", + "old": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchStatement", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchStatement", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchableStatement>", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchStatement", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchableStatement>", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BatchableStatement>", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BoundStatement", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.BoundStatement", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method SelfT 
com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.SimpleStatement", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int) @ com.datastax.oss.driver.api.core.cql.SimpleStatement", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int)", + "new": "method SelfT com.datastax.oss.driver.api.core.cql.Statement>::setNowInSeconds(int)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::add(com.datastax.oss.driver.api.core.cql.BatchableStatement)", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::add(com.datastax.oss.driver.api.core.cql.BatchableStatement)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::addAll(com.datastax.oss.driver.api.core.cql.BatchableStatement[])", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::addAll(com.datastax.oss.driver.api.core.cql.BatchableStatement[])", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method 
com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::addAll(java.lang.Iterable>)", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::addAll(java.lang.Iterable>)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::clear()", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::clear()", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setBatchType(com.datastax.oss.driver.api.core.cql.BatchType)", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setBatchType(com.datastax.oss.driver.api.core.cql.BatchType)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setKeyspace(com.datastax.oss.driver.api.core.CqlIdentifier)", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setKeyspace(com.datastax.oss.driver.api.core.CqlIdentifier)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "JAVA-2161: Annotate mutating methods with @CheckReturnValue" + }, + { + 
"code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setKeyspace(java.lang.String)", + "new": "method com.datastax.oss.driver.api.core.cql.BatchStatement com.datastax.oss.driver.api.core.cql.BatchStatement::setKeyspace(java.lang.String)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "JAVA-2161: Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setQuery(java.lang.String)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setQuery(java.lang.String)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setKeyspace(com.datastax.oss.driver.api.core.CqlIdentifier)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setKeyspace(com.datastax.oss.driver.api.core.CqlIdentifier)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setKeyspace(java.lang.String)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setKeyspace(java.lang.String)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating 
methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setPositionalValues(java.util.List)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setPositionalValues(java.util.List)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setNamedValuesWithIds(java.util.Map)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setNamedValuesWithIds(java.util.Map)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": "java.annotation.added", + "old": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setNamedValues(java.util.Map)", + "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setNamedValues(java.util.Map)", + "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", + "justification": "Annotate mutating methods with @CheckReturnValue" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java index 0f37ed71ce2..9deb33c6007 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/BatchStatement.java @@ -26,6 +26,7 @@ import com.datastax.oss.driver.internal.core.util.Sizes; import 
com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.PrimitiveSizes; +import edu.umd.cs.findbugs.annotations.CheckReturnValue; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.ArrayList; @@ -164,6 +165,7 @@ static BatchStatementBuilder builder(@NonNull BatchStatement template) { * method. However custom implementations may choose to be mutable and return the same instance. */ @NonNull + @CheckReturnValue BatchStatement setBatchType(@NonNull BatchType newBatchType); /** @@ -180,6 +182,7 @@ static BatchStatementBuilder builder(@NonNull BatchStatement template) { * @see Request#getKeyspace() */ @NonNull + @CheckReturnValue BatchStatement setKeyspace(@Nullable CqlIdentifier newKeyspace); /** @@ -187,6 +190,7 @@ static BatchStatementBuilder builder(@NonNull BatchStatement template) { * setKeyspace(CqlIdentifier.fromCql(newKeyspaceName))}. */ @NonNull + @CheckReturnValue default BatchStatement setKeyspace(@NonNull String newKeyspaceName) { return setKeyspace(CqlIdentifier.fromCql(newKeyspaceName)); } @@ -201,6 +205,7 @@ default BatchStatement setKeyspace(@NonNull String newKeyspaceName) { * method. However custom implementations may choose to be mutable and return the same instance. */ @NonNull + @CheckReturnValue BatchStatement add(@NonNull BatchableStatement statement); /** @@ -213,10 +218,12 @@ default BatchStatement setKeyspace(@NonNull String newKeyspaceName) { * method. However custom implementations may choose to be mutable and return the same instance. */ @NonNull + @CheckReturnValue BatchStatement addAll(@NonNull Iterable> statements); /** @see #addAll(Iterable) */ @NonNull + @CheckReturnValue default BatchStatement addAll(@NonNull BatchableStatement... statements) { return addAll(Arrays.asList(statements)); } @@ -231,6 +238,7 @@ default BatchStatement addAll(@NonNull BatchableStatement... statements) { * method. 
However custom implementations may choose to be mutable and return the same instance. */ @NonNull + @CheckReturnValue BatchStatement clear(); @Override diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java index fd5f456f11c..ef04cd14a5b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/SimpleStatement.java @@ -28,6 +28,7 @@ import com.datastax.oss.protocol.internal.PrimitiveSizes; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableList; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.CheckReturnValue; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.List; @@ -197,6 +198,7 @@ static SimpleStatementBuilder builder(@NonNull SimpleStatement template) { * @see #setNamedValuesWithIds(Map) */ @NonNull + @CheckReturnValue SimpleStatement setQuery(@NonNull String newQuery); /** @@ -209,6 +211,7 @@ static SimpleStatementBuilder builder(@NonNull SimpleStatement template) { * @see Request#getKeyspace() */ @NonNull + @CheckReturnValue SimpleStatement setKeyspace(@Nullable CqlIdentifier newKeyspace); /** @@ -216,6 +219,7 @@ static SimpleStatementBuilder builder(@NonNull SimpleStatement template) { * setKeyspace(CqlIdentifier.fromCql(newKeyspaceName))}. 
*/ @NonNull + @CheckReturnValue default SimpleStatement setKeyspace(@NonNull String newKeyspaceName) { return setKeyspace(CqlIdentifier.fromCql(newKeyspaceName)); } @@ -236,6 +240,7 @@ default SimpleStatement setKeyspace(@NonNull String newKeyspaceName) { * @see #setQuery(String) */ @NonNull + @CheckReturnValue SimpleStatement setPositionalValues(@NonNull List newPositionalValues); @NonNull @@ -256,6 +261,7 @@ default SimpleStatement setKeyspace(@NonNull String newKeyspaceName) { * @see #setQuery(String) */ @NonNull + @CheckReturnValue SimpleStatement setNamedValuesWithIds(@NonNull Map newNamedValues); /** @@ -263,6 +269,7 @@ default SimpleStatement setKeyspace(@NonNull String newKeyspaceName) { * converted on the fly with {@link CqlIdentifier#fromCql(String)}. */ @NonNull + @CheckReturnValue default SimpleStatement setNamedValues(@NonNull Map newNamedValues) { return setNamedValuesWithIds(DefaultSimpleStatement.wrapKeys(newNamedValues)); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java index 594c627e324..d70c56686c5 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/cql/Statement.java @@ -513,6 +513,7 @@ default int getNowInSeconds() { * @see #NO_NOW_IN_SECONDS */ @NonNull + @CheckReturnValue @SuppressWarnings("unchecked") default SelfT setNowInSeconds(int nowInSeconds) { return (SelfT) this; From 8444c79ff843a072e5c1a1d8de5140a47051e2a0 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 16 Sep 2024 16:30:43 -0500 Subject: [PATCH 916/979] Remove "beta" support for Java17 from docs patch by Bret McGuire; reviewed by Andy Tolbert and Alexandre Dutra reference: https://github.com/apache/cassandra-java-driver/pull/1962 --- upgrade_guide/README.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/upgrade_guide/README.md b/upgrade_guide/README.md 
index c6df74ffc2a..56d55aaab36 100644 --- a/upgrade_guide/README.md +++ b/upgrade_guide/README.md @@ -19,7 +19,7 @@ under the License. ## Upgrade guide -### NEW VERSION PLACEHOLDER +### 4.18.1 #### Keystore reloading in DefaultSslEngineFactory @@ -32,12 +32,9 @@ This feature is disabled by default for compatibility. To enable, see `keystore- ### 4.17.0 -#### Beta support for Java17 +#### Support for Java17 With the completion of [JAVA-3042](https://datastax-oss.atlassian.net/browse/JAVA-3042) the driver now passes our automated test matrix for Java Driver releases. -While all features function normally when run with Java 17 tests, we do not offer full support for this -platform until we've received feedback from other users in the ecosystem. - If you discover an issue with the Java Driver running on Java 17, please let us know. We will triage and address Java 17 issues. #### Updated API for vector search From a40e7587b175cc198fb533eadabd31e94f837369 Mon Sep 17 00:00:00 2001 From: Christian Aistleitner Date: Thu, 6 Jun 2024 09:14:16 +0200 Subject: [PATCH 917/979] Fix uncaught exception during graceful channel shutdown after exceeding max orphan ids patch by Christian Aistleitner; reviewed by Andy Tolbert, and Bret McGuire for #1938 --- .../core/channel/InFlightHandler.java | 4 +- .../core/channel/InFlightHandlerTest.java | 63 ++++++++++++++++++- 2 files changed, 64 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java index 9060f80b7cd..90b02f358cd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/channel/InFlightHandler.java @@ -199,14 +199,14 @@ private void startGracefulShutdown(ChannelHandlerContext ctx) { LOG.debug("[{}] No pending queries, completing graceful shutdown now", logPrefix); 
ctx.channel().close(); } else { - // remove heartbeat handler from pipeline if present. + // Remove heartbeat handler from pipeline if present. ChannelHandler heartbeatHandler = ctx.pipeline().get(ChannelFactory.HEARTBEAT_HANDLER_NAME); if (heartbeatHandler != null) { ctx.pipeline().remove(heartbeatHandler); } LOG.debug("[{}] There are pending queries, delaying graceful shutdown", logPrefix); closingGracefully = true; - closeStartedFuture.setSuccess(); + closeStartedFuture.trySuccess(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java index 79a575d9eb6..35049e99af1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/channel/InFlightHandlerTest.java @@ -39,7 +39,9 @@ import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelPromise; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.concurrent.TimeUnit; import org.junit.Before; import org.junit.Test; @@ -256,7 +258,7 @@ public void should_refuse_new_writes_during_graceful_close() { } @Test - public void should_close_gracefully_if_orphan_ids_above_max_and_pending_requests() { + public void should_close_gracefully_if_orphan_ids_above_max_and_pending_request() { // Given addToPipeline(); // Generate n orphan ids by writing and cancelling the requests: @@ -311,6 +313,65 @@ public void should_close_gracefully_if_orphan_ids_above_max_and_pending_requests assertThat(channel.closeFuture()).isSuccess(); } + @Test + public void should_close_gracefully_if_orphan_ids_above_max_and_multiple_pending_requests() { + // Given + addToPipeline(); + // Generate n orphan ids by writing and cancelling the requests. 
+ for (int i = 0; i < MAX_ORPHAN_IDS; i++) { + when(streamIds.acquire()).thenReturn(i); + MockResponseCallback responseCallback = new MockResponseCallback(); + channel + .writeAndFlush( + new DriverChannel.RequestMessage(QUERY, false, Frame.NO_PAYLOAD, responseCallback)) + .awaitUninterruptibly(); + channel.writeAndFlush(responseCallback).awaitUninterruptibly(); + } + // Generate 3 additional requests that are pending and not cancelled. + List pendingResponseCallbacks = new ArrayList<>(); + for (int i = 0; i < 3; i++) { + when(streamIds.acquire()).thenReturn(MAX_ORPHAN_IDS + i); + MockResponseCallback responseCallback = new MockResponseCallback(); + channel + .writeAndFlush( + new DriverChannel.RequestMessage(QUERY, false, Frame.NO_PAYLOAD, responseCallback)) + .awaitUninterruptibly(); + pendingResponseCallbacks.add(responseCallback); + } + + // When + // Generate the n+1th orphan id that makes us go above the threshold by canceling one if the + // pending requests. + channel.writeAndFlush(pendingResponseCallbacks.remove(0)).awaitUninterruptibly(); + + // Then + // Channel should be closing gracefully but there's no way to observe that from the outside + // besides writing another request and check that it's rejected. + assertThat(channel.closeFuture()).isNotDone(); + ChannelFuture otherWriteFuture = + channel.writeAndFlush( + new DriverChannel.RequestMessage( + QUERY, false, Frame.NO_PAYLOAD, new MockResponseCallback())); + assertThat(otherWriteFuture).isFailed(); + assertThat(otherWriteFuture.cause()) + .isInstanceOf(IllegalStateException.class) + .hasMessage("Channel is closing"); + + // When + // Cancel the remaining pending requests causing the n+ith orphan ids above the threshold. + for (MockResponseCallback pendingResponseCallback : pendingResponseCallbacks) { + ChannelFuture future = channel.writeAndFlush(pendingResponseCallback).awaitUninterruptibly(); + + // Then + // The future should succeed even though the channel has started closing gracefully. 
+ assertThat(future).isSuccess(); + } + + // Then + // The graceful shutdown completes. + assertThat(channel.closeFuture()).isSuccess(); + } + @Test public void should_close_immediately_if_orphan_ids_above_max_and_no_pending_requests() { // Given From eb57fd7d46fb8b84655e66a3ca8e9ceef77b5164 Mon Sep 17 00:00:00 2001 From: janehe Date: Wed, 18 Sep 2024 08:49:37 +0000 Subject: [PATCH 918/979] Build a public CI for Apache Cassandra Java Driver patch by Siyao (Jane) He; reviewed by Mick Semb Wever for CASSANDRA-19832 --- Jenkinsfile-asf | 80 +++++++++++++++++++++++++++++ Jenkinsfile => Jenkinsfile-datastax | 0 ci/create-user.sh | 60 ++++++++++++++++++++++ ci/run-tests.sh | 10 ++++ 4 files changed, 150 insertions(+) create mode 100644 Jenkinsfile-asf rename Jenkinsfile => Jenkinsfile-datastax (100%) create mode 100644 ci/create-user.sh create mode 100755 ci/run-tests.sh diff --git a/Jenkinsfile-asf b/Jenkinsfile-asf new file mode 100644 index 00000000000..a1be4bcd4f6 --- /dev/null +++ b/Jenkinsfile-asf @@ -0,0 +1,80 @@ +#!groovy + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +pipeline { + agent { + label 'cassandra-small' + } + + triggers { + // schedules only run against release branches (i.e. 
3.x, 4.x, 4.5.x, etc.) + cron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? '@weekly' : '') + } + + stages { + stage('Matrix') { + matrix { + axes { + axis { + name 'TEST_JAVA_VERSION' + values 'openjdk@1.8.0-292', 'openjdk@1.11.0-9', 'openjdk@17' + } + axis { + name 'SERVER_VERSION' + values '3.11.17', + '4.0.13', + '5.0-beta1' + } + } + stages { + stage('Tests') { + agent { + label 'cassandra-medium' + } + steps { + script { + executeTests() + junit testResults: '**/target/surefire-reports/TEST-*.xml', allowEmptyResults: true + junit testResults: '**/target/failsafe-reports/TEST-*.xml', allowEmptyResults: true + } + } + } + } + } + } + } +} + +def executeTests() { + def testJavaMajorVersion = (TEST_JAVA_VERSION =~ /@(?:1\.)?(\d+)/)[0][1] + sh """ + container_id=\$(docker run -td -e TEST_JAVA_VERSION=${TEST_JAVA_VERSION} -e SERVER_VERSION=${SERVER_VERSION} -e TEST_JAVA_MAJOR_VERSION=${testJavaMajorVersion} -v \$(pwd):/home/docker/cassandra-java-driver apache.jfrog.io/cassan-docker/apache/cassandra-java-driver-testing-ubuntu2204 'sleep 2h') + docker exec --user root \$container_id bash -c \"sudo bash /home/docker/cassandra-java-driver/ci/create-user.sh docker \$(id -u) \$(id -g) /home/docker/cassandra-java-driver\" + docker exec --user docker \$container_id './cassandra-java-driver/ci/run-tests.sh' + ( nohup docker stop \$container_id >/dev/null 2>/dev/null & ) + """ +} + +// branch pattern for cron +// should match 3.x, 4.x, 4.5.x, etc +def branchPatternCron() { + ~'((\\d+(\\.[\\dx]+)+))' +} diff --git a/Jenkinsfile b/Jenkinsfile-datastax similarity index 100% rename from Jenkinsfile rename to Jenkinsfile-datastax diff --git a/ci/create-user.sh b/ci/create-user.sh new file mode 100644 index 00000000000..fb193df9a00 --- /dev/null +++ b/ci/create-user.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +################################ +# +# Prep +# +################################ + +if [ "$1" == "-h" ]; then + echo "$0 [-h] " + echo " this script is used internally by other scripts in the same directory to create a user with the running host user's same uid and gid" + exit 1 +fi + +# arguments +username=$1 +uid=$2 +gid=$3 +BUILD_HOME=$4 + +################################ +# +# Main +# +################################ + +# disable git directory ownership checks +su ${username} -c "git config --global safe.directory '*'" + +if grep "^ID=" /etc/os-release | grep -q 'debian\|ubuntu' ; then + deluser docker + adduser --quiet --disabled-login --no-create-home --uid $uid --gecos ${username} ${username} + groupmod --non-unique -g $gid $username + gpasswd -a ${username} sudo >/dev/null +else + adduser --no-create-home --uid $uid ${username} +fi + +# sudo priviledges +echo "${username} ALL=(root) NOPASSWD:ALL" > /etc/sudoers.d/${username} +chmod 0440 /etc/sudoers.d/${username} + +# proper permissions +chown -R ${username}:${username} /home/docker +chmod og+wx ${BUILD_HOME} \ No newline at end of file diff --git a/ci/run-tests.sh b/ci/run-tests.sh new file mode 100755 index 00000000000..02a8070e7a9 --- /dev/null +++ b/ci/run-tests.sh @@ -0,0 +1,10 @@ +#!/bin/bash -x + +. ~/.jabba/jabba.sh +. 
~/env.txt +cd $(dirname "$(readlink -f "$0")")/.. +printenv | sort +mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true +jabba use ${TEST_JAVA_VERSION} +printenv | sort +mvn -B -V verify -T 1 -Ptest-jdk-${TEST_JAVA_MAJOR_VERSION} -DtestJavaHome=$(jabba which ${TEST_JAVA_VERSION}) -Dccm.version=${SERVER_VERSION} -Dccm.dse=false -Dmaven.test.failure.ignore=true -Dmaven.javadoc.skip=true From 72c729b1ba95695fed467ca3734de7a39a2b3201 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Thu, 3 Oct 2024 09:32:17 +0200 Subject: [PATCH 919/979] CASSANDRA-19932: Allow to define extensions while creating table patch by Lukasz Antoniak; reviewed by Bret McGuire and Chris Lohfink --- .../schema/CreateTableWithOptions.java | 11 +++++ .../schema/RawOptionsWrapper.java | 45 +++++++++++++++++++ .../querybuilder/schema/CreateTableTest.java | 9 +++- 3 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/RawOptionsWrapper.java diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.java index 4dd3193da15..c7bddf575fb 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableWithOptions.java @@ -18,7 +18,11 @@ package com.datastax.oss.driver.api.querybuilder.schema; import com.datastax.oss.driver.api.querybuilder.BuildableQuery; +import com.datastax.oss.driver.internal.querybuilder.schema.RawOptionsWrapper; +import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import edu.umd.cs.findbugs.annotations.CheckReturnValue; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Map; public interface CreateTableWithOptions extends BuildableQuery, RelationStructure { 
@@ -26,4 +30,11 @@ public interface CreateTableWithOptions /** Enables COMPACT STORAGE in the CREATE TABLE statement. */ @NonNull CreateTableWithOptions withCompactStorage(); + + /** Attaches custom metadata to CQL table definition. */ + @NonNull + @CheckReturnValue + default CreateTableWithOptions withExtensions(@NonNull Map extensions) { + return withOption("extensions", Maps.transformValues(extensions, RawOptionsWrapper::of)); + } } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/RawOptionsWrapper.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/RawOptionsWrapper.java new file mode 100644 index 00000000000..64cdb50f887 --- /dev/null +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/schema/RawOptionsWrapper.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.querybuilder.schema; + +import com.datastax.oss.driver.api.core.data.ByteUtils; + +/** + * Wrapper class to indicate that the contained String value should be understood to represent a CQL + * literal that can be included directly in a CQL statement (i.e. without escaping). 
+ */ +public class RawOptionsWrapper { + private final String val; + + private RawOptionsWrapper(String val) { + this.val = val; + } + + public static RawOptionsWrapper of(String val) { + return new RawOptionsWrapper(val); + } + + public static RawOptionsWrapper of(byte[] val) { + return new RawOptionsWrapper(ByteUtils.toHexString(val)); + } + + @Override + public String toString() { + return this.val; + } +} diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java index d32c66f629b..7a5542c51f0 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.api.querybuilder.schema.compaction.TimeWindowCompactionStrategy.CompactionWindowUnit; import com.datastax.oss.driver.api.querybuilder.schema.compaction.TimeWindowCompactionStrategy.TimestampResolution; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.nio.charset.StandardCharsets; import org.junit.Test; public class CreateTableTest { @@ -169,6 +170,12 @@ public void should_generate_create_table_with_options() { .withComment("Hello world") .withDcLocalReadRepairChance(0.54) .withDefaultTimeToLiveSeconds(86400) + .withExtensions( + ImmutableMap.of( + "key1", + "apache".getBytes(StandardCharsets.UTF_8), + "key2", + "cassandra".getBytes(StandardCharsets.UTF_8))) .withGcGraceSeconds(864000) .withMemtableFlushPeriodInMs(10000) .withMinIndexInterval(1024) @@ -176,7 +183,7 @@ public void should_generate_create_table_with_options() { .withReadRepairChance(0.55) .withSpeculativeRetry("99percentile")) .hasCql( - "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH bloom_filter_fp_chance=0.42 AND cdc=false AND comment='Hello world' AND 
dclocal_read_repair_chance=0.54 AND default_time_to_live=86400 AND gc_grace_seconds=864000 AND memtable_flush_period_in_ms=10000 AND min_index_interval=1024 AND max_index_interval=4096 AND read_repair_chance=0.55 AND speculative_retry='99percentile'"); + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH bloom_filter_fp_chance=0.42 AND cdc=false AND comment='Hello world' AND dclocal_read_repair_chance=0.54 AND default_time_to_live=86400 AND extensions={'key1':0x617061636865,'key2':0x63617373616e647261} AND gc_grace_seconds=864000 AND memtable_flush_period_in_ms=10000 AND min_index_interval=1024 AND max_index_interval=4096 AND read_repair_chance=0.55 AND speculative_retry='99percentile'"); } @Test From 8ebcd9f85afb548f38e953fb1190d9ff04d8df5a Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Tue, 15 Oct 2024 19:24:00 -0400 Subject: [PATCH 920/979] Fix DefaultSslEngineFactory missing null check on close patch by Abe Ratnofsky; reviewed by Andy Tolbert and Chris Lohfink for CASSANDRA-20001 --- .../oss/driver/internal/core/ssl/DefaultSslEngineFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java index bb95dc738c7..475ec38d578 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java @@ -164,6 +164,6 @@ private ReloadingKeyManagerFactory buildReloadingKeyManagerFactory(DriverExecuti @Override public void close() throws Exception { - kmf.close(); + if (kmf != null) kmf.close(); } } From f98e3433b91b49e0facfbce8e94e01e304714968 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 2 Oct 2024 18:04:19 -0500 Subject: [PATCH 921/979] Query builder support for NOT CQL syntax patch by Bret McGuire; reviewed by Bret McGuire and Andy 
Tolbert for CASSANDRA-19930 --- .../relation/ColumnRelationBuilder.java | 24 +++++++ .../relation/InRelationBuilder.java | 32 +++++++++ .../querybuilder/relation/RelationTest.java | 69 ++++++++++++++++++- 3 files changed, 124 insertions(+), 1 deletion(-) diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/ColumnRelationBuilder.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/ColumnRelationBuilder.java index 613e72291b7..247d61eaed5 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/ColumnRelationBuilder.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/ColumnRelationBuilder.java @@ -46,4 +46,28 @@ default ResultT contains(@NonNull Term term) { default ResultT containsKey(@NonNull Term term) { return build(" CONTAINS KEY ", term); } + + /** + * Builds a NOT CONTAINS relation for the column. + * + *

          Note that NOT CONTAINS support is only available in Cassandra 5.1 or later. See CASSANDRA-18584 for more + * information. + */ + @NonNull + default ResultT notContains(@NonNull Term term) { + return build(" NOT CONTAINS ", term); + } + + /** + * Builds a NOT CONTAINS KEY relation for the column. + * + *

          Note that NOT CONTAINS KEY support is only available in Cassandra 5.1 or later. See CASSANDRA-18584 for more + * information. + */ + @NonNull + default ResultT notContainsKey(@NonNull Term term) { + return build(" NOT CONTAINS KEY ", term); + } } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/InRelationBuilder.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/InRelationBuilder.java index d3fc8dce91d..afaa19ff724 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/InRelationBuilder.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/relation/InRelationBuilder.java @@ -50,6 +50,38 @@ default ResultT in(@NonNull Term... alternatives) { return in(Arrays.asList(alternatives)); } + /** + * Builds a NOT IN relation where the whole set of possible values is a bound variable, as in + * {@code NOT IN ?}. + * + *

          Note that NOT IN support is only available in Cassandra 5.1 or later. See CASSANDRA-18584 for more + * information. + */ + @NonNull + default ResultT notIn(@NonNull BindMarker bindMarker) { + return build(" NOT IN ", bindMarker); + } + + /** + * Builds a NOT IN relation where the arguments are the possible values, as in {@code NOT IN (term1, + * term2...)}. + * + *

          Note that NOT IN support is only available in Cassandra 5.1 or later. See CASSANDRA-18584 for more + * information. + */ + @NonNull + default ResultT notIn(@NonNull Iterable alternatives) { + return build(" NOT IN ", QueryBuilder.tuple(alternatives)); + } + + /** Var-arg equivalent of {@link #notIn(Iterable)} . */ + @NonNull + default ResultT notIn(@NonNull Term... alternatives) { + return notIn(Arrays.asList(alternatives)); + } + @NonNull ResultT build(@NonNull String operator, @Nullable Term rightOperand); } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/RelationTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/RelationTest.java index 515a336f5f4..ec121eaa050 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/RelationTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/relation/RelationTest.java @@ -19,10 +19,12 @@ import static com.datastax.oss.driver.api.querybuilder.Assertions.assertThat; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.bindMarker; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.raw; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.tuple; +import org.assertj.core.util.Lists; import org.junit.Test; public class RelationTest { @@ -42,13 +44,78 @@ public void should_generate_is_not_null_relation() { } @Test - public void should_generate_in_relation() { + public void should_generate_contains_relation() { + assertThat(selectFrom("foo").all().where(Relation.column("k").contains(literal(1)))) + .hasCql("SELECT * FROM foo WHERE k CONTAINS 1"); + } + + @Test + public void should_generate_contains_key_relation() { + 
assertThat(selectFrom("foo").all().where(Relation.column("k").containsKey(literal(1)))) + .hasCql("SELECT * FROM foo WHERE k CONTAINS KEY 1"); + } + + @Test + public void should_generate_not_contains_relation() { + assertThat(selectFrom("foo").all().where(Relation.column("k").notContains(literal(1)))) + .hasCql("SELECT * FROM foo WHERE k NOT CONTAINS 1"); + } + + @Test + public void should_generate_not_contains_key_relation() { + assertThat(selectFrom("foo").all().where(Relation.column("k").notContainsKey(literal(1)))) + .hasCql("SELECT * FROM foo WHERE k NOT CONTAINS KEY 1"); + } + + @Test + public void should_generate_in_relation_bind_markers() { assertThat(selectFrom("foo").all().where(Relation.column("k").in(bindMarker()))) .hasCql("SELECT * FROM foo WHERE k IN ?"); assertThat(selectFrom("foo").all().where(Relation.column("k").in(bindMarker(), bindMarker()))) .hasCql("SELECT * FROM foo WHERE k IN (?,?)"); } + @Test + public void should_generate_in_relation_terms() { + assertThat( + selectFrom("foo") + .all() + .where( + Relation.column("k") + .in(Lists.newArrayList(literal(1), literal(2), literal(3))))) + .hasCql("SELECT * FROM foo WHERE k IN (1,2,3)"); + assertThat( + selectFrom("foo") + .all() + .where(Relation.column("k").in(literal(1), literal(2), literal(3)))) + .hasCql("SELECT * FROM foo WHERE k IN (1,2,3)"); + } + + @Test + public void should_generate_not_in_relation_bind_markers() { + assertThat(selectFrom("foo").all().where(Relation.column("k").notIn(bindMarker()))) + .hasCql("SELECT * FROM foo WHERE k NOT IN ?"); + assertThat( + selectFrom("foo").all().where(Relation.column("k").notIn(bindMarker(), bindMarker()))) + .hasCql("SELECT * FROM foo WHERE k NOT IN (?,?)"); + } + + @Test + public void should_generate_not_in_relation_terms() { + assertThat( + selectFrom("foo") + .all() + .where( + Relation.column("k") + .notIn(Lists.newArrayList(literal(1), literal(2), literal(3))))) + .hasCql("SELECT * FROM foo WHERE k NOT IN (1,2,3)"); + assertThat( + 
selectFrom("foo") + .all() + .where(Relation.column("k").notIn(literal(1), literal(2), literal(3)))) + .hasCql("SELECT * FROM foo WHERE k NOT IN (1,2,3)"); + } + @Test public void should_generate_token_relation() { assertThat(selectFrom("foo").all().where(Relation.token("k1", "k2").isEqualTo(bindMarker("t")))) From dfe11a8b671d76be6c4e90981a736325b0e4719b Mon Sep 17 00:00:00 2001 From: Dmitry Kropachev Date: Sat, 14 Sep 2024 06:43:15 -0400 Subject: [PATCH 922/979] Fix CustomCcmRule to drop `CURRENT` flag no matter what If super.after() throws an Exception `CURRENT` flag is never dropped which leads next tests to fail with IllegalStateException("Attempting to use a Ccm rule while another is in use. This is disallowed") Patch by Dmitry Kropachev; reviewed by Andy Tolbert and Bret McGuire for JAVA-3117 --- .../oss/driver/api/testinfra/ccm/CustomCcmRule.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java index cf150b12f55..5ea1bf7ed3c 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CustomCcmRule.java @@ -53,6 +53,7 @@ protected void before() { after(); } catch (Exception e1) { LOG.warn("Error cleaning up CustomCcmRule before() failure", e1); + e.addSuppressed(e1); } throw e; } @@ -64,8 +65,11 @@ protected void before() { @Override protected void after() { - super.after(); - CURRENT.compareAndSet(this, null); + try { + super.after(); + } finally { + CURRENT.compareAndSet(this, null); + } } public CcmBridge getCcmBridge() { From 787783770b0a624f4e58d35aeff16eff8bcddc02 Mon Sep 17 00:00:00 2001 From: janehe Date: Wed, 30 Oct 2024 11:34:11 -0700 Subject: [PATCH 923/979] JAVA-3051: Memory leak patch by Jane He; reviewed by Alexandre Dutra and Bret McGuire 
for JAVA-3051 --- .../core/control/ControlConnection.java | 21 ++-- .../DefaultLoadBalancingPolicy.java | 96 ++++++++++++------- .../metadata/LoadBalancingPolicyWrapper.java | 12 ++- .../core/metrics/AbstractMetricUpdater.java | 5 +- .../internal/core/session/DefaultSession.java | 14 ++- .../util/concurrent/ReplayingEventFilter.java | 1 + ...faultLoadBalancingPolicyQueryPlanTest.java | 9 +- ...LoadBalancingPolicyRequestTrackerTest.java | 52 +++++----- 8 files changed, 126 insertions(+), 84 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java index 5ee9c6e7810..5c29a9b704b 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/control/ControlConnection.java @@ -253,8 +253,8 @@ private class SingleThreaded { private final Reconnection reconnection; private DriverChannelOptions channelOptions; // The last events received for each node - private final Map lastDistanceEvents = new WeakHashMap<>(); - private final Map lastStateEvents = new WeakHashMap<>(); + private final Map lastNodeDistance = new WeakHashMap<>(); + private final Map lastNodeState = new WeakHashMap<>(); private SingleThreaded(InternalDriverContext context) { this.context = context; @@ -366,8 +366,8 @@ private void connect( .whenCompleteAsync( (channel, error) -> { try { - DistanceEvent lastDistanceEvent = lastDistanceEvents.get(node); - NodeStateEvent lastStateEvent = lastStateEvents.get(node); + NodeDistance lastDistance = lastNodeDistance.get(node); + NodeState lastState = lastNodeState.get(node); if (error != null) { if (closeWasCalled || initFuture.isCancelled()) { onSuccess.run(); // abort, we don't really care about the result @@ -406,8 +406,7 @@ private void connect( channel); channel.forceClose(); onSuccess.run(); - } else if 
(lastDistanceEvent != null - && lastDistanceEvent.distance == NodeDistance.IGNORED) { + } else if (lastDistance == NodeDistance.IGNORED) { LOG.debug( "[{}] New channel opened ({}) but node became ignored, " + "closing and trying next node", @@ -415,9 +414,9 @@ private void connect( channel); channel.forceClose(); connect(nodes, errors, onSuccess, onFailure); - } else if (lastStateEvent != null - && (lastStateEvent.newState == null /*(removed)*/ - || lastStateEvent.newState == NodeState.FORCED_DOWN)) { + } else if (lastNodeState.containsKey(node) + && (lastState == null /*(removed)*/ + || lastState == NodeState.FORCED_DOWN)) { LOG.debug( "[{}] New channel opened ({}) but node was removed or forced down, " + "closing and trying next node", @@ -534,7 +533,7 @@ private void reconnectNow() { private void onDistanceEvent(DistanceEvent event) { assert adminExecutor.inEventLoop(); - this.lastDistanceEvents.put(event.node, event); + this.lastNodeDistance.put(event.node, event.distance); if (event.distance == NodeDistance.IGNORED && channel != null && !channel.closeFuture().isDone() @@ -549,7 +548,7 @@ private void onDistanceEvent(DistanceEvent event) { private void onStateEvent(NodeStateEvent event) { assert adminExecutor.inEventLoop(); - this.lastStateEvents.put(event.node, event); + this.lastNodeState.put(event.node, event.newState); if ((event.newState == null /*(removed)*/ || event.newState == NodeState.FORCED_DOWN) && channel != null && !channel.closeFuture().isDone() diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 47edcdfe53e..0f03cbb3643 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -33,15 +33,19 @@ import 
com.datastax.oss.driver.internal.core.util.ArrayUtils; import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.collect.MapMaker; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.BitSet; import java.util.Map; import java.util.Optional; +import java.util.OptionalLong; import java.util.Queue; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.atomic.AtomicLongArray; import net.jcip.annotations.ThreadSafe; @@ -96,7 +100,7 @@ public class DefaultLoadBalancingPolicy extends BasicLoadBalancingPolicy impleme private static final int MAX_IN_FLIGHT_THRESHOLD = 10; private static final long RESPONSE_COUNT_RESET_INTERVAL_NANOS = MILLISECONDS.toNanos(200); - protected final Map responseTimes = new ConcurrentHashMap<>(); + protected final ConcurrentMap responseTimes; protected final Map upTimes = new ConcurrentHashMap<>(); private final boolean avoidSlowReplicas; @@ -104,6 +108,7 @@ public DefaultLoadBalancingPolicy(@NonNull DriverContext context, @NonNull Strin super(context, profileName); this.avoidSlowReplicas = profile.getBoolean(DefaultDriverOption.LOAD_BALANCING_POLICY_SLOW_AVOIDANCE, true); + this.responseTimes = new MapMaker().weakKeys().makeMap(); } @NonNull @@ -274,40 +279,19 @@ protected boolean isBusy(@NonNull Node node, @NonNull Session session) { } protected boolean isResponseRateInsufficient(@NonNull Node node, long now) { - // response rate is considered insufficient when less than 2 responses were obtained in - // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS. 
- if (responseTimes.containsKey(node)) { - AtomicLongArray array = responseTimes.get(node); - if (array.length() == 2) { - long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS; - long leastRecent = array.get(0); - return leastRecent - threshold < 0; - } - } - return true; + NodeResponseRateSample sample = responseTimes.get(node); + return !(sample == null || sample.hasSufficientResponses(now)); } + /** + * Synchronously updates the response times for the given node. It is synchronous because the + * {@link #DefaultLoadBalancingPolicy(com.datastax.oss.driver.api.core.context.DriverContext, + * java.lang.String) CacheLoader.load} assigned is synchronous. + * + * @param node The node to update. + */ protected void updateResponseTimes(@NonNull Node node) { - responseTimes.compute( - node, - (n, array) -> { - // The array stores at most two timestamps, since we don't need more; - // the first one is always the least recent one, and hence the one to inspect. - long now = nanoTime(); - if (array == null) { - array = new AtomicLongArray(1); - array.set(0, now); - } else if (array.length() == 1) { - long previous = array.get(0); - array = new AtomicLongArray(2); - array.set(0, previous); - array.set(1, now); - } else { - array.set(0, array.get(1)); - array.set(1, now); - } - return array; - }); + this.responseTimes.compute(node, (k, v) -> v == null ? new NodeResponseRateSample() : v.next()); } protected int getInFlight(@NonNull Node node, @NonNull Session session) { @@ -318,4 +302,52 @@ protected int getInFlight(@NonNull Node node, @NonNull Session session) { // processing them). return (pool == null) ? 
0 : pool.getInFlight(); } + + protected class NodeResponseRateSample { + + @VisibleForTesting protected final long oldest; + @VisibleForTesting protected final OptionalLong newest; + + private NodeResponseRateSample() { + long now = nanoTime(); + this.oldest = now; + this.newest = OptionalLong.empty(); + } + + private NodeResponseRateSample(long oldestSample) { + this(oldestSample, nanoTime()); + } + + private NodeResponseRateSample(long oldestSample, long newestSample) { + this.oldest = oldestSample; + this.newest = OptionalLong.of(newestSample); + } + + @VisibleForTesting + protected NodeResponseRateSample(AtomicLongArray times) { + assert times.length() >= 1; + this.oldest = times.get(0); + this.newest = (times.length() > 1) ? OptionalLong.of(times.get(1)) : OptionalLong.empty(); + } + + // Our newest sample becomes the oldest in the next generation + private NodeResponseRateSample next() { + return new NodeResponseRateSample(this.getNewestValidSample(), nanoTime()); + } + + // If we have a pair of values return the newest, otherwise we have just one value... so just + // return it + private long getNewestValidSample() { + return this.newest.orElse(this.oldest); + } + + // response rate is considered insufficient when less than 2 responses were obtained in + // the past interval delimited by RESPONSE_COUNT_RESET_INTERVAL_NANOS. 
+ private boolean hasSufficientResponses(long now) { + // If we only have one sample it's an automatic failure + if (!this.newest.isPresent()) return true; + long threshold = now - RESPONSE_COUNT_RESET_INTERVAL_NANOS; + return this.oldest - threshold >= 0; + } + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java index 20d045d4e72..5c8473a3b67 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/LoadBalancingPolicyWrapper.java @@ -38,6 +38,7 @@ import java.util.Map; import java.util.Queue; import java.util.Set; +import java.util.WeakHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.Lock; @@ -105,7 +106,7 @@ public LoadBalancingPolicyWrapper( // Just an alias to make the rest of the code more readable this.policies = reporters.keySet(); - this.distances = new HashMap<>(); + this.distances = new WeakHashMap<>(); this.logPrefix = context.getSessionName(); context.getEventBus().register(NodeStateEvent.class, this::onNodeStateEvent); @@ -172,6 +173,7 @@ private void onNodeStateEvent(NodeStateEvent event) { // once it has gone through the filter private void processNodeStateEvent(NodeStateEvent event) { + DefaultNode node = event.node; switch (stateRef.get()) { case BEFORE_INIT: case DURING_INIT: @@ -181,13 +183,13 @@ private void processNodeStateEvent(NodeStateEvent event) { case RUNNING: for (LoadBalancingPolicy policy : policies) { if (event.newState == NodeState.UP) { - policy.onUp(event.node); + policy.onUp(node); } else if (event.newState == NodeState.DOWN || event.newState == NodeState.FORCED_DOWN) { - policy.onDown(event.node); + policy.onDown(node); } else if (event.newState == 
NodeState.UNKNOWN) { - policy.onAdd(event.node); + policy.onAdd(node); } else if (event.newState == null) { - policy.onRemove(event.node); + policy.onRemove(node); } else { LOG.warn("[{}] Unsupported event: {}", logPrefix, event); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java index 5e2392a2e7f..3d7dc50a7c0 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metrics/AbstractMetricUpdater.java @@ -173,9 +173,8 @@ protected Timeout newTimeout() { .getTimer() .newTimeout( t -> { - if (t.isExpired()) { - clearMetrics(); - } + clearMetrics(); + cancelMetricsExpirationTimeout(); }, expireAfter.toNanos(), TimeUnit.NANOSECONDS); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java index 6f063ae9a50..b795c30fce7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/DefaultSession.java @@ -527,14 +527,18 @@ private void notifyListeners() { private void onNodeStateChanged(NodeStateEvent event) { assert adminExecutor.inEventLoop(); - if (event.newState == null) { - context.getNodeStateListener().onRemove(event.node); + DefaultNode node = event.node; + if (node == null) { + LOG.debug( + "[{}] Node for this event was removed, ignoring state change: {}", logPrefix, event); + } else if (event.newState == null) { + context.getNodeStateListener().onRemove(node); } else if (event.oldState == null && event.newState == NodeState.UNKNOWN) { - context.getNodeStateListener().onAdd(event.node); + context.getNodeStateListener().onAdd(node); } else if (event.newState == NodeState.UP) 
{ - context.getNodeStateListener().onUp(event.node); + context.getNodeStateListener().onUp(node); } else if (event.newState == NodeState.DOWN || event.newState == NodeState.FORCED_DOWN) { - context.getNodeStateListener().onDown(event.node); + context.getNodeStateListener().onDown(node); } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java index 12679db7ff0..27ca1b6ff42 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/concurrent/ReplayingEventFilter.java @@ -82,6 +82,7 @@ public void markReady() { consumer.accept(event); } } finally { + recordedEvents.clear(); stateLock.writeLock().unlock(); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java index 6098653bc2e..fff86a1b750 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyQueryPlanTest.java @@ -203,7 +203,10 @@ public void should_prioritize_and_shuffle_3_or_more_replicas_when_first_unhealth given(pool3.getInFlight()).willReturn(0); given(pool5.getInFlight()).willReturn(0); - dsePolicy.responseTimes.put(node1, new AtomicLongArray(new long[] {T0, T0})); // unhealthy + dsePolicy.responseTimes.put( + node1, + dsePolicy + .new NodeResponseRateSample(new AtomicLongArray(new long[] {T0, T0}))); // unhealthy // When Queue plan1 = dsePolicy.newQueryPlan(request, session); @@ -232,7 +235,9 @@ public void 
should_prioritize_and_shuffle_3_or_more_replicas_when_first_unhealth given(pool3.getInFlight()).willReturn(0); given(pool5.getInFlight()).willReturn(0); - dsePolicy.responseTimes.put(node1, new AtomicLongArray(new long[] {T1, T1})); // healthy + dsePolicy.responseTimes.put( + node1, + dsePolicy.new NodeResponseRateSample(new AtomicLongArray(new long[] {T1, T1}))); // healthy // When Queue plan1 = dsePolicy.newQueryPlan(request, session); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java index bcc6439a2a5..757af43ef67 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicyRequestTrackerTest.java @@ -69,11 +69,11 @@ public void should_record_first_response_time_on_node_success() { // Then assertThat(policy.responseTimes) - .hasEntrySatisfying(node1, value -> assertThat(value.get(0)).isEqualTo(123L)) + .hasEntrySatisfying(node1, value -> assertThat(value.oldest).isEqualTo(123L)) .doesNotContainKeys(node2, node3); - assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } @Test @@ -91,13 +91,13 @@ public void should_record_second_response_time_on_node_success() { node1, value -> { // oldest value first - assertThat(value.get(0)).isEqualTo(123); - 
assertThat(value.get(1)).isEqualTo(456); + assertThat(value.oldest).isEqualTo(123); + assertThat(value.newest.getAsLong()).isEqualTo(456); }) .doesNotContainKeys(node2, node3); assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } @Test @@ -116,14 +116,14 @@ public void should_record_further_response_times_on_node_success() { node1, value -> { // values should rotate left (bubble up) - assertThat(value.get(0)).isEqualTo(456); - assertThat(value.get(1)).isEqualTo(789); + assertThat(value.oldest).isEqualTo(456); + assertThat(value.newest.getAsLong()).isEqualTo(789); }) - .hasEntrySatisfying(node2, value -> assertThat(value.get(0)).isEqualTo(789)) + .hasEntrySatisfying(node2, value -> assertThat(value.oldest).isEqualTo(789)) .doesNotContainKey(node3); assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } @Test @@ -137,11 +137,11 @@ public void should_record_first_response_time_on_node_error() { // Then assertThat(policy.responseTimes) - .hasEntrySatisfying(node1, value -> assertThat(value.get(0)).isEqualTo(123L)) + .hasEntrySatisfying(node1, value -> assertThat(value.oldest).isEqualTo(123L)) .doesNotContainKeys(node2, node3); - assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - 
assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } @Test @@ -160,13 +160,13 @@ public void should_record_second_response_time_on_node_error() { node1, value -> { // oldest value first - assertThat(value.get(0)).isEqualTo(123); - assertThat(value.get(1)).isEqualTo(456); + assertThat(value.oldest).isEqualTo(123); + assertThat(value.newest.getAsLong()).isEqualTo(456); }) .doesNotContainKeys(node2, node3); assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } @Test @@ -186,13 +186,13 @@ public void should_record_further_response_times_on_node_error() { node1, value -> { // values should rotate left (bubble up) - assertThat(value.get(0)).isEqualTo(456); - assertThat(value.get(1)).isEqualTo(789); + assertThat(value.oldest).isEqualTo(456); + assertThat(value.newest.getAsLong()).isEqualTo(789); }) - .hasEntrySatisfying(node2, value -> assertThat(value.get(0)).isEqualTo(789)) + .hasEntrySatisfying(node2, value -> assertThat(value.oldest).isEqualTo(789)) .doesNotContainKey(node3); assertThat(policy.isResponseRateInsufficient(node1, nextNanoTime)).isFalse(); - assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isTrue(); - assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isTrue(); + assertThat(policy.isResponseRateInsufficient(node2, nextNanoTime)).isFalse(); + assertThat(policy.isResponseRateInsufficient(node3, nextNanoTime)).isFalse(); } } 
From a4175f33e43344fd1b092aae332ed5979a1bd831 Mon Sep 17 00:00:00 2001 From: "Siyao (Jane) He" Date: Mon, 28 Oct 2024 14:44:22 -0700 Subject: [PATCH 924/979] Automate latest Cassandra versions when running CI patch by Siyao (Jane) He; reviewed by Mick Semb Wever for CASSJAVA-25 --- Jenkinsfile-asf | 7 ++++--- ci/run-tests.sh | 4 +++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile-asf b/Jenkinsfile-asf index a1be4bcd4f6..0217d0455d6 100644 --- a/Jenkinsfile-asf +++ b/Jenkinsfile-asf @@ -39,9 +39,10 @@ pipeline { } axis { name 'SERVER_VERSION' - values '3.11.17', - '4.0.13', - '5.0-beta1' + values '3.11', + '4.0', + '4.1', + '5.0' } } stages { diff --git a/ci/run-tests.sh b/ci/run-tests.sh index 02a8070e7a9..5268bdd7113 100755 --- a/ci/run-tests.sh +++ b/ci/run-tests.sh @@ -6,5 +6,7 @@ cd $(dirname "$(readlink -f "$0")")/.. printenv | sort mvn -B -V install -DskipTests -Dmaven.javadoc.skip=true jabba use ${TEST_JAVA_VERSION} +# Find out the latest patch version of Cassandra +PATCH_SERVER_VERSION=$(curl -s https://downloads.apache.org/cassandra/ | grep -oP '(?<=href=\")[0-9]+\.[0-9]+\.[0-9]+(?=)' | sort -rV | uniq -w 3 | grep $SERVER_VERSION) printenv | sort -mvn -B -V verify -T 1 -Ptest-jdk-${TEST_JAVA_MAJOR_VERSION} -DtestJavaHome=$(jabba which ${TEST_JAVA_VERSION}) -Dccm.version=${SERVER_VERSION} -Dccm.dse=false -Dmaven.test.failure.ignore=true -Dmaven.javadoc.skip=true +mvn -B -V verify -T 1 -Ptest-jdk-${TEST_JAVA_MAJOR_VERSION} -DtestJavaHome=$(jabba which ${TEST_JAVA_VERSION}) -Dccm.version=${PATCH_SERVER_VERSION} -Dccm.dse=false -Dmaven.test.failure.ignore=true -Dmaven.javadoc.skip=true From 01c6151cd4a3c1dbbd4d1251fb453305385668e1 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Sun, 8 Sep 2024 15:06:53 +0200 Subject: [PATCH 925/979] Refactor integration tests to support multiple C* distributions. 
Test with DataStax HCD 1.0.0 patch by Lukasz Antoniak; reviewed by Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1958 --- Jenkinsfile-datastax | 34 +++- .../datastax/oss/driver/api/core/Version.java | 1 + .../cql/continuous/ContinuousPagingIT.java | 8 +- .../remote/GraphTraversalRemoteITBase.java | 5 +- .../graph/statement/GraphTraversalITBase.java | 5 +- .../schema/DseAggregateMetadataIT.java | 6 +- .../schema/DseFunctionMetadataIT.java | 6 +- .../core/compression/DirectCompressionIT.java | 6 +- .../core/compression/HeapCompressionIT.java | 3 +- .../oss/driver/core/cql/QueryTraceIT.java | 3 +- .../oss/driver/core/metadata/DescribeIT.java | 28 +-- .../driver/core/metadata/NodeMetadataIT.java | 7 +- .../driver/core/metadata/SchemaChangesIT.java | 5 +- .../oss/driver/core/metadata/SchemaIT.java | 8 +- .../oss/driver/mapper/DeleteReactiveIT.java | 5 +- .../oss/driver/mapper/InventoryITBase.java | 11 +- .../src/test/resources/DescribeIT/hcd/1.0.cql | 186 ++++++++++++++++++ osgi-tests/README.md | 4 +- .../osgi/support/CcmStagedReactor.java | 8 +- test-infra/revapi.json | 21 ++ .../driver/api/testinfra/ccm/BaseCcmRule.java | 24 ++- .../driver/api/testinfra/ccm/CcmBridge.java | 83 ++++---- .../DefaultCcmBridgeBuilderCustomizer.java | 8 +- .../ccm/DistributionCassandraVersions.java | 57 ++++++ .../requirement/BackendRequirementRule.java | 2 +- .../testinfra/requirement/BackendType.java | 13 +- .../api/testinfra/session/SessionRule.java | 13 +- 27 files changed, 439 insertions(+), 121 deletions(-) create mode 100644 integration-tests/src/test/resources/DescribeIT/hcd/1.0.cql create mode 100644 test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DistributionCassandraVersions.java diff --git a/Jenkinsfile-datastax b/Jenkinsfile-datastax index 4cc20d79604..af3faafee20 100644 --- a/Jenkinsfile-datastax +++ b/Jenkinsfile-datastax @@ -61,7 +61,7 @@ def initializeEnvironment() { . 
${JABBA_SHELL} jabba which 1.8''', returnStdout: true).trim() - sh label: 'Download Apache CassandraⓇ or DataStax Enterprise',script: '''#!/bin/bash -le + sh label: 'Download Apache CassandraⓇ, DataStax Enterprise or DataStax HCD ',script: '''#!/bin/bash -le . ${JABBA_SHELL} jabba use 1.8 . ${CCM_ENVIRONMENT_SHELL} ${SERVER_VERSION} @@ -75,13 +75,26 @@ CCM_CASSANDRA_VERSION=${DSE_FIXED_VERSION} # maintain for backwards compatibilit CCM_VERSION=${DSE_FIXED_VERSION} CCM_SERVER_TYPE=dse DSE_VERSION=${DSE_FIXED_VERSION} -CCM_IS_DSE=true CCM_BRANCH=${DSE_FIXED_VERSION} DSE_BRANCH=${DSE_FIXED_VERSION} ENVIRONMENT_EOF ''' } + if (env.SERVER_VERSION.split('-')[0] == 'hcd') { + env.HCD_FIXED_VERSION = env.SERVER_VERSION.split('-')[1] + sh label: 'Update environment for DataStax HCD', script: '''#!/bin/bash -le + cat >> ${HOME}/environment.txt << ENVIRONMENT_EOF +CCM_CASSANDRA_VERSION=${HCD_FIXED_VERSION} # maintain for backwards compatibility +CCM_VERSION=${HCD_FIXED_VERSION} +CCM_SERVER_TYPE=hcd +HCD_VERSION=${HCD_FIXED_VERSION} +CCM_BRANCH=${HCD_FIXED_VERSION} +HCD_BRANCH=${HCD_FIXED_VERSION} +ENVIRONMENT_EOF + ''' + } + sh label: 'Display Java and environment information',script: '''#!/bin/bash -le # Load CCM environment variables set -o allexport @@ -144,7 +157,7 @@ def executeTests() { -Dmaven.test.failure.ignore=true \ -Dmaven.javadoc.skip=${SKIP_JAVADOCS} \ -Dccm.version=${CCM_CASSANDRA_VERSION} \ - -Dccm.dse=${CCM_IS_DSE} \ + -Dccm.distribution=${CCM_SERVER_TYPE:cassandra} \ -Dproxy.path=${HOME}/proxy \ ${SERIAL_ITS_ARGUMENT} \ ${ISOLATED_ITS_ARGUMENT} \ @@ -269,6 +282,7 @@ pipeline { 'dse-6.7.17', // Previous DataStax Enterprise 'dse-6.8.30', // Current DataStax Enterprise 'dse-6.9.0', // Current DataStax Enterprise + 'hcd-1.0.0', // Current DataStax HCD 'ALL'], description: '''Apache Cassandra® and DataStax Enterprise server version to use for adhoc BUILD-AND-EXECUTE-TESTS builds @@ -330,6 +344,10 @@ pipeline { + + + +
          dse-6.9.0 DataStax Enterprise v6.9.x
          hcd-1.0.0DataStax HCD v1.0.x
          ''') choice( name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_JABBA_VERSION', @@ -421,9 +439,9 @@ pipeline { H 2 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 4.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 # Every weeknight (Monday - Friday) around 12:00 PM noon ### JDK11 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 ### JDK17 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 """ : "") } @@ -460,7 +478,8 @@ pipeline { values '3.11', // Latest stable Apache CassandraⓇ '4.1', // Development Apache CassandraⓇ 'dse-6.8.30', // Current DataStax Enterprise - 'dse-6.9.0' // Current DataStax Enterprise + 'dse-6.9.0', // Current DataStax Enterprise + 'hcd-1.0.0' // Current DataStax HCD } axis { name 'JABBA_VERSION' @@ -578,7 +597,8 @@ pipeline { 'dse-6.0.18', // Previous DataStax Enterprise 'dse-6.7.17', // Previous DataStax Enterprise 'dse-6.8.30', // Current DataStax Enterprise - 'dse-6.9.0' // Current DataStax Enterprise + 'dse-6.9.0', // Current DataStax Enterprise + 'hcd-1.0.0' // Current DataStax HCD } } when { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java index 4de006da268..52751e02984 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/Version.java +++ 
b/core/src/main/java/com/datastax/oss/driver/api/core/Version.java @@ -48,6 +48,7 @@ public class Version implements Comparable, Serializable { private static final Pattern pattern = Pattern.compile(VERSION_REGEXP); + @NonNull public static final Version V1_0_0 = Objects.requireNonNull(parse("1.0.0")); @NonNull public static final Version V2_1_0 = Objects.requireNonNull(parse("2.1.0")); @NonNull public static final Version V2_2_0 = Objects.requireNonNull(parse("2.2.0")); @NonNull public static final Version V3_0_0 = Objects.requireNonNull(parse("3.0.0")); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java index 24ee5c0373d..45cc84f0719 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/cql/continuous/ContinuousPagingIT.java @@ -46,7 +46,6 @@ import java.time.Duration; import java.util.Collections; import java.util.Iterator; -import java.util.Objects; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; @@ -281,11 +280,8 @@ public void prepared_statement_paging_should_be_resilient_to_schema_change() { // dropped. Row row = it.next(); assertThat(row.getString("k")).isNotNull(); - if (ccmRule - .getDseVersion() - .orElseThrow(IllegalStateException::new) - .compareTo(Objects.requireNonNull(Version.parse("6.0.0"))) - >= 0) { + if (ccmRule.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.compareTo(Version.parse("6.0.0")) >= 0)) { // DSE 6 only, v should be null here since dropped. // Not reliable for 5.1 since we may have gotten page queued before schema changed. 
assertThat(row.isNull("v")).isTrue(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java index 69949951378..3db8a7d1a12 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/remote/GraphTraversalRemoteITBase.java @@ -30,6 +30,7 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; import com.datastax.oss.driver.api.testinfra.requirement.BackendType; @@ -643,9 +644,9 @@ public void should_allow_use_of_dsl_graph_binary() { */ @Test public void should_return_correct_results_when_bulked() { - Optional dseVersion = ccmRule().getCcmBridge().getDseVersion(); Assumptions.assumeThat( - dseVersion.isPresent() && dseVersion.get().compareTo(Version.parse("5.1.2")) > 0) + CcmBridge.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.compareTo(Version.parse("5.1.2")) > 0)) .isTrue(); List results = graphTraversalSource().E().label().barrier().toList(); diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java index 98d9ccf1b80..5bcb01bc165 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java +++ 
b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/graph/statement/GraphTraversalITBase.java @@ -36,7 +36,9 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.core.servererrors.InvalidQueryException; import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import java.util.List; @@ -598,7 +600,8 @@ public void should_allow_use_of_dsl_graph_binary() throws Exception { @Test public void should_return_correct_results_when_bulked() { Assumptions.assumeThat( - ccmRule().getCcmBridge().getDseVersion().get().compareTo(Version.parse("5.1.2")) > 0) + CcmBridge.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.compareTo(Version.parse("5.1.2")) > 0)) .isTrue(); GraphResultSet rs = diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java index b0e989e86a3..4c899fa5e63 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseAggregateMetadataIT.java @@ -106,9 +106,9 @@ public void should_parse_aggregate_with_deterministic() { } private static boolean isDse6OrHigher() { - assumeThat(CCM_RULE.getDseVersion()) + assumeThat(CCM_RULE.isDistributionOf(BackendType.DSE)) .describedAs("DSE required for DseFunctionMetadata tests") - .isPresent(); - return CCM_RULE.getDseVersion().get().compareTo(DSE_6_0_0) >= 0; + .isTrue(); + return 
CCM_RULE.getDistributionVersion().compareTo(DSE_6_0_0) >= 0; } } diff --git a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java index 53e2d1be8f8..53559a66b1b 100644 --- a/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/dse/driver/api/core/metadata/schema/DseFunctionMetadataIT.java @@ -233,9 +233,9 @@ public void should_parse_function_with_deterministic_and_monotonic_on() { } private static boolean isDse6OrHigher() { - assumeThat(CCM_RULE.getDseVersion()) + assumeThat(CCM_RULE.isDistributionOf(BackendType.DSE)) .describedAs("DSE required for DseFunctionMetadata tests") - .isPresent(); - return CCM_RULE.getDseVersion().get().compareTo(DSE_6_0_0) >= 0; + .isTrue(); + return CCM_RULE.getDistributionVersion().compareTo(DSE_6_0_0) >= 0; } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java index 51f71f85b5c..3dad08f4de6 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/DirectCompressionIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ -75,8 +76,9 @@ public static void setup() { public void 
should_execute_queries_with_snappy_compression() throws Exception { Assume.assumeTrue( "Snappy is not supported in OSS C* 4.0+ with protocol v5", - CCM_RULE.getDseVersion().isPresent() - || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); + !CCM_RULE.isDistributionOf(BackendType.HCD) + && (CCM_RULE.isDistributionOf(BackendType.DSE) + || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0)); createAndCheckCluster("snappy"); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java index 466a9d87ac3..a14c3b29b21 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/compression/HeapCompressionIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.cql.Row; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.IsolatedTests; @@ -79,7 +80,7 @@ public static void setup() { public void should_execute_queries_with_snappy_compression() throws Exception { Assume.assumeTrue( "Snappy is not supported in OSS C* 4.0+ with protocol v5", - CCM_RULE.getDseVersion().isPresent() + CCM_RULE.isDistributionOf(BackendType.DSE) || CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) < 0); createAndCheckCluster("snappy"); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java index f4ac85d6629..37a600efbc4 
100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/QueryTraceIT.java @@ -28,6 +28,7 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.categories.ParallelizableTests; import java.net.InetAddress; @@ -82,7 +83,7 @@ public void should_fetch_trace_when_tracing_enabled() { InetAddress nodeAddress = ((InetSocketAddress) contactPoint.resolve()).getAddress(); boolean expectPorts = CCM_RULE.getCassandraVersion().nextStable().compareTo(Version.V4_0_0) >= 0 - && !CCM_RULE.getDseVersion().isPresent(); + && !CCM_RULE.isDistributionOf(BackendType.DSE); QueryTrace queryTrace = executionInfo.getQueryTrace(); assertThat(queryTrace.getTracingId()).isEqualTo(executionInfo.getTracingId()); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java index 9fbf5e355eb..4d6c2a7a3b1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/DescribeIT.java @@ -29,6 +29,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.datastax.oss.driver.categories.ParallelizableTests; @@ 
-37,12 +38,14 @@ import com.datastax.oss.driver.internal.core.metadata.schema.DefaultTableMetadata; import com.datastax.oss.driver.shaded.guava.common.base.Charsets; import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.google.common.io.Files; import java.io.File; import java.io.IOException; import java.net.URL; import java.time.Duration; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import org.junit.BeforeClass; @@ -79,17 +82,23 @@ public class DescribeIT { Splitter.on(Pattern.compile(";\n")).omitEmptyStrings(); private static Version serverVersion; - private static boolean isDse; + + private static final Map scriptFileForBackend = + ImmutableMap.builder() + .put(BackendType.CASSANDRA, "DescribeIT/oss") + .put(BackendType.DSE, "DescribeIT/dse") + .put(BackendType.HCD, "DescribeIT/hcd") + .build(); private static File scriptFile; private static String scriptContents; @BeforeClass public static void setup() { - Optional dseVersion = CCM_RULE.getDseVersion(); - isDse = dseVersion.isPresent(); serverVersion = - isDse ? dseVersion.get().nextStable() : CCM_RULE.getCassandraVersion().nextStable(); + CCM_RULE.isDistributionOf(BackendType.CASSANDRA) + ? CCM_RULE.getCassandraVersion().nextStable() + : CCM_RULE.getDistributionVersion().nextStable(); scriptFile = getScriptFile(); assertThat(scriptFile).exists(); @@ -114,12 +123,12 @@ public void describe_output_should_match_creation_script() throws Exception { "Describe output doesn't match create statements, " + "maybe you need to add a new script in integration-tests/src/test/resources. " + "Server version = %s %s, used script = %s", - isDse ? 
"DSE" : "Cassandra", serverVersion, scriptFile) + CCM_RULE.getDistribution(), serverVersion, scriptFile) .isEqualTo(scriptContents); } private boolean atLeastVersion(Version dseVersion, Version ossVersion) { - Version comparison = isDse ? dseVersion : ossVersion; + Version comparison = CCM_RULE.isDistributionOf(BackendType.DSE) ? dseVersion : ossVersion; return serverVersion.compareTo(comparison) >= 0; } @@ -138,11 +147,9 @@ public void keyspace_metadata_should_be_serializable() throws Exception { assertThat(ks.getUserDefinedTypes()).isNotEmpty(); assertThat(ks.getTables()).isNotEmpty(); if (atLeastVersion(Version.V5_0_0, Version.V3_0_0)) { - assertThat(ks.getViews()).isNotEmpty(); } if (atLeastVersion(Version.V5_0_0, Version.V2_2_0)) { - assertThat(ks.getFunctions()).isNotEmpty(); assertThat(ks.getAggregates()).isNotEmpty(); } @@ -177,7 +184,7 @@ private static File getScriptFile() { logbackTestUrl); } File resourcesDir = new File(logbackTestUrl.getFile()).getParentFile(); - File scriptsDir = new File(resourcesDir, isDse ? "DescribeIT/dse" : "DescribeIT/oss"); + File scriptsDir = new File(resourcesDir, scriptFileForBackend.get(CCM_RULE.getDistribution())); LOG.debug("Looking for a matching script in directory {}", scriptsDir); File[] candidates = scriptsDir.listFiles(); @@ -204,8 +211,7 @@ private static File getScriptFile() { .as("Could not find create script with version <= %s in %s", serverVersion, scriptsDir) .isNotNull(); - LOG.info( - "Using {} to test against {} {}", bestFile, isDse ? 
"DSE" : "Cassandra", serverVersion); + LOG.info("Using {} to test against {} {}", bestFile, CCM_RULE.getDistribution(), serverVersion); return bestFile; } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java index c7b51c040b5..8f5680ff41a 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/NodeMetadataIT.java @@ -62,8 +62,9 @@ public void should_expose_node_metadata() { assertThat(node.getListenAddress().get().getAddress()).isEqualTo(connectAddress.getAddress()); assertThat(node.getDatacenter()).isEqualTo("dc1"); assertThat(node.getRack()).isEqualTo("r1"); - if (!CcmBridge.DSE_ENABLEMENT) { - // CcmBridge does not report accurate C* versions for DSE, only approximated values + if (CcmBridge.isDistributionOf(BackendType.CASSANDRA)) { + // CcmBridge does not report accurate C* versions for other distributions (e.g. 
DSE), only + // approximated values assertThat(node.getCassandraVersion()).isEqualTo(ccmRule.getCassandraVersion()); } assertThat(node.getState()).isSameAs(NodeState.UP); @@ -106,7 +107,7 @@ public void should_expose_dse_node_properties() { DseNodeProperties.DSE_WORKLOADS, DseNodeProperties.SERVER_ID); assertThat(node.getExtras().get(DseNodeProperties.DSE_VERSION)) - .isEqualTo(ccmRule.getDseVersion().get()); + .isEqualTo(ccmRule.getDistributionVersion()); assertThat(node.getExtras().get(DseNodeProperties.SERVER_ID)).isInstanceOf(String.class); assertThat(node.getExtras().get(DseNodeProperties.DSE_WORKLOADS)).isInstanceOf(Set.class); } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java index 6f1dcb791c6..85fcfc02cdb 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaChangesIT.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; import com.google.common.collect.ImmutableList; @@ -54,8 +55,8 @@ public class SchemaChangesIT { static { CustomCcmRule.Builder builder = CustomCcmRule.builder(); - if (!CcmBridge.DSE_ENABLEMENT - && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { + if (!CcmBridge.isDistributionOf( + BackendType.DSE, (dist, cass) -> cass.nextStable().compareTo(Version.V4_0_0) >= 0)) { builder.withCassandraConfiguration("enable_materialized_views", true); } CCM_RULE = builder.build(); diff --git 
a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java index 805b2d970cc..df5571974c1 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/metadata/SchemaIT.java @@ -335,11 +335,9 @@ public void should_exclude_virtual_keyspaces_from_token_map() { private void skipIfDse60() { // Special case: DSE 6.0 reports C* 4.0 but does not support virtual tables - if (ccmRule.getDseVersion().isPresent()) { - Version dseVersion = ccmRule.getDseVersion().get(); - if (dseVersion.compareTo(DSE_MIN_VIRTUAL_TABLES) < 0) { - throw new AssumptionViolatedException("DSE 6.0 does not support virtual tables"); - } + if (!ccmRule.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.compareTo(DSE_MIN_VIRTUAL_TABLES) >= 0)) { + throw new AssumptionViolatedException("DSE 6.0 does not support virtual tables"); } } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java index 3a418c73653..2eb898021ba 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/DeleteReactiveIT.java @@ -37,6 +37,7 @@ import com.datastax.oss.driver.api.mapper.entity.saving.NullSavingStrategy; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.session.SessionRule; import io.reactivex.Flowable; import java.util.UUID; @@ -57,8 +58,8 @@ public class DeleteReactiveIT extends InventoryITBase { @ClassRule public static TestRule chain = 
RuleChain.outerRule(ccmRule).around(sessionRule); private static CustomCcmRule.Builder configureCcm(CustomCcmRule.Builder builder) { - if (!CcmBridge.DSE_ENABLEMENT - && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { + if (!CcmBridge.isDistributionOf( + BackendType.DSE, (dist, cass) -> cass.nextStable().compareTo(Version.V4_0_0) >= 0)) { builder.withCassandraConfiguration("enable_sasi_indexes", true); } return builder; diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java index 9495003ae49..1bd899e4541 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/mapper/InventoryITBase.java @@ -23,10 +23,10 @@ import com.datastax.oss.driver.api.mapper.annotations.Entity; import com.datastax.oss.driver.api.mapper.annotations.PartitionKey; import com.datastax.oss.driver.api.testinfra.ccm.BaseCcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.UUID; /** Factors common code for mapper tests that rely on a simple inventory model. 
*/ @@ -93,13 +93,14 @@ protected static List createStatements(BaseCcmRule ccmRule, boolean requ return builder.build(); } - private static final Version MINIMUM_SASI_VERSION = Version.parse("3.4.0"); - private static final Version BROKEN_SASI_VERSION = Version.parse("6.8.0"); + private static final Version MINIMUM_SASI_VERSION = + Objects.requireNonNull(Version.parse("3.4.0")); + private static final Version BROKEN_SASI_VERSION = Objects.requireNonNull(Version.parse("6.8.0")); protected static boolean isSasiBroken(BaseCcmRule ccmRule) { - Optional dseVersion = ccmRule.getDseVersion(); // creating SASI indexes is broken in DSE 6.8.0 - return dseVersion.isPresent() && dseVersion.get().compareTo(BROKEN_SASI_VERSION) == 0; + return ccmRule.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.compareTo(BROKEN_SASI_VERSION) == 0); } protected static boolean supportsSASI(BaseCcmRule ccmRule) { diff --git a/integration-tests/src/test/resources/DescribeIT/hcd/1.0.cql b/integration-tests/src/test/resources/DescribeIT/hcd/1.0.cql new file mode 100644 index 00000000000..abc70728206 --- /dev/null +++ b/integration-tests/src/test/resources/DescribeIT/hcd/1.0.cql @@ -0,0 +1,186 @@ + +CREATE KEYSPACE ks_0 WITH replication = { 'class' : 'org.apache.cassandra.locator.SimpleStrategy', 'replication_factor': '1' } AND durable_writes = true; + +CREATE TYPE ks_0.btype ( + a text +); + +CREATE TYPE ks_0.xtype ( + d text +); + +CREATE TYPE ks_0.ztype ( + c text, + a int +); + +CREATE TYPE ks_0.ctype ( + z frozen, + x frozen +); + +CREATE TYPE ks_0.atype ( + c frozen +); + +CREATE TABLE ks_0.cyclist_mv ( + cid uuid, + age int, + birthday date, + country text, + name text, + PRIMARY KEY (cid) +) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND 
compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE INDEX cyclist_by_country ON ks_0.cyclist_mv (country); + +CREATE TABLE ks_0.rank_by_year_and_name ( + race_year int, + race_name text, + rank int, + cyclist_name text, + PRIMARY KEY ((race_year, race_name), rank) +) WITH CLUSTERING ORDER BY (rank DESC) + AND additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE INDEX rrank ON ks_0.rank_by_year_and_name (rank); + +CREATE INDEX ryear ON ks_0.rank_by_year_and_name (race_year); + +CREATE TABLE ks_0.ztable ( + zkey text, + a frozen, + PRIMARY KEY (zkey) +) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.LeveledCompactionStrategy','max_threshold':'32','min_threshold':'4','sstable_size_in_mb':'95'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND default_time_to_live = 0 + AND 
extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_a_age AS +SELECT * FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = 'simple view' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE MATERIALIZED VIEW ks_0.cyclist_by_r_age AS +SELECT + age, + cid, + birthday, + country, + name +FROM ks_0.cyclist_mv +WHERE age IS NOT NULL 
AND cid IS NOT NULL +PRIMARY KEY (age, cid) WITH additional_write_policy = '99p' + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys':'ALL','rows_per_partition':'NONE'} + AND comment = '' + AND compaction = {'class':'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy','max_threshold':'32','min_threshold':'4'} + AND compression = {'chunk_length_in_kb':'64','class':'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND extensions = {} + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair = 'BLOCKING' + AND speculative_retry = '99p'; + +CREATE FUNCTION ks_0.avgfinal(state tuple) + CALLED ON NULL INPUT + RETURNS double + LANGUAGE java + AS 'double r = 0; if (state.getInt(0) == 0) return null; r = state.getLong(1); r /= state.getInt(0); return Double.valueOf(r);'; + +CREATE FUNCTION ks_0.avgstate(state tuple,val int) + CALLED ON NULL INPUT + RETURNS tuple + LANGUAGE java + AS 'if (val !=null) { state.setInt(0, state.getInt(0)+1); state.setLong(1, state.getLong(1)+val.intValue()); } return state;'; + +CREATE AGGREGATE ks_0.average(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); + +CREATE AGGREGATE ks_0.mean(int) + SFUNC avgstate + STYPE tuple + FINALFUNC avgfinal + INITCOND (0,0); diff --git a/osgi-tests/README.md b/osgi-tests/README.md index 89ad0ba27c8..1ca6211d427 100644 --- a/osgi-tests/README.md +++ b/osgi-tests/README.md @@ -53,8 +53,8 @@ OSGi ones, you can do so as follows: You can pass the following system properties to your tests: 1. `ccm.version`: the CCM version to use -2. `ccm.dse`: whether to use DSE -3. `osgi.debug`: whether to enable remote debugging of the OSGi container (see +2. `ccm.distribution`: choose target backend type (e.g. DSE, HCD) +3. `osgi.debug`: whether to enable remote debugging of the OSGi container (see below). 
## Debugging OSGi tests diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java index 8b140930870..ce4d9095361 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/CcmStagedReactor.java @@ -19,6 +19,7 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import java.util.List; import java.util.Objects; import net.jcip.annotations.GuardedBy; @@ -38,7 +39,7 @@ public class CcmStagedReactor extends AllConfinedStagedReactor { static { CcmBridge.Builder builder = CcmBridge.builder().withNodes(1); - if (CcmBridge.DSE_ENABLEMENT && CcmBridge.VERSION.compareTo(DSE_5_0) >= 0) { + if (CcmBridge.isDistributionOf(BackendType.DSE, (dist, cass) -> dist.compareTo(DSE_5_0) >= 0)) { builder.withDseWorkloads("graph"); } CCM_BRIDGE = builder.build(); @@ -54,11 +55,10 @@ public CcmStagedReactor(List containers, List m @Override public synchronized void beforeSuite() { if (!running) { - boolean dse = CCM_BRIDGE.getDseVersion().isPresent(); LOGGER.info( "Starting CCM, running {} version {}", - dse ? "DSE" : "Cassandra", - dse ? 
CCM_BRIDGE.getDseVersion().get() : CCM_BRIDGE.getCassandraVersion()); + CcmBridge.DISTRIBUTION, + CcmBridge.getDistributionVersion()); CCM_BRIDGE.create(); CCM_BRIDGE.start(); LOGGER.info("CCM started"); diff --git a/test-infra/revapi.json b/test-infra/revapi.json index 3cfbc8b5337..c75a98cb4af 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -171,6 +171,27 @@ "code": "java.method.removed", "old": "method void com.datastax.oss.driver.api.testinfra.ccm.CcmRule::reloadCore(int, java.lang.String, java.lang.String, boolean)", "justification": "Modifying the state of a globally shared CCM instance is dangerous" + }, + { + "code": "java.method.removed", + "old": "method java.util.Optional com.datastax.oss.driver.api.testinfra.ccm.BaseCcmRule::getDseVersion()", + "justification": "Method has been replaced with more generic isDistributionOf(BackendType) and getDistributionVersion()" + }, + { + "code": "java.field.removed", + "old": "field com.datastax.oss.driver.api.testinfra.ccm.CcmBridge.DSE_ENABLEMENT", + "justification": "Field has been replaced with more generic isDistributionOf(BackendType) and getDistributionVersion()" + }, + { + "code": "java.method.nowStatic", + "old": "method com.datastax.oss.driver.api.core.Version com.datastax.oss.driver.api.testinfra.ccm.CcmBridge::getCassandraVersion()", + "new": "method com.datastax.oss.driver.api.core.Version com.datastax.oss.driver.api.testinfra.ccm.CcmBridge::getCassandraVersion()", + "justification": "Previous and current implementations do not rely on non-static fields" + }, + { + "code": "java.method.removed", + "old": "method java.util.Optional com.datastax.oss.driver.api.testinfra.ccm.CcmBridge::getDseVersion()", + "justification": "Method has been replaced with more generic isDistributionOf(BackendType) and getDistributionVersion()" } ] } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java 
b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java index 65210acd2a2..882cd55b948 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/BaseCcmRule.java @@ -22,7 +22,7 @@ import com.datastax.oss.driver.api.core.Version; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirementRule; -import java.util.Optional; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import org.junit.AssumptionViolatedException; import org.junit.runner.Description; import org.junit.runners.model.Statement; @@ -72,17 +72,29 @@ public void evaluate() { } } - public Version getCassandraVersion() { - return ccmBridge.getCassandraVersion(); + public BackendType getDistribution() { + return CcmBridge.DISTRIBUTION; + } + + public boolean isDistributionOf(BackendType type) { + return CcmBridge.isDistributionOf(type); + } + + public boolean isDistributionOf(BackendType type, CcmBridge.VersionComparator comparator) { + return CcmBridge.isDistributionOf(type, comparator); + } + + public Version getDistributionVersion() { + return CcmBridge.getDistributionVersion(); } - public Optional getDseVersion() { - return ccmBridge.getDseVersion(); + public Version getCassandraVersion() { + return CcmBridge.getCassandraVersion(); } @Override public ProtocolVersion getHighestProtocolVersion() { - if (ccmBridge.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { + if (CcmBridge.getCassandraVersion().compareTo(Version.V2_2_0) >= 0) { return DefaultProtocolVersion.V4; } else { return DefaultProtocolVersion.V3; diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java index 5b0c114a5fe..f0ce6bc5b0e 100644 --- 
a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/CcmBridge.java @@ -18,6 +18,7 @@ package com.datastax.oss.driver.api.testinfra.ccm; import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.driver.shaded.guava.common.io.Resources; import java.io.File; @@ -54,6 +55,9 @@ public class CcmBridge implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(CcmBridge.class); + public static BackendType DISTRIBUTION = + BackendType.valueOf( + System.getProperty("ccm.distribution", BackendType.CASSANDRA.name()).toUpperCase()); public static final Version VERSION = Objects.requireNonNull(Version.parse(System.getProperty("ccm.version", "4.0.0"))); @@ -61,8 +65,6 @@ public class CcmBridge implements AutoCloseable { public static final String BRANCH = System.getProperty("ccm.branch"); - public static final Boolean DSE_ENABLEMENT = Boolean.getBoolean("ccm.dse"); - public static final String CLUSTER_NAME = "ccm_1"; public static final String DEFAULT_CLIENT_TRUSTSTORE_PASSWORD = "fakePasswordForTests"; @@ -101,22 +103,21 @@ public class CcmBridge implements AutoCloseable { createTempStore(DEFAULT_SERVER_LOCALHOST_KEYSTORE_PATH); // major DSE versions - private static final Version V6_0_0 = Version.parse("6.0.0"); - private static final Version V5_1_0 = Version.parse("5.1.0"); - private static final Version V5_0_0 = Version.parse("5.0.0"); + public static final Version V6_0_0 = Version.parse("6.0.0"); + public static final Version V5_1_0 = Version.parse("5.1.0"); + public static final Version V5_0_0 = Version.parse("5.0.0"); // mapped C* versions from DSE versions - private static final Version V4_0_0 = Version.parse("4.0.0"); - private static final Version V3_10 = Version.parse("3.10"); - private 
static final Version V3_0_15 = Version.parse("3.0.15"); - private static final Version V2_1_19 = Version.parse("2.1.19"); + public static final Version V4_0_0 = Version.parse("4.0.0"); + public static final Version V3_10 = Version.parse("3.10"); + public static final Version V3_0_15 = Version.parse("3.0.15"); + public static final Version V2_1_19 = Version.parse("2.1.19"); + + // mapped C* versions from HCD versions + public static final Version V4_0_11 = Version.parse("4.0.11"); static { - if (DSE_ENABLEMENT) { - LOG.info("CCM Bridge configured with DSE version {}", VERSION); - } else { - LOG.info("CCM Bridge configured with Apache Cassandra version {}", VERSION); - } + LOG.info("CCM Bridge configured with {} version {}", DISTRIBUTION.getFriendlyName(), VERSION); } private final int[] nodes; @@ -175,25 +176,24 @@ private static boolean isWindows() { return System.getProperty("os.name", "").toLowerCase(Locale.US).contains("win"); } - public Optional getDseVersion() { - return DSE_ENABLEMENT ? 
Optional.of(VERSION) : Optional.empty(); + public static boolean isDistributionOf(BackendType type) { + return DISTRIBUTION == type; + } + + public static boolean isDistributionOf(BackendType type, VersionComparator comparator) { + return isDistributionOf(type) + && comparator.accept(getDistributionVersion(), getCassandraVersion()); + } + + public static Version getDistributionVersion() { + return VERSION; } - public Version getCassandraVersion() { - if (!DSE_ENABLEMENT) { + public static Version getCassandraVersion() { + if (isDistributionOf(BackendType.CASSANDRA)) { return VERSION; - } else { - Version stableVersion = VERSION.nextStable(); - if (stableVersion.compareTo(V6_0_0) >= 0) { - return V4_0_0; - } else if (stableVersion.compareTo(V5_1_0) >= 0) { - return V3_10; - } else if (stableVersion.compareTo(V5_0_0) >= 0) { - return V3_0_15; - } else { - return V2_1_19; - } } + return DistributionCassandraVersions.getCassandraVersion(DISTRIBUTION, VERSION); } private String getCcmVersionString(Version version) { @@ -225,9 +225,7 @@ public void create() { } else { createOptions.add("-v " + getCcmVersionString(VERSION)); } - if (DSE_ENABLEMENT) { - createOptions.add("--dse"); - } + createOptions.addAll(Arrays.asList(DISTRIBUTION.getCcmOptions())); execute( "create", CLUSTER_NAME, @@ -252,7 +250,7 @@ public void create() { // If we're dealing with anything more recent than 2.2 explicitly enable UDF... but run it // through our conversion process to make // sure more recent versions don't have a problem. 
- if (cassandraVersion.compareTo(Version.V2_2_0) >= 0) { + if (cassandraVersion.compareTo(Version.V2_2_0) >= 0 || isDistributionOf(BackendType.HCD)) { String originalKey = "enable_user_defined_functions"; Object originalValue = "true"; execute( @@ -264,7 +262,7 @@ public void create() { } // Note that we aren't performing any substitution on DSE key/value props (at least for now) - if (DSE_ENABLEMENT) { + if (isDistributionOf(BackendType.DSE)) { for (Map.Entry conf : dseConfiguration.entrySet()) { execute("updatedseconf", String.format("%s:%s", conf.getKey(), conf.getValue())); } @@ -338,11 +336,10 @@ public void stop(int n) { } public void add(int n, String dc) { - if (getDseVersion().isPresent()) { - execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n, "--dse"); - } else { - execute("add", "-i", ipPrefix + n, "-d", dc, "node" + n); - } + List addOptions = new ArrayList<>(); + addOptions.addAll(Arrays.asList("add", "-i", ipPrefix + n, "-d", dc, "node" + n)); + addOptions.addAll(Arrays.asList(DISTRIBUTION.getCcmOptions())); + execute(addOptions.toArray(new String[0])); start(n); } @@ -475,11 +472,11 @@ private Optional overrideJvmVersionForDseWorkloads() { return Optional.empty(); } - if (!DSE_ENABLEMENT || !getDseVersion().isPresent()) { + if (!isDistributionOf(BackendType.DSE)) { return Optional.empty(); } - if (getDseVersion().get().compareTo(Version.V6_9_0) >= 0) { + if (getDistributionVersion().compareTo(Version.V6_9_0) >= 0) { // DSE 6.9.0 supports only JVM 11 onwards (also with graph workload) return Optional.empty(); } @@ -641,4 +638,8 @@ public CcmBridge build() { dseWorkloads); } } + + public interface VersionComparator { + boolean accept(Version distribution, Version cassandra); + } } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java index ac2507cec53..0819f785446 100644 --- 
a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DefaultCcmBridgeBuilderCustomizer.java @@ -18,18 +18,20 @@ package com.datastax.oss.driver.api.testinfra.ccm; import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; /** @see CcmRule */ @SuppressWarnings("unused") public class DefaultCcmBridgeBuilderCustomizer { public static CcmBridge.Builder configureBuilder(CcmBridge.Builder builder) { - if (!CcmBridge.DSE_ENABLEMENT - && CcmBridge.VERSION.nextStable().compareTo(Version.V4_0_0) >= 0) { + if (!CcmBridge.isDistributionOf( + BackendType.DSE, (dist, cass) -> dist.nextStable().compareTo(Version.V4_0_0) >= 0) + || CcmBridge.isDistributionOf(BackendType.HCD)) { builder.withCassandraConfiguration("enable_materialized_views", true); builder.withCassandraConfiguration("enable_sasi_indexes", true); } - if (CcmBridge.VERSION.nextStable().compareTo(Version.V3_0_0) >= 0) { + if (CcmBridge.getDistributionVersion().nextStable().compareTo(Version.V3_0_0) >= 0) { builder.withJvmArgs("-Dcassandra.superuser_setup_delay_ms=0"); builder.withJvmArgs("-Dcassandra.skip_wait_for_gossip_to_settle=0"); builder.withCassandraConfiguration("num_tokens", "1"); diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DistributionCassandraVersions.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DistributionCassandraVersions.java new file mode 100644 index 00000000000..9f7634d1b37 --- /dev/null +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/ccm/DistributionCassandraVersions.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.testinfra.ccm; + +import com.datastax.oss.driver.api.core.Version; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSortedMap; +import java.util.HashMap; +import java.util.Map; + +/** Defines mapping of various distributions to shipped Apache Cassandra version. */ +public abstract class DistributionCassandraVersions { + private static final Map> mappings = + new HashMap<>(); + + static { + { + // DSE + ImmutableSortedMap dse = + ImmutableSortedMap.of( + Version.V1_0_0, CcmBridge.V2_1_19, + Version.V5_0_0, CcmBridge.V3_0_15, + CcmBridge.V5_1_0, CcmBridge.V3_10, + CcmBridge.V6_0_0, CcmBridge.V4_0_0); + mappings.put(BackendType.DSE, dse); + } + { + // HCD + ImmutableSortedMap hcd = + ImmutableSortedMap.of(Version.V1_0_0, CcmBridge.V4_0_11); + mappings.put(BackendType.HCD, hcd); + } + } + + public static Version getCassandraVersion(BackendType type, Version version) { + ImmutableSortedMap mapping = mappings.get(type); + if (mapping == null) { + return null; + } + return mapping.floorEntry(version).getValue(); + } +} diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java index 6c59e216602..343861571e0 
100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendRequirementRule.java @@ -41,7 +41,7 @@ public void evaluate() { } protected static BackendType getBackendType() { - return CcmBridge.DSE_ENABLEMENT ? BackendType.DSE : BackendType.CASSANDRA; + return CcmBridge.DISTRIBUTION; } protected static Version getVersion() { diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java index 1683dd86136..e0058ca324a 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/requirement/BackendType.java @@ -18,9 +18,9 @@ package com.datastax.oss.driver.api.testinfra.requirement; public enum BackendType { - CASSANDRA("C*"), - DSE("Dse"), - ; + CASSANDRA("Apache Cassandra"), + DSE("DSE"), + HCD("HCD"); final String friendlyName; @@ -31,4 +31,11 @@ public enum BackendType { public String getFriendlyName() { return friendlyName; } + + public String[] getCcmOptions() { + if (this == CASSANDRA) { + return new String[0]; + } + return new String[] {"--" + name().toLowerCase()}; + } } diff --git a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java index 5396e5c6cc6..3b792374769 100644 --- a/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java +++ b/test-infra/src/main/java/com/datastax/oss/driver/api/testinfra/session/SessionRule.java @@ -29,10 +29,11 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.testinfra.CassandraResourceRule; import 
com.datastax.oss.driver.api.testinfra.ccm.BaseCcmRule; +import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.SchemaChangeSynchronizer; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; import com.datastax.oss.driver.api.testinfra.simulacron.SimulacronRule; import java.util.Objects; -import java.util.Optional; import org.junit.rules.ExternalResource; /** @@ -154,14 +155,12 @@ protected void before() { Statement.SYNC); } if (graphName != null) { - Optional dseVersion = - (cassandraResource instanceof BaseCcmRule) - ? ((BaseCcmRule) cassandraResource).getDseVersion() - : Optional.empty(); - if (!dseVersion.isPresent()) { + BaseCcmRule rule = + (cassandraResource instanceof BaseCcmRule) ? ((BaseCcmRule) cassandraResource) : null; + if (rule == null || !CcmBridge.isDistributionOf(BackendType.DSE)) { throw new IllegalArgumentException("DseSessionRule should work with DSE."); } - if (dseVersion.get().compareTo(V6_8_0) >= 0) { + if (rule.getDistributionVersion().compareTo(V6_8_0) >= 0) { session() .execute( ScriptGraphStatement.newInstance( From e84378681aecdbb87696dc4b53cb6fd336c82b6b Mon Sep 17 00:00:00 2001 From: Stefan Miklosovic Date: Wed, 23 Oct 2024 22:23:39 +0200 Subject: [PATCH 926/979] Fix TableMetadata.describe() when containing a vector column patch by Stefan Miklosovic; reviewed by Bret McGuire for CASSJAVA-2 --- .../internal/core/type/DefaultVectorType.java | 2 +- .../metadata/schema/TableMetadataTest.java | 67 +++++++++++++++++++ 2 files changed, 68 insertions(+), 1 deletion(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/TableMetadataTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java index 5915adc2fb3..c9180d44edc 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java @@ -60,7 +60,7 @@ public String getClassName() { @NonNull @Override public String asCql(boolean includeFrozen, boolean pretty) { - return String.format("'%s(%d)'", getClassName(), getDimensions()); + return String.format("vector<%s, %d>", getElementType().asCql(true, false), getDimensions()); } /* ============== General class implementation ============== */ diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/TableMetadataTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/TableMetadataTest.java new file mode 100644 index 00000000000..03d63230992 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/schema/TableMetadataTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.metadata.schema; + +import static com.datastax.oss.driver.api.core.CqlIdentifier.fromCql; +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.metadata.schema.TableMetadata; +import com.datastax.oss.driver.internal.core.type.DefaultVectorType; +import com.datastax.oss.driver.internal.core.type.PrimitiveType; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import com.datastax.oss.protocol.internal.ProtocolConstants.DataType; +import com.google.common.collect.ImmutableList; +import java.util.UUID; +import org.junit.Test; + +public class TableMetadataTest { + + /** Tests CASSJAVA-2 */ + @Test + public void should_describe_table_with_vector_correctly() { + TableMetadata tableMetadata = + new DefaultTableMetadata( + fromCql("ks"), + fromCql("tb"), + UUID.randomUUID(), + false, + false, + ImmutableList.of( + new DefaultColumnMetadata( + fromCql("ks"), + fromCql("ks"), + fromCql("tb"), + new PrimitiveType(DataType.ASCII), + false)), + ImmutableMap.of(), + ImmutableMap.of( + fromCql("a"), + new DefaultColumnMetadata( + fromCql("ks"), + fromCql("ks"), + fromCql("tb"), + new DefaultVectorType(new PrimitiveType(DataType.INT), 3), + false)), + ImmutableMap.of(), + ImmutableMap.of()); + + String describe1 = tableMetadata.describe(true); + + assertThat(describe1).contains("vector,"); + } +} From 62cea5a5f34d5cc6ef335f99829d0ae3cf9cf396 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 6 Nov 2024 17:47:30 -0600 Subject: [PATCH 927/979] Move Apache Cassandra 5.x off of beta1 and remove some older Apache Cassandra versions. 
patch by Bret McGuire; reviewed by Bret McGuire for CASSJAVA-54 --- Jenkinsfile-datastax | 52 +++++++++++++------------------------------- 1 file changed, 15 insertions(+), 37 deletions(-) diff --git a/Jenkinsfile-datastax b/Jenkinsfile-datastax index af3faafee20..cd48f325a29 100644 --- a/Jenkinsfile-datastax +++ b/Jenkinsfile-datastax @@ -268,13 +268,9 @@ pipeline { ''') choice( name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_SERVER_VERSION', - choices: ['2.1', // Legacy Apache CassandraⓇ - '2.2', // Legacy Apache CassandraⓇ - '3.0', // Previous Apache CassandraⓇ - '3.11', // Previous Apache CassandraⓇ - '4.0', // Previous Apache CassandraⓇ - '4.1', // Current Apache CassandraⓇ - '5.0-beta1', // Development Apache CassandraⓇ + choices: ['4.0', // Previous Apache CassandraⓇ + '4.1', // Previous Apache CassandraⓇ + '5.0', // Current Apache CassandraⓇ 'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Long Term Support DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise @@ -292,22 +288,6 @@ pipeline { Choice Description - - 2.1 - Apache Cassandra® v2.1.x - - - 2.2 - Apache Cassandra® v2.2.x - - - 3.0 - Apache Cassandra® v3.0.x - - - 3.11 - Apache Cassandra® v3.11.x - 4.0 Apache Cassandra® v4.0.x @@ -316,6 +296,10 @@ pipeline { 4.1 Apache Cassandra® v4.1.x + + 5.0 + Apache Cassandra® v5.0.x + dse-4.8.16 DataStax Enterprise v4.8.x (END OF SERVICE LIFE) @@ -435,13 +419,10 @@ pipeline { // schedules only run against release branches (i.e. 3.x, 4.x, 4.5.x, etc.) parameterizedCron(branchPatternCron().matcher(env.BRANCH_NAME).matches() ? 
""" # Every weekend (Saturday, Sunday) around 2:00 AM - ### JDK8 tests against 2.1, 3.0, 4.0, DSE 4.8, DSE 5.0, DSE 5.1, dse-6.0.18 and DSE 6.7 - H 2 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=2.1 3.0 4.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 + H 2 * * 0 %CI_SCHEDULE=WEEKENDS;CI_SCHEDULE_SERVER_VERSIONS=4.0 4.1 5.0 dse-4.8.16 dse-5.0.15 dse-5.1.35 dse-6.0.18 dse-6.7.17;CI_SCHEDULE_JABBA_VERSION=1.8 # Every weeknight (Monday - Friday) around 12:00 PM noon - ### JDK11 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 - ### JDK17 tests against 3.11, 4.1, 5.0-beta1 and DSE 6.8 - H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=3.11 4.1 5.0-beta1 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=4.1 5.0 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.11 + H 12 * * 1-5 %CI_SCHEDULE=WEEKNIGHTS;CI_SCHEDULE_SERVER_VERSIONS=4.1 5.0 dse-6.8.30 dse-6.9.0 hcd-1.0.0;CI_SCHEDULE_JABBA_VERSION=openjdk@1.17 """ : "") } @@ -475,8 +456,8 @@ pipeline { axes { axis { name 'SERVER_VERSION' - values '3.11', // Latest stable Apache CassandraⓇ - '4.1', // Development Apache CassandraⓇ + values '4.0', // Previous Apache CassandraⓇ + '5.0', // Current Apache CassandraⓇ 'dse-6.8.30', // Current DataStax Enterprise 'dse-6.9.0', // Current DataStax Enterprise 'hcd-1.0.0' // Current DataStax HCD @@ -585,12 +566,9 @@ pipeline { axes { axis { name 'SERVER_VERSION' - values '2.1', // Legacy Apache CassandraⓇ - '3.0', // Previous Apache CassandraⓇ - '3.11', // Previous Apache CassandraⓇ - '4.0', // Previous Apache CassandraⓇ - '4.1', // Current Apache CassandraⓇ - '5.0-beta1', // Development Apache CassandraⓇ + values '4.0', // Previous Apache CassandraⓇ + '4.1', // 
Previous Apache CassandraⓇ + '5.0', // Current Apache CassandraⓇ 'dse-4.8.16', // Previous EOSL DataStax Enterprise 'dse-5.0.15', // Last EOSL DataStax Enterprise 'dse-5.1.35', // Legacy DataStax Enterprise From a322ca265654605123f4d7b889ad736c114f0c7e Mon Sep 17 00:00:00 2001 From: Jeremy Hanna Date: Wed, 27 Nov 2024 12:41:02 -0600 Subject: [PATCH 928/979] Update link to Jira to be CASSJAVA Updating the link to Jira. Previously we had a component in the CASSANDRA Jira project but now we have a project for each driver - in the case of Java, it's CASSJAVA. Added CASSJAVA to .asf.yaml patch by Jeremy Hanna; reviewed by Bret McGuire for CASSJAVA-61 --- .asf.yaml | 1 + README.md | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.asf.yaml b/.asf.yaml index ad58f536398..ac29efed9ff 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -34,3 +34,4 @@ github: projects: false autolink_jira: - CASSANDRA + - CASSJAVA diff --git a/README.md b/README.md index 2a30cb68c9a..b6e1cc337d8 100644 --- a/README.md +++ b/README.md @@ -75,13 +75,13 @@ See the [Cassandra error handling done right blog](https://www.datastax.com/blog * [Manual](manual/) * [API docs] -* Bug tracking: [JIRA]. Make sure to select the "Client/java-driver" component when filing new tickets! 
+* Bug tracking: [JIRA] * [Mailing list] * [Changelog] * [FAQ] [API docs]: https://docs.datastax.com/en/drivers/java/4.17 -[JIRA]: https://issues.apache.org/jira/issues/?jql=project%20%3D%20CASSANDRA%20AND%20component%20%3D%20%22Client%2Fjava-driver%22%20ORDER%20BY%20key%20DESC +[JIRA]: https://issues.apache.org/jira/issues/?jql=project%20%3D%20CASSJAVA%20ORDER%20BY%20key%20DESC [Mailing list]: https://groups.google.com/a/lists.datastax.com/forum/#!forum/java-driver-user [Changelog]: changelog/ [FAQ]: faq/ @@ -108,4 +108,4 @@ Apache Cassandra, Apache, Tomcat, Lucene, Solr, Hadoop, Spark, TinkerPop, and Ca trademarks of the [Apache Software Foundation](http://www.apache.org/) or its subsidiaries in Canada, the United States and/or other countries. -Binary artifacts of this product bundle Java Native Runtime libraries, which is available under the Eclipse Public License version 2.0. \ No newline at end of file +Binary artifacts of this product bundle Java Native Runtime libraries, which is available under the Eclipse Public License version 2.0. 
From 7bc085bdfb337b91255573bc3e130815e280954a Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Wed, 6 Nov 2024 08:31:36 +0100 Subject: [PATCH 929/979] Move DataStax shaded Guava module into Java driver patch by Lukasz Antoniak; reviewed by Alexandre Dutra and Bret McGuire for CASSJAVA-52 --- bom/pom.xml | 10 +- core-shaded/pom.xml | 4 +- core/pom.xml | 4 +- distribution/src/assembly/binary-tarball.xml | 6 +- guava-shaded/pom.xml | 215 ++++++++++++++++++ guava-shaded/src/assembly/shaded-jar.xml | 48 ++++ ...graphicalComparatorHolderSubstitution.java | 39 ++++ .../UnsafeComparatorSubstitution.java | 25 ++ guava-shaded/src/main/javadoc/README.txt | 2 + manual/core/integration/README.md | 2 +- mapper-processor/pom.xml | 4 +- osgi-tests/pom.xml | 4 +- .../internal/osgi/support/BundleOptions.java | 2 +- pom.xml | 3 +- query-builder/pom.xml | 4 +- 15 files changed, 351 insertions(+), 21 deletions(-) create mode 100644 guava-shaded/pom.xml create mode 100644 guava-shaded/src/assembly/shaded-jar.xml create mode 100644 guava-shaded/src/main/java/com/google/common/primitives/LexicographicalComparatorHolderSubstitution.java create mode 100644 guava-shaded/src/main/java/com/google/common/primitives/UnsafeComparatorSubstitution.java create mode 100644 guava-shaded/src/main/javadoc/README.txt diff --git a/bom/pom.xml b/bom/pom.xml index 96b7a6ceb18..08f212f6157 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -55,6 +55,11 @@ java-driver-query-builder 4.18.2-SNAPSHOT + + org.apache.cassandra + java-driver-guava-shaded + 4.18.2-SNAPSHOT + org.apache.cassandra java-driver-test-infra @@ -75,11 +80,6 @@ native-protocol 1.5.1 - - com.datastax.oss - java-driver-shaded-guava - 25.1-jre-graal-sub-1 - diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 6c139aab127..9a708beb2a7 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -57,8 +57,8 @@ native-protocol - com.datastax.oss - java-driver-shaded-guava + org.apache.cassandra + java-driver-guava-shaded com.typesafe diff 
--git a/core/pom.xml b/core/pom.xml index 33688754f1b..2a48e8bf9ce 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -49,8 +49,8 @@ netty-handler - com.datastax.oss - java-driver-shaded-guava + org.apache.cassandra + java-driver-guava-shaded com.typesafe diff --git a/distribution/src/assembly/binary-tarball.xml b/distribution/src/assembly/binary-tarball.xml index 0d025fafb2c..b6294a25340 100644 --- a/distribution/src/assembly/binary-tarball.xml +++ b/distribution/src/assembly/binary-tarball.xml @@ -66,8 +66,8 @@ org.apache.cassandra:java-driver-core org.apache.cassandra:java-driver-mapper-runtime org.apache.cassandra:java-driver-mapper-processor + org.apache.cassandra:java-driver-guava-shaded - com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations com.github.spotbugs:spotbugs-annotations @@ -91,8 +91,8 @@ org.apache.cassandra:java-driver-core org.apache.cassandra:java-driver-query-builder org.apache.cassandra:java-driver-mapper-processor + org.apache.cassandra:java-driver-guava-shaded - com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations com.github.spotbugs:spotbugs-annotations @@ -116,8 +116,8 @@ org.apache.cassandra:java-driver-core org.apache.cassandra:java-driver-query-builder org.apache.cassandra:java-driver-mapper-runtime + org.apache.cassandra:java-driver-guava-shaded - com.datastax.oss:java-driver-shaded-guava com.github.stephenc.jcip:jcip-annotations com.github.spotbugs:spotbugs-annotations diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml new file mode 100644 index 00000000000..9854fcc48ba --- /dev/null +++ b/guava-shaded/pom.xml @@ -0,0 +1,215 @@ + + + + 4.0.0 + + org.apache.cassandra + java-driver-parent + 4.18.2-SNAPSHOT + + java-driver-guava-shaded + Apache Cassandra Java Driver - guava shaded dep + Shaded Guava artifact for use in the Java driver for Apache Cassandra® + + + com.google.guava + guava + + + com.google.code.findbugs + jsr305 + + + org.checkerframework + checker-qual + + + 
com.google.errorprone + error_prone_annotations + + + com.google.j2objc + j2objc-annotations + + + org.codehaus.mojo + animal-sniffer-annotations + + + + + org.graalvm.nativeimage + svm + 20.0.0 + provided + + + + + + maven-shade-plugin + + + shade-guava-dependency + package + + shade + + + + + org.apache.cassandra:java-driver-guava-shaded + com.google.guava:guava + + + + + com.google + com.datastax.oss.driver.shaded.guava + + + + + com.google.guava:* + + META-INF/** + + + + true + true + + + + + + maven-clean-plugin + + + clean-classes + package + + clean + + + ${project.build.outputDirectory} + + + + + + maven-dependency-plugin + + + unpack-shaded-classes + package + + unpack + + + ${project.build.outputDirectory} + + + org.apache.cassandra + java-driver-guava-shaded + ${project.version} + jar + + + + + + + + org.apache.felix + maven-bundle-plugin + + 3.5.0 + true + + + generate-shaded-manifest + package + + manifest + + + + com.datastax.oss.driver.shaded.guava + !com.datastax.oss.driver.shaded.guava.errorprone.*, !org.checkerframework.*, * + javax.annotation.*;resolution:=optional;version="[3.0,4)", javax.crypto.*;resolution:=optional, sun.misc.*;resolution:=optional, !com.oracle.svm.*, !com.datastax.oss.driver.shaded.guava.errorprone.*, !org.checkerframework.*, * + + + + + + + maven-assembly-plugin + + + generate-final-shaded-jar + package + + single + + + + + ${project.build.outputDirectory}/META-INF/MANIFEST.MF + + + src/assembly/shaded-jar.xml + + + false + + + + + + maven-jar-plugin + + + empty-javadoc-jar + + jar + + + javadoc + ${basedir}/src/main/javadoc + + + + + + org.revapi + revapi-maven-plugin + + true + + + + + diff --git a/guava-shaded/src/assembly/shaded-jar.xml b/guava-shaded/src/assembly/shaded-jar.xml new file mode 100644 index 00000000000..d762a27b20f --- /dev/null +++ b/guava-shaded/src/assembly/shaded-jar.xml @@ -0,0 +1,48 @@ + + + + shaded-jar + + jar + + false + + + + ${project.build.outputDirectory} + + 
META-INF/maven/org.apache.cassandra/java-driver-guava-shaded/pom.xml + + + + + + + + ${project.basedir}/dependency-reduced-pom.xml + META-INF/maven/org.apache.cassandra/java-driver-guava-shaded + pom.xml + + + diff --git a/guava-shaded/src/main/java/com/google/common/primitives/LexicographicalComparatorHolderSubstitution.java b/guava-shaded/src/main/java/com/google/common/primitives/LexicographicalComparatorHolderSubstitution.java new file mode 100644 index 00000000000..95e9c70cdbc --- /dev/null +++ b/guava-shaded/src/main/java/com/google/common/primitives/LexicographicalComparatorHolderSubstitution.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.common.primitives; + +import com.oracle.svm.core.annotate.Alias; +import com.oracle.svm.core.annotate.RecomputeFieldValue; +import com.oracle.svm.core.annotate.Substitute; +import com.oracle.svm.core.annotate.TargetClass; +import java.util.Comparator; + +@TargetClass(UnsignedBytes.LexicographicalComparatorHolder.class) +final class LexicographicalComparatorHolderSubstitution { + + @Alias + @RecomputeFieldValue(kind = RecomputeFieldValue.Kind.FromAlias) + static Comparator BEST_COMPARATOR = UnsignedBytes.lexicographicalComparatorJavaImpl(); + + /* All known cases should be covered by the field substitution above... keeping this only + * for sake of completeness */ + @Substitute + static Comparator getBestComparator() { + return UnsignedBytes.lexicographicalComparatorJavaImpl(); + } +} diff --git a/guava-shaded/src/main/java/com/google/common/primitives/UnsafeComparatorSubstitution.java b/guava-shaded/src/main/java/com/google/common/primitives/UnsafeComparatorSubstitution.java new file mode 100644 index 00000000000..549de0b5c02 --- /dev/null +++ b/guava-shaded/src/main/java/com/google/common/primitives/UnsafeComparatorSubstitution.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.common.primitives; + +import com.oracle.svm.core.annotate.Delete; +import com.oracle.svm.core.annotate.TargetClass; + +@TargetClass(UnsignedBytes.LexicographicalComparatorHolder.UnsafeComparator.class) +@Delete +final class UnsafeComparatorSubstitution {} diff --git a/guava-shaded/src/main/javadoc/README.txt b/guava-shaded/src/main/javadoc/README.txt new file mode 100644 index 00000000000..57f82b2a265 --- /dev/null +++ b/guava-shaded/src/main/javadoc/README.txt @@ -0,0 +1,2 @@ +This empty JAR is generated for compliance with Maven Central rules. Please refer to the original +Guava API docs. \ No newline at end of file diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index 2dfc0155c63..f2a96160bce 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -671,7 +671,7 @@ The remaining core driver dependencies are the only ones that are truly mandator * the [native protocol](https://github.com/datastax/native-protocol) layer. This is essentially part of the driver code, but was externalized for reuse in other projects; -* `java-driver-shaded-guava`, a shaded version of [Guava](https://github.com/google/guava). It is +* `java-driver-guava-shaded`, a shaded version of [Guava](https://github.com/google/guava). It is relocated to a different package, and only used by internal driver code, so it should be completely transparent to third-party code; * the [SLF4J](https://www.slf4j.org/) API for [logging](../logging/). 
diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 61906f41987..6588f17d5f7 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -44,8 +44,8 @@ java-driver-mapper-runtime - com.datastax.oss - java-driver-shaded-guava + org.apache.cassandra + java-driver-guava-shaded com.squareup diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 5947aff1bc5..f0e66b656ca 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -71,8 +71,8 @@ logback-classic - com.datastax.oss - java-driver-shaded-guava + org.apache.cassandra + java-driver-guava-shaded org.xerial.snappy diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index 536a6d96c77..3e6171ca530 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -35,7 +35,7 @@ public class BundleOptions { public static CompositeOption commonBundles() { return () -> options( - mavenBundle("com.datastax.oss", "java-driver-shaded-guava").versionAsInProject(), + mavenBundle("org.apache.cassandra", "java-driver-guava-shaded").versionAsInProject(), mavenBundle("io.dropwizard.metrics", "metrics-core").versionAsInProject(), mavenBundle("org.slf4j", "slf4j-api").versionAsInProject(), mavenBundle("org.hdrhistogram", "HdrHistogram").versionAsInProject(), diff --git a/pom.xml b/pom.xml index 94311719e5f..620cf1db4bb 100644 --- a/pom.xml +++ b/pom.xml @@ -40,6 +40,7 @@ mapper-processor metrics/micrometer metrics/microprofile + guava-shaded test-infra integration-tests osgi-tests @@ -110,7 +111,7 @@ ${netty.version} - + com.google.guava guava 25.1-jre diff --git a/query-builder/pom.xml b/query-builder/pom.xml index bae0e0c6ca0..4e09a10e584 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -45,8 +45,8 @@ 
java-driver-core - com.datastax.oss - java-driver-shaded-guava + org.apache.cassandra + java-driver-guava-shaded com.github.stephenc.jcip From 7ca013fbd5f1ec589df52ef0e6441986f07c11ff Mon Sep 17 00:00:00 2001 From: Ammar Khaku Date: Sat, 22 Apr 2023 16:13:15 -0700 Subject: [PATCH 930/979] JAVA-3057 Allow decoding a UDT that has more fields than expected patch by Ammar Khaku; reviewed by Andy Tolbert and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1635 --- .../internal/core/type/codec/UdtCodec.java | 10 ++- .../core/type/codec/UdtCodecTest.java | 24 +++--- .../internal/core/type/codec/UdtCodecIT.java | 77 +++++++++++++++++++ 3 files changed, 95 insertions(+), 16 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecIT.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java index f5177e63b5e..5d0a379f761 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodec.java @@ -30,10 +30,14 @@ import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import net.jcip.annotations.ThreadSafe; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @ThreadSafe public class UdtCodec implements TypeCodec { + private static final Logger LOG = LoggerFactory.getLogger(UdtCodec.class); + private final UserDefinedType cqlType; public UdtCodec(@NonNull UserDefinedType cqlType) { @@ -107,10 +111,8 @@ public UdtValue decode(@Nullable ByteBuffer bytes, @NonNull ProtocolVersion prot int i = 0; while (input.hasRemaining()) { if (i == cqlType.getFieldTypes().size()) { - throw new IllegalArgumentException( - String.format( - "Too many fields in encoded UDT value, expected %d", - cqlType.getFieldTypes().size())); + LOG.debug("Encountered 
unexpected fields when parsing codec {}", cqlType); + break; } int elementSize = input.getInt(); ByteBuffer element; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java index bf7c1e98b26..af94247f937 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecTest.java @@ -136,18 +136,18 @@ public void should_decode_udt() { } @Test - public void should_fail_to_decode_udt_when_too_many_fields() { - assertThatThrownBy( - () -> - decode( - "0x" - + ("00000004" + "00000001") - + "ffffffff" - + ("00000001" + "61") - // extra contents - + "ffffffff")) - .isInstanceOf(IllegalArgumentException.class) - .hasMessage("Too many fields in encoded UDT value, expected 3"); + public void should_decode_udt_when_too_many_fields() { + UdtValue udt = + decode( + "0x" + + ("00000004" + "00000001") + + "ffffffff" + + ("00000001" + "61") + // extra contents + + "ffffffff"); + assertThat(udt.getInt(0)).isEqualTo(1); + assertThat(udt.isNull(1)).isTrue(); + assertThat(udt.getString(2)).isEqualTo("a"); } /** Test for JAVA-2557. Ensures that the codec can decode null fields with any negative length. */ diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecIT.java new file mode 100644 index 00000000000..804a078bbe0 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/internal/core/type/codec/UdtCodecIT.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.type.codec; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.Row; +import com.datastax.oss.driver.api.core.data.UdtValue; +import com.datastax.oss.driver.api.core.type.UserDefinedType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import java.util.Objects; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class UdtCodecIT { + + private CcmRule ccmRule = CcmRule.getInstance(); + + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Test + public void should_decoding_udt_be_backward_compatible() { + CqlSession session = sessionRule.session(); + session.execute("CREATE TYPE test_type_1 (a text, b int)"); + session.execute("CREATE TABLE test_table_1 (e int primary key, f frozen)"); + // 
insert a row using version 1 of the UDT schema + session.execute("INSERT INTO test_table_1(e, f) VALUES(1, {a: 'a', b: 1})"); + UserDefinedType udt = + session + .getMetadata() + .getKeyspace(sessionRule.keyspace()) + .flatMap(ks -> ks.getUserDefinedType("test_type_1")) + .orElseThrow(IllegalStateException::new); + TypeCodec oldCodec = session.getContext().getCodecRegistry().codecFor(udt); + // update UDT schema + session.execute("ALTER TYPE test_type_1 add i text"); + // insert a row using version 2 of the UDT schema + session.execute("INSERT INTO test_table_1(e, f) VALUES(2, {a: 'b', b: 2, i: 'b'})"); + Row row = + Objects.requireNonNull(session.execute("SELECT f FROM test_table_1 WHERE e = ?", 2).one()); + // Try to read new row with old codec. Using row.getUdtValue() would not cause any issues, + // because new codec will be automatically registered (using all 3 attributes). + // If application leverages generic row.get(String, Codec) method, data reading with old codec + // should + // be backward-compatible. + UdtValue value = Objects.requireNonNull((UdtValue) row.get("f", oldCodec)); + assertThat(value.getString("a")).isEqualTo("b"); + assertThat(value.getInt("b")).isEqualTo(2); + assertThatThrownBy(() -> value.getString("i")).hasMessage("i is not a field in this UDT"); + } +} From 6a8674f2db92668359196b5492753612b3844594 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 11 Nov 2024 11:49:20 -0600 Subject: [PATCH 931/979] CASSJAVA-55 Remove setting "Host" header for metadata requests. With some sysprops enabled this will actually be respected which completely borks Astra routing. 
patch by Bret McGuire; reviewed by Alexandre Dutra and Bret McGuire for CASSJAVA-55 --- .../driver/internal/core/config/cloud/CloudConfigFactory.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java index b6b2cccc466..817b3263d25 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/config/cloud/CloudConfigFactory.java @@ -229,7 +229,6 @@ protected BufferedReader fetchProxyMetadata( HttpsURLConnection connection = (HttpsURLConnection) metadataServiceUrl.openConnection(); connection.setSSLSocketFactory(sslContext.getSocketFactory()); connection.setRequestMethod("GET"); - connection.setRequestProperty("host", "localhost"); return new BufferedReader( new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8)); } catch (ConnectException e) { From 7689c5a81e89bce5598d9976a041068a1f5e2a7f Mon Sep 17 00:00:00 2001 From: SiyaoIsHiding <113857408+SiyaoIsHiding@users.noreply.github.com> Date: Tue, 7 Jan 2025 14:30:58 +0800 Subject: [PATCH 932/979] JAVA-3118: Add support for vector data type in Schema Builder, QueryBuilder patch by Jane He; reviewed by Mick Semb Wever and Bret McGuire for JAVA-3118 reference: #1931 --- manual/query_builder/select/README.md | 23 +++++ query-builder/revapi.json | 10 +++ .../api/querybuilder/select/Select.java | 11 +++ .../querybuilder/select/DefaultSelect.java | 86 +++++++++++++++++-- .../delete/DeleteSelectorTest.java | 11 +++ .../insert/RegularInsertTest.java | 7 ++ .../querybuilder/schema/AlterTableTest.java | 6 ++ .../querybuilder/schema/AlterTypeTest.java | 6 ++ .../querybuilder/schema/CreateTableTest.java | 9 ++ .../querybuilder/schema/CreateTypeTest.java | 9 ++ .../select/SelectOrderingTest.java | 20 +++++ 
.../select/SelectSelectorTest.java | 43 ++++++++++ 12 files changed, 233 insertions(+), 8 deletions(-) diff --git a/manual/query_builder/select/README.md b/manual/query_builder/select/README.md index 92c058608e7..0425423a402 100644 --- a/manual/query_builder/select/README.md +++ b/manual/query_builder/select/README.md @@ -387,6 +387,29 @@ selectFrom("sensor_data") // SELECT reading FROM sensor_data WHERE id=? ORDER BY date DESC ``` +Vector Search: + +```java + +import com.datastax.oss.driver.api.core.data.CqlVector; + +selectFrom("foo") + .all() + .where(Relation.column("k").isEqualTo(literal(1))) + .orderByAnnOf("c1", CqlVector.newInstance(0.1, 0.2, 0.3)); +// SELECT * FROM foo WHERE k=1 ORDER BY c1 ANN OF [0.1, 0.2, 0.3] + +selectFrom("cycling", "comments_vs") + .column("comment") + .function( + "similarity_cosine", + Selector.column("comment_vector"), + literal(CqlVector.newInstance(0.2, 0.15, 0.3, 0.2, 0.05))) + .orderByAnnOf("comment_vector", CqlVector.newInstance(0.1, 0.15, 0.3, 0.12, 0.05)) + .limit(1); +// SELECT comment,similarity_cosine(comment_vector,[0.2, 0.15, 0.3, 0.2, 0.05]) FROM cycling.comments_vs ORDER BY comment_vector ANN OF [0.1, 0.15, 0.3, 0.12, 0.05] LIMIT 1 +``` + Limits: ```java diff --git a/query-builder/revapi.json b/query-builder/revapi.json index 9d0163b487e..c4d8aa27212 100644 --- a/query-builder/revapi.json +++ b/query-builder/revapi.json @@ -2772,6 +2772,16 @@ "code": "java.method.addedToInterface", "new": "method com.datastax.oss.driver.api.querybuilder.update.UpdateStart com.datastax.oss.driver.api.querybuilder.update.UpdateStart::usingTtl(int)", "justification": "JAVA-2210: Add ability to set TTL for modification queries" + }, + { + "code": "java.method.addedToInterface", + "new": "method com.datastax.oss.driver.api.querybuilder.select.Select com.datastax.oss.driver.api.querybuilder.select.Select::orderByAnnOf(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector)", + "justification": "JAVA-3118: Add support for vector 
data type in Schema Builder, QueryBuilder" + }, + { + "code": "java.method.addedToInterface", + "new": "method com.datastax.oss.driver.api.querybuilder.select.Select com.datastax.oss.driver.api.querybuilder.select.Select::orderByAnnOf(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector)", + "justification": "JAVA-3118: Add support for vector data type in Schema Builder, QueryBuilder" } ] } diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/select/Select.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/select/Select.java index a22b45c35bd..159657989da 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/select/Select.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/select/Select.java @@ -18,6 +18,7 @@ package com.datastax.oss.driver.api.querybuilder.select; import com.datastax.oss.driver.api.core.CqlIdentifier; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.querybuilder.BindMarker; import com.datastax.oss.driver.api.querybuilder.BuildableQuery; @@ -146,6 +147,16 @@ default Select orderBy(@NonNull String columnName, @NonNull ClusteringOrder orde return orderBy(CqlIdentifier.fromCql(columnName), order); } + /** + * Shortcut for {@link #orderByAnnOf(CqlIdentifier, CqlVector)}, adding an ORDER BY ... ANN OF ... + * clause + */ + @NonNull + Select orderByAnnOf(@NonNull String columnName, @NonNull CqlVector ann); + + /** Adds the ORDER BY ... ANN OF ... clause, usually used for vector search */ + @NonNull + Select orderByAnnOf(@NonNull CqlIdentifier columnId, @NonNull CqlVector ann); /** * Adds a LIMIT clause to this query with a literal value. 
* diff --git a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/select/DefaultSelect.java b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/select/DefaultSelect.java index 86a2a07a3f2..5daf252a9eb 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/select/DefaultSelect.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/internal/querybuilder/select/DefaultSelect.java @@ -20,8 +20,10 @@ import com.datastax.oss.driver.api.core.CqlIdentifier; import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.SimpleStatementBuilder; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.metadata.schema.ClusteringOrder; import com.datastax.oss.driver.api.querybuilder.BindMarker; +import com.datastax.oss.driver.api.querybuilder.QueryBuilder; import com.datastax.oss.driver.api.querybuilder.relation.Relation; import com.datastax.oss.driver.api.querybuilder.select.Select; import com.datastax.oss.driver.api.querybuilder.select.SelectFrom; @@ -49,6 +51,7 @@ public class DefaultSelect implements SelectFrom, Select { private final ImmutableList relations; private final ImmutableList groupByClauses; private final ImmutableMap orderings; + private final Ann ann; private final Object limit; private final Object perPartitionLimit; private final boolean allowsFiltering; @@ -65,6 +68,7 @@ public DefaultSelect(@Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier ta ImmutableMap.of(), null, null, + null, false); } @@ -74,6 +78,8 @@ public DefaultSelect(@Nullable CqlIdentifier keyspace, @NonNull CqlIdentifier ta * @param selectors if it contains {@link AllSelector#INSTANCE}, that must be the only element. * This isn't re-checked because methods that call this constructor internally already do it, * make sure you do it yourself. + * @param ann Approximate nearest neighbor. 
ANN ordering does not support secondary ordering or + * ASC order. */ public DefaultSelect( @Nullable CqlIdentifier keyspace, @@ -84,6 +90,7 @@ public DefaultSelect( @NonNull ImmutableList relations, @NonNull ImmutableList groupByClauses, @NonNull ImmutableMap orderings, + @Nullable Ann ann, @Nullable Object limit, @Nullable Object perPartitionLimit, boolean allowsFiltering) { @@ -94,6 +101,9 @@ public DefaultSelect( || (limit instanceof Integer && (Integer) limit > 0) || limit instanceof BindMarker, "limit must be a strictly positive integer or a bind marker"); + Preconditions.checkArgument( + orderings.isEmpty() || ann == null, "ANN ordering does not support secondary ordering"); + this.ann = ann; this.keyspace = keyspace; this.table = table; this.isJson = isJson; @@ -117,6 +127,7 @@ public SelectFrom json() { relations, groupByClauses, orderings, + ann, limit, perPartitionLimit, allowsFiltering); @@ -134,6 +145,7 @@ public SelectFrom distinct() { relations, groupByClauses, orderings, + ann, limit, perPartitionLimit, allowsFiltering); @@ -193,6 +205,7 @@ public Select withSelectors(@NonNull ImmutableList newSelectors) { relations, groupByClauses, orderings, + ann, limit, perPartitionLimit, allowsFiltering); @@ -221,6 +234,7 @@ public Select withRelations(@NonNull ImmutableList newRelations) { newRelations, groupByClauses, orderings, + ann, limit, perPartitionLimit, allowsFiltering); @@ -249,6 +263,7 @@ public Select withGroupByClauses(@NonNull ImmutableList newGroupByClau relations, newGroupByClauses, orderings, + ann, limit, perPartitionLimit, allowsFiltering); @@ -260,6 +275,18 @@ public Select orderBy(@NonNull CqlIdentifier columnId, @NonNull ClusteringOrder return withOrderings(ImmutableCollections.append(orderings, columnId, order)); } + @NonNull + @Override + public Select orderByAnnOf(@NonNull String columnName, @NonNull CqlVector ann) { + return withAnn(new Ann(CqlIdentifier.fromCql(columnName), ann)); + } + + @NonNull + @Override + public Select 
orderByAnnOf(@NonNull CqlIdentifier columnId, @NonNull CqlVector ann) { + return withAnn(new Ann(columnId, ann)); + } + @NonNull @Override public Select orderByIds(@NonNull Map newOrderings) { @@ -277,6 +304,24 @@ public Select withOrderings(@NonNull ImmutableMap entry : orderings.entrySet()) { - if (first) { - builder.append(" ORDER BY "); - first = false; - } else { - builder.append(","); + if (ann != null) { + builder.append(" ORDER BY ").append(this.ann.columnId.asCql(true)).append(" ANN OF "); + QueryBuilder.literal(ann.vector).appendTo(builder); + } else { + boolean first = true; + for (Map.Entry entry : orderings.entrySet()) { + if (first) { + builder.append(" ORDER BY "); + first = false; + } else { + builder.append(","); + } + builder.append(entry.getKey().asCql(true)).append(" ").append(entry.getValue().name()); } - builder.append(entry.getKey().asCql(true)).append(" ").append(entry.getValue().name()); } if (limit != null) { @@ -499,6 +554,11 @@ public Object getLimit() { return limit; } + @Nullable + public Ann getAnn() { + return ann; + } + @Nullable public Object getPerPartitionLimit() { return perPartitionLimit; @@ -512,4 +572,14 @@ public boolean allowsFiltering() { public String toString() { return asCql(); } + + public static class Ann { + private final CqlVector vector; + private final CqlIdentifier columnId; + + private Ann(CqlIdentifier columnId, CqlVector vector) { + this.vector = vector; + this.columnId = columnId; + } + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/delete/DeleteSelectorTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/delete/DeleteSelectorTest.java index 23210971bc6..cce4cf51a10 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/delete/DeleteSelectorTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/delete/DeleteSelectorTest.java @@ -22,6 +22,7 @@ import static 
com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import com.datastax.oss.driver.api.core.data.CqlVector; import org.junit.Test; public class DeleteSelectorTest { @@ -34,6 +35,16 @@ public void should_generate_column_deletion() { .hasCql("DELETE v FROM ks.foo WHERE k=?"); } + @Test + public void should_generate_vector_deletion() { + assertThat( + deleteFrom("foo") + .column("v") + .whereColumn("k") + .isEqualTo(literal(CqlVector.newInstance(0.1, 0.2)))) + .hasCql("DELETE v FROM foo WHERE k=[0.1, 0.2]"); + } + @Test public void should_generate_field_deletion() { assertThat( diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java index 36133445b34..89c833ff1c6 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/insert/RegularInsertTest.java @@ -23,6 +23,7 @@ import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; import static org.assertj.core.api.Assertions.catchThrowable; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.querybuilder.term.Term; import com.datastax.oss.driver.internal.querybuilder.insert.DefaultInsert; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; @@ -41,6 +42,12 @@ public void should_generate_column_assignments() { .hasCql("INSERT INTO foo (a,b) VALUES (?,?)"); } + @Test + public void should_generate_vector_literals() { + assertThat(insertInto("foo").value("a", literal(CqlVector.newInstance(0.1, 0.2, 0.3)))) + .hasCql("INSERT INTO foo (a) VALUES ([0.1, 0.2, 0.3])"); + } + @Test public void should_keep_last_assignment_if_column_listed_twice() { assertThat( diff --git 
a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java index 1567b0848cf..2c99b154b38 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTableTest.java @@ -108,4 +108,10 @@ public void should_generate_alter_table_with_no_compression() { assertThat(alterTable("bar").withNoCompression()) .hasCql("ALTER TABLE bar WITH compression={'sstable_compression':''}"); } + + @Test + public void should_generate_alter_table_with_vector() { + assertThat(alterTable("bar").alterColumn("v", DataTypes.vectorOf(DataTypes.FLOAT, 3))) + .hasCql("ALTER TABLE bar ALTER v TYPE vector"); + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTypeTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTypeTest.java index 2becb9338f9..14bec0a6ce3 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTypeTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/AlterTypeTest.java @@ -53,4 +53,10 @@ public void should_generate_alter_table_with_rename_three_columns() { assertThat(alterType("bar").renameField("x", "y").renameField("u", "v").renameField("b", "a")) .hasCql("ALTER TYPE bar RENAME x TO y AND u TO v AND b TO a"); } + + @Test + public void should_generate_alter_type_with_vector() { + assertThat(alterType("foo", "bar").alterField("vec", DataTypes.vectorOf(DataTypes.FLOAT, 3))) + .hasCql("ALTER TYPE foo.bar ALTER vec TYPE vector"); + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java index 
7a5542c51f0..15cd12c75eb 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java @@ -314,4 +314,13 @@ public void should_generate_create_table_time_window_compaction() { .hasCql( "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compaction={'class':'TimeWindowCompactionStrategy','compaction_window_size':10,'compaction_window_unit':'DAYS','timestamp_resolution':'MICROSECONDS','unsafe_aggressive_sstable_expiration':false}"); } + + @Test + public void should_generate_vector_column() { + assertThat( + createTable("foo") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.vectorOf(DataTypes.FLOAT, 3))) + .hasCql("CREATE TABLE foo (k int PRIMARY KEY,v vector)"); + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTypeTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTypeTest.java index d881a0500cb..f7c15788a0f 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTypeTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTypeTest.java @@ -83,4 +83,13 @@ public void should_create_type_with_collections() { .withField("map", DataTypes.mapOf(DataTypes.INT, DataTypes.TEXT))) .hasCql("CREATE TYPE ks1.type (map map)"); } + + @Test + public void should_create_type_with_vector() { + assertThat( + createType("ks1", "type") + .withField("c1", DataTypes.INT) + .withField("vec", DataTypes.vectorOf(DataTypes.FLOAT, 3))) + .hasCql("CREATE TYPE ks1.type (c1 int,vec vector)"); + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectOrderingTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectOrderingTest.java index ff27fde4f8f..a9c618e9559 100644 --- 
a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectOrderingTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectOrderingTest.java @@ -23,6 +23,7 @@ import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.querybuilder.relation.Relation; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import org.junit.Test; @@ -74,4 +75,23 @@ public void should_replace_previous_ordering() { .orderBy(ImmutableMap.of("c1", DESC, "c2", ASC))) .hasCql("SELECT * FROM foo WHERE k=1 ORDER BY c3 ASC,c1 DESC,c2 ASC"); } + + @Test + public void should_generate_ann_clause() { + assertThat( + selectFrom("foo") + .all() + .where(Relation.column("k").isEqualTo(literal(1))) + .orderByAnnOf("c1", CqlVector.newInstance(0.1, 0.2, 0.3))) + .hasCql("SELECT * FROM foo WHERE k=1 ORDER BY c1 ANN OF [0.1, 0.2, 0.3]"); + } + + @Test(expected = IllegalArgumentException.class) + public void should_fail_when_provided_ann_with_other_orderings() { + selectFrom("foo") + .all() + .where(Relation.column("k").isEqualTo(literal(1))) + .orderBy("c1", ASC) + .orderByAnnOf("c2", CqlVector.newInstance(0.1, 0.2, 0.3)); + } } diff --git a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectSelectorTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectSelectorTest.java index dc7cc98c6cc..7e03627d4b7 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectSelectorTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/select/SelectSelectorTest.java @@ -22,6 +22,7 @@ import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.raw; import static 
com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.type.DataTypes; import com.datastax.oss.driver.api.core.type.codec.CodecNotFoundException; import com.datastax.oss.driver.api.querybuilder.CharsetCodec; @@ -230,6 +231,48 @@ public void should_generate_raw_selector() { .hasCql("SELECT bar,baz FROM foo"); } + @Test + public void should_generate_similarity_functions() { + Select similarity_cosine_clause = + selectFrom("cycling", "comments_vs") + .column("comment") + .function( + "similarity_cosine", + Selector.column("comment_vector"), + literal(CqlVector.newInstance(0.2, 0.15, 0.3, 0.2, 0.05))) + .orderByAnnOf("comment_vector", CqlVector.newInstance(0.1, 0.15, 0.3, 0.12, 0.05)) + .limit(1); + assertThat(similarity_cosine_clause) + .hasCql( + "SELECT comment,similarity_cosine(comment_vector,[0.2, 0.15, 0.3, 0.2, 0.05]) FROM cycling.comments_vs ORDER BY comment_vector ANN OF [0.1, 0.15, 0.3, 0.12, 0.05] LIMIT 1"); + + Select similarity_euclidean_clause = + selectFrom("cycling", "comments_vs") + .column("comment") + .function( + "similarity_euclidean", + Selector.column("comment_vector"), + literal(CqlVector.newInstance(0.2, 0.15, 0.3, 0.2, 0.05))) + .orderByAnnOf("comment_vector", CqlVector.newInstance(0.1, 0.15, 0.3, 0.12, 0.05)) + .limit(1); + assertThat(similarity_euclidean_clause) + .hasCql( + "SELECT comment,similarity_euclidean(comment_vector,[0.2, 0.15, 0.3, 0.2, 0.05]) FROM cycling.comments_vs ORDER BY comment_vector ANN OF [0.1, 0.15, 0.3, 0.12, 0.05] LIMIT 1"); + + Select similarity_dot_product_clause = + selectFrom("cycling", "comments_vs") + .column("comment") + .function( + "similarity_dot_product", + Selector.column("comment_vector"), + literal(CqlVector.newInstance(0.2, 0.15, 0.3, 0.2, 0.05))) + .orderByAnnOf("comment_vector", CqlVector.newInstance(0.1, 0.15, 0.3, 0.12, 0.05)) + .limit(1); + assertThat(similarity_dot_product_clause) + .hasCql( 
+ "SELECT comment,similarity_dot_product(comment_vector,[0.2, 0.15, 0.3, 0.2, 0.05]) FROM cycling.comments_vs ORDER BY comment_vector ANN OF [0.1, 0.15, 0.3, 0.12, 0.05] LIMIT 1"); + } + @Test public void should_alias_selectors() { assertThat(selectFrom("foo").column("bar").as("baz")).hasCql("SELECT bar AS baz FROM foo"); From 8c1009959e988d9f7249151ad260f64aa6afed63 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Mon, 23 Dec 2024 08:20:42 +0100 Subject: [PATCH 933/979] Upgrade Guava to 33.3.1-jre patch by Lukasz Antoniak; reviewed by Alexandre Dutra and Bret McGuire for CASSJAVA-53 --- .../internal/core/cql/reactive/TestSubscriber.java | 5 +++-- guava-shaded/pom.xml | 10 ++-------- .../dse/driver/api/mapper/reactive/TestSubscriber.java | 6 +++++- pom.xml | 2 +- 4 files changed, 11 insertions(+), 12 deletions(-) diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java index aed7a4dfc8e..652155e5309 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/cql/reactive/TestSubscriber.java @@ -81,7 +81,8 @@ public List getElements() { } public void awaitTermination() { - Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES); - if (latch.getCount() > 0) fail("subscriber not terminated"); + if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES)) { + fail("subscriber not terminated"); + } } } diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index 9854fcc48ba..f480f9258cc 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -45,14 +45,6 @@ com.google.errorprone error_prone_annotations - - com.google.j2objc - j2objc-annotations - - - org.codehaus.mojo - animal-sniffer-annotations - @@ -78,6 +70,8 @@ org.apache.cassandra:java-driver-guava-shaded com.google.guava:guava + 
com.google.guava:failureaccess + com.google.j2objc:j2objc-annotations diff --git a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java index 6f23cfca98a..6886b9a7622 100644 --- a/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java +++ b/mapper-runtime/src/test/java/com/datastax/dse/driver/api/mapper/reactive/TestSubscriber.java @@ -17,6 +17,8 @@ */ package com.datastax.dse.driver.api.mapper.reactive; +import static org.assertj.core.api.Fail.fail; + import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -70,6 +72,8 @@ public List getElements() { } public void awaitTermination() { - Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES); + if (!Uninterruptibles.awaitUninterruptibly(latch, 1, TimeUnit.MINUTES)) { + fail("subscriber not terminated"); + } } } diff --git a/pom.xml b/pom.xml index 620cf1db4bb..c61e6485fd3 100644 --- a/pom.xml +++ b/pom.xml @@ -114,7 +114,7 @@ com.google.guava guava - 25.1-jre + 33.3.1-jre com.typesafe From 75a269d04d49630032be6afedae2ded0c4334e42 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Mon, 30 Dec 2024 07:18:06 +0100 Subject: [PATCH 934/979] Do not always cleanup Guava shaded module before packaging --- guava-shaded/pom.xml | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index f480f9258cc..6a22663e956 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -94,21 +94,6 @@ - - maven-clean-plugin - - - clean-classes - package - - clean - - - ${project.build.outputDirectory} - - - - maven-dependency-plugin From 01671d99247bdc783c832c128a8570ba846875c4 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Fri, 10 Jan 2025 16:05:42 +0100 Subject: 
[PATCH 935/979] Revert "Do not always cleanup Guava shaded module before packaging" This reverts commit 5be52ec1a8d014c81566180c731b828a591082da. --- guava-shaded/pom.xml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index 6a22663e956..f480f9258cc 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -94,6 +94,21 @@ + + maven-clean-plugin + + + clean-classes + package + + clean + + + ${project.build.outputDirectory} + + + + maven-dependency-plugin From 342e2dcf47afab238f357ac2afde65b079ce6b79 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Fri, 10 Jan 2025 16:16:26 +0100 Subject: [PATCH 936/979] Conditionally compile shaded Guava module --- guava-shaded/pom.xml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index f480f9258cc..ca8f0161b04 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -56,6 +56,32 @@ + + + org.codehaus.mojo + build-helper-maven-plugin + 1.12 + + + regex-property + + regex-property + + + maven.main.skip + ${java.version} + ^(?!1.8).+ + true + false + + + + maven-shade-plugin @@ -95,6 +121,12 @@ + maven-clean-plugin From 2e0c44c020819c928730e1aac812bf2f475d8256 Mon Sep 17 00:00:00 2001 From: SiyaoIsHiding <113857408+SiyaoIsHiding@users.noreply.github.com> Date: Sun, 26 Jan 2025 22:38:52 +0800 Subject: [PATCH 937/979] JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit patch by Jane He; reviewed by Bret McGuire and João Reis reference: https://github.com/apache/cassandra-java-driver/pull/1952 --- core/revapi.json | 297 ++++++++++++++++++ .../oss/driver/api/core/data/CqlVector.java | 72 ++++- .../driver/api/core/data/GettableById.java | 2 +- .../driver/api/core/data/GettableByIndex.java | 3 +- 
.../driver/api/core/data/GettableByName.java | 2 +- .../driver/api/core/data/SettableById.java | 2 +- .../driver/api/core/data/SettableByIndex.java | 2 +- .../driver/api/core/data/SettableByName.java | 2 +- .../oss/driver/api/core/type/DataTypes.java | 19 +- .../driver/api/core/type/codec/TypeCodec.java | 6 + .../api/core/type/codec/TypeCodecs.java | 4 +- .../api/core/type/reflect/GenericType.java | 6 +- .../parsing/DataTypeClassNameParser.java | 8 + .../internal/core/type/DefaultVectorType.java | 2 +- .../internal/core/type/codec/BigIntCodec.java | 7 + .../core/type/codec/BooleanCodec.java | 7 + .../internal/core/type/codec/DoubleCodec.java | 7 + .../internal/core/type/codec/FloatCodec.java | 7 + .../internal/core/type/codec/IntCodec.java | 7 + .../core/type/codec/TimestampCodec.java | 7 + .../internal/core/type/codec/UuidCodec.java | 7 + .../internal/core/type/codec/VectorCodec.java | 86 +++-- .../extras/time/TimestampMillisCodec.java | 7 + .../codec/registry/CachingCodecRegistry.java | 12 +- .../internal/core/type/util/VIntCoding.java | 77 ++++- .../driver/api/core/data/CqlVectorTest.java | 167 +++++----- .../core/type/codec/VectorCodecTest.java | 265 ++++++++++++---- ...CachingCodecRegistryTestDataProviders.java | 20 ++ .../core/type/util/VIntCodingTest.java | 86 +++++ .../oss/driver/core/data/DataTypeIT.java | 38 ++- 30 files changed, 1002 insertions(+), 232 deletions(-) create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/type/util/VIntCodingTest.java diff --git a/core/revapi.json b/core/revapi.json index 1c875895d6c..5aa46a3ccad 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -7089,6 +7089,303 @@ "new": "method com.datastax.oss.driver.api.core.cql.SimpleStatement com.datastax.oss.driver.api.core.cql.SimpleStatement::setNamedValues(java.util.Map)", "annotation": "@edu.umd.cs.findbugs.annotations.CheckReturnValue", "justification": "Annotate mutating methods with @CheckReturnValue" + }, + { + "code": 
"java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::from(java.lang.String, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method T com.datastax.oss.driver.api.core.data.CqlVector::get(int)", + "new": "method T com.datastax.oss.driver.api.core.data.CqlVector::get(int)", + "justification": 
"JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method java.util.Iterator com.datastax.oss.driver.api.core.data.CqlVector::iterator()", + "new": "method java.util.Iterator com.datastax.oss.driver.api.core.data.CqlVector::iterator()", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeChanged", + "old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(===V[]===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(===V[]===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(V[])", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(V[])", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(V[])", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(V[])", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + 
"old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(===java.util.List===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(===java.util.List===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(java.util.List)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(java.util.List)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(java.util.List)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::newInstance(java.util.List)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeChanged", + "old": "parameter T com.datastax.oss.driver.api.core.data.CqlVector::set(int, ===T===)", + "new": "parameter T com.datastax.oss.driver.api.core.data.CqlVector::set(int, ===T===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeChanged", + "old": "method T com.datastax.oss.driver.api.core.data.CqlVector::set(int, T)", + "new": "method T 
com.datastax.oss.driver.api.core.data.CqlVector::set(int, T)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method java.util.Spliterator java.lang.Iterable::spliterator() @ com.datastax.oss.driver.api.core.data.CqlVector", + "new": "method java.util.Spliterator java.lang.Iterable::spliterator() @ com.datastax.oss.driver.api.core.data.CqlVector", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method java.util.stream.Stream com.datastax.oss.driver.api.core.data.CqlVector::stream()", + "new": "method java.util.stream.Stream com.datastax.oss.driver.api.core.data.CqlVector::stream()", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::subVector(int, int)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.CqlVector::subVector(int, int)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.class.noLongerImplementsInterface", + "old": "class com.datastax.oss.driver.api.core.data.CqlVector", + "new": "class com.datastax.oss.driver.api.core.data.CqlVector", + "interface": "java.lang.Iterable", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "class 
com.datastax.oss.driver.api.core.data.CqlVector", + "new": "class com.datastax.oss.driver.api.core.data.CqlVector", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.class.superTypeTypeParametersChanged", + "old": "class com.datastax.oss.driver.api.core.data.CqlVector", + "new": "class com.datastax.oss.driver.api.core.data.CqlVector", + "oldSuperType": "java.lang.Iterable", + "newSuperType": "java.lang.Iterable", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, ===java.lang.Class===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector 
com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableById::getVector(com.datastax.oss.driver.api.core.CqlIdentifier, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, ===java.lang.Class===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByIndex::getVector(int, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types 
(OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, ===java.lang.Class===)", + "new": "parameter com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.data.CqlVector com.datastax.oss.driver.api.core.data.GettableByName::getVector(java.lang.String, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "new": "parameter SelfT 
com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "new": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "new": "method SelfT com.datastax.oss.driver.api.core.data.SettableById>::setVector(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "new": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "justification": "JAVA-3143: Extend 
driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "new": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "new": "method SelfT com.datastax.oss.driver.api.core.data.SettableByIndex>::setVector(int, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "new": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, ===com.datastax.oss.driver.api.core.data.CqlVector===, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "new": 
"parameter SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector, ===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "new": "method SelfT com.datastax.oss.driver.api.core.data.SettableByName>::setVector(java.lang.String, com.datastax.oss.driver.api.core.data.CqlVector, java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "new": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> 
com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(com.datastax.oss.driver.api.core.type.VectorType, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "new": "parameter com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, ===com.datastax.oss.driver.api.core.type.codec.TypeCodec===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> 
com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "new": "method com.datastax.oss.driver.api.core.type.codec.TypeCodec> com.datastax.oss.driver.api.core.type.codec.TypeCodecs::vectorOf(int, com.datastax.oss.driver.api.core.type.codec.TypeCodec)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===com.datastax.oss.driver.api.core.type.reflect.GenericType===)", + "new": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===com.datastax.oss.driver.api.core.type.reflect.GenericType===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "justification": 
"JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(com.datastax.oss.driver.api.core.type.reflect.GenericType)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.parameterTypeParameterChanged", + "old": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===java.lang.Class===)", + "new": "parameter com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(===java.lang.Class===)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.method.returnTypeTypeParametersChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.generics.formalTypeParameterChanged", + "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> 
com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", + "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", + "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java index 911b6187f6d..8089d551750 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/CqlVector.java @@ -18,11 +18,10 @@ package com.datastax.oss.driver.api.core.data; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; +import com.datastax.oss.driver.internal.core.type.codec.ParseUtils; import com.datastax.oss.driver.shaded.guava.common.base.Preconditions; import com.datastax.oss.driver.shaded.guava.common.base.Predicates; -import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; -import com.datastax.oss.driver.shaded.guava.common.collect.Streams; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.io.InvalidObjectException; @@ -35,7 +34,6 @@ import java.util.Iterator; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; import java.util.stream.Stream; /** @@ -52,7 +50,7 @@ * where possible we've tried to make the API of this class similar to the equivalent methods on * {@link List}. */ -public class CqlVector implements Iterable, Serializable { +public class CqlVector implements Iterable, Serializable { /** * Create a new CqlVector containing the specified values. 
@@ -60,7 +58,7 @@ public class CqlVector implements Iterable, Serializable { * @param vals the collection of values to wrap. * @return a CqlVector wrapping those values */ - public static CqlVector newInstance(V... vals) { + public static CqlVector newInstance(V... vals) { // Note that Array.asList() guarantees the return of an array which implements RandomAccess return new CqlVector(Arrays.asList(vals)); @@ -73,29 +71,64 @@ public static CqlVector newInstance(V... vals) { * @param list the collection of values to wrap. * @return a CqlVector wrapping those values */ - public static CqlVector newInstance(List list) { + public static CqlVector newInstance(List list) { Preconditions.checkArgument(list != null, "Input list should not be null"); return new CqlVector(list); } /** - * Create a new CqlVector instance from the specified string representation. Note that this method - * is intended to mirror {@link #toString()}; passing this method the output from a toString - * call on some CqlVector should return a CqlVector that is equal to the origin instance. + * Create a new CqlVector instance from the specified string representation. 
* * @param str a String representation of a CqlVector * @param subtypeCodec * @return a new CqlVector built from the String representation */ - public static CqlVector from( - @NonNull String str, @NonNull TypeCodec subtypeCodec) { + public static CqlVector from(@NonNull String str, @NonNull TypeCodec subtypeCodec) { Preconditions.checkArgument(str != null, "Cannot create CqlVector from null string"); Preconditions.checkArgument(!str.isEmpty(), "Cannot create CqlVector from empty string"); - ArrayList vals = - Streams.stream(Splitter.on(", ").split(str.substring(1, str.length() - 1))) - .map(subtypeCodec::parse) - .collect(Collectors.toCollection(ArrayList::new)); - return new CqlVector(vals); + if (str.equalsIgnoreCase("NULL")) return null; + + int idx = ParseUtils.skipSpaces(str, 0); + if (str.charAt(idx++) != '[') + throw new IllegalArgumentException( + String.format( + "Cannot parse vector value from \"%s\", at character %d expecting '[' but got '%c'", + str, idx, str.charAt(idx))); + + idx = ParseUtils.skipSpaces(str, idx); + + if (str.charAt(idx) == ']') { + return new CqlVector<>(new ArrayList<>()); + } + + List list = new ArrayList<>(); + while (idx < str.length()) { + int n; + try { + n = ParseUtils.skipCQLValue(str, idx); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException( + String.format( + "Cannot parse vector value from \"%s\", invalid CQL value at character %d", + str, idx), + e); + } + + list.add(subtypeCodec.parse(str.substring(idx, n))); + idx = n; + + idx = ParseUtils.skipSpaces(str, idx); + if (str.charAt(idx) == ']') return new CqlVector<>(list); + if (str.charAt(idx++) != ',') + throw new IllegalArgumentException( + String.format( + "Cannot parse vector value from \"%s\", at character %d expecting ',' but got '%c'", + str, idx, str.charAt(idx))); + + idx = ParseUtils.skipSpaces(str, idx); + } + throw new IllegalArgumentException( + String.format("Malformed vector value \"%s\", missing closing ']'", str)); } private 
final List list; @@ -194,6 +227,11 @@ public int hashCode() { return Objects.hash(list); } + /** + * The string representation of the vector. Elements, like strings, may not be properly quoted. + * + * @return the string representation + */ @Override public String toString() { return Iterables.toString(this.list); @@ -205,7 +243,7 @@ public String toString() { * * @param inner type of CqlVector, assume Number is always Serializable. */ - private static class SerializationProxy implements Serializable { + private static class SerializationProxy implements Serializable { private static final long serialVersionUID = 1; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java index 0a24214b20a..8393bc9f758 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableById.java @@ -531,7 +531,7 @@ default CqlDuration getCqlDuration(@NonNull CqlIdentifier id) { * @throws IllegalArgumentException if the id is invalid. */ @Nullable - default CqlVector getVector( + default CqlVector getVector( @NonNull CqlIdentifier id, @NonNull Class elementsClass) { return getVector(firstIndexOf(id), elementsClass); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java index 53541b0ac58..bb75bd9a2b4 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByIndex.java @@ -446,8 +446,7 @@ default CqlDuration getCqlDuration(int i) { * @throws IndexOutOfBoundsException if the index is invalid. 
*/ @Nullable - default CqlVector getVector( - int i, @NonNull Class elementsClass) { + default CqlVector getVector(int i, @NonNull Class elementsClass) { return get(i, GenericType.vectorOf(elementsClass)); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java index ec3ee362ca8..b0a4660033b 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/GettableByName.java @@ -527,7 +527,7 @@ default CqlDuration getCqlDuration(@NonNull String name) { * @throws IllegalArgumentException if the name is invalid. */ @Nullable - default CqlVector getVector( + default CqlVector getVector( @NonNull String name, @NonNull Class elementsClass) { return getVector(firstIndexOf(name), elementsClass); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java index 8452446205e..0f5e3cd9daa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableById.java @@ -573,7 +573,7 @@ default SelfT setCqlDuration(@NonNull CqlIdentifier id, @Nullable CqlDuration v) */ @NonNull @CheckReturnValue - default SelfT setVector( + default SelfT setVector( @NonNull CqlIdentifier id, @Nullable CqlVector v, @NonNull Class elementsClass) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java index bb55db3adde..4ecdf647590 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByIndex.java @@ -425,7 +425,7 @@ default SelfT setCqlDuration(int i, @Nullable CqlDuration v) { */ 
@NonNull @CheckReturnValue - default SelfT setVector( + default SelfT setVector( int i, @Nullable CqlVector v, @NonNull Class elementsClass) { return set(i, v, GenericType.vectorOf(elementsClass)); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java index c25a7074373..afe9ba59f64 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/data/SettableByName.java @@ -572,7 +572,7 @@ default SelfT setCqlDuration(@NonNull String name, @Nullable CqlDuration v) { */ @NonNull @CheckReturnValue - default SelfT setVector( + default SelfT setVector( @NonNull String name, @Nullable CqlVector v, @NonNull Class elementsClass) { diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java index 3a341eaa5aa..492fc121c71 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/DataTypes.java @@ -27,12 +27,10 @@ import com.datastax.oss.driver.internal.core.type.DefaultTupleType; import com.datastax.oss.driver.internal.core.type.DefaultVectorType; import com.datastax.oss.driver.internal.core.type.PrimitiveType; -import com.datastax.oss.driver.shaded.guava.common.base.Splitter; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.ProtocolConstants; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Arrays; -import java.util.List; /** Constants and factory methods to obtain data type instances. 
*/ public class DataTypes { @@ -59,7 +57,6 @@ public class DataTypes { public static final DataType DURATION = new PrimitiveType(ProtocolConstants.DataType.DURATION); private static final DataTypeClassNameParser classNameParser = new DataTypeClassNameParser(); - private static final Splitter paramSplitter = Splitter.on(',').trimResults(); @NonNull public static DataType custom(@NonNull String className) { @@ -68,20 +65,8 @@ public static DataType custom(@NonNull String className) { if (className.equals("org.apache.cassandra.db.marshal.DurationType")) return DURATION; /* Vector support is currently implemented as a custom type but is also parameterized */ - if (className.startsWith(DefaultVectorType.VECTOR_CLASS_NAME)) { - List params = - paramSplitter.splitToList( - className.substring( - DefaultVectorType.VECTOR_CLASS_NAME.length() + 1, className.length() - 1)); - DataType subType = classNameParser.parse(params.get(0), AttachmentPoint.NONE); - int dimensions = Integer.parseInt(params.get(1)); - if (dimensions <= 0) { - throw new IllegalArgumentException( - String.format( - "Request to create vector of size %d, size must be positive", dimensions)); - } - return new DefaultVectorType(subType, dimensions); - } + if (className.startsWith(DefaultVectorType.VECTOR_CLASS_NAME)) + return classNameParser.parse(className, AttachmentPoint.NONE); return new DefaultCustomType(className); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodec.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodec.java index 05ae3980823..d6afbe0380a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodec.java @@ -28,6 +28,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; /** * Manages the two-way conversion between a CQL 
type and a Java type. @@ -234,4 +235,9 @@ default boolean accepts(@NonNull DataType cqlType) { */ @Nullable JavaTypeT parse(@Nullable String value); + + @NonNull + default Optional serializedSize() { + return Optional.empty(); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java index 9f2fd5cc69e..68f1b07b106 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/codec/TypeCodecs.java @@ -210,13 +210,13 @@ public static TypeCodec tupleOf(@NonNull TupleType cqlType) { return new TupleCodec(cqlType); } - public static TypeCodec> vectorOf( + public static TypeCodec> vectorOf( @NonNull VectorType type, @NonNull TypeCodec subtypeCodec) { return new VectorCodec( DataTypes.vectorOf(subtypeCodec.getCqlType(), type.getDimensions()), subtypeCodec); } - public static TypeCodec> vectorOf( + public static TypeCodec> vectorOf( int dimensions, @NonNull TypeCodec subtypeCodec) { return new VectorCodec(DataTypes.vectorOf(subtypeCodec.getCqlType(), dimensions), subtypeCodec); } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java index c6482d4f4d1..d22b6f1bfaf 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/type/reflect/GenericType.java @@ -151,8 +151,7 @@ public static GenericType> setOf(@NonNull GenericType elementType) } @NonNull - public static GenericType> vectorOf( - @NonNull Class elementType) { + public static GenericType> vectorOf(@NonNull Class elementType) { TypeToken> token = new TypeToken>() {}.where( new TypeParameter() {}, TypeToken.of(elementType)); @@ -160,8 +159,7 @@ public static GenericType> vectorOf( } @NonNull - 
public static GenericType> vectorOf( - @NonNull GenericType elementType) { + public static GenericType> vectorOf(@NonNull GenericType elementType) { TypeToken> token = new TypeToken>() {}.where(new TypeParameter() {}, elementType.token); return new GenericType<>(token); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java index fd6f1a4bd51..bf252d0bc57 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/schema/parsing/DataTypeClassNameParser.java @@ -34,6 +34,7 @@ import com.datastax.oss.protocol.internal.util.Bytes; import java.util.ArrayList; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -164,6 +165,13 @@ private DataType parse( return new DefaultTupleType(componentTypesBuilder.build(), attachmentPoint); } + if (next.startsWith("org.apache.cassandra.db.marshal.VectorType")) { + Iterator rawTypes = parser.getTypeParameters().iterator(); + DataType subtype = parse(rawTypes.next(), userTypes, attachmentPoint, logPrefix); + int dimensions = Integer.parseInt(rawTypes.next()); + return DataTypes.vectorOf(subtype, dimensions); + } + DataType type = NATIVE_TYPES_BY_CLASS_NAME.get(next); return type == null ? 
DataTypes.custom(toParse) : type; } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java index c9180d44edc..0b1ced94769 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/DefaultVectorType.java @@ -78,7 +78,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(super.hashCode(), subtype, dimensions); + return Objects.hash(DefaultVectorType.class, subtype, dimensions); } @Override diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodec.java index 2b3b8255cc1..8496da17fa6 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BigIntCodec.java @@ -25,6 +25,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -90,4 +91,10 @@ public Long parse(@Nullable String value) { String.format("Cannot parse 64-bits long value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(8); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BooleanCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BooleanCodec.java index 7a982a9e6ca..af388982be9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BooleanCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/BooleanCodec.java @@ -25,6 +25,7 @@ import 
edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -98,4 +99,10 @@ public Boolean parse(@Nullable String value) { String.format("Cannot parse boolean value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(1); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/DoubleCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/DoubleCodec.java index 28eff6f9463..b01847517d9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/DoubleCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/DoubleCodec.java @@ -25,6 +25,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -90,4 +91,10 @@ public Double parse(@Nullable String value) { String.format("Cannot parse 64-bits double value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(8); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/FloatCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/FloatCodec.java index 11786dbc77d..fd851edfad3 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/FloatCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/FloatCodec.java @@ -25,6 +25,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -90,4 +91,10 @@ public Float parse(@Nullable String value) { String.format("Cannot 
parse 32-bits float value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(4); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/IntCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/IntCodec.java index e5bb530ba79..b11b164a445 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/IntCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/IntCodec.java @@ -25,6 +25,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import net.jcip.annotations.ThreadSafe; @ThreadSafe @@ -90,4 +91,10 @@ public Integer parse(@Nullable String value) { String.format("Cannot parse 32-bits int value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(4); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java index eeba3c7c66c..964f774c8d9 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/TimestampCodec.java @@ -33,6 +33,7 @@ import java.time.Instant; import java.time.ZoneId; import java.util.Date; +import java.util.Optional; import java.util.TimeZone; import net.jcip.annotations.ThreadSafe; @@ -293,4 +294,10 @@ public Instant parse(@Nullable String value) { String.format("Cannot parse timestamp value from \"%s\"", value)); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(8); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UuidCodec.java 
b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UuidCodec.java index 57feac4ae7e..cc5f48dbe52 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UuidCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/UuidCodec.java @@ -25,6 +25,7 @@ import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; +import java.util.Optional; import java.util.UUID; import net.jcip.annotations.ThreadSafe; @@ -95,4 +96,10 @@ public UUID parse(@Nullable String value) { String.format("Cannot parse UUID value from \"%s\"", value), e); } } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(16); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java index 2c4d2200b13..1f8ce1a7166 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodec.java @@ -24,7 +24,7 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.reflect.GenericType; import com.datastax.oss.driver.internal.core.type.DefaultVectorType; -import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; +import com.datastax.oss.driver.internal.core.type.util.VIntCoding; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; import java.nio.ByteBuffer; @@ -32,8 +32,10 @@ import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; +import java.util.Optional; +import java.util.stream.Collectors; -public class VectorCodec implements TypeCodec> { +public class VectorCodec implements TypeCodec> { private final VectorType cqlType; private final GenericType> javaType; @@ -55,6 
+57,14 @@ public GenericType> getJavaType() { return this.javaType; } + @NonNull + @Override + public Optional serializedSize() { + return subtypeCodec.serializedSize().isPresent() + ? Optional.of(subtypeCodec.serializedSize().get() * cqlType.getDimensions()) + : Optional.empty(); + } + @NonNull @Override public DataType getCqlType() { @@ -65,6 +75,7 @@ public DataType getCqlType() { @Override public ByteBuffer encode( @Nullable CqlVector value, @NonNull ProtocolVersion protocolVersion) { + boolean isVarSized = !subtypeCodec.serializedSize().isPresent(); if (value == null || cqlType.getDimensions() <= 0) { return null; } @@ -92,14 +103,28 @@ public ByteBuffer encode( if (valueBuff == null) { throw new NullPointerException("Vector elements cannot encode to CQL NULL"); } - allValueBuffsSize += valueBuff.limit(); + int elementSize = valueBuff.limit(); + if (isVarSized) { + allValueBuffsSize += VIntCoding.computeVIntSize(elementSize); + } + allValueBuffsSize += elementSize; valueBuff.rewind(); valueBuffs[i] = valueBuff; } + // if too many elements, throw + if (values.hasNext()) { + throw new IllegalArgumentException( + String.format( + "Too many elements; must provide elements for %d dimensions", + cqlType.getDimensions())); + } /* Since we already did an early return for <= 0 dimensions above */ assert valueBuffs.length > 0; ByteBuffer rv = ByteBuffer.allocate(allValueBuffsSize); for (int i = 0; i < cqlType.getDimensions(); ++i) { + if (isVarSized) { + VIntCoding.writeUnsignedVInt32(valueBuffs[i].remaining(), rv); + } rv.put(valueBuffs[i]); } rv.flip(); @@ -114,39 +139,58 @@ public CqlVector decode( return null; } - /* Determine element size by dividing count of remaining bytes by number of elements. This should have a remainder - of zero if we assume all elements are of uniform size (which is really a terrible assumption). - - TODO: We should probably tweak serialization format for vectors if we're going to allow them for arbitrary subtypes. 
- Elements should at least precede themselves with their size (along the lines of what lists do). */ - int elementSize = Math.floorDiv(bytes.remaining(), cqlType.getDimensions()); - if (!(bytes.remaining() % cqlType.getDimensions() == 0)) { - throw new IllegalArgumentException( - String.format( - "Expected elements of uniform size, observed %d elements with total bytes %d", - cqlType.getDimensions(), bytes.remaining())); - } - + // Upfront check for fixed-size types only + subtypeCodec + .serializedSize() + .ifPresent( + (fixed_size) -> { + if (bytes.remaining() != cqlType.getDimensions() * fixed_size) { + throw new IllegalArgumentException( + String.format( + "Expected elements of uniform size, observed %d elements with total bytes %d", + cqlType.getDimensions(), bytes.remaining())); + } + }); + ; ByteBuffer slice = bytes.slice(); List rv = new ArrayList(cqlType.getDimensions()); for (int i = 0; i < cqlType.getDimensions(); ++i) { - // Set the limit for the current element + + int size = + subtypeCodec + .serializedSize() + .orElseGet(() -> VIntCoding.getUnsignedVInt32(slice, slice.position())); + // If we aren't dealing with a fixed-size type we need to move the current slice position + // beyond the vint-encoded size of the current element. Ideally this would be + // serializedSize().ifNotPresent(Consumer) but the Optional API isn't doing us any favors + // there. 
+ if (!subtypeCodec.serializedSize().isPresent()) + slice.position(slice.position() + VIntCoding.computeUnsignedVIntSize(size)); int originalPosition = slice.position(); - slice.limit(originalPosition + elementSize); + slice.limit(originalPosition + size); rv.add(this.subtypeCodec.decode(slice, protocolVersion)); // Move to the start of the next element - slice.position(originalPosition + elementSize); + slice.position(originalPosition + size); // Reset the limit to the end of the buffer slice.limit(slice.capacity()); } + // if too many elements, throw + if (slice.hasRemaining()) { + throw new IllegalArgumentException( + String.format( + "Too many elements; must provide elements for %d dimensions", + cqlType.getDimensions())); + } + return CqlVector.newInstance(rv); } @NonNull @Override - public String format(@Nullable CqlVector value) { - return value == null ? "NULL" : Iterables.toString(value); + public String format(CqlVector value) { + if (value == null) return "NULL"; + return value.stream().map(subtypeCodec::format).collect(Collectors.joining(", ", "[", "]")); } @Nullable diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java index a15495a432d..12e3e839d2a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/extras/time/TimestampMillisCodec.java @@ -31,6 +31,7 @@ import java.time.Instant; import java.time.ZoneId; import java.util.Objects; +import java.util.Optional; import net.jcip.annotations.Immutable; /** @@ -114,4 +115,10 @@ public String format(@Nullable Long value) { Instant instant = value == null ? 
null : Instant.ofEpochMilli(value); return timestampCodec.format(instant); } + + @NonNull + @Override + public Optional serializedSize() { + return Optional.of(8); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java index 495d6227d93..3af5a30ba27 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistry.java @@ -394,10 +394,9 @@ protected GenericType inspectType(@NonNull Object value, @Nullable DataType c "Can't infer vector codec because the first element is null " + "(note that CQL does not allow null values in collections)"); } - GenericType elementType = - (GenericType) - inspectType( - firstElement, cqlType == null ? null : ((VectorType) cqlType).getElementType()); + GenericType elementType = + inspectType( + firstElement, cqlType == null ? null : ((VectorType) cqlType).getElementType()); return GenericType.vectorOf(elementType); } } else { @@ -421,8 +420,7 @@ protected GenericType inferJavaTypeFromCqlType(@NonNull DataType cqlType) { inferJavaTypeFromCqlType(keyType), inferJavaTypeFromCqlType(valueType)); } else if (cqlType instanceof VectorType) { DataType elementType = ((VectorType) cqlType).getElementType(); - GenericType numberType = - (GenericType) inferJavaTypeFromCqlType(elementType); + GenericType numberType = inferJavaTypeFromCqlType(elementType); return GenericType.vectorOf(numberType); } switch (cqlType.getProtocolCode()) { @@ -657,7 +655,7 @@ protected TypeCodec createCodec( /* For a vector type we'll always get back an instance of TypeCodec due to the * type of CqlVector... but getElementCodecForCqlAndJavaType() is a generalized function that can't * return this more precise type. Thus the cast here. 
*/ - TypeCodec elementCodec = + TypeCodec elementCodec = uncheckedCast(getElementCodecForCqlAndJavaType(vectorType, token, isJavaCovariant)); return TypeCodecs.vectorOf(vectorType, elementCodec); } else if (cqlType instanceof CustomType diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java index 5ee375a81e5..552f84f2ae1 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/util/VIntCoding.java @@ -49,6 +49,7 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; +import java.nio.ByteBuffer; /** * Variable length encoding inspired from Google > 6; } + + public static void writeUnsignedVInt32(int value, ByteBuffer output) { + writeUnsignedVInt((long) value, output); + } + + public static void writeUnsignedVInt(long value, ByteBuffer output) { + int size = VIntCoding.computeUnsignedVIntSize(value); + if (size == 1) { + output.put((byte) value); + return; + } + + output.put(VIntCoding.encodeVInt(value, size), 0, size); + } + + /** + * Read up to a 32-bit integer back, using the unsigned (no zigzag) encoding. + * + *

          Note this method is the same as {@link #readUnsignedVInt(DataInput)}, except that we do + * *not* block if there are not enough bytes in the buffer to reconstruct the value. + * + * @throws VIntOutOfRangeException If the vint doesn't fit into a 32-bit integer + */ + public static int getUnsignedVInt32(ByteBuffer input, int readerIndex) { + return checkedCast(getUnsignedVInt(input, readerIndex)); + } + + public static long getUnsignedVInt(ByteBuffer input, int readerIndex) { + return getUnsignedVInt(input, readerIndex, input.limit()); + } + + public static long getUnsignedVInt(ByteBuffer input, int readerIndex, int readerLimit) { + if (readerIndex < 0) + throw new IllegalArgumentException( + "Reader index should be non-negative, but was " + readerIndex); + + if (readerIndex >= readerLimit) return -1; + + int firstByte = input.get(readerIndex++); + + // Bail out early if this is one byte, necessary or it fails later + if (firstByte >= 0) return firstByte; + + int size = numberOfExtraBytesToRead(firstByte); + if (readerIndex + size > readerLimit) return -1; + + long retval = firstByte & firstByteValueMask(size); + for (int ii = 0; ii < size; ii++) { + byte b = input.get(readerIndex++); + retval <<= 8; + retval |= b & 0xff; + } + + return retval; + } + + public static int checkedCast(long value) { + int result = (int) value; + if ((long) result != value) throw new VIntOutOfRangeException(value); + return result; + } + + /** + * Throw when attempting to decode a vint and the output type doesn't have enough space to fit the + * value that was decoded + */ + public static class VIntOutOfRangeException extends RuntimeException { + public final long value; + + private VIntOutOfRangeException(long value) { + super(value + " is out of range for a 32-bit integer"); + this.value = value; + } + } } diff --git a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java index 
90f4cc6e776..3e0872cb946 100644 --- a/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/api/core/data/CqlVectorTest.java @@ -23,56 +23,60 @@ import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; import com.datastax.oss.driver.internal.SerializationHelper; -import com.datastax.oss.driver.shaded.guava.common.collect.Iterators; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; import java.io.ByteArrayInputStream; import java.io.ObjectInputStream; import java.io.ObjectStreamException; +import java.time.LocalTime; import java.util.AbstractList; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Hex; import org.assertj.core.util.Lists; import org.junit.Test; +import org.junit.runner.RunWith; +@RunWith(DataProviderRunner.class) public class CqlVectorTest { - private static final Float[] VECTOR_ARGS = {1.0f, 2.5f}; - - private void validate_built_vector(CqlVector vec) { + @DataProvider + public static Object[][] dataProvider() { + return new Object[][] { + {new Float[] {1.0f, 2.5f}}, + {new LocalTime[] {LocalTime.of(1, 2), LocalTime.of(3, 4)}}, + {new List[] {Arrays.asList(1, 2), Arrays.asList(3, 4)}}, + {new CqlVector[] {CqlVector.newInstance("a", "bc"), CqlVector.newInstance("d", "ef")}} + }; + } + private void validate_built_vector(CqlVector vec, Object[] expectedVals) { assertThat(vec.size()).isEqualTo(2); assertThat(vec.isEmpty()).isFalse(); - assertThat(vec.get(0)).isEqualTo(VECTOR_ARGS[0]); - assertThat(vec.get(1)).isEqualTo(VECTOR_ARGS[1]); + assertThat(vec.get(0)).isEqualTo(expectedVals[0]); + assertThat(vec.get(1)).isEqualTo(expectedVals[1]); } + 
@UseDataProvider("dataProvider") @Test - public void should_build_vector_from_elements() { - - validate_built_vector(CqlVector.newInstance(VECTOR_ARGS)); + public void should_build_vector_from_elements(Object[] vals) { + validate_built_vector(CqlVector.newInstance(vals), vals); } @Test - public void should_build_vector_from_list() { - - validate_built_vector(CqlVector.newInstance(Lists.newArrayList(VECTOR_ARGS))); - } - - @Test - public void should_build_vector_from_tostring_output() { - - CqlVector vector1 = CqlVector.newInstance(VECTOR_ARGS); - CqlVector vector2 = CqlVector.from(vector1.toString(), TypeCodecs.FLOAT); - assertThat(vector2).isEqualTo(vector1); + @UseDataProvider("dataProvider") + public void should_build_vector_from_list(Object[] vals) { + validate_built_vector(CqlVector.newInstance(Lists.newArrayList(vals)), vals); } @Test public void should_throw_from_null_string() { - assertThatThrownBy( () -> { CqlVector.from(null, TypeCodecs.FLOAT); @@ -116,101 +120,97 @@ public void should_throw_when_building_with_nulls() { @Test public void should_build_empty_vector() { - CqlVector vector = CqlVector.newInstance(); assertThat(vector.isEmpty()).isTrue(); assertThat(vector.size()).isEqualTo(0); } @Test - public void should_behave_mostly_like_a_list() { - - CqlVector vector = CqlVector.newInstance(VECTOR_ARGS); - assertThat(vector.get(0)).isEqualTo(VECTOR_ARGS[0]); - Float newVal = VECTOR_ARGS[0] * 2; - vector.set(0, newVal); - assertThat(vector.get(0)).isEqualTo(newVal); + @UseDataProvider("dataProvider") + public void should_behave_mostly_like_a_list(T[] vals) { + T[] theArray = Arrays.copyOf(vals, vals.length); + CqlVector vector = CqlVector.newInstance(theArray); + assertThat(vector.get(0)).isEqualTo(theArray[0]); + vector.set(0, theArray[1]); + assertThat(vector.get(0)).isEqualTo(theArray[1]); assertThat(vector.isEmpty()).isFalse(); assertThat(vector.size()).isEqualTo(2); - assertThat(Iterators.toArray(vector.iterator(), 
Float.class)).isEqualTo(VECTOR_ARGS); + Iterator iterator = vector.iterator(); + assertThat(iterator.next()).isEqualTo(theArray[1]); + assertThat(iterator.next()).isEqualTo(theArray[1]); } @Test - public void should_play_nicely_with_streams() { - - CqlVector vector = CqlVector.newInstance(VECTOR_ARGS); - List results = + @UseDataProvider("dataProvider") + public void should_play_nicely_with_streams(T[] vals) { + CqlVector vector = CqlVector.newInstance(vals); + List results = vector.stream() - .map((f) -> f * 2) - .collect(Collectors.toCollection(() -> new ArrayList())); + .map(Object::toString) + .collect(Collectors.toCollection(() -> new ArrayList())); for (int i = 0; i < vector.size(); ++i) { - assertThat(results.get(i)).isEqualTo(vector.get(i) * 2); + assertThat(results.get(i)).isEqualTo(vector.get(i).toString()); } } @Test - public void should_reflect_changes_to_mutable_list() { - - List theList = Lists.newArrayList(1.1f, 2.2f, 3.3f); - CqlVector vector = CqlVector.newInstance(theList); - assertThat(vector.size()).isEqualTo(3); - assertThat(vector.get(2)).isEqualTo(3.3f); - - float newVal1 = 4.4f; - theList.set(2, newVal1); - assertThat(vector.size()).isEqualTo(3); - assertThat(vector.get(2)).isEqualTo(newVal1); + @UseDataProvider("dataProvider") + public void should_reflect_changes_to_mutable_list(T[] vals) { + List theList = Lists.newArrayList(vals); + CqlVector vector = CqlVector.newInstance(theList); + assertThat(vector.size()).isEqualTo(2); + assertThat(vector.get(1)).isEqualTo(vals[1]); - float newVal2 = 5.5f; - theList.add(newVal2); - assertThat(vector.size()).isEqualTo(4); - assertThat(vector.get(3)).isEqualTo(newVal2); + T newVal = vals[0]; + theList.set(1, newVal); + assertThat(vector.size()).isEqualTo(2); + assertThat(vector.get(1)).isEqualTo(newVal); } @Test - public void should_reflect_changes_to_array() { - - Float[] theArray = new Float[] {1.1f, 2.2f, 3.3f}; - CqlVector vector = CqlVector.newInstance(theArray); - 
assertThat(vector.size()).isEqualTo(3); - assertThat(vector.get(2)).isEqualTo(3.3f); + @UseDataProvider("dataProvider") + public void should_reflect_changes_to_array(T[] vals) { + T[] theArray = Arrays.copyOf(vals, vals.length); + CqlVector vector = CqlVector.newInstance(theArray); + assertThat(vector.size()).isEqualTo(2); + assertThat(vector.get(1)).isEqualTo(theArray[1]); - float newVal1 = 4.4f; - theArray[2] = newVal1; - assertThat(vector.size()).isEqualTo(3); - assertThat(vector.get(2)).isEqualTo(newVal1); + T newVal = theArray[0]; + theArray[1] = newVal; + assertThat(vector.size()).isEqualTo(2); + assertThat(vector.get(1)).isEqualTo(newVal); } @Test - public void should_correctly_compare_vectors() { - - Float[] args = VECTOR_ARGS.clone(); - CqlVector vector1 = CqlVector.newInstance(args); - CqlVector vector2 = CqlVector.newInstance(args); - CqlVector vector3 = CqlVector.newInstance(Lists.newArrayList(args)); + @UseDataProvider("dataProvider") + public void should_correctly_compare_vectors(T[] vals) { + CqlVector vector1 = CqlVector.newInstance(vals); + CqlVector vector2 = CqlVector.newInstance(vals); + CqlVector vector3 = CqlVector.newInstance(Lists.newArrayList(vals)); assertThat(vector1).isNotSameAs(vector2); assertThat(vector1).isEqualTo(vector2); assertThat(vector1).isNotSameAs(vector3); assertThat(vector1).isEqualTo(vector3); - Float[] differentArgs = args.clone(); - float newVal = differentArgs[0] * 2; + T[] differentArgs = Arrays.copyOf(vals, vals.length); + T newVal = differentArgs[1]; differentArgs[0] = newVal; - CqlVector vector4 = CqlVector.newInstance(differentArgs); + CqlVector vector4 = CqlVector.newInstance(differentArgs); assertThat(vector1).isNotSameAs(vector4); assertThat(vector1).isNotEqualTo(vector4); - Float[] biggerArgs = Arrays.copyOf(args, args.length + 1); + T[] biggerArgs = Arrays.copyOf(vals, vals.length + 1); biggerArgs[biggerArgs.length - 1] = newVal; - CqlVector vector5 = CqlVector.newInstance(biggerArgs); + CqlVector vector5 = 
CqlVector.newInstance(biggerArgs); assertThat(vector1).isNotSameAs(vector5); assertThat(vector1).isNotEqualTo(vector5); } @Test - public void should_serialize_and_deserialize() throws Exception { - CqlVector initial = CqlVector.newInstance(VECTOR_ARGS); - CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); + @UseDataProvider("dataProvider") + public void should_serialize_and_deserialize(T[] vals) throws Exception { + CqlVector initial = CqlVector.newInstance(vals); + CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); assertThat(deserialized).isEqualTo(initial); } @@ -222,21 +222,22 @@ public void should_serialize_and_deserialize_empty_vector() throws Exception { } @Test - public void should_serialize_and_deserialize_unserializable_list() throws Exception { - CqlVector initial = + @UseDataProvider("dataProvider") + public void should_serialize_and_deserialize_unserializable_list(T[] vals) throws Exception { + CqlVector initial = CqlVector.newInstance( - new AbstractList() { + new AbstractList() { @Override - public Float get(int index) { - return VECTOR_ARGS[index]; + public T get(int index) { + return vals[index]; } @Override public int size() { - return VECTOR_ARGS.length; + return vals.length; } }); - CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); + CqlVector deserialized = SerializationHelper.serializeAndDeserialize(initial); assertThat(deserialized).isEqualTo(initial); } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java index 969d35cbbbe..17c78514127 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/VectorCodecTest.java @@ -20,122 +20,255 @@ import static org.assertj.core.api.Assertions.assertThat; import static 
org.assertj.core.api.Assertions.assertThatThrownBy; +import com.datastax.oss.driver.api.core.ProtocolVersion; import com.datastax.oss.driver.api.core.data.CqlVector; +import com.datastax.oss.driver.api.core.type.DataType; import com.datastax.oss.driver.api.core.type.DataTypes; -import com.datastax.oss.driver.api.core.type.VectorType; +import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.TypeCodecs; -import com.datastax.oss.driver.api.core.type.reflect.GenericType; +import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.type.DefaultVectorType; -import java.util.Arrays; +import com.datastax.oss.protocol.internal.util.Bytes; +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.nio.ByteBuffer; +import java.time.LocalTime; +import java.util.HashMap; +import org.apache.commons.lang3.ArrayUtils; import org.junit.Test; +import org.junit.runner.RunWith; -public class VectorCodecTest extends CodecTestBase> { +@RunWith(DataProviderRunner.class) +public class VectorCodecTest { - private static final Float[] VECTOR_ARGS = {1.0f, 2.5f}; - - private static final CqlVector VECTOR = CqlVector.newInstance(VECTOR_ARGS); - - private static final String VECTOR_HEX_STRING = "0x" + "3f800000" + "40200000"; - - private static final String FORMATTED_VECTOR = "[1.0, 2.5]"; - - public VectorCodecTest() { - VectorType vectorType = DataTypes.vectorOf(DataTypes.FLOAT, 2); - this.codec = TypeCodecs.vectorOf(vectorType, TypeCodecs.FLOAT); + @DataProvider + public static Object[] dataProvider() { + HashMap map1 = new HashMap<>(); + map1.put(1, "a"); + HashMap map2 = new HashMap<>(); + map2.put(2, "b"); + return new TestDataContainer[] { + new TestDataContainer( + DataTypes.FLOAT, + new Float[] {1.0f, 2.5f}, + "[1.0, 2.5]", + 
Bytes.fromHexString("0x3f80000040200000")), + new TestDataContainer( + DataTypes.ASCII, + new String[] {"ab", "cde"}, + "['ab', 'cde']", + Bytes.fromHexString("0x02616203636465")), + new TestDataContainer( + DataTypes.BIGINT, + new Long[] {1L, 2L}, + "[1, 2]", + Bytes.fromHexString("0x00000000000000010000000000000002")), + new TestDataContainer( + DataTypes.BLOB, + new ByteBuffer[] {Bytes.fromHexString("0xCAFE"), Bytes.fromHexString("0xABCD")}, + "[0xcafe, 0xabcd]", + Bytes.fromHexString("0x02cafe02abcd")), + new TestDataContainer( + DataTypes.BOOLEAN, + new Boolean[] {true, false}, + "[true, false]", + Bytes.fromHexString("0x0100")), + new TestDataContainer( + DataTypes.TIME, + new LocalTime[] {LocalTime.ofNanoOfDay(1), LocalTime.ofNanoOfDay(2)}, + "['00:00:00.000000001', '00:00:00.000000002']", + Bytes.fromHexString("0x080000000000000001080000000000000002")), + new TestDataContainer( + DataTypes.mapOf(DataTypes.INT, DataTypes.ASCII), + new HashMap[] {map1, map2}, + "[{1:'a'}, {2:'b'}]", + Bytes.fromHexString( + "0x110000000100000004000000010000000161110000000100000004000000020000000162")), + new TestDataContainer( + DataTypes.vectorOf(DataTypes.INT, 1), + new CqlVector[] {CqlVector.newInstance(1), CqlVector.newInstance(2)}, + "[[1], [2]]", + Bytes.fromHexString("0x0000000100000002")), + new TestDataContainer( + DataTypes.vectorOf(DataTypes.TEXT, 1), + new CqlVector[] {CqlVector.newInstance("ab"), CqlVector.newInstance("cdef")}, + "[['ab'], ['cdef']]", + Bytes.fromHexString("0x03026162050463646566")), + new TestDataContainer( + DataTypes.vectorOf(DataTypes.vectorOf(DataTypes.FLOAT, 2), 1), + new CqlVector[] { + CqlVector.newInstance(CqlVector.newInstance(1.0f, 2.5f)), + CqlVector.newInstance(CqlVector.newInstance(3.0f, 4.5f)) + }, + "[[[1.0, 2.5]], [[3.0, 4.5]]]", + Bytes.fromHexString("0x3f800000402000004040000040900000")) + }; } + @UseDataProvider("dataProvider") @Test - public void should_encode() { - assertThat(encode(VECTOR)).isEqualTo(VECTOR_HEX_STRING); - 
assertThat(encode(null)).isNull(); + public void should_encode(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + CqlVector vector = CqlVector.newInstance(testData.getValues()); + assertThat(codec.encode(vector, ProtocolVersion.DEFAULT)).isEqualTo(testData.getBytes()); } - /** Too few eleements will cause an exception, extra elements will be silently ignored */ @Test - public void should_throw_on_encode_with_too_few_elements() { - assertThatThrownBy(() -> encode(VECTOR.subVector(0, 1))) + @UseDataProvider("dataProvider") + public void should_throw_on_encode_with_too_few_elements(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThatThrownBy( + () -> + codec.encode( + CqlVector.newInstance(testData.getValues()[0]), ProtocolVersion.DEFAULT)) .isInstanceOf(IllegalArgumentException.class); } @Test - public void should_throw_on_encode_with_empty_list() { - assertThatThrownBy(() -> encode(CqlVector.newInstance())) + @UseDataProvider("dataProvider") + public void should_throw_on_encode_with_too_many_elements(TestDataContainer testData) { + Object[] doubled = ArrayUtils.addAll(testData.getValues(), testData.getValues()); + TypeCodec> codec = getCodec(testData.getDataType()); + assertThatThrownBy(() -> codec.encode(CqlVector.newInstance(doubled), ProtocolVersion.DEFAULT)) .isInstanceOf(IllegalArgumentException.class); } @Test - public void should_encode_with_too_many_elements() { - Float[] doubledVectorContents = Arrays.copyOf(VECTOR_ARGS, VECTOR_ARGS.length * 2); - System.arraycopy(VECTOR_ARGS, 0, doubledVectorContents, VECTOR_ARGS.length, VECTOR_ARGS.length); - assertThat(encode(CqlVector.newInstance(doubledVectorContents))).isEqualTo(VECTOR_HEX_STRING); + @UseDataProvider("dataProvider") + public void should_decode(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThat(codec.decode(testData.getBytes(), ProtocolVersion.DEFAULT)) + 
.isEqualTo(CqlVector.newInstance(testData.getValues())); } @Test - public void should_decode() { - assertThat(decode(VECTOR_HEX_STRING)).isEqualTo(VECTOR); - assertThat(decode("0x")).isNull(); - assertThat(decode(null)).isNull(); + @UseDataProvider("dataProvider") + public void should_throw_on_decode_if_too_few_bytes(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + int lastIndex = testData.getBytes().remaining() - 1; + assertThatThrownBy( + () -> + codec.decode( + (ByteBuffer) testData.getBytes().duplicate().limit(lastIndex), + ProtocolVersion.DEFAULT)) + .isInstanceOf(IllegalArgumentException.class); } @Test - public void should_throw_on_decode_if_too_few_bytes() { - // Dropping 4 bytes would knock off exactly 1 float, anything less than that would be something - // we couldn't parse a float out of - for (int i = 1; i <= 3; ++i) { - // 2 chars of hex encoded string = 1 byte - int lastIndex = VECTOR_HEX_STRING.length() - (2 * i); - assertThatThrownBy(() -> decode(VECTOR_HEX_STRING.substring(0, lastIndex))) - .isInstanceOf(IllegalArgumentException.class); - } + @UseDataProvider("dataProvider") + public void should_throw_on_decode_if_too_many_bytes(TestDataContainer testData) { + ByteBuffer doubled = ByteBuffer.allocate(testData.getBytes().remaining() * 2); + doubled.put(testData.getBytes().duplicate()).put(testData.getBytes().duplicate()).flip(); + TypeCodec> codec = getCodec(testData.getDataType()); + assertThatThrownBy(() -> codec.decode(doubled, ProtocolVersion.DEFAULT)) + .isInstanceOf(IllegalArgumentException.class); } @Test - public void should_format() { - assertThat(format(VECTOR)).isEqualTo(FORMATTED_VECTOR); - assertThat(format(null)).isEqualTo("NULL"); + @UseDataProvider("dataProvider") + public void should_format(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + CqlVector vector = CqlVector.newInstance(testData.getValues()); + 
assertThat(codec.format(vector)).isEqualTo(testData.getFormatted()); } @Test - public void should_parse() { - assertThat(parse(FORMATTED_VECTOR)).isEqualTo(VECTOR); - assertThat(parse("NULL")).isNull(); - assertThat(parse("null")).isNull(); - assertThat(parse("")).isNull(); - assertThat(parse(null)).isNull(); + @UseDataProvider("dataProvider") + public void should_parse(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThat(codec.parse(testData.getFormatted())) + .isEqualTo(CqlVector.newInstance(testData.getValues())); } @Test - public void should_accept_data_type() { - assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 2))).isTrue(); - assertThat(codec.accepts(DataTypes.INT)).isFalse(); + @UseDataProvider("dataProvider") + public void should_accept_data_type(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThat(codec.accepts(new DefaultVectorType(testData.getDataType(), 2))).isTrue(); + assertThat(codec.accepts(new DefaultVectorType(DataTypes.custom("non-existent"), 2))).isFalse(); } @Test - public void should_accept_vector_type_correct_dimension_only() { - assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 0))).isFalse(); - assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 1))).isFalse(); - assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, 2))).isTrue(); - for (int i = 3; i < 1000; ++i) { - assertThat(codec.accepts(new DefaultVectorType(DataTypes.FLOAT, i))).isFalse(); - } + @UseDataProvider("dataProvider") + public void should_accept_vector_type_correct_dimension_only(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThat(codec.accepts(new DefaultVectorType(testData.getDataType(), 0))).isFalse(); + assertThat(codec.accepts(new DefaultVectorType(testData.getDataType(), 1))).isFalse(); + assertThat(codec.accepts(new DefaultVectorType(testData.getDataType(), 3))).isFalse(); } @Test - 
public void should_accept_generic_type() { - assertThat(codec.accepts(GenericType.vectorOf(GenericType.FLOAT))).isTrue(); - assertThat(codec.accepts(GenericType.vectorOf(GenericType.INTEGER))).isFalse(); - assertThat(codec.accepts(GenericType.of(Integer.class))).isFalse(); + @UseDataProvider("dataProvider") + public void should_accept_generic_type(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + assertThat(codec.accepts(codec.getJavaType())).isTrue(); } @Test - public void should_accept_raw_type() { + @UseDataProvider("dataProvider") + public void should_accept_raw_type(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); assertThat(codec.accepts(CqlVector.class)).isTrue(); assertThat(codec.accepts(Integer.class)).isFalse(); } @Test - public void should_accept_object() { - assertThat(codec.accepts(VECTOR)).isTrue(); + @UseDataProvider("dataProvider") + public void should_accept_object(TestDataContainer testData) { + TypeCodec> codec = getCodec(testData.getDataType()); + CqlVector vector = CqlVector.newInstance(testData.getValues()); + assertThat(codec.accepts(vector)).isTrue(); assertThat(codec.accepts(Integer.MIN_VALUE)).isFalse(); } + + @Test + public void should_handle_null_and_empty() { + TypeCodec> codec = getCodec(DataTypes.FLOAT); + assertThat(codec.encode(null, ProtocolVersion.DEFAULT)).isNull(); + assertThat(codec.decode(Bytes.fromHexString("0x"), ProtocolVersion.DEFAULT)).isNull(); + assertThat(codec.format(null)).isEqualTo("NULL"); + assertThat(codec.parse("NULL")).isNull(); + assertThat(codec.parse("null")).isNull(); + assertThat(codec.parse("")).isNull(); + assertThat(codec.parse(null)).isNull(); + assertThatThrownBy(() -> codec.encode(CqlVector.newInstance(), ProtocolVersion.DEFAULT)) + .isInstanceOf(IllegalArgumentException.class); + } + + private static TypeCodec> getCodec(DataType dataType) { + return TypeCodecs.vectorOf( + DataTypes.vectorOf(dataType, 2), 
CodecRegistry.DEFAULT.codecFor(dataType)); + } + + private static class TestDataContainer { + private final DataType dataType; + private final Object[] values; + private final String formatted; + private final ByteBuffer bytes; + + public TestDataContainer( + DataType dataType, Object[] values, String formatted, ByteBuffer bytes) { + this.dataType = dataType; + this.values = values; + this.formatted = formatted; + this.bytes = bytes; + } + + public DataType getDataType() { + return dataType; + } + + public Object[] getValues() { + return values; + } + + public String getFormatted() { + return formatted; + } + + public ByteBuffer getBytes() { + return bytes; + } + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java index 1f3f6bbff97..4c0298bafad 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/codec/registry/CachingCodecRegistryTestDataProviders.java @@ -337,6 +337,26 @@ public static Object[][] collectionsWithCqlAndJavaTypes() GenericType.vectorOf(BigInteger.class), CqlVector.newInstance(BigInteger.ONE) }, + // vector with arbitrary types + { + DataTypes.vectorOf(DataTypes.TEXT, 2), + GenericType.vectorOf(String.class), + GenericType.vectorOf(String.class), + CqlVector.newInstance("abc", "de") + }, + { + DataTypes.vectorOf(DataTypes.TIME, 2), + GenericType.vectorOf(LocalTime.class), + GenericType.vectorOf(LocalTime.class), + CqlVector.newInstance(LocalTime.MIDNIGHT, LocalTime.NOON) + }, + { + DataTypes.vectorOf(DataTypes.vectorOf(DataTypes.TINYINT, 2), 2), + GenericType.vectorOf(GenericType.vectorOf(Byte.class)), + GenericType.vectorOf(GenericType.vectorOf(Byte.class)), + CqlVector.newInstance( + 
CqlVector.newInstance((byte) 1, (byte) 2), CqlVector.newInstance((byte) 3, (byte) 4)) + }, }; } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/type/util/VIntCodingTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/type/util/VIntCodingTest.java new file mode 100644 index 00000000000..b85d6d66844 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/type/util/VIntCodingTest.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.type.util; + +import static org.junit.Assert.assertEquals; + +import com.tngtech.java.junit.dataprovider.DataProvider; +import com.tngtech.java.junit.dataprovider.DataProviderRunner; +import com.tngtech.java.junit.dataprovider.UseDataProvider; +import java.nio.ByteBuffer; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(DataProviderRunner.class) +public class VIntCodingTest { + @DataProvider + public static Object[] roundTripTestValues() { + return new Integer[] { + Integer.MAX_VALUE + 1, + Integer.MAX_VALUE, + Integer.MAX_VALUE - 1, + Integer.MIN_VALUE, + Integer.MIN_VALUE + 1, + Integer.MIN_VALUE - 1, + 0, + -1, + 1 + }; + }; + + private static final long[] LONGS = + new long[] { + 53L, + 10201L, + 1097151L, + 168435455L, + 33251130335L, + 3281283447775L, + 417672546086779L, + 52057592037927932L, + 72057594037927937L + }; + + @Test + public void should_compute_unsigned_vint_size() { + for (int i = 0; i < LONGS.length; i++) { + long val = LONGS[i]; + assertEquals(i + 1, VIntCoding.computeUnsignedVIntSize(val)); + } + } + + @Test + @UseDataProvider("roundTripTestValues") + public void should_write_and_read_unsigned_vint_32(int value) { + ByteBuffer bb = ByteBuffer.allocate(9); + + VIntCoding.writeUnsignedVInt32(value, bb); + bb.flip(); + assertEquals(value, VIntCoding.getUnsignedVInt32(bb, 0)); + } + + @Test + @UseDataProvider("roundTripTestValues") + public void should_write_and_read_unsigned_vint(int value) { + ByteBuffer bb = ByteBuffer.allocate(9); + + VIntCoding.writeUnsignedVInt(value, bb); + bb.flip(); + assertEquals(value, VIntCoding.getUnsignedVInt(bb, 0)); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java index a33c8704876..e3d891454de 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java +++ 
b/integration-tests/src/test/java/com/datastax/oss/driver/core/data/DataTypeIT.java @@ -32,6 +32,7 @@ import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.data.CqlDuration; +import com.datastax.oss.driver.api.core.data.CqlVector; import com.datastax.oss.driver.api.core.data.SettableByIndex; import com.datastax.oss.driver.api.core.data.SettableByName; import com.datastax.oss.driver.api.core.data.TupleValue; @@ -183,6 +184,7 @@ public static Object[][] typeSamples() { // 5) include map // 6) include tuple // 7) include udt + // 8) include vector return Arrays.stream(primitiveSamples) .flatMap( o -> { @@ -263,6 +265,30 @@ public static Object[][] typeSamples() { UdtValue udtValue2 = udt.newValue(1, o[1]); samples.add(new Object[] {udt, udtValue2}); + if (CCM_RULE.getCassandraVersion().compareTo(Version.parse("5.0")) >= 0) { + // vector of type + CqlVector vector = CqlVector.newInstance(o[1]); + samples.add(new Object[] {DataTypes.vectorOf(dataType, 1), vector}); + } + + return samples.stream(); + }) + .toArray(Object[][]::new); + } + + @DataProvider + public static Object[][] addVectors() { + Object[][] previousSamples = typeSamples(); + if (CCM_RULE.getCassandraVersion().compareTo(Version.parse("5.0")) < 0) return previousSamples; + return Arrays.stream(previousSamples) + .flatMap( + o -> { + List samples = new ArrayList<>(); + samples.add(o); + if (o[1] == null) return samples.stream(); + DataType dataType = (DataType) o[0]; + CqlVector vector = CqlVector.newInstance(o[1]); + samples.add(new Object[] {DataTypes.vectorOf(dataType, 1), vector}); return samples.stream(); }) .toArray(Object[][]::new); @@ -278,7 +304,7 @@ public static void createTable() { List columnData = new ArrayList<>(); - for (Object[] sample : typeSamples()) { + for (Object[] sample : addVectors()) { DataType dataType = (DataType) sample[0]; if (!typeToColumnName.containsKey(dataType)) { @@ 
-308,7 +334,7 @@ private static int nextKey() { return keyCounter.incrementAndGet(); } - @UseDataProvider("typeSamples") + @UseDataProvider("addVectors") @Test public void should_insert_non_primary_key_column_simple_statement_using_format( DataType dataType, K value, K expectedPrimitiveValue) { @@ -335,7 +361,7 @@ public void should_insert_non_primary_key_column_simple_statement_using_form readValue(select, dataType, value, expectedPrimitiveValue); } - @UseDataProvider("typeSamples") + @UseDataProvider("addVectors") @Test public void should_insert_non_primary_key_column_simple_statement_positional_value( DataType dataType, K value, K expectedPrimitiveValue) { @@ -358,7 +384,7 @@ public void should_insert_non_primary_key_column_simple_statement_positional readValue(select, dataType, value, expectedPrimitiveValue); } - @UseDataProvider("typeSamples") + @UseDataProvider("addVectors") @Test public void should_insert_non_primary_key_column_simple_statement_named_value( DataType dataType, K value, K expectedPrimitiveValue) { @@ -382,7 +408,7 @@ public void should_insert_non_primary_key_column_simple_statement_named_valu readValue(select, dataType, value, expectedPrimitiveValue); } - @UseDataProvider("typeSamples") + @UseDataProvider("addVectors") @Test public void should_insert_non_primary_key_column_bound_statement_positional_value( DataType dataType, K value, K expectedPrimitiveValue) { @@ -411,7 +437,7 @@ public void should_insert_non_primary_key_column_bound_statement_positional_ readValue(boundSelect, dataType, value, expectedPrimitiveValue); } - @UseDataProvider("typeSamples") + @UseDataProvider("addVectors") @Test public void should_insert_non_primary_key_column_bound_statement_named_value( DataType dataType, K value, K expectedPrimitiveValue) { From 04d34a8989b89c1addaab7f248b1fa9aa535da5e Mon Sep 17 00:00:00 2001 From: Alex Sasnouskikh Date: Thu, 30 Jan 2025 22:36:29 +0000 Subject: [PATCH 938/979] JAVA-3168 Copy node info for contact points on initial node 
refresh only from first match by endpoint patch by Alex Sasnouskikh; reviewed by Andy Tolbert and Alexandre Dura for JAVA-3168 --- .../core/metadata/InitialNodeListRefresh.java | 36 +++++++++++-------- .../metadata/InitialNodeListRefreshTest.java | 34 ++++++++++++++++-- 2 files changed, 54 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java index c21d5d8171e..517bfca27fa 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefresh.java @@ -18,14 +18,16 @@ package com.datastax.oss.driver.internal.core.metadata; import com.datastax.oss.driver.api.core.metadata.EndPoint; -import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactory; import com.datastax.oss.driver.internal.core.metadata.token.TokenFactoryRegistry; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; @@ -63,22 +65,31 @@ public Result compute( TokenFactory tokenFactory = null; Map newNodes = new HashMap<>(); + // Contact point nodes don't have host ID as well as other info yet, so we fill them with node + // info found on first match by endpoint + Set matchedContactPoints = new HashSet<>(); + List addedNodes = new ArrayList<>(); for (NodeInfo nodeInfo : nodeInfos) { UUID hostId = nodeInfo.getHostId(); if 
(newNodes.containsKey(hostId)) { LOG.warn( "[{}] Found duplicate entries with host_id {} in system.peers, " - + "keeping only the first one", + + "keeping only the first one {}", logPrefix, - hostId); + hostId, + newNodes.get(hostId)); } else { EndPoint endPoint = nodeInfo.getEndPoint(); - DefaultNode node = findIn(contactPoints, endPoint); - if (node == null) { + DefaultNode contactPointNode = findContactPointNode(endPoint); + DefaultNode node; + if (contactPointNode == null || matchedContactPoints.contains(endPoint)) { node = new DefaultNode(endPoint, context); + addedNodes.add(node); LOG.debug("[{}] Adding new node {}", logPrefix, node); } else { + matchedContactPoints.add(contactPointNode.getEndPoint()); + node = contactPointNode; LOG.debug("[{}] Copying contact point {}", logPrefix, node); } if (tokenMapEnabled && tokenFactory == null && nodeInfo.getPartitioner() != null) { @@ -90,14 +101,11 @@ public Result compute( } ImmutableList.Builder eventsBuilder = ImmutableList.builder(); - - for (DefaultNode newNode : newNodes.values()) { - if (findIn(contactPoints, newNode.getEndPoint()) == null) { - eventsBuilder.add(NodeStateEvent.added(newNode)); - } + for (DefaultNode addedNode : addedNodes) { + eventsBuilder.add(NodeStateEvent.added(addedNode)); } for (DefaultNode contactPoint : contactPoints) { - if (findIn(newNodes.values(), contactPoint.getEndPoint()) == null) { + if (!matchedContactPoints.contains(contactPoint.getEndPoint())) { eventsBuilder.add(NodeStateEvent.removed(contactPoint)); } } @@ -108,10 +116,10 @@ public Result compute( eventsBuilder.build()); } - private DefaultNode findIn(Iterable nodes, EndPoint endPoint) { - for (Node node : nodes) { + private DefaultNode findContactPointNode(EndPoint endPoint) { + for (DefaultNode node : contactPoints) { if (node.getEndPoint().equals(endPoint)) { - return (DefaultNode) node; + return node; } } return null; diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java index 095662257f6..3787bf8fe10 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/metadata/InitialNodeListRefreshTest.java @@ -48,6 +48,8 @@ public class InitialNodeListRefreshTest { private UUID hostId1; private UUID hostId2; private UUID hostId3; + private UUID hostId4; + private UUID hostId5; @Before public void setup() { @@ -61,10 +63,12 @@ public void setup() { hostId1 = UUID.randomUUID(); hostId2 = UUID.randomUUID(); hostId3 = UUID.randomUUID(); + hostId4 = UUID.randomUUID(); + hostId5 = UUID.randomUUID(); } @Test - public void should_copy_contact_points() { + public void should_copy_contact_points_on_first_endpoint_match_only() { // Given Iterable newInfos = ImmutableList.of( @@ -76,6 +80,17 @@ public void should_copy_contact_points() { DefaultNodeInfo.builder() .withEndPoint(contactPoint2.getEndPoint()) .withHostId(hostId2) + .build(), + DefaultNodeInfo.builder().withEndPoint(endPoint3).withHostId(hostId3).build(), + DefaultNodeInfo.builder() + // address translator can translate node addresses to the same endpoints + .withEndPoint(contactPoint2.getEndPoint()) + .withHostId(hostId4) + .build(), + DefaultNodeInfo.builder() + // address translator can translate node addresses to the same endpoints + .withEndPoint(endPoint3) + .withHostId(hostId5) .build()); InitialNodeListRefresh refresh = new InitialNodeListRefresh(newInfos, ImmutableSet.of(contactPoint1, contactPoint2)); @@ -86,11 +101,26 @@ public void should_copy_contact_points() { // Then // contact points have been copied to the metadata, and completed with missing information Map newNodes = result.newMetadata.getNodes(); - assertThat(newNodes).containsOnlyKeys(hostId1, hostId2); + 
assertThat(newNodes).containsOnlyKeys(hostId1, hostId2, hostId3, hostId4, hostId5); assertThat(newNodes.get(hostId1)).isEqualTo(contactPoint1); assertThat(contactPoint1.getHostId()).isEqualTo(hostId1); assertThat(newNodes.get(hostId2)).isEqualTo(contactPoint2); assertThat(contactPoint2.getHostId()).isEqualTo(hostId2); + // And + // node has been added for the new endpoint + assertThat(newNodes.get(hostId3).getEndPoint()).isEqualTo(endPoint3); + assertThat(newNodes.get(hostId3).getHostId()).isEqualTo(hostId3); + // And + // nodes have been added for duplicated endpoints + assertThat(newNodes.get(hostId4).getEndPoint()).isEqualTo(contactPoint2.getEndPoint()); + assertThat(newNodes.get(hostId4).getHostId()).isEqualTo(hostId4); + assertThat(newNodes.get(hostId5).getEndPoint()).isEqualTo(endPoint3); + assertThat(newNodes.get(hostId5).getHostId()).isEqualTo(hostId5); + assertThat(result.events) + .containsExactlyInAnyOrder( + NodeStateEvent.added((DefaultNode) newNodes.get(hostId3)), + NodeStateEvent.added((DefaultNode) newNodes.get(hostId4)), + NodeStateEvent.added((DefaultNode) newNodes.get(hostId5))); } @Test From eac7b24d60d87e912ea70ba3ddee2755a6fa28cf Mon Sep 17 00:00:00 2001 From: Luc Boutier Date: Fri, 3 Nov 2023 10:13:13 +0100 Subject: [PATCH 939/979] JAVA-3055: Prevent PreparedStatement cache to be polluted if a request is cancelled. There was a critical issue when the external code cancels a request, indeed the cached CompletableFuture will then always throw a CancellationException. This may happens, for example, when used by reactive like Mono.zip or Mono.firstWithValue. 
patch by Luc Boutier; reviewed by Alexandre Dutra and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1757 --- .../driver/internal/core/cql/CqlPrepareAsyncProcessor.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java index ffbc8ee046a..d777e35e50f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java @@ -159,7 +159,9 @@ public CompletionStage process( }); } } - return result; + // Return a defensive copy. So if a client cancels its request, the cache won't be impacted + // nor a potential concurrent request. + return result.thenApply(x -> x); // copy() is available only since Java 9 } catch (ExecutionException e) { return CompletableFutures.failedFuture(e.getCause()); } From 64b3568dfa6dd58e8c16d28cafbe8dbf16f1e622 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 3 Feb 2025 17:56:54 -0600 Subject: [PATCH 940/979] Expose a decorator for CqlPrepareAsyncProcessor cache rather than the ability to specify an arbitrary cache from scratch. 
Also bringing tests from https://github.com/apache/cassandra-java-driver/pull/2003 forward with a few minor changes due to this implementation patch by Bret McGuire; reviewed by Bret McGuire and Andy Tolbert reference: #2008 --- .../core/cql/CqlPrepareAsyncProcessor.java | 11 +- .../core/cql/PreparedStatementCachingIT.java | 13 +- .../cql/PreparedStatementCancellationIT.java | 166 ++++++++++++++++++ 3 files changed, 178 insertions(+), 12 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCancellationIT.java diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java index d777e35e50f..918d75e9ecb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java @@ -38,6 +38,7 @@ import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import com.datastax.oss.protocol.internal.ProtocolConstants; +import com.google.common.base.Functions; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.util.concurrent.EventExecutor; import java.util.Map; @@ -45,6 +46,7 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; +import java.util.function.Function; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,14 +64,15 @@ public CqlPrepareAsyncProcessor() { } public CqlPrepareAsyncProcessor(@NonNull Optional context) { - this(CacheBuilder.newBuilder().weakValues().build(), context); + this(context, Functions.identity()); } protected CqlPrepareAsyncProcessor( - Cache> cache, - Optional context) { + Optional context, + 
Function, CacheBuilder> decorator) { - this.cache = cache; + CacheBuilder baseCache = CacheBuilder.newBuilder().weakValues(); + this.cache = decorator.apply(baseCache).build(); context.ifPresent( (ctx) -> { LOG.info("Adding handler to invalidate cached prepared statements on type changes"); diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java index 05ac3bd0e92..617d489fb95 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCachingIT.java @@ -24,7 +24,6 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.context.DriverContext; -import com.datastax.oss.driver.api.core.cql.PrepareRequest; import com.datastax.oss.driver.api.core.cql.PreparedStatement; import com.datastax.oss.driver.api.core.metrics.DefaultSessionMetric; import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; @@ -41,7 +40,6 @@ import com.datastax.oss.driver.internal.core.session.BuiltInRequestProcessors; import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.session.RequestProcessorRegistry; -import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; import com.datastax.oss.driver.shaded.guava.common.cache.RemovalListener; import com.datastax.oss.driver.shaded.guava.common.util.concurrent.Uninterruptibles; import com.google.common.collect.ImmutableList; @@ -119,11 +117,12 @@ private static class TestCqlPrepareAsyncProcessor extends CqlPrepareAsyncProcess private static final Logger LOG = LoggerFactory.getLogger(PreparedStatementCachingIT.TestCqlPrepareAsyncProcessor.class); - private static 
RemovalListener> - buildCacheRemoveCallback(@NonNull Optional context) { + private static RemovalListener buildCacheRemoveCallback( + @NonNull Optional context) { return (evt) -> { try { - CompletableFuture future = evt.getValue(); + CompletableFuture future = + (CompletableFuture) evt.getValue(); ByteBuffer queryId = Uninterruptibles.getUninterruptibly(future).getId(); context.ifPresent( ctx -> ctx.getEventBus().fire(new PreparedStatementRemovalEvent(queryId))); @@ -136,9 +135,7 @@ private static class TestCqlPrepareAsyncProcessor extends CqlPrepareAsyncProcess public TestCqlPrepareAsyncProcessor(@NonNull Optional context) { // Default CqlPrepareAsyncProcessor uses weak values here as well. We avoid doing so // to prevent cache entries from unexpectedly disappearing mid-test. - super( - CacheBuilder.newBuilder().removalListener(buildCacheRemoveCallback(context)).build(), - context); + super(context, builder -> builder.removalListener(buildCacheRemoveCallback(context))); } } diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCancellationIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCancellationIT.java new file mode 100644 index 00000000000..d7e581e4606 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/cql/PreparedStatementCancellationIT.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.cql; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.Assert.fail; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.cql.PrepareRequest; +import com.datastax.oss.driver.api.core.cql.PreparedStatement; +import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.IsolatedTests; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.cql.CqlPrepareAsyncProcessor; +import com.datastax.oss.driver.shaded.guava.common.base.Predicates; +import com.datastax.oss.driver.shaded.guava.common.cache.Cache; +import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; +import java.util.concurrent.CompletableFuture; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(IsolatedTests.class) +public class PreparedStatementCancellationIT { + + private CustomCcmRule ccmRule = CustomCcmRule.builder().build(); + + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Before + public void setup() { + + CqlSession 
session = SessionUtils.newSession(ccmRule, sessionRule.keyspace()); + session.execute("DROP TABLE IF EXISTS test_table_1"); + session.execute("CREATE TABLE test_table_1 (k int primary key, v int)"); + session.execute("INSERT INTO test_table_1 (k,v) VALUES (1, 100)"); + session.execute("INSERT INTO test_table_1 (k,v) VALUES (2, 200)"); + session.execute("INSERT INTO test_table_1 (k,v) VALUES (3, 300)"); + session.close(); + } + + @After + public void teardown() { + + CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace()); + session.execute("DROP TABLE test_table_1"); + session.close(); + } + + private CompletableFuture toCompletableFuture(CqlSession session, String cql) { + + return session.prepareAsync(cql).toCompletableFuture(); + } + + private CqlPrepareAsyncProcessor findProcessor(CqlSession session) { + + DefaultDriverContext context = (DefaultDriverContext) session.getContext(); + return (CqlPrepareAsyncProcessor) + Iterables.find( + context.getRequestProcessorRegistry().getProcessors(), + Predicates.instanceOf(CqlPrepareAsyncProcessor.class)); + } + + @Test + public void should_cache_valid_cql() throws Exception { + + CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace()); + CqlPrepareAsyncProcessor processor = findProcessor(session); + Cache> cache = processor.getCache(); + assertThat(cache.size()).isEqualTo(0); + + // Make multiple CompletableFuture requests for the specified CQL, then wait until + // the cached request finishes and confirm that all futures got the same values + String cql = "select v from test_table_1 where k = ?"; + CompletableFuture cf1 = toCompletableFuture(session, cql); + CompletableFuture cf2 = toCompletableFuture(session, cql); + assertThat(cache.size()).isEqualTo(1); + + CompletableFuture future = Iterables.get(cache.asMap().values(), 0); + PreparedStatement stmt = future.get(); + + assertThat(cf1.isDone()).isTrue(); + assertThat(cf2.isDone()).isTrue(); + + 
assertThat(cf1.join()).isEqualTo(stmt); + assertThat(cf2.join()).isEqualTo(stmt); + } + + // A holdover from work done on JAVA-3055. This probably isn't _desired_ behaviour but this test + // documents the fact that the current driver impl will behave in this way. We should probably + // consider changing this in a future release, although it's worthwhile fully considering the + // implications of such a change. + @Test + public void will_cache_invalid_cql() throws Exception { + + CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace()); + CqlPrepareAsyncProcessor processor = findProcessor(session); + Cache> cache = processor.getCache(); + assertThat(cache.size()).isEqualTo(0); + + // Verify that we get the CompletableFuture even if the CQL is invalid but that nothing is + // cached + String cql = "select v fromfrom test_table_1 where k = ?"; + CompletableFuture cf = toCompletableFuture(session, cql); + + // join() here should throw exceptions due to the invalid syntax... for purposes of this test we + // can ignore this + try { + cf.join(); + fail(); + } catch (Exception e) { + } + + assertThat(cache.size()).isEqualTo(1); + } + + @Test + public void should_not_affect_cache_if_returned_futures_are_cancelled() throws Exception { + + CqlSession session = SessionUtils.newSession(ccmRule, sessionRule.keyspace()); + CqlPrepareAsyncProcessor processor = findProcessor(session); + Cache> cache = processor.getCache(); + assertThat(cache.size()).isEqualTo(0); + + String cql = "select v from test_table_1 where k = ?"; + CompletableFuture cf = toCompletableFuture(session, cql); + + assertThat(cf.isCancelled()).isFalse(); + assertThat(cf.cancel(false)).isTrue(); + assertThat(cf.isCancelled()).isTrue(); + assertThat(cf.isCompletedExceptionally()).isTrue(); + + // Confirm that cancelling the CompletableFuture returned to the user does _not_ cancel the + // future used within the cache. 
CacheEntry very deliberately doesn't maintain a reference + // to it's contained CompletableFuture so we have to get at this by secondary effects. + assertThat(cache.size()).isEqualTo(1); + CompletableFuture future = Iterables.get(cache.asMap().values(), 0); + PreparedStatement rv = future.get(); + assertThat(rv).isNotNull(); + assertThat(rv.getQuery()).isEqualTo(cql); + assertThat(cache.size()).isEqualTo(1); + } +} From 94e73d9f4e95cd74b319495663862ad50c63f589 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 5 Feb 2025 15:33:46 -0600 Subject: [PATCH 941/979] ninja-fix Using shaded Guava classes for import in order to make OSGi class paths happy. Major hat tip to Dmitry Konstantinov for the find here! --- .../oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java index 918d75e9ecb..a3d11cff054 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlPrepareAsyncProcessor.java @@ -34,11 +34,11 @@ import com.datastax.oss.driver.internal.core.session.RequestProcessor; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import com.datastax.oss.driver.internal.core.util.concurrent.RunOrSchedule; +import com.datastax.oss.driver.shaded.guava.common.base.Functions; import com.datastax.oss.driver.shaded.guava.common.cache.Cache; import com.datastax.oss.driver.shaded.guava.common.cache.CacheBuilder; import com.datastax.oss.driver.shaded.guava.common.collect.Iterables; import com.datastax.oss.protocol.internal.ProtocolConstants; -import com.google.common.base.Functions; import edu.umd.cs.findbugs.annotations.NonNull; import io.netty.util.concurrent.EventExecutor; import 
java.util.Map; From 610b91b160948c96c2d61aa34021db3cdac73aa2 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Tue, 4 Feb 2025 15:22:14 -0600 Subject: [PATCH 942/979] Changelog updates for 4.19.0 --- changelog/README.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 83ebb44239f..08634bcb834 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -21,6 +21,30 @@ under the License. +### 4.19.0 + +- [bug] JAVA-3055: Prevent PreparedStatement cache to be polluted if a request is cancelled. +- [bug] JAVA-3168: Copy node info for contact points on initial node refresh only from first match by endpoint +- [improvement] JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0) +- [improvement] CASSJAVA-53: Update Guava version used in cassandra-java-driver +- [improvement] JAVA-3118: Add support for vector data type in Schema Builder, QueryBuilder +- [bug] CASSJAVA-55: Remove setting "Host" header for metadata requests +- [bug] JAVA-3057: Allow decoding a UDT that has more fields than expected +- [improvement] CASSJAVA-52: Bring java-driver-shaded-guava into the repo as a submodule +- [bug] CASSJAVA-2: TableMetadata#describe produces invalid CQL when a type of a column is a vector +- [bug] JAVA-3051: Memory leak in DefaultLoadBalancingPolicy measurement of response times +- [improvement] CASSJAVA-14: Query builder support for NOT CQL syntax +- [bug] CASSJAVA-12: DefaultSslEngineFactory missing null check on close +- [improvement] CASSJAVA-46: Expose table extensions via schema builders +- [bug] PR 1938: Fix uncaught exception during graceful channel shutdown after exceeding max orphan ids +- [improvement] PR 1607: Annotate BatchStatement, Statement, SimpleStatement methods with CheckReturnValue +- [improvement] CASSJAVA-41: Reduce lock held duration in ConcurrencyLimitingRequestThrottler +- [bug] JAVA-3149: Async Query Cancellation 
Not Propagated To RequestThrottler +- [bug] JAVA-3167: CompletableFutures.allSuccessful() may return never completed future +- [bug] PR 1620: Don't return empty routing key when partition key is unbound +- [improvement] PR 1623: Limit calls to Conversions.resolveExecutionProfile +- [improvement] CASSJAVA-29: Update target Cassandra versions for integration tests, support new 5.0.x + ### 4.18.1 - [improvement] JAVA-3142: Ability to specify ordering of remote local dc's via new configuration for graceful automatic failovers From 46444eaabdbd23e9231123198536d070e99aca27 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 6 Feb 2025 15:51:51 -0600 Subject: [PATCH 943/979] [maven-release-plugin] prepare release 4.19.0 --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 08f212f6157..bb39a484f71 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-core-shaded - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-mapper-processor - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-mapper-runtime - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-query-builder - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-guava-shaded - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-test-infra - 
4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-metrics-micrometer - 4.18.2-SNAPSHOT + 4.19.0 org.apache.cassandra java-driver-metrics-microprofile - 4.18.2-SNAPSHOT + 4.19.0 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 9a708beb2a7..82f45050098 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 2a48e8bf9ce..ea0efdddc08 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index ee5b52958c3..d3275565827 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index fafd8c4678b..0bb2ccef386 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index dfc406baf43..4edc44d2131 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a76cc8d2bf1..ee90d5c2c65 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.18.2-SNAPSHOT + 4.19.0 java-driver-examples Apache Cassandra Java Driver - examples. 
diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index ca8f0161b04..a0627c376ff 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index d1b0a736bb0..1052f96ae35 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 6588f17d5f7..0b201db2473 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 28483ee93ff..f846fc68dec 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 8ab939cbb37..80ec51605ab 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 521a67f9075..955160dd621 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index f0e66b656ca..7b761aa12a0 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra 
java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c61e6485fd3..ecb800b7ebb 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1029,7 +1029,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.19.0 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 4e09a10e584..144906adecb 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 262627e5536..0252683ca2b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.18.2-SNAPSHOT + 4.19.0 java-driver-test-infra bundle From 90612f6758eb0f0ba964daf054f397a47a90a736 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 6 Feb 2025 15:51:54 -0600 Subject: [PATCH 944/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index bb39a484f71..8e7bc467fe7 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent 
- 4.19.0 + 4.19.1-SNAPSHOT java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-core-shaded - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-mapper-processor - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-mapper-runtime - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-query-builder - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-guava-shaded - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-test-infra - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-metrics-micrometer - 4.19.0 + 4.19.1-SNAPSHOT org.apache.cassandra java-driver-metrics-microprofile - 4.19.0 + 4.19.1-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 82f45050098..451db1dcd1b 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index ea0efdddc08..b8d7d5c2d3b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index d3275565827..e930f4c0610 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 0bb2ccef386..1c762074673 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 
4edc44d2131..8f7740e148f 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index ee90d5c2c65..15f082e6864 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.19.0 + 4.19.1-SNAPSHOT java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index a0627c376ff..8053af94911 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 1052f96ae35..b489076c257 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 0b201db2473..519f1411ce9 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index f846fc68dec..3a767c2a352 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 80ec51605ab..091bb5f3e93 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git 
a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 955160dd621..5163b5366f4 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 7b761aa12a0..4b41f790145 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index ecb800b7ebb..3fd2d1347c2 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1029,7 +1029,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.19.0 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 144906adecb..0bc46f9bb91 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0252683ca2b..b0808757ce4 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.0 + 4.19.1-SNAPSHOT java-driver-test-infra bundle From 3bb5b18903636976279506308c60f1911b7b8ed5 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Thu, 13 Feb 2025 17:11:36 -0500 Subject: [PATCH 945/979] CASSJAVA-80: Support configuration to disable DNS reverse-lookups for SAN validation patch by Abe Ratnofsky; reviewed by Alexandre Dutra, Andy Tolbert, and Francisco Guerrero for CASSJAVA-80 --- .../api/core/config/DefaultDriverOption.java | 6 ++ .../api/core/config/TypedDriverOption.java | 4 ++ 
.../ssl/ProgrammaticSslEngineFactory.java | 27 +++++++- .../core/ssl/DefaultSslEngineFactory.java | 26 +++++++- .../core/ssl/SniSslEngineFactory.java | 10 ++- core/src/main/resources/reference.conf | 6 ++ .../core/ssl/DefaultSslEngineFactoryIT.java | 66 +++++++++++++++++++ 7 files changed, 141 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 11f2702c3cf..acd4bb9cc1d 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -243,6 +243,12 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: boolean */ SSL_HOSTNAME_VALIDATION("advanced.ssl-engine-factory.hostname-validation"), + /** + * Whether or not to do a DNS reverse-lookup of provided server addresses for SAN addresses. + * + *

          Value-type: boolean + */ + SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"), /** * The location of the keystore file. * diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index ca60b67f0ba..93e2b468461 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -229,6 +229,10 @@ public String toString() { */ public static final TypedDriverOption SSL_HOSTNAME_VALIDATION = new TypedDriverOption<>(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, GenericType.BOOLEAN); + + public static final TypedDriverOption SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN = + new TypedDriverOption<>( + DefaultDriverOption.SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN, GenericType.BOOLEAN); /** The location of the keystore file. */ public static final TypedDriverOption SSL_KEYSTORE_PATH = new TypedDriverOption<>(DefaultDriverOption.SSL_KEYSTORE_PATH, GenericType.STRING); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java index 6dfe4087b91..d65eaa864aa 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/ssl/ProgrammaticSslEngineFactory.java @@ -45,6 +45,7 @@ public class ProgrammaticSslEngineFactory implements SslEngineFactory { protected final SSLContext sslContext; protected final String[] cipherSuites; protected final boolean requireHostnameValidation; + protected final boolean allowDnsReverseLookupSan; /** * Creates an instance with the given {@link SSLContext}, default cipher suites and no host name @@ -80,9 +81,28 @@ public ProgrammaticSslEngineFactory( @NonNull 
SSLContext sslContext, @Nullable String[] cipherSuites, boolean requireHostnameValidation) { + this(sslContext, cipherSuites, requireHostnameValidation, true); + } + + /** + * Creates an instance with the given {@link SSLContext}, cipher suites and host name validation. + * + * @param sslContext the {@link SSLContext} to use. + * @param cipherSuites the cipher suites to use, or null to use the default ones. + * @param requireHostnameValidation whether to enable host name validation. If enabled, host name + * validation will be done using HTTPS algorithm. + * @param allowDnsReverseLookupSan whether to allow raw server IPs to be DNS reverse-resolved to + * choose the appropriate Subject Alternative Name. + */ + public ProgrammaticSslEngineFactory( + @NonNull SSLContext sslContext, + @Nullable String[] cipherSuites, + boolean requireHostnameValidation, + boolean allowDnsReverseLookupSan) { this.sslContext = sslContext; this.cipherSuites = cipherSuites; this.requireHostnameValidation = requireHostnameValidation; + this.allowDnsReverseLookupSan = allowDnsReverseLookupSan; } @NonNull @@ -92,7 +112,12 @@ public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { SocketAddress remoteAddress = remoteEndpoint.resolve(); if (remoteAddress instanceof InetSocketAddress) { InetSocketAddress socketAddress = (InetSocketAddress) remoteAddress; - engine = sslContext.createSSLEngine(socketAddress.getHostName(), socketAddress.getPort()); + engine = + sslContext.createSSLEngine( + allowDnsReverseLookupSan + ? 
socketAddress.getHostName() + : socketAddress.getHostString(), + socketAddress.getPort()); } else { engine = sslContext.createSSLEngine(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java index 475ec38d578..343d3f9e4e7 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/DefaultSslEngineFactory.java @@ -22,6 +22,7 @@ import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import java.io.InputStream; import java.net.InetSocketAddress; @@ -69,6 +70,7 @@ public class DefaultSslEngineFactory implements SslEngineFactory { private final SSLContext sslContext; private final String[] cipherSuites; private final boolean requireHostnameValidation; + private final boolean allowDnsReverseLookupSan; private ReloadingKeyManagerFactory kmf; /** Builds a new instance from the driver configuration. */ @@ -88,6 +90,28 @@ public DefaultSslEngineFactory(DriverContext driverContext) { } this.requireHostnameValidation = config.getBoolean(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, true); + this.allowDnsReverseLookupSan = + config.getBoolean(DefaultDriverOption.SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN, true); + } + + @VisibleForTesting + protected String hostname(InetSocketAddress addr) { + return allowDnsReverseLookupSan ? 
hostMaybeFromDnsReverseLookup(addr) : hostNoLookup(addr); + } + + @VisibleForTesting + protected String hostMaybeFromDnsReverseLookup(InetSocketAddress addr) { + // See java.net.InetSocketAddress.getHostName: + // "This method may trigger a name service reverse lookup if the address was created with a + // literal IP address." + return addr.getHostName(); + } + + @VisibleForTesting + protected String hostNoLookup(InetSocketAddress addr) { + // See java.net.InetSocketAddress.getHostString: + // "This has the benefit of not attempting a reverse lookup" + return addr.getHostString(); } @NonNull @@ -97,7 +121,7 @@ public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { SocketAddress remoteAddress = remoteEndpoint.resolve(); if (remoteAddress instanceof InetSocketAddress) { InetSocketAddress socketAddress = (InetSocketAddress) remoteAddress; - engine = sslContext.createSSLEngine(socketAddress.getHostName(), socketAddress.getPort()); + engine = sslContext.createSSLEngine(hostname(socketAddress), socketAddress.getPort()); } else { engine = sslContext.createSSLEngine(); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java index 98af19045dc..4d2cb69fbfc 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ssl/SniSslEngineFactory.java @@ -38,9 +38,15 @@ public class SniSslEngineFactory implements SslEngineFactory { private final SSLContext sslContext; private final CopyOnWriteArrayList fakePorts = new CopyOnWriteArrayList<>(); + private final boolean allowDnsReverseLookupSan; public SniSslEngineFactory(SSLContext sslContext) { + this(sslContext, true); + } + + public SniSslEngineFactory(SSLContext sslContext, boolean allowDnsReverseLookupSan) { this.sslContext = sslContext; + this.allowDnsReverseLookupSan = 
allowDnsReverseLookupSan; } @NonNull @@ -71,8 +77,8 @@ public SSLEngine newSslEngine(@NonNull EndPoint remoteEndpoint) { // To avoid that, we create a unique "fake" port for every node. We still get session reuse for // a given node, but not across nodes. This is safe because the advisory port is only used for // session caching. - SSLEngine engine = - sslContext.createSSLEngine(address.getHostName(), getFakePort(sniServerName)); + String peerHost = allowDnsReverseLookupSan ? address.getHostName() : address.getHostString(); + SSLEngine engine = sslContext.createSSLEngine(peerHost, getFakePort(sniServerName)); engine.setUseClientMode(true); SSLParameters parameters = engine.getSSLParameters(); parameters.setServerNames(ImmutableList.of(new SNIHostName(sniServerName))); diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 7b1c43f8bea..f09ffd18a10 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -789,6 +789,12 @@ datastax-java-driver { # name matches the hostname of the server being connected to. If not set, defaults to true. // hostname-validation = true + # Whether or not to allow a DNS reverse-lookup of provided server addresses for SAN addresses, + # if cluster endpoints are specified as literal IPs. + # This is left as true for compatibility, but in most environments a DNS reverse-lookup should + # not be necessary to get an address that matches the server certificate SANs. + // allow-dns-reverse-lookup-san = true + # The locations and passwords used to access truststore and keystore contents. # These properties are optional. If either truststore-path or keystore-path are specified, # the driver builds an SSLContext from these files. 
If neither option is specified, the diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java index 5f97e661eb1..a2afeade3ce 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/ssl/DefaultSslEngineFactoryIT.java @@ -21,10 +21,13 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.context.DriverContext; import com.datastax.oss.driver.api.testinfra.ccm.CcmBridge; import com.datastax.oss.driver.api.testinfra.ccm.CustomCcmRule; import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.assertions.Assertions; import com.datastax.oss.driver.internal.core.ssl.DefaultSslEngineFactory; +import java.net.InetSocketAddress; import org.junit.ClassRule; import org.junit.Test; @@ -88,4 +91,67 @@ public void should_not_connect_if_not_using_ssl() { session.execute("select * from system.local"); } } + + public static class InstrumentedSslEngineFactory extends DefaultSslEngineFactory { + int countReverseLookups = 0; + int countNoLookups = 0; + + public InstrumentedSslEngineFactory(DriverContext driverContext) { + super(driverContext); + } + + @Override + protected String hostMaybeFromDnsReverseLookup(InetSocketAddress addr) { + countReverseLookups++; + return super.hostMaybeFromDnsReverseLookup(addr); + } + + @Override + protected String hostNoLookup(InetSocketAddress addr) { + countNoLookups++; + return super.hostNoLookup(addr); + } + }; + + @Test + public void should_respect_config_for_san_resolution() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withClass( + 
DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, InstrumentedSslEngineFactory.class) + .withBoolean(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, false) + .withString( + DefaultDriverOption.SSL_TRUSTSTORE_PATH, + CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_FILE.getAbsolutePath()) + .withString( + DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, + CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) + .build(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { + InstrumentedSslEngineFactory ssl = + (InstrumentedSslEngineFactory) session.getContext().getSslEngineFactory().get(); + Assertions.assertThat(ssl.countReverseLookups).isGreaterThan(0); + Assertions.assertThat(ssl.countNoLookups).isEqualTo(0); + } + + loader = + SessionUtils.configLoaderBuilder() + .withClass( + DefaultDriverOption.SSL_ENGINE_FACTORY_CLASS, InstrumentedSslEngineFactory.class) + .withBoolean(DefaultDriverOption.SSL_HOSTNAME_VALIDATION, false) + .withString( + DefaultDriverOption.SSL_TRUSTSTORE_PATH, + CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_FILE.getAbsolutePath()) + .withString( + DefaultDriverOption.SSL_TRUSTSTORE_PASSWORD, + CcmBridge.DEFAULT_CLIENT_TRUSTSTORE_PASSWORD) + .withBoolean(DefaultDriverOption.SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN, false) + .build(); + try (CqlSession session = SessionUtils.newSession(CCM_RULE, loader)) { + InstrumentedSslEngineFactory ssl = + (InstrumentedSslEngineFactory) session.getContext().getSslEngineFactory().get(); + Assertions.assertThat(ssl.countReverseLookups).isEqualTo(0); + Assertions.assertThat(ssl.countNoLookups).isGreaterThan(0); + } + } } From 7982f413a90935a91549aa3ee7cc64fa25d7c113 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 3 Mar 2025 11:46:42 -0600 Subject: [PATCH 946/979] ninja-fix Minor fix to CASSJAVA-80 change. Adding new entries to DefaultDriverOption anywhere other than at the end messes with the ordinal guarantees for exisitng apps. We have a check for such a thing in the build; moving this around avoids that concern. 
--- .../api/core/config/DefaultDriverOption.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index acd4bb9cc1d..6ffd51d86ef 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -243,12 +243,6 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: boolean */ SSL_HOSTNAME_VALIDATION("advanced.ssl-engine-factory.hostname-validation"), - /** - * Whether or not to do a DNS reverse-lookup of provided server addresses for SAN addresses. - * - *

          Value-type: boolean - */ - SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"), /** * The location of the keystore file. * @@ -994,7 +988,13 @@ public enum DefaultDriverOption implements DriverOption { *

          Value type: {@link java.util.List List}<{@link String}> */ LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS( - "advanced.load-balancing-policy.dc-failover.preferred-remote-dcs"); + "advanced.load-balancing-policy.dc-failover.preferred-remote-dcs"), + /** + * Whether or not to do a DNS reverse-lookup of provided server addresses for SAN addresses. + * + *

          Value-type: boolean + */ + SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"); private final String path; From c0cae9bf76024f8004abaa1ddb871b7351fc253e Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Tue, 25 Mar 2025 15:27:55 -0500 Subject: [PATCH 947/979] CASSJAVA-90 Update native-protocol version patch by Bret McGuire; reviewed by Abe Ratnofsky and Bret McGuire for CASSJAVA-90 --- bom/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bom/pom.xml b/bom/pom.xml index 8e7bc467fe7..f03317edc03 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -78,7 +78,7 @@ com.datastax.oss native-protocol - 1.5.1 + 1.5.2 From 204dd09329a073043b5b7cc170a9850e270c5a63 Mon Sep 17 00:00:00 2001 From: janehe Date: Mon, 31 Mar 2025 13:24:31 -0700 Subject: [PATCH 948/979] CASSJAVA-40: Driver testing against Java 21 patch by Jane He; reviewed by Bret McGuire for CASSJAVA-40 --- Jenkinsfile-asf | 4 ++-- .../oss/driver/internal/core/util/ArrayUtils.java | 3 ++- .../internal/core/cql/QueryTraceFetcherTest.java | 2 +- .../driver/internal/core/util/ArrayUtilsTest.java | 4 ++-- pom.xml | 13 +++++++++++++ 5 files changed, 20 insertions(+), 6 deletions(-) diff --git a/Jenkinsfile-asf b/Jenkinsfile-asf index 0217d0455d6..d6318585489 100644 --- a/Jenkinsfile-asf +++ b/Jenkinsfile-asf @@ -35,7 +35,7 @@ pipeline { axes { axis { name 'TEST_JAVA_VERSION' - values 'openjdk@1.8.0-292', 'openjdk@1.11.0-9', 'openjdk@17' + values 'openjdk@1.8.0-292', 'openjdk@1.11.0-9', 'openjdk@17', 'openjdk@1.21.0' } axis { name 'SERVER_VERSION' @@ -67,7 +67,7 @@ pipeline { def executeTests() { def testJavaMajorVersion = (TEST_JAVA_VERSION =~ /@(?:1\.)?(\d+)/)[0][1] sh """ - container_id=\$(docker run -td -e TEST_JAVA_VERSION=${TEST_JAVA_VERSION} -e SERVER_VERSION=${SERVER_VERSION} -e TEST_JAVA_MAJOR_VERSION=${testJavaMajorVersion} -v \$(pwd):/home/docker/cassandra-java-driver 
apache.jfrog.io/cassan-docker/apache/cassandra-java-driver-testing-ubuntu2204 'sleep 2h') + container_id=\$(docker run -td -e TEST_JAVA_VERSION=${TEST_JAVA_VERSION} -e SERVER_VERSION=${SERVER_VERSION} -e TEST_JAVA_MAJOR_VERSION=${testJavaMajorVersion} -v \$(pwd):/home/docker/cassandra-java-driver janehe158/cassandra-java-driver-dev-env 'sleep 2h') docker exec --user root \$container_id bash -c \"sudo bash /home/docker/cassandra-java-driver/ci/create-user.sh docker \$(id -u) \$(id -g) /home/docker/cassandra-java-driver\" docker exec --user docker \$container_id './cassandra-java-driver/ci/run-tests.sh' ( nohup docker stop \$container_id >/dev/null 2>/dev/null & ) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/ArrayUtils.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/ArrayUtils.java index f5fcb98e8b7..490b1dc7d17 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/util/ArrayUtils.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/ArrayUtils.java @@ -18,6 +18,7 @@ package com.datastax.oss.driver.internal.core.util; import edu.umd.cs.findbugs.annotations.NonNull; +import java.util.Random; import java.util.concurrent.ThreadLocalRandom; public class ArrayUtils { @@ -77,7 +78,7 @@ public static void shuffleHead(@NonNull ElementT[] elements, int n) { * Fisher-Yates shuffle */ public static void shuffleHead( - @NonNull ElementT[] elements, int n, @NonNull ThreadLocalRandom random) { + @NonNull ElementT[] elements, int n, @NonNull Random random) { if (n > elements.length) { throw new ArrayIndexOutOfBoundsException( String.format( diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java index b355e0fc9f0..dc238775bc1 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/QueryTraceFetcherTest.java @@ -79,7 +79,7 @@ public class QueryTraceFetcherTest { @Mock private NettyOptions nettyOptions; @Mock private EventExecutorGroup adminEventExecutorGroup; @Mock private EventExecutor eventExecutor; - @Mock private InetAddress address; + private InetAddress address = InetAddress.getLoopbackAddress(); @Captor private ArgumentCaptor statementCaptor; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/util/ArrayUtilsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/util/ArrayUtilsTest.java index c2a7fb70304..c2df6449fdb 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/util/ArrayUtilsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/util/ArrayUtilsTest.java @@ -22,7 +22,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import java.util.concurrent.ThreadLocalRandom; +import java.util.Random; import org.junit.Test; public class ArrayUtilsTest { @@ -86,7 +86,7 @@ public void should_not_bubble_down_when_target_index_lower() { @Test public void should_shuffle_head() { String[] array = {"a", "b", "c", "d", "e"}; - ThreadLocalRandom random = mock(ThreadLocalRandom.class); + Random random = mock(Random.class); when(random.nextInt(anyInt())) .thenAnswer( (invocation) -> { diff --git a/pom.xml b/pom.xml index 3fd2d1347c2..088d7b07532 100644 --- a/pom.xml +++ b/pom.xml @@ -1016,6 +1016,19 @@ limitations under the License.]]> --add-opens java.base/jdk.internal.util.random=ALL-UNNAMED + + + test-jdk-21 + + [21,) + + + + -XX:+AllowRedefinitionToAddDeleteMethods + + --add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED + + From 529d56e1742dcd1df3ca55c00fd8e02c0e484c68 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 31 Mar 2025 16:00:18 -0500 Subject: [PATCH 949/979] Add support for Java21 builds to test runs (plus a few other small cleanups) patch by Bret McGuire; 
reviewed by Joao Reis for CASSJAVA-40 --- Jenkinsfile-datastax | 50 ++++++++++++-------------------------------- 1 file changed, 13 insertions(+), 37 deletions(-) diff --git a/Jenkinsfile-datastax b/Jenkinsfile-datastax index cd48f325a29..af1aab6e0f4 100644 --- a/Jenkinsfile-datastax +++ b/Jenkinsfile-datastax @@ -19,22 +19,15 @@ */ def initializeEnvironment() { - env.DRIVER_DISPLAY_NAME = 'CassandraⓇ Java Driver' + env.DRIVER_DISPLAY_NAME = 'Java Driver for Apache CassandraⓇ' env.DRIVER_METRIC_TYPE = 'oss' - if (env.GIT_URL.contains('riptano/java-driver')) { - env.DRIVER_DISPLAY_NAME = 'private ' + env.DRIVER_DISPLAY_NAME - env.DRIVER_METRIC_TYPE = 'oss-private' - } else if (env.GIT_URL.contains('java-dse-driver')) { - env.DRIVER_DISPLAY_NAME = 'DSE Java Driver' - env.DRIVER_METRIC_TYPE = 'dse' - } env.GIT_SHA = "${env.GIT_COMMIT.take(7)}" env.GITHUB_PROJECT_URL = "https://${GIT_URL.replaceFirst(/(git@|http:\/\/|https:\/\/)/, '').replace(':', '/').replace('.git', '')}" env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" - env.MAVEN_HOME = "${env.HOME}/.mvn/apache-maven-3.3.9" + env.MAVEN_HOME = "${env.HOME}/.mvn/apache-maven-3.6.3" env.PATH = "${env.MAVEN_HOME}/bin:${env.PATH}" /* @@ -335,14 +328,12 @@ pipeline { ''') choice( name: 'ADHOC_BUILD_AND_EXECUTE_TESTS_JABBA_VERSION', - choices: ['1.8', // Oracle JDK version 1.8 (current default) - 'openjdk@1.9', // OpenJDK version 9 - 'openjdk@1.10', // OpenJDK version 10 + choices: [ + '1.8', // Oracle JDK version 1.8 (current default) 'openjdk@1.11', // OpenJDK version 11 - 'openjdk@1.12', // OpenJDK version 12 - 'openjdk@1.13', // OpenJDK version 13 - 'openjdk@1.14', // OpenJDK version 14 - 'openjdk@1.17'], // OpenJDK version 17 + 'openjdk@1.17', // OpenJDK version 17 + 'openjdk@1.21' // OpenJDK version 21 + ], description: '''JDK version to use for TESTING when running adhoc BUILD-AND-EXECUTE-TESTS builds. 
All builds will use JDK8 for building the driver @@ -355,34 +346,18 @@ pipeline { - - - - - - - - - - - - - - - - - - - - + + + +
          1.8 Oracle JDK version 1.8 (Used for compiling regardless of choice)
          openjdk@1.9OpenJDK version 9
          openjdk@1.10OpenJDK version 10
          openjdk@1.11 OpenJDK version 11
          openjdk@1.12OpenJDK version 12
          openjdk@1.13OpenJDK version 13
          openjdk@1.14OpenJDK version 14
          openjdk@1.17 OpenJDK version 17
          openjdk@1.21OpenJDK version 21
          ''') booleanParam( name: 'SKIP_SERIAL_ITS', @@ -466,7 +441,8 @@ pipeline { name 'JABBA_VERSION' values '1.8', // jdk8 'openjdk@1.11', // jdk11 - 'openjdk@1.17' // jdk17 + 'openjdk@1.17', // jdk17 + 'openjdk@1.21' // jdk21 } } From f42ab99ccc031bca7db6cf69aec70771d65799e1 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Mon, 10 Feb 2025 13:11:06 -0500 Subject: [PATCH 950/979] Upgrade Netty to 4.1.119 patch by Abe Ratnofsky; reviewed by Bret McGuire for CASSJAVA-77 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 088d7b07532..a8841a9caca 100644 --- a/pom.xml +++ b/pom.xml @@ -57,7 +57,7 @@ 2.1.12 4.1.18 - 4.1.94.Final + 4.1.119.Final 1.2.1 - com.datastax.oss:${project.artifactId}:RELEASE + ${project.groupId}:${project.artifactId}:RELEASE From 0115cd67c18835b89d8888d405d455045737a630 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Fri, 11 Apr 2025 13:19:45 +0200 Subject: [PATCH 952/979] Prevent long overflow in SNI address resolution patch by Lukasz Antoniak and Alexandre Dutra; reviewed by Bret McGuire --- .../oss/driver/internal/core/metadata/SniEndPoint.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java index ace4e82617d..d1ab8eec98d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/metadata/SniEndPoint.java @@ -26,10 +26,10 @@ import java.util.Arrays; import java.util.Comparator; import java.util.Objects; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicInteger; public class SniEndPoint implements EndPoint { - private static final AtomicLong OFFSET = new AtomicLong(); + private static final AtomicInteger OFFSET = new AtomicInteger(); private final 
InetSocketAddress proxyAddress; private final String serverName; @@ -64,7 +64,10 @@ public InetSocketAddress resolve() { // The order of the returned address is unspecified. Sort by IP to make sure we get a true // round-robin Arrays.sort(aRecords, IP_COMPARATOR); - int index = (aRecords.length == 1) ? 0 : (int) OFFSET.getAndIncrement() % aRecords.length; + int index = + (aRecords.length == 1) + ? 0 + : OFFSET.getAndUpdate(x -> x == Integer.MAX_VALUE ? 0 : x + 1) % aRecords.length; return new InetSocketAddress(aRecords[index], proxyAddress.getPort()); } catch (UnknownHostException e) { throw new IllegalArgumentException( From 7161d12fb28d16e2928cfe98bbada151ff6ae058 Mon Sep 17 00:00:00 2001 From: janehe Date: Wed, 23 Apr 2025 22:25:03 -0700 Subject: [PATCH 953/979] ninja-fix Revert docker image name (CASSJAVA-40) --- Jenkinsfile-asf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile-asf b/Jenkinsfile-asf index d6318585489..24800ba9051 100644 --- a/Jenkinsfile-asf +++ b/Jenkinsfile-asf @@ -67,7 +67,7 @@ pipeline { def executeTests() { def testJavaMajorVersion = (TEST_JAVA_VERSION =~ /@(?:1\.)?(\d+)/)[0][1] sh """ - container_id=\$(docker run -td -e TEST_JAVA_VERSION=${TEST_JAVA_VERSION} -e SERVER_VERSION=${SERVER_VERSION} -e TEST_JAVA_MAJOR_VERSION=${testJavaMajorVersion} -v \$(pwd):/home/docker/cassandra-java-driver janehe158/cassandra-java-driver-dev-env 'sleep 2h') + container_id=\$(docker run -td -e TEST_JAVA_VERSION=${TEST_JAVA_VERSION} -e SERVER_VERSION=${SERVER_VERSION} -e TEST_JAVA_MAJOR_VERSION=${testJavaMajorVersion} -v \$(pwd):/home/docker/cassandra-java-driver apache.jfrog.io/cassan-docker/apache/cassandra-java-driver-testing-ubuntu2204 'sleep 2h') docker exec --user root \$container_id bash -c \"sudo bash /home/docker/cassandra-java-driver/ci/create-user.sh docker \$(id -u) \$(id -g) /home/docker/cassandra-java-driver\" docker exec --user docker \$container_id './cassandra-java-driver/ci/run-tests.sh' ( nohup docker stop 
\$container_id >/dev/null 2>/dev/null & ) From d7e829775c4956d10c888c86b653f7ac2d10fa4b Mon Sep 17 00:00:00 2001 From: Andy Tolbert <6889771+tolbertam@users.noreply.github.com> Date: Thu, 6 Feb 2025 14:57:52 -0600 Subject: [PATCH 954/979] Make guava an optional dependency of java-driver-guava-shaded With CASSJAVA-52, the java-driver-guava-shaded is now in tree. This appears to work great, but there is a slight issue with the dependency tree that allows unshaded guava packages to be imported within the project. It looks like marking guava as an optional dependency in java-driver-guava-shaded resolves this. patch by Andy Tolbert; reviewed by Alexandre Dutra and Dmitry Konstantinov for CASSJAVA-76 --- guava-shaded/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index 8053af94911..ed37e861a96 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -46,6 +46,7 @@ error_prone_annotations + true org.graalvm.nativeimage From eb54934d5d2f38757da3a94a8ad4960f66eb4bd6 Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Fri, 15 Nov 2024 19:01:32 -0800 Subject: [PATCH 955/979] Bump Jackson version to la(te)st 2.13, 2.13.5 patch by Tatu Saloranta; reviewed by Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1989 --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index c69dabb84ce..e5cfb58f94d 100644 --- a/pom.xml +++ b/pom.xml @@ -69,8 +69,8 @@ 1.0.3 20230227 - 2.13.4 - 2.13.4.2 + 2.13.5 + ${jackson.version} 1.1.10.1 1.7.1 From 53bb5c8b6580c5797e4f148cdc818885327cd19e Mon Sep 17 00:00:00 2001 From: Tatu Saloranta Date: Fri, 15 Nov 2024 18:41:50 -0800 Subject: [PATCH 956/979] CASSJAVA-68 Improve DefaultCodecRegisry.CacheKey#hashCode() to eliminate Object[] allocation (found via profiler) patch by Tatu Saloranta; reviewed by Dmitry Konstantinov and Bret McGuire for CASSJAVA-68 --- .../core/type/codec/registry/DefaultCodecRegistry.java | 5 ++++- 
1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java index cfd053ea56e..4334f22b63d 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java @@ -159,7 +159,10 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(cqlType, javaType, isJavaCovariant); + // NOTE: inlined Objects.hash for performance reasons (avoid Object[] allocation + // seen in profiler allocation traces) + return ((31 + Objects.hashCode(cqlType)) * 31 + Objects.hashCode(javaType)) * 31 + + Boolean.hashCode(isJavaCovariant); } } } From c9facc3c36e7ba0b1d30fb9b10de69f879d34fb5 Mon Sep 17 00:00:00 2001 From: janehe Date: Mon, 5 May 2025 19:37:32 -0700 Subject: [PATCH 957/979] ninja-fix Format fix for previous commit --- .../internal/core/type/codec/registry/DefaultCodecRegistry.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java index 4334f22b63d..cc14740e180 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/type/codec/registry/DefaultCodecRegistry.java @@ -162,7 +162,7 @@ public int hashCode() { // NOTE: inlined Objects.hash for performance reasons (avoid Object[] allocation // seen in profiler allocation traces) return ((31 + Objects.hashCode(cqlType)) * 31 + Objects.hashCode(javaType)) * 31 - + Boolean.hashCode(isJavaCovariant); + 
+ Boolean.hashCode(isJavaCovariant); } } } From bb9bb11e573258e12e4272379f88e4d8b5f103a0 Mon Sep 17 00:00:00 2001 From: alexsa Date: Thu, 12 Jun 2025 10:02:09 +0200 Subject: [PATCH 958/979] Add SubnetAddressTranslator to translate Cassandra node IPs from private network based on its subnet mask patch by Alex Sasnouskikh; reviewed by Bret McGuire and Andy Tolbert reference: https://github.com/apache/cassandra-java-driver/pull/2013 --- .../api/core/config/DefaultDriverOption.java | 43 ++++- .../api/core/config/TypedDriverOption.java | 14 ++ .../driver/internal/core/ContactPoints.java | 61 ++---- .../FixedHostNameAddressTranslator.java | 20 +- .../core/addresstranslation/Subnet.java | 176 ++++++++++++++++++ .../addresstranslation/SubnetAddress.java | 65 +++++++ .../SubnetAddressTranslator.java | 148 +++++++++++++++ .../internal/core/util/AddressUtils.java | 59 ++++++ core/src/main/resources/reference.conf | 18 +- .../internal/core/ContactPointsTest.java | 4 +- .../FixedHostNameAddressTranslatorTest.java | 5 +- .../addresstranslation/SubnetAddressTest.java | 44 +++++ .../SubnetAddressTranslatorTest.java | 153 +++++++++++++++ .../core/addresstranslation/SubnetTest.java | 118 ++++++++++++ .../internal/core/config/MockOptions.java | 1 + .../typesafe/TypesafeDriverConfigTest.java | 14 +- manual/core/address_resolution/README.md | 49 +++++ 17 files changed, 923 insertions(+), 69 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/Subnet.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddress.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/util/AddressUtils.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTest.java create mode 100644 
core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetTest.java diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 6ffd51d86ef..4e45bf7b117 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -994,7 +994,48 @@ public enum DefaultDriverOption implements DriverOption { * *

          Value-type: boolean */ - SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"); + SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"), + /** + * An address to always translate all node addresses to that same proxy hostname no matter what IP + * address a node has, but still using its native transport port. + * + *

          Value-Type: {@link String} + */ + ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME("advanced.address-translator.advertised-hostname"), + /** + * A map of Cassandra node subnets (CIDR notations) to target addresses, for example (note quoted + * keys): + * + *

          +   * advanced.address-translator.subnet-addresses {
          +   *   "100.64.0.0/15" = "cassandra.datacenter1.com:9042"
          +   *   "100.66.0.0/15" = "cassandra.datacenter2.com:9042"
          +   *   # IPv6 example:
          +   *   # "::ffff:6440:0/111" = "cassandra.datacenter1.com:9042"
          +   *   # "::ffff:6442:0/111" = "cassandra.datacenter2.com:9042"
          +   * }
          +   * 
          + * + * Note: subnets must be represented as prefix blocks, see {@link + * inet.ipaddr.Address#isPrefixBlock()}. + * + *

          Value type: {@link java.util.Map Map}<{@link String},{@link String}> + */ + ADDRESS_TRANSLATOR_SUBNET_ADDRESSES("advanced.address-translator.subnet-addresses"), + /** + * A default address to fallback to if Cassandra node IP isn't contained in any of the configured + * subnets. + * + *

          Value-Type: {@link String} + */ + ADDRESS_TRANSLATOR_DEFAULT_ADDRESS("advanced.address-translator.default-address"), + /** + * Whether to resolve the addresses on initialization (if true) or on each node (re-)connection + * (if false). Defaults to false. + * + *

          Value-Type: boolean + */ + ADDRESS_TRANSLATOR_RESOLVE_ADDRESSES("advanced.address-translator.resolve-addresses"); private final String path; diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index 93e2b468461..aa4e4af12dc 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -896,6 +896,20 @@ public String toString() { DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS, GenericType.BOOLEAN); + public static final TypedDriverOption ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME = + new TypedDriverOption<>( + DefaultDriverOption.ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME, GenericType.STRING); + public static final TypedDriverOption> ADDRESS_TRANSLATOR_SUBNET_ADDRESSES = + new TypedDriverOption<>( + DefaultDriverOption.ADDRESS_TRANSLATOR_SUBNET_ADDRESSES, + GenericType.mapOf(GenericType.STRING, GenericType.STRING)); + public static final TypedDriverOption ADDRESS_TRANSLATOR_DEFAULT_ADDRESS = + new TypedDriverOption<>( + DefaultDriverOption.ADDRESS_TRANSLATOR_DEFAULT_ADDRESS, GenericType.STRING); + public static final TypedDriverOption ADDRESS_TRANSLATOR_RESOLVE_ADDRESSES = + new TypedDriverOption<>( + DefaultDriverOption.ADDRESS_TRANSLATOR_RESOLVE_ADDRESSES, GenericType.BOOLEAN); + /** * Ordered preference list of remote dcs optionally supplied for automatic failover and included * in query plan. This feature is enabled only when max-nodes-per-remote-dc is greater than 0. 
diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/ContactPoints.java b/core/src/main/java/com/datastax/oss/driver/internal/core/ContactPoints.java index 1ed2a1cebf3..bb65661b72f 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/ContactPoints.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/ContactPoints.java @@ -19,14 +19,11 @@ import com.datastax.oss.driver.api.core.metadata.EndPoint; import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; +import com.datastax.oss.driver.internal.core.util.AddressUtils; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; -import java.net.InetAddress; import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import org.slf4j.Logger; @@ -41,7 +38,22 @@ public static Set merge( Set result = Sets.newHashSet(programmaticContactPoints); for (String spec : configContactPoints) { - for (InetSocketAddress address : extract(spec, resolve)) { + + Set addresses = Collections.emptySet(); + try { + addresses = AddressUtils.extract(spec, resolve); + } catch (RuntimeException e) { + LOG.warn("Ignoring invalid contact point {} ({})", spec, e.getMessage(), e); + } + + if (addresses.size() > 1) { + LOG.info( + "Contact point {} resolves to multiple addresses, will use them all ({})", + spec, + addresses); + } + + for (InetSocketAddress address : addresses) { DefaultEndPoint endPoint = new DefaultEndPoint(address); boolean wasNew = result.add(endPoint); if (!wasNew) { @@ -51,43 +63,4 @@ public static Set merge( } return ImmutableSet.copyOf(result); } - - private static Set extract(String spec, boolean resolve) { - int separator = spec.lastIndexOf(':'); - if (separator < 0) { - LOG.warn("Ignoring invalid contact point {} (expecting host:port)", spec); - 
return Collections.emptySet(); - } - - String host = spec.substring(0, separator); - String portSpec = spec.substring(separator + 1); - int port; - try { - port = Integer.parseInt(portSpec); - } catch (NumberFormatException e) { - LOG.warn("Ignoring invalid contact point {} (expecting a number, got {})", spec, portSpec); - return Collections.emptySet(); - } - if (!resolve) { - return ImmutableSet.of(InetSocketAddress.createUnresolved(host, port)); - } else { - try { - InetAddress[] inetAddresses = InetAddress.getAllByName(host); - if (inetAddresses.length > 1) { - LOG.info( - "Contact point {} resolves to multiple addresses, will use them all ({})", - spec, - Arrays.deepToString(inetAddresses)); - } - Set result = new HashSet<>(); - for (InetAddress inetAddress : inetAddresses) { - result.add(new InetSocketAddress(inetAddress, port)); - } - return result; - } catch (UnknownHostException e) { - LOG.warn("Ignoring invalid contact point {} (unknown host {})", spec, host); - return Collections.emptySet(); - } - } - } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java index 4fb9782f566..5cc6c2518fb 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslator.java @@ -17,8 +17,9 @@ */ package com.datastax.oss.driver.internal.core.addresstranslation; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME; + import com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; -import com.datastax.oss.driver.api.core.config.DriverOption; import com.datastax.oss.driver.api.core.context.DriverContext; import edu.umd.cs.findbugs.annotations.NonNull; 
import java.net.InetSocketAddress; @@ -37,28 +38,13 @@ public class FixedHostNameAddressTranslator implements AddressTranslator { private static final Logger LOG = LoggerFactory.getLogger(FixedHostNameAddressTranslator.class); - public static final String ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME = - "advanced.address-translator.advertised-hostname"; - - public static DriverOption ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION = - new DriverOption() { - @NonNull - @Override - public String getPath() { - return ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME; - } - }; - private final String advertisedHostname; private final String logPrefix; public FixedHostNameAddressTranslator(@NonNull DriverContext context) { logPrefix = context.getSessionName(); advertisedHostname = - context - .getConfig() - .getDefaultProfile() - .getString(ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION); + context.getConfig().getDefaultProfile().getString(ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME); } @NonNull diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/Subnet.java b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/Subnet.java new file mode 100644 index 00000000000..7c25e94e2f9 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/Subnet.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.base.Splitter; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.List; + +class Subnet { + private final byte[] subnet; + private final byte[] networkMask; + private final byte[] upper; + private final byte[] lower; + + private Subnet(byte[] subnet, byte[] networkMask) { + this.subnet = subnet; + this.networkMask = networkMask; + + byte[] upper = new byte[subnet.length]; + byte[] lower = new byte[subnet.length]; + for (int i = 0; i < subnet.length; i++) { + upper[i] = (byte) (subnet[i] | ~networkMask[i]); + lower[i] = (byte) (subnet[i] & networkMask[i]); + } + this.upper = upper; + this.lower = lower; + } + + static Subnet parse(String subnetCIDR) throws UnknownHostException { + List parts = Splitter.on("/").splitToList(subnetCIDR); + if (parts.size() != 2) { + throw new IllegalArgumentException("Invalid subnet: " + subnetCIDR); + } + + boolean isIPv6 = parts.get(0).contains(":"); + byte[] subnet = InetAddress.getByName(parts.get(0)).getAddress(); + if (isIPv4(subnet) && isIPv6) { + subnet = toIPv6(subnet); + } + int prefixLength = Integer.parseInt(parts.get(1)); + validatePrefixLength(subnet, prefixLength); + + byte[] networkMask = toNetworkMask(subnet, prefixLength); + validateSubnetIsPrefixBlock(subnet, networkMask, subnetCIDR); + return new Subnet(subnet, networkMask); + } + + 
private static byte[] toNetworkMask(byte[] subnet, int prefixLength) { + int fullBytes = prefixLength / 8; + int remainingBits = prefixLength % 8; + byte[] mask = new byte[subnet.length]; + Arrays.fill(mask, 0, fullBytes, (byte) 0xFF); + if (remainingBits > 0) { + mask[fullBytes] = (byte) (0xFF << (8 - remainingBits)); + } + return mask; + } + + private static void validatePrefixLength(byte[] subnet, int prefixLength) { + int max_prefix_length = subnet.length * 8; + if (prefixLength < 0 || max_prefix_length < prefixLength) { + throw new IllegalArgumentException( + String.format( + "Prefix length %s must be within [0; %s]", prefixLength, max_prefix_length)); + } + } + + private static void validateSubnetIsPrefixBlock( + byte[] subnet, byte[] networkMask, String subnetCIDR) { + byte[] prefixBlock = toPrefixBlock(subnet, networkMask); + if (!Arrays.equals(subnet, prefixBlock)) { + throw new IllegalArgumentException( + String.format("Subnet %s must be represented as a network prefix block", subnetCIDR)); + } + } + + private static byte[] toPrefixBlock(byte[] subnet, byte[] networkMask) { + byte[] prefixBlock = new byte[subnet.length]; + for (int i = 0; i < subnet.length; i++) { + prefixBlock[i] = (byte) (subnet[i] & networkMask[i]); + } + return prefixBlock; + } + + @VisibleForTesting + byte[] getSubnet() { + return Arrays.copyOf(subnet, subnet.length); + } + + @VisibleForTesting + byte[] getNetworkMask() { + return Arrays.copyOf(networkMask, networkMask.length); + } + + byte[] getUpper() { + return Arrays.copyOf(upper, upper.length); + } + + byte[] getLower() { + return Arrays.copyOf(lower, lower.length); + } + + boolean isIPv4() { + return isIPv4(subnet); + } + + boolean isIPv6() { + return isIPv6(subnet); + } + + boolean contains(byte[] ip) { + if (isIPv4() && !isIPv4(ip)) { + return false; + } + if (isIPv6() && isIPv4(ip)) { + ip = toIPv6(ip); + } + if (subnet.length != ip.length) { + throw new IllegalArgumentException( + "IP version is unknown: " + 
Arrays.toString(toZeroBasedByteArray(ip))); + } + for (int i = 0; i < subnet.length; i++) { + if (subnet[i] != (byte) (ip[i] & networkMask[i])) { + return false; + } + } + return true; + } + + private static boolean isIPv4(byte[] ip) { + return ip.length == 4; + } + + private static boolean isIPv6(byte[] ip) { + return ip.length == 16; + } + + private static byte[] toIPv6(byte[] ipv4) { + byte[] ipv6 = new byte[16]; + ipv6[10] = (byte) 0xFF; + ipv6[11] = (byte) 0xFF; + System.arraycopy(ipv4, 0, ipv6, 12, 4); + return ipv6; + } + + @Override + public String toString() { + return Arrays.toString(toZeroBasedByteArray(subnet)); + } + + private static int[] toZeroBasedByteArray(byte[] bytes) { + int[] res = new int[bytes.length]; + for (int i = 0; i < bytes.length; i++) { + res[i] = bytes[i] & 0xFF; + } + return res; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddress.java b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddress.java new file mode 100644 index 00000000000..105e776a507 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddress.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import java.net.InetSocketAddress; +import java.net.UnknownHostException; + +class SubnetAddress { + private final Subnet subnet; + private final InetSocketAddress address; + + SubnetAddress(String subnetCIDR, InetSocketAddress address) { + try { + this.subnet = Subnet.parse(subnetCIDR); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + this.address = address; + } + + InetSocketAddress getAddress() { + return this.address; + } + + boolean isOverlapping(SubnetAddress other) { + Subnet thisSubnet = this.subnet; + Subnet otherSubnet = other.subnet; + return thisSubnet.contains(otherSubnet.getLower()) + || thisSubnet.contains(otherSubnet.getUpper()) + || otherSubnet.contains(thisSubnet.getLower()) + || otherSubnet.contains(thisSubnet.getUpper()); + } + + boolean contains(InetSocketAddress address) { + return subnet.contains(address.getAddress().getAddress()); + } + + boolean isIPv4() { + return subnet.isIPv4(); + } + + boolean isIPv6() { + return subnet.isIPv6(); + } + + @Override + public String toString() { + return "SubnetAddress[subnet=" + subnet + ", address=" + address + "]"; + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslator.java new file mode 100644 index 00000000000..85f29e3fadd --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslator.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_DEFAULT_ADDRESS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_RESOLVE_ADDRESSES; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_SUBNET_ADDRESSES; + +import com.datastax.oss.driver.api.core.addresstranslation.AddressTranslator; +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.internal.core.util.AddressUtils; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import java.net.InetSocketAddress; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * This translator returns the proxy address of the private subnet containing the Cassandra node IP, + * or default address if no matching subnets, or passes through the original node address if no + * default configured. + * + *

          The translator can be used for scenarios when all nodes are behind some kind of proxy, and + * that proxy is different for nodes located in different subnets (eg. when Cassandra is deployed in + * multiple datacenters/regions). One can use this, for example, for Cassandra on Kubernetes with + * different Cassandra datacenters deployed to different Kubernetes clusters. + */ +public class SubnetAddressTranslator implements AddressTranslator { + private static final Logger LOG = LoggerFactory.getLogger(SubnetAddressTranslator.class); + + private final List subnetAddresses; + + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + private final Optional defaultAddress; + + private final String logPrefix; + + public SubnetAddressTranslator(@NonNull DriverContext context) { + logPrefix = context.getSessionName(); + boolean resolveAddresses = + context + .getConfig() + .getDefaultProfile() + .getBoolean(ADDRESS_TRANSLATOR_RESOLVE_ADDRESSES, false); + this.subnetAddresses = + context.getConfig().getDefaultProfile().getStringMap(ADDRESS_TRANSLATOR_SUBNET_ADDRESSES) + .entrySet().stream() + .map( + e -> { + // Quoted and/or containing forward slashes map keys in reference.conf are read to + // strings with additional quotes, eg. 
100.64.0.0/15 -> '100.64.0."0/15"' or + // "100.64.0.0/15" -> '"100.64.0.0/15"' + String subnetCIDR = e.getKey().replaceAll("\"", ""); + String address = e.getValue(); + return new SubnetAddress(subnetCIDR, parseAddress(address, resolveAddresses)); + }) + .collect(Collectors.toList()); + this.defaultAddress = + Optional.ofNullable( + context + .getConfig() + .getDefaultProfile() + .getString(ADDRESS_TRANSLATOR_DEFAULT_ADDRESS, null)) + .map(address -> parseAddress(address, resolveAddresses)); + + validateSubnetsAreOfSameProtocol(this.subnetAddresses); + validateSubnetsAreNotOverlapping(this.subnetAddresses); + } + + private static void validateSubnetsAreOfSameProtocol(List subnets) { + for (int i = 0; i < subnets.size() - 1; i++) { + for (int j = i + 1; j < subnets.size(); j++) { + SubnetAddress subnet1 = subnets.get(i); + SubnetAddress subnet2 = subnets.get(j); + if (subnet1.isIPv4() != subnet2.isIPv4() && subnet1.isIPv6() != subnet2.isIPv6()) { + throw new IllegalArgumentException( + String.format( + "Configured subnets are of the different protocols: %s, %s", subnet1, subnet2)); + } + } + } + } + + private static void validateSubnetsAreNotOverlapping(List subnets) { + for (int i = 0; i < subnets.size() - 1; i++) { + for (int j = i + 1; j < subnets.size(); j++) { + SubnetAddress subnet1 = subnets.get(i); + SubnetAddress subnet2 = subnets.get(j); + if (subnet1.isOverlapping(subnet2)) { + throw new IllegalArgumentException( + String.format("Configured subnets are overlapping: %s, %s", subnet1, subnet2)); + } + } + } + } + + @NonNull + @Override + public InetSocketAddress translate(@NonNull InetSocketAddress address) { + InetSocketAddress translatedAddress = null; + for (SubnetAddress subnetAddress : subnetAddresses) { + if (subnetAddress.contains(address)) { + translatedAddress = subnetAddress.getAddress(); + } + } + if (translatedAddress == null && defaultAddress.isPresent()) { + translatedAddress = defaultAddress.get(); + } + if (translatedAddress == null) { + 
translatedAddress = address; + } + LOG.debug("[{}] Translated {} to {}", logPrefix, address, translatedAddress); + return translatedAddress; + } + + @Override + public void close() {} + + @Nullable + private InetSocketAddress parseAddress(String address, boolean resolve) { + try { + InetSocketAddress parsedAddress = AddressUtils.extract(address, resolve).iterator().next(); + LOG.debug("[{}] Parsed {} to {}", logPrefix, address, parsedAddress); + return parsedAddress; + } catch (RuntimeException e) { + throw new IllegalArgumentException( + String.format("Invalid address %s (%s)", address, e.getMessage()), e); + } + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/util/AddressUtils.java b/core/src/main/java/com/datastax/oss/driver/internal/core/util/AddressUtils.java new file mode 100644 index 00000000000..8905edb9192 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/util/AddressUtils.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.util; + +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableSet; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.HashSet; +import java.util.Set; + +public class AddressUtils { + + public static Set extract(String address, boolean resolve) { + int separator = address.lastIndexOf(':'); + if (separator < 0) { + throw new IllegalArgumentException("expecting format host:port"); + } + + String host = address.substring(0, separator); + String portString = address.substring(separator + 1); + int port; + try { + port = Integer.parseInt(portString); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("expecting port to be a number, got " + portString, e); + } + if (!resolve) { + return ImmutableSet.of(InetSocketAddress.createUnresolved(host, port)); + } else { + InetAddress[] inetAddresses; + try { + inetAddresses = InetAddress.getAllByName(host); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + Set result = new HashSet<>(); + for (InetAddress inetAddress : inetAddresses) { + result.add(new InetSocketAddress(inetAddress, port)); + } + return result; + } + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index f09ffd18a10..3c6851a48ee 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1026,8 +1026,9 @@ datastax-java-driver { # the package com.datastax.oss.driver.internal.core.addresstranslation. # # The driver provides the following implementations out of the box: - # - PassThroughAddressTranslator: returns all addresses unchanged + # - PassThroughAddressTranslator: returns all addresses unchanged. # - FixedHostNameAddressTranslator: translates all addresses to a specific hostname. + # - SubnetAddressTranslator: translates addresses to hostname based on the subnet match. 
# - Ec2MultiRegionAddressTranslator: suitable for an Amazon multi-region EC2 deployment where # clients are also deployed in EC2. It optimizes network costs by favoring private IPs over # public ones whenever possible. @@ -1035,8 +1036,23 @@ datastax-java-driver { # You can also specify a custom class that implements AddressTranslator and has a public # constructor with a DriverContext argument. class = PassThroughAddressTranslator + # # This property has to be set only in case you use FixedHostNameAddressTranslator. # advertised-hostname = mycustomhostname + # + # These properties are only applicable in case you use SubnetAddressTranslator. + # subnet-addresses { + # "100.64.0.0/15" = "cassandra.datacenter1.com:9042" + # "100.66.0.0/15" = "cassandra.datacenter2.com:9042" + # # IPv6 example: + # # "::ffff:6440:0/111" = "cassandra.datacenter1.com:9042" + # # "::ffff:6442:0/111" = "cassandra.datacenter2.com:9042" + # } + # Optional. When configured, addresses not matching the configured subnets are translated to this address. + # default-address = "cassandra.datacenter1.com:9042" + # Whether to resolve the addresses once on initialization (if true) or on each node (re-)connection (if false). + # If not configured, defaults to false. + # resolve-addresses = false } # Whether to resolve the addresses passed to `basic.contact-points`. 
diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/ContactPointsTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/ContactPointsTest.java index 9e0d8737619..72b875b8602 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/ContactPointsTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/ContactPointsTest.java @@ -121,7 +121,7 @@ public void should_ignore_malformed_host_and_port_and_warn() { ContactPoints.merge(Collections.emptySet(), ImmutableList.of("foobar"), true); assertThat(endPoints).isEmpty(); - assertLog(Level.WARN, "Ignoring invalid contact point foobar (expecting host:port)"); + assertLog(Level.WARN, "Ignoring invalid contact point foobar (expecting format host:port)"); } @Test @@ -132,7 +132,7 @@ public void should_ignore_malformed_port_and_warn() { assertThat(endPoints).isEmpty(); assertLog( Level.WARN, - "Ignoring invalid contact point 127.0.0.1:foobar (expecting a number, got foobar)"); + "Ignoring invalid contact point 127.0.0.1:foobar (expecting port to be a number, got foobar)"); } @Test diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java index c5e864b4bae..92800998056 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java @@ -17,6 +17,7 @@ */ package com.datastax.oss.driver.internal.core.addresstranslation; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -33,9 +34,7 @@ public class 
FixedHostNameAddressTranslatorTest { @Test public void should_translate_address() { DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); - when(defaultProfile.getString( - FixedHostNameAddressTranslator.ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME_OPTION)) - .thenReturn("myaddress"); + when(defaultProfile.getString(ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME)).thenReturn("myaddress"); DefaultDriverContext defaultDriverContext = MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTest.java new file mode 100644 index 00000000000..bd505f5dd44 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTest.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +import java.net.InetSocketAddress; +import org.junit.Test; + +public class SubnetAddressTest { + @Test + public void should_return_return_true_on_overlapping_with_another_subnet_address() { + SubnetAddress subnetAddress1 = + new SubnetAddress("100.64.0.0/15", mock(InetSocketAddress.class)); + SubnetAddress subnetAddress2 = + new SubnetAddress("100.65.0.0/16", mock(InetSocketAddress.class)); + assertThat(subnetAddress1.isOverlapping(subnetAddress2)).isTrue(); + } + + @Test + public void should_return_return_false_on_not_overlapping_with_another_subnet_address() { + SubnetAddress subnetAddress1 = + new SubnetAddress("100.64.0.0/15", mock(InetSocketAddress.class)); + SubnetAddress subnetAddress2 = + new SubnetAddress("100.66.0.0/15", mock(InetSocketAddress.class)); + assertThat(subnetAddress1.isOverlapping(subnetAddress2)).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java new file mode 100644 index 00000000000..2aa6ae75bc2 --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java @@ -0,0 +1,153 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_DEFAULT_ADDRESS; +import static com.datastax.oss.driver.api.core.config.DefaultDriverOption.ADDRESS_TRANSLATOR_SUBNET_ADDRESSES; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; +import com.datastax.oss.driver.internal.core.context.MockedDriverContextFactory; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; +import java.net.InetSocketAddress; +import java.util.Map; +import java.util.Optional; +import org.junit.Test; + +@SuppressWarnings("resource") +public class SubnetAddressTranslatorTest { + + @Test + public void should_translate_to_correct_subnet_address_ipv4() { + Map subnetAddresses = + ImmutableMap.of( + "\"100.64.0.0/15\"", "cassandra.datacenter1.com:19042", + "100.66.0.\"0/15\"", "cassandra.datacenter2.com:19042"); + DefaultDriverContext context = context(subnetAddresses); + SubnetAddressTranslator translator = new SubnetAddressTranslator(context); + InetSocketAddress address = new InetSocketAddress("100.64.0.1", 9042); + assertThat(translator.translate(address)) + 
.isEqualTo(InetSocketAddress.createUnresolved("cassandra.datacenter1.com", 19042)); + } + + @Test + public void should_translate_to_correct_subnet_address_ipv6() { + Map subnetAddresses = + ImmutableMap.of( + "\"::ffff:6440:0/111\"", "cassandra.datacenter1.com:19042", + "\"::ffff:6442:0/111\"", "cassandra.datacenter2.com:19042"); + DefaultDriverContext context = context(subnetAddresses); + SubnetAddressTranslator translator = new SubnetAddressTranslator(context); + InetSocketAddress address = new InetSocketAddress("::ffff:6440:1", 9042); + assertThat(translator.translate(address)) + .isEqualTo(InetSocketAddress.createUnresolved("cassandra.datacenter1.com", 19042)); + } + + @Test + public void should_translate_to_default_address() { + DefaultDriverContext context = context(ImmutableMap.of()); + when(context + .getConfig() + .getDefaultProfile() + .getString(ADDRESS_TRANSLATOR_DEFAULT_ADDRESS, null)) + .thenReturn("cassandra.com:19042"); + SubnetAddressTranslator translator = new SubnetAddressTranslator(context); + InetSocketAddress address = new InetSocketAddress("100.68.0.1", 9042); + assertThat(translator.translate(address)) + .isEqualTo(InetSocketAddress.createUnresolved("cassandra.com", 19042)); + } + + @Test + public void should_pass_through_not_matched_address() { + DefaultDriverContext context = context(ImmutableMap.of()); + SubnetAddressTranslator translator = new SubnetAddressTranslator(context); + InetSocketAddress address = new InetSocketAddress("100.68.0.1", 9042); + assertThat(translator.translate(address)).isEqualTo(address); + } + + @Test + public void should_fail_on_intersecting_subnets_ipv4() { + Map subnetAddresses = + ImmutableMap.of( + "\"100.64.0.0/15\"", "cassandra.datacenter1.com:19042", + "100.65.0.\"0/16\"", "cassandra.datacenter2.com:19042"); + DefaultDriverContext context = context(subnetAddresses); + assertThatIllegalArgumentException() + .isThrownBy(() -> new SubnetAddressTranslator(context)) + .withMessage( + "Configured subnets are 
overlapping: " + + String.format( + "SubnetAddress[subnet=[100, 64, 0, 0], address=%s], ", + InetSocketAddress.createUnresolved("cassandra.datacenter1.com", 19042)) + + String.format( + "SubnetAddress[subnet=[100, 65, 0, 0], address=%s]", + InetSocketAddress.createUnresolved("cassandra.datacenter2.com", 19042))); + } + + @Test + public void should_fail_on_intersecting_subnets_ipv6() { + Map subnetAddresses = + ImmutableMap.of( + "\"::ffff:6440:0/111\"", "cassandra.datacenter1.com:19042", + "\"::ffff:6441:0/112\"", "cassandra.datacenter2.com:19042"); + DefaultDriverContext context = context(subnetAddresses); + assertThatIllegalArgumentException() + .isThrownBy(() -> new SubnetAddressTranslator(context)) + .withMessage( + "Configured subnets are overlapping: " + + String.format( + "SubnetAddress[subnet=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 100, 64, 0, 0], address=%s], ", + InetSocketAddress.createUnresolved("cassandra.datacenter1.com", 19042)) + + String.format( + "SubnetAddress[subnet=[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 100, 65, 0, 0], address=%s]", + InetSocketAddress.createUnresolved("cassandra.datacenter2.com", 19042))); + } + + @Test + public void should_fail_on_subnet_address_without_port() { + Map subnetAddresses = + ImmutableMap.of("\"100.64.0.0/15\"", "cassandra.datacenter1.com"); + DefaultDriverContext context = context(subnetAddresses); + assertThatIllegalArgumentException() + .isThrownBy(() -> new SubnetAddressTranslator(context)) + .withMessage("Invalid address cassandra.datacenter1.com (expecting format host:port)"); + } + + @Test + public void should_fail_on_default_address_without_port() { + DefaultDriverContext context = context(ImmutableMap.of()); + when(context + .getConfig() + .getDefaultProfile() + .getString(ADDRESS_TRANSLATOR_DEFAULT_ADDRESS, null)) + .thenReturn("cassandra.com"); + assertThatIllegalArgumentException() + .isThrownBy(() -> new SubnetAddressTranslator(context)) + .withMessage("Invalid address cassandra.com (expecting 
format host:port)"); + } + + private static DefaultDriverContext context(Map subnetAddresses) { + DriverExecutionProfile profile = mock(DriverExecutionProfile.class); + when(profile.getStringMap(ADDRESS_TRANSLATOR_SUBNET_ADDRESSES)).thenReturn(subnetAddresses); + return MockedDriverContextFactory.defaultDriverContext(Optional.of(profile)); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetTest.java new file mode 100644 index 00000000000..f8ba8929e9e --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetTest.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.datastax.oss.driver.internal.core.addresstranslation; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.assertj.core.api.Assertions.assertThatNoException; + +import java.net.UnknownHostException; +import org.junit.Test; + +public class SubnetTest { + @Test + public void should_parse_to_correct_ipv4_subnet() throws UnknownHostException { + Subnet subnet = Subnet.parse("100.64.0.0/15"); + assertThat(subnet.getSubnet()).containsExactly(100, 64, 0, 0); + assertThat(subnet.getNetworkMask()).containsExactly(255, 254, 0, 0); + assertThat(subnet.getUpper()).containsExactly(100, 65, 255, 255); + assertThat(subnet.getLower()).containsExactly(100, 64, 0, 0); + } + + @Test + public void should_parse_to_correct_ipv6_subnet() throws UnknownHostException { + Subnet subnet = Subnet.parse("2001:db8:85a3::8a2e:370:0/111"); + assertThat(subnet.getSubnet()) + .containsExactly(32, 1, 13, 184, 133, 163, 0, 0, 0, 0, 138, 46, 3, 112, 0, 0); + assertThat(subnet.getNetworkMask()) + .containsExactly( + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 0, 0); + assertThat(subnet.getUpper()) + .containsExactly(32, 1, 13, 184, 133, 163, 0, 0, 0, 0, 138, 46, 3, 113, 255, 255); + assertThat(subnet.getLower()) + .containsExactly(32, 1, 13, 184, 133, 163, 0, 0, 0, 0, 138, 46, 3, 112, 0, 0); + } + + @Test + public void should_parse_to_correct_ipv6_subnet_ipv4_convertible() throws UnknownHostException { + Subnet subnet = Subnet.parse("::ffff:6440:0/111"); + assertThat(subnet.getSubnet()) + .containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 100, 64, 0, 0); + assertThat(subnet.getNetworkMask()) + .containsExactly( + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 0, 0); + assertThat(subnet.getUpper()) + .containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 100, 65, 255, 255); + assertThat(subnet.getLower()) + 
.containsExactly(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 100, 64, 0, 0); + } + + @Test + public void should_fail_on_invalid_cidr_format() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Subnet.parse("invalid")) + .withMessage("Invalid subnet: invalid"); + } + + @Test + public void should_parse_bounding_prefix_lengths_correctly() { + assertThatNoException().isThrownBy(() -> Subnet.parse("0.0.0.0/0")); + assertThatNoException().isThrownBy(() -> Subnet.parse("100.64.0.0/32")); + } + + @Test + public void should_fail_on_invalid_prefix_length() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Subnet.parse("100.64.0.0/-1")) + .withMessage("Prefix length -1 must be within [0; 32]"); + assertThatIllegalArgumentException() + .isThrownBy(() -> Subnet.parse("100.64.0.0/33")) + .withMessage("Prefix length 33 must be within [0; 32]"); + } + + @Test + public void should_fail_on_not_prefix_block_subnet_ipv4() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Subnet.parse("100.65.0.0/15")) + .withMessage("Subnet 100.65.0.0/15 must be represented as a network prefix block"); + } + + @Test + public void should_fail_on_not_prefix_block_subnet_ipv6() { + assertThatIllegalArgumentException() + .isThrownBy(() -> Subnet.parse("::ffff:6441:0/111")) + .withMessage("Subnet ::ffff:6441:0/111 must be represented as a network prefix block"); + } + + @Test + public void should_return_true_on_containing_address() throws UnknownHostException { + Subnet subnet = Subnet.parse("100.64.0.0/15"); + assertThat(subnet.contains(new byte[] {100, 64, 0, 0})).isTrue(); + assertThat(subnet.contains(new byte[] {100, 65, (byte) 255, (byte) 255})).isTrue(); + assertThat(subnet.contains(new byte[] {100, 65, 100, 100})).isTrue(); + } + + @Test + public void should_return_false_on_not_containing_address() throws UnknownHostException { + Subnet subnet = Subnet.parse("100.64.0.0/15"); + assertThat(subnet.contains(new byte[] {100, 63, (byte) 255, (byte) 255})).isFalse(); + 
assertThat(subnet.contains(new byte[] {100, 66, 0, 0})).isFalse(); + // IPv6 cannot be contained by IPv4 subnet. + assertThat(subnet.contains(new byte[16])).isFalse(); + } +} diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java index 25c1e8b26fd..cee57abbfdf 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/MockOptions.java @@ -24,6 +24,7 @@ public enum MockOptions implements DriverOption { INT1("int1"), INT2("int2"), AUTH_PROVIDER("auth_provider"), + SUBNET_ADDRESSES("subnet_addresses"), ; private final String path; diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java index 16ccb73da9f..4a78c3ccb03 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/config/typesafe/TypesafeDriverConfigTest.java @@ -101,7 +101,6 @@ public void should_fetch_string_map() { parse( "int1 = 42 \n auth_provider { auth_thing_one= one \n auth_thing_two = two \n auth_thing_three = three}"); DriverExecutionProfile base = config.getDefaultProfile(); - base.getStringMap(MockOptions.AUTH_PROVIDER); Map map = base.getStringMap(MockOptions.AUTH_PROVIDER); assertThat(map.entrySet().size()).isEqualTo(3); assertThat(map.get("auth_thing_one")).isEqualTo("one"); @@ -109,6 +108,19 @@ public void should_fetch_string_map() { assertThat(map.get("auth_thing_three")).isEqualTo("three"); } + @Test + public void should_fetch_string_map_with_forward_slash_in_keys() { + TypesafeDriverConfig config = + parse( + "subnet_addresses { 100.64.0.0/15 = \"cassandra.datacenter1.com:9042\" \n 
\"100.66.0.0/15\" = \"cassandra.datacenter2.com\" \n \"::ffff:6440:0/111\" = \"cassandra.datacenter3.com:19042\" }"); + DriverExecutionProfile base = config.getDefaultProfile(); + Map map = base.getStringMap(MockOptions.SUBNET_ADDRESSES); + assertThat(map.entrySet().size()).isEqualTo(3); + assertThat(map.get("100.64.0.\"0/15\"")).isEqualTo("cassandra.datacenter1.com:9042"); + assertThat(map.get("\"100.66.0.0/15\"")).isEqualTo("cassandra.datacenter2.com"); + assertThat(map.get("\"::ffff:6440:0/111\"")).isEqualTo("cassandra.datacenter3.com:19042"); + } + @Test public void should_create_derived_profile_with_string_map() { TypesafeDriverConfig config = parse("int1 = 42"); diff --git a/manual/core/address_resolution/README.md b/manual/core/address_resolution/README.md index 84efb4a796c..5b2536feb18 100644 --- a/manual/core/address_resolution/README.md +++ b/manual/core/address_resolution/README.md @@ -118,6 +118,55 @@ datastax-java-driver.advanced.address-translator.class = com.mycompany.MyAddress Note: the contact points provided while creating the `CqlSession` are not translated, only addresses retrieved from or sent by Cassandra nodes are. +### Fixed proxy hostname + +If your client applications access Cassandra through some kind of proxy (eg. with AWS PrivateLink when all Cassandra +nodes are exposed via one hostname pointing to AWS Endpoint), you can configure driver with +`FixedHostNameAddressTranslator` to always translate all node addresses to that same proxy hostname, no matter what IP +address a node has but still using its native transport port. 
+ +To use it, specify the following in the [configuration](../configuration): + +``` +datastax-java-driver.advanced.address-translator.class = FixedHostNameAddressTranslator +advertised-hostname = proxyhostname +``` + +### Fixed proxy hostname per subnet + +When running Cassandra in a private network and accessing it from outside of that private network via some kind of +proxy, we have an option to use `FixedHostNameAddressTranslator`. But for multi-datacenter Cassandra deployments, we +want to have more control over routing queries to a specific datacenter (e.g. for optimizing latencies), which requires +setting up a separate proxy per datacenter. + +Normally, the nodes of each Cassandra datacenter are deployed to a different subnet to support internode communications in the +cluster and avoid IP address collisions. So when Cassandra broadcasts its nodes' IP addresses, we can determine which +datacenter that node belongs to by checking its IP address against the given datacenter subnet. + +For such scenarios you can use `SubnetAddressTranslator` to translate node IPs to the datacenter proxy address +associated with it. + +To use it, specify the following in the [configuration](../configuration): +``` +datastax-java-driver.advanced.address-translator { + class = SubnetAddressTranslator + subnet-addresses { + "100.64.0.0/15" = "cassandra.datacenter1.com:9042" + "100.66.0.0/15" = "cassandra.datacenter2.com:9042" + # IPv6 example: + # "::ffff:6440:0/111" = "cassandra.datacenter1.com:9042" + # "::ffff:6442:0/111" = "cassandra.datacenter2.com:9042" + } + # Optional. When configured, addresses not matching the configured subnets are translated to this address. + default-address = "cassandra.datacenter1.com:9042" + # Whether to resolve the addresses once on initialization (if true) or on each node (re-)connection (if false). + # If not configured, defaults to false. 
+ resolve-addresses = false +} +``` + +Such a setup is common for running Cassandra on Kubernetes with [k8ssandra](https://docs.k8ssandra.io/). + ### EC2 multi-region If you deploy both Cassandra and client applications on Amazon EC2, and your cluster spans multiple regions, you'll have From 29d3531202895fb9866bdd72720202c78a7eaa9b Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Tue, 20 May 2025 19:06:14 -0400 Subject: [PATCH 959/979] Fix revapi spurious complaints about optional dependencies patch by Abe Ratnofsky; reviewed by Bret McGuire for CASSJAVA-102 --- Jenkinsfile-datastax | 2 +- core/revapi.json | 25 +++++++++++++++++++-- mapper-runtime/revapi.json | 6 ++--- pom.xml | 45 ++++++++++++++++++++++++-------------- query-builder/revapi.json | 9 ++++---- test-infra/revapi.json | 4 +--- 6 files changed, 61 insertions(+), 30 deletions(-) diff --git a/Jenkinsfile-datastax b/Jenkinsfile-datastax index af1aab6e0f4..73b977bdf9f 100644 --- a/Jenkinsfile-datastax +++ b/Jenkinsfile-datastax @@ -27,7 +27,7 @@ def initializeEnvironment() { env.GITHUB_BRANCH_URL = "${GITHUB_PROJECT_URL}/tree/${env.BRANCH_NAME}" env.GITHUB_COMMIT_URL = "${GITHUB_PROJECT_URL}/commit/${env.GIT_COMMIT}" - env.MAVEN_HOME = "${env.HOME}/.mvn/apache-maven-3.6.3" + env.MAVEN_HOME = "${env.HOME}/.mvn/apache-maven-3.8.8" env.PATH = "${env.MAVEN_HOME}/bin:${env.PATH}" /* diff --git a/core/revapi.json b/core/revapi.json index 5aa46a3ccad..f39c7d4a7c0 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -1,5 +1,3 @@ -// Configures Revapi (https://revapi.org/getting-started.html) to check API compatibility between -// successive driver versions. 
{ "revapi": { "java": { @@ -7386,6 +7384,29 @@ "old": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", "new": "method com.datastax.oss.driver.api.core.type.reflect.GenericType> com.datastax.oss.driver.api.core.type.reflect.GenericType::vectorOf(java.lang.Class)", "justification": "JAVA-3143: Extend driver vector support to arbitrary subtypes and fix handling of variable length types (OSS C* 5.0)" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class com.datastax.oss.driver.internal.core.config.typesafe.TypesafeDriverExecutionProfile.Base", + "justification": "CASSJAVA-102: Fix spurious complaints about optional dependencies" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class com.fasterxml.jackson.databind.type.TypeParser.MyTokenizer", + "justification": "CASSJAVA-102: Fix spurious complaints about optional dependencies" + }, + { + "code": "java.class.nonPublicPartOfAPI", + "old": "class org.apache.tinkerpop.shaded.jackson.databind.type.TypeParser.MyTokenizer", + "justification": "CASSJAVA-102: Fix spurious complaints about optional dependencies" + }, + { + "code": "java.class.externalClassExposedInAPI", + "justification": "CASSJAVA-102: Migrate revapi config into dedicated config files, ported from pom.xml" + }, + { + "code": "java.method.varargOverloadsOnlyDifferInVarargParameter", + "justification": "CASSJAVA-102: Migrate revapi config into dedicated config files, ported from pom.xml" } ] } diff --git a/mapper-runtime/revapi.json b/mapper-runtime/revapi.json index 18d26a7f7e9..3dc2ea21671 100644 --- a/mapper-runtime/revapi.json +++ b/mapper-runtime/revapi.json @@ -1,5 +1,3 @@ -// Configures Revapi (https://revapi.org/getting-started.html) to check API compatibility between -// successive driver versions. 
{ "revapi": { "java": { @@ -11,7 +9,7 @@ "com\\.datastax\\.(oss|dse)\\.driver\\.internal(\\..+)?", "com\\.datastax\\.oss\\.driver\\.shaded(\\..+)?", "com\\.datastax\\.oss\\.simulacron(\\..+)?", - // Don't re-check sibling modules that this module depends on + "// Don't re-check sibling modules that this module depends on", "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.core(\\..+)?", "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.querybuilder(\\..+)?" ] @@ -22,7 +20,7 @@ { "regex": true, "code": "java.annotation.attributeValueChanged", - "old": "@interface com\.datastax\.oss\.driver\.api\.mapper\.annotations\..*", + "old": "@interface com\\.datastax\\.oss\\.driver\\.api\\.mapper\\.annotations\\..*", "annotationType": "java.lang.annotation.Retention", "attribute": "value", "oldValue": "java.lang.annotation.RetentionPolicy.CLASS", diff --git a/pom.xml b/pom.xml index e5cfb58f94d..2cfeb65e757 100644 --- a/pom.xml +++ b/pom.xml @@ -561,28 +561,23 @@ org.revapi revapi-maven-plugin - 0.10.5 + 0.15.1 false \d+\.\d+\.\d+ - - - - - java.class.externalClassExposedInAPI - - - ${project.groupId}:${project.artifactId}:RELEASE + + revapi.json + org.revapi revapi-java - 0.22.1 + 0.28.4 @@ -596,9 +591,33 @@ flatten-maven-plugin 1.2.1 + + org.apache.maven.plugins + maven-enforcer-plugin + 3.5.0 + + + maven-enforcer-plugin + + + enforce-maven + + enforce + + + + + + [3.8.1,) + + + + + + maven-compiler-plugin @@ -901,12 +920,6 @@ limitations under the License.]]> check - - - - revapi.json - - diff --git a/query-builder/revapi.json b/query-builder/revapi.json index c4d8aa27212..ed97379332c 100644 --- a/query-builder/revapi.json +++ b/query-builder/revapi.json @@ -1,5 +1,3 @@ -// Configures Revapi (https://revapi.org/getting-started.html) to check API compatibility between -// successive driver versions. 
{ "revapi": { "java": { @@ -11,7 +9,7 @@ "com\\.datastax\\.(oss|dse)\\.driver\\.internal(\\..+)?", "com\\.datastax\\.oss\\.driver\\.shaded(\\..+)?", "org\\.assertj(\\..+)?", - // Don't re-check sibling modules that this module depends on + "// Don't re-check sibling modules that this module depends on", "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.core(\\..+)?" ] } @@ -2782,8 +2780,11 @@ "code": "java.method.addedToInterface", "new": "method com.datastax.oss.driver.api.querybuilder.select.Select com.datastax.oss.driver.api.querybuilder.select.Select::orderByAnnOf(com.datastax.oss.driver.api.core.CqlIdentifier, com.datastax.oss.driver.api.core.data.CqlVector)", "justification": "JAVA-3118: Add support for vector data type in Schema Builder, QueryBuilder" + }, + { + "code": "java.method.varargOverloadsOnlyDifferInVarargParameter", + "justification": "CASSJAVA-102: Suppress newly-supported varargs check" } ] } } - diff --git a/test-infra/revapi.json b/test-infra/revapi.json index c75a98cb4af..293d9f4d142 100644 --- a/test-infra/revapi.json +++ b/test-infra/revapi.json @@ -1,5 +1,3 @@ -// Configures Revapi (https://revapi.org/getting-started.html) to check API compatibility between -// successive driver versions. { "revapi": { "java": { @@ -12,7 +10,7 @@ "com\\.datastax\\.oss\\.driver\\.shaded(\\..+)?", "com\\.datastax\\.oss\\.simulacron(\\..+)?", "org\\.assertj(\\..+)?", - // Don't re-check sibling modules that this module depends on + "// Don't re-check sibling modules that this module depends on", "com\\.datastax\\.(oss|dse)\\.driver\\.api\\.core(\\..+)?" 
] } From f49e19b8c7e3bff6e5e4e8003484427c95bde027 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Mon, 7 Jul 2025 10:05:15 -0500 Subject: [PATCH 960/979] ninja-fix: updating OS label in Jenkinsfile after upgrade to Focal for runner --- Jenkinsfile-datastax | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile-datastax b/Jenkinsfile-datastax index 73b977bdf9f..602f33101ca 100644 --- a/Jenkinsfile-datastax +++ b/Jenkinsfile-datastax @@ -402,7 +402,7 @@ pipeline { } environment { - OS_VERSION = 'ubuntu/bionic64/java-driver' + OS_VERSION = 'ubuntu/focal64/java-driver' JABBA_SHELL = '/usr/lib/jabba/jabba.sh' CCM_ENVIRONMENT_SHELL = '/usr/local/bin/ccm_environment.sh' SERIAL_ITS_ARGUMENT = "-DskipSerialITs=${params.SKIP_SERIAL_ITS}" From 17ebe6092e2877d8c524e07489c4c3d005cfeea5 Mon Sep 17 00:00:00 2001 From: janehe Date: Mon, 14 Jul 2025 15:09:25 -0700 Subject: [PATCH 961/979] ninja-fix: openjdk@1.17.0 instead of openjdk@17 for ASF CI patch by Jane He; reviewed by Bret McGuire and Alexandre Dutra --- Jenkinsfile-asf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile-asf b/Jenkinsfile-asf index 24800ba9051..4b5041903c1 100644 --- a/Jenkinsfile-asf +++ b/Jenkinsfile-asf @@ -35,7 +35,7 @@ pipeline { axes { axis { name 'TEST_JAVA_VERSION' - values 'openjdk@1.8.0-292', 'openjdk@1.11.0-9', 'openjdk@17', 'openjdk@1.21.0' + values 'openjdk@1.8.0-292', 'openjdk@1.11.0-9', 'openjdk@1.17.0', 'openjdk@1.21.0' } axis { name 'SERVER_VERSION' From ddd6f03d7107df03e6e96e9fe37f434e4c742a9d Mon Sep 17 00:00:00 2001 From: Jason Koch Date: Mon, 17 Mar 2025 10:17:06 -0700 Subject: [PATCH 962/979] Remove unnecessary locking in DefaultNettyOptions This value is initialized at constructor time and marked final, so it can never change. There is no need to have access to this reference synchronized. 
Patch by Jason Koch; reviewed by Alexandre Dutra, Andy Tolbert and Jane He --- .../oss/driver/internal/core/context/DefaultNettyOptions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java index c5d3b3670f0..763a71f8b12 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultNettyOptions.java @@ -200,7 +200,7 @@ public Future onClose() { } @Override - public synchronized Timer getTimer() { + public Timer getTimer() { return timer; } } From f32069bd8abfae75f451ff4f47c44c1cca8dbd1e Mon Sep 17 00:00:00 2001 From: Michael Karsten Date: Fri, 21 Mar 2025 12:09:31 -0700 Subject: [PATCH 963/979] CASSJAVA-89 fix: support schema options that changed in Cassandra 5.0 Patch by Michael Karsten; reviewed by Abe Ratnofsky and Andy Tolbert for CASSJAVA-89 --- .../querybuilder/RelationOptionsIT.java | 131 ++++++++++++++++++ .../querybuilder/schema/RelationOptions.java | 126 ++++++++++++++--- .../schema/CreateDseTableTest.java | 65 +++++++++ .../querybuilder/schema/CreateTableTest.java | 55 ++++++++ 4 files changed, 355 insertions(+), 22 deletions(-) create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/RelationOptionsIT.java diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/RelationOptionsIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/RelationOptionsIT.java new file mode 100644 index 00000000000..fc571ccf44d --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/querybuilder/RelationOptionsIT.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.querybuilder; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.metadata.schema.KeyspaceMetadata; +import com.datastax.oss.driver.api.core.type.DataTypes; +import com.datastax.oss.driver.api.querybuilder.SchemaBuilder; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.requirement.BackendRequirement; +import com.datastax.oss.driver.api.testinfra.requirement.BackendType; +import com.datastax.oss.driver.api.testinfra.session.SessionRule; +import com.datastax.oss.driver.categories.ParallelizableTests; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestName; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class RelationOptionsIT { + + private CcmRule ccmRule = CcmRule.getInstance(); + + private SessionRule sessionRule = SessionRule.builder(ccmRule).build(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule).around(sessionRule); + + @Rule public TestName name = new TestName(); + + @Test + @BackendRequirement( + type = 
BackendType.CASSANDRA, + minInclusive = "3.0", + description = "CRC check chance was moved to top level table in Cassandra 3.0") + public void should_create_table_with_crc_check_chance() { + sessionRule + .session() + .execute( + SchemaBuilder.createTable(name.getMethodName()) + .withPartitionKey("id", DataTypes.INT) + .withColumn("name", DataTypes.TEXT) + .withColumn("age", DataTypes.INT) + .withCRCCheckChance(0.8) + .build()); + KeyspaceMetadata keyspaceMetadata = + sessionRule + .session() + .getMetadata() + .getKeyspace(sessionRule.keyspace()) + .orElseThrow(AssertionError::new); + String describeOutput = keyspaceMetadata.describeWithChildren(true).trim(); + + assertThat(describeOutput).contains("crc_check_chance = 0.8"); + } + + @Test + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "5.0", + description = "chunk_length_kb was renamed to chunk_length_in_kb in Cassandra 5.0") + public void should_create_table_with_chunk_length_in_kb() { + sessionRule + .session() + .execute( + SchemaBuilder.createTable(name.getMethodName()) + .withPartitionKey("id", DataTypes.INT) + .withColumn("name", DataTypes.TEXT) + .withColumn("age", DataTypes.INT) + .withLZ4Compression(4096) + .build()); + KeyspaceMetadata keyspaceMetadata = + sessionRule + .session() + .getMetadata() + .getKeyspace(sessionRule.keyspace()) + .orElseThrow(AssertionError::new); + String describeOutput = keyspaceMetadata.describeWithChildren(true).trim(); + + assertThat(describeOutput).contains("'class':'org.apache.cassandra.io.compress.LZ4Compressor'"); + assertThat(describeOutput).contains("'chunk_length_in_kb':'4096'"); + } + + @Test + @BackendRequirement( + type = BackendType.CASSANDRA, + minInclusive = "3.0", + maxExclusive = "5.0", + description = + "Deprecated compression options should still work with Cassandra >= 3.0 & < 5.0") + public void should_create_table_with_deprecated_options() { + sessionRule + .session() + .execute( + SchemaBuilder.createTable(name.getMethodName()) + 
.withPartitionKey("id", DataTypes.INT) + .withColumn("name", DataTypes.TEXT) + .withColumn("age", DataTypes.INT) + .withLZ4Compression(4096, 0.8) + .build()); + KeyspaceMetadata keyspaceMetadata = + sessionRule + .session() + .getMetadata() + .getKeyspace(sessionRule.keyspace()) + .orElseThrow(AssertionError::new); + String describeOutput = keyspaceMetadata.describeWithChildren(true).trim(); + + assertThat(describeOutput).contains("'class':'org.apache.cassandra.io.compress.LZ4Compressor'"); + assertThat(describeOutput).contains("'chunk_length_in_kb':'4096'"); + assertThat(describeOutput).contains("crc_check_chance = 0.8"); + } +} diff --git a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/RelationOptions.java b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/RelationOptions.java index 022562def81..49b342acb7f 100644 --- a/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/RelationOptions.java +++ b/query-builder/src/main/java/com/datastax/oss/driver/api/querybuilder/schema/RelationOptions.java @@ -58,6 +58,18 @@ default SelfT withCDC(boolean enabled) { return withOption("cdc", enabled); } + /** + * Defines the crc check chance. + * + *

          Note that using this option with a version of Apache Cassandra less than 3.0 will raise a + * syntax error. + */ + @NonNull + @CheckReturnValue + default SelfT withCRCCheckChance(double crcCheckChance) { + return withOption("crc_check_chance", crcCheckChance); + } + /** * Defines the caching criteria. * @@ -97,22 +109,32 @@ default SelfT withCompaction(@NonNull CompactionStrategy compactionStrategy) } /** - * Configures compression using the LZ4 algorithm with the given chunk length and crc check - * chance. - * - * @see #withCompression(String, int, double) + * @deprecated This method only exists for backward compatibility. Will not work with Apache + * Cassandra 5.0 or later. Use {@link #withLZ4Compression(int)} instead. */ + @Deprecated @NonNull @CheckReturnValue default SelfT withLZ4Compression(int chunkLengthKB, double crcCheckChance) { return withCompression("LZ4Compressor", chunkLengthKB, crcCheckChance); } + /** + * Configures compression using the LZ4 algorithm with the given chunk length. + * + * @see #withCompression(String, int) + */ + @NonNull + @CheckReturnValue + default SelfT withLZ4Compression(int chunkLengthKB) { + return withCompression("LZ4Compressor", chunkLengthKB); + } + /** * Configures compression using the LZ4 algorithm using the default configuration (64kb - * chunk_length, and 1.0 crc_check_chance). + * chunk_length). * - * @see #withCompression(String, int, double) + * @see #withCompression(String, int) */ @NonNull @CheckReturnValue @@ -121,22 +143,57 @@ default SelfT withLZ4Compression() { } /** - * Configures compression using the Snappy algorithm with the given chunk length and crc check - * chance. + * Configures compression using the Zstd algorithm with the given chunk length. 
* - * @see #withCompression(String, int, double) + * @see #withCompression(String, int) */ @NonNull @CheckReturnValue + default SelfT withZstdCompression(int chunkLengthKB) { + return withCompression("ZstdCompressor", chunkLengthKB); + } + + /** + * Configures compression using the Zstd algorithm using the default configuration (64kb + * chunk_length). + * + * @see #withCompression(String, int) + */ + @NonNull + @CheckReturnValue + default SelfT withZstdCompression() { + return withCompression("ZstdCompressor"); + } + + /** + * @deprecated This method only exists for backward compatibility. Will not work with Apache + * Cassandra 5.0 or later due to removal of deprecated table properties (CASSANDRA-18742). Use + * {@link #withSnappyCompression(int)} instead. + */ + @Deprecated + @NonNull + @CheckReturnValue default SelfT withSnappyCompression(int chunkLengthKB, double crcCheckChance) { return withCompression("SnappyCompressor", chunkLengthKB, crcCheckChance); } + /** + * Configures compression using the Snappy algorithm with the given chunk length. + * + * @see #withCompression(String, int) + */ + @NonNull + @CheckReturnValue + default SelfT withSnappyCompression(int chunkLengthKB) { + return withCompression("SnappyCompressor", chunkLengthKB); + } + /** * Configures compression using the Snappy algorithm using the default configuration (64kb - * chunk_length, and 1.0 crc_check_chance). + * chunk_length). * - * @see #withCompression(String, int, double) + * @see #withCompression(String, int) */ @NonNull @CheckReturnValue @@ -145,22 +202,34 @@ default SelfT withSnappyCompression() { } /** - * Configures compression using the Deflate algorithm with the given chunk length and crc check - * chance. - * - * @see #withCompression(String, int, double) + * @deprecated This method only exists for backward compatibility. Will not work with Apache + * Cassandra 5.0 or later due to removal of deprecated table properties (CASSANDRA-18742). 
Use + * {@link #withDeflateCompression(int)} instead. */ + @Deprecated @NonNull @CheckReturnValue default SelfT withDeflateCompression(int chunkLengthKB, double crcCheckChance) { return withCompression("DeflateCompressor", chunkLengthKB, crcCheckChance); } + /** + * Configures compression using the Deflate algorithm with the given chunk length. + * + * @see #withCompression(String, int) + */ + @NonNull + @CheckReturnValue + default SelfT withDeflateCompression(int chunkLengthKB) { + return withCompression("DeflateCompressor", chunkLengthKB); + } + /** * Configures compression using the Deflate algorithm using the default configuration (64kb - * chunk_length, and 1.0 crc_check_chance). + * chunk_length). * - * @see #withCompression(String, int, double) + * @see #withCompression(String, int) */ @NonNull @CheckReturnValue @@ -170,13 +239,13 @@ default SelfT withDeflateCompression() { /** * Configures compression using the given algorithm using the default configuration (64kb - * chunk_length, and 1.0 crc_check_chance). + * chunk_length). * *

          Unless specifying a custom compression algorithm implementation, it is recommended to use * {@link #withLZ4Compression()}, {@link #withSnappyCompression()}, or {@link * #withDeflateCompression()}. * - * @see #withCompression(String, int, double) + * @see #withCompression(String, int) */ @NonNull @CheckReturnValue @@ -185,7 +254,7 @@ default SelfT withCompression(@NonNull String compressionAlgorithmName) { } /** - * Configures compression using the given algorithm, chunk length and crc check chance. + * Configures compression using the given algorithm, chunk length. * *

          Unless specifying a custom compression algorithm implementation, it is recommended to use * {@link #withLZ4Compression()}, {@link #withSnappyCompression()}, or {@link @@ -193,11 +262,24 @@ default SelfT withCompression(@NonNull String compressionAlgorithmName) { * * @param compressionAlgorithmName The class name of the compression algorithm. * @param chunkLengthKB The chunk length in KB of compression blocks. Defaults to 64. - * @param crcCheckChance The probability (0.0 to 1.0) that checksum will be checked on each read. - * Defaults to 1.0. */ @NonNull @CheckReturnValue + default SelfT withCompression(@NonNull String compressionAlgorithmName, int chunkLengthKB) { + return withOption( + "compression", + ImmutableMap.of("class", compressionAlgorithmName, "chunk_length_in_kb", chunkLengthKB)); + } + + /** + * @deprecated This method only exists for backward compatibility. Will not work with Apache + * Cassandra 5.0 or later due to removal of deprecated table properties (CASSANDRA-18742). Use + * {@link #withCompression(String, int)} instead. 
+ */ + @NonNull + @CheckReturnValue + @Deprecated default SelfT withCompression( @NonNull String compressionAlgorithmName, int chunkLengthKB, double crcCheckChance) { return withOption( diff --git a/query-builder/src/test/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseTableTest.java b/query-builder/src/test/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseTableTest.java index 7fec9674628..d8ee1c4e380 100644 --- a/query-builder/src/test/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseTableTest.java +++ b/query-builder/src/test/java/com/datastax/dse/driver/api/querybuilder/schema/CreateDseTableTest.java @@ -195,6 +195,17 @@ public void should_generate_create_table_lz4_compression() { @Test public void should_generate_create_table_lz4_compression_options() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withLZ4Compression(1024)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'LZ4Compressor','chunk_length_in_kb':1024}"); + } + + @Test + public void should_generate_create_table_lz4_compression_options_crc() { assertThat( createDseTable("bar") .withPartitionKey("k", DataTypes.INT) @@ -204,6 +215,28 @@ public void should_generate_create_table_lz4_compression_options() { "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'LZ4Compressor','chunk_length_kb':1024,'crc_check_chance':0.5}"); } + @Test + public void should_generate_create_table_zstd_compression() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withZstdCompression()) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'ZstdCompressor'}"); + } + + @Test + public void should_generate_create_table_zstd_compression_options() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + 
.withZstdCompression(1024)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'ZstdCompressor','chunk_length_in_kb':1024}"); + } + @Test public void should_generate_create_table_snappy_compression() { assertThat( @@ -217,6 +250,17 @@ public void should_generate_create_table_snappy_compression() { @Test public void should_generate_create_table_snappy_compression_options() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withSnappyCompression(2048)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'SnappyCompressor','chunk_length_in_kb':2048}"); + } + + @Test + public void should_generate_create_table_snappy_compression_options_crc() { assertThat( createDseTable("bar") .withPartitionKey("k", DataTypes.INT) @@ -239,6 +283,17 @@ public void should_generate_create_table_deflate_compression() { @Test public void should_generate_create_table_deflate_compression_options() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withDeflateCompression(4096)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'DeflateCompressor','chunk_length_in_kb':4096}"); + } + + @Test + public void should_generate_create_table_deflate_compression_options_crc() { assertThat( createDseTable("bar") .withPartitionKey("k", DataTypes.INT) @@ -389,4 +444,14 @@ public void should_generate_create_table_with_named_edge() { + "FROM person(contributor) " + "TO soft((company_name,software_name),software_version)"); } + + @Test + public void should_generate_create_table_crc_check_chance() { + assertThat( + createDseTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withCRCCheckChance(0.8)) + .hasCql("CREATE TABLE bar (k int PRIMARY KEY,v text) WITH crc_check_chance=0.8"); + } } diff --git 
a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java index 15cd12c75eb..31efc278472 100644 --- a/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java +++ b/query-builder/src/test/java/com/datastax/oss/driver/api/querybuilder/schema/CreateTableTest.java @@ -199,6 +199,17 @@ public void should_generate_create_table_lz4_compression() { @Test public void should_generate_create_table_lz4_compression_options() { + assertThat( + createTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withLZ4Compression(1024)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'LZ4Compressor','chunk_length_in_kb':1024}"); + } + + @Test + public void should_generate_create_table_lz4_compression_options_crc() { assertThat( createTable("bar") .withPartitionKey("k", DataTypes.INT) @@ -208,6 +219,28 @@ public void should_generate_create_table_lz4_compression_options() { "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'LZ4Compressor','chunk_length_kb':1024,'crc_check_chance':0.5}"); } + @Test + public void should_generate_create_table_zstd_compression() { + assertThat( + createTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withZstdCompression()) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'ZstdCompressor'}"); + } + + @Test + public void should_generate_create_table_zstd_compression_options() { + assertThat( + createTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withZstdCompression(1024)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'ZstdCompressor','chunk_length_in_kb':1024}"); + } + @Test public void should_generate_create_table_snappy_compression() 
{ assertThat( @@ -221,6 +254,17 @@ public void should_generate_create_table_snappy_compression() { @Test public void should_generate_create_table_snappy_compression_options() { + assertThat( + createTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withSnappyCompression(2048)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'SnappyCompressor','chunk_length_in_kb':2048}"); + } + + @Test + public void should_generate_create_table_snappy_compression_options_crc() { assertThat( createTable("bar") .withPartitionKey("k", DataTypes.INT) @@ -243,6 +287,17 @@ public void should_generate_create_table_deflate_compression() { @Test public void should_generate_create_table_deflate_compression_options() { + assertThat( + createTable("bar") + .withPartitionKey("k", DataTypes.INT) + .withColumn("v", DataTypes.TEXT) + .withDeflateCompression(4096)) + .hasCql( + "CREATE TABLE bar (k int PRIMARY KEY,v text) WITH compression={'class':'DeflateCompressor','chunk_length_in_kb':4096}"); + } + + @Test + public void should_generate_create_table_deflate_compression_options_crc() { assertThat( createTable("bar") .withPartitionKey("k", DataTypes.INT) From 7e21eb20283f3781a0a748741b768d0adf0fc85b Mon Sep 17 00:00:00 2001 From: Jason Koch Date: Mon, 3 Feb 2025 13:46:32 -0800 Subject: [PATCH 964/979] Eliminate lock in ConcurrencyLimitingRequestThrottler Following from 6d3ba47 this changes the throttler to a complete lock-free implementation. Update the related comments and README now that it is lock-free. 
Patch by Jason Koch; reviewed by Alexandre Dutra and Andy Tolbert --- .../session/throttling/RequestThrottler.java | 8 +- .../ConcurrencyLimitingRequestThrottler.java | 180 ++++++++---------- manual/core/non_blocking/README.md | 14 +- 3 files changed, 90 insertions(+), 112 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java index 7e2b41ebbdb..73d347d533e 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.java @@ -23,10 +23,10 @@ /** * Limits the number of concurrent requests executed by the driver. * - *

          Usage in non-blocking applications: beware that all built-in implementations of this interface - * use locks for internal coordination, and do not qualify as lock-free, with the obvious exception - * of {@code PassThroughRequestThrottler}. If your application enforces strict lock-freedom, then - * request throttling should not be enabled. + *

          Usage in non-blocking applications: beware that some implementations of this interface use + * locks for internal coordination, and do not qualify as lock-free. If your application enforces + * strict lock-freedom, then you should use the {@code PassThroughRequestThrottler} or the {@code + * ConcurrencyLimitingRequestThrottler}. */ public interface RequestThrottler extends Closeable { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java index ffe0ffe9650..8146c5b113a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/session/throttling/ConcurrencyLimitingRequestThrottler.java @@ -26,10 +26,9 @@ import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; -import java.util.ArrayDeque; import java.util.Deque; -import java.util.concurrent.locks.ReentrantLock; -import net.jcip.annotations.GuardedBy; +import java.util.concurrent.ConcurrentLinkedDeque; +import java.util.concurrent.atomic.AtomicInteger; import net.jcip.annotations.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,17 +60,12 @@ public class ConcurrencyLimitingRequestThrottler implements RequestThrottler { private final String logPrefix; private final int maxConcurrentRequests; private final int maxQueueSize; - - private final ReentrantLock lock = new ReentrantLock(); - - @GuardedBy("lock") - private int concurrentRequests; - - @GuardedBy("lock") - private final Deque queue = new ArrayDeque<>(); - - @GuardedBy("lock") - private boolean closed; + private final AtomicInteger concurrentRequests = new AtomicInteger(0); + // CLQ 
is not O(1) for size(), as it forces a full iteration of the queue. So, we track + // the size of the queue explicitly. + private final Deque queue = new ConcurrentLinkedDeque<>(); + private final AtomicInteger queueSize = new AtomicInteger(0); + private volatile boolean closed = false; public ConcurrencyLimitingRequestThrottler(DriverContext context) { this.logPrefix = context.getSessionName(); @@ -88,50 +82,62 @@ public ConcurrencyLimitingRequestThrottler(DriverContext context) { @Override public void register(@NonNull Throttled request) { - boolean notifyReadyRequired = false; + if (closed) { + LOG.trace("[{}] Rejecting request after shutdown", logPrefix); + fail(request, "The session is shutting down"); + return; + } - lock.lock(); - try { - if (closed) { - LOG.trace("[{}] Rejecting request after shutdown", logPrefix); - fail(request, "The session is shutting down"); - } else if (queue.isEmpty() && concurrentRequests < maxConcurrentRequests) { - // We have capacity for one more concurrent request + // Implementation note: Technically the "concurrent requests" or "queue size" + // could read transiently over the limit, but the queue itself will never grow + // beyond the limit since we always check for that condition and revert if + // over-limit. We do this instead of a CAS-loop to avoid the potential loop. 
+ + // If no backlog exists AND we get capacity, we can execute immediately + if (queueSize.get() == 0) { + // Take a claim first, and then check if we are OK to proceed + int newConcurrent = concurrentRequests.incrementAndGet(); + if (newConcurrent <= maxConcurrentRequests) { LOG.trace("[{}] Starting newly registered request", logPrefix); - concurrentRequests += 1; - notifyReadyRequired = true; - } else if (queue.size() < maxQueueSize) { - LOG.trace("[{}] Enqueuing request", logPrefix); - queue.add(request); + request.onThrottleReady(false); + return; } else { - LOG.trace("[{}] Rejecting request because of full queue", logPrefix); - fail( - request, - String.format( - "The session has reached its maximum capacity " - + "(concurrent requests: %d, queue size: %d)", - maxConcurrentRequests, maxQueueSize)); + // We exceeded the limit, decrement the count and fall through to the queuing logic + concurrentRequests.decrementAndGet(); } - } finally { - lock.unlock(); } - // no need to hold the lock while allowing the task to progress - if (notifyReadyRequired) { - request.onThrottleReady(false); + // If we have a backlog, or we failed to claim capacity, try to enqueue + int newQueueSize = queueSize.incrementAndGet(); + if (newQueueSize <= maxQueueSize) { + LOG.trace("[{}] Enqueuing request", logPrefix); + queue.offer(request); + + // Double-check that we were still supposed to be enqueued; it is possible + // that the session was closed while we were enqueuing, it's also possible + // that it is right now removing the request, so we need to check both + if (closed) { + if (queue.remove(request)) { + queueSize.decrementAndGet(); + LOG.trace("[{}] Rejecting late request after shutdown", logPrefix); + fail(request, "The session is shutting down"); + } + } + } else { + LOG.trace("[{}] Rejecting request because of full queue", logPrefix); + queueSize.decrementAndGet(); + fail( + request, + String.format( + "The session has reached its maximum capacity " + + "(concurrent 
requests: %d, queue size: %d)", + maxConcurrentRequests, maxQueueSize)); } } @Override public void signalSuccess(@NonNull Throttled request) { - Throttled nextRequest = null; - lock.lock(); - try { - nextRequest = onRequestDoneAndDequeNext(); - } finally { - lock.unlock(); - } - + Throttled nextRequest = onRequestDoneAndDequeNext(); if (nextRequest != null) { nextRequest.onThrottleReady(true); } @@ -145,17 +151,13 @@ public void signalError(@NonNull Throttled request, @NonNull Throwable error) { @Override public void signalTimeout(@NonNull Throttled request) { Throttled nextRequest = null; - lock.lock(); - try { - if (!closed) { - if (queue.remove(request)) { // The request timed out before it was active - LOG.trace("[{}] Removing timed out request from the queue", logPrefix); - } else { - nextRequest = onRequestDoneAndDequeNext(); - } + if (!closed) { + if (queue.remove(request)) { // The request timed out before it was active + queueSize.decrementAndGet(); + LOG.trace("[{}] Removing timed out request from the queue", logPrefix); + } else { + nextRequest = onRequestDoneAndDequeNext(); } - } finally { - lock.unlock(); } if (nextRequest != null) { @@ -166,17 +168,13 @@ public void signalTimeout(@NonNull Throttled request) { @Override public void signalCancel(@NonNull Throttled request) { Throttled nextRequest = null; - lock.lock(); - try { - if (!closed) { - if (queue.remove(request)) { // The request has been cancelled before it was active - LOG.trace("[{}] Removing cancelled request from the queue", logPrefix); - } else { - nextRequest = onRequestDoneAndDequeNext(); - } + if (!closed) { + if (queue.remove(request)) { // The request has been cancelled before it was active + queueSize.decrementAndGet(); + LOG.trace("[{}] Removing cancelled request from the queue", logPrefix); + } else { + nextRequest = onRequestDoneAndDequeNext(); } - } finally { - lock.unlock(); } if (nextRequest != null) { @@ -184,17 +182,16 @@ public void signalCancel(@NonNull Throttled request) 
{ } } - @SuppressWarnings("GuardedBy") // this method is only called with the lock held @Nullable private Throttled onRequestDoneAndDequeNext() { - assert lock.isHeldByCurrentThread(); if (!closed) { - if (queue.isEmpty()) { - concurrentRequests -= 1; + Throttled nextRequest = queue.poll(); + if (nextRequest == null) { + concurrentRequests.decrementAndGet(); } else { + queueSize.decrementAndGet(); LOG.trace("[{}] Starting dequeued request", logPrefix); - // don't touch concurrentRequests since we finished one but started another - return queue.poll(); + return nextRequest; } } @@ -204,45 +201,28 @@ private Throttled onRequestDoneAndDequeNext() { @Override public void close() { - lock.lock(); - try { - closed = true; - LOG.debug("[{}] Rejecting {} queued requests after shutdown", logPrefix, queue.size()); - for (Throttled request : queue) { - fail(request, "The session is shutting down"); - } - } finally { - lock.unlock(); + closed = true; + + LOG.debug("[{}] Rejecting {} queued requests after shutdown", logPrefix, queueSize.get()); + Throttled request; + while ((request = queue.poll()) != null) { + queueSize.decrementAndGet(); + fail(request, "The session is shutting down"); } } public int getQueueSize() { - lock.lock(); - try { - return queue.size(); - } finally { - lock.unlock(); - } + return queueSize.get(); } @VisibleForTesting int getConcurrentRequests() { - lock.lock(); - try { - return concurrentRequests; - } finally { - lock.unlock(); - } + return concurrentRequests.get(); } @VisibleForTesting Deque getQueue() { - lock.lock(); - try { - return queue; - } finally { - lock.unlock(); - } + return queue; } private static void fail(Throttled request, String message) { diff --git a/manual/core/non_blocking/README.md b/manual/core/non_blocking/README.md index 7abe9d856a3..f320ffd13d2 100644 --- a/manual/core/non_blocking/README.md +++ b/manual/core/non_blocking/README.md @@ -152,15 +152,13 @@ should not be used if strict lock-freedom is enforced. 
[`SafeInitNodeStateListener`]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/metadata/SafeInitNodeStateListener.html -The same is valid for both built-in [request throttlers]: +The `RateLimitingRequestThrottler` is currently blocking. The `ConcurrencyLimitingRequestThrottler` +is lock-free. -* `ConcurrencyLimitingRequestThrottler` -* `RateLimitingRequestThrottler` - -See the section about [throttling](../throttling) for details about these components. Again, they -use locks internally, and depending on how many requests are being executed in parallel, the thread -contention on these locks can be high: in short, if your application enforces strict lock-freedom, -then these components should not be used. +See the section about [throttling](../throttling) for details about these components. Depending on +how many requests are being executed in parallel, the thread contention on these locks can be high: +in short, if your application enforces strict lock-freedom, then you should not use the +`RateLimitingRequestThrottler`. [request throttlers]: https://docs.datastax.com/en/drivers/java/4.17/com/datastax/oss/driver/api/core/session/throttling/RequestThrottler.html From 69eebb939c71dbb709099f7bf04b1a8b7e17012f Mon Sep 17 00:00:00 2001 From: Ivan Sopov Date: Mon, 28 Jul 2025 11:28:59 +0300 Subject: [PATCH 965/979] Change groupId in README patch by Ivan Sopov; reviewed by Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/2049 --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b6e1cc337d8..0f6c2bb5a6f 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ and Cassandra Query Language (CQL) v3. 
## Getting the driver -The driver artifacts are published in Maven central, under the group id [com.datastax.oss]; there +The driver artifacts are published in Maven central, under the group id [org.apache.cassandra]; there are multiple modules, all prefixed with `java-driver-`. ```xml @@ -48,7 +48,7 @@ dependency if you plan to use it. Refer to each module's manual for more details ([core](manual/core/), [query builder](manual/query_builder/), [mapper](manual/mapper)). -[com.datastax.oss]: http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.datastax.oss%22 +[org.apache.cassandra]: http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.cassandra%22 ## Compatibility From 05e6717253d1c6ae0c5a9ce20fcf5ab448d32ec6 Mon Sep 17 00:00:00 2001 From: Kefu Chai Date: Fri, 12 Jul 2024 09:37:06 +0800 Subject: [PATCH 966/979] manual: correct the codeblock directive patch by Kefu Chai; reviewed by Andy Tolbert and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/1940 --- manual/mapper/daos/getentity/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/manual/mapper/daos/getentity/README.md b/manual/mapper/daos/getentity/README.md index abb7cb076c8..de9a530b558 100644 --- a/manual/mapper/daos/getentity/README.md +++ b/manual/mapper/daos/getentity/README.md @@ -108,7 +108,7 @@ The method can return: * a single entity instance. If the argument is a result set type, the generated code will extract the first row and convert it, or return `null` if the result set is empty. 
- ````java + ```java @GetEntity Product asProduct(Row row); From ff2d7f26c63e7a3ed3a74906771b246949112414 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Mon, 14 Apr 2025 16:40:53 +0200 Subject: [PATCH 967/979] CASSJAVA-92: Local DC provided for nodetool clientstats patch by Lukasz Antoniak; reviewed by Bret McGuire and Abe Ratnofsky for CASSJAVA-92 --- .../loadbalancing/LoadBalancingPolicy.java | 7 + .../core/context/DefaultDriverContext.java | 14 +- .../core/context/StartupOptionsBuilder.java | 28 ++++ .../BasicLoadBalancingPolicy.java | 31 ++++- .../DefaultLoadBalancingPolicy.java | 10 ++ .../helper/OptionalLocalDcHelper.java | 31 +++-- .../FixedHostNameAddressTranslatorTest.java | 3 +- .../SubnetAddressTranslatorTest.java | 3 +- .../context/DefaultDriverContextTest.java | 2 +- .../context/MockedDriverContextFactory.java | 124 +++++++++++++++--- .../context/StartupOptionsBuilderTest.java | 45 ++++++- 11 files changed, 252 insertions(+), 46 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java index d890ae6c100..de0d9db4ebd 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/loadbalancing/LoadBalancingPolicy.java @@ -24,6 +24,7 @@ import com.datastax.oss.driver.api.core.tracker.RequestTracker; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; +import java.util.Collections; import java.util.Map; import java.util.Optional; import java.util.Queue; @@ -76,6 +77,12 @@ default Optional getRequestTracker() { */ void init(@NonNull Map nodes, @NonNull DistanceReporter distanceReporter); + /** Returns map containing details that impact C* node connectivity. 
*/ + @NonNull + default Map getStartupConfiguration() { + return Collections.emptyMap(); + } + /** * Returns the coordinators to use for a new query. * diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index a24b632f640..0d7db27dfbe 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -216,8 +216,8 @@ public class DefaultDriverContext implements InternalDriverContext { new LazyReference<>("metricIdGenerator", this::buildMetricIdGenerator, cycleDetector); private final LazyReference requestThrottlerRef = new LazyReference<>("requestThrottler", this::buildRequestThrottler, cycleDetector); - private final LazyReference> startupOptionsRef = - new LazyReference<>("startupOptions", this::buildStartupOptions, cycleDetector); + private final LazyReference startupOptionsRef = + new LazyReference<>("startupOptionsFactory", this::buildStartupOptionsFactory, cycleDetector); private final LazyReference nodeStateListenerRef; private final LazyReference schemaChangeListenerRef; private final LazyReference requestTrackerRef; @@ -335,16 +335,15 @@ public DefaultDriverContext( } /** - * Builds a map of options to send in a Startup message. + * Returns builder of options to send in a Startup message. 
* * @see #getStartupOptions() */ - protected Map buildStartupOptions() { + protected StartupOptionsBuilder buildStartupOptionsFactory() { return new StartupOptionsBuilder(this) .withClientId(startupClientId) .withApplicationName(startupApplicationName) - .withApplicationVersion(startupApplicationVersion) - .build(); + .withApplicationVersion(startupApplicationVersion); } protected Map buildLoadBalancingPolicies() { @@ -1013,7 +1012,8 @@ public ProtocolVersion getProtocolVersion() { @NonNull @Override public Map getStartupOptions() { - return startupOptionsRef.get(); + // startup options are calculated dynamically and may vary per connection + return startupOptionsRef.get().build(); } protected RequestLogFormatter buildRequestLogFormatter() { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java index 684d6b01b9c..89a9266b3ac 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilder.java @@ -19,24 +19,34 @@ import com.datastax.dse.driver.api.core.config.DseDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.uuid.Uuids; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.protocol.internal.request.Startup; import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import com.fasterxml.jackson.databind.ObjectMapper; import edu.umd.cs.findbugs.annotations.Nullable; import java.util.Map; +import java.util.Optional; import java.util.UUID; import net.jcip.annotations.Immutable; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; @Immutable public class StartupOptionsBuilder { public static final String DRIVER_NAME_KEY = "DRIVER_NAME"; public static final String DRIVER_VERSION_KEY = "DRIVER_VERSION"; + public static final String DRIVER_BAGGAGE = "DRIVER_BAGGAGE"; public static final String APPLICATION_NAME_KEY = "APPLICATION_NAME"; public static final String APPLICATION_VERSION_KEY = "APPLICATION_VERSION"; public static final String CLIENT_ID_KEY = "CLIENT_ID"; + private static final Logger LOG = LoggerFactory.getLogger(StartupOptionsBuilder.class); + private static final ObjectMapper mapper = new ObjectMapper(); + protected final InternalDriverContext context; private UUID clientId; private String applicationName; @@ -119,6 +129,7 @@ public Map build() { if (applicationVersion != null) { builder.put(APPLICATION_VERSION_KEY, applicationVersion); } + driverBaggage().ifPresent(s -> builder.put(DRIVER_BAGGAGE, s)); return builder.build(); } @@ -142,4 +153,21 @@ protected String getDriverName() { protected String getDriverVersion() { return Session.OSS_DRIVER_COORDINATES.getVersion().toString(); } + + private Optional driverBaggage() { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + for (Map.Entry entry : + context.getLoadBalancingPolicies().entrySet()) { + Map config = entry.getValue().getStartupConfiguration(); + if (!config.isEmpty()) { + builder.put(entry.getKey(), config); + } + } + try { + return Optional.of(mapper.writeValueAsString(builder.build())); + } catch (Exception e) { + LOG.warn("Failed to construct startup driver baggage", e); + return Optional.empty(); + } + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java index 587ef4183bd..a02a5eb3148 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java +++ 
b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/BasicLoadBalancingPolicy.java @@ -45,6 +45,7 @@ import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.driver.shaded.guava.common.base.Predicates; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Lists; import com.datastax.oss.driver.shaded.guava.common.collect.Sets; import edu.umd.cs.findbugs.annotations.NonNull; @@ -155,10 +156,38 @@ public BasicLoadBalancingPolicy(@NonNull DriverContext context, @NonNull String * Before initialization, this method always returns null. */ @Nullable - protected String getLocalDatacenter() { + public String getLocalDatacenter() { return localDc; } + @NonNull + @Override + public Map getStartupConfiguration() { + ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); + if (localDc != null) { + builder.put("localDc", localDc); + } else { + // Local data center may not be discovered prior to connection pool initialization. + // In such scenario, return configured local data center name. + // Note that when using DC inferring load balancing policy, startup configuration + // may not show local DC name, because it will be discovered only once control connection + // is established and datacenter of contact points known. 
+ Optional configuredDc = + new OptionalLocalDcHelper(context, profile, logPrefix).configuredLocalDc(); + configuredDc.ifPresent(d -> builder.put("localDc", d)); + } + if (!preferredRemoteDcs.isEmpty()) { + builder.put("preferredRemoteDcs", preferredRemoteDcs); + } + if (allowDcFailoverForLocalCl) { + builder.put("allowDcFailoverForLocalCl", allowDcFailoverForLocalCl); + } + if (maxNodesPerRemoteDc > 0) { + builder.put("maxNodesPerRemoteDc", maxNodesPerRemoteDc); + } + return ImmutableMap.of(BasicLoadBalancingPolicy.class.getSimpleName(), builder.build()); + } + /** @return The nodes currently considered as live. */ protected NodeSet getLiveNodes() { return liveNodes; diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 0f03cbb3643..9c31b606f18 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -34,6 +34,7 @@ import com.datastax.oss.driver.internal.core.util.collection.QueryPlan; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; import com.datastax.oss.driver.shaded.guava.common.annotations.VisibleForTesting; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.MapMaker; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.Nullable; @@ -350,4 +351,13 @@ private boolean hasSufficientResponses(long now) { return this.oldest - threshold >= 0; } } + + @NonNull + @Override + public Map getStartupConfiguration() { + Map parent = super.getStartupConfiguration(); + return ImmutableMap.of( + DefaultLoadBalancingPolicy.class.getSimpleName(), + 
parent.get(BasicLoadBalancingPolicy.class.getSimpleName())); + } } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java index d470f96c42c..c6143f3fa16 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/helper/OptionalLocalDcHelper.java @@ -65,20 +65,14 @@ public OptionalLocalDcHelper( @Override @NonNull public Optional discoverLocalDc(@NonNull Map nodes) { - String localDc = context.getLocalDatacenter(profile.getName()); - if (localDc != null) { - LOG.debug("[{}] Local DC set programmatically: {}", logPrefix, localDc); - checkLocalDatacenterCompatibility(localDc, context.getMetadataManager().getContactPoints()); - return Optional.of(localDc); - } else if (profile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) { - localDc = profile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER); - LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDc); - checkLocalDatacenterCompatibility(localDc, context.getMetadataManager().getContactPoints()); - return Optional.of(localDc); + Optional localDc = configuredLocalDc(); + if (localDc.isPresent()) { + checkLocalDatacenterCompatibility( + localDc.get(), context.getMetadataManager().getContactPoints()); } else { LOG.debug("[{}] Local DC not set, DC awareness will be disabled", logPrefix); - return Optional.empty(); } + return localDc; } /** @@ -138,4 +132,19 @@ protected String formatDcs(Iterable nodes) { } return String.join(", ", new TreeSet<>(l)); } + + /** @return Local data center set programmatically or from configuration file. 
*/ + @NonNull + public Optional configuredLocalDc() { + String localDc = context.getLocalDatacenter(profile.getName()); + if (localDc != null) { + LOG.debug("[{}] Local DC set programmatically: {}", logPrefix, localDc); + return Optional.of(localDc); + } else if (profile.isDefined(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) { + localDc = profile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER); + LOG.debug("[{}] Local DC set from configuration: {}", logPrefix, localDc); + return Optional.of(localDc); + } + return Optional.empty(); + } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java index 92800998056..3bb9c4bc291 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/FixedHostNameAddressTranslatorTest.java @@ -26,7 +26,6 @@ import com.datastax.oss.driver.internal.core.context.DefaultDriverContext; import com.datastax.oss.driver.internal.core.context.MockedDriverContextFactory; import java.net.InetSocketAddress; -import java.util.Optional; import org.junit.Test; public class FixedHostNameAddressTranslatorTest { @@ -36,7 +35,7 @@ public void should_translate_address() { DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); when(defaultProfile.getString(ADDRESS_TRANSLATOR_ADVERTISED_HOSTNAME)).thenReturn("myaddress"); DefaultDriverContext defaultDriverContext = - MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); + MockedDriverContextFactory.defaultDriverContext(defaultProfile); FixedHostNameAddressTranslator translator = new FixedHostNameAddressTranslator(defaultDriverContext); diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java index 2aa6ae75bc2..420170654dc 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/addresstranslation/SubnetAddressTranslatorTest.java @@ -30,7 +30,6 @@ import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import java.net.InetSocketAddress; import java.util.Map; -import java.util.Optional; import org.junit.Test; @SuppressWarnings("resource") @@ -148,6 +147,6 @@ public void should_fail_on_default_address_without_port() { private static DefaultDriverContext context(Map subnetAddresses) { DriverExecutionProfile profile = mock(DriverExecutionProfile.class); when(profile.getStringMap(ADDRESS_TRANSLATOR_SUBNET_ADDRESSES)).thenReturn(subnetAddresses); - return MockedDriverContextFactory.defaultDriverContext(Optional.of(profile)); + return MockedDriverContextFactory.defaultDriverContext(profile); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java index baf101508d4..6d4585cb4d7 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContextTest.java @@ -42,7 +42,7 @@ private DefaultDriverContext buildMockedContext(Optional compressionOpti DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) .thenReturn(compressionOption.orElse("none")); - return 
MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); + return MockedDriverContextFactory.defaultDriverContext(defaultProfile); } private void doCreateCompressorTest(Optional configVal, Class expectedClz) { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java index 06817326844..a8b25193f54 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/MockedDriverContextFactory.java @@ -24,44 +24,45 @@ import com.datastax.oss.driver.api.core.config.DriverConfig; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; +import com.datastax.oss.driver.api.core.loadbalancing.LoadBalancingPolicy; +import com.datastax.oss.driver.api.core.loadbalancing.NodeDistanceEvaluator; +import com.datastax.oss.driver.api.core.metadata.Node; import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.session.ProgrammaticArguments; import com.datastax.oss.driver.api.core.tracker.RequestTracker; +import com.datastax.oss.driver.internal.core.ConsistencyLevelRegistry; +import com.datastax.oss.driver.internal.core.loadbalancing.DefaultLoadBalancingPolicy; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableMap; import com.datastax.oss.driver.shaded.guava.common.collect.Maps; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.UUID; public class MockedDriverContextFactory { public 
static DefaultDriverContext defaultDriverContext() { - return defaultDriverContext(Optional.empty()); + return defaultDriverContext(MockedDriverContextFactory.defaultProfile("datacenter1")); } public static DefaultDriverContext defaultDriverContext( - Optional profileOption) { - - /* If the caller provided a profile use that, otherwise make a new one */ - final DriverExecutionProfile profile = - profileOption.orElseGet( - () -> { - DriverExecutionProfile blankProfile = mock(DriverExecutionProfile.class); - when(blankProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) - .thenReturn("none"); - when(blankProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) - .thenReturn(Duration.ofMinutes(5)); - when(blankProfile.isDefined(DefaultDriverOption.METRICS_FACTORY_CLASS)) - .thenReturn(true); - when(blankProfile.getString(DefaultDriverOption.METRICS_FACTORY_CLASS)) - .thenReturn("DefaultMetricsFactory"); - return blankProfile; - }); + DriverExecutionProfile defaultProfile, DriverExecutionProfile... 
profiles) { /* Setup machinery to connect the input DriverExecutionProfile to the config loader */ final DriverConfig driverConfig = mock(DriverConfig.class); final DriverConfigLoader configLoader = mock(DriverConfigLoader.class); when(configLoader.getInitialConfig()).thenReturn(driverConfig); - when(driverConfig.getDefaultProfile()).thenReturn(profile); + when(driverConfig.getDefaultProfile()).thenReturn(defaultProfile); + when(driverConfig.getProfile(defaultProfile.getName())).thenReturn(defaultProfile); + + for (DriverExecutionProfile profile : profiles) { + when(driverConfig.getProfile(profile.getName())).thenReturn(profile); + } ProgrammaticArguments args = ProgrammaticArguments.builder() @@ -71,6 +72,89 @@ public static DefaultDriverContext defaultDriverContext( .withLocalDatacenters(Maps.newHashMap()) .withNodeDistanceEvaluators(Maps.newHashMap()) .build(); - return new DefaultDriverContext(configLoader, args); + + return new DefaultDriverContext(configLoader, args) { + @NonNull + @Override + public Map getLoadBalancingPolicies() { + ImmutableMap.Builder map = ImmutableMap.builder(); + map.put( + defaultProfile.getName(), + mockLoadBalancingPolicy( + this, + defaultProfile.getName(), + defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER))); + for (DriverExecutionProfile profile : profiles) { + map.put( + profile.getName(), + mockLoadBalancingPolicy( + this, + profile.getName(), + profile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER))); + } + return map.build(); + } + + @NonNull + @Override + public ConsistencyLevelRegistry getConsistencyLevelRegistry() { + return mock(ConsistencyLevelRegistry.class); + } + }; + } + + public static DriverExecutionProfile defaultProfile(String localDc) { + return createProfile(DriverExecutionProfile.DEFAULT_NAME, localDc); + } + + public static DriverExecutionProfile createProfile(String name, String localDc) { + DriverExecutionProfile defaultProfile = 
mock(DriverExecutionProfile.class); + when(defaultProfile.getName()).thenReturn(name); + when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) + .thenReturn("none"); + when(defaultProfile.getDuration(DefaultDriverOption.METRICS_NODE_EXPIRE_AFTER)) + .thenReturn(Duration.ofMinutes(5)); + when(defaultProfile.isDefined(DefaultDriverOption.METRICS_FACTORY_CLASS)).thenReturn(true); + when(defaultProfile.getString(DefaultDriverOption.METRICS_FACTORY_CLASS)) + .thenReturn("DefaultMetricsFactory"); + when(defaultProfile.getString(DefaultDriverOption.LOAD_BALANCING_LOCAL_DATACENTER)) + .thenReturn(localDc); + return defaultProfile; + } + + public static void allowRemoteDcConnectivity( + DriverExecutionProfile profile, + int maxNodesPerRemoteDc, + boolean allowRemoteSatisfyLocalDc, + List preferredRemoteDcs) { + when(profile.getInt(DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_MAX_NODES_PER_REMOTE_DC)) + .thenReturn(maxNodesPerRemoteDc); + when(profile.getBoolean( + DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_ALLOW_FOR_LOCAL_CONSISTENCY_LEVELS)) + .thenReturn(allowRemoteSatisfyLocalDc); + when(profile.getStringList(DefaultDriverOption.LOAD_BALANCING_DC_FAILOVER_PREFERRED_REMOTE_DCS)) + .thenReturn(preferredRemoteDcs); + } + + private static LoadBalancingPolicy mockLoadBalancingPolicy( + DefaultDriverContext driverContext, String profile, String localDc) { + LoadBalancingPolicy loadBalancingPolicy = + new DefaultLoadBalancingPolicy(driverContext, profile) { + @NonNull + @Override + protected Optional discoverLocalDc(@NonNull Map nodes) { + return Optional.ofNullable(localDc); + } + + @NonNull + @Override + protected NodeDistanceEvaluator createNodeDistanceEvaluator( + @Nullable String localDc, @NonNull Map nodes) { + return mock(NodeDistanceEvaluator.class); + } + }; + loadBalancingPolicy.init( + Collections.emptyMap(), mock(LoadBalancingPolicy.DistanceReporter.class)); + return loadBalancingPolicy; } } diff --git 
a/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java index 33811b2793a..d12e50b7e8e 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/context/StartupOptionsBuilderTest.java @@ -26,10 +26,10 @@ import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverExecutionProfile; import com.datastax.oss.driver.api.core.session.Session; +import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import com.datastax.oss.protocol.internal.request.Startup; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; -import java.util.Optional; import org.junit.Test; import org.junit.runner.RunWith; @@ -41,7 +41,8 @@ private DefaultDriverContext buildMockedContext(String compression) { DriverExecutionProfile defaultProfile = mock(DriverExecutionProfile.class); when(defaultProfile.getString(DefaultDriverOption.PROTOCOL_COMPRESSION, "none")) .thenReturn(compression); - return MockedDriverContextFactory.defaultDriverContext(Optional.of(defaultProfile)); + when(defaultProfile.getName()).thenReturn(DriverExecutionProfile.DEFAULT_NAME); + return MockedDriverContextFactory.defaultDriverContext(defaultProfile); } private void assertDefaultStartupOptions(Startup startup) { @@ -94,4 +95,44 @@ public void should_fail_to_build_startup_options_with_invalid_compression() { new Startup(ctx.getStartupOptions()); }); } + + @Test + public void should_include_all_local_dcs_in_startup_message() { + + DefaultDriverContext ctx = + MockedDriverContextFactory.defaultDriverContext( + MockedDriverContextFactory.defaultProfile("us-west-2"), + MockedDriverContextFactory.createProfile("oltp", "us-east-2"), + 
MockedDriverContextFactory.createProfile("olap", "eu-central-1")); + Startup startup = new Startup(ctx.getStartupOptions()); + assertThat(startup.options) + .containsEntry( + StartupOptionsBuilder.DRIVER_BAGGAGE, + "{\"default\":{\"DefaultLoadBalancingPolicy\":{\"localDc\":\"us-west-2\"}}," + + "\"oltp\":{\"DefaultLoadBalancingPolicy\":{\"localDc\":\"us-east-2\"}}," + + "\"olap\":{\"DefaultLoadBalancingPolicy\":{\"localDc\":\"eu-central-1\"}}}"); + } + + @Test + public void should_include_all_lbp_details_in_startup_message() { + + DriverExecutionProfile defaultProfile = MockedDriverContextFactory.defaultProfile("dc1"); + DriverExecutionProfile oltpProfile = MockedDriverContextFactory.createProfile("oltp", "dc1"); + MockedDriverContextFactory.allowRemoteDcConnectivity( + oltpProfile, 2, true, ImmutableList.of("dc2", "dc3")); + DefaultDriverContext ctx = + MockedDriverContextFactory.defaultDriverContext(defaultProfile, oltpProfile); + + Startup startup = new Startup(ctx.getStartupOptions()); + + assertThat(startup.options) + .containsEntry( + StartupOptionsBuilder.DRIVER_BAGGAGE, + "{\"default\":{\"DefaultLoadBalancingPolicy\":{\"localDc\":\"dc1\"}}," + + "\"oltp\":{\"DefaultLoadBalancingPolicy\":{" + + "\"localDc\":\"dc1\"," + + "\"preferredRemoteDcs\":[\"dc2\",\"dc3\"]," + + "\"allowDcFailoverForLocalCl\":true," + + "\"maxNodesPerRemoteDc\":2}}}"); + } } From e2c7ad4d11555eeacce6bd436547b403f83eb24f Mon Sep 17 00:00:00 2001 From: janehe Date: Tue, 15 Apr 2025 13:18:34 -0700 Subject: [PATCH 968/979] CASSJAVA-97: Let users inject an ID for each request and write to the custom payload patch by Jane He; reviewed by Abe Ratnofsky and Bret McGuire for CASSJAVA-97 --- core/revapi.json | 5 + .../api/core/config/DefaultDriverOption.java | 6 + .../api/core/config/TypedDriverOption.java | 4 + .../api/core/context/DriverContext.java | 5 + .../core/session/ProgrammaticArguments.java | 17 +++ .../api/core/session/SessionBuilder.java | 22 +++ 
.../api/core/tracker/RequestIdGenerator.java | 77 +++++++++++ .../api/core/tracker/RequestTracker.java | 41 +++--- .../core/context/DefaultDriverContext.java | 24 ++++ .../internal/core/cql/CqlRequestHandler.java | 59 +++++++-- .../DefaultLoadBalancingPolicy.java | 4 +- .../tracker/MultiplexingRequestTracker.java | 31 +++-- .../core/tracker/NoopRequestTracker.java | 8 +- .../internal/core/tracker/RequestLogger.java | 12 +- .../core/tracker/UuidRequestIdGenerator.java | 43 ++++++ .../tracker/W3CContextRequestIdGenerator.java | 67 ++++++++++ core/src/main/resources/reference.conf | 7 + .../core/cql/RequestHandlerTestHarness.java | 3 + .../core/tracker/RequestIdGeneratorTest.java | 80 +++++++++++ .../core/tracker/RequestIdGeneratorIT.java | 125 ++++++++++++++++++ .../tracker/RequestNodeLoggerExample.java | 8 +- manual/core/request_id/README.md | 48 +++++++ 22 files changed, 637 insertions(+), 59 deletions(-) create mode 100644 core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/tracker/UuidRequestIdGenerator.java create mode 100644 core/src/main/java/com/datastax/oss/driver/internal/core/tracker/W3CContextRequestIdGenerator.java create mode 100644 core/src/test/java/com/datastax/oss/driver/internal/core/tracker/RequestIdGeneratorTest.java create mode 100644 integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java create mode 100644 manual/core/request_id/README.md diff --git a/core/revapi.json b/core/revapi.json index f39c7d4a7c0..8c707659c13 100644 --- a/core/revapi.json +++ b/core/revapi.json @@ -7407,6 +7407,11 @@ { "code": "java.method.varargOverloadsOnlyDifferInVarargParameter", "justification": "CASSJAVA-102: Migrate revapi config into dedicated config files, ported from pom.xml" + }, + { + "code": "java.method.addedToInterface", + "new": "method java.util.Optional 
com.datastax.oss.driver.api.core.context.DriverContext::getRequestIdGenerator()", + "justification": "CASSJAVA-97: Let users inject an ID for each request and write to the custom payload" } ] } diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java index 4e45bf7b117..60c44193577 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/DefaultDriverOption.java @@ -995,6 +995,12 @@ public enum DefaultDriverOption implements DriverOption { *

          Value-type: boolean */ SSL_ALLOW_DNS_REVERSE_LOOKUP_SAN("advanced.ssl-engine-factory.allow-dns-reverse-lookup-san"), + /** + * The class of session-wide component that generates request IDs. + * + *

          Value-type: {@link String} + */ + REQUEST_ID_GENERATOR_CLASS("advanced.request-id.generator.class"), /** * An address to always translate all node addresses to that same proxy hostname no matter what IP * address a node has, but still using its native transport port. diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java index aa4e4af12dc..182753300e7 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/config/TypedDriverOption.java @@ -281,6 +281,10 @@ public String toString() { new TypedDriverOption<>( DefaultDriverOption.REQUEST_TRACKER_CLASSES, GenericType.listOf(String.class)); + /** The class of a session-wide component that generates request IDs. */ + public static final TypedDriverOption REQUEST_ID_GENERATOR_CLASS = + new TypedDriverOption<>(DefaultDriverOption.REQUEST_ID_GENERATOR_CLASS, GenericType.STRING); + /** Whether to log successful requests. 
*/ public static final TypedDriverOption REQUEST_LOGGER_SUCCESS_ENABLED = new TypedDriverOption<>( diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/context/DriverContext.java b/core/src/main/java/com/datastax/oss/driver/api/core/context/DriverContext.java index 5b32389e362..6f0afd3df8a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/context/DriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/context/DriverContext.java @@ -33,6 +33,7 @@ import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.time.TimestampGenerator; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; @@ -139,6 +140,10 @@ default SpeculativeExecutionPolicy getSpeculativeExecutionPolicy(@NonNull String @NonNull RequestTracker getRequestTracker(); + /** @return The driver's request ID generator; never {@code null}. */ + @NonNull + Optional getRequestIdGenerator(); + /** @return The driver's request throttler; never {@code null}. 
*/ @NonNull RequestThrottler getRequestThrottler(); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java index 4e08bd5434c..5e10fb4d915 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/ProgrammaticArguments.java @@ -23,6 +23,7 @@ import com.datastax.oss.driver.api.core.metadata.NodeStateListener; import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; @@ -59,6 +60,7 @@ public static Builder builder() { private final NodeStateListener nodeStateListener; private final SchemaChangeListener schemaChangeListener; private final RequestTracker requestTracker; + private final RequestIdGenerator requestIdGenerator; private final Map localDatacenters; private final Map> nodeFilters; private final Map nodeDistanceEvaluators; @@ -77,6 +79,7 @@ private ProgrammaticArguments( @Nullable NodeStateListener nodeStateListener, @Nullable SchemaChangeListener schemaChangeListener, @Nullable RequestTracker requestTracker, + @Nullable RequestIdGenerator requestIdGenerator, @NonNull Map localDatacenters, @NonNull Map> nodeFilters, @NonNull Map nodeDistanceEvaluators, @@ -94,6 +97,7 @@ private ProgrammaticArguments( this.nodeStateListener = nodeStateListener; this.schemaChangeListener = schemaChangeListener; this.requestTracker = requestTracker; + this.requestIdGenerator = requestIdGenerator; this.localDatacenters = localDatacenters; this.nodeFilters = nodeFilters; 
this.nodeDistanceEvaluators = nodeDistanceEvaluators; @@ -128,6 +132,11 @@ public RequestTracker getRequestTracker() { return requestTracker; } + @Nullable + public RequestIdGenerator getRequestIdGenerator() { + return requestIdGenerator; + } + @NonNull public Map getLocalDatacenters() { return localDatacenters; @@ -196,6 +205,7 @@ public static class Builder { private NodeStateListener nodeStateListener; private SchemaChangeListener schemaChangeListener; private RequestTracker requestTracker; + private RequestIdGenerator requestIdGenerator; private ImmutableMap.Builder localDatacentersBuilder = ImmutableMap.builder(); private final ImmutableMap.Builder> nodeFiltersBuilder = ImmutableMap.builder(); @@ -294,6 +304,12 @@ public Builder addRequestTracker(@NonNull RequestTracker requestTracker) { return this; } + @NonNull + public Builder withRequestIdGenerator(@Nullable RequestIdGenerator requestIdGenerator) { + this.requestIdGenerator = requestIdGenerator; + return this; + } + @NonNull public Builder withLocalDatacenter( @NonNull String profileName, @NonNull String localDatacenter) { @@ -417,6 +433,7 @@ public ProgrammaticArguments build() { nodeStateListener, schemaChangeListener, requestTracker, + requestIdGenerator, localDatacentersBuilder.build(), nodeFiltersBuilder.build(), nodeDistanceEvaluatorsBuilder.build(), diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java index cbf896a0873..25500119047 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/session/SessionBuilder.java @@ -35,6 +35,7 @@ import com.datastax.oss.driver.api.core.metadata.schema.SchemaChangeListener; import com.datastax.oss.driver.api.core.ssl.ProgrammaticSslEngineFactory; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; +import 
com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry; @@ -47,6 +48,7 @@ import com.datastax.oss.driver.internal.core.context.InternalDriverContext; import com.datastax.oss.driver.internal.core.metadata.DefaultEndPoint; import com.datastax.oss.driver.internal.core.session.DefaultSession; +import com.datastax.oss.driver.internal.core.tracker.W3CContextRequestIdGenerator; import com.datastax.oss.driver.internal.core.util.concurrent.BlockingOperation; import com.datastax.oss.driver.internal.core.util.concurrent.CompletableFutures; import edu.umd.cs.findbugs.annotations.NonNull; @@ -83,6 +85,8 @@ @NotThreadSafe public abstract class SessionBuilder { + public static final String ASTRA_PAYLOAD_KEY = "traceparent"; + private static final Logger LOG = LoggerFactory.getLogger(SessionBuilder.class); @SuppressWarnings("unchecked") @@ -318,6 +322,17 @@ public SelfT addRequestTracker(@NonNull RequestTracker requestTracker) { return self; } + /** + * Registers a request ID generator. The driver will use the generated ID in the logs and + * optionally add it to the custom payload so that users can correlate logs about the same request + * from the Cassandra side. + */ + @NonNull + public SelfT withRequestIdGenerator(@NonNull RequestIdGenerator requestIdGenerator) { + this.programmaticArgumentsBuilder.withRequestIdGenerator(requestIdGenerator); + return self; + } + /** * Registers an authentication provider to use with the session.
* @@ -861,6 +876,13 @@ protected final CompletionStage buildDefaultSessionAsync() { List configContactPoints = defaultConfig.getStringList(DefaultDriverOption.CONTACT_POINTS, Collections.emptyList()); if (cloudConfigInputStream != null) { + // override request id generator, unless user has already set it + if (programmaticArguments.getRequestIdGenerator() == null) { + programmaticArgumentsBuilder.withRequestIdGenerator( + new W3CContextRequestIdGenerator(ASTRA_PAYLOAD_KEY)); + LOG.debug( + "A secure connect bundle is provided, using W3CContextRequestIdGenerator as request ID generator."); + } if (!programmaticContactPoints.isEmpty() || !configContactPoints.isEmpty()) { LOG.info( "Both a secure connect bundle and contact points were provided. These are mutually exclusive. The contact points from the secure bundle will have priority."); diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java new file mode 100644 index 00000000000..59ac3fdacf7 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.api.core.tracker; + +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +/** + * Interface responsible for generating request IDs. + * + *

          Note that all request IDs have a parent/child relationship. A "parent ID" can loosely be + * thought of as encompassing a sequence of a request + any attendant retries, speculative + * executions etc. Its scope is identical to that of a {@link + * com.datastax.oss.driver.internal.core.cql.CqlRequestHandler}. A "request ID" represents a single + * request within this larger scope. Note that a request corresponding to a request ID may be + * retried; in that case the retry count will be appended to the corresponding identifier in the + * logs. + */ +public interface RequestIdGenerator { + + String DEFAULT_PAYLOAD_KEY = "request-id"; + + /** + * Generates a unique identifier for the session request. This will be the identifier for the + * entire `session.execute()` call. This identifier will be added to logs, and propagated to + * request trackers. + * + * @return a unique identifier for the session request + */ + String getSessionRequestId(); + + /** + * Generates a unique identifier for the node request. This will be the identifier for the CQL + * request against a particular node. There can be one or more node requests for a single session + * request, due to retries or speculative executions. This identifier will be added to logs, and + * propagated to request trackers.
+ * + * @param statement the statement to be executed + * @param parentId the session request identifier + * @return a unique identifier for the node request + */ + String getNodeRequestId(@NonNull Request statement, @NonNull String parentId); + + default String getCustomPayloadKey() { + return DEFAULT_PAYLOAD_KEY; + } + + default Statement getDecoratedStatement( + @NonNull Statement statement, @NonNull String requestId) { + Map customPayload = + NullAllowingImmutableMap.builder() + .putAll(statement.getCustomPayload()) + .put(getCustomPayloadKey(), ByteBuffer.wrap(requestId.getBytes(StandardCharsets.UTF_8))) + .build(); + return statement.setCustomPayload(customPayload); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java index d29ee48d352..065b41e496a 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestTracker.java @@ -47,21 +47,22 @@ default void onSuccess( @NonNull Node node) {} /** - * Invoked each time a request succeeds. + * Invoked each time a session request succeeds. A session request is a `session.execute()` call * * @param latencyNanos the overall execution time (from the {@link Session#execute(Request, * GenericType) session.execute} call until the result is made available to the client). * @param executionProfile the execution profile of this request. * @param node the node that returned the successful response. 
- * @param requestLogPrefix the dedicated log prefix for this request + * @param sessionRequestLogPrefix the dedicated log prefix for this request */ default void onSuccess( @NonNull Request request, long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestLogPrefix) { - // If client doesn't override onSuccess with requestLogPrefix delegate call to the old method + @NonNull String sessionRequestLogPrefix) { + // If client doesn't override onSuccess with sessionRequestLogPrefix delegate call to the old + // method onSuccess(request, latencyNanos, executionProfile, node); } @@ -78,13 +79,13 @@ default void onError( @Nullable Node node) {} /** - * Invoked each time a request fails. + * Invoked each time a session request fails. A session request is a `session.execute()` call * * @param latencyNanos the overall execution time (from the {@link Session#execute(Request, * GenericType) session.execute} call until the error is propagated to the client). * @param executionProfile the execution profile of this request. * @param node the node that returned the error response, or {@code null} if the error occurred - * @param requestLogPrefix the dedicated log prefix for this request + * @param sessionRequestLogPrefix the dedicated log prefix for this request */ default void onError( @NonNull Request request, @@ -92,8 +93,9 @@ default void onError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @Nullable Node node, - @NonNull String requestLogPrefix) { - // If client doesn't override onError with requestLogPrefix delegate call to the old method + @NonNull String sessionRequestLogPrefix) { + // If client doesn't override onError with sessionRequestLogPrefix delegate call to the old + // method onError(request, error, latencyNanos, executionProfile, node); } @@ -110,14 +112,15 @@ default void onNodeError( @NonNull Node node) {} /** - * Invoked each time a request fails at the node level. 
Similar to {@link #onError(Request, - * Throwable, long, DriverExecutionProfile, Node, String)} but at a per node level. + * Invoked each time a node request fails. A node request is a CQL request sent to a particular + * node. There can be one or more node requests for a single session request, due to retries or + * speculative executions. * * @param latencyNanos the overall execution time (from the {@link Session#execute(Request, * GenericType) session.execute} call until the error is propagated to the client). * @param executionProfile the execution profile of this request. * @param node the node that returned the error response. - * @param requestLogPrefix the dedicated log prefix for this request + * @param nodeRequestLogPrefix the dedicated log prefix for this request */ default void onNodeError( @NonNull Request request, @@ -125,8 +128,9 @@ default void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestLogPrefix) { - // If client doesn't override onNodeError with requestLogPrefix delegate call to the old method + @NonNull String nodeRequestLogPrefix) { + // If client doesn't override onNodeError with nodeRequestLogPrefix delegate call to the old + // method onNodeError(request, error, latencyNanos, executionProfile, node); } @@ -142,22 +146,23 @@ default void onNodeSuccess( @NonNull Node node) {} /** - * Invoked each time a request succeeds at the node level. Similar to {@link #onSuccess(Request, - * long, DriverExecutionProfile, Node, String)} but at per node level. + * Invoked each time a node request succeeds. A node request is a CQL request sent to a particular + * node. There can be one or more node requests for a single session request, due to retries or + * speculative executions. * * @param latencyNanos the overall execution time (from the {@link Session#execute(Request, * GenericType) session.execute} call until the result is made available to the client). 
* @param executionProfile the execution profile of this request. * @param node the node that returned the successful response. - * @param requestLogPrefix the dedicated log prefix for this request + * @param nodeRequestLogPrefix the dedicated log prefix for this request */ default void onNodeSuccess( @NonNull Request request, long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestLogPrefix) { - // If client doesn't override onNodeSuccess with requestLogPrefix delegate call to the old + @NonNull String nodeRequestLogPrefix) { + // If client doesn't override onNodeSuccess with nodeRequestLogPrefix delegate call to the old // method onNodeSuccess(request, latencyNanos, executionProfile, node); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java index 0d7db27dfbe..3074bda2398 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/context/DefaultDriverContext.java @@ -44,6 +44,7 @@ import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.ssl.SslEngineFactory; import com.datastax.oss.driver.api.core.time.TimestampGenerator; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.api.core.type.codec.TypeCodec; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; @@ -221,6 +222,7 @@ public class DefaultDriverContext implements InternalDriverContext { private final LazyReference nodeStateListenerRef; private final LazyReference schemaChangeListenerRef; private final LazyReference requestTrackerRef; + private final LazyReference> requestIdGeneratorRef; private final 
LazyReference> authProviderRef; private final LazyReference> lifecycleListenersRef = new LazyReference<>("lifecycleListeners", this::buildLifecycleListeners, cycleDetector); @@ -282,6 +284,11 @@ public DefaultDriverContext( this.requestTrackerRef = new LazyReference<>( "requestTracker", () -> buildRequestTracker(requestTrackerFromBuilder), cycleDetector); + this.requestIdGeneratorRef = + new LazyReference<>( + "requestIdGenerator", + () -> buildRequestIdGenerator(programmaticArguments.getRequestIdGenerator()), + cycleDetector); this.sslEngineFactoryRef = new LazyReference<>( "sslEngineFactory", @@ -708,6 +715,17 @@ protected RequestTracker buildRequestTracker(RequestTracker requestTrackerFromBu } } + protected Optional buildRequestIdGenerator( + RequestIdGenerator requestIdGenerator) { + return (requestIdGenerator != null) + ? Optional.of(requestIdGenerator) + : Reflection.buildFromConfig( + this, + DefaultDriverOption.REQUEST_ID_GENERATOR_CLASS, + RequestIdGenerator.class, + "com.datastax.oss.driver.internal.core.tracker"); + } + protected Optional buildAuthProvider(AuthProvider authProviderFromBuilder) { return (authProviderFromBuilder != null) ? 
Optional.of(authProviderFromBuilder) @@ -972,6 +990,12 @@ public RequestTracker getRequestTracker() { return requestTrackerRef.get(); } + @NonNull + @Override + public Optional getRequestIdGenerator() { + return requestIdGeneratorRef.get(); + } + @Nullable @Override public String getLocalDatacenter(@NonNull String profileName) { diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java index 0808bdce63f..6842547b11a 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandler.java @@ -44,6 +44,7 @@ import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; import com.datastax.oss.driver.api.core.session.throttling.RequestThrottler; import com.datastax.oss.driver.api.core.session.throttling.Throttled; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.tracker.RequestTracker; import com.datastax.oss.driver.internal.core.adminrequest.ThrottledAdminRequestHandler; import com.datastax.oss.driver.internal.core.adminrequest.UnexpectedResponseException; @@ -59,6 +60,7 @@ import com.datastax.oss.driver.internal.core.tracker.RequestLogger; import com.datastax.oss.driver.internal.core.util.Loggers; import com.datastax.oss.driver.internal.core.util.collection.SimpleQueryPlan; +import com.datastax.oss.driver.shaded.guava.common.base.Joiner; import com.datastax.oss.protocol.internal.Frame; import com.datastax.oss.protocol.internal.Message; import com.datastax.oss.protocol.internal.ProtocolConstants; @@ -82,6 +84,7 @@ import java.util.AbstractMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Queue; import java.util.concurrent.CancellationException; import java.util.concurrent.CompletableFuture; @@ -100,7 +103,7 @@ public 
class CqlRequestHandler implements Throttled { private static final long NANOTIME_NOT_MEASURED_YET = -1; private final long startTimeNanos; - private final String logPrefix; + private final String handlerLogPrefix; private final Statement initialStatement; private final DefaultSession session; private final CqlIdentifier keyspace; @@ -125,6 +128,7 @@ public class CqlRequestHandler implements Throttled { private final List inFlightCallbacks; private final RequestThrottler throttler; private final RequestTracker requestTracker; + private final Optional requestIdGenerator; private final SessionMetricUpdater sessionMetricUpdater; private final DriverExecutionProfile executionProfile; @@ -132,15 +136,25 @@ public class CqlRequestHandler implements Throttled { // We don't use a map because nodes can appear multiple times. private volatile List> errors; + private final Joiner logPrefixJoiner = Joiner.on('|'); + private final String sessionName; + private final String sessionRequestId; + protected CqlRequestHandler( Statement statement, DefaultSession session, InternalDriverContext context, - String sessionLogPrefix) { + String sessionName) { this.startTimeNanos = System.nanoTime(); - this.logPrefix = sessionLogPrefix + "|" + this.hashCode(); - LOG.trace("[{}] Creating new handler for request {}", logPrefix, statement); + this.requestIdGenerator = context.getRequestIdGenerator(); + this.sessionName = sessionName; + this.sessionRequestId = + this.requestIdGenerator + .map(RequestIdGenerator::getSessionRequestId) + .orElse(Integer.toString(this.hashCode())); + this.handlerLogPrefix = logPrefixJoiner.join(sessionName, sessionRequestId); + LOG.trace("[{}] Creating new handler for request {}", handlerLogPrefix, statement); this.initialStatement = statement; this.session = session; @@ -155,7 +169,7 @@ protected CqlRequestHandler( context.getRequestThrottler().signalCancel(this); } } catch (Throwable t2) { - Loggers.warnWithException(LOG, "[{}] Uncaught exception", logPrefix, 
t2); + Loggers.warnWithException(LOG, "[{}] Uncaught exception", handlerLogPrefix, t2); } return null; }); @@ -250,9 +264,9 @@ private void sendRequest( } Node node = retriedNode; DriverChannel channel = null; - if (node == null || (channel = session.getChannel(node, logPrefix)) == null) { + if (node == null || (channel = session.getChannel(node, handlerLogPrefix)) == null) { while (!result.isDone() && (node = queryPlan.poll()) != null) { - channel = session.getChannel(node, logPrefix); + channel = session.getChannel(node, handlerLogPrefix); if (channel != null) { break; } else { @@ -267,6 +281,16 @@ private void sendRequest( setFinalError(statement, AllNodesFailedException.fromErrors(this.errors), null, -1); } } else { + Statement finalStatement = statement; + String nodeRequestId = + this.requestIdGenerator + .map((g) -> g.getNodeRequestId(finalStatement, sessionRequestId)) + .orElse(Integer.toString(this.hashCode())); + statement = + this.requestIdGenerator + .map((g) -> g.getDecoratedStatement(finalStatement, nodeRequestId)) + .orElse(finalStatement); + NodeResponseCallback nodeResponseCallback = new NodeResponseCallback( statement, @@ -276,7 +300,7 @@ private void sendRequest( currentExecutionIndex, retryCount, scheduleNextExecution, - logPrefix); + logPrefixJoiner.join(this.sessionName, nodeRequestId, currentExecutionIndex)); Message message = Conversions.toMessage(statement, executionProfile, context); channel .write(message, statement.isTracing(), statement.getCustomPayload(), nodeResponseCallback) @@ -335,9 +359,17 @@ private void setFinalResult( totalLatencyNanos = completionTimeNanos - startTimeNanos; long nodeLatencyNanos = completionTimeNanos - callback.nodeStartTimeNanos; requestTracker.onNodeSuccess( - callback.statement, nodeLatencyNanos, executionProfile, callback.node, logPrefix); + callback.statement, + nodeLatencyNanos, + executionProfile, + callback.node, + handlerLogPrefix); requestTracker.onSuccess( - callback.statement, totalLatencyNanos, 
executionProfile, callback.node, logPrefix); + callback.statement, + totalLatencyNanos, + executionProfile, + callback.node, + handlerLogPrefix); } if (sessionMetricUpdater.isEnabled( DefaultSessionMetric.CQL_REQUESTS, executionProfile.getName())) { @@ -439,7 +471,8 @@ private void setFinalError(Statement statement, Throwable error, Node node, i cancelScheduledTasks(); if (!(requestTracker instanceof NoopRequestTracker)) { long latencyNanos = System.nanoTime() - startTimeNanos; - requestTracker.onError(statement, error, latencyNanos, executionProfile, node, logPrefix); + requestTracker.onError( + statement, error, latencyNanos, executionProfile, node, handlerLogPrefix); } if (error instanceof DriverTimeoutException) { throttler.signalTimeout(this); @@ -489,7 +522,7 @@ private NodeResponseCallback( this.execution = execution; this.retryCount = retryCount; this.scheduleNextExecution = scheduleNextExecution; - this.logPrefix = logPrefix + "|" + execution; + this.logPrefix = logPrefix; } // this gets invoked once the write completes. 
@@ -567,7 +600,7 @@ private void scheduleSpeculativeExecution(int index, long delay) { if (!result.isDone()) { LOG.trace( "[{}] Starting speculative execution {}", - CqlRequestHandler.this.logPrefix, + CqlRequestHandler.this.handlerLogPrefix, index); activeExecutionsCount.incrementAndGet(); startedSpeculativeExecutionsCount.incrementAndGet(); diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java index 9c31b606f18..8e1c1fe5039 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/loadbalancing/DefaultLoadBalancingPolicy.java @@ -246,7 +246,7 @@ public void onNodeSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { updateResponseTimes(node); } @@ -257,7 +257,7 @@ public void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { updateResponseTimes(node); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java index d4d20f3eb78..6fe2ba059bd 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/MultiplexingRequestTracker.java @@ -82,10 +82,12 @@ public void onSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String sessionRequestLogPrefix) { invokeTrackers( - tracker -> 
tracker.onSuccess(request, latencyNanos, executionProfile, node, logPrefix), - logPrefix, + tracker -> + tracker.onSuccess( + request, latencyNanos, executionProfile, node, sessionRequestLogPrefix), + sessionRequestLogPrefix, "onSuccess"); } @@ -96,10 +98,12 @@ public void onError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @Nullable Node node, - @NonNull String logPrefix) { + @NonNull String sessionRequestLogPrefix) { invokeTrackers( - tracker -> tracker.onError(request, error, latencyNanos, executionProfile, node, logPrefix), - logPrefix, + tracker -> + tracker.onError( + request, error, latencyNanos, executionProfile, node, sessionRequestLogPrefix), + sessionRequestLogPrefix, "onError"); } @@ -109,10 +113,12 @@ public void onNodeSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { invokeTrackers( - tracker -> tracker.onNodeSuccess(request, latencyNanos, executionProfile, node, logPrefix), - logPrefix, + tracker -> + tracker.onNodeSuccess( + request, latencyNanos, executionProfile, node, nodeRequestLogPrefix), + nodeRequestLogPrefix, "onNodeSuccess"); } @@ -123,11 +129,12 @@ public void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { invokeTrackers( tracker -> - tracker.onNodeError(request, error, latencyNanos, executionProfile, node, logPrefix), - logPrefix, + tracker.onNodeError( + request, error, latencyNanos, executionProfile, node, nodeRequestLogPrefix), + nodeRequestLogPrefix, "onNodeError"); } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java index 09ac27e5e75..3821c6ace2d 100644 --- 
a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/NoopRequestTracker.java @@ -42,7 +42,7 @@ public void onSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestPrefix) { + @NonNull String sessionRequestLogPrefix) { // nothing to do } @@ -53,7 +53,7 @@ public void onError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, Node node, - @NonNull String requestPrefix) { + @NonNull String sessionRequestLogPrefix) { // nothing to do } @@ -64,7 +64,7 @@ public void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestPrefix) { + @NonNull String nodeRequestLogPrefix) { // nothing to do } @@ -74,7 +74,7 @@ public void onNodeSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String requestPrefix) { + @NonNull String nodeRequestLogPrefix) { // nothing to do } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java index 235ef051b40..f242ff89c54 100644 --- a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/RequestLogger.java @@ -86,7 +86,7 @@ public void onSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String sessionRequestLogPrefix) { boolean successEnabled = executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED, false); @@ -129,7 +129,7 @@ public void onSuccess( showValues, maxValues, maxValueLength, - logPrefix); + sessionRequestLogPrefix); } @Override @@ -139,7 +139,7 @@ public void onError( long 
latencyNanos, @NonNull DriverExecutionProfile executionProfile, Node node, - @NonNull String logPrefix) { + @NonNull String sessionRequestLogPrefix) { if (!executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED, false)) { return; @@ -173,7 +173,7 @@ public void onError( maxValues, maxValueLength, showStackTraces, - logPrefix); + sessionRequestLogPrefix); } @Override @@ -183,7 +183,7 @@ public void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { // Nothing to do } @@ -193,7 +193,7 @@ public void onNodeSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { // Nothing to do } diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/UuidRequestIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/UuidRequestIdGenerator.java new file mode 100644 index 00000000000..cc07d6717f4 --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/UuidRequestIdGenerator.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.tracker; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; +import com.datastax.oss.driver.api.core.uuid.Uuids; +import edu.umd.cs.findbugs.annotations.NonNull; + +public class UuidRequestIdGenerator implements RequestIdGenerator { + public UuidRequestIdGenerator(DriverContext context) {} + + /** Generates a random v4 UUID. */ + @Override + public String getSessionRequestId() { + return Uuids.random().toString(); + } + + /** + * {session-request-id}-{random-uuid} All node requests for a session request will have the same + * session request id + */ + @Override + public String getNodeRequestId(@NonNull Request statement, @NonNull String parentId) { + return parentId + "-" + Uuids.random(); + } +} diff --git a/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/W3CContextRequestIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/W3CContextRequestIdGenerator.java new file mode 100644 index 00000000000..fe15b93bc8e --- /dev/null +++ b/core/src/main/java/com/datastax/oss/driver/internal/core/tracker/W3CContextRequestIdGenerator.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.tracker; + +import com.datastax.oss.driver.api.core.context.DriverContext; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; +import com.datastax.oss.driver.shaded.guava.common.io.BaseEncoding; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.security.SecureRandom; +import java.util.Random; + +public class W3CContextRequestIdGenerator implements RequestIdGenerator { + + private final Random random = new SecureRandom(); + private final BaseEncoding baseEncoding = BaseEncoding.base16().lowerCase(); + private final String payloadKey; + + public W3CContextRequestIdGenerator(DriverContext context) { + payloadKey = RequestIdGenerator.super.getCustomPayloadKey(); + } + + public W3CContextRequestIdGenerator(String payloadKey) { + this.payloadKey = payloadKey; + } + + /** Random 16 bytes, e.g. "4bf92f3577b34da6a3ce929d0e0e4736" */ + @Override + public String getSessionRequestId() { + byte[] bytes = new byte[16]; + random.nextBytes(bytes); + return baseEncoding.encode(bytes); + } + + /** + * Following the format of W3C "traceparent" spec, + * https://www.w3.org/TR/trace-context/#traceparent-header-field-values e.g. 
+ * "00-4bf92f3577b34da6a3ce929d0e0e4736-a3ce929d0e0e4736-01" All node requests in the same session + * request share the same "trace-id" field value + */ + @Override + public String getNodeRequestId(@NonNull Request statement, @NonNull String parentId) { + byte[] bytes = new byte[8]; + random.nextBytes(bytes); + return String.format("00-%s-%s-00", parentId, baseEncoding.encode(bytes)); + } + + @Override + public String getCustomPayloadKey() { + return this.payloadKey; + } +} diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 3c6851a48ee..741b1d97654 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -918,6 +918,13 @@ datastax-java-driver { } } + advanced.request-id { + generator { + # The component that generates a unique identifier for each CQL request, and possibly write the id to the custom payload . + // class = W3CContextRequestIdGenerator + } + } + # A session-wide component that controls the rate at which requests are executed. 
# # Implementations vary, but throttlers generally track a metric that represents the level of diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java index 9d86302aabf..6ecd6111992 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java @@ -61,6 +61,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Queue; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; @@ -168,6 +169,8 @@ protected RequestHandlerTestHarness(Builder builder) { when(context.getRequestThrottler()).thenReturn(new PassThroughRequestThrottler(context)); when(context.getRequestTracker()).thenReturn(new NoopRequestTracker(context)); + + when(context.getRequestIdGenerator()).thenReturn(Optional.empty()); } public DefaultSession getSession() { diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/RequestIdGeneratorTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/RequestIdGeneratorTest.java new file mode 100644 index 00000000000..fb1883e125f --- /dev/null +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/tracker/RequestIdGeneratorTest.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.internal.core.tracker; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; +import com.datastax.oss.driver.internal.core.context.InternalDriverContext; +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.Strict.class) +public class RequestIdGeneratorTest { + @Mock private InternalDriverContext context; + @Mock private Statement statement; + + @Test + public void uuid_generator_should_generate() { + // given + UuidRequestIdGenerator generator = new UuidRequestIdGenerator(context); + // when + String parentId = generator.getSessionRequestId(); + String requestId = generator.getNodeRequestId(statement, parentId); + // then + // e.g. "550e8400-e29b-41d4-a716-446655440000", which is 36 characters long + assertThat(parentId.length()).isEqualTo(36); + // e.g. "550e8400-e29b-41d4-a716-446655440000-550e8400-e29b-41d4-a716-446655440000", which is 73 + // characters long + assertThat(requestId.length()).isEqualTo(73); + } + + @Test + public void w3c_generator_should_generate() { + // given + W3CContextRequestIdGenerator generator = new W3CContextRequestIdGenerator(context); + // when + String parentId = generator.getSessionRequestId(); + String requestId = generator.getNodeRequestId(statement, parentId); + // then + // e.g. 
"4bf92f3577b34da6a3ce929d0e0e4736", which is 32 characters long + assertThat(parentId.length()).isEqualTo(32); + // According to W3C "traceparent" spec, + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + // e.g. "00-4bf92f3577b34da6a3ce929d0e0e4736-a3ce929d0e0e4736-01", which 55 characters long + assertThat(requestId.length()).isEqualTo(55); + } + + @Test + public void w3c_generator_default_payloadkey() { + W3CContextRequestIdGenerator w3cGenerator = new W3CContextRequestIdGenerator(context); + assertThat(w3cGenerator.getCustomPayloadKey()) + .isEqualTo(RequestIdGenerator.DEFAULT_PAYLOAD_KEY); + } + + @Test + public void w3c_generator_provided_payloadkey() { + String someString = RandomStringUtils.random(12); + W3CContextRequestIdGenerator w3cGenerator = new W3CContextRequestIdGenerator(someString); + assertThat(w3cGenerator.getCustomPayloadKey()).isEqualTo(someString); + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java new file mode 100644 index 00000000000..2848a8fb629 --- /dev/null +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.datastax.oss.driver.core.tracker; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.datastax.oss.driver.api.core.CqlSession; +import com.datastax.oss.driver.api.core.config.DefaultDriverOption; +import com.datastax.oss.driver.api.core.config.DriverConfigLoader; +import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.Statement; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; +import com.datastax.oss.driver.api.testinfra.ccm.CcmRule; +import com.datastax.oss.driver.api.testinfra.session.SessionUtils; +import com.datastax.oss.driver.categories.ParallelizableTests; +import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +@Category(ParallelizableTests.class) +public class RequestIdGeneratorIT { + private CcmRule ccmRule = CcmRule.getInstance(); + + @Rule public TestRule chain = RuleChain.outerRule(ccmRule); + + @Test + public void should_write_uuid_to_custom_payload_with_key() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString(DefaultDriverOption.REQUEST_ID_GENERATOR_CLASS, "UuidRequestIdGenerator") + .build(); + try 
(CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + String query = "SELECT * FROM system.local"; + ResultSet rs = session.execute(query); + ByteBuffer id = rs.getExecutionInfo().getRequest().getCustomPayload().get("request-id"); + assertThat(id.remaining()).isEqualTo(73); + } + } + + @Test + public void should_write_default_request_id_to_custom_payload_with_key() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString( + DefaultDriverOption.REQUEST_ID_GENERATOR_CLASS, "W3CContextRequestIdGenerator") + .build(); + try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + String query = "SELECT * FROM system.local"; + ResultSet rs = session.execute(query); + ByteBuffer id = rs.getExecutionInfo().getRequest().getCustomPayload().get("request-id"); + assertThat(id.remaining()).isEqualTo(55); + } + } + + @Test + public void should_use_customized_request_id_generator() { + RequestIdGenerator myRequestIdGenerator = + new RequestIdGenerator() { + @Override + public String getSessionRequestId() { + return "123"; + } + + @Override + public String getNodeRequestId(@NonNull Request statement, @NonNull String parentId) { + return "456"; + } + + @Override + public Statement getDecoratedStatement( + @NonNull Statement statement, @NonNull String requestId) { + Map customPayload = + NullAllowingImmutableMap.builder() + .putAll(statement.getCustomPayload()) + .put("trace_key", ByteBuffer.wrap(requestId.getBytes(StandardCharsets.UTF_8))) + .build(); + return statement.setCustomPayload(customPayload); + } + }; + try (CqlSession session = + (CqlSession) + SessionUtils.baseBuilder() + .addContactEndPoints(ccmRule.getContactPoints()) + .withRequestIdGenerator(myRequestIdGenerator) + .build()) { + String query = "SELECT * FROM system.local"; + ResultSet rs = session.execute(query); + ByteBuffer id = rs.getExecutionInfo().getRequest().getCustomPayload().get("trace_key"); + 
assertThat(id).isEqualTo(ByteBuffer.wrap("456".getBytes(StandardCharsets.UTF_8))); + } + } + + @Test + public void should_not_write_id_to_custom_payload_when_key_is_not_set() { + DriverConfigLoader loader = SessionUtils.configLoaderBuilder().build(); + try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + String query = "SELECT * FROM system.local"; + ResultSet rs = session.execute(query); + assertThat(rs.getExecutionInfo().getRequest().getCustomPayload().get("trace_key")).isNull(); + } + } +} diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java index eae98339637..8eb2fb80a73 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestNodeLoggerExample.java @@ -39,7 +39,7 @@ public void onNodeError( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { if (!executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOGGER_ERROR_ENABLED)) { return; } @@ -66,7 +66,7 @@ public void onNodeError( maxValues, maxValueLength, showStackTraces, - logPrefix); + nodeRequestLogPrefix); } @Override @@ -75,7 +75,7 @@ public void onNodeSuccess( long latencyNanos, @NonNull DriverExecutionProfile executionProfile, @NonNull Node node, - @NonNull String logPrefix) { + @NonNull String nodeRequestLogPrefix) { boolean successEnabled = executionProfile.getBoolean(DefaultDriverOption.REQUEST_LOGGER_SUCCESS_ENABLED); boolean slowEnabled = @@ -114,6 +114,6 @@ public void onNodeSuccess( showValues, maxValues, maxValueLength, - logPrefix); + nodeRequestLogPrefix); } } diff --git a/manual/core/request_id/README.md b/manual/core/request_id/README.md new file mode 100644 index 
00000000000..a766a4419af --- /dev/null +++ b/manual/core/request_id/README.md @@ -0,0 +1,48 @@ + + +## Request Id + +### Quick overview + +Users can inject an identifier for each individual CQL request, and such ID can be written into the [custom payload](https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v5.spec) to +correlate a request across the driver and the Apache Cassandra server. + +A request ID generator needs to generate both: +- Session request ID: an identifier for an entire session.execute() call +- Node request ID: an identifier for the execution of a CQL statement against a particular node. There can be one or more node requests for a single session request, due to retries or speculative executions. + +Usage: +* Inject ID generator: set the desired `RequestIdGenerator` in `advanced.request-id.generator.class`. +* Add ID to custom payload: the default behavior of a `RequestIdGenerator` is to add the request ID into the custom payload with the key `request-id`. Override `RequestIdGenerator.getDecoratedStatement` to customize the behavior. + +### Request Id Generator Configuration + +Request ID generator can be declared in the [configuration](../configuration/) as follows: + +``` +datastax-java-driver.advanced.request-id.generator { + class = com.example.app.MyGenerator +} +``` + +To register your own request ID generator, specify the name of the class +that implements `RequestIdGenerator`. + +The generated ID will be added to the log message of `CqlRequestHandler`, and propagated to other classes, e.g. the request trackers.
\ No newline at end of file From 104751c166402274f46135b9d367bee1cfdd124d Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Tue, 14 Oct 2025 15:08:52 -0500 Subject: [PATCH 969/979] Changelog entries for 4.19.1 patch by Bret McGuire; reviewed by Andy Tolbert --- changelog/README.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 08634bcb834..9e223318e65 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -21,6 +21,24 @@ under the License. +### 4.19.1 + +- [improvement] CASSJAVA-97: Let users inject an ID for each request and write to the custom payload +- [improvement] CASSJAVA-92: Add Local DC to driver connection info and provide visibility with nodetool clientstats +- [bug] PR 2025: Eliminate lock in ConcurrencyLimitingRequestThrottler +- [improvement] CASSJAVA-89: Fix deprecated table configs in Cassandra 5 +- [improvement] PR 2028: Remove unnecessary locking in DefaultNettyOptions +- [improvement] CASSJAVA-102: Fix revapi spurious complaints about optional dependencies +- [improvement] PR 2013: Add SubnetAddressTranslator +- [improvement] CASSJAVA-68: Improve DefaultCodecRegistry.CacheKey#hashCode() to eliminate Object[] allocation +- [improvement] PR 1989: Bump Jackson version to la(te)st 2.13.x, 2.13.5 +- [improvement] CASSJAVA-76: Make guava an optional dependency of java-driver-guava-shaded +- [bug] PR 2035: Prevent long overflow in SNI address resolution +- [improvement] CASSJAVA-77: 4.x: Upgrade Netty to 4.1.119 +- [improvement] CASSJAVA-40: Driver testing against Java 21 +- [improvement] CASSJAVA-90: Update native-protocol +- [improvement] CASSJAVA-80: Support configuration to disable DNS reverse-lookups for SAN validation + ### 4.19.0 - [bug] JAVA-3055: Prevent PreparedStatement cache to be polluted if a request is cancelled. 
From 77b2baebebfa208e2a7a29f6b09b6cb86e3a4b61 Mon Sep 17 00:00:00 2001 From: Andy Tolbert <6889771+tolbertam@users.noreply.github.com> Date: Tue, 14 Oct 2025 15:32:38 -0500 Subject: [PATCH 970/979] [maven-release-plugin] prepare release 4.19.1 --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index f03317edc03..05e9d74dc5c 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-core-shaded - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-mapper-processor - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-mapper-runtime - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-query-builder - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-guava-shaded - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-test-infra - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-metrics-micrometer - 4.19.1-SNAPSHOT + 4.19.1 org.apache.cassandra java-driver-metrics-microprofile - 4.19.1-SNAPSHOT + 4.19.1 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 451db1dcd1b..4cc8197b6dd 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-core-shaded Apache Cassandra Java 
Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index b8d7d5c2d3b..9ed02b81dd7 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index e930f4c0610..5328b4bbd36 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 1c762074673..c4b5eb38026 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 8f7740e148f..4c5f04de515 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 15f082e6864..a473320ea44 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.19.1-SNAPSHOT + 4.19.1 java-driver-examples Apache Cassandra Java Driver - examples. 
diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index ed37e861a96..62c1c5fc799 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index b489076c257..16661191427 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 519f1411ce9..d7aaa7f9d67 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 3a767c2a352..af2936a3805 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 091bb5f3e93..3dd89d1737b 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 5163b5366f4..cca6eafc603 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 4b41f790145..e60981f7fc8 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra 
java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 2cfeb65e757..355449acf0e 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1055,7 +1055,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.19.1 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 0bc46f9bb91..1860cf65d12 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index b0808757ce4..e61f4f2826b 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1-SNAPSHOT + 4.19.1 java-driver-test-infra bundle From f63108175fd22e2db893c57b0975bc518977ab4b Mon Sep 17 00:00:00 2001 From: Andy Tolbert <6889771+tolbertam@users.noreply.github.com> Date: Tue, 14 Oct 2025 15:32:41 -0500 Subject: [PATCH 971/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 05e9d74dc5c..fab36abad4c 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 
+23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-core-shaded - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-mapper-processor - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-mapper-runtime - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-query-builder - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-guava-shaded - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-test-infra - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-metrics-micrometer - 4.19.1 + 4.19.2-SNAPSHOT org.apache.cassandra java-driver-metrics-microprofile - 4.19.1 + 4.19.2-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 4cc8197b6dd..617664eec97 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index 9ed02b81dd7..e750b791d3b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index 5328b4bbd36..27d6d026a4d 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index c4b5eb38026..fbf8cd21076 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git 
a/distribution/pom.xml b/distribution/pom.xml index 4c5f04de515..498d5dc603a 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index a473320ea44..df1e2b2613b 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.19.1 + 4.19.2-SNAPSHOT java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index 62c1c5fc799..ad581bc0f98 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 16661191427..8360c8a211f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index d7aaa7f9d67..5368c24c2b6 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index af2936a3805..a21254da908 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 3dd89d1737b..08211d04c2b 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT ../../ 
java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index cca6eafc603..95c1ca9bb42 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index e60981f7fc8..abb54e159f6 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 355449acf0e..4e3d7006169 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1055,7 +1055,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.19.1 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index 1860cf65d12..e1db8485799 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index e61f4f2826b..1a73cde23cd 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.1 + 4.19.2-SNAPSHOT java-driver-test-infra bundle From 8f69c24d6cd6db75c6811fc32ee9ab39121ecaa6 Mon Sep 17 00:00:00 2001 From: janehe Date: Mon, 10 Nov 2025 12:50:59 -0800 Subject: [PATCH 972/979] CASSJAVA-116: Retry or Speculative Execution with RequestIdGenerator throws "Duplicate Key" patch by Jane He; reviewed by Andy Tolbert and Lukasz Atoniak for CASSJAVA-116 --- .../api/core/tracker/RequestIdGenerator.java | 29 +++++---- 
.../core/cql/CqlRequestHandlerRetryTest.java | 63 +++++++++++++++++++ .../core/cql/RequestHandlerTestHarness.java | 10 ++- .../core/tracker/RequestIdGeneratorIT.java | 21 ++++++- 4 files changed, 110 insertions(+), 13 deletions(-) diff --git a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java index 59ac3fdacf7..21db3793b01 100644 --- a/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java +++ b/core/src/main/java/com/datastax/oss/driver/api/core/tracker/RequestIdGenerator.java @@ -19,20 +19,21 @@ import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.session.Request; -import com.datastax.oss.protocol.internal.util.collection.NullAllowingImmutableMap; import edu.umd.cs.findbugs.annotations.NonNull; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; /** * Interface responsible for generating request IDs. * - *

          Note that all request IDs have a parent/child relationship. A "parent ID" can loosely be - * thought of as encompassing a sequence of a request + any attendant retries, speculative + *

          Note that all request IDs have a parent/child relationship. A "session request ID" can loosely + * be thought of as encompassing a sequence of a request + any attendant retries, speculative * executions etc. It's scope is identical to that of a {@link - * com.datastax.oss.driver.internal.core.cql.CqlRequestHandler}. A "request ID" represents a single - * request within this larger scope. Note that a request corresponding to a request ID may be + * com.datastax.oss.driver.internal.core.cql.CqlRequestHandler}. A "node request ID" represents a + * single request within this larger scope. Note that a request corresponding to a request ID may be * retried; in that case the retry count will be appended to the corresponding identifier in the * logs. */ @@ -67,11 +68,17 @@ default String getCustomPayloadKey() { default Statement getDecoratedStatement( @NonNull Statement statement, @NonNull String requestId) { - Map customPayload = - NullAllowingImmutableMap.builder() - .putAll(statement.getCustomPayload()) - .put(getCustomPayloadKey(), ByteBuffer.wrap(requestId.getBytes(StandardCharsets.UTF_8))) - .build(); - return statement.setCustomPayload(customPayload); + + Map existing = new HashMap<>(statement.getCustomPayload()); + String key = getCustomPayloadKey(); + + // Add or overwrite + existing.put(key, ByteBuffer.wrap(requestId.getBytes(StandardCharsets.UTF_8))); + + // Allowing null key/values + // Wrap a map inside to be immutable without instanciating a new map + Map unmodifiableMap = Collections.unmodifiableMap(existing); + + return statement.setCustomPayload(unmodifiableMap); } } diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java index bea52891c18..ccac873c616 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java +++ 
b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/CqlRequestHandlerRetryTest.java @@ -48,6 +48,8 @@ import com.datastax.oss.driver.api.core.servererrors.ServerError; import com.datastax.oss.driver.api.core.servererrors.UnavailableException; import com.datastax.oss.driver.api.core.servererrors.WriteTimeoutException; +import com.datastax.oss.driver.api.core.session.Request; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.protocol.internal.ProtocolConstants; import com.datastax.oss.protocol.internal.response.Error; import com.datastax.oss.protocol.internal.response.error.ReadTimeout; @@ -55,9 +57,13 @@ import com.datastax.oss.protocol.internal.response.error.WriteTimeout; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.UseDataProvider; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.Iterator; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; public class CqlRequestHandlerRetryTest extends CqlRequestHandlerTestBase { @@ -384,6 +390,63 @@ public void should_rethrow_error_if_not_idempotent_and_error_unsafe_or_policy_re } } + @Test + @UseDataProvider("failureAndIdempotent") + public void should_not_fail_with_duplicate_key_when_retrying_with_request_id_generator( + FailureScenario failureScenario, boolean defaultIdempotence, Statement statement) { + + // Create a RequestIdGenerator that uses the same key as the statement's custom payload + RequestIdGenerator requestIdGenerator = + new RequestIdGenerator() { + private AtomicInteger counter = new AtomicInteger(0); + + @Override + public String getSessionRequestId() { + return "session-123"; + } + + @Override + public String getNodeRequestId(@NonNull Request request, @NonNull String parentId) { + return parentId + "-" + 
counter.getAndIncrement(); + } + }; + + RequestHandlerTestHarness.Builder harnessBuilder = + RequestHandlerTestHarness.builder() + .withDefaultIdempotence(defaultIdempotence) + .withRequestIdGenerator(requestIdGenerator); + failureScenario.mockRequestError(harnessBuilder, node1); + harnessBuilder.withResponse(node2, defaultFrameOf(singleRow())); + + try (RequestHandlerTestHarness harness = harnessBuilder.build()) { + failureScenario.mockRetryPolicyVerdict( + harness.getContext().getRetryPolicy(anyString()), RetryVerdict.RETRY_NEXT); + + CompletionStage resultSetFuture = + new CqlRequestHandler(statement, harness.getSession(), harness.getContext(), "test") + .handle(); + + // The test should succeed without throwing a duplicate key exception + assertThatStage(resultSetFuture) + .isSuccess( + resultSet -> { + Iterator rows = resultSet.currentPage().iterator(); + assertThat(rows.hasNext()).isTrue(); + assertThat(rows.next().getString("message")).isEqualTo("hello, world"); + + ExecutionInfo executionInfo = resultSet.getExecutionInfo(); + assertThat(executionInfo.getCoordinator()).isEqualTo(node2); + assertThat(executionInfo.getErrors()).hasSize(1); + assertThat(executionInfo.getErrors().get(0).getKey()).isEqualTo(node1); + + // Verify that the custom payload still contains the request ID key + // (either the original value or the generated one, depending on implementation) + assertThat(executionInfo.getRequest().getCustomPayload().get("request-id")) + .isEqualTo(ByteBuffer.wrap("session-123-1".getBytes(StandardCharsets.UTF_8))); + }); + } + } + /** * Sets up the mocks to simulate an error from a node, and make the retry policy return a given * decision for that error. 
diff --git a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java index 6ecd6111992..6a7657d5809 100644 --- a/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java +++ b/core/src/test/java/com/datastax/oss/driver/internal/core/cql/RequestHandlerTestHarness.java @@ -37,6 +37,7 @@ import com.datastax.oss.driver.api.core.session.Session; import com.datastax.oss.driver.api.core.specex.SpeculativeExecutionPolicy; import com.datastax.oss.driver.api.core.time.TimestampGenerator; +import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; import com.datastax.oss.driver.api.core.type.codec.registry.CodecRegistry; import com.datastax.oss.driver.internal.core.DefaultConsistencyLevelRegistry; import com.datastax.oss.driver.internal.core.ProtocolFeature; @@ -170,7 +171,8 @@ protected RequestHandlerTestHarness(Builder builder) { when(context.getRequestTracker()).thenReturn(new NoopRequestTracker(context)); - when(context.getRequestIdGenerator()).thenReturn(Optional.empty()); + when(context.getRequestIdGenerator()) + .thenReturn(Optional.ofNullable(builder.requestIdGenerator)); } public DefaultSession getSession() { @@ -203,6 +205,7 @@ public static class Builder { private final List poolBehaviors = new ArrayList<>(); private boolean defaultIdempotence; private ProtocolVersion protocolVersion; + private RequestIdGenerator requestIdGenerator; /** * Sets the given node as the next one in the query plan; an empty pool will be simulated when @@ -258,6 +261,11 @@ public Builder withProtocolVersion(ProtocolVersion protocolVersion) { return this; } + public Builder withRequestIdGenerator(RequestIdGenerator requestIdGenerator) { + this.requestIdGenerator = requestIdGenerator; + return this; + } + /** * Sets the given node as the next one in the query plan; the test code is responsible of * calling the methods 
on the returned object to complete the write and the query. diff --git a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java index 2848a8fb629..516a62bb1f7 100644 --- a/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java +++ b/integration-tests/src/test/java/com/datastax/oss/driver/core/tracker/RequestIdGeneratorIT.java @@ -17,12 +17,14 @@ */ package com.datastax.oss.driver.core.tracker; +import static com.datastax.oss.driver.Assertions.assertThatStage; import static org.assertj.core.api.Assertions.assertThat; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.config.DefaultDriverOption; import com.datastax.oss.driver.api.core.config.DriverConfigLoader; import com.datastax.oss.driver.api.core.cql.ResultSet; +import com.datastax.oss.driver.api.core.cql.SimpleStatement; import com.datastax.oss.driver.api.core.cql.Statement; import com.datastax.oss.driver.api.core.session.Request; import com.datastax.oss.driver.api.core.tracker.RequestIdGenerator; @@ -119,7 +121,24 @@ public void should_not_write_id_to_custom_payload_when_key_is_not_set() { try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { String query = "SELECT * FROM system.local"; ResultSet rs = session.execute(query); - assertThat(rs.getExecutionInfo().getRequest().getCustomPayload().get("trace_key")).isNull(); + assertThat(rs.getExecutionInfo().getRequest().getCustomPayload().get("request-id")).isNull(); + } + } + + @Test + public void should_succeed_with_null_value_in_custom_payload() { + DriverConfigLoader loader = + SessionUtils.configLoaderBuilder() + .withString( + DefaultDriverOption.REQUEST_ID_GENERATOR_CLASS, "W3CContextRequestIdGenerator") + .build(); + try (CqlSession session = SessionUtils.newSession(ccmRule, loader)) { + String query = "SELECT * FROM 
system.local"; + Map customPayload = + new NullAllowingImmutableMap.Builder(1).put("my_key", null).build(); + SimpleStatement statement = + SimpleStatement.newInstance(query).setCustomPayload(customPayload); + assertThatStage(session.executeAsync(statement)).isSuccess(); } } } From 19c60c08b9eecbcfba3c61564bbe15bf3115089a Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Wed, 12 Nov 2025 18:31:56 -0600 Subject: [PATCH 973/979] Changelog updates for 4.19.2 patch by Bret McGuire; reviewed by Lukasz Antoniak and Andy Tolbert reference: https://github.com/apache/cassandra-java-driver/pull/2062 --- changelog/README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/changelog/README.md b/changelog/README.md index 9e223318e65..b01c3db3bf9 100644 --- a/changelog/README.md +++ b/changelog/README.md @@ -21,6 +21,10 @@ under the License. +### 4.19.2 + +- [bug] CASSJAVA-116: Retry or Speculative Execution with RequestIdGenerator throws "Duplicate Key" + ### 4.19.1 - [improvement] CASSJAVA-97: Let users inject an ID for each request and write to the custom payload From 30b05bd7991617b212b8089787fe9ca829d00154 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 13 Nov 2025 12:02:08 -0600 Subject: [PATCH 974/979] [maven-release-plugin] prepare release 4.19.2 --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index fab36abad4c..2e6e476d02a 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 
4.19.2-SNAPSHOT + 4.19.2 java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-core-shaded - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-mapper-processor - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-mapper-runtime - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-query-builder - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-guava-shaded - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-test-infra - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-metrics-micrometer - 4.19.2-SNAPSHOT + 4.19.2 org.apache.cassandra java-driver-metrics-microprofile - 4.19.2-SNAPSHOT + 4.19.2 com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 617664eec97..b8e56b89b82 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index e750b791d3b..d8a058537f2 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index 27d6d026a4d..71600fbee2c 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index fbf8cd21076..0c2d802266e 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index 
498d5dc603a..fc8d3ac0f8e 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index df1e2b2613b..29c1aa42001 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.19.2-SNAPSHOT + 4.19.2 java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index ad581bc0f98..2da3da4024f 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 8360c8a211f..bf122a19e35 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index 5368c24c2b6..baa2e42c539 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index a21254da908..5076a146901 100644 --- a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index 08211d04c2b..c5eeee518da 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 ../../ java-driver-metrics-micrometer diff --git 
a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 95c1ca9bb42..1c6359d2cdd 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index abb54e159f6..2fb5f3cb27f 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index 4e3d7006169..c37285e5f7c 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1055,7 +1055,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - HEAD + 4.19.2 diff --git a/query-builder/pom.xml b/query-builder/pom.xml index e1db8485799..a62c800cbd3 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 1a73cde23cd..0f24f638047 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2-SNAPSHOT + 4.19.2 java-driver-test-infra bundle From 62eade21bfeb16a12ce71013fbaebf1c19b5ae96 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 13 Nov 2025 12:02:11 -0600 Subject: [PATCH 975/979] [maven-release-plugin] prepare for next development iteration --- bom/pom.xml | 20 ++++++++++---------- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- distribution-source/pom.xml | 2 +- distribution-tests/pom.xml | 2 +- distribution/pom.xml | 2 +- examples/pom.xml | 2 +- guava-shaded/pom.xml | 2 +- 
integration-tests/pom.xml | 2 +- mapper-processor/pom.xml | 2 +- mapper-runtime/pom.xml | 2 +- metrics/micrometer/pom.xml | 2 +- metrics/microprofile/pom.xml | 2 +- osgi-tests/pom.xml | 2 +- pom.xml | 4 ++-- query-builder/pom.xml | 2 +- test-infra/pom.xml | 2 +- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/bom/pom.xml b/bom/pom.xml index 2e6e476d02a..dd76153a9b1 100644 --- a/bom/pom.xml +++ b/bom/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-bom pom @@ -33,47 +33,47 @@ org.apache.cassandra java-driver-core - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-core-shaded - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-mapper-processor - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-mapper-runtime - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-query-builder - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-guava-shaded - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-test-infra - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-metrics-micrometer - 4.19.2 + 4.19.3-SNAPSHOT org.apache.cassandra java-driver-metrics-microprofile - 4.19.2 + 4.19.3-SNAPSHOT com.datastax.oss diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index b8e56b89b82..3727ab9422d 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-core-shaded Apache Cassandra Java Driver - core with shaded deps diff --git a/core/pom.xml b/core/pom.xml index d8a058537f2..089e15cd933 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-core bundle diff --git a/distribution-source/pom.xml b/distribution-source/pom.xml index 71600fbee2c..4c1f11e53a8 100644 --- a/distribution-source/pom.xml +++ b/distribution-source/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra 
java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-distribution-source pom diff --git a/distribution-tests/pom.xml b/distribution-tests/pom.xml index 0c2d802266e..9cef313f8a5 100644 --- a/distribution-tests/pom.xml +++ b/distribution-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-distribution-tests Apache Cassandra Java Driver - distribution tests diff --git a/distribution/pom.xml b/distribution/pom.xml index fc8d3ac0f8e..20b9afc1bcd 100644 --- a/distribution/pom.xml +++ b/distribution/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-distribution diff --git a/examples/pom.xml b/examples/pom.xml index 29c1aa42001..12e42dfdf53 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -23,7 +23,7 @@ java-driver-parent org.apache.cassandra - 4.19.2 + 4.19.3-SNAPSHOT java-driver-examples Apache Cassandra Java Driver - examples. diff --git a/guava-shaded/pom.xml b/guava-shaded/pom.xml index 2da3da4024f..da2e82e0ab0 100644 --- a/guava-shaded/pom.xml +++ b/guava-shaded/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-guava-shaded Apache Cassandra Java Driver - guava shaded dep diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index bf122a19e35..34cb3ef7063 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-integration-tests jar diff --git a/mapper-processor/pom.xml b/mapper-processor/pom.xml index baa2e42c539..04d8c98c4f0 100644 --- a/mapper-processor/pom.xml +++ b/mapper-processor/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-mapper-processor Apache Cassandra Java Driver - object mapper processor diff --git a/mapper-runtime/pom.xml b/mapper-runtime/pom.xml index 5076a146901..57fbd5d3432 100644 --- 
a/mapper-runtime/pom.xml +++ b/mapper-runtime/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-mapper-runtime bundle diff --git a/metrics/micrometer/pom.xml b/metrics/micrometer/pom.xml index c5eeee518da..37ba8556a53 100644 --- a/metrics/micrometer/pom.xml +++ b/metrics/micrometer/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT ../../ java-driver-metrics-micrometer diff --git a/metrics/microprofile/pom.xml b/metrics/microprofile/pom.xml index 1c6359d2cdd..9893711d340 100644 --- a/metrics/microprofile/pom.xml +++ b/metrics/microprofile/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT ../../ java-driver-metrics-microprofile diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index 2fb5f3cb27f..bd3a6380d6b 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-osgi-tests jar diff --git a/pom.xml b/pom.xml index c37285e5f7c..6834cdd1882 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT pom Apache Cassandra Java Driver https://github.com/datastax/java-driver @@ -1055,7 +1055,7 @@ limitations under the License.]]> scm:git:git@github.com:datastax/java-driver.git scm:git:git@github.com:datastax/java-driver.git https://github.com/datastax/java-driver - 4.19.2 + HEAD diff --git a/query-builder/pom.xml b/query-builder/pom.xml index a62c800cbd3..2bfe1bee8f5 100644 --- a/query-builder/pom.xml +++ b/query-builder/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT java-driver-query-builder bundle diff --git a/test-infra/pom.xml b/test-infra/pom.xml index 0f24f638047..5bf2d07f652 100644 --- a/test-infra/pom.xml +++ b/test-infra/pom.xml @@ -23,7 +23,7 @@ org.apache.cassandra java-driver-parent - 4.19.2 + 4.19.3-SNAPSHOT 
java-driver-test-infra bundle From a7da99556b6c202eecc6d5cb1c6371492729ab49 Mon Sep 17 00:00:00 2001 From: absurdfarce Date: Thu, 4 Dec 2025 12:04:26 -0600 Subject: [PATCH 976/979] Removing interface to Travis CI Patch by Bret McGuire; reviewed by Andy Tolbert and Bret McGuire reference: https://github.com/apache/cassandra-java-driver/pull/2066 --- .travis.yml | 39 --------------------------------------- 1 file changed, 39 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 84d40ce1356..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -language: java -dist: trusty -sudo: false -# see https://sormuras.github.io/blog/2018-03-20-jdk-matrix.html -matrix: - include: - # 8 - - env: JDK='OpenJDK 8' - jdk: openjdk8 - # 11 - - env: JDK='OpenJDK 11' - # switch to JDK 11 before running tests - before_script: . 
$TRAVIS_BUILD_DIR/ci/install-jdk.sh -F 11 -L GPL -before_install: - # Require JDK8 for compiling - - jdk_switcher use openjdk8 - - ./install-snapshots.sh -install: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V -script: mvn test -Djacoco.skip=true -Dmaven.test.failure.ignore=true -Dmaven.javadoc.skip=true -B -V -cache: - directories: - - $HOME/.m2 From ded2985f14e9774cb46dd0104b1f045613bc0279 Mon Sep 17 00:00:00 2001 From: "April I. Murphy" <36110273+aimurphy@users.noreply.github.com> Date: Thu, 20 Nov 2025 08:21:36 -0800 Subject: [PATCH 977/979] Replace outdated link in README patch by April Murphy; reviewed by Bret McGuire and Lukasz Antoniak reference: https://github.com/apache/cassandra-java-driver/pull/2064 --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.md b/README.md index 0f6c2bb5a6f..5994a4be8a1 100644 --- a/README.md +++ b/README.md @@ -67,10 +67,6 @@ remain unchanged, and the new API will look very familiar to 2.x and 3.x users. See the [upgrade guide](upgrade_guide/) for details. -## Error Handling - -See the [Cassandra error handling done right blog](https://www.datastax.com/blog/cassandra-error-handling-done-right) for error handling with the Java Driver for Apache Cassandra™. - ## Useful links * [Manual](manual/) From e762df872b7ca02b4cd5cf780bd96f77815c9646 Mon Sep 17 00:00:00 2001 From: Lukasz Antoniak Date: Fri, 19 Dec 2025 13:13:18 +0100 Subject: [PATCH 978/979] ninja-fix: Remove ASF donation message --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 5994a4be8a1..d8ef01d0964 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,5 @@ # Java Driver for Apache Cassandra® -:warning: The java-driver has recently been donated by Datastax to The Apache Software Foundation and the Apache Cassandra project. Bear with us as we move assets and coordinates. 
- [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.apache.cassandra/java-driver-core/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.apache.cassandra/java-driver-core) From 595cb29912dc8b55663cc13bafe3f17dc4f91ce6 Mon Sep 17 00:00:00 2001 From: Abe Ratnofsky Date: Mon, 22 Dec 2025 22:20:20 -0800 Subject: [PATCH 979/979] Update LZ4 and Netty dependencies for CVE response The primary goal here is to address CVE-2025-12183. Netty includes a dependency on vulnerable versions of lz4-java, so update to a fixed version of Netty as well. On the C* server side, we opted to move to the new community fork of lz4-java, so match that decision here (CASSANDRA-21052). patch by Abe Ratnofsky; reviewed by Francisco Guerrero for CASSJAVA-113 --- NOTICE_binary.txt | 2 +- core-shaded/pom.xml | 2 +- core/pom.xml | 2 +- core/src/main/resources/reference.conf | 2 +- .../internal/core/insights/PlatformInfoFinderTest.java | 2 +- core/src/test/resources/insights/test-dependencies.txt | 2 +- integration-tests/pom.xml | 2 +- manual/core/compression/README.md | 6 +++--- manual/core/integration/README.md | 2 +- osgi-tests/pom.xml | 2 +- .../oss/driver/internal/osgi/support/BundleOptions.java | 2 +- pom.xml | 6 +++--- 12 files changed, 16 insertions(+), 16 deletions(-) diff --git a/NOTICE_binary.txt b/NOTICE_binary.txt index c60d8ceb245..f6f11c298f6 100644 --- a/NOTICE_binary.txt +++ b/NOTICE_binary.txt @@ -100,7 +100,7 @@ and decompression library written by Adrien Grand. 
It can be obtained at: * LICENSE: * license/LICENSE.lz4.txt (Apache License 2.0) * HOMEPAGE: - * https://github.com/jpountz/lz4-java + * https://github.com/yawkat/lz4-java This product optionally depends on 'lzma-java', a LZMA Java compression and decompression library, which can be obtained at: diff --git a/core-shaded/pom.xml b/core-shaded/pom.xml index 3727ab9422d..84cb4b15398 100644 --- a/core-shaded/pom.xml +++ b/core-shaded/pom.xml @@ -74,7 +74,7 @@ true - org.lz4 + at.yawk.lz4 lz4-java true diff --git a/core/pom.xml b/core/pom.xml index 089e15cd933..8758d20d78a 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -73,7 +73,7 @@ true - org.lz4 + at.yawk.lz4 lz4-java true diff --git a/core/src/main/resources/reference.conf b/core/src/main/resources/reference.conf index 741b1d97654..4ae83362e29 100644 --- a/core/src/main/resources/reference.conf +++ b/core/src/main/resources/reference.conf @@ -1114,7 +1114,7 @@ datastax-java-driver { # The name of the algorithm used to compress protocol frames. # # The possible values are: - # - lz4: requires net.jpountz.lz4:lz4 in the classpath. + # - lz4: requires at.yawk.lz4:lz4-java in the classpath. # - snappy: requires org.xerial.snappy:snappy-java in the classpath. # - the string "none" to indicate no compression (this is functionally equivalent to omitting # the option). 
diff --git a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java index 80294ea6b7d..2a098363d46 100644 --- a/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java +++ b/core/src/test/java/com/datastax/dse/driver/internal/core/insights/PlatformInfoFinderTest.java @@ -77,7 +77,7 @@ public void should_find_dependencies_from_file() { "com.fasterxml.jackson.core:jackson-annotations", withUnverifiedRuntimeVersion("2.8.11")); expected.put("com.fasterxml.jackson.core:jackson-core", withUnverifiedRuntimeVersion("2.8.11")); expected.put("io.netty:netty-handler", withUnverifiedRuntimeVersion("4.0.56.Final")); - expected.put("org.lz4:lz4-java", withUnverifiedRuntimeVersionOptional("1.4.1")); + expected.put("at.yawk.lz4:lz4-java", withUnverifiedRuntimeVersionOptional("1.10.1")); expected.put("org.hdrhistogram:HdrHistogram", withUnverifiedRuntimeVersionOptional("2.1.10")); expected.put("com.github.jnr:jffi", withUnverifiedRuntimeVersion("1.2.16")); expected.put("io.netty:netty-buffer", withUnverifiedRuntimeVersion("4.0.56.Final")); diff --git a/core/src/test/resources/insights/test-dependencies.txt b/core/src/test/resources/insights/test-dependencies.txt index 6cabe8b257d..e9186a35e6b 100644 --- a/core/src/test/resources/insights/test-dependencies.txt +++ b/core/src/test/resources/insights/test-dependencies.txt @@ -17,7 +17,7 @@ The following files have been resolved: com.fasterxml.jackson.core:jackson-core:jar:2.8.11:compile org.hdrhistogram:HdrHistogram:jar:2.1.10:compile (optional) org.ow2.asm:asm-tree:jar:5.0.3:compile - org.lz4:lz4-java:jar:1.4.1:compile (optional) + at.yawk.lz4:lz4-java:jar:1.10.1:compile (optional) io.netty:netty-transport:jar:4.0.56.Final:compile io.dropwizard.metrics:metrics-core:jar:3.2.2:compile io.netty:netty-common:jar:4.0.56.Final:compile diff --git 
a/integration-tests/pom.xml b/integration-tests/pom.xml index 34cb3ef7063..e302e12077f 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -129,7 +129,7 @@ test - org.lz4 + at.yawk.lz4 lz4-java test diff --git a/manual/core/compression/README.md b/manual/core/compression/README.md index 9e84fde917d..9f7ae3c4854 100644 --- a/manual/core/compression/README.md +++ b/manual/core/compression/README.md @@ -46,7 +46,7 @@ datastax-java-driver { Compression must be set before opening a session, it cannot be changed at runtime. -Two algorithms are supported out of the box: [LZ4](https://github.com/jpountz/lz4-java) and +Two algorithms are supported out of the box: [LZ4](https://github.com/yawkat/lz4-java) and [Snappy](http://google.github.io/snappy/). The LZ4 implementation is a good first choice; it offers fallback implementations in case native libraries fail to load and [benchmarks](http://java-performance.info/performance-general-compression/) suggest that it offers @@ -63,9 +63,9 @@ Dependency: ```xml - org.lz4 + at.yawk.lz4 lz4-java - 1.4.1 + 1.10.1 ``` diff --git a/manual/core/integration/README.md b/manual/core/integration/README.md index f2a96160bce..e2c7bc218ee 100644 --- a/manual/core/integration/README.md +++ b/manual/core/integration/README.md @@ -416,7 +416,7 @@ are not available on your platform, you can exclude the following dependency: #### Compression libraries -The driver supports compression with either [LZ4](https://github.com/jpountz/lz4-java) or +The driver supports compression with either [LZ4](https://github.com/yawkat/lz4-java) or [Snappy](http://google.github.io/snappy/). 
These dependencies are optional; you have to add them explicitly in your application in order to diff --git a/osgi-tests/pom.xml b/osgi-tests/pom.xml index bd3a6380d6b..c2cc4d830f1 100644 --- a/osgi-tests/pom.xml +++ b/osgi-tests/pom.xml @@ -79,7 +79,7 @@ snappy-java - org.lz4 + at.yawk.lz4 lz4-java diff --git a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java index 3e6171ca530..378b515aa65 100644 --- a/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java +++ b/osgi-tests/src/test/java/com/datastax/oss/driver/internal/osgi/support/BundleOptions.java @@ -117,7 +117,7 @@ public static CompositeOption jacksonBundles() { public static CompositeOption lz4Bundle() { return () -> options( - mavenBundle("org.lz4", "lz4-java").versionAsInProject(), + mavenBundle("at.yawk.lz4", "lz4-java").versionAsInProject(), systemProperty("cassandra.compression").value("LZ4")); } diff --git a/pom.xml b/pom.xml index 6834cdd1882..eb83459cfb4 100644 --- a/pom.xml +++ b/pom.xml @@ -57,7 +57,7 @@ 2.1.12 4.1.18 - 4.1.119.Final + 4.1.130.Final 1.2.1 1.1.10.1 - 1.7.1 + 1.10.1 3.19.0 1.3 @@ -137,7 +137,7 @@ ${snappy.version} - org.lz4 + at.yawk.lz4 lz4-java ${lz4.version}